author    Michael Butler <butlermichael@google.com>  2018-04-17 19:26:58 -0700
committer Michael Butler <butlermichael@google.com>  2018-04-21 00:22:41 +0000
commit    3c2272e1cf69ef14d1a5dce34541bfd492272f69 (patch)
tree      af4a3ed9c6b412d6b3b14f071036b33f4678d807
parent    56a09d4da981c00ce9709abad6c5d34325e18ea6 (diff)
download  ml-3c2272e1cf69ef14d1a5dce34541bfd492272f69.tar.gz
Create _relaxed tests for newly added tests -- generated tests

New tests were added to the V1_0 and V1_1 specs, but they did not have
"_relaxed" counterparts in V1_1. This follow-up CL generates the
corresponding CTS and VTS tests.

Bug: 76434855
Test: mma
Test: NeuralNetworksTest_static
Test: VtsHalNeuralnetworksV1_0TargetTest
Test: VtsHalNeuralnetworksV1_1TargetTest
Change-Id: Id3703db7091583de19e22b82eab75e88da785428
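For context: each of the new *_relaxed.model.cpp files listed in the diffstat below is expected to mirror its non-relaxed counterpart and differ only in requesting relaxed FP32 arithmetic. The sketch below is illustrative only (the operand type, shape, and the RELU operation are placeholders, not content of this CL), and it assumes the test wrapper API of this era, in particular relaxComputationFloat32toFloat16():

// Hedged sketch of a generated "_relaxed" model constructor (not from this CL;
// operand shapes and the RELU operation are placeholders for illustration).
void CreateModel(Model *model) {
  OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 1});
  // Phase 1, operands
  auto op1 = model->addOperand(&type0);
  auto op2 = model->addOperand(&type0);
  // Phase 2, operations
  model->addOperation(ANEURALNETWORKS_RELU, {op1}, {op2});
  // Phase 3, inputs and outputs
  model->identifyInputsAndOutputs({op1}, {op2});
  // Phase 4, set relaxed execution: this single call is what a "_relaxed"
  // variant adds over its non-relaxed twin.
  model->relaxComputationFloat32toFloat16(true);
  assert(model->isValid());
}

inline bool is_ignored(int) {
  return false;
}

The VTS counterparts under vts_models/ build the equivalent V1_1 HIDL Model structure directly, with its relaxComputationFloat32toFloat16 field set to true.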
-rw-r--r--  nn/runtime/test/generated/all_generated_V1_1_vts_tests.cpp | 735
-rw-r--r--  nn/runtime/test/generated/all_generated_tests.cpp | 686
-rw-r--r--  nn/runtime/test/generated/examples/avg_pool_float_5_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/batch_to_space_float_1_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/batch_to_space_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/conv_float_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/depthwise_conv2d_float_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/depthwise_conv2d_float_large_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/depthwise_conv2d_float_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/depthwise_conv2d_float_weights_as_inputs_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/dequantize_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/div_broadcast_float_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/div_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/fully_connected_float_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/fully_connected_float_4d_simple_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/fully_connected_float_large_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/fully_connected_float_large_weights_as_inputs_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/l2_normalization_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/l2_pool_float_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/l2_pool_float_large_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/max_pool_float_4_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/mean_float_1_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/mean_float_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/mean_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/pad_float_1_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/pad_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/relu_float_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/resize_bilinear_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/space_to_batch_float_1_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/space_to_batch_float_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/space_to_batch_float_3_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/space_to_batch_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/squeeze_float_1_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/squeeze_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_10_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_11_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_1_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_2_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_3_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_4_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_5_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_6_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_7_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_8_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_float_9_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/strided_slice_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/sub_broadcast_float_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/sub_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/transpose_float_1_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/examples/transpose_relaxed.example.cpp | 22
-rw-r--r--  nn/runtime/test/generated/models/avg_pool_float_5_relaxed.model.cpp | 32
-rw-r--r--  nn/runtime/test/generated/models/batch_to_space_float_1_relaxed.model.cpp | 26
-rw-r--r--  nn/runtime/test/generated/models/batch_to_space_relaxed.model.cpp | 26
-rw-r--r--  nn/runtime/test/generated/models/conv_float_2_relaxed.model.cpp | 39
-rw-r--r--  nn/runtime/test/generated/models/depthwise_conv2d_float_2_relaxed.model.cpp | 43
-rw-r--r--  nn/runtime/test/generated/models/depthwise_conv2d_float_large_2_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.model.cpp | 39
-rw-r--r--  nn/runtime/test/generated/models/depthwise_conv2d_float_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/depthwise_conv2d_float_weights_as_inputs_relaxed.model.cpp | 38
-rw-r--r--  nn/runtime/test/generated/models/dequantize_relaxed.model.cpp | 22
-rw-r--r--  nn/runtime/test/generated/models/div_broadcast_float_relaxed.model.cpp | 27
-rw-r--r--  nn/runtime/test/generated/models/div_relaxed.model.cpp | 26
-rw-r--r--  nn/runtime/test/generated/models/fully_connected_float_2_relaxed.model.cpp | 34
-rw-r--r--  nn/runtime/test/generated/models/fully_connected_float_4d_simple.model.cpp | 2
-rw-r--r--  nn/runtime/test/generated/models/fully_connected_float_4d_simple_relaxed.model.cpp | 34
-rw-r--r--  nn/runtime/test/generated/models/fully_connected_float_large_relaxed.model.cpp | 33
-rw-r--r--  nn/runtime/test/generated/models/fully_connected_float_large_weights_as_inputs_relaxed.model.cpp | 29
-rw-r--r--  nn/runtime/test/generated/models/l2_normalization_2_relaxed.model.cpp | 21
-rw-r--r--  nn/runtime/test/generated/models/l2_pool_float_2_relaxed.model.cpp | 32
-rw-r--r--  nn/runtime/test/generated/models/l2_pool_float_large_relaxed.model.cpp | 41
-rw-r--r--  nn/runtime/test/generated/models/max_pool_float_4_relaxed.model.cpp | 32
-rw-r--r--  nn/runtime/test/generated/models/mean_float_1_relaxed.model.cpp | 30
-rw-r--r--  nn/runtime/test/generated/models/mean_float_2_relaxed.model.cpp | 30
-rw-r--r--  nn/runtime/test/generated/models/mean_relaxed.model.cpp | 30
-rw-r--r--  nn/runtime/test/generated/models/mobilenet_224_gender_basic_fixed.model.cpp | 2
-rw-r--r--  nn/runtime/test/generated/models/mobilenet_quantized.model.cpp | 2
-rw-r--r--  nn/runtime/test/generated/models/pad_float_1_relaxed.model.cpp | 26
-rw-r--r--  nn/runtime/test/generated/models/pad_relaxed.model.cpp | 26
-rw-r--r--  nn/runtime/test/generated/models/relu_float_2_relaxed.model.cpp | 21
-rw-r--r--  nn/runtime/test/generated/models/resize_bilinear_2_relaxed.model.cpp | 29
-rw-r--r--  nn/runtime/test/generated/models/space_to_batch_float_1_relaxed.model.cpp | 30
-rw-r--r--  nn/runtime/test/generated/models/space_to_batch_float_2_relaxed.model.cpp | 30
-rw-r--r--  nn/runtime/test/generated/models/space_to_batch_float_3_relaxed.model.cpp | 30
-rw-r--r--  nn/runtime/test/generated/models/space_to_batch_relaxed.model.cpp | 30
-rw-r--r--  nn/runtime/test/generated/models/squeeze_float_1_relaxed.model.cpp | 26
-rw-r--r--  nn/runtime/test/generated/models/squeeze_relaxed.model.cpp | 26
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_10_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_11_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_1_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_2_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_3_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_4_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_5_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_6_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_7_relaxed.model.cpp | 41
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_8_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_float_9_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/strided_slice_relaxed.model.cpp | 42
-rw-r--r--  nn/runtime/test/generated/models/sub_broadcast_float_relaxed.model.cpp | 27
-rw-r--r--  nn/runtime/test/generated/models/sub_relaxed.model.cpp | 26
-rw-r--r--  nn/runtime/test/generated/models/transpose_float_1_relaxed.model.cpp | 26
-rw-r--r--  nn/runtime/test/generated/models/transpose_relaxed.model.cpp | 25
-rw-r--r--  nn/runtime/test/generated/vts_models/avg_pool_float_5_relaxed.model.cpp | 82
-rw-r--r--  nn/runtime/test/generated/vts_models/batch_to_space_float_1_relaxed.model.cpp | 64
-rw-r--r--  nn/runtime/test/generated/vts_models/batch_to_space_relaxed.model.cpp | 64
-rw-r--r--  nn/runtime/test/generated/vts_models/conv_float_2_relaxed.model.cpp | 100
-rw-r--r--  nn/runtime/test/generated/vts_models/depthwise_conv2d_float_2_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/depthwise_conv2d_float_large_2_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/depthwise_conv2d_float_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/depthwise_conv2d_float_weights_as_inputs_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/dequantize_relaxed.model.cpp | 53
-rw-r--r--  nn/runtime/test/generated/vts_models/div_broadcast_float_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/div_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/fully_connected_float_2_relaxed.model.cpp | 82
-rw-r--r--  nn/runtime/test/generated/vts_models/fully_connected_float_4d_simple.model.cpp | 1
-rw-r--r--  nn/runtime/test/generated/vts_models/fully_connected_float_4d_simple_relaxed.model.cpp | 82
-rw-r--r--  nn/runtime/test/generated/vts_models/fully_connected_float_large_relaxed.model.cpp | 82
-rw-r--r--  nn/runtime/test/generated/vts_models/fully_connected_float_large_weights_as_inputs_relaxed.model.cpp | 82
-rw-r--r--  nn/runtime/test/generated/vts_models/l2_normalization_2_relaxed.model.cpp | 53
-rw-r--r--  nn/runtime/test/generated/vts_models/l2_pool_float_2_relaxed.model.cpp | 82
-rw-r--r--  nn/runtime/test/generated/vts_models/l2_pool_float_large_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/max_pool_float_4_relaxed.model.cpp | 82
-rw-r--r--  nn/runtime/test/generated/vts_models/mean_float_1_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/mean_float_2_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/mean_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/pad_float_1_relaxed.model.cpp | 64
-rw-r--r--  nn/runtime/test/generated/vts_models/pad_relaxed.model.cpp | 64
-rw-r--r--  nn/runtime/test/generated/vts_models/relu_float_2_relaxed.model.cpp | 53
-rw-r--r--  nn/runtime/test/generated/vts_models/resize_bilinear_2_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/space_to_batch_float_1_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/space_to_batch_float_2_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/space_to_batch_float_3_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/space_to_batch_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/squeeze_float_1_relaxed.model.cpp | 64
-rw-r--r--  nn/runtime/test/generated/vts_models/squeeze_relaxed.model.cpp | 64
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_10_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_11_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_1_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_2_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_3_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_4_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_5_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_6_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_7_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_8_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_float_9_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/strided_slice_relaxed.model.cpp | 109
-rw-r--r--  nn/runtime/test/generated/vts_models/sub_broadcast_float_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/sub_relaxed.model.cpp | 73
-rw-r--r--  nn/runtime/test/generated/vts_models/transpose_float_1_relaxed.model.cpp | 64
-rw-r--r--  nn/runtime/test/generated/vts_models/transpose_relaxed.model.cpp | 64
153 files changed, 8313 insertions(+), 5 deletions(-)
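Each roughly 22-line *.example.cpp above carries the input/output data for one MixedTypedExample and is textually #included into the std::vector<MixedTypedExample> initializers shown in the diffs below. The fragment that follows is a hedged sketch of that layout; the operand index and values are made-up placeholders, and the three-map MixedTyped structure (FLOAT32, INT32, and QUANT8_ASYMM operands) is assumed from the TestHarness header of this era:

// Hedged sketch of a generated *_relaxed.example.cpp body (values are placeholders).
// This is an initializer fragment, not a standalone translation unit.
// Begin of an example
{
//Input(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
  // int -> FLOAT32 map
  {{0, {-1.0f, 0.5f, 2.0f, 3.0f}}},
  // int -> INT32 map
  {},
  // int -> QUANT8_ASYMM map
  {}
},
//Output(s)
{ // See tools/test_generator/include/TestHarness.h:MixedTyped
  // int -> FLOAT32 map
  {{0, {0.0f, 0.5f, 2.0f, 3.0f}}},
  // int -> INT32 map
  {},
  // int -> QUANT8_ASYMM map
  {}
}
}, // End of an example

Because the data is a plain initializer, a relaxed example file is typically identical to its non-relaxed counterpart; only the model constructors differ.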
diff --git a/nn/runtime/test/generated/all_generated_V1_1_vts_tests.cpp b/nn/runtime/test/generated/all_generated_V1_1_vts_tests.cpp
index adbbf847f..d84b46052 100644
--- a/nn/runtime/test/generated/all_generated_V1_1_vts_tests.cpp
+++ b/nn/runtime/test/generated/all_generated_V1_1_vts_tests.cpp
@@ -76,6 +76,21 @@ TEST_F(NeuralnetworksHidlTest, avg_pool_float_4_relaxed) {
avg_pool_float_4_relaxed::examples);
}
+namespace avg_pool_float_5_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated avg_pool_float_5_relaxed test
+#include "examples/avg_pool_float_5_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/avg_pool_float_5_relaxed.model.cpp"
+} // namespace avg_pool_float_5_relaxed
+TEST_F(NeuralnetworksHidlTest, avg_pool_float_5_relaxed) {
+ generated_tests::Execute(device,
+ avg_pool_float_5_relaxed::createTestModel,
+ avg_pool_float_5_relaxed::is_ignored,
+ avg_pool_float_5_relaxed::examples);
+}
+
namespace batch_to_space_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated batch_to_space_float_1 test
@@ -91,6 +106,21 @@ TEST_F(NeuralnetworksHidlTest, batch_to_space_float_1) {
batch_to_space_float_1::examples);
}
+namespace batch_to_space_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated batch_to_space_float_1_relaxed test
+#include "examples/batch_to_space_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/batch_to_space_float_1_relaxed.model.cpp"
+} // namespace batch_to_space_float_1_relaxed
+TEST_F(NeuralnetworksHidlTest, batch_to_space_float_1_relaxed) {
+ generated_tests::Execute(device,
+ batch_to_space_float_1_relaxed::createTestModel,
+ batch_to_space_float_1_relaxed::is_ignored,
+ batch_to_space_float_1_relaxed::examples);
+}
+
namespace batch_to_space {
std::vector<MixedTypedExample> examples = {
// Generated batch_to_space test
@@ -121,6 +151,21 @@ TEST_F(NeuralnetworksHidlTest, batch_to_space_quant8_1) {
batch_to_space_quant8_1::examples);
}
+namespace batch_to_space_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated batch_to_space_relaxed test
+#include "examples/batch_to_space_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/batch_to_space_relaxed.model.cpp"
+} // namespace batch_to_space_relaxed
+TEST_F(NeuralnetworksHidlTest, batch_to_space_relaxed) {
+ generated_tests::Execute(device,
+ batch_to_space_relaxed::createTestModel,
+ batch_to_space_relaxed::is_ignored,
+ batch_to_space_relaxed::examples);
+}
+
namespace concat_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated concat_float_1_relaxed test
@@ -226,6 +271,21 @@ TEST_F(NeuralnetworksHidlTest, conv_3_h3_w2_VALID_relaxed) {
conv_3_h3_w2_VALID_relaxed::examples);
}
+namespace conv_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated conv_float_2_relaxed test
+#include "examples/conv_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/conv_float_2_relaxed.model.cpp"
+} // namespace conv_float_2_relaxed
+TEST_F(NeuralnetworksHidlTest, conv_float_2_relaxed) {
+ generated_tests::Execute(device,
+ conv_float_2_relaxed::createTestModel,
+ conv_float_2_relaxed::is_ignored,
+ conv_float_2_relaxed::examples);
+}
+
namespace conv_float_channels_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_float_channels_relaxed test
@@ -361,6 +421,51 @@ TEST_F(NeuralnetworksHidlTest, depth_to_space_float_3_relaxed) {
depth_to_space_float_3_relaxed::examples);
}
+namespace depthwise_conv2d_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated depthwise_conv2d_float_2_relaxed test
+#include "examples/depthwise_conv2d_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/depthwise_conv2d_float_2_relaxed.model.cpp"
+} // namespace depthwise_conv2d_float_2_relaxed
+TEST_F(NeuralnetworksHidlTest, depthwise_conv2d_float_2_relaxed) {
+ generated_tests::Execute(device,
+ depthwise_conv2d_float_2_relaxed::createTestModel,
+ depthwise_conv2d_float_2_relaxed::is_ignored,
+ depthwise_conv2d_float_2_relaxed::examples);
+}
+
+namespace depthwise_conv2d_float_large_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated depthwise_conv2d_float_large_2_relaxed test
+#include "examples/depthwise_conv2d_float_large_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/depthwise_conv2d_float_large_2_relaxed.model.cpp"
+} // namespace depthwise_conv2d_float_large_2_relaxed
+TEST_F(NeuralnetworksHidlTest, depthwise_conv2d_float_large_2_relaxed) {
+ generated_tests::Execute(device,
+ depthwise_conv2d_float_large_2_relaxed::createTestModel,
+ depthwise_conv2d_float_large_2_relaxed::is_ignored,
+ depthwise_conv2d_float_large_2_relaxed::examples);
+}
+
+namespace depthwise_conv2d_float_large_2_weights_as_inputs_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated depthwise_conv2d_float_large_2_weights_as_inputs_relaxed test
+#include "examples/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.model.cpp"
+} // namespace depthwise_conv2d_float_large_2_weights_as_inputs_relaxed
+TEST_F(NeuralnetworksHidlTest, depthwise_conv2d_float_large_2_weights_as_inputs_relaxed) {
+ generated_tests::Execute(device,
+ depthwise_conv2d_float_large_2_weights_as_inputs_relaxed::createTestModel,
+ depthwise_conv2d_float_large_2_weights_as_inputs_relaxed::is_ignored,
+ depthwise_conv2d_float_large_2_weights_as_inputs_relaxed::examples);
+}
+
namespace depthwise_conv2d_float_large_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated depthwise_conv2d_float_large_relaxed test
@@ -391,6 +496,36 @@ TEST_F(NeuralnetworksHidlTest, depthwise_conv2d_float_large_weights_as_inputs_re
depthwise_conv2d_float_large_weights_as_inputs_relaxed::examples);
}
+namespace depthwise_conv2d_float_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated depthwise_conv2d_float_relaxed test
+#include "examples/depthwise_conv2d_float_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/depthwise_conv2d_float_relaxed.model.cpp"
+} // namespace depthwise_conv2d_float_relaxed
+TEST_F(NeuralnetworksHidlTest, depthwise_conv2d_float_relaxed) {
+ generated_tests::Execute(device,
+ depthwise_conv2d_float_relaxed::createTestModel,
+ depthwise_conv2d_float_relaxed::is_ignored,
+ depthwise_conv2d_float_relaxed::examples);
+}
+
+namespace depthwise_conv2d_float_weights_as_inputs_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated depthwise_conv2d_float_weights_as_inputs_relaxed test
+#include "examples/depthwise_conv2d_float_weights_as_inputs_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/depthwise_conv2d_float_weights_as_inputs_relaxed.model.cpp"
+} // namespace depthwise_conv2d_float_weights_as_inputs_relaxed
+TEST_F(NeuralnetworksHidlTest, depthwise_conv2d_float_weights_as_inputs_relaxed) {
+ generated_tests::Execute(device,
+ depthwise_conv2d_float_weights_as_inputs_relaxed::createTestModel,
+ depthwise_conv2d_float_weights_as_inputs_relaxed::is_ignored,
+ depthwise_conv2d_float_weights_as_inputs_relaxed::examples);
+}
+
namespace depthwise_conv_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated depthwise_conv_relaxed test
@@ -406,6 +541,21 @@ TEST_F(NeuralnetworksHidlTest, depthwise_conv_relaxed) {
depthwise_conv_relaxed::examples);
}
+namespace dequantize_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated dequantize_relaxed test
+#include "examples/dequantize_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/dequantize_relaxed.model.cpp"
+} // namespace dequantize_relaxed
+TEST_F(NeuralnetworksHidlTest, dequantize_relaxed) {
+ generated_tests::Execute(device,
+ dequantize_relaxed::createTestModel,
+ dequantize_relaxed::is_ignored,
+ dequantize_relaxed::examples);
+}
+
namespace div_broadcast_float {
std::vector<MixedTypedExample> examples = {
// Generated div_broadcast_float test
@@ -421,6 +571,21 @@ TEST_F(NeuralnetworksHidlTest, div_broadcast_float) {
div_broadcast_float::examples);
}
+namespace div_broadcast_float_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated div_broadcast_float_relaxed test
+#include "examples/div_broadcast_float_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/div_broadcast_float_relaxed.model.cpp"
+} // namespace div_broadcast_float_relaxed
+TEST_F(NeuralnetworksHidlTest, div_broadcast_float_relaxed) {
+ generated_tests::Execute(device,
+ div_broadcast_float_relaxed::createTestModel,
+ div_broadcast_float_relaxed::is_ignored,
+ div_broadcast_float_relaxed::examples);
+}
+
namespace div {
std::vector<MixedTypedExample> examples = {
// Generated div test
@@ -436,6 +601,21 @@ TEST_F(NeuralnetworksHidlTest, div) {
div::examples);
}
+namespace div_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated div_relaxed test
+#include "examples/div_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/div_relaxed.model.cpp"
+} // namespace div_relaxed
+TEST_F(NeuralnetworksHidlTest, div_relaxed) {
+ generated_tests::Execute(device,
+ div_relaxed::createTestModel,
+ div_relaxed::is_ignored,
+ div_relaxed::examples);
+}
+
namespace embedding_lookup_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated embedding_lookup_relaxed test
@@ -466,6 +646,21 @@ TEST_F(NeuralnetworksHidlTest, floor_relaxed) {
floor_relaxed::examples);
}
+namespace fully_connected_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated fully_connected_float_2_relaxed test
+#include "examples/fully_connected_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/fully_connected_float_2_relaxed.model.cpp"
+} // namespace fully_connected_float_2_relaxed
+TEST_F(NeuralnetworksHidlTest, fully_connected_float_2_relaxed) {
+ generated_tests::Execute(device,
+ fully_connected_float_2_relaxed::createTestModel,
+ fully_connected_float_2_relaxed::is_ignored,
+ fully_connected_float_2_relaxed::examples);
+}
+
namespace fully_connected_float_4d_simple {
std::vector<MixedTypedExample> examples = {
// Generated fully_connected_float_4d_simple test
@@ -481,6 +676,51 @@ TEST_F(NeuralnetworksHidlTest, fully_connected_float_4d_simple) {
fully_connected_float_4d_simple::examples);
}
+namespace fully_connected_float_4d_simple_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated fully_connected_float_4d_simple_relaxed test
+#include "examples/fully_connected_float_4d_simple_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/fully_connected_float_4d_simple_relaxed.model.cpp"
+} // namespace fully_connected_float_4d_simple_relaxed
+TEST_F(NeuralnetworksHidlTest, fully_connected_float_4d_simple_relaxed) {
+ generated_tests::Execute(device,
+ fully_connected_float_4d_simple_relaxed::createTestModel,
+ fully_connected_float_4d_simple_relaxed::is_ignored,
+ fully_connected_float_4d_simple_relaxed::examples);
+}
+
+namespace fully_connected_float_large_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated fully_connected_float_large_relaxed test
+#include "examples/fully_connected_float_large_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/fully_connected_float_large_relaxed.model.cpp"
+} // namespace fully_connected_float_large_relaxed
+TEST_F(NeuralnetworksHidlTest, fully_connected_float_large_relaxed) {
+ generated_tests::Execute(device,
+ fully_connected_float_large_relaxed::createTestModel,
+ fully_connected_float_large_relaxed::is_ignored,
+ fully_connected_float_large_relaxed::examples);
+}
+
+namespace fully_connected_float_large_weights_as_inputs_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated fully_connected_float_large_weights_as_inputs_relaxed test
+#include "examples/fully_connected_float_large_weights_as_inputs_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/fully_connected_float_large_weights_as_inputs_relaxed.model.cpp"
+} // namespace fully_connected_float_large_weights_as_inputs_relaxed
+TEST_F(NeuralnetworksHidlTest, fully_connected_float_large_weights_as_inputs_relaxed) {
+ generated_tests::Execute(device,
+ fully_connected_float_large_weights_as_inputs_relaxed::createTestModel,
+ fully_connected_float_large_weights_as_inputs_relaxed::is_ignored,
+ fully_connected_float_large_weights_as_inputs_relaxed::examples);
+}
+
namespace fully_connected_float_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated fully_connected_float_relaxed test
@@ -526,6 +766,21 @@ TEST_F(NeuralnetworksHidlTest, hashtable_lookup_float_relaxed) {
hashtable_lookup_float_relaxed::examples);
}
+namespace l2_normalization_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated l2_normalization_2_relaxed test
+#include "examples/l2_normalization_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/l2_normalization_2_relaxed.model.cpp"
+} // namespace l2_normalization_2_relaxed
+TEST_F(NeuralnetworksHidlTest, l2_normalization_2_relaxed) {
+ generated_tests::Execute(device,
+ l2_normalization_2_relaxed::createTestModel,
+ l2_normalization_2_relaxed::is_ignored,
+ l2_normalization_2_relaxed::examples);
+}
+
namespace l2_normalization_large_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated l2_normalization_large_relaxed test
@@ -556,6 +811,36 @@ TEST_F(NeuralnetworksHidlTest, l2_normalization_relaxed) {
l2_normalization_relaxed::examples);
}
+namespace l2_pool_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated l2_pool_float_2_relaxed test
+#include "examples/l2_pool_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/l2_pool_float_2_relaxed.model.cpp"
+} // namespace l2_pool_float_2_relaxed
+TEST_F(NeuralnetworksHidlTest, l2_pool_float_2_relaxed) {
+ generated_tests::Execute(device,
+ l2_pool_float_2_relaxed::createTestModel,
+ l2_pool_float_2_relaxed::is_ignored,
+ l2_pool_float_2_relaxed::examples);
+}
+
+namespace l2_pool_float_large_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated l2_pool_float_large_relaxed test
+#include "examples/l2_pool_float_large_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/l2_pool_float_large_relaxed.model.cpp"
+} // namespace l2_pool_float_large_relaxed
+TEST_F(NeuralnetworksHidlTest, l2_pool_float_large_relaxed) {
+ generated_tests::Execute(device,
+ l2_pool_float_large_relaxed::createTestModel,
+ l2_pool_float_large_relaxed::is_ignored,
+ l2_pool_float_large_relaxed::examples);
+}
+
namespace l2_pool_float_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated l2_pool_float_relaxed test
@@ -901,6 +1186,21 @@ TEST_F(NeuralnetworksHidlTest, max_pool_float_3_relaxed) {
max_pool_float_3_relaxed::examples);
}
+namespace max_pool_float_4_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated max_pool_float_4_relaxed test
+#include "examples/max_pool_float_4_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/max_pool_float_4_relaxed.model.cpp"
+} // namespace max_pool_float_4_relaxed
+TEST_F(NeuralnetworksHidlTest, max_pool_float_4_relaxed) {
+ generated_tests::Execute(device,
+ max_pool_float_4_relaxed::createTestModel,
+ max_pool_float_4_relaxed::is_ignored,
+ max_pool_float_4_relaxed::examples);
+}
+
namespace mean_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated mean_float_1 test
@@ -916,6 +1216,21 @@ TEST_F(NeuralnetworksHidlTest, mean_float_1) {
mean_float_1::examples);
}
+namespace mean_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated mean_float_1_relaxed test
+#include "examples/mean_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/mean_float_1_relaxed.model.cpp"
+} // namespace mean_float_1_relaxed
+TEST_F(NeuralnetworksHidlTest, mean_float_1_relaxed) {
+ generated_tests::Execute(device,
+ mean_float_1_relaxed::createTestModel,
+ mean_float_1_relaxed::is_ignored,
+ mean_float_1_relaxed::examples);
+}
+
namespace mean_float_2 {
std::vector<MixedTypedExample> examples = {
// Generated mean_float_2 test
@@ -931,6 +1246,21 @@ TEST_F(NeuralnetworksHidlTest, mean_float_2) {
mean_float_2::examples);
}
+namespace mean_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated mean_float_2_relaxed test
+#include "examples/mean_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/mean_float_2_relaxed.model.cpp"
+} // namespace mean_float_2_relaxed
+TEST_F(NeuralnetworksHidlTest, mean_float_2_relaxed) {
+ generated_tests::Execute(device,
+ mean_float_2_relaxed::createTestModel,
+ mean_float_2_relaxed::is_ignored,
+ mean_float_2_relaxed::examples);
+}
+
namespace mean {
std::vector<MixedTypedExample> examples = {
// Generated mean test
@@ -976,6 +1306,21 @@ TEST_F(NeuralnetworksHidlTest, mean_quant8_2) {
mean_quant8_2::examples);
}
+namespace mean_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated mean_relaxed test
+#include "examples/mean_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/mean_relaxed.model.cpp"
+} // namespace mean_relaxed
+TEST_F(NeuralnetworksHidlTest, mean_relaxed) {
+ generated_tests::Execute(device,
+ mean_relaxed::createTestModel,
+ mean_relaxed::is_ignored,
+ mean_relaxed::examples);
+}
+
namespace mobilenet_224_gender_basic_fixed_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated mobilenet_224_gender_basic_fixed_relaxed test
@@ -1036,6 +1381,21 @@ TEST_F(NeuralnetworksHidlTest, pad_float_1) {
pad_float_1::examples);
}
+namespace pad_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated pad_float_1_relaxed test
+#include "examples/pad_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/pad_float_1_relaxed.model.cpp"
+} // namespace pad_float_1_relaxed
+TEST_F(NeuralnetworksHidlTest, pad_float_1_relaxed) {
+ generated_tests::Execute(device,
+ pad_float_1_relaxed::createTestModel,
+ pad_float_1_relaxed::is_ignored,
+ pad_float_1_relaxed::examples);
+}
+
namespace pad {
std::vector<MixedTypedExample> examples = {
// Generated pad test
@@ -1051,6 +1411,21 @@ TEST_F(NeuralnetworksHidlTest, pad) {
pad::examples);
}
+namespace pad_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated pad_relaxed test
+#include "examples/pad_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/pad_relaxed.model.cpp"
+} // namespace pad_relaxed
+TEST_F(NeuralnetworksHidlTest, pad_relaxed) {
+ generated_tests::Execute(device,
+ pad_relaxed::createTestModel,
+ pad_relaxed::is_ignored,
+ pad_relaxed::examples);
+}
+
namespace relu1_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated relu1_float_1_relaxed test
@@ -1126,6 +1501,21 @@ TEST_F(NeuralnetworksHidlTest, relu_float_1_relaxed) {
relu_float_1_relaxed::examples);
}
+namespace relu_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated relu_float_2_relaxed test
+#include "examples/relu_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/relu_float_2_relaxed.model.cpp"
+} // namespace relu_float_2_relaxed
+TEST_F(NeuralnetworksHidlTest, relu_float_2_relaxed) {
+ generated_tests::Execute(device,
+ relu_float_2_relaxed::createTestModel,
+ relu_float_2_relaxed::is_ignored,
+ relu_float_2_relaxed::examples);
+}
+
namespace reshape_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated reshape_relaxed test
@@ -1156,6 +1546,21 @@ TEST_F(NeuralnetworksHidlTest, reshape_weights_as_inputs_relaxed) {
reshape_weights_as_inputs_relaxed::examples);
}
+namespace resize_bilinear_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated resize_bilinear_2_relaxed test
+#include "examples/resize_bilinear_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/resize_bilinear_2_relaxed.model.cpp"
+} // namespace resize_bilinear_2_relaxed
+TEST_F(NeuralnetworksHidlTest, resize_bilinear_2_relaxed) {
+ generated_tests::Execute(device,
+ resize_bilinear_2_relaxed::createTestModel,
+ resize_bilinear_2_relaxed::is_ignored,
+ resize_bilinear_2_relaxed::examples);
+}
+
namespace resize_bilinear_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated resize_bilinear_relaxed test
@@ -1246,6 +1651,21 @@ TEST_F(NeuralnetworksHidlTest, space_to_batch_float_1) {
space_to_batch_float_1::examples);
}
+namespace space_to_batch_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated space_to_batch_float_1_relaxed test
+#include "examples/space_to_batch_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/space_to_batch_float_1_relaxed.model.cpp"
+} // namespace space_to_batch_float_1_relaxed
+TEST_F(NeuralnetworksHidlTest, space_to_batch_float_1_relaxed) {
+ generated_tests::Execute(device,
+ space_to_batch_float_1_relaxed::createTestModel,
+ space_to_batch_float_1_relaxed::is_ignored,
+ space_to_batch_float_1_relaxed::examples);
+}
+
namespace space_to_batch_float_2 {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch_float_2 test
@@ -1261,6 +1681,21 @@ TEST_F(NeuralnetworksHidlTest, space_to_batch_float_2) {
space_to_batch_float_2::examples);
}
+namespace space_to_batch_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated space_to_batch_float_2_relaxed test
+#include "examples/space_to_batch_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/space_to_batch_float_2_relaxed.model.cpp"
+} // namespace space_to_batch_float_2_relaxed
+TEST_F(NeuralnetworksHidlTest, space_to_batch_float_2_relaxed) {
+ generated_tests::Execute(device,
+ space_to_batch_float_2_relaxed::createTestModel,
+ space_to_batch_float_2_relaxed::is_ignored,
+ space_to_batch_float_2_relaxed::examples);
+}
+
namespace space_to_batch_float_3 {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch_float_3 test
@@ -1276,6 +1711,21 @@ TEST_F(NeuralnetworksHidlTest, space_to_batch_float_3) {
space_to_batch_float_3::examples);
}
+namespace space_to_batch_float_3_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated space_to_batch_float_3_relaxed test
+#include "examples/space_to_batch_float_3_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/space_to_batch_float_3_relaxed.model.cpp"
+} // namespace space_to_batch_float_3_relaxed
+TEST_F(NeuralnetworksHidlTest, space_to_batch_float_3_relaxed) {
+ generated_tests::Execute(device,
+ space_to_batch_float_3_relaxed::createTestModel,
+ space_to_batch_float_3_relaxed::is_ignored,
+ space_to_batch_float_3_relaxed::examples);
+}
+
namespace space_to_batch {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch test
@@ -1336,6 +1786,21 @@ TEST_F(NeuralnetworksHidlTest, space_to_batch_quant8_3) {
space_to_batch_quant8_3::examples);
}
+namespace space_to_batch_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated space_to_batch_relaxed test
+#include "examples/space_to_batch_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/space_to_batch_relaxed.model.cpp"
+} // namespace space_to_batch_relaxed
+TEST_F(NeuralnetworksHidlTest, space_to_batch_relaxed) {
+ generated_tests::Execute(device,
+ space_to_batch_relaxed::createTestModel,
+ space_to_batch_relaxed::is_ignored,
+ space_to_batch_relaxed::examples);
+}
+
namespace space_to_depth_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated space_to_depth_float_1_relaxed test
@@ -1396,6 +1861,21 @@ TEST_F(NeuralnetworksHidlTest, squeeze_float_1) {
squeeze_float_1::examples);
}
+namespace squeeze_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated squeeze_float_1_relaxed test
+#include "examples/squeeze_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/squeeze_float_1_relaxed.model.cpp"
+} // namespace squeeze_float_1_relaxed
+TEST_F(NeuralnetworksHidlTest, squeeze_float_1_relaxed) {
+ generated_tests::Execute(device,
+ squeeze_float_1_relaxed::createTestModel,
+ squeeze_float_1_relaxed::is_ignored,
+ squeeze_float_1_relaxed::examples);
+}
+
namespace squeeze {
std::vector<MixedTypedExample> examples = {
// Generated squeeze test
@@ -1426,6 +1906,21 @@ TEST_F(NeuralnetworksHidlTest, squeeze_quant8_1) {
squeeze_quant8_1::examples);
}
+namespace squeeze_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated squeeze_relaxed test
+#include "examples/squeeze_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/squeeze_relaxed.model.cpp"
+} // namespace squeeze_relaxed
+TEST_F(NeuralnetworksHidlTest, squeeze_relaxed) {
+ generated_tests::Execute(device,
+ squeeze_relaxed::createTestModel,
+ squeeze_relaxed::is_ignored,
+ squeeze_relaxed::examples);
+}
+
namespace strided_slice_float_10 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_10 test
@@ -1441,6 +1936,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_10) {
strided_slice_float_10::examples);
}
+namespace strided_slice_float_10_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_10_relaxed test
+#include "examples/strided_slice_float_10_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_10_relaxed.model.cpp"
+} // namespace strided_slice_float_10_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_10_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_10_relaxed::createTestModel,
+ strided_slice_float_10_relaxed::is_ignored,
+ strided_slice_float_10_relaxed::examples);
+}
+
namespace strided_slice_float_11 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_11 test
@@ -1456,6 +1966,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_11) {
strided_slice_float_11::examples);
}
+namespace strided_slice_float_11_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_11_relaxed test
+#include "examples/strided_slice_float_11_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_11_relaxed.model.cpp"
+} // namespace strided_slice_float_11_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_11_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_11_relaxed::createTestModel,
+ strided_slice_float_11_relaxed::is_ignored,
+ strided_slice_float_11_relaxed::examples);
+}
+
namespace strided_slice_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_1 test
@@ -1471,6 +1996,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_1) {
strided_slice_float_1::examples);
}
+namespace strided_slice_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_1_relaxed test
+#include "examples/strided_slice_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_1_relaxed.model.cpp"
+} // namespace strided_slice_float_1_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_1_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_1_relaxed::createTestModel,
+ strided_slice_float_1_relaxed::is_ignored,
+ strided_slice_float_1_relaxed::examples);
+}
+
namespace strided_slice_float_2 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_2 test
@@ -1486,6 +2026,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_2) {
strided_slice_float_2::examples);
}
+namespace strided_slice_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_2_relaxed test
+#include "examples/strided_slice_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_2_relaxed.model.cpp"
+} // namespace strided_slice_float_2_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_2_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_2_relaxed::createTestModel,
+ strided_slice_float_2_relaxed::is_ignored,
+ strided_slice_float_2_relaxed::examples);
+}
+
namespace strided_slice_float_3 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_3 test
@@ -1501,6 +2056,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_3) {
strided_slice_float_3::examples);
}
+namespace strided_slice_float_3_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_3_relaxed test
+#include "examples/strided_slice_float_3_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_3_relaxed.model.cpp"
+} // namespace strided_slice_float_3_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_3_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_3_relaxed::createTestModel,
+ strided_slice_float_3_relaxed::is_ignored,
+ strided_slice_float_3_relaxed::examples);
+}
+
namespace strided_slice_float_4 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_4 test
@@ -1516,6 +2086,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_4) {
strided_slice_float_4::examples);
}
+namespace strided_slice_float_4_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_4_relaxed test
+#include "examples/strided_slice_float_4_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_4_relaxed.model.cpp"
+} // namespace strided_slice_float_4_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_4_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_4_relaxed::createTestModel,
+ strided_slice_float_4_relaxed::is_ignored,
+ strided_slice_float_4_relaxed::examples);
+}
+
namespace strided_slice_float_5 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_5 test
@@ -1531,6 +2116,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_5) {
strided_slice_float_5::examples);
}
+namespace strided_slice_float_5_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_5_relaxed test
+#include "examples/strided_slice_float_5_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_5_relaxed.model.cpp"
+} // namespace strided_slice_float_5_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_5_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_5_relaxed::createTestModel,
+ strided_slice_float_5_relaxed::is_ignored,
+ strided_slice_float_5_relaxed::examples);
+}
+
namespace strided_slice_float_6 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_6 test
@@ -1546,6 +2146,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_6) {
strided_slice_float_6::examples);
}
+namespace strided_slice_float_6_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_6_relaxed test
+#include "examples/strided_slice_float_6_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_6_relaxed.model.cpp"
+} // namespace strided_slice_float_6_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_6_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_6_relaxed::createTestModel,
+ strided_slice_float_6_relaxed::is_ignored,
+ strided_slice_float_6_relaxed::examples);
+}
+
namespace strided_slice_float_7 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_7 test
@@ -1561,6 +2176,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_7) {
strided_slice_float_7::examples);
}
+namespace strided_slice_float_7_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_7_relaxed test
+#include "examples/strided_slice_float_7_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_7_relaxed.model.cpp"
+} // namespace strided_slice_float_7_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_7_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_7_relaxed::createTestModel,
+ strided_slice_float_7_relaxed::is_ignored,
+ strided_slice_float_7_relaxed::examples);
+}
+
namespace strided_slice_float_8 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_8 test
@@ -1576,6 +2206,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_8) {
strided_slice_float_8::examples);
}
+namespace strided_slice_float_8_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_8_relaxed test
+#include "examples/strided_slice_float_8_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_8_relaxed.model.cpp"
+} // namespace strided_slice_float_8_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_8_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_8_relaxed::createTestModel,
+ strided_slice_float_8_relaxed::is_ignored,
+ strided_slice_float_8_relaxed::examples);
+}
+
namespace strided_slice_float_9 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_9 test
@@ -1591,6 +2236,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_float_9) {
strided_slice_float_9::examples);
}
+namespace strided_slice_float_9_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_9_relaxed test
+#include "examples/strided_slice_float_9_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_float_9_relaxed.model.cpp"
+} // namespace strided_slice_float_9_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_float_9_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_float_9_relaxed::createTestModel,
+ strided_slice_float_9_relaxed::is_ignored,
+ strided_slice_float_9_relaxed::examples);
+}
+
namespace strided_slice {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice test
@@ -1771,6 +2431,21 @@ TEST_F(NeuralnetworksHidlTest, strided_slice_quant8_9) {
strided_slice_quant8_9::examples);
}
+namespace strided_slice_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_relaxed test
+#include "examples/strided_slice_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/strided_slice_relaxed.model.cpp"
+} // namespace strided_slice_relaxed
+TEST_F(NeuralnetworksHidlTest, strided_slice_relaxed) {
+ generated_tests::Execute(device,
+ strided_slice_relaxed::createTestModel,
+ strided_slice_relaxed::is_ignored,
+ strided_slice_relaxed::examples);
+}
+
namespace sub_broadcast_float {
std::vector<MixedTypedExample> examples = {
// Generated sub_broadcast_float test
@@ -1786,6 +2461,21 @@ TEST_F(NeuralnetworksHidlTest, sub_broadcast_float) {
sub_broadcast_float::examples);
}
+namespace sub_broadcast_float_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated sub_broadcast_float_relaxed test
+#include "examples/sub_broadcast_float_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/sub_broadcast_float_relaxed.model.cpp"
+} // namespace sub_broadcast_float_relaxed
+TEST_F(NeuralnetworksHidlTest, sub_broadcast_float_relaxed) {
+ generated_tests::Execute(device,
+ sub_broadcast_float_relaxed::createTestModel,
+ sub_broadcast_float_relaxed::is_ignored,
+ sub_broadcast_float_relaxed::examples);
+}
+
namespace sub {
std::vector<MixedTypedExample> examples = {
// Generated sub test
@@ -1801,6 +2491,21 @@ TEST_F(NeuralnetworksHidlTest, sub) {
sub::examples);
}
+namespace sub_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated sub_relaxed test
+#include "examples/sub_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/sub_relaxed.model.cpp"
+} // namespace sub_relaxed
+TEST_F(NeuralnetworksHidlTest, sub_relaxed) {
+ generated_tests::Execute(device,
+ sub_relaxed::createTestModel,
+ sub_relaxed::is_ignored,
+ sub_relaxed::examples);
+}
+
namespace svdf2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated svdf2_relaxed test
@@ -1876,6 +2581,21 @@ TEST_F(NeuralnetworksHidlTest, transpose_float_1) {
transpose_float_1::examples);
}
+namespace transpose_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated transpose_float_1_relaxed test
+#include "examples/transpose_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/transpose_float_1_relaxed.model.cpp"
+} // namespace transpose_float_1_relaxed
+TEST_F(NeuralnetworksHidlTest, transpose_float_1_relaxed) {
+ generated_tests::Execute(device,
+ transpose_float_1_relaxed::createTestModel,
+ transpose_float_1_relaxed::is_ignored,
+ transpose_float_1_relaxed::examples);
+}
+
namespace transpose {
std::vector<MixedTypedExample> examples = {
// Generated transpose test
@@ -1905,3 +2625,18 @@ TEST_F(NeuralnetworksHidlTest, transpose_quant8_1) {
transpose_quant8_1::is_ignored,
transpose_quant8_1::examples);
}
+
+namespace transpose_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated transpose_relaxed test
+#include "examples/transpose_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "vts_models/transpose_relaxed.model.cpp"
+} // namespace transpose_relaxed
+TEST_F(NeuralnetworksHidlTest, transpose_relaxed) {
+ generated_tests::Execute(device,
+ transpose_relaxed::createTestModel,
+ transpose_relaxed::is_ignored,
+ transpose_relaxed::examples);
+}
diff --git a/nn/runtime/test/generated/all_generated_tests.cpp b/nn/runtime/test/generated/all_generated_tests.cpp
index 240f9193d..1a20a5ac2 100644
--- a/nn/runtime/test/generated/all_generated_tests.cpp
+++ b/nn/runtime/test/generated/all_generated_tests.cpp
@@ -2115,6 +2115,20 @@ TEST_F(GeneratedTests, avg_pool_float_4_relaxed) {
avg_pool_float_4_relaxed::examples);
}
+namespace avg_pool_float_5_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated avg_pool_float_5_relaxed test
+#include "generated/examples/avg_pool_float_5_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/avg_pool_float_5_relaxed.model.cpp"
+} // namespace avg_pool_float_5_relaxed
+TEST_F(GeneratedTests, avg_pool_float_5_relaxed) {
+ execute(avg_pool_float_5_relaxed::CreateModel,
+ avg_pool_float_5_relaxed::is_ignored,
+ avg_pool_float_5_relaxed::examples);
+}
+
namespace batch_to_space_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated batch_to_space_float_1 test
@@ -2129,6 +2143,20 @@ TEST_F(GeneratedTests, batch_to_space_float_1) {
batch_to_space_float_1::examples);
}
+namespace batch_to_space_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated batch_to_space_float_1_relaxed test
+#include "generated/examples/batch_to_space_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/batch_to_space_float_1_relaxed.model.cpp"
+} // namespace batch_to_space_float_1_relaxed
+TEST_F(GeneratedTests, batch_to_space_float_1_relaxed) {
+ execute(batch_to_space_float_1_relaxed::CreateModel,
+ batch_to_space_float_1_relaxed::is_ignored,
+ batch_to_space_float_1_relaxed::examples);
+}
+
namespace batch_to_space {
std::vector<MixedTypedExample> examples = {
// Generated batch_to_space test
@@ -2157,6 +2185,20 @@ TEST_F(GeneratedTests, batch_to_space_quant8_1) {
batch_to_space_quant8_1::examples);
}
+namespace batch_to_space_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated batch_to_space_relaxed test
+#include "generated/examples/batch_to_space_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/batch_to_space_relaxed.model.cpp"
+} // namespace batch_to_space_relaxed
+TEST_F(GeneratedTests, batch_to_space_relaxed) {
+ execute(batch_to_space_relaxed::CreateModel,
+ batch_to_space_relaxed::is_ignored,
+ batch_to_space_relaxed::examples);
+}
+
namespace concat_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated concat_float_1_relaxed test
@@ -2255,6 +2297,20 @@ TEST_F(GeneratedTests, conv_3_h3_w2_VALID_relaxed) {
conv_3_h3_w2_VALID_relaxed::examples);
}
+namespace conv_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated conv_float_2_relaxed test
+#include "generated/examples/conv_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/conv_float_2_relaxed.model.cpp"
+} // namespace conv_float_2_relaxed
+TEST_F(GeneratedTests, conv_float_2_relaxed) {
+ execute(conv_float_2_relaxed::CreateModel,
+ conv_float_2_relaxed::is_ignored,
+ conv_float_2_relaxed::examples);
+}
+
namespace conv_float_channels_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated conv_float_channels_relaxed test
@@ -2381,6 +2437,48 @@ TEST_F(GeneratedTests, depth_to_space_float_3_relaxed) {
depth_to_space_float_3_relaxed::examples);
}
+namespace depthwise_conv2d_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated depthwise_conv2d_float_2_relaxed test
+#include "generated/examples/depthwise_conv2d_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/depthwise_conv2d_float_2_relaxed.model.cpp"
+} // namespace depthwise_conv2d_float_2_relaxed
+TEST_F(GeneratedTests, depthwise_conv2d_float_2_relaxed) {
+ execute(depthwise_conv2d_float_2_relaxed::CreateModel,
+ depthwise_conv2d_float_2_relaxed::is_ignored,
+ depthwise_conv2d_float_2_relaxed::examples);
+}
+
+namespace depthwise_conv2d_float_large_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated depthwise_conv2d_float_large_2_relaxed test
+#include "generated/examples/depthwise_conv2d_float_large_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/depthwise_conv2d_float_large_2_relaxed.model.cpp"
+} // namespace depthwise_conv2d_float_large_2_relaxed
+TEST_F(GeneratedTests, depthwise_conv2d_float_large_2_relaxed) {
+ execute(depthwise_conv2d_float_large_2_relaxed::CreateModel,
+ depthwise_conv2d_float_large_2_relaxed::is_ignored,
+ depthwise_conv2d_float_large_2_relaxed::examples);
+}
+
+namespace depthwise_conv2d_float_large_2_weights_as_inputs_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated depthwise_conv2d_float_large_2_weights_as_inputs_relaxed test
+#include "generated/examples/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.model.cpp"
+} // namespace depthwise_conv2d_float_large_2_weights_as_inputs_relaxed
+TEST_F(GeneratedTests, depthwise_conv2d_float_large_2_weights_as_inputs_relaxed) {
+ execute(depthwise_conv2d_float_large_2_weights_as_inputs_relaxed::CreateModel,
+ depthwise_conv2d_float_large_2_weights_as_inputs_relaxed::is_ignored,
+ depthwise_conv2d_float_large_2_weights_as_inputs_relaxed::examples);
+}
+
namespace depthwise_conv2d_float_large_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated depthwise_conv2d_float_large_relaxed test
@@ -2409,6 +2507,34 @@ TEST_F(GeneratedTests, depthwise_conv2d_float_large_weights_as_inputs_relaxed) {
depthwise_conv2d_float_large_weights_as_inputs_relaxed::examples);
}
+namespace depthwise_conv2d_float_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated depthwise_conv2d_float_relaxed test
+#include "generated/examples/depthwise_conv2d_float_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/depthwise_conv2d_float_relaxed.model.cpp"
+} // namespace depthwise_conv2d_float_relaxed
+TEST_F(GeneratedTests, depthwise_conv2d_float_relaxed) {
+ execute(depthwise_conv2d_float_relaxed::CreateModel,
+ depthwise_conv2d_float_relaxed::is_ignored,
+ depthwise_conv2d_float_relaxed::examples);
+}
+
+namespace depthwise_conv2d_float_weights_as_inputs_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated depthwise_conv2d_float_weights_as_inputs_relaxed test
+#include "generated/examples/depthwise_conv2d_float_weights_as_inputs_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/depthwise_conv2d_float_weights_as_inputs_relaxed.model.cpp"
+} // namespace depthwise_conv2d_float_weights_as_inputs_relaxed
+TEST_F(GeneratedTests, depthwise_conv2d_float_weights_as_inputs_relaxed) {
+ execute(depthwise_conv2d_float_weights_as_inputs_relaxed::CreateModel,
+ depthwise_conv2d_float_weights_as_inputs_relaxed::is_ignored,
+ depthwise_conv2d_float_weights_as_inputs_relaxed::examples);
+}
+
namespace depthwise_conv_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated depthwise_conv_relaxed test
@@ -2423,6 +2549,20 @@ TEST_F(GeneratedTests, depthwise_conv_relaxed) {
depthwise_conv_relaxed::examples);
}
+namespace dequantize_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated dequantize_relaxed test
+#include "generated/examples/dequantize_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/dequantize_relaxed.model.cpp"
+} // namespace dequantize_relaxed
+TEST_F(GeneratedTests, dequantize_relaxed) {
+ execute(dequantize_relaxed::CreateModel,
+ dequantize_relaxed::is_ignored,
+ dequantize_relaxed::examples);
+}
+
namespace div_broadcast_float {
std::vector<MixedTypedExample> examples = {
// Generated div_broadcast_float test
@@ -2437,6 +2577,20 @@ TEST_F(GeneratedTests, div_broadcast_float) {
div_broadcast_float::examples);
}
+namespace div_broadcast_float_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated div_broadcast_float_relaxed test
+#include "generated/examples/div_broadcast_float_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/div_broadcast_float_relaxed.model.cpp"
+} // namespace div_broadcast_float_relaxed
+TEST_F(GeneratedTests, div_broadcast_float_relaxed) {
+ execute(div_broadcast_float_relaxed::CreateModel,
+ div_broadcast_float_relaxed::is_ignored,
+ div_broadcast_float_relaxed::examples);
+}
+
namespace div {
std::vector<MixedTypedExample> examples = {
// Generated div test
@@ -2451,6 +2605,20 @@ TEST_F(GeneratedTests, div) {
div::examples);
}
+namespace div_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated div_relaxed test
+#include "generated/examples/div_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/div_relaxed.model.cpp"
+} // namespace div_relaxed
+TEST_F(GeneratedTests, div_relaxed) {
+ execute(div_relaxed::CreateModel,
+ div_relaxed::is_ignored,
+ div_relaxed::examples);
+}
+
namespace embedding_lookup_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated embedding_lookup_relaxed test
@@ -2479,6 +2647,20 @@ TEST_F(GeneratedTests, floor_relaxed) {
floor_relaxed::examples);
}
+namespace fully_connected_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated fully_connected_float_2_relaxed test
+#include "generated/examples/fully_connected_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/fully_connected_float_2_relaxed.model.cpp"
+} // namespace fully_connected_float_2_relaxed
+TEST_F(GeneratedTests, fully_connected_float_2_relaxed) {
+ execute(fully_connected_float_2_relaxed::CreateModel,
+ fully_connected_float_2_relaxed::is_ignored,
+ fully_connected_float_2_relaxed::examples);
+}
+
namespace fully_connected_float_4d_simple {
std::vector<MixedTypedExample> examples = {
// Generated fully_connected_float_4d_simple test
@@ -2493,6 +2675,48 @@ TEST_F(GeneratedTests, fully_connected_float_4d_simple) {
fully_connected_float_4d_simple::examples);
}
+namespace fully_connected_float_4d_simple_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated fully_connected_float_4d_simple_relaxed test
+#include "generated/examples/fully_connected_float_4d_simple_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/fully_connected_float_4d_simple_relaxed.model.cpp"
+} // namespace fully_connected_float_4d_simple_relaxed
+TEST_F(GeneratedTests, fully_connected_float_4d_simple_relaxed) {
+ execute(fully_connected_float_4d_simple_relaxed::CreateModel,
+ fully_connected_float_4d_simple_relaxed::is_ignored,
+ fully_connected_float_4d_simple_relaxed::examples);
+}
+
+namespace fully_connected_float_large_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated fully_connected_float_large_relaxed test
+#include "generated/examples/fully_connected_float_large_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/fully_connected_float_large_relaxed.model.cpp"
+} // namespace fully_connected_float_large_relaxed
+TEST_F(GeneratedTests, fully_connected_float_large_relaxed) {
+ execute(fully_connected_float_large_relaxed::CreateModel,
+ fully_connected_float_large_relaxed::is_ignored,
+ fully_connected_float_large_relaxed::examples);
+}
+
+namespace fully_connected_float_large_weights_as_inputs_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated fully_connected_float_large_weights_as_inputs_relaxed test
+#include "generated/examples/fully_connected_float_large_weights_as_inputs_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/fully_connected_float_large_weights_as_inputs_relaxed.model.cpp"
+} // namespace fully_connected_float_large_weights_as_inputs_relaxed
+TEST_F(GeneratedTests, fully_connected_float_large_weights_as_inputs_relaxed) {
+ execute(fully_connected_float_large_weights_as_inputs_relaxed::CreateModel,
+ fully_connected_float_large_weights_as_inputs_relaxed::is_ignored,
+ fully_connected_float_large_weights_as_inputs_relaxed::examples);
+}
+
namespace fully_connected_float_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated fully_connected_float_relaxed test
@@ -2535,6 +2759,20 @@ TEST_F(GeneratedTests, hashtable_lookup_float_relaxed) {
hashtable_lookup_float_relaxed::examples);
}
+namespace l2_normalization_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated l2_normalization_2_relaxed test
+#include "generated/examples/l2_normalization_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/l2_normalization_2_relaxed.model.cpp"
+} // namespace l2_normalization_2_relaxed
+TEST_F(GeneratedTests, l2_normalization_2_relaxed) {
+ execute(l2_normalization_2_relaxed::CreateModel,
+ l2_normalization_2_relaxed::is_ignored,
+ l2_normalization_2_relaxed::examples);
+}
+
namespace l2_normalization_large_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated l2_normalization_large_relaxed test
@@ -2563,6 +2801,34 @@ TEST_F(GeneratedTests, l2_normalization_relaxed) {
l2_normalization_relaxed::examples);
}
+namespace l2_pool_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated l2_pool_float_2_relaxed test
+#include "generated/examples/l2_pool_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/l2_pool_float_2_relaxed.model.cpp"
+} // namespace l2_pool_float_2_relaxed
+TEST_F(GeneratedTests, l2_pool_float_2_relaxed) {
+ execute(l2_pool_float_2_relaxed::CreateModel,
+ l2_pool_float_2_relaxed::is_ignored,
+ l2_pool_float_2_relaxed::examples);
+}
+
+namespace l2_pool_float_large_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated l2_pool_float_large_relaxed test
+#include "generated/examples/l2_pool_float_large_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/l2_pool_float_large_relaxed.model.cpp"
+} // namespace l2_pool_float_large_relaxed
+TEST_F(GeneratedTests, l2_pool_float_large_relaxed) {
+ execute(l2_pool_float_large_relaxed::CreateModel,
+ l2_pool_float_large_relaxed::is_ignored,
+ l2_pool_float_large_relaxed::examples);
+}
+
namespace l2_pool_float_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated l2_pool_float_relaxed test
@@ -2885,6 +3151,20 @@ TEST_F(GeneratedTests, max_pool_float_3_relaxed) {
max_pool_float_3_relaxed::examples);
}
+namespace max_pool_float_4_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated max_pool_float_4_relaxed test
+#include "generated/examples/max_pool_float_4_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/max_pool_float_4_relaxed.model.cpp"
+} // namespace max_pool_float_4_relaxed
+TEST_F(GeneratedTests, max_pool_float_4_relaxed) {
+ execute(max_pool_float_4_relaxed::CreateModel,
+ max_pool_float_4_relaxed::is_ignored,
+ max_pool_float_4_relaxed::examples);
+}
+
namespace mean_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated mean_float_1 test
@@ -2899,6 +3179,20 @@ TEST_F(GeneratedTests, mean_float_1) {
mean_float_1::examples);
}
+namespace mean_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated mean_float_1_relaxed test
+#include "generated/examples/mean_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/mean_float_1_relaxed.model.cpp"
+} // namespace mean_float_1_relaxed
+TEST_F(GeneratedTests, mean_float_1_relaxed) {
+ execute(mean_float_1_relaxed::CreateModel,
+ mean_float_1_relaxed::is_ignored,
+ mean_float_1_relaxed::examples);
+}
+
namespace mean_float_2 {
std::vector<MixedTypedExample> examples = {
// Generated mean_float_2 test
@@ -2913,6 +3207,20 @@ TEST_F(GeneratedTests, mean_float_2) {
mean_float_2::examples);
}
+namespace mean_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated mean_float_2_relaxed test
+#include "generated/examples/mean_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/mean_float_2_relaxed.model.cpp"
+} // namespace mean_float_2_relaxed
+TEST_F(GeneratedTests, mean_float_2_relaxed) {
+ execute(mean_float_2_relaxed::CreateModel,
+ mean_float_2_relaxed::is_ignored,
+ mean_float_2_relaxed::examples);
+}
+
namespace mean {
std::vector<MixedTypedExample> examples = {
// Generated mean test
@@ -2955,6 +3263,20 @@ TEST_F(GeneratedTests, mean_quant8_2) {
mean_quant8_2::examples);
}
+namespace mean_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated mean_relaxed test
+#include "generated/examples/mean_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/mean_relaxed.model.cpp"
+} // namespace mean_relaxed
+TEST_F(GeneratedTests, mean_relaxed) {
+ execute(mean_relaxed::CreateModel,
+ mean_relaxed::is_ignored,
+ mean_relaxed::examples);
+}
+
namespace mobilenet_224_gender_basic_fixed_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated mobilenet_224_gender_basic_fixed_relaxed test
@@ -3011,6 +3333,20 @@ TEST_F(GeneratedTests, pad_float_1) {
pad_float_1::examples);
}
+namespace pad_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated pad_float_1_relaxed test
+#include "generated/examples/pad_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/pad_float_1_relaxed.model.cpp"
+} // namespace pad_float_1_relaxed
+TEST_F(GeneratedTests, pad_float_1_relaxed) {
+ execute(pad_float_1_relaxed::CreateModel,
+ pad_float_1_relaxed::is_ignored,
+ pad_float_1_relaxed::examples);
+}
+
namespace pad {
std::vector<MixedTypedExample> examples = {
// Generated pad test
@@ -3025,6 +3361,20 @@ TEST_F(GeneratedTests, pad) {
pad::examples);
}
+namespace pad_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated pad_relaxed test
+#include "generated/examples/pad_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/pad_relaxed.model.cpp"
+} // namespace pad_relaxed
+TEST_F(GeneratedTests, pad_relaxed) {
+ execute(pad_relaxed::CreateModel,
+ pad_relaxed::is_ignored,
+ pad_relaxed::examples);
+}
+
namespace relu1_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated relu1_float_1_relaxed test
@@ -3095,6 +3445,20 @@ TEST_F(GeneratedTests, relu_float_1_relaxed) {
relu_float_1_relaxed::examples);
}
+namespace relu_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated relu_float_2_relaxed test
+#include "generated/examples/relu_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/relu_float_2_relaxed.model.cpp"
+} // namespace relu_float_2_relaxed
+TEST_F(GeneratedTests, relu_float_2_relaxed) {
+ execute(relu_float_2_relaxed::CreateModel,
+ relu_float_2_relaxed::is_ignored,
+ relu_float_2_relaxed::examples);
+}
+
namespace reshape_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated reshape_relaxed test
@@ -3123,6 +3487,20 @@ TEST_F(GeneratedTests, reshape_weights_as_inputs_relaxed) {
reshape_weights_as_inputs_relaxed::examples);
}
+namespace resize_bilinear_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated resize_bilinear_2_relaxed test
+#include "generated/examples/resize_bilinear_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/resize_bilinear_2_relaxed.model.cpp"
+} // namespace resize_bilinear_2_relaxed
+TEST_F(GeneratedTests, resize_bilinear_2_relaxed) {
+ execute(resize_bilinear_2_relaxed::CreateModel,
+ resize_bilinear_2_relaxed::is_ignored,
+ resize_bilinear_2_relaxed::examples);
+}
+
namespace resize_bilinear_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated resize_bilinear_relaxed test
@@ -3207,6 +3585,20 @@ TEST_F(GeneratedTests, space_to_batch_float_1) {
space_to_batch_float_1::examples);
}
+namespace space_to_batch_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated space_to_batch_float_1_relaxed test
+#include "generated/examples/space_to_batch_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/space_to_batch_float_1_relaxed.model.cpp"
+} // namespace space_to_batch_float_1_relaxed
+TEST_F(GeneratedTests, space_to_batch_float_1_relaxed) {
+ execute(space_to_batch_float_1_relaxed::CreateModel,
+ space_to_batch_float_1_relaxed::is_ignored,
+ space_to_batch_float_1_relaxed::examples);
+}
+
namespace space_to_batch_float_2 {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch_float_2 test
@@ -3221,6 +3613,20 @@ TEST_F(GeneratedTests, space_to_batch_float_2) {
space_to_batch_float_2::examples);
}
+namespace space_to_batch_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated space_to_batch_float_2_relaxed test
+#include "generated/examples/space_to_batch_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/space_to_batch_float_2_relaxed.model.cpp"
+} // namespace space_to_batch_float_2_relaxed
+TEST_F(GeneratedTests, space_to_batch_float_2_relaxed) {
+ execute(space_to_batch_float_2_relaxed::CreateModel,
+ space_to_batch_float_2_relaxed::is_ignored,
+ space_to_batch_float_2_relaxed::examples);
+}
+
namespace space_to_batch_float_3 {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch_float_3 test
@@ -3235,6 +3641,20 @@ TEST_F(GeneratedTests, space_to_batch_float_3) {
space_to_batch_float_3::examples);
}
+namespace space_to_batch_float_3_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated space_to_batch_float_3_relaxed test
+#include "generated/examples/space_to_batch_float_3_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/space_to_batch_float_3_relaxed.model.cpp"
+} // namespace space_to_batch_float_3_relaxed
+TEST_F(GeneratedTests, space_to_batch_float_3_relaxed) {
+ execute(space_to_batch_float_3_relaxed::CreateModel,
+ space_to_batch_float_3_relaxed::is_ignored,
+ space_to_batch_float_3_relaxed::examples);
+}
+
namespace space_to_batch {
std::vector<MixedTypedExample> examples = {
// Generated space_to_batch test
@@ -3291,6 +3711,20 @@ TEST_F(GeneratedTests, space_to_batch_quant8_3) {
space_to_batch_quant8_3::examples);
}
+namespace space_to_batch_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated space_to_batch_relaxed test
+#include "generated/examples/space_to_batch_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/space_to_batch_relaxed.model.cpp"
+} // namespace space_to_batch_relaxed
+TEST_F(GeneratedTests, space_to_batch_relaxed) {
+ execute(space_to_batch_relaxed::CreateModel,
+ space_to_batch_relaxed::is_ignored,
+ space_to_batch_relaxed::examples);
+}
+
namespace space_to_depth_float_1_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated space_to_depth_float_1_relaxed test
@@ -3347,6 +3781,20 @@ TEST_F(GeneratedTests, squeeze_float_1) {
squeeze_float_1::examples);
}
+namespace squeeze_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated squeeze_float_1_relaxed test
+#include "generated/examples/squeeze_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/squeeze_float_1_relaxed.model.cpp"
+} // namespace squeeze_float_1_relaxed
+TEST_F(GeneratedTests, squeeze_float_1_relaxed) {
+ execute(squeeze_float_1_relaxed::CreateModel,
+ squeeze_float_1_relaxed::is_ignored,
+ squeeze_float_1_relaxed::examples);
+}
+
namespace squeeze {
std::vector<MixedTypedExample> examples = {
// Generated squeeze test
@@ -3375,6 +3823,20 @@ TEST_F(GeneratedTests, squeeze_quant8_1) {
squeeze_quant8_1::examples);
}
+namespace squeeze_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated squeeze_relaxed test
+#include "generated/examples/squeeze_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/squeeze_relaxed.model.cpp"
+} // namespace squeeze_relaxed
+TEST_F(GeneratedTests, squeeze_relaxed) {
+ execute(squeeze_relaxed::CreateModel,
+ squeeze_relaxed::is_ignored,
+ squeeze_relaxed::examples);
+}
+
namespace strided_slice_float_10 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_10 test
@@ -3389,6 +3851,20 @@ TEST_F(GeneratedTests, strided_slice_float_10) {
strided_slice_float_10::examples);
}
+namespace strided_slice_float_10_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_10_relaxed test
+#include "generated/examples/strided_slice_float_10_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_10_relaxed.model.cpp"
+} // namespace strided_slice_float_10_relaxed
+TEST_F(GeneratedTests, strided_slice_float_10_relaxed) {
+ execute(strided_slice_float_10_relaxed::CreateModel,
+ strided_slice_float_10_relaxed::is_ignored,
+ strided_slice_float_10_relaxed::examples);
+}
+
namespace strided_slice_float_11 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_11 test
@@ -3403,6 +3879,20 @@ TEST_F(GeneratedTests, strided_slice_float_11) {
strided_slice_float_11::examples);
}
+namespace strided_slice_float_11_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_11_relaxed test
+#include "generated/examples/strided_slice_float_11_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_11_relaxed.model.cpp"
+} // namespace strided_slice_float_11_relaxed
+TEST_F(GeneratedTests, strided_slice_float_11_relaxed) {
+ execute(strided_slice_float_11_relaxed::CreateModel,
+ strided_slice_float_11_relaxed::is_ignored,
+ strided_slice_float_11_relaxed::examples);
+}
+
namespace strided_slice_float_1 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_1 test
@@ -3417,6 +3907,20 @@ TEST_F(GeneratedTests, strided_slice_float_1) {
strided_slice_float_1::examples);
}
+namespace strided_slice_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_1_relaxed test
+#include "generated/examples/strided_slice_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_1_relaxed.model.cpp"
+} // namespace strided_slice_float_1_relaxed
+TEST_F(GeneratedTests, strided_slice_float_1_relaxed) {
+ execute(strided_slice_float_1_relaxed::CreateModel,
+ strided_slice_float_1_relaxed::is_ignored,
+ strided_slice_float_1_relaxed::examples);
+}
+
namespace strided_slice_float_2 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_2 test
@@ -3431,6 +3935,20 @@ TEST_F(GeneratedTests, strided_slice_float_2) {
strided_slice_float_2::examples);
}
+namespace strided_slice_float_2_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_2_relaxed test
+#include "generated/examples/strided_slice_float_2_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_2_relaxed.model.cpp"
+} // namespace strided_slice_float_2_relaxed
+TEST_F(GeneratedTests, strided_slice_float_2_relaxed) {
+ execute(strided_slice_float_2_relaxed::CreateModel,
+ strided_slice_float_2_relaxed::is_ignored,
+ strided_slice_float_2_relaxed::examples);
+}
+
namespace strided_slice_float_3 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_3 test
@@ -3445,6 +3963,20 @@ TEST_F(GeneratedTests, strided_slice_float_3) {
strided_slice_float_3::examples);
}
+namespace strided_slice_float_3_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_3_relaxed test
+#include "generated/examples/strided_slice_float_3_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_3_relaxed.model.cpp"
+} // namespace strided_slice_float_3_relaxed
+TEST_F(GeneratedTests, strided_slice_float_3_relaxed) {
+ execute(strided_slice_float_3_relaxed::CreateModel,
+ strided_slice_float_3_relaxed::is_ignored,
+ strided_slice_float_3_relaxed::examples);
+}
+
namespace strided_slice_float_4 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_4 test
@@ -3459,6 +3991,20 @@ TEST_F(GeneratedTests, strided_slice_float_4) {
strided_slice_float_4::examples);
}
+namespace strided_slice_float_4_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_4_relaxed test
+#include "generated/examples/strided_slice_float_4_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_4_relaxed.model.cpp"
+} // namespace strided_slice_float_4_relaxed
+TEST_F(GeneratedTests, strided_slice_float_4_relaxed) {
+ execute(strided_slice_float_4_relaxed::CreateModel,
+ strided_slice_float_4_relaxed::is_ignored,
+ strided_slice_float_4_relaxed::examples);
+}
+
namespace strided_slice_float_5 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_5 test
@@ -3473,6 +4019,20 @@ TEST_F(GeneratedTests, strided_slice_float_5) {
strided_slice_float_5::examples);
}
+namespace strided_slice_float_5_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_5_relaxed test
+#include "generated/examples/strided_slice_float_5_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_5_relaxed.model.cpp"
+} // namespace strided_slice_float_5_relaxed
+TEST_F(GeneratedTests, strided_slice_float_5_relaxed) {
+ execute(strided_slice_float_5_relaxed::CreateModel,
+ strided_slice_float_5_relaxed::is_ignored,
+ strided_slice_float_5_relaxed::examples);
+}
+
namespace strided_slice_float_6 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_6 test
@@ -3487,6 +4047,20 @@ TEST_F(GeneratedTests, strided_slice_float_6) {
strided_slice_float_6::examples);
}
+namespace strided_slice_float_6_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_6_relaxed test
+#include "generated/examples/strided_slice_float_6_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_6_relaxed.model.cpp"
+} // namespace strided_slice_float_6_relaxed
+TEST_F(GeneratedTests, strided_slice_float_6_relaxed) {
+ execute(strided_slice_float_6_relaxed::CreateModel,
+ strided_slice_float_6_relaxed::is_ignored,
+ strided_slice_float_6_relaxed::examples);
+}
+
namespace strided_slice_float_7 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_7 test
@@ -3501,6 +4075,20 @@ TEST_F(GeneratedTests, strided_slice_float_7) {
strided_slice_float_7::examples);
}
+namespace strided_slice_float_7_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_7_relaxed test
+#include "generated/examples/strided_slice_float_7_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_7_relaxed.model.cpp"
+} // namespace strided_slice_float_7_relaxed
+TEST_F(GeneratedTests, strided_slice_float_7_relaxed) {
+ execute(strided_slice_float_7_relaxed::CreateModel,
+ strided_slice_float_7_relaxed::is_ignored,
+ strided_slice_float_7_relaxed::examples);
+}
+
namespace strided_slice_float_8 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_8 test
@@ -3515,6 +4103,20 @@ TEST_F(GeneratedTests, strided_slice_float_8) {
strided_slice_float_8::examples);
}
+namespace strided_slice_float_8_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_8_relaxed test
+#include "generated/examples/strided_slice_float_8_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_8_relaxed.model.cpp"
+} // namespace strided_slice_float_8_relaxed
+TEST_F(GeneratedTests, strided_slice_float_8_relaxed) {
+ execute(strided_slice_float_8_relaxed::CreateModel,
+ strided_slice_float_8_relaxed::is_ignored,
+ strided_slice_float_8_relaxed::examples);
+}
+
namespace strided_slice_float_9 {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice_float_9 test
@@ -3529,6 +4131,20 @@ TEST_F(GeneratedTests, strided_slice_float_9) {
strided_slice_float_9::examples);
}
+namespace strided_slice_float_9_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_float_9_relaxed test
+#include "generated/examples/strided_slice_float_9_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_float_9_relaxed.model.cpp"
+} // namespace strided_slice_float_9_relaxed
+TEST_F(GeneratedTests, strided_slice_float_9_relaxed) {
+ execute(strided_slice_float_9_relaxed::CreateModel,
+ strided_slice_float_9_relaxed::is_ignored,
+ strided_slice_float_9_relaxed::examples);
+}
+
namespace strided_slice {
std::vector<MixedTypedExample> examples = {
// Generated strided_slice test
@@ -3697,6 +4313,20 @@ TEST_F(GeneratedTests, strided_slice_quant8_9) {
strided_slice_quant8_9::examples);
}
+namespace strided_slice_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated strided_slice_relaxed test
+#include "generated/examples/strided_slice_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/strided_slice_relaxed.model.cpp"
+} // namespace strided_slice_relaxed
+TEST_F(GeneratedTests, strided_slice_relaxed) {
+ execute(strided_slice_relaxed::CreateModel,
+ strided_slice_relaxed::is_ignored,
+ strided_slice_relaxed::examples);
+}
+
namespace sub_broadcast_float {
std::vector<MixedTypedExample> examples = {
// Generated sub_broadcast_float test
@@ -3711,6 +4341,20 @@ TEST_F(GeneratedTests, sub_broadcast_float) {
sub_broadcast_float::examples);
}
+namespace sub_broadcast_float_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated sub_broadcast_float_relaxed test
+#include "generated/examples/sub_broadcast_float_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/sub_broadcast_float_relaxed.model.cpp"
+} // namespace sub_broadcast_float_relaxed
+TEST_F(GeneratedTests, sub_broadcast_float_relaxed) {
+ execute(sub_broadcast_float_relaxed::CreateModel,
+ sub_broadcast_float_relaxed::is_ignored,
+ sub_broadcast_float_relaxed::examples);
+}
+
namespace sub {
std::vector<MixedTypedExample> examples = {
// Generated sub test
@@ -3725,6 +4369,20 @@ TEST_F(GeneratedTests, sub) {
sub::examples);
}
+namespace sub_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated sub_relaxed test
+#include "generated/examples/sub_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/sub_relaxed.model.cpp"
+} // namespace sub_relaxed
+TEST_F(GeneratedTests, sub_relaxed) {
+ execute(sub_relaxed::CreateModel,
+ sub_relaxed::is_ignored,
+ sub_relaxed::examples);
+}
+
namespace svdf2_relaxed {
std::vector<MixedTypedExample> examples = {
// Generated svdf2_relaxed test
@@ -3795,6 +4453,20 @@ TEST_F(GeneratedTests, transpose_float_1) {
transpose_float_1::examples);
}
+namespace transpose_float_1_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated transpose_float_1_relaxed test
+#include "generated/examples/transpose_float_1_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/transpose_float_1_relaxed.model.cpp"
+} // namespace transpose_float_1_relaxed
+TEST_F(GeneratedTests, transpose_float_1_relaxed) {
+ execute(transpose_float_1_relaxed::CreateModel,
+ transpose_float_1_relaxed::is_ignored,
+ transpose_float_1_relaxed::examples);
+}
+
namespace transpose {
std::vector<MixedTypedExample> examples = {
// Generated transpose test
@@ -3822,3 +4494,17 @@ TEST_F(GeneratedTests, transpose_quant8_1) {
transpose_quant8_1::is_ignored,
transpose_quant8_1::examples);
}
+
+namespace transpose_relaxed {
+std::vector<MixedTypedExample> examples = {
+// Generated transpose_relaxed test
+#include "generated/examples/transpose_relaxed.example.cpp"
+};
+// Generated model constructor
+#include "generated/models/transpose_relaxed.model.cpp"
+} // namespace transpose_relaxed
+TEST_F(GeneratedTests, transpose_relaxed) {
+ execute(transpose_relaxed::CreateModel,
+ transpose_relaxed::is_ignored,
+ transpose_relaxed::examples);
+}
diff --git a/nn/runtime/test/generated/examples/avg_pool_float_5_relaxed.example.cpp b/nn/runtime/test/generated/examples/avg_pool_float_5_relaxed.example.cpp
new file mode 100644
index 000000000..cbc61b75f
--- /dev/null
+++ b/nn/runtime/test/generated/examples/avg_pool_float_5_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: avg_pool_float_5_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 6, 2, 4, 3, 2, 10, 7}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {2.75f, 5.75f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/batch_to_space_float_1_relaxed.example.cpp b/nn/runtime/test/generated/examples/batch_to_space_float_1_relaxed.example.cpp
new file mode 100644
index 000000000..32031d315
--- /dev/null
+++ b/nn/runtime/test/generated/examples/batch_to_space_float_1_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: batch_to_space_float_1_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 5, 2, 6, 9, 13, 10, 14, 3, 7, 4, 8, 11, 15, 12, 16}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/batch_to_space_relaxed.example.cpp b/nn/runtime/test/generated/examples/batch_to_space_relaxed.example.cpp
new file mode 100644
index 000000000..eb1be3252
--- /dev/null
+++ b/nn/runtime/test/generated/examples/batch_to_space_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: batch_to_space_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.4f, 2.3f, 3.2f, 4.1f, 5.4f, 6.3f, 7.2f, 8.1f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.4f, 2.3f, 3.2f, 4.1f, 5.4f, 6.3f, 7.2f, 8.1f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/conv_float_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/conv_float_2_relaxed.example.cpp
new file mode 100644
index 000000000..c93ca212b
--- /dev/null
+++ b/nn/runtime/test/generated/examples/conv_float_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: conv_float_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 0, 0, 0, 35, 112, 157, 0, 0, 34, 61, 0}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/depthwise_conv2d_float_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/depthwise_conv2d_float_2_relaxed.example.cpp
new file mode 100644
index 000000000..4f108cf3a
--- /dev/null
+++ b/nn/runtime/test/generated/examples/depthwise_conv2d_float_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: depthwise_conv2d_float_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 7, 8, 3, 4, 9, 10, 5, 6, 11, 12}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {71, -34, 99, -20, 91, -26, 127, -4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/depthwise_conv2d_float_large_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/depthwise_conv2d_float_large_2_relaxed.example.cpp
new file mode 100644
index 000000000..d8c887da9
--- /dev/null
+++ b/nn/runtime/test/generated/examples/depthwise_conv2d_float_large_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: depthwise_conv2d_float_large_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {10, 21, 100, 0, 10, 22, 200, 0, 10, 23, 300, 0, 10, 24, 400, 0}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {600010, 700046, 830000, 900000}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.example.cpp b/nn/runtime/test/generated/examples/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.example.cpp
new file mode 100644
index 000000000..9b6e34286
--- /dev/null
+++ b/nn/runtime/test/generated/examples/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {10, 21, 100, 10, 22, 200, 10, 23, 300, 10, 24, 400}}, {1, {0.25f, 0, 10, 100, 0.25f, 1, 20, 100, 0.25f, 0, 30, 100, 0.25f, 1, 40, 100}}, {2, {600000, 700000, 800000, 900000}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {600010, 700046, 830000, 900000}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/depthwise_conv2d_float_relaxed.example.cpp b/nn/runtime/test/generated/examples/depthwise_conv2d_float_relaxed.example.cpp
new file mode 100644
index 000000000..32a9f64ed
--- /dev/null
+++ b/nn/runtime/test/generated/examples/depthwise_conv2d_float_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: depthwise_conv2d_float_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {10, 21, 10, 22, 10, 23, 10, 24, 10, 25, 10, 26, 10, 27, 10, 28, 10, 29}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {11, 3, 7.2f, 10.6f, 11, 3, 7.4f, 10.9f, 11, 3, 7.8f, 11.5f, 11, 3, 8.0f, 11.8f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/depthwise_conv2d_float_weights_as_inputs_relaxed.example.cpp b/nn/runtime/test/generated/examples/depthwise_conv2d_float_weights_as_inputs_relaxed.example.cpp
new file mode 100644
index 000000000..1b5840df8
--- /dev/null
+++ b/nn/runtime/test/generated/examples/depthwise_conv2d_float_weights_as_inputs_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: depthwise_conv2d_float_weights_as_inputs_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {10, 21, 10, 22, 10, 23, 10, 24, 10, 25, 10, 26, 10, 27, 10, 28, 10, 29}}, {1, {0.25f, 0, 0.2f, 0, 0.25f, 0, 0, 0.3f, 0.25f, 0, 0, 0, 0.25f, 0.1f, 0, 0}}, {2, {1, 2, 3, 4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {11, 3, 7.2f, 10.6f, 11, 3, 7.4f, 10.9f, 11, 3, 7.8f, 11.5f, 11, 3, 8.0f, 11.8f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/dequantize_relaxed.example.cpp b/nn/runtime/test/generated/examples/dequantize_relaxed.example.cpp
new file mode 100644
index 000000000..d9a624c17
--- /dev/null
+++ b/nn/runtime/test/generated/examples/dequantize_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: dequantize_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {{0, {0, 32, 128, 255}}}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.0f, 32.0f, 128.0f, 255.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/div_broadcast_float_relaxed.example.cpp b/nn/runtime/test/generated/examples/div_broadcast_float_relaxed.example.cpp
new file mode 100644
index 000000000..5e38550b8
--- /dev/null
+++ b/nn/runtime/test/generated/examples/div_broadcast_float_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: div_broadcast_float_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2}}, {1, {1, 1, 2, 2}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 0.5f, 1}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/div_relaxed.example.cpp b/nn/runtime/test/generated/examples/div_relaxed.example.cpp
new file mode 100644
index 000000000..49fc0ddf2
--- /dev/null
+++ b/nn/runtime/test/generated/examples/div_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: div_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {2.0f, -4.0f, 8.0f, -16.0f}}, {1, {2.0f, -2.0f, -4.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, -2.0f, -4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/fully_connected_float_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/fully_connected_float_2_relaxed.example.cpp
new file mode 100644
index 000000000..e71963a1d
--- /dev/null
+++ b/nn/runtime/test/generated/examples/fully_connected_float_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: fully_connected_float_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.503691f, 0.196961f, 0.521017f, 0.554248f, 0.288678f, 0.792476f, 0.561653f, 0.46223f, 0.650736f, 0.163132f, 0.029658f, 0.411544f, 0.470539f, 0.57239f, 0.538755f, 0.21203f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 0.0732134f, 0, 0, 0, 0.280859f, 0, 0.128927f, 0, 0.0777251f, 0, 0.270268f, 0.271435f, 0.0173503f, 0.335465f, 0.235562f, 0, 0.0745866f, 0, 0.051611f, 0, 0.253876f, 0, 0.0814873f, 0, 0.104104f, 0, 0.248529f, 0.264194f, 0, 0.302973f, 0.166252f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/fully_connected_float_4d_simple_relaxed.example.cpp b/nn/runtime/test/generated/examples/fully_connected_float_4d_simple_relaxed.example.cpp
new file mode 100644
index 000000000..2a1aba7cf
--- /dev/null
+++ b/nn/runtime/test/generated/examples/fully_connected_float_4d_simple_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: fully_connected_float_4d_simple_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6, 7, 8, -9, -10, 1, 2, 3, 4, 5, 6, 7, -8, 9, -10}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {24, 25, 26, 58, 59, 60}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/fully_connected_float_large_relaxed.example.cpp b/nn/runtime/test/generated/examples/fully_connected_float_large_relaxed.example.cpp
new file mode 100644
index 000000000..131bc5fe3
--- /dev/null
+++ b/nn/runtime/test/generated/examples/fully_connected_float_large_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: fully_connected_float_large_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 10, 100, 1000, 10000}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {965432}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/fully_connected_float_large_weights_as_inputs_relaxed.example.cpp b/nn/runtime/test/generated/examples/fully_connected_float_large_weights_as_inputs_relaxed.example.cpp
new file mode 100644
index 000000000..990fd44a2
--- /dev/null
+++ b/nn/runtime/test/generated/examples/fully_connected_float_large_weights_as_inputs_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: fully_connected_float_large_weights_as_inputs_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 10, 100, 1000, 10000}}, {1, {2, 3, 4, 5, 6}}, {2, {900000}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {965432}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/l2_normalization_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/l2_normalization_2_relaxed.example.cpp
new file mode 100644
index 000000000..262ccdd1e
--- /dev/null
+++ b/nn/runtime/test/generated/examples/l2_normalization_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: l2_normalization_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-1.1f, 0.6f, 0.7f, 1.2f, -0.7f, 0.1f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-0.55f, 0.3f, 0.35f, 0.6f, -0.35f, 0.05f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/l2_pool_float_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/l2_pool_float_2_relaxed.example.cpp
new file mode 100644
index 000000000..fa964c56a
--- /dev/null
+++ b/nn/runtime/test/generated/examples/l2_pool_float_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: l2_pool_float_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 6, 2, 4, 3, 2, 10, 7}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {3.5f, 6.5f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/l2_pool_float_large_relaxed.example.cpp b/nn/runtime/test/generated/examples/l2_pool_float_large_relaxed.example.cpp
new file mode 100644
index 000000000..bae252198
--- /dev/null
+++ b/nn/runtime/test/generated/examples/l2_pool_float_large_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: l2_pool_float_large_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6.442049503326416f, 7.314369201660156f, 8.215838432312012f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/max_pool_float_4_relaxed.example.cpp b/nn/runtime/test/generated/examples/max_pool_float_4_relaxed.example.cpp
new file mode 100644
index 000000000..f13ab59f6
--- /dev/null
+++ b/nn/runtime/test/generated/examples/max_pool_float_4_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: max_pool_float_4_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 6, 2, 4, 3, 2, 10, 7}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6, 10}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/mean_float_1_relaxed.example.cpp b/nn/runtime/test/generated/examples/mean_float_1_relaxed.example.cpp
new file mode 100644
index 000000000..91d44215a
--- /dev/null
+++ b/nn/runtime/test/generated/examples/mean_float_1_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: mean_float_1_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f, 15.0f, 16.0f, 17.0f, 18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f, 24.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {12.0f, 13.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/mean_float_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/mean_float_2_relaxed.example.cpp
new file mode 100644
index 000000000..5a5cfbc69
--- /dev/null
+++ b/nn/runtime/test/generated/examples/mean_float_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: mean_float_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f, 13.0f, 14.0f, 15.0f, 16.0f, 17.0f, 18.0f, 19.0f, 20.0f, 21.0f, 22.0f, 23.0f, 24.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {10.5f, 12.5f, 14.5f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/mean_relaxed.example.cpp b/nn/runtime/test/generated/examples/mean_relaxed.example.cpp
new file mode 100644
index 000000000..43602b8a7
--- /dev/null
+++ b/nn/runtime/test/generated/examples/mean_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: mean_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.5f, 3.5f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/pad_float_1_relaxed.example.cpp b/nn/runtime/test/generated/examples/pad_float_1_relaxed.example.cpp
new file mode 100644
index 000000000..6d1dc6fd8
--- /dev/null
+++ b/nn/runtime/test/generated/examples/pad_float_1_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: pad_float_1_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 1, 2, 3, 0, 0, 0, 0, 4, 5, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/pad_relaxed.example.cpp b/nn/runtime/test/generated/examples/pad_relaxed.example.cpp
new file mode 100644
index 000000000..6ad7c7b2e
--- /dev/null
+++ b/nn/runtime/test/generated/examples/pad_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: pad_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 2.0f, 0.0f, 0.0f, 3.0f, 4.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/relu_float_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/relu_float_2_relaxed.example.cpp
new file mode 100644
index 000000000..1b1ee321f
--- /dev/null
+++ b/nn/runtime/test/generated/examples/relu_float_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: relu_float_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {-0.0f, 1.0f, -2.0f, 3.0f, -4.0f, 5.0f, -6.0f, 7.0f, -8.0f, 9.0f, -10.0f, 11.0f, -12.0f, 13.0f, -14.0f, 15.0f, -16.0f, 17.0f, -18.0f, 19.0f, -20.0f, 21.0f, -22.0f, 23.0f, -24.0f, 25.0f, -26.0f, 27.0f, -28.0f, 29.0f, -30.0f, 31.0f, -32.0f, 33.0f, -34.0f, 35.0f, -36.0f, 37.0f, -38.0f, 39.0f, -40.0f, 41.0f, -42.0f, 43.0f, -44.0f, 45.0f, -46.0f, 47.0f, -48.0f, 49.0f, -50.0f, 51.0f, -52.0f, 53.0f, -54.0f, 55.0f, -56.0f, 57.0f, -58.0f, 59.0f, -60.0f, 61.0f, -62.0f, 63.0f, -64.0f, 65.0f, -66.0f, 67.0f, -68.0f, 69.0f, -70.0f, 71.0f, -72.0f, 73.0f, -74.0f, 75.0f, -76.0f, 77.0f, -78.0f, 79.0f, -80.0f, 81.0f, -82.0f, 83.0f, -84.0f, 85.0f, -86.0f, 87.0f, -88.0f, 89.0f, -90.0f, 91.0f, -92.0f, 93.0f, -94.0f, 95.0f, -96.0f, 97.0f, -98.0f, 99.0f, -100.0f, 101.0f, -102.0f, 103.0f, -104.0f, 105.0f, -106.0f, 107.0f, -108.0f, 109.0f, -110.0f, 111.0f, -112.0f, 113.0f, -114.0f, 115.0f, -116.0f, 117.0f, -118.0f, 119.0f, -120.0f, 121.0f, -122.0f, 123.0f, -124.0f, 125.0f, -126.0f, 127.0f, -128.0f, 129.0f, -130.0f, 131.0f, -132.0f, 133.0f, -134.0f, 135.0f, -136.0f, 137.0f, -138.0f, 139.0f, -140.0f, 141.0f, -142.0f, 143.0f, -144.0f, 145.0f, -146.0f, 147.0f, -148.0f, 149.0f, -150.0f, 151.0f, -152.0f, 153.0f, -154.0f, 155.0f, -156.0f, 157.0f, -158.0f, 159.0f, -160.0f, 161.0f, -162.0f, 163.0f, -164.0f, 165.0f, -166.0f, 167.0f, -168.0f, 169.0f, -170.0f, 171.0f, -172.0f, 173.0f, -174.0f, 175.0f, -176.0f, 177.0f, -178.0f, 179.0f, -180.0f, 181.0f, -182.0f, 183.0f, -184.0f, 185.0f, -186.0f, 187.0f, -188.0f, 189.0f, -190.0f, 191.0f, -192.0f, 193.0f, -194.0f, 195.0f, -196.0f, 197.0f, -198.0f, 199.0f, -200.0f, 201.0f, -202.0f, 203.0f, -204.0f, 205.0f, -206.0f, 207.0f, -208.0f, 209.0f, -210.0f, 211.0f, -212.0f, 213.0f, -214.0f, 215.0f, -216.0f, 217.0f, -218.0f, 219.0f, -220.0f, 221.0f, -222.0f, 223.0f, -224.0f, 225.0f, -226.0f, 227.0f, -228.0f, 229.0f, -230.0f, 231.0f, -232.0f, 233.0f, -234.0f, 235.0f, -236.0f, 237.0f, -238.0f, 239.0f, -240.0f, 241.0f, -242.0f, 243.0f, -244.0f, 245.0f, -246.0f, 247.0f, -248.0f, 249.0f, -250.0f, 251.0f, -252.0f, 253.0f, -254.0f, 255.0f, -256.0f, 257.0f, -258.0f, 259.0f, -260.0f, 261.0f, -262.0f, 263.0f, -264.0f, 265.0f, -266.0f, 267.0f, -268.0f, 269.0f, -270.0f, 271.0f, -272.0f, 273.0f, -274.0f, 275.0f, -276.0f, 277.0f, -278.0f, 279.0f, -280.0f, 281.0f, -282.0f, 283.0f, -284.0f, 285.0f, -286.0f, 287.0f, -288.0f, 289.0f, -290.0f, 291.0f, -292.0f, 293.0f, -294.0f, 295.0f, -296.0f, 297.0f, -298.0f, 299.0f, -300.0f, 301.0f, -302.0f, 303.0f, -304.0f, 305.0f, -306.0f, 307.0f, -308.0f, 309.0f, -310.0f, 311.0f, -312.0f, 313.0f, -314.0f, 315.0f, -316.0f, 317.0f, -318.0f, 319.0f, -320.0f, 321.0f, -322.0f, 323.0f, -324.0f, 325.0f, -326.0f, 327.0f, -328.0f, 329.0f, -330.0f, 331.0f, -332.0f, 333.0f, -334.0f, 335.0f, -336.0f, 337.0f, -338.0f, 339.0f, -340.0f, 341.0f, -342.0f, 343.0f, -344.0f, 345.0f, -346.0f, 347.0f, -348.0f, 349.0f, -350.0f, 351.0f, -352.0f, 353.0f, -354.0f, 355.0f, -356.0f, 357.0f, -358.0f, 359.0f, -360.0f, 361.0f, -362.0f, 363.0f, -364.0f, 365.0f, -366.0f, 367.0f, -368.0f, 369.0f, -370.0f, 371.0f, -372.0f, 373.0f, -374.0f, 375.0f, -376.0f, 377.0f, -378.0f, 379.0f, -380.0f, 381.0f, -382.0f, 383.0f, -384.0f, 385.0f, -386.0f, 387.0f, -388.0f, 389.0f, -390.0f, 391.0f, -392.0f, 393.0f, -394.0f, 395.0f, -396.0f, 397.0f, -398.0f, 399.0f, -400.0f, 401.0f, -402.0f, 403.0f, -404.0f, 405.0f, -406.0f, 407.0f, -408.0f, 409.0f, -410.0f, 411.0f, -412.0f, 413.0f, -414.0f, 415.0f, -416.0f, 417.0f, -418.0f, 419.0f, -420.0f, 421.0f, -422.0f, 423.0f, -424.0f, 425.0f, -426.0f, 427.0f, -428.0f, 429.0f, 
-430.0f, 431.0f, -432.0f, 433.0f, -434.0f, 435.0f, -436.0f, 437.0f, -438.0f, 439.0f, -440.0f, 441.0f, -442.0f, 443.0f, -444.0f, 445.0f, -446.0f, 447.0f, -448.0f, 449.0f, -450.0f, 451.0f, -452.0f, 453.0f, -454.0f, 455.0f, -456.0f, 457.0f, -458.0f, 459.0f, -460.0f, 461.0f, -462.0f, 463.0f, -464.0f, 465.0f, -466.0f, 467.0f, -468.0f, 469.0f, -470.0f, 471.0f, -472.0f, 473.0f, -474.0f, 475.0f, -476.0f, 477.0f, -478.0f, 479.0f, -480.0f, 481.0f, -482.0f, 483.0f, -484.0f, 485.0f, -486.0f, 487.0f, -488.0f, 489.0f, -490.0f, 491.0f, -492.0f, 493.0f, -494.0f, 495.0f, -496.0f, 497.0f, -498.0f, 499.0f, -500.0f, 501.0f, -502.0f, 503.0f, -504.0f, 505.0f, -506.0f, 507.0f, -508.0f, 509.0f, -510.0f, 511.0f, -512.0f, 513.0f, -514.0f, 515.0f, -516.0f, 517.0f, -518.0f, 519.0f, -520.0f, 521.0f, -522.0f, 523.0f, -524.0f, 525.0f, -526.0f, 527.0f, -528.0f, 529.0f, -530.0f, 531.0f, -532.0f, 533.0f, -534.0f, 535.0f, -536.0f, 537.0f, -538.0f, 539.0f, -540.0f, 541.0f, -542.0f, 543.0f, -544.0f, 545.0f, -546.0f, 547.0f, -548.0f, 549.0f, -550.0f, 551.0f, -552.0f, 553.0f, -554.0f, 555.0f, -556.0f, 557.0f, -558.0f, 559.0f, -560.0f, 561.0f, -562.0f, 563.0f, -564.0f, 565.0f, -566.0f, 567.0f, -568.0f, 569.0f, -570.0f, 571.0f, -572.0f, 573.0f, -574.0f, 575.0f, -576.0f, 577.0f, -578.0f, 579.0f, -580.0f, 581.0f, -582.0f, 583.0f, -584.0f, 585.0f, -586.0f, 587.0f, -588.0f, 589.0f, -590.0f, 591.0f, -592.0f, 593.0f, -594.0f, 595.0f, -596.0f, 597.0f, -598.0f, 599.0f, -600.0f, 601.0f, -602.0f, 603.0f, -604.0f, 605.0f, -606.0f, 607.0f, -608.0f, 609.0f, -610.0f, 611.0f, -612.0f, 613.0f, -614.0f, 615.0f, -616.0f, 617.0f, -618.0f, 619.0f, -620.0f, 621.0f, -622.0f, 623.0f, -624.0f, 625.0f, -626.0f, 627.0f, -628.0f, 629.0f, -630.0f, 631.0f, -632.0f, 633.0f, -634.0f, 635.0f, -636.0f, 637.0f, -638.0f, 639.0f, -640.0f, 641.0f, -642.0f, 643.0f, -644.0f, 645.0f, -646.0f, 647.0f, -648.0f, 649.0f, -650.0f, 651.0f, -652.0f, 653.0f, -654.0f, 655.0f, -656.0f, 657.0f, -658.0f, 659.0f, -660.0f, 661.0f, -662.0f, 663.0f, -664.0f, 665.0f, -666.0f, 667.0f, -668.0f, 669.0f, -670.0f, 671.0f, -672.0f, 673.0f, -674.0f, 675.0f, -676.0f, 677.0f, -678.0f, 679.0f, -680.0f, 681.0f, -682.0f, 683.0f, -684.0f, 685.0f, -686.0f, 687.0f, -688.0f, 689.0f, -690.0f, 691.0f, -692.0f, 693.0f, -694.0f, 695.0f, -696.0f, 697.0f, -698.0f, 699.0f, -700.0f, 701.0f, -702.0f, 703.0f, -704.0f, 705.0f, -706.0f, 707.0f, -708.0f, 709.0f, -710.0f, 711.0f, -712.0f, 713.0f, -714.0f, 715.0f, -716.0f, 717.0f, -718.0f, 719.0f, -720.0f, 721.0f, -722.0f, 723.0f, -724.0f, 725.0f, -726.0f, 727.0f, -728.0f, 729.0f, -730.0f, 731.0f, -732.0f, 733.0f, -734.0f, 735.0f, -736.0f, 737.0f, -738.0f, 739.0f, -740.0f, 741.0f, -742.0f, 743.0f, -744.0f, 745.0f, -746.0f, 747.0f, -748.0f, 749.0f, -750.0f, 751.0f, -752.0f, 753.0f, -754.0f, 755.0f, -756.0f, 757.0f, -758.0f, 759.0f, -760.0f, 761.0f, -762.0f, 763.0f, -764.0f, 765.0f, -766.0f, 767.0f, -768.0f, 769.0f, -770.0f, 771.0f, -772.0f, 773.0f, -774.0f, 775.0f, -776.0f, 777.0f, -778.0f, 779.0f, -780.0f, 781.0f, -782.0f, 783.0f, -784.0f, 785.0f, -786.0f, 787.0f, -788.0f, 789.0f, -790.0f, 791.0f, -792.0f, 793.0f, -794.0f, 795.0f, -796.0f, 797.0f, -798.0f, 799.0f, -800.0f, 801.0f, -802.0f, 803.0f, -804.0f, 805.0f, -806.0f, 807.0f, -808.0f, 809.0f, -810.0f, 811.0f, -812.0f, 813.0f, -814.0f, 815.0f, -816.0f, 817.0f, -818.0f, 819.0f, -820.0f, 821.0f, -822.0f, 823.0f, -824.0f, 825.0f, -826.0f, 827.0f, -828.0f, 829.0f, -830.0f, 831.0f, -832.0f, 833.0f, -834.0f, 835.0f, -836.0f, 837.0f, -838.0f, 839.0f, -840.0f, 841.0f, -842.0f, 843.0f, -844.0f, 845.0f, -846.0f, 847.0f, 
-848.0f, 849.0f, -850.0f, 851.0f, -852.0f, 853.0f, -854.0f, 855.0f, -856.0f, 857.0f, -858.0f, 859.0f, -860.0f, 861.0f, -862.0f, 863.0f, -864.0f, 865.0f, -866.0f, 867.0f, -868.0f, 869.0f, -870.0f, 871.0f, -872.0f, 873.0f, -874.0f, 875.0f, -876.0f, 877.0f, -878.0f, 879.0f, -880.0f, 881.0f, -882.0f, 883.0f, -884.0f, 885.0f, -886.0f, 887.0f, -888.0f, 889.0f, -890.0f, 891.0f, -892.0f, 893.0f, -894.0f, 895.0f, -896.0f, 897.0f, -898.0f, 899.0f, -900.0f, 901.0f, -902.0f, 903.0f, -904.0f, 905.0f, -906.0f, 907.0f, -908.0f, 909.0f, -910.0f, 911.0f, -912.0f, 913.0f, -914.0f, 915.0f, -916.0f, 917.0f, -918.0f, 919.0f, -920.0f, 921.0f, -922.0f, 923.0f, -924.0f, 925.0f, -926.0f, 927.0f, -928.0f, 929.0f, -930.0f, 931.0f, -932.0f, 933.0f, -934.0f, 935.0f, -936.0f, 937.0f, -938.0f, 939.0f, -940.0f, 941.0f, -942.0f, 943.0f, -944.0f, 945.0f, -946.0f, 947.0f, -948.0f, 949.0f, -950.0f, 951.0f, -952.0f, 953.0f, -954.0f, 955.0f, -956.0f, 957.0f, -958.0f, 959.0f, -960.0f, 961.0f, -962.0f, 963.0f, -964.0f, 965.0f, -966.0f, 967.0f, -968.0f, 969.0f, -970.0f, 971.0f, -972.0f, 973.0f, -974.0f, 975.0f, -976.0f, 977.0f, -978.0f, 979.0f, -980.0f, 981.0f, -982.0f, 983.0f, -984.0f, 985.0f, -986.0f, 987.0f, -988.0f, 989.0f, -990.0f, 991.0f, -992.0f, 993.0f, -994.0f, 995.0f, -996.0f, 997.0f, -998.0f, 999.0f, -1000.0f, 1001.0f, -1002.0f, 1003.0f, -1004.0f, 1005.0f, -1006.0f, 1007.0f, -1008.0f, 1009.0f, -1010.0f, 1011.0f, -1012.0f, 1013.0f, -1014.0f, 1015.0f, -1016.0f, 1017.0f, -1018.0f, 1019.0f, -1020.0f, 1021.0f, -1022.0f, 1023.0f, -1024.0f, 1025.0f, -1026.0f, 1027.0f, -1028.0f, 1029.0f, -1030.0f, 1031.0f, -1032.0f, 1033.0f, -1034.0f, 1035.0f, -1036.0f, 1037.0f, -1038.0f, 1039.0f, -1040.0f, 1041.0f, -1042.0f, 1043.0f, -1044.0f, 1045.0f, -1046.0f, 1047.0f, -1048.0f, 1049.0f, -1050.0f, 1051.0f, -1052.0f, 1053.0f, -1054.0f, 1055.0f, -1056.0f, 1057.0f, -1058.0f, 1059.0f, -1060.0f, 1061.0f, -1062.0f, 1063.0f, -1064.0f, 1065.0f, -1066.0f, 1067.0f, -1068.0f, 1069.0f, -1070.0f, 1071.0f, -1072.0f, 1073.0f, -1074.0f, 1075.0f, -1076.0f, 1077.0f, -1078.0f, 1079.0f, -1080.0f, 1081.0f, -1082.0f, 1083.0f, -1084.0f, 1085.0f, -1086.0f, 1087.0f, -1088.0f, 1089.0f, -1090.0f, 1091.0f, -1092.0f, 1093.0f, -1094.0f, 1095.0f, -1096.0f, 1097.0f, -1098.0f, 1099.0f, -1100.0f, 1101.0f, -1102.0f, 1103.0f, -1104.0f, 1105.0f, -1106.0f, 1107.0f, -1108.0f, 1109.0f, -1110.0f, 1111.0f, -1112.0f, 1113.0f, -1114.0f, 1115.0f, -1116.0f, 1117.0f, -1118.0f, 1119.0f, -1120.0f, 1121.0f, -1122.0f, 1123.0f, -1124.0f, 1125.0f, -1126.0f, 1127.0f, -1128.0f, 1129.0f, -1130.0f, 1131.0f, -1132.0f, 1133.0f, -1134.0f, 1135.0f, -1136.0f, 1137.0f, -1138.0f, 1139.0f, -1140.0f, 1141.0f, -1142.0f, 1143.0f, -1144.0f, 1145.0f, -1146.0f, 1147.0f, -1148.0f, 1149.0f, -1150.0f, 1151.0f, -1152.0f, 1153.0f, -1154.0f, 1155.0f, -1156.0f, 1157.0f, -1158.0f, 1159.0f, -1160.0f, 1161.0f, -1162.0f, 1163.0f, -1164.0f, 1165.0f, -1166.0f, 1167.0f, -1168.0f, 1169.0f, -1170.0f, 1171.0f, -1172.0f, 1173.0f, -1174.0f, 1175.0f, -1176.0f, 1177.0f, -1178.0f, 1179.0f, -1180.0f, 1181.0f, -1182.0f, 1183.0f, -1184.0f, 1185.0f, -1186.0f, 1187.0f, -1188.0f, 1189.0f, -1190.0f, 1191.0f, -1192.0f, 1193.0f, -1194.0f, 1195.0f, -1196.0f, 1197.0f, -1198.0f, 1199.0f, -1200.0f, 1201.0f, -1202.0f, 1203.0f, -1204.0f, 1205.0f, -1206.0f, 1207.0f, -1208.0f, 1209.0f, -1210.0f, 1211.0f, -1212.0f, 1213.0f, -1214.0f, 1215.0f, -1216.0f, 1217.0f, -1218.0f, 1219.0f, -1220.0f, 1221.0f, -1222.0f, 1223.0f, -1224.0f, 1225.0f, -1226.0f, 1227.0f, -1228.0f, 1229.0f, -1230.0f, 1231.0f, -1232.0f, 1233.0f, -1234.0f, 1235.0f, -1236.0f, 1237.0f, 
-1238.0f, 1239.0f, -1240.0f, 1241.0f, -1242.0f, 1243.0f, -1244.0f, 1245.0f, -1246.0f, 1247.0f, -1248.0f, 1249.0f, -1250.0f, 1251.0f, -1252.0f, 1253.0f, -1254.0f, 1255.0f, -1256.0f, 1257.0f, -1258.0f, 1259.0f, -1260.0f, 1261.0f, -1262.0f, 1263.0f, -1264.0f, 1265.0f, -1266.0f, 1267.0f, -1268.0f, 1269.0f, -1270.0f, 1271.0f, -1272.0f, 1273.0f, -1274.0f, 1275.0f, -1276.0f, 1277.0f, -1278.0f, 1279.0f, -1280.0f, 1281.0f, -1282.0f, 1283.0f, -1284.0f, 1285.0f, -1286.0f, 1287.0f, -1288.0f, 1289.0f, -1290.0f, 1291.0f, -1292.0f, 1293.0f, -1294.0f, 1295.0f, -1296.0f, 1297.0f, -1298.0f, 1299.0f, -1300.0f, 1301.0f, -1302.0f, 1303.0f, -1304.0f, 1305.0f, -1306.0f, 1307.0f, -1308.0f, 1309.0f, -1310.0f, 1311.0f, -1312.0f, 1313.0f, -1314.0f, 1315.0f, -1316.0f, 1317.0f, -1318.0f, 1319.0f, -1320.0f, 1321.0f, -1322.0f, 1323.0f, -1324.0f, 1325.0f, -1326.0f, 1327.0f, -1328.0f, 1329.0f, -1330.0f, 1331.0f, -1332.0f, 1333.0f, -1334.0f, 1335.0f, -1336.0f, 1337.0f, -1338.0f, 1339.0f, -1340.0f, 1341.0f, -1342.0f, 1343.0f, -1344.0f, 1345.0f, -1346.0f, 1347.0f, -1348.0f, 1349.0f, -1350.0f, 1351.0f, -1352.0f, 1353.0f, -1354.0f, 1355.0f, -1356.0f, 1357.0f, -1358.0f, 1359.0f, -1360.0f, 1361.0f, -1362.0f, 1363.0f, -1364.0f, 1365.0f, -1366.0f, 1367.0f, -1368.0f, 1369.0f, -1370.0f, 1371.0f, -1372.0f, 1373.0f, -1374.0f, 1375.0f, -1376.0f, 1377.0f, -1378.0f, 1379.0f, -1380.0f, 1381.0f, -1382.0f, 1383.0f, -1384.0f, 1385.0f, -1386.0f, 1387.0f, -1388.0f, 1389.0f, -1390.0f, 1391.0f, -1392.0f, 1393.0f, -1394.0f, 1395.0f, -1396.0f, 1397.0f, -1398.0f, 1399.0f, -1400.0f, 1401.0f, -1402.0f, 1403.0f, -1404.0f, 1405.0f, -1406.0f, 1407.0f, -1408.0f, 1409.0f, -1410.0f, 1411.0f, -1412.0f, 1413.0f, -1414.0f, 1415.0f, -1416.0f, 1417.0f, -1418.0f, 1419.0f, -1420.0f, 1421.0f, -1422.0f, 1423.0f, -1424.0f, 1425.0f, -1426.0f, 1427.0f, -1428.0f, 1429.0f, -1430.0f, 1431.0f, -1432.0f, 1433.0f, -1434.0f, 1435.0f, -1436.0f, 1437.0f, -1438.0f, 1439.0f, -1440.0f, 1441.0f, -1442.0f, 1443.0f, -1444.0f, 1445.0f, -1446.0f, 1447.0f, -1448.0f, 1449.0f, -1450.0f, 1451.0f, -1452.0f, 1453.0f, -1454.0f, 1455.0f, -1456.0f, 1457.0f, -1458.0f, 1459.0f, -1460.0f, 1461.0f, -1462.0f, 1463.0f, -1464.0f, 1465.0f, -1466.0f, 1467.0f, -1468.0f, 1469.0f, -1470.0f, 1471.0f, -1472.0f, 1473.0f, -1474.0f, 1475.0f, -1476.0f, 1477.0f, -1478.0f, 1479.0f, -1480.0f, 1481.0f, -1482.0f, 1483.0f, -1484.0f, 1485.0f, -1486.0f, 1487.0f, -1488.0f, 1489.0f, -1490.0f, 1491.0f, -1492.0f, 1493.0f, -1494.0f, 1495.0f, -1496.0f, 1497.0f, -1498.0f, 1499.0f, -1500.0f, 1501.0f, -1502.0f, 1503.0f, -1504.0f, 1505.0f, -1506.0f, 1507.0f, -1508.0f, 1509.0f, -1510.0f, 1511.0f, -1512.0f, 1513.0f, -1514.0f, 1515.0f, -1516.0f, 1517.0f, -1518.0f, 1519.0f, -1520.0f, 1521.0f, -1522.0f, 1523.0f, -1524.0f, 1525.0f, -1526.0f, 1527.0f, -1528.0f, 1529.0f, -1530.0f, 1531.0f, -1532.0f, 1533.0f, -1534.0f, 1535.0f, -1536.0f, 1537.0f, -1538.0f, 1539.0f, -1540.0f, 1541.0f, -1542.0f, 1543.0f, -1544.0f, 1545.0f, -1546.0f, 1547.0f, -1548.0f, 1549.0f, -1550.0f, 1551.0f, -1552.0f, 1553.0f, -1554.0f, 1555.0f, -1556.0f, 1557.0f, -1558.0f, 1559.0f, -1560.0f, 1561.0f, -1562.0f, 1563.0f, -1564.0f, 1565.0f, -1566.0f, 1567.0f, -1568.0f, 1569.0f, -1570.0f, 1571.0f, -1572.0f, 1573.0f, -1574.0f, 1575.0f, -1576.0f, 1577.0f, -1578.0f, 1579.0f, -1580.0f, 1581.0f, -1582.0f, 1583.0f, -1584.0f, 1585.0f, -1586.0f, 1587.0f, -1588.0f, 1589.0f, -1590.0f, 1591.0f, -1592.0f, 1593.0f, -1594.0f, 1595.0f, -1596.0f, 1597.0f, -1598.0f, 1599.0f, -1600.0f, 1601.0f, -1602.0f, 1603.0f, -1604.0f, 1605.0f, -1606.0f, 1607.0f, -1608.0f, 1609.0f, -1610.0f, 1611.0f, 
-1612.0f, 1613.0f, -1614.0f, 1615.0f, -1616.0f, 1617.0f, -1618.0f, 1619.0f, -1620.0f, 1621.0f, -1622.0f, 1623.0f, -1624.0f, 1625.0f, -1626.0f, 1627.0f, -1628.0f, 1629.0f, -1630.0f, 1631.0f, -1632.0f, 1633.0f, -1634.0f, 1635.0f, -1636.0f, 1637.0f, -1638.0f, 1639.0f, -1640.0f, 1641.0f, -1642.0f, 1643.0f, -1644.0f, 1645.0f, -1646.0f, 1647.0f, -1648.0f, 1649.0f, -1650.0f, 1651.0f, -1652.0f, 1653.0f, -1654.0f, 1655.0f, -1656.0f, 1657.0f, -1658.0f, 1659.0f, -1660.0f, 1661.0f, -1662.0f, 1663.0f, -1664.0f, 1665.0f, -1666.0f, 1667.0f, -1668.0f, 1669.0f, -1670.0f, 1671.0f, -1672.0f, 1673.0f, -1674.0f, 1675.0f, -1676.0f, 1677.0f, -1678.0f, 1679.0f, -1680.0f, 1681.0f, -1682.0f, 1683.0f, -1684.0f, 1685.0f, -1686.0f, 1687.0f, -1688.0f, 1689.0f, -1690.0f, 1691.0f, -1692.0f, 1693.0f, -1694.0f, 1695.0f, -1696.0f, 1697.0f, -1698.0f, 1699.0f, -1700.0f, 1701.0f, -1702.0f, 1703.0f, -1704.0f, 1705.0f, -1706.0f, 1707.0f, -1708.0f, 1709.0f, -1710.0f, 1711.0f, -1712.0f, 1713.0f, -1714.0f, 1715.0f, -1716.0f, 1717.0f, -1718.0f, 1719.0f, -1720.0f, 1721.0f, -1722.0f, 1723.0f, -1724.0f, 1725.0f, -1726.0f, 1727.0f, -1728.0f, 1729.0f, -1730.0f, 1731.0f, -1732.0f, 1733.0f, -1734.0f, 1735.0f, -1736.0f, 1737.0f, -1738.0f, 1739.0f, -1740.0f, 1741.0f, -1742.0f, 1743.0f, -1744.0f, 1745.0f, -1746.0f, 1747.0f, -1748.0f, 1749.0f, -1750.0f, 1751.0f, -1752.0f, 1753.0f, -1754.0f, 1755.0f, -1756.0f, 1757.0f, -1758.0f, 1759.0f, -1760.0f, 1761.0f, -1762.0f, 1763.0f, -1764.0f, 1765.0f, -1766.0f, 1767.0f, -1768.0f, 1769.0f, -1770.0f, 1771.0f, -1772.0f, 1773.0f, -1774.0f, 1775.0f, -1776.0f, 1777.0f, -1778.0f, 1779.0f, -1780.0f, 1781.0f, -1782.0f, 1783.0f, -1784.0f, 1785.0f, -1786.0f, 1787.0f, -1788.0f, 1789.0f, -1790.0f, 1791.0f, -1792.0f, 1793.0f, -1794.0f, 1795.0f, -1796.0f, 1797.0f, -1798.0f, 1799.0f, -1800.0f, 1801.0f, -1802.0f, 1803.0f, -1804.0f, 1805.0f, -1806.0f, 1807.0f, -1808.0f, 1809.0f, -1810.0f, 1811.0f, -1812.0f, 1813.0f, -1814.0f, 1815.0f, -1816.0f, 1817.0f, -1818.0f, 1819.0f, -1820.0f, 1821.0f, -1822.0f, 1823.0f, -1824.0f, 1825.0f, -1826.0f, 1827.0f, -1828.0f, 1829.0f, -1830.0f, 1831.0f, -1832.0f, 1833.0f, -1834.0f, 1835.0f, -1836.0f, 1837.0f, -1838.0f, 1839.0f, -1840.0f, 1841.0f, -1842.0f, 1843.0f, -1844.0f, 1845.0f, -1846.0f, 1847.0f, -1848.0f, 1849.0f, -1850.0f, 1851.0f, -1852.0f, 1853.0f, -1854.0f, 1855.0f, -1856.0f, 1857.0f, -1858.0f, 1859.0f, -1860.0f, 1861.0f, -1862.0f, 1863.0f, -1864.0f, 1865.0f, -1866.0f, 1867.0f, -1868.0f, 1869.0f, -1870.0f, 1871.0f, -1872.0f, 1873.0f, -1874.0f, 1875.0f, -1876.0f, 1877.0f, -1878.0f, 1879.0f, -1880.0f, 1881.0f, -1882.0f, 1883.0f, -1884.0f, 1885.0f, -1886.0f, 1887.0f, -1888.0f, 1889.0f, -1890.0f, 1891.0f, -1892.0f, 1893.0f, -1894.0f, 1895.0f, -1896.0f, 1897.0f, -1898.0f, 1899.0f, -1900.0f, 1901.0f, -1902.0f, 1903.0f, -1904.0f, 1905.0f, -1906.0f, 1907.0f, -1908.0f, 1909.0f, -1910.0f, 1911.0f, -1912.0f, 1913.0f, -1914.0f, 1915.0f, -1916.0f, 1917.0f, -1918.0f, 1919.0f, -1920.0f, 1921.0f, -1922.0f, 1923.0f, -1924.0f, 1925.0f, -1926.0f, 1927.0f, -1928.0f, 1929.0f, -1930.0f, 1931.0f, -1932.0f, 1933.0f, -1934.0f, 1935.0f, -1936.0f, 1937.0f, -1938.0f, 1939.0f, -1940.0f, 1941.0f, -1942.0f, 1943.0f, -1944.0f, 1945.0f, -1946.0f, 1947.0f, -1948.0f, 1949.0f, -1950.0f, 1951.0f, -1952.0f, 1953.0f, -1954.0f, 1955.0f, -1956.0f, 1957.0f, -1958.0f, 1959.0f, -1960.0f, 1961.0f, -1962.0f, 1963.0f, -1964.0f, 1965.0f, -1966.0f, 1967.0f, -1968.0f, 1969.0f, -1970.0f, 1971.0f, -1972.0f, 1973.0f, -1974.0f, 1975.0f, -1976.0f, 1977.0f, -1978.0f, 1979.0f, -1980.0f, 1981.0f, -1982.0f, 1983.0f, -1984.0f, 1985.0f, 
-1986.0f, 1987.0f, -1988.0f, 1989.0f, -1990.0f, 1991.0f, -1992.0f, 1993.0f, -1994.0f, 1995.0f, -1996.0f, 1997.0f, -1998.0f, 1999.0f, -2000.0f, 2001.0f, -2002.0f, 2003.0f, -2004.0f, 2005.0f, -2006.0f, 2007.0f, -2008.0f, 2009.0f, -2010.0f, 2011.0f, -2012.0f, 2013.0f, -2014.0f, 2015.0f, -2016.0f, 2017.0f, -2018.0f, 2019.0f, -2020.0f, 2021.0f, -2022.0f, 2023.0f, -2024.0f, 2025.0f, -2026.0f, 2027.0f, -2028.0f, 2029.0f, -2030.0f, 2031.0f, -2032.0f, 2033.0f, -2034.0f, 2035.0f, -2036.0f, 2037.0f, -2038.0f, 2039.0f, -2040.0f, 2041.0f, -2042.0f, 2043.0f, -2044.0f, 2045.0f, -2046.0f, 2047.0f, -2048.0f, 2049.0f, -2050.0f, 2051.0f, -2052.0f, 2053.0f, -2054.0f, 2055.0f, -2056.0f, 2057.0f, -2058.0f, 2059.0f, -2060.0f, 2061.0f, -2062.0f, 2063.0f, -2064.0f, 2065.0f, -2066.0f, 2067.0f, -2068.0f, 2069.0f, -2070.0f, 2071.0f, -2072.0f, 2073.0f, -2074.0f, 2075.0f, -2076.0f, 2077.0f, -2078.0f, 2079.0f, -2080.0f, 2081.0f, -2082.0f, 2083.0f, -2084.0f, 2085.0f, -2086.0f, 2087.0f, -2088.0f, 2089.0f, -2090.0f, 2091.0f, -2092.0f, 2093.0f, -2094.0f, 2095.0f, -2096.0f, 2097.0f, -2098.0f, 2099.0f, -2100.0f, 2101.0f, -2102.0f, 2103.0f, -2104.0f, 2105.0f, -2106.0f, 2107.0f, -2108.0f, 2109.0f, -2110.0f, 2111.0f, -2112.0f, 2113.0f, -2114.0f, 2115.0f, -2116.0f, 2117.0f, -2118.0f, 2119.0f, -2120.0f, 2121.0f, -2122.0f, 2123.0f, -2124.0f, 2125.0f, -2126.0f, 2127.0f, -2128.0f, 2129.0f, -2130.0f, 2131.0f, -2132.0f, 2133.0f, -2134.0f, 2135.0f, -2136.0f, 2137.0f, -2138.0f, 2139.0f, -2140.0f, 2141.0f, -2142.0f, 2143.0f, -2144.0f, 2145.0f, -2146.0f, 2147.0f, -2148.0f, 2149.0f, -2150.0f, 2151.0f, -2152.0f, 2153.0f, -2154.0f, 2155.0f, -2156.0f, 2157.0f, -2158.0f, 2159.0f, -2160.0f, 2161.0f, -2162.0f, 2163.0f, -2164.0f, 2165.0f, -2166.0f, 2167.0f, -2168.0f, 2169.0f, -2170.0f, 2171.0f, -2172.0f, 2173.0f, -2174.0f, 2175.0f, -2176.0f, 2177.0f, -2178.0f, 2179.0f, -2180.0f, 2181.0f, -2182.0f, 2183.0f, -2184.0f, 2185.0f, -2186.0f, 2187.0f, -2188.0f, 2189.0f, -2190.0f, 2191.0f, -2192.0f, 2193.0f, -2194.0f, 2195.0f, -2196.0f, 2197.0f, -2198.0f, 2199.0f, -2200.0f, 2201.0f, -2202.0f, 2203.0f, -2204.0f, 2205.0f, -2206.0f, 2207.0f, -2208.0f, 2209.0f, -2210.0f, 2211.0f, -2212.0f, 2213.0f, -2214.0f, 2215.0f, -2216.0f, 2217.0f, -2218.0f, 2219.0f, -2220.0f, 2221.0f, -2222.0f, 2223.0f, -2224.0f, 2225.0f, -2226.0f, 2227.0f, -2228.0f, 2229.0f, -2230.0f, 2231.0f, -2232.0f, 2233.0f, -2234.0f, 2235.0f, -2236.0f, 2237.0f, -2238.0f, 2239.0f, -2240.0f, 2241.0f, -2242.0f, 2243.0f, -2244.0f, 2245.0f, -2246.0f, 2247.0f, -2248.0f, 2249.0f, -2250.0f, 2251.0f, -2252.0f, 2253.0f, -2254.0f, 2255.0f, -2256.0f, 2257.0f, -2258.0f, 2259.0f, -2260.0f, 2261.0f, -2262.0f, 2263.0f, -2264.0f, 2265.0f, -2266.0f, 2267.0f, -2268.0f, 2269.0f, -2270.0f, 2271.0f, -2272.0f, 2273.0f, -2274.0f, 2275.0f, -2276.0f, 2277.0f, -2278.0f, 2279.0f, -2280.0f, 2281.0f, -2282.0f, 2283.0f, -2284.0f, 2285.0f, -2286.0f, 2287.0f, -2288.0f, 2289.0f, -2290.0f, 2291.0f, -2292.0f, 2293.0f, -2294.0f, 2295.0f, -2296.0f, 2297.0f, -2298.0f, 2299.0f, -2300.0f, 2301.0f, -2302.0f, 2303.0f, -2304.0f, 2305.0f, -2306.0f, 2307.0f, -2308.0f, 2309.0f, -2310.0f, 2311.0f, -2312.0f, 2313.0f, -2314.0f, 2315.0f, -2316.0f, 2317.0f, -2318.0f, 2319.0f, -2320.0f, 2321.0f, -2322.0f, 2323.0f, -2324.0f, 2325.0f, -2326.0f, 2327.0f, -2328.0f, 2329.0f, -2330.0f, 2331.0f, -2332.0f, 2333.0f, -2334.0f, 2335.0f, -2336.0f, 2337.0f, -2338.0f, 2339.0f, -2340.0f, 2341.0f, -2342.0f, 2343.0f, -2344.0f, 2345.0f, -2346.0f, 2347.0f, -2348.0f, 2349.0f, -2350.0f, 2351.0f, -2352.0f, 2353.0f, -2354.0f, 2355.0f, -2356.0f, 2357.0f, -2358.0f, 2359.0f, 
-2360.0f, 2361.0f, -2362.0f, 2363.0f, -2364.0f, 2365.0f, -2366.0f, 2367.0f, -2368.0f, 2369.0f, -2370.0f, 2371.0f, -2372.0f, 2373.0f, -2374.0f, 2375.0f, -2376.0f, 2377.0f, -2378.0f, 2379.0f, -2380.0f, 2381.0f, -2382.0f, 2383.0f, -2384.0f, 2385.0f, -2386.0f, 2387.0f, -2388.0f, 2389.0f, -2390.0f, 2391.0f, -2392.0f, 2393.0f, -2394.0f, 2395.0f, -2396.0f, 2397.0f, -2398.0f, 2399.0f, -2400.0f, 2401.0f, -2402.0f, 2403.0f, -2404.0f, 2405.0f, -2406.0f, 2407.0f, -2408.0f, 2409.0f, -2410.0f, 2411.0f, -2412.0f, 2413.0f, -2414.0f, 2415.0f, -2416.0f, 2417.0f, -2418.0f, 2419.0f, -2420.0f, 2421.0f, -2422.0f, 2423.0f, -2424.0f, 2425.0f, -2426.0f, 2427.0f, -2428.0f, 2429.0f, -2430.0f, 2431.0f, -2432.0f, 2433.0f, -2434.0f, 2435.0f, -2436.0f, 2437.0f, -2438.0f, 2439.0f, -2440.0f, 2441.0f, -2442.0f, 2443.0f, -2444.0f, 2445.0f, -2446.0f, 2447.0f, -2448.0f, 2449.0f, -2450.0f, 2451.0f, -2452.0f, 2453.0f, -2454.0f, 2455.0f, -2456.0f, 2457.0f, -2458.0f, 2459.0f, -2460.0f, 2461.0f, -2462.0f, 2463.0f, -2464.0f, 2465.0f, -2466.0f, 2467.0f, -2468.0f, 2469.0f, -2470.0f, 2471.0f, -2472.0f, 2473.0f, -2474.0f, 2475.0f, -2476.0f, 2477.0f, -2478.0f, 2479.0f, -2480.0f, 2481.0f, -2482.0f, 2483.0f, -2484.0f, 2485.0f, -2486.0f, 2487.0f, -2488.0f, 2489.0f, -2490.0f, 2491.0f, -2492.0f, 2493.0f, -2494.0f, 2495.0f, -2496.0f, 2497.0f, -2498.0f, 2499.0f, -2500.0f, 2501.0f, -2502.0f, 2503.0f, -2504.0f, 2505.0f, -2506.0f, 2507.0f, -2508.0f, 2509.0f, -2510.0f, 2511.0f, -2512.0f, 2513.0f, -2514.0f, 2515.0f, -2516.0f, 2517.0f, -2518.0f, 2519.0f, -2520.0f, 2521.0f, -2522.0f, 2523.0f, -2524.0f, 2525.0f, -2526.0f, 2527.0f, -2528.0f, 2529.0f, -2530.0f, 2531.0f, -2532.0f, 2533.0f, -2534.0f, 2535.0f, -2536.0f, 2537.0f, -2538.0f, 2539.0f, -2540.0f, 2541.0f, -2542.0f, 2543.0f, -2544.0f, 2545.0f, -2546.0f, 2547.0f, -2548.0f, 2549.0f, -2550.0f, 2551.0f, -2552.0f, 2553.0f, -2554.0f, 2555.0f, -2556.0f, 2557.0f, -2558.0f, 2559.0f, -2560.0f, 2561.0f, -2562.0f, 2563.0f, -2564.0f, 2565.0f, -2566.0f, 2567.0f, -2568.0f, 2569.0f, -2570.0f, 2571.0f, -2572.0f, 2573.0f, -2574.0f, 2575.0f, -2576.0f, 2577.0f, -2578.0f, 2579.0f, -2580.0f, 2581.0f, -2582.0f, 2583.0f, -2584.0f, 2585.0f, -2586.0f, 2587.0f, -2588.0f, 2589.0f, -2590.0f, 2591.0f, -2592.0f, 2593.0f, -2594.0f, 2595.0f, -2596.0f, 2597.0f, -2598.0f, 2599.0f, -2600.0f, 2601.0f, -2602.0f, 2603.0f, -2604.0f, 2605.0f, -2606.0f, 2607.0f, -2608.0f, 2609.0f, -2610.0f, 2611.0f, -2612.0f, 2613.0f, -2614.0f, 2615.0f, -2616.0f, 2617.0f, -2618.0f, 2619.0f, -2620.0f, 2621.0f, -2622.0f, 2623.0f, -2624.0f, 2625.0f, -2626.0f, 2627.0f, -2628.0f, 2629.0f, -2630.0f, 2631.0f, -2632.0f, 2633.0f, -2634.0f, 2635.0f, -2636.0f, 2637.0f, -2638.0f, 2639.0f, -2640.0f, 2641.0f, -2642.0f, 2643.0f, -2644.0f, 2645.0f, -2646.0f, 2647.0f, -2648.0f, 2649.0f, -2650.0f, 2651.0f, -2652.0f, 2653.0f, -2654.0f, 2655.0f, -2656.0f, 2657.0f, -2658.0f, 2659.0f, -2660.0f, 2661.0f, -2662.0f, 2663.0f, -2664.0f, 2665.0f, -2666.0f, 2667.0f, -2668.0f, 2669.0f, -2670.0f, 2671.0f, -2672.0f, 2673.0f, -2674.0f, 2675.0f, -2676.0f, 2677.0f, -2678.0f, 2679.0f, -2680.0f, 2681.0f, -2682.0f, 2683.0f, -2684.0f, 2685.0f, -2686.0f, 2687.0f, -2688.0f, 2689.0f, -2690.0f, 2691.0f, -2692.0f, 2693.0f, -2694.0f, 2695.0f, -2696.0f, 2697.0f, -2698.0f, 2699.0f, -2700.0f, 2701.0f, -2702.0f, 2703.0f, -2704.0f, 2705.0f, -2706.0f, 2707.0f, -2708.0f, 2709.0f, -2710.0f, 2711.0f, -2712.0f, 2713.0f, -2714.0f, 2715.0f, -2716.0f, 2717.0f, -2718.0f, 2719.0f, -2720.0f, 2721.0f, -2722.0f, 2723.0f, -2724.0f, 2725.0f, -2726.0f, 2727.0f, -2728.0f, 2729.0f, -2730.0f, 2731.0f, -2732.0f, 2733.0f, 
-2734.0f, 2735.0f, -2736.0f, 2737.0f, -2738.0f, 2739.0f, -2740.0f, 2741.0f, -2742.0f, 2743.0f, -2744.0f, 2745.0f, -2746.0f, 2747.0f, -2748.0f, 2749.0f, -2750.0f, 2751.0f, -2752.0f, 2753.0f, -2754.0f, 2755.0f, -2756.0f, 2757.0f, -2758.0f, 2759.0f, -2760.0f, 2761.0f, -2762.0f, 2763.0f, -2764.0f, 2765.0f, -2766.0f, 2767.0f, -2768.0f, 2769.0f, -2770.0f, 2771.0f, -2772.0f, 2773.0f, -2774.0f, 2775.0f, -2776.0f, 2777.0f, -2778.0f, 2779.0f, -2780.0f, 2781.0f, -2782.0f, 2783.0f, -2784.0f, 2785.0f, -2786.0f, 2787.0f, -2788.0f, 2789.0f, -2790.0f, 2791.0f, -2792.0f, 2793.0f, -2794.0f, 2795.0f, -2796.0f, 2797.0f, -2798.0f, 2799.0f, -2800.0f, 2801.0f, -2802.0f, 2803.0f, -2804.0f, 2805.0f, -2806.0f, 2807.0f, -2808.0f, 2809.0f, -2810.0f, 2811.0f, -2812.0f, 2813.0f, -2814.0f, 2815.0f, -2816.0f, 2817.0f, -2818.0f, 2819.0f, -2820.0f, 2821.0f, -2822.0f, 2823.0f, -2824.0f, 2825.0f, -2826.0f, 2827.0f, -2828.0f, 2829.0f, -2830.0f, 2831.0f, -2832.0f, 2833.0f, -2834.0f, 2835.0f, -2836.0f, 2837.0f, -2838.0f, 2839.0f, -2840.0f, 2841.0f, -2842.0f, 2843.0f, -2844.0f, 2845.0f, -2846.0f, 2847.0f, -2848.0f, 2849.0f, -2850.0f, 2851.0f, -2852.0f, 2853.0f, -2854.0f, 2855.0f, -2856.0f, 2857.0f, -2858.0f, 2859.0f, -2860.0f, 2861.0f, -2862.0f, 2863.0f, -2864.0f, 2865.0f, -2866.0f, 2867.0f, -2868.0f, 2869.0f, -2870.0f, 2871.0f, -2872.0f, 2873.0f, -2874.0f, 2875.0f, -2876.0f, 2877.0f, -2878.0f, 2879.0f, -2880.0f, 2881.0f, -2882.0f, 2883.0f, -2884.0f, 2885.0f, -2886.0f, 2887.0f, -2888.0f, 2889.0f, -2890.0f, 2891.0f, -2892.0f, 2893.0f, -2894.0f, 2895.0f, -2896.0f, 2897.0f, -2898.0f, 2899.0f, -2900.0f, 2901.0f, -2902.0f, 2903.0f, -2904.0f, 2905.0f, -2906.0f, 2907.0f, -2908.0f, 2909.0f, -2910.0f, 2911.0f, -2912.0f, 2913.0f, -2914.0f, 2915.0f, -2916.0f, 2917.0f, -2918.0f, 2919.0f, -2920.0f, 2921.0f, -2922.0f, 2923.0f, -2924.0f, 2925.0f, -2926.0f, 2927.0f, -2928.0f, 2929.0f, -2930.0f, 2931.0f, -2932.0f, 2933.0f, -2934.0f, 2935.0f, -2936.0f, 2937.0f, -2938.0f, 2939.0f, -2940.0f, 2941.0f, -2942.0f, 2943.0f, -2944.0f, 2945.0f, -2946.0f, 2947.0f, -2948.0f, 2949.0f, -2950.0f, 2951.0f, -2952.0f, 2953.0f, -2954.0f, 2955.0f, -2956.0f, 2957.0f, -2958.0f, 2959.0f, -2960.0f, 2961.0f, -2962.0f, 2963.0f, -2964.0f, 2965.0f, -2966.0f, 2967.0f, -2968.0f, 2969.0f, -2970.0f, 2971.0f, -2972.0f, 2973.0f, -2974.0f, 2975.0f, -2976.0f, 2977.0f, -2978.0f, 2979.0f, -2980.0f, 2981.0f, -2982.0f, 2983.0f, -2984.0f, 2985.0f, -2986.0f, 2987.0f, -2988.0f, 2989.0f, -2990.0f, 2991.0f, -2992.0f, 2993.0f, -2994.0f, 2995.0f, -2996.0f, 2997.0f, -2998.0f, 2999.0f, -3000.0f, 3001.0f, -3002.0f, 3003.0f, -3004.0f, 3005.0f, -3006.0f, 3007.0f, -3008.0f, 3009.0f, -3010.0f, 3011.0f, -3012.0f, 3013.0f, -3014.0f, 3015.0f, -3016.0f, 3017.0f, -3018.0f, 3019.0f, -3020.0f, 3021.0f, -3022.0f, 3023.0f, -3024.0f, 3025.0f, -3026.0f, 3027.0f, -3028.0f, 3029.0f, -3030.0f, 3031.0f, -3032.0f, 3033.0f, -3034.0f, 3035.0f, -3036.0f, 3037.0f, -3038.0f, 3039.0f, -3040.0f, 3041.0f, -3042.0f, 3043.0f, -3044.0f, 3045.0f, -3046.0f, 3047.0f, -3048.0f, 3049.0f, -3050.0f, 3051.0f, -3052.0f, 3053.0f, -3054.0f, 3055.0f, -3056.0f, 3057.0f, -3058.0f, 3059.0f, -3060.0f, 3061.0f, -3062.0f, 3063.0f, -3064.0f, 3065.0f, -3066.0f, 3067.0f, -3068.0f, 3069.0f, -3070.0f, 3071.0f, -3072.0f, 3073.0f, -3074.0f, 3075.0f, -3076.0f, 3077.0f, -3078.0f, 3079.0f, -3080.0f, 3081.0f, -3082.0f, 3083.0f, -3084.0f, 3085.0f, -3086.0f, 3087.0f, -3088.0f, 3089.0f, -3090.0f, 3091.0f, -3092.0f, 3093.0f, -3094.0f, 3095.0f, -3096.0f, 3097.0f, -3098.0f, 3099.0f, -3100.0f, 3101.0f, -3102.0f, 3103.0f, -3104.0f, 3105.0f, -3106.0f, 3107.0f, 
-3108.0f, 3109.0f, -3110.0f, 3111.0f, -3112.0f, 3113.0f, -3114.0f, 3115.0f, -3116.0f, 3117.0f, -3118.0f, 3119.0f, -3120.0f, 3121.0f, -3122.0f, 3123.0f, -3124.0f, 3125.0f, -3126.0f, 3127.0f, -3128.0f, 3129.0f, -3130.0f, 3131.0f, -3132.0f, 3133.0f, -3134.0f, 3135.0f, -3136.0f, 3137.0f, -3138.0f, 3139.0f, -3140.0f, 3141.0f, -3142.0f, 3143.0f, -3144.0f, 3145.0f, -3146.0f, 3147.0f, -3148.0f, 3149.0f, -3150.0f, 3151.0f, -3152.0f, 3153.0f, -3154.0f, 3155.0f, -3156.0f, 3157.0f, -3158.0f, 3159.0f, -3160.0f, 3161.0f, -3162.0f, 3163.0f, -3164.0f, 3165.0f, -3166.0f, 3167.0f, -3168.0f, 3169.0f, -3170.0f, 3171.0f, -3172.0f, 3173.0f, -3174.0f, 3175.0f, -3176.0f, 3177.0f, -3178.0f, 3179.0f, -3180.0f, 3181.0f, -3182.0f, 3183.0f, -3184.0f, 3185.0f, -3186.0f, 3187.0f, -3188.0f, 3189.0f, -3190.0f, 3191.0f, -3192.0f, 3193.0f, -3194.0f, 3195.0f, -3196.0f, 3197.0f, -3198.0f, 3199.0f, -3200.0f, 3201.0f, -3202.0f, 3203.0f, -3204.0f, 3205.0f, -3206.0f, 3207.0f, -3208.0f, 3209.0f, -3210.0f, 3211.0f, -3212.0f, 3213.0f, -3214.0f, 3215.0f, -3216.0f, 3217.0f, -3218.0f, 3219.0f, -3220.0f, 3221.0f, -3222.0f, 3223.0f, -3224.0f, 3225.0f, -3226.0f, 3227.0f, -3228.0f, 3229.0f, -3230.0f, 3231.0f, -3232.0f, 3233.0f, -3234.0f, 3235.0f, -3236.0f, 3237.0f, -3238.0f, 3239.0f, -3240.0f, 3241.0f, -3242.0f, 3243.0f, -3244.0f, 3245.0f, -3246.0f, 3247.0f, -3248.0f, 3249.0f, -3250.0f, 3251.0f, -3252.0f, 3253.0f, -3254.0f, 3255.0f, -3256.0f, 3257.0f, -3258.0f, 3259.0f, -3260.0f, 3261.0f, -3262.0f, 3263.0f, -3264.0f, 3265.0f, -3266.0f, 3267.0f, -3268.0f, 3269.0f, -3270.0f, 3271.0f, -3272.0f, 3273.0f, -3274.0f, 3275.0f, -3276.0f, 3277.0f, -3278.0f, 3279.0f, -3280.0f, 3281.0f, -3282.0f, 3283.0f, -3284.0f, 3285.0f, -3286.0f, 3287.0f, -3288.0f, 3289.0f, -3290.0f, 3291.0f, -3292.0f, 3293.0f, -3294.0f, 3295.0f, -3296.0f, 3297.0f, -3298.0f, 3299.0f, -3300.0f, 3301.0f, -3302.0f, 3303.0f, -3304.0f, 3305.0f, -3306.0f, 3307.0f, -3308.0f, 3309.0f, -3310.0f, 3311.0f, -3312.0f, 3313.0f, -3314.0f, 3315.0f, -3316.0f, 3317.0f, -3318.0f, 3319.0f, -3320.0f, 3321.0f, -3322.0f, 3323.0f, -3324.0f, 3325.0f, -3326.0f, 3327.0f, -3328.0f, 3329.0f, -3330.0f, 3331.0f, -3332.0f, 3333.0f, -3334.0f, 3335.0f, -3336.0f, 3337.0f, -3338.0f, 3339.0f, -3340.0f, 3341.0f, -3342.0f, 3343.0f, -3344.0f, 3345.0f, -3346.0f, 3347.0f, -3348.0f, 3349.0f, -3350.0f, 3351.0f, -3352.0f, 3353.0f, -3354.0f, 3355.0f, -3356.0f, 3357.0f, -3358.0f, 3359.0f, -3360.0f, 3361.0f, -3362.0f, 3363.0f, -3364.0f, 3365.0f, -3366.0f, 3367.0f, -3368.0f, 3369.0f, -3370.0f, 3371.0f, -3372.0f, 3373.0f, -3374.0f, 3375.0f, -3376.0f, 3377.0f, -3378.0f, 3379.0f, -3380.0f, 3381.0f, -3382.0f, 3383.0f, -3384.0f, 3385.0f, -3386.0f, 3387.0f, -3388.0f, 3389.0f, -3390.0f, 3391.0f, -3392.0f, 3393.0f, -3394.0f, 3395.0f, -3396.0f, 3397.0f, -3398.0f, 3399.0f, -3400.0f, 3401.0f, -3402.0f, 3403.0f, -3404.0f, 3405.0f, -3406.0f, 3407.0f, -3408.0f, 3409.0f, -3410.0f, 3411.0f, -3412.0f, 3413.0f, -3414.0f, 3415.0f, -3416.0f, 3417.0f, -3418.0f, 3419.0f, -3420.0f, 3421.0f, -3422.0f, 3423.0f, -3424.0f, 3425.0f, -3426.0f, 3427.0f, -3428.0f, 3429.0f, -3430.0f, 3431.0f, -3432.0f, 3433.0f, -3434.0f, 3435.0f, -3436.0f, 3437.0f, -3438.0f, 3439.0f, -3440.0f, 3441.0f, -3442.0f, 3443.0f, -3444.0f, 3445.0f, -3446.0f, 3447.0f, -3448.0f, 3449.0f, -3450.0f, 3451.0f, -3452.0f, 3453.0f, -3454.0f, 3455.0f, -3456.0f, 3457.0f, -3458.0f, 3459.0f, -3460.0f, 3461.0f, -3462.0f, 3463.0f, -3464.0f, 3465.0f, -3466.0f, 3467.0f, -3468.0f, 3469.0f, -3470.0f, 3471.0f, -3472.0f, 3473.0f, -3474.0f, 3475.0f, -3476.0f, 3477.0f, -3478.0f, 3479.0f, -3480.0f, 3481.0f, 
-3482.0f, 3483.0f, -3484.0f, 3485.0f, -3486.0f, 3487.0f, -3488.0f, 3489.0f, -3490.0f, 3491.0f, -3492.0f, 3493.0f, -3494.0f, 3495.0f, -3496.0f, 3497.0f, -3498.0f, 3499.0f, -3500.0f, 3501.0f, -3502.0f, 3503.0f, -3504.0f, 3505.0f, -3506.0f, 3507.0f, -3508.0f, 3509.0f, -3510.0f, 3511.0f, -3512.0f, 3513.0f, -3514.0f, 3515.0f, -3516.0f, 3517.0f, -3518.0f, 3519.0f, -3520.0f, 3521.0f, -3522.0f, 3523.0f, -3524.0f, 3525.0f, -3526.0f, 3527.0f, -3528.0f, 3529.0f, -3530.0f, 3531.0f, -3532.0f, 3533.0f, -3534.0f, 3535.0f, -3536.0f, 3537.0f, -3538.0f, 3539.0f, -3540.0f, 3541.0f, -3542.0f, 3543.0f, -3544.0f, 3545.0f, -3546.0f, 3547.0f, -3548.0f, 3549.0f, -3550.0f, 3551.0f, -3552.0f, 3553.0f, -3554.0f, 3555.0f, -3556.0f, 3557.0f, -3558.0f, 3559.0f, -3560.0f, 3561.0f, -3562.0f, 3563.0f, -3564.0f, 3565.0f, -3566.0f, 3567.0f, -3568.0f, 3569.0f, -3570.0f, 3571.0f, -3572.0f, 3573.0f, -3574.0f, 3575.0f, -3576.0f, 3577.0f, -3578.0f, 3579.0f, -3580.0f, 3581.0f, -3582.0f, 3583.0f, -3584.0f, 3585.0f, -3586.0f, 3587.0f, -3588.0f, 3589.0f, -3590.0f, 3591.0f, -3592.0f, 3593.0f, -3594.0f, 3595.0f, -3596.0f, 3597.0f, -3598.0f, 3599.0f, -3600.0f, 3601.0f, -3602.0f, 3603.0f, -3604.0f, 3605.0f, -3606.0f, 3607.0f, -3608.0f, 3609.0f, -3610.0f, 3611.0f, -3612.0f, 3613.0f, -3614.0f, 3615.0f, -3616.0f, 3617.0f, -3618.0f, 3619.0f, -3620.0f, 3621.0f, -3622.0f, 3623.0f, -3624.0f, 3625.0f, -3626.0f, 3627.0f, -3628.0f, 3629.0f, -3630.0f, 3631.0f, -3632.0f, 3633.0f, -3634.0f, 3635.0f, -3636.0f, 3637.0f, -3638.0f, 3639.0f, -3640.0f, 3641.0f, -3642.0f, 3643.0f, -3644.0f, 3645.0f, -3646.0f, 3647.0f, -3648.0f, 3649.0f, -3650.0f, 3651.0f, -3652.0f, 3653.0f, -3654.0f, 3655.0f, -3656.0f, 3657.0f, -3658.0f, 3659.0f, -3660.0f, 3661.0f, -3662.0f, 3663.0f, -3664.0f, 3665.0f, -3666.0f, 3667.0f, -3668.0f, 3669.0f, -3670.0f, 3671.0f, -3672.0f, 3673.0f, -3674.0f, 3675.0f, -3676.0f, 3677.0f, -3678.0f, 3679.0f, -3680.0f, 3681.0f, -3682.0f, 3683.0f, -3684.0f, 3685.0f, -3686.0f, 3687.0f, -3688.0f, 3689.0f, -3690.0f, 3691.0f, -3692.0f, 3693.0f, -3694.0f, 3695.0f, -3696.0f, 3697.0f, -3698.0f, 3699.0f, -3700.0f, 3701.0f, -3702.0f, 3703.0f, -3704.0f, 3705.0f, -3706.0f, 3707.0f, -3708.0f, 3709.0f, -3710.0f, 3711.0f, -3712.0f, 3713.0f, -3714.0f, 3715.0f, -3716.0f, 3717.0f, -3718.0f, 3719.0f, -3720.0f, 3721.0f, -3722.0f, 3723.0f, -3724.0f, 3725.0f, -3726.0f, 3727.0f, -3728.0f, 3729.0f, -3730.0f, 3731.0f, -3732.0f, 3733.0f, -3734.0f, 3735.0f, -3736.0f, 3737.0f, -3738.0f, 3739.0f, -3740.0f, 3741.0f, -3742.0f, 3743.0f, -3744.0f, 3745.0f, -3746.0f, 3747.0f, -3748.0f, 3749.0f, -3750.0f, 3751.0f, -3752.0f, 3753.0f, -3754.0f, 3755.0f, -3756.0f, 3757.0f, -3758.0f, 3759.0f, -3760.0f, 3761.0f, -3762.0f, 3763.0f, -3764.0f, 3765.0f, -3766.0f, 3767.0f, -3768.0f, 3769.0f, -3770.0f, 3771.0f, -3772.0f, 3773.0f, -3774.0f, 3775.0f, -3776.0f, 3777.0f, -3778.0f, 3779.0f, -3780.0f, 3781.0f, -3782.0f, 3783.0f, -3784.0f, 3785.0f, -3786.0f, 3787.0f, -3788.0f, 3789.0f, -3790.0f, 3791.0f, -3792.0f, 3793.0f, -3794.0f, 3795.0f, -3796.0f, 3797.0f, -3798.0f, 3799.0f, -3800.0f, 3801.0f, -3802.0f, 3803.0f, -3804.0f, 3805.0f, -3806.0f, 3807.0f, -3808.0f, 3809.0f, -3810.0f, 3811.0f, -3812.0f, 3813.0f, -3814.0f, 3815.0f, -3816.0f, 3817.0f, -3818.0f, 3819.0f, -3820.0f, 3821.0f, -3822.0f, 3823.0f, -3824.0f, 3825.0f, -3826.0f, 3827.0f, -3828.0f, 3829.0f, -3830.0f, 3831.0f, -3832.0f, 3833.0f, -3834.0f, 3835.0f, -3836.0f, 3837.0f, -3838.0f, 3839.0f, -3840.0f, 3841.0f, -3842.0f, 3843.0f, -3844.0f, 3845.0f, -3846.0f, 3847.0f, -3848.0f, 3849.0f, -3850.0f, 3851.0f, -3852.0f, 3853.0f, -3854.0f, 3855.0f, 
-3856.0f, 3857.0f, -3858.0f, 3859.0f, -3860.0f, 3861.0f, -3862.0f, 3863.0f, -3864.0f, 3865.0f, -3866.0f, 3867.0f, -3868.0f, 3869.0f, -3870.0f, 3871.0f, -3872.0f, 3873.0f, -3874.0f, 3875.0f, -3876.0f, 3877.0f, -3878.0f, 3879.0f, -3880.0f, 3881.0f, -3882.0f, 3883.0f, -3884.0f, 3885.0f, -3886.0f, 3887.0f, -3888.0f, 3889.0f, -3890.0f, 3891.0f, -3892.0f, 3893.0f, -3894.0f, 3895.0f, -3896.0f, 3897.0f, -3898.0f, 3899.0f, -3900.0f, 3901.0f, -3902.0f, 3903.0f, -3904.0f, 3905.0f, -3906.0f, 3907.0f, -3908.0f, 3909.0f, -3910.0f, 3911.0f, -3912.0f, 3913.0f, -3914.0f, 3915.0f, -3916.0f, 3917.0f, -3918.0f, 3919.0f, -3920.0f, 3921.0f, -3922.0f, 3923.0f, -3924.0f, 3925.0f, -3926.0f, 3927.0f, -3928.0f, 3929.0f, -3930.0f, 3931.0f, -3932.0f, 3933.0f, -3934.0f, 3935.0f, -3936.0f, 3937.0f, -3938.0f, 3939.0f, -3940.0f, 3941.0f, -3942.0f, 3943.0f, -3944.0f, 3945.0f, -3946.0f, 3947.0f, -3948.0f, 3949.0f, -3950.0f, 3951.0f, -3952.0f, 3953.0f, -3954.0f, 3955.0f, -3956.0f, 3957.0f, -3958.0f, 3959.0f, -3960.0f, 3961.0f, -3962.0f, 3963.0f, -3964.0f, 3965.0f, -3966.0f, 3967.0f, -3968.0f, 3969.0f, -3970.0f, 3971.0f, -3972.0f, 3973.0f, -3974.0f, 3975.0f, -3976.0f, 3977.0f, -3978.0f, 3979.0f, -3980.0f, 3981.0f, -3982.0f, 3983.0f, -3984.0f, 3985.0f, -3986.0f, 3987.0f, -3988.0f, 3989.0f, -3990.0f, 3991.0f, -3992.0f, 3993.0f, -3994.0f, 3995.0f, -3996.0f, 3997.0f, -3998.0f, 3999.0f, -4000.0f, 4001.0f, -4002.0f, 4003.0f, -4004.0f, 4005.0f, -4006.0f, 4007.0f, -4008.0f, 4009.0f, -4010.0f, 4011.0f, -4012.0f, 4013.0f, -4014.0f, 4015.0f, -4016.0f, 4017.0f, -4018.0f, 4019.0f, -4020.0f, 4021.0f, -4022.0f, 4023.0f, -4024.0f, 4025.0f, -4026.0f, 4027.0f, -4028.0f, 4029.0f, -4030.0f, 4031.0f, -4032.0f, 4033.0f, -4034.0f, 4035.0f, -4036.0f, 4037.0f, -4038.0f, 4039.0f, -4040.0f, 4041.0f, -4042.0f, 4043.0f, -4044.0f, 4045.0f, -4046.0f, 4047.0f, -4048.0f, 4049.0f, -4050.0f, 4051.0f, -4052.0f, 4053.0f, -4054.0f, 4055.0f, -4056.0f, 4057.0f, -4058.0f, 4059.0f, -4060.0f, 4061.0f, -4062.0f, 4063.0f, -4064.0f, 4065.0f, -4066.0f, 4067.0f, -4068.0f, 4069.0f, -4070.0f, 4071.0f, -4072.0f, 4073.0f, -4074.0f, 4075.0f, -4076.0f, 4077.0f, -4078.0f, 4079.0f, -4080.0f, 4081.0f, -4082.0f, 4083.0f, -4084.0f, 4085.0f, -4086.0f, 4087.0f, -4088.0f, 4089.0f, -4090.0f, 4091.0f, -4092.0f, 4093.0f, -4094.0f, 4095.0f, -4096.0f, 4097.0f, -4098.0f, 4099.0f, -4100.0f, 4101.0f, -4102.0f, 4103.0f, -4104.0f, 4105.0f, -4106.0f, 4107.0f, -4108.0f, 4109.0f, -4110.0f, 4111.0f, -4112.0f, 4113.0f, -4114.0f, 4115.0f, -4116.0f, 4117.0f, -4118.0f, 4119.0f, -4120.0f, 4121.0f, -4122.0f, 4123.0f, -4124.0f, 4125.0f, -4126.0f, 4127.0f, -4128.0f, 4129.0f, -4130.0f, 4131.0f, -4132.0f, 4133.0f, -4134.0f, 4135.0f, -4136.0f, 4137.0f, -4138.0f, 4139.0f, -4140.0f, 4141.0f, -4142.0f, 4143.0f, -4144.0f, 4145.0f, -4146.0f, 4147.0f, -4148.0f, 4149.0f, -4150.0f, 4151.0f, -4152.0f, 4153.0f, -4154.0f, 4155.0f, -4156.0f, 4157.0f, -4158.0f, 4159.0f, -4160.0f, 4161.0f, -4162.0f, 4163.0f, -4164.0f, 4165.0f, -4166.0f, 4167.0f, -4168.0f, 4169.0f, -4170.0f, 4171.0f, -4172.0f, 4173.0f, -4174.0f, 4175.0f, -4176.0f, 4177.0f, -4178.0f, 4179.0f, -4180.0f, 4181.0f, -4182.0f, 4183.0f, -4184.0f, 4185.0f, -4186.0f, 4187.0f, -4188.0f, 4189.0f, -4190.0f, 4191.0f, -4192.0f, 4193.0f, -4194.0f, 4195.0f, -4196.0f, 4197.0f, -4198.0f, 4199.0f, -4200.0f, 4201.0f, -4202.0f, 4203.0f, -4204.0f, 4205.0f, -4206.0f, 4207.0f, -4208.0f, 4209.0f, -4210.0f, 4211.0f, -4212.0f, 4213.0f, -4214.0f, 4215.0f, -4216.0f, 4217.0f, -4218.0f, 4219.0f, -4220.0f, 4221.0f, -4222.0f, 4223.0f, -4224.0f, 4225.0f, -4226.0f, 4227.0f, -4228.0f, 4229.0f, 
-4230.0f, 4231.0f, -4232.0f, 4233.0f, -4234.0f, 4235.0f, -4236.0f, 4237.0f, -4238.0f, 4239.0f, -4240.0f, 4241.0f, -4242.0f, 4243.0f, -4244.0f, 4245.0f, -4246.0f, 4247.0f, -4248.0f, 4249.0f, -4250.0f, 4251.0f, -4252.0f, 4253.0f, -4254.0f, 4255.0f, -4256.0f, 4257.0f, -4258.0f, 4259.0f, -4260.0f, 4261.0f, -4262.0f, 4263.0f, -4264.0f, 4265.0f, -4266.0f, 4267.0f, -4268.0f, 4269.0f, -4270.0f, 4271.0f, -4272.0f, 4273.0f, -4274.0f, 4275.0f, -4276.0f, 4277.0f, -4278.0f, 4279.0f, -4280.0f, 4281.0f, -4282.0f, 4283.0f, -4284.0f, 4285.0f, -4286.0f, 4287.0f, -4288.0f, 4289.0f, -4290.0f, 4291.0f, -4292.0f, 4293.0f, -4294.0f, 4295.0f, -4296.0f, 4297.0f, -4298.0f, 4299.0f, -4300.0f, 4301.0f, -4302.0f, 4303.0f, -4304.0f, 4305.0f, -4306.0f, 4307.0f, -4308.0f, 4309.0f, -4310.0f, 4311.0f, -4312.0f, 4313.0f, -4314.0f, 4315.0f, -4316.0f, 4317.0f, -4318.0f, 4319.0f, -4320.0f, 4321.0f, -4322.0f, 4323.0f, -4324.0f, 4325.0f, -4326.0f, 4327.0f, -4328.0f, 4329.0f, -4330.0f, 4331.0f, -4332.0f, 4333.0f, -4334.0f, 4335.0f, -4336.0f, 4337.0f, -4338.0f, 4339.0f, -4340.0f, 4341.0f, -4342.0f, 4343.0f, -4344.0f, 4345.0f, -4346.0f, 4347.0f, -4348.0f, 4349.0f, -4350.0f, 4351.0f, -4352.0f, 4353.0f, -4354.0f, 4355.0f, -4356.0f, 4357.0f, -4358.0f, 4359.0f, -4360.0f, 4361.0f, -4362.0f, 4363.0f, -4364.0f, 4365.0f, -4366.0f, 4367.0f, -4368.0f, 4369.0f, -4370.0f, 4371.0f, -4372.0f, 4373.0f, -4374.0f, 4375.0f, -4376.0f, 4377.0f, -4378.0f, 4379.0f, -4380.0f, 4381.0f, -4382.0f, 4383.0f, -4384.0f, 4385.0f, -4386.0f, 4387.0f, -4388.0f, 4389.0f, -4390.0f, 4391.0f, -4392.0f, 4393.0f, -4394.0f, 4395.0f, -4396.0f, 4397.0f, -4398.0f, 4399.0f, -4400.0f, 4401.0f, -4402.0f, 4403.0f, -4404.0f, 4405.0f, -4406.0f, 4407.0f, -4408.0f, 4409.0f, -4410.0f, 4411.0f, -4412.0f, 4413.0f, -4414.0f, 4415.0f, -4416.0f, 4417.0f, -4418.0f, 4419.0f, -4420.0f, 4421.0f, -4422.0f, 4423.0f, -4424.0f, 4425.0f, -4426.0f, 4427.0f, -4428.0f, 4429.0f, -4430.0f, 4431.0f, -4432.0f, 4433.0f, -4434.0f, 4435.0f, -4436.0f, 4437.0f, -4438.0f, 4439.0f, -4440.0f, 4441.0f, -4442.0f, 4443.0f, -4444.0f, 4445.0f, -4446.0f, 4447.0f, -4448.0f, 4449.0f, -4450.0f, 4451.0f, -4452.0f, 4453.0f, -4454.0f, 4455.0f, -4456.0f, 4457.0f, -4458.0f, 4459.0f, -4460.0f, 4461.0f, -4462.0f, 4463.0f, -4464.0f, 4465.0f, -4466.0f, 4467.0f, -4468.0f, 4469.0f, -4470.0f, 4471.0f, -4472.0f, 4473.0f, -4474.0f, 4475.0f, -4476.0f, 4477.0f, -4478.0f, 4479.0f, -4480.0f, 4481.0f, -4482.0f, 4483.0f, -4484.0f, 4485.0f, -4486.0f, 4487.0f, -4488.0f, 4489.0f, -4490.0f, 4491.0f, -4492.0f, 4493.0f, -4494.0f, 4495.0f, -4496.0f, 4497.0f, -4498.0f, 4499.0f, -4500.0f, 4501.0f, -4502.0f, 4503.0f, -4504.0f, 4505.0f, -4506.0f, 4507.0f, -4508.0f, 4509.0f, -4510.0f, 4511.0f, -4512.0f, 4513.0f, -4514.0f, 4515.0f, -4516.0f, 4517.0f, -4518.0f, 4519.0f, -4520.0f, 4521.0f, -4522.0f, 4523.0f, -4524.0f, 4525.0f, -4526.0f, 4527.0f, -4528.0f, 4529.0f, -4530.0f, 4531.0f, -4532.0f, 4533.0f, -4534.0f, 4535.0f, -4536.0f, 4537.0f, -4538.0f, 4539.0f, -4540.0f, 4541.0f, -4542.0f, 4543.0f, -4544.0f, 4545.0f, -4546.0f, 4547.0f, -4548.0f, 4549.0f, -4550.0f, 4551.0f, -4552.0f, 4553.0f, -4554.0f, 4555.0f, -4556.0f, 4557.0f, -4558.0f, 4559.0f, -4560.0f, 4561.0f, -4562.0f, 4563.0f, -4564.0f, 4565.0f, -4566.0f, 4567.0f, -4568.0f, 4569.0f, -4570.0f, 4571.0f, -4572.0f, 4573.0f, -4574.0f, 4575.0f, -4576.0f, 4577.0f, -4578.0f, 4579.0f, -4580.0f, 4581.0f, -4582.0f, 4583.0f, -4584.0f, 4585.0f, -4586.0f, 4587.0f, -4588.0f, 4589.0f, -4590.0f, 4591.0f, -4592.0f, 4593.0f, -4594.0f, 4595.0f, -4596.0f, 4597.0f, -4598.0f, 4599.0f, -4600.0f, 4601.0f, -4602.0f, 4603.0f, 
-4604.0f, 4605.0f, -4606.0f, 4607.0f, -4608.0f, 4609.0f, -4610.0f, 4611.0f, -4612.0f, 4613.0f, -4614.0f, 4615.0f, -4616.0f, 4617.0f, -4618.0f, 4619.0f, -4620.0f, 4621.0f, -4622.0f, 4623.0f, -4624.0f, 4625.0f, -4626.0f, 4627.0f, -4628.0f, 4629.0f, -4630.0f, 4631.0f, -4632.0f, 4633.0f, -4634.0f, 4635.0f, -4636.0f, 4637.0f, -4638.0f, 4639.0f, -4640.0f, 4641.0f, -4642.0f, 4643.0f, -4644.0f, 4645.0f, -4646.0f, 4647.0f, -4648.0f, 4649.0f, -4650.0f, 4651.0f, -4652.0f, 4653.0f, -4654.0f, 4655.0f, -4656.0f, 4657.0f, -4658.0f, 4659.0f, -4660.0f, 4661.0f, -4662.0f, 4663.0f, -4664.0f, 4665.0f, -4666.0f, 4667.0f, -4668.0f, 4669.0f, -4670.0f, 4671.0f, -4672.0f, 4673.0f, -4674.0f, 4675.0f, -4676.0f, 4677.0f, -4678.0f, 4679.0f, -4680.0f, 4681.0f, -4682.0f, 4683.0f, -4684.0f, 4685.0f, -4686.0f, 4687.0f, -4688.0f, 4689.0f, -4690.0f, 4691.0f, -4692.0f, 4693.0f, -4694.0f, 4695.0f, -4696.0f, 4697.0f, -4698.0f, 4699.0f, -4700.0f, 4701.0f, -4702.0f, 4703.0f, -4704.0f, 4705.0f, -4706.0f, 4707.0f, -4708.0f, 4709.0f, -4710.0f, 4711.0f, -4712.0f, 4713.0f, -4714.0f, 4715.0f, -4716.0f, 4717.0f, -4718.0f, 4719.0f, -4720.0f, 4721.0f, -4722.0f, 4723.0f, -4724.0f, 4725.0f, -4726.0f, 4727.0f, -4728.0f, 4729.0f, -4730.0f, 4731.0f, -4732.0f, 4733.0f, -4734.0f, 4735.0f, -4736.0f, 4737.0f, -4738.0f, 4739.0f, -4740.0f, 4741.0f, -4742.0f, 4743.0f, -4744.0f, 4745.0f, -4746.0f, 4747.0f, -4748.0f, 4749.0f, -4750.0f, 4751.0f, -4752.0f, 4753.0f, -4754.0f, 4755.0f, -4756.0f, 4757.0f, -4758.0f, 4759.0f, -4760.0f, 4761.0f, -4762.0f, 4763.0f, -4764.0f, 4765.0f, -4766.0f, 4767.0f, -4768.0f, 4769.0f, -4770.0f, 4771.0f, -4772.0f, 4773.0f, -4774.0f, 4775.0f, -4776.0f, 4777.0f, -4778.0f, 4779.0f, -4780.0f, 4781.0f, -4782.0f, 4783.0f, -4784.0f, 4785.0f, -4786.0f, 4787.0f, -4788.0f, 4789.0f, -4790.0f, 4791.0f, -4792.0f, 4793.0f, -4794.0f, 4795.0f, -4796.0f, 4797.0f, -4798.0f, 4799.0f, -4800.0f, 4801.0f, -4802.0f, 4803.0f, -4804.0f, 4805.0f, -4806.0f, 4807.0f, -4808.0f, 4809.0f, -4810.0f, 4811.0f, -4812.0f, 4813.0f, -4814.0f, 4815.0f, -4816.0f, 4817.0f, -4818.0f, 4819.0f, -4820.0f, 4821.0f, -4822.0f, 4823.0f, -4824.0f, 4825.0f, -4826.0f, 4827.0f, -4828.0f, 4829.0f, -4830.0f, 4831.0f, -4832.0f, 4833.0f, -4834.0f, 4835.0f, -4836.0f, 4837.0f, -4838.0f, 4839.0f, -4840.0f, 4841.0f, -4842.0f, 4843.0f, -4844.0f, 4845.0f, -4846.0f, 4847.0f, -4848.0f, 4849.0f, -4850.0f, 4851.0f, -4852.0f, 4853.0f, -4854.0f, 4855.0f, -4856.0f, 4857.0f, -4858.0f, 4859.0f, -4860.0f, 4861.0f, -4862.0f, 4863.0f, -4864.0f, 4865.0f, -4866.0f, 4867.0f, -4868.0f, 4869.0f, -4870.0f, 4871.0f, -4872.0f, 4873.0f, -4874.0f, 4875.0f, -4876.0f, 4877.0f, -4878.0f, 4879.0f, -4880.0f, 4881.0f, -4882.0f, 4883.0f, -4884.0f, 4885.0f, -4886.0f, 4887.0f, -4888.0f, 4889.0f, -4890.0f, 4891.0f, -4892.0f, 4893.0f, -4894.0f, 4895.0f, -4896.0f, 4897.0f, -4898.0f, 4899.0f, -4900.0f, 4901.0f, -4902.0f, 4903.0f, -4904.0f, 4905.0f, -4906.0f, 4907.0f, -4908.0f, 4909.0f, -4910.0f, 4911.0f, -4912.0f, 4913.0f, -4914.0f, 4915.0f, -4916.0f, 4917.0f, -4918.0f, 4919.0f, -4920.0f, 4921.0f, -4922.0f, 4923.0f, -4924.0f, 4925.0f, -4926.0f, 4927.0f, -4928.0f, 4929.0f, -4930.0f, 4931.0f, -4932.0f, 4933.0f, -4934.0f, 4935.0f, -4936.0f, 4937.0f, -4938.0f, 4939.0f, -4940.0f, 4941.0f, -4942.0f, 4943.0f, -4944.0f, 4945.0f, -4946.0f, 4947.0f, -4948.0f, 4949.0f, -4950.0f, 4951.0f, -4952.0f, 4953.0f, -4954.0f, 4955.0f, -4956.0f, 4957.0f, -4958.0f, 4959.0f, -4960.0f, 4961.0f, -4962.0f, 4963.0f, -4964.0f, 4965.0f, -4966.0f, 4967.0f, -4968.0f, 4969.0f, -4970.0f, 4971.0f, -4972.0f, 4973.0f, -4974.0f, 4975.0f, -4976.0f, 4977.0f, 
-4978.0f, 4979.0f, -4980.0f, 4981.0f, -4982.0f, 4983.0f, -4984.0f, 4985.0f, -4986.0f, 4987.0f, -4988.0f, 4989.0f, -4990.0f, 4991.0f, -4992.0f, 4993.0f, -4994.0f, 4995.0f, -4996.0f, 4997.0f, -4998.0f, 4999.0f, -5000.0f, 5001.0f, -5002.0f, 5003.0f, -5004.0f, 5005.0f, -5006.0f, 5007.0f, -5008.0f, 5009.0f, -5010.0f, 5011.0f, -5012.0f, 5013.0f, -5014.0f, 5015.0f, -5016.0f, 5017.0f, -5018.0f, 5019.0f, -5020.0f, 5021.0f, -5022.0f, 5023.0f, -5024.0f, 5025.0f, -5026.0f, 5027.0f, -5028.0f, 5029.0f, -5030.0f, 5031.0f, -5032.0f, 5033.0f, -5034.0f, 5035.0f, -5036.0f, 5037.0f, -5038.0f, 5039.0f, -5040.0f, 5041.0f, -5042.0f, 5043.0f, -5044.0f, 5045.0f, -5046.0f, 5047.0f, -5048.0f, 5049.0f, -5050.0f, 5051.0f, -5052.0f, 5053.0f, -5054.0f, 5055.0f, -5056.0f, 5057.0f, -5058.0f, 5059.0f, -5060.0f, 5061.0f, -5062.0f, 5063.0f, -5064.0f, 5065.0f, -5066.0f, 5067.0f, -5068.0f, 5069.0f, -5070.0f, 5071.0f, -5072.0f, 5073.0f, -5074.0f, 5075.0f, -5076.0f, 5077.0f, -5078.0f, 5079.0f, -5080.0f, 5081.0f, -5082.0f, 5083.0f, -5084.0f, 5085.0f, -5086.0f, 5087.0f, -5088.0f, 5089.0f, -5090.0f, 5091.0f, -5092.0f, 5093.0f, -5094.0f, 5095.0f, -5096.0f, 5097.0f, -5098.0f, 5099.0f, -5100.0f, 5101.0f, -5102.0f, 5103.0f, -5104.0f, 5105.0f, -5106.0f, 5107.0f, -5108.0f, 5109.0f, -5110.0f, 5111.0f, -5112.0f, 5113.0f, -5114.0f, 5115.0f, -5116.0f, 5117.0f, -5118.0f, 5119.0f, -5120.0f, 5121.0f, -5122.0f, 5123.0f, -5124.0f, 5125.0f, -5126.0f, 5127.0f, -5128.0f, 5129.0f, -5130.0f, 5131.0f, -5132.0f, 5133.0f, -5134.0f, 5135.0f, -5136.0f, 5137.0f, -5138.0f, 5139.0f, -5140.0f, 5141.0f, -5142.0f, 5143.0f, -5144.0f, 5145.0f, -5146.0f, 5147.0f, -5148.0f, 5149.0f, -5150.0f, 5151.0f, -5152.0f, 5153.0f, -5154.0f, 5155.0f, -5156.0f, 5157.0f, -5158.0f, 5159.0f, -5160.0f, 5161.0f, -5162.0f, 5163.0f, -5164.0f, 5165.0f, -5166.0f, 5167.0f, -5168.0f, 5169.0f, -5170.0f, 5171.0f, -5172.0f, 5173.0f, -5174.0f, 5175.0f, -5176.0f, 5177.0f, -5178.0f, 5179.0f, -5180.0f, 5181.0f, -5182.0f, 5183.0f, -5184.0f, 5185.0f, -5186.0f, 5187.0f, -5188.0f, 5189.0f, -5190.0f, 5191.0f, -5192.0f, 5193.0f, -5194.0f, 5195.0f, -5196.0f, 5197.0f, -5198.0f, 5199.0f, -5200.0f, 5201.0f, -5202.0f, 5203.0f, -5204.0f, 5205.0f, -5206.0f, 5207.0f, -5208.0f, 5209.0f, -5210.0f, 5211.0f, -5212.0f, 5213.0f, -5214.0f, 5215.0f, -5216.0f, 5217.0f, -5218.0f, 5219.0f, -5220.0f, 5221.0f, -5222.0f, 5223.0f, -5224.0f, 5225.0f, -5226.0f, 5227.0f, -5228.0f, 5229.0f, -5230.0f, 5231.0f, -5232.0f, 5233.0f, -5234.0f, 5235.0f, -5236.0f, 5237.0f, -5238.0f, 5239.0f, -5240.0f, 5241.0f, -5242.0f, 5243.0f, -5244.0f, 5245.0f, -5246.0f, 5247.0f, -5248.0f, 5249.0f, -5250.0f, 5251.0f, -5252.0f, 5253.0f, -5254.0f, 5255.0f, -5256.0f, 5257.0f, -5258.0f, 5259.0f, -5260.0f, 5261.0f, -5262.0f, 5263.0f, -5264.0f, 5265.0f, -5266.0f, 5267.0f, -5268.0f, 5269.0f, -5270.0f, 5271.0f, -5272.0f, 5273.0f, -5274.0f, 5275.0f, -5276.0f, 5277.0f, -5278.0f, 5279.0f, -5280.0f, 5281.0f, -5282.0f, 5283.0f, -5284.0f, 5285.0f, -5286.0f, 5287.0f, -5288.0f, 5289.0f, -5290.0f, 5291.0f, -5292.0f, 5293.0f, -5294.0f, 5295.0f, -5296.0f, 5297.0f, -5298.0f, 5299.0f, -5300.0f, 5301.0f, -5302.0f, 5303.0f, -5304.0f, 5305.0f, -5306.0f, 5307.0f, -5308.0f, 5309.0f, -5310.0f, 5311.0f, -5312.0f, 5313.0f, -5314.0f, 5315.0f, -5316.0f, 5317.0f, -5318.0f, 5319.0f, -5320.0f, 5321.0f, -5322.0f, 5323.0f, -5324.0f, 5325.0f, -5326.0f, 5327.0f, -5328.0f, 5329.0f, -5330.0f, 5331.0f, -5332.0f, 5333.0f, -5334.0f, 5335.0f, -5336.0f, 5337.0f, -5338.0f, 5339.0f, -5340.0f, 5341.0f, -5342.0f, 5343.0f, -5344.0f, 5345.0f, -5346.0f, 5347.0f, -5348.0f, 5349.0f, -5350.0f, 5351.0f, 
-5352.0f, 5353.0f, -5354.0f, 5355.0f, -5356.0f, 5357.0f, -5358.0f, 5359.0f, -5360.0f, 5361.0f, -5362.0f, 5363.0f, -5364.0f, 5365.0f, -5366.0f, 5367.0f, -5368.0f, 5369.0f, -5370.0f, 5371.0f, -5372.0f, 5373.0f, -5374.0f, 5375.0f, -5376.0f, 5377.0f, -5378.0f, 5379.0f, -5380.0f, 5381.0f, -5382.0f, 5383.0f, -5384.0f, 5385.0f, -5386.0f, 5387.0f, -5388.0f, 5389.0f, -5390.0f, 5391.0f, -5392.0f, 5393.0f, -5394.0f, 5395.0f, -5396.0f, 5397.0f, -5398.0f, 5399.0f, -5400.0f, 5401.0f, -5402.0f, 5403.0f, -5404.0f, 5405.0f, -5406.0f, 5407.0f, -5408.0f, 5409.0f, -5410.0f, 5411.0f, -5412.0f, 5413.0f, -5414.0f, 5415.0f, -5416.0f, 5417.0f, -5418.0f, 5419.0f, -5420.0f, 5421.0f, -5422.0f, 5423.0f, -5424.0f, 5425.0f, -5426.0f, 5427.0f, -5428.0f, 5429.0f, -5430.0f, 5431.0f, -5432.0f, 5433.0f, -5434.0f, 5435.0f, -5436.0f, 5437.0f, -5438.0f, 5439.0f, -5440.0f, 5441.0f, -5442.0f, 5443.0f, -5444.0f, 5445.0f, -5446.0f, 5447.0f, -5448.0f, 5449.0f, -5450.0f, 5451.0f, -5452.0f, 5453.0f, -5454.0f, 5455.0f, -5456.0f, 5457.0f, -5458.0f, 5459.0f, -5460.0f, 5461.0f, -5462.0f, 5463.0f, -5464.0f, 5465.0f, -5466.0f, 5467.0f, -5468.0f, 5469.0f, -5470.0f, 5471.0f, -5472.0f, 5473.0f, -5474.0f, 5475.0f, -5476.0f, 5477.0f, -5478.0f, 5479.0f, -5480.0f, 5481.0f, -5482.0f, 5483.0f, -5484.0f, 5485.0f, -5486.0f, 5487.0f, -5488.0f, 5489.0f, -5490.0f, 5491.0f, -5492.0f, 5493.0f, -5494.0f, 5495.0f, -5496.0f, 5497.0f, -5498.0f, 5499.0f, -5500.0f, 5501.0f, -5502.0f, 5503.0f, -5504.0f, 5505.0f, -5506.0f, 5507.0f, -5508.0f, 5509.0f, -5510.0f, 5511.0f, -5512.0f, 5513.0f, -5514.0f, 5515.0f, -5516.0f, 5517.0f, -5518.0f, 5519.0f, -5520.0f, 5521.0f, -5522.0f, 5523.0f, -5524.0f, 5525.0f, -5526.0f, 5527.0f, -5528.0f, 5529.0f, -5530.0f, 5531.0f, -5532.0f, 5533.0f, -5534.0f, 5535.0f, -5536.0f, 5537.0f, -5538.0f, 5539.0f, -5540.0f, 5541.0f, -5542.0f, 5543.0f, -5544.0f, 5545.0f, -5546.0f, 5547.0f, -5548.0f, 5549.0f, -5550.0f, 5551.0f, -5552.0f, 5553.0f, -5554.0f, 5555.0f, -5556.0f, 5557.0f, -5558.0f, 5559.0f, -5560.0f, 5561.0f, -5562.0f, 5563.0f, -5564.0f, 5565.0f, -5566.0f, 5567.0f, -5568.0f, 5569.0f, -5570.0f, 5571.0f, -5572.0f, 5573.0f, -5574.0f, 5575.0f, -5576.0f, 5577.0f, -5578.0f, 5579.0f, -5580.0f, 5581.0f, -5582.0f, 5583.0f, -5584.0f, 5585.0f, -5586.0f, 5587.0f, -5588.0f, 5589.0f, -5590.0f, 5591.0f, -5592.0f, 5593.0f, -5594.0f, 5595.0f, -5596.0f, 5597.0f, -5598.0f, 5599.0f, -5600.0f, 5601.0f, -5602.0f, 5603.0f, -5604.0f, 5605.0f, -5606.0f, 5607.0f, -5608.0f, 5609.0f, -5610.0f, 5611.0f, -5612.0f, 5613.0f, -5614.0f, 5615.0f, -5616.0f, 5617.0f, -5618.0f, 5619.0f, -5620.0f, 5621.0f, -5622.0f, 5623.0f, -5624.0f, 5625.0f, -5626.0f, 5627.0f, -5628.0f, 5629.0f, -5630.0f, 5631.0f, -5632.0f, 5633.0f, -5634.0f, 5635.0f, -5636.0f, 5637.0f, -5638.0f, 5639.0f, -5640.0f, 5641.0f, -5642.0f, 5643.0f, -5644.0f, 5645.0f, -5646.0f, 5647.0f, -5648.0f, 5649.0f, -5650.0f, 5651.0f, -5652.0f, 5653.0f, -5654.0f, 5655.0f, -5656.0f, 5657.0f, -5658.0f, 5659.0f, -5660.0f, 5661.0f, -5662.0f, 5663.0f, -5664.0f, 5665.0f, -5666.0f, 5667.0f, -5668.0f, 5669.0f, -5670.0f, 5671.0f, -5672.0f, 5673.0f, -5674.0f, 5675.0f, -5676.0f, 5677.0f, -5678.0f, 5679.0f, -5680.0f, 5681.0f, -5682.0f, 5683.0f, -5684.0f, 5685.0f, -5686.0f, 5687.0f, -5688.0f, 5689.0f, -5690.0f, 5691.0f, -5692.0f, 5693.0f, -5694.0f, 5695.0f, -5696.0f, 5697.0f, -5698.0f, 5699.0f, -5700.0f, 5701.0f, -5702.0f, 5703.0f, -5704.0f, 5705.0f, -5706.0f, 5707.0f, -5708.0f, 5709.0f, -5710.0f, 5711.0f, -5712.0f, 5713.0f, -5714.0f, 5715.0f, -5716.0f, 5717.0f, -5718.0f, 5719.0f, -5720.0f, 5721.0f, -5722.0f, 5723.0f, -5724.0f, 5725.0f, 
-5726.0f, 5727.0f, -5728.0f, 5729.0f, -5730.0f, 5731.0f, -5732.0f, 5733.0f, -5734.0f, 5735.0f, -5736.0f, 5737.0f, -5738.0f, 5739.0f, -5740.0f, 5741.0f, -5742.0f, 5743.0f, -5744.0f, 5745.0f, -5746.0f, 5747.0f, -5748.0f, 5749.0f, -5750.0f, 5751.0f, -5752.0f, 5753.0f, -5754.0f, 5755.0f, -5756.0f, 5757.0f, -5758.0f, 5759.0f, -5760.0f, 5761.0f, -5762.0f, 5763.0f, -5764.0f, 5765.0f, -5766.0f, 5767.0f, -5768.0f, 5769.0f, -5770.0f, 5771.0f, -5772.0f, 5773.0f, -5774.0f, 5775.0f, -5776.0f, 5777.0f, -5778.0f, 5779.0f, -5780.0f, 5781.0f, -5782.0f, 5783.0f, -5784.0f, 5785.0f, -5786.0f, 5787.0f, -5788.0f, 5789.0f, -5790.0f, 5791.0f, -5792.0f, 5793.0f, -5794.0f, 5795.0f, -5796.0f, 5797.0f, -5798.0f, 5799.0f, -5800.0f, 5801.0f, -5802.0f, 5803.0f, -5804.0f, 5805.0f, -5806.0f, 5807.0f, -5808.0f, 5809.0f, -5810.0f, 5811.0f, -5812.0f, 5813.0f, -5814.0f, 5815.0f, -5816.0f, 5817.0f, -5818.0f, 5819.0f, -5820.0f, 5821.0f, -5822.0f, 5823.0f, -5824.0f, 5825.0f, -5826.0f, 5827.0f, -5828.0f, 5829.0f, -5830.0f, 5831.0f, -5832.0f, 5833.0f, -5834.0f, 5835.0f, -5836.0f, 5837.0f, -5838.0f, 5839.0f, -5840.0f, 5841.0f, -5842.0f, 5843.0f, -5844.0f, 5845.0f, -5846.0f, 5847.0f, -5848.0f, 5849.0f, -5850.0f, 5851.0f, -5852.0f, 5853.0f, -5854.0f, 5855.0f, -5856.0f, 5857.0f, -5858.0f, 5859.0f, -5860.0f, 5861.0f, -5862.0f, 5863.0f, -5864.0f, 5865.0f, -5866.0f, 5867.0f, -5868.0f, 5869.0f, -5870.0f, 5871.0f, -5872.0f, 5873.0f, -5874.0f, 5875.0f, -5876.0f, 5877.0f, -5878.0f, 5879.0f, -5880.0f, 5881.0f, -5882.0f, 5883.0f, -5884.0f, 5885.0f, -5886.0f, 5887.0f, -5888.0f, 5889.0f, -5890.0f, 5891.0f, -5892.0f, 5893.0f, -5894.0f, 5895.0f, -5896.0f, 5897.0f, -5898.0f, 5899.0f, -5900.0f, 5901.0f, -5902.0f, 5903.0f, -5904.0f, 5905.0f, -5906.0f, 5907.0f, -5908.0f, 5909.0f, -5910.0f, 5911.0f, -5912.0f, 5913.0f, -5914.0f, 5915.0f, -5916.0f, 5917.0f, -5918.0f, 5919.0f, -5920.0f, 5921.0f, -5922.0f, 5923.0f, -5924.0f, 5925.0f, -5926.0f, 5927.0f, -5928.0f, 5929.0f, -5930.0f, 5931.0f, -5932.0f, 5933.0f, -5934.0f, 5935.0f, -5936.0f, 5937.0f, -5938.0f, 5939.0f, -5940.0f, 5941.0f, -5942.0f, 5943.0f, -5944.0f, 5945.0f, -5946.0f, 5947.0f, -5948.0f, 5949.0f, -5950.0f, 5951.0f, -5952.0f, 5953.0f, -5954.0f, 5955.0f, -5956.0f, 5957.0f, -5958.0f, 5959.0f, -5960.0f, 5961.0f, -5962.0f, 5963.0f, -5964.0f, 5965.0f, -5966.0f, 5967.0f, -5968.0f, 5969.0f, -5970.0f, 5971.0f, -5972.0f, 5973.0f, -5974.0f, 5975.0f, -5976.0f, 5977.0f, -5978.0f, 5979.0f, -5980.0f, 5981.0f, -5982.0f, 5983.0f, -5984.0f, 5985.0f, -5986.0f, 5987.0f, -5988.0f, 5989.0f, -5990.0f, 5991.0f, -5992.0f, 5993.0f, -5994.0f, 5995.0f, -5996.0f, 5997.0f, -5998.0f, 5999.0f, -6000.0f, 6001.0f, -6002.0f, 6003.0f, -6004.0f, 6005.0f, -6006.0f, 6007.0f, -6008.0f, 6009.0f, -6010.0f, 6011.0f, -6012.0f, 6013.0f, -6014.0f, 6015.0f, -6016.0f, 6017.0f, -6018.0f, 6019.0f, -6020.0f, 6021.0f, -6022.0f, 6023.0f, -6024.0f, 6025.0f, -6026.0f, 6027.0f, -6028.0f, 6029.0f, -6030.0f, 6031.0f, -6032.0f, 6033.0f, -6034.0f, 6035.0f, -6036.0f, 6037.0f, -6038.0f, 6039.0f, -6040.0f, 6041.0f, -6042.0f, 6043.0f, -6044.0f, 6045.0f, -6046.0f, 6047.0f, -6048.0f, 6049.0f, -6050.0f, 6051.0f, -6052.0f, 6053.0f, -6054.0f, 6055.0f, -6056.0f, 6057.0f, -6058.0f, 6059.0f, -6060.0f, 6061.0f, -6062.0f, 6063.0f, -6064.0f, 6065.0f, -6066.0f, 6067.0f, -6068.0f, 6069.0f, -6070.0f, 6071.0f, -6072.0f, 6073.0f, -6074.0f, 6075.0f, -6076.0f, 6077.0f, -6078.0f, 6079.0f, -6080.0f, 6081.0f, -6082.0f, 6083.0f, -6084.0f, 6085.0f, -6086.0f, 6087.0f, -6088.0f, 6089.0f, -6090.0f, 6091.0f, -6092.0f, 6093.0f, -6094.0f, 6095.0f, -6096.0f, 6097.0f, -6098.0f, 6099.0f, 
-6100.0f, 6101.0f, -6102.0f, 6103.0f, -6104.0f, 6105.0f, -6106.0f, 6107.0f, -6108.0f, 6109.0f, -6110.0f, 6111.0f, -6112.0f, 6113.0f, -6114.0f, 6115.0f, -6116.0f, 6117.0f, -6118.0f, 6119.0f, -6120.0f, 6121.0f, -6122.0f, 6123.0f, -6124.0f, 6125.0f, -6126.0f, 6127.0f, -6128.0f, 6129.0f, -6130.0f, 6131.0f, -6132.0f, 6133.0f, -6134.0f, 6135.0f, -6136.0f, 6137.0f, -6138.0f, 6139.0f, -6140.0f, 6141.0f, -6142.0f, 6143.0f, -6144.0f, 6145.0f, -6146.0f, 6147.0f, -6148.0f, 6149.0f, -6150.0f, 6151.0f, -6152.0f, 6153.0f, -6154.0f, 6155.0f, -6156.0f, 6157.0f, -6158.0f, 6159.0f, -6160.0f, 6161.0f, -6162.0f, 6163.0f, -6164.0f, 6165.0f, -6166.0f, 6167.0f, -6168.0f, 6169.0f, -6170.0f, 6171.0f, -6172.0f, 6173.0f, -6174.0f, 6175.0f, -6176.0f, 6177.0f, -6178.0f, 6179.0f, -6180.0f, 6181.0f, -6182.0f, 6183.0f, -6184.0f, 6185.0f, -6186.0f, 6187.0f, -6188.0f, 6189.0f, -6190.0f, 6191.0f, -6192.0f, 6193.0f, -6194.0f, 6195.0f, -6196.0f, 6197.0f, -6198.0f, 6199.0f, -6200.0f, 6201.0f, -6202.0f, 6203.0f, -6204.0f, 6205.0f, -6206.0f, 6207.0f, -6208.0f, 6209.0f, -6210.0f, 6211.0f, -6212.0f, 6213.0f, -6214.0f, 6215.0f, -6216.0f, 6217.0f, -6218.0f, 6219.0f, -6220.0f, 6221.0f, -6222.0f, 6223.0f, -6224.0f, 6225.0f, -6226.0f, 6227.0f, -6228.0f, 6229.0f, -6230.0f, 6231.0f, -6232.0f, 6233.0f, -6234.0f, 6235.0f, -6236.0f, 6237.0f, -6238.0f, 6239.0f, -6240.0f, 6241.0f, -6242.0f, 6243.0f, -6244.0f, 6245.0f, -6246.0f, 6247.0f, -6248.0f, 6249.0f, -6250.0f, 6251.0f, -6252.0f, 6253.0f, -6254.0f, 6255.0f, -6256.0f, 6257.0f, -6258.0f, 6259.0f, -6260.0f, 6261.0f, -6262.0f, 6263.0f, -6264.0f, 6265.0f, -6266.0f, 6267.0f, -6268.0f, 6269.0f, -6270.0f, 6271.0f, -6272.0f, 6273.0f, -6274.0f, 6275.0f, -6276.0f, 6277.0f, -6278.0f, 6279.0f, -6280.0f, 6281.0f, -6282.0f, 6283.0f, -6284.0f, 6285.0f, -6286.0f, 6287.0f, -6288.0f, 6289.0f, -6290.0f, 6291.0f, -6292.0f, 6293.0f, -6294.0f, 6295.0f, -6296.0f, 6297.0f, -6298.0f, 6299.0f, -6300.0f, 6301.0f, -6302.0f, 6303.0f, -6304.0f, 6305.0f, -6306.0f, 6307.0f, -6308.0f, 6309.0f, -6310.0f, 6311.0f, -6312.0f, 6313.0f, -6314.0f, 6315.0f, -6316.0f, 6317.0f, -6318.0f, 6319.0f, -6320.0f, 6321.0f, -6322.0f, 6323.0f, -6324.0f, 6325.0f, -6326.0f, 6327.0f, -6328.0f, 6329.0f, -6330.0f, 6331.0f, -6332.0f, 6333.0f, -6334.0f, 6335.0f, -6336.0f, 6337.0f, -6338.0f, 6339.0f, -6340.0f, 6341.0f, -6342.0f, 6343.0f, -6344.0f, 6345.0f, -6346.0f, 6347.0f, -6348.0f, 6349.0f, -6350.0f, 6351.0f, -6352.0f, 6353.0f, -6354.0f, 6355.0f, -6356.0f, 6357.0f, -6358.0f, 6359.0f, -6360.0f, 6361.0f, -6362.0f, 6363.0f, -6364.0f, 6365.0f, -6366.0f, 6367.0f, -6368.0f, 6369.0f, -6370.0f, 6371.0f, -6372.0f, 6373.0f, -6374.0f, 6375.0f, -6376.0f, 6377.0f, -6378.0f, 6379.0f, -6380.0f, 6381.0f, -6382.0f, 6383.0f, -6384.0f, 6385.0f, -6386.0f, 6387.0f, -6388.0f, 6389.0f, -6390.0f, 6391.0f, -6392.0f, 6393.0f, -6394.0f, 6395.0f, -6396.0f, 6397.0f, -6398.0f, 6399.0f, -6400.0f, 6401.0f, -6402.0f, 6403.0f, -6404.0f, 6405.0f, -6406.0f, 6407.0f, -6408.0f, 6409.0f, -6410.0f, 6411.0f, -6412.0f, 6413.0f, -6414.0f, 6415.0f, -6416.0f, 6417.0f, -6418.0f, 6419.0f, -6420.0f, 6421.0f, -6422.0f, 6423.0f, -6424.0f, 6425.0f, -6426.0f, 6427.0f, -6428.0f, 6429.0f, -6430.0f, 6431.0f, -6432.0f, 6433.0f, -6434.0f, 6435.0f, -6436.0f, 6437.0f, -6438.0f, 6439.0f, -6440.0f, 6441.0f, -6442.0f, 6443.0f, -6444.0f, 6445.0f, -6446.0f, 6447.0f, -6448.0f, 6449.0f, -6450.0f, 6451.0f, -6452.0f, 6453.0f, -6454.0f, 6455.0f, -6456.0f, 6457.0f, -6458.0f, 6459.0f, -6460.0f, 6461.0f, -6462.0f, 6463.0f, -6464.0f, 6465.0f, -6466.0f, 6467.0f, -6468.0f, 6469.0f, -6470.0f, 6471.0f, -6472.0f, 6473.0f, 
-6474.0f, 6475.0f, -6476.0f, 6477.0f, -6478.0f, 6479.0f, -6480.0f, 6481.0f, -6482.0f, 6483.0f, -6484.0f, 6485.0f, -6486.0f, 6487.0f, -6488.0f, 6489.0f, -6490.0f, 6491.0f, -6492.0f, 6493.0f, -6494.0f, 6495.0f, -6496.0f, 6497.0f, -6498.0f, 6499.0f, -6500.0f, 6501.0f, -6502.0f, 6503.0f, -6504.0f, 6505.0f, -6506.0f, 6507.0f, -6508.0f, 6509.0f, -6510.0f, 6511.0f, -6512.0f, 6513.0f, -6514.0f, 6515.0f, -6516.0f, 6517.0f, -6518.0f, 6519.0f, -6520.0f, 6521.0f, -6522.0f, 6523.0f, -6524.0f, 6525.0f, -6526.0f, 6527.0f, -6528.0f, 6529.0f, -6530.0f, 6531.0f, -6532.0f, 6533.0f, -6534.0f, 6535.0f, -6536.0f, 6537.0f, -6538.0f, 6539.0f, -6540.0f, 6541.0f, -6542.0f, 6543.0f, -6544.0f, 6545.0f, -6546.0f, 6547.0f, -6548.0f, 6549.0f, -6550.0f, 6551.0f, -6552.0f, 6553.0f, -6554.0f, 6555.0f, -6556.0f, 6557.0f, -6558.0f, 6559.0f, -6560.0f, 6561.0f, -6562.0f, 6563.0f, -6564.0f, 6565.0f, -6566.0f, 6567.0f, -6568.0f, 6569.0f, -6570.0f, 6571.0f, -6572.0f, 6573.0f, -6574.0f, 6575.0f, -6576.0f, 6577.0f, -6578.0f, 6579.0f, -6580.0f, 6581.0f, -6582.0f, 6583.0f, -6584.0f, 6585.0f, -6586.0f, 6587.0f, -6588.0f, 6589.0f, -6590.0f, 6591.0f, -6592.0f, 6593.0f, -6594.0f, 6595.0f, -6596.0f, 6597.0f, -6598.0f, 6599.0f, -6600.0f, 6601.0f, -6602.0f, 6603.0f, -6604.0f, 6605.0f, -6606.0f, 6607.0f, -6608.0f, 6609.0f, -6610.0f, 6611.0f, -6612.0f, 6613.0f, -6614.0f, 6615.0f, -6616.0f, 6617.0f, -6618.0f, 6619.0f, -6620.0f, 6621.0f, -6622.0f, 6623.0f, -6624.0f, 6625.0f, -6626.0f, 6627.0f, -6628.0f, 6629.0f, -6630.0f, 6631.0f, -6632.0f, 6633.0f, -6634.0f, 6635.0f, -6636.0f, 6637.0f, -6638.0f, 6639.0f, -6640.0f, 6641.0f, -6642.0f, 6643.0f, -6644.0f, 6645.0f, -6646.0f, 6647.0f, -6648.0f, 6649.0f, -6650.0f, 6651.0f, -6652.0f, 6653.0f, -6654.0f, 6655.0f, -6656.0f, 6657.0f, -6658.0f, 6659.0f, -6660.0f, 6661.0f, -6662.0f, 6663.0f, -6664.0f, 6665.0f, -6666.0f, 6667.0f, -6668.0f, 6669.0f, -6670.0f, 6671.0f, -6672.0f, 6673.0f, -6674.0f, 6675.0f, -6676.0f, 6677.0f, -6678.0f, 6679.0f, -6680.0f, 6681.0f, -6682.0f, 6683.0f, -6684.0f, 6685.0f, -6686.0f, 6687.0f, -6688.0f, 6689.0f, -6690.0f, 6691.0f, -6692.0f, 6693.0f, -6694.0f, 6695.0f, -6696.0f, 6697.0f, -6698.0f, 6699.0f, -6700.0f, 6701.0f, -6702.0f, 6703.0f, -6704.0f, 6705.0f, -6706.0f, 6707.0f, -6708.0f, 6709.0f, -6710.0f, 6711.0f, -6712.0f, 6713.0f, -6714.0f, 6715.0f, -6716.0f, 6717.0f, -6718.0f, 6719.0f, -6720.0f, 6721.0f, -6722.0f, 6723.0f, -6724.0f, 6725.0f, -6726.0f, 6727.0f, -6728.0f, 6729.0f, -6730.0f, 6731.0f, -6732.0f, 6733.0f, -6734.0f, 6735.0f, -6736.0f, 6737.0f, -6738.0f, 6739.0f, -6740.0f, 6741.0f, -6742.0f, 6743.0f, -6744.0f, 6745.0f, -6746.0f, 6747.0f, -6748.0f, 6749.0f, -6750.0f, 6751.0f, -6752.0f, 6753.0f, -6754.0f, 6755.0f, -6756.0f, 6757.0f, -6758.0f, 6759.0f, -6760.0f, 6761.0f, -6762.0f, 6763.0f, -6764.0f, 6765.0f, -6766.0f, 6767.0f, -6768.0f, 6769.0f, -6770.0f, 6771.0f, -6772.0f, 6773.0f, -6774.0f, 6775.0f, -6776.0f, 6777.0f, -6778.0f, 6779.0f, -6780.0f, 6781.0f, -6782.0f, 6783.0f, -6784.0f, 6785.0f, -6786.0f, 6787.0f, -6788.0f, 6789.0f, -6790.0f, 6791.0f, -6792.0f, 6793.0f, -6794.0f, 6795.0f, -6796.0f, 6797.0f, -6798.0f, 6799.0f, -6800.0f, 6801.0f, -6802.0f, 6803.0f, -6804.0f, 6805.0f, -6806.0f, 6807.0f, -6808.0f, 6809.0f, -6810.0f, 6811.0f, -6812.0f, 6813.0f, -6814.0f, 6815.0f, -6816.0f, 6817.0f, -6818.0f, 6819.0f, -6820.0f, 6821.0f, -6822.0f, 6823.0f, -6824.0f, 6825.0f, -6826.0f, 6827.0f, -6828.0f, 6829.0f, -6830.0f, 6831.0f, -6832.0f, 6833.0f, -6834.0f, 6835.0f, -6836.0f, 6837.0f, -6838.0f, 6839.0f, -6840.0f, 6841.0f, -6842.0f, 6843.0f, -6844.0f, 6845.0f, -6846.0f, 6847.0f, 
-6848.0f, 6849.0f, -6850.0f, 6851.0f, -6852.0f, 6853.0f, -6854.0f, 6855.0f, -6856.0f, 6857.0f, -6858.0f, 6859.0f, -6860.0f, 6861.0f, -6862.0f, 6863.0f, -6864.0f, 6865.0f, -6866.0f, 6867.0f, -6868.0f, 6869.0f, -6870.0f, 6871.0f, -6872.0f, 6873.0f, -6874.0f, 6875.0f, -6876.0f, 6877.0f, -6878.0f, 6879.0f, -6880.0f, 6881.0f, -6882.0f, 6883.0f, -6884.0f, 6885.0f, -6886.0f, 6887.0f, -6888.0f, 6889.0f, -6890.0f, 6891.0f, -6892.0f, 6893.0f, -6894.0f, 6895.0f, -6896.0f, 6897.0f, -6898.0f, 6899.0f, -6900.0f, 6901.0f, -6902.0f, 6903.0f, -6904.0f, 6905.0f, -6906.0f, 6907.0f, -6908.0f, 6909.0f, -6910.0f, 6911.0f, -6912.0f, 6913.0f, -6914.0f, 6915.0f, -6916.0f, 6917.0f, -6918.0f, 6919.0f, -6920.0f, 6921.0f, -6922.0f, 6923.0f, -6924.0f, 6925.0f, -6926.0f, 6927.0f, -6928.0f, 6929.0f, -6930.0f, 6931.0f, -6932.0f, 6933.0f, -6934.0f, 6935.0f, -6936.0f, 6937.0f, -6938.0f, 6939.0f, -6940.0f, 6941.0f, -6942.0f, 6943.0f, -6944.0f, 6945.0f, -6946.0f, 6947.0f, -6948.0f, 6949.0f, -6950.0f, 6951.0f, -6952.0f, 6953.0f, -6954.0f, 6955.0f, -6956.0f, 6957.0f, -6958.0f, 6959.0f, -6960.0f, 6961.0f, -6962.0f, 6963.0f, -6964.0f, 6965.0f, -6966.0f, 6967.0f, -6968.0f, 6969.0f, -6970.0f, 6971.0f, -6972.0f, 6973.0f, -6974.0f, 6975.0f, -6976.0f, 6977.0f, -6978.0f, 6979.0f, -6980.0f, 6981.0f, -6982.0f, 6983.0f, -6984.0f, 6985.0f, -6986.0f, 6987.0f, -6988.0f, 6989.0f, -6990.0f, 6991.0f, -6992.0f, 6993.0f, -6994.0f, 6995.0f, -6996.0f, 6997.0f, -6998.0f, 6999.0f, -7000.0f, 7001.0f, -7002.0f, 7003.0f, -7004.0f, 7005.0f, -7006.0f, 7007.0f, -7008.0f, 7009.0f, -7010.0f, 7011.0f, -7012.0f, 7013.0f, -7014.0f, 7015.0f, -7016.0f, 7017.0f, -7018.0f, 7019.0f, -7020.0f, 7021.0f, -7022.0f, 7023.0f, -7024.0f, 7025.0f, -7026.0f, 7027.0f, -7028.0f, 7029.0f, -7030.0f, 7031.0f, -7032.0f, 7033.0f, -7034.0f, 7035.0f, -7036.0f, 7037.0f, -7038.0f, 7039.0f, -7040.0f, 7041.0f, -7042.0f, 7043.0f, -7044.0f, 7045.0f, -7046.0f, 7047.0f, -7048.0f, 7049.0f, -7050.0f, 7051.0f, -7052.0f, 7053.0f, -7054.0f, 7055.0f, -7056.0f, 7057.0f, -7058.0f, 7059.0f, -7060.0f, 7061.0f, -7062.0f, 7063.0f, -7064.0f, 7065.0f, -7066.0f, 7067.0f, -7068.0f, 7069.0f, -7070.0f, 7071.0f, -7072.0f, 7073.0f, -7074.0f, 7075.0f, -7076.0f, 7077.0f, -7078.0f, 7079.0f, -7080.0f, 7081.0f, -7082.0f, 7083.0f, -7084.0f, 7085.0f, -7086.0f, 7087.0f, -7088.0f, 7089.0f, -7090.0f, 7091.0f, -7092.0f, 7093.0f, -7094.0f, 7095.0f, -7096.0f, 7097.0f, -7098.0f, 7099.0f, -7100.0f, 7101.0f, -7102.0f, 7103.0f, -7104.0f, 7105.0f, -7106.0f, 7107.0f, -7108.0f, 7109.0f, -7110.0f, 7111.0f, -7112.0f, 7113.0f, -7114.0f, 7115.0f, -7116.0f, 7117.0f, -7118.0f, 7119.0f, -7120.0f, 7121.0f, -7122.0f, 7123.0f, -7124.0f, 7125.0f, -7126.0f, 7127.0f, -7128.0f, 7129.0f, -7130.0f, 7131.0f, -7132.0f, 7133.0f, -7134.0f, 7135.0f, -7136.0f, 7137.0f, -7138.0f, 7139.0f, -7140.0f, 7141.0f, -7142.0f, 7143.0f, -7144.0f, 7145.0f, -7146.0f, 7147.0f, -7148.0f, 7149.0f, -7150.0f, 7151.0f, -7152.0f, 7153.0f, -7154.0f, 7155.0f, -7156.0f, 7157.0f, -7158.0f, 7159.0f, -7160.0f, 7161.0f, -7162.0f, 7163.0f, -7164.0f, 7165.0f, -7166.0f, 7167.0f, -7168.0f, 7169.0f, -7170.0f, 7171.0f, -7172.0f, 7173.0f, -7174.0f, 7175.0f, -7176.0f, 7177.0f, -7178.0f, 7179.0f, -7180.0f, 7181.0f, -7182.0f, 7183.0f, -7184.0f, 7185.0f, -7186.0f, 7187.0f, -7188.0f, 7189.0f, -7190.0f, 7191.0f, -7192.0f, 7193.0f, -7194.0f, 7195.0f, -7196.0f, 7197.0f, -7198.0f, 7199.0f, -7200.0f, 7201.0f, -7202.0f, 7203.0f, -7204.0f, 7205.0f, -7206.0f, 7207.0f, -7208.0f, 7209.0f, -7210.0f, 7211.0f, -7212.0f, 7213.0f, -7214.0f, 7215.0f, -7216.0f, 7217.0f, -7218.0f, 7219.0f, -7220.0f, 7221.0f, 
-7222.0f, 7223.0f, -7224.0f, 7225.0f, -7226.0f, 7227.0f, -7228.0f, 7229.0f, -7230.0f, 7231.0f, -7232.0f, 7233.0f, -7234.0f, 7235.0f, -7236.0f, 7237.0f, -7238.0f, 7239.0f, -7240.0f, 7241.0f, -7242.0f, 7243.0f, -7244.0f, 7245.0f, -7246.0f, 7247.0f, -7248.0f, 7249.0f, -7250.0f, 7251.0f, -7252.0f, 7253.0f, -7254.0f, 7255.0f, -7256.0f, 7257.0f, -7258.0f, 7259.0f, -7260.0f, 7261.0f, -7262.0f, 7263.0f, -7264.0f, 7265.0f, -7266.0f, 7267.0f, -7268.0f, 7269.0f, -7270.0f, 7271.0f, -7272.0f, 7273.0f, -7274.0f, 7275.0f, -7276.0f, 7277.0f, -7278.0f, 7279.0f, -7280.0f, 7281.0f, -7282.0f, 7283.0f, -7284.0f, 7285.0f, -7286.0f, 7287.0f, -7288.0f, 7289.0f, -7290.0f, 7291.0f, -7292.0f, 7293.0f, -7294.0f, 7295.0f, -7296.0f, 7297.0f, -7298.0f, 7299.0f, -7300.0f, 7301.0f, -7302.0f, 7303.0f, -7304.0f, 7305.0f, -7306.0f, 7307.0f, -7308.0f, 7309.0f, -7310.0f, 7311.0f, -7312.0f, 7313.0f, -7314.0f, 7315.0f, -7316.0f, 7317.0f, -7318.0f, 7319.0f, -7320.0f, 7321.0f, -7322.0f, 7323.0f, -7324.0f, 7325.0f, -7326.0f, 7327.0f, -7328.0f, 7329.0f, -7330.0f, 7331.0f, -7332.0f, 7333.0f, -7334.0f, 7335.0f, -7336.0f, 7337.0f, -7338.0f, 7339.0f, -7340.0f, 7341.0f, -7342.0f, 7343.0f, -7344.0f, 7345.0f, -7346.0f, 7347.0f, -7348.0f, 7349.0f, -7350.0f, 7351.0f, -7352.0f, 7353.0f, -7354.0f, 7355.0f, -7356.0f, 7357.0f, -7358.0f, 7359.0f, -7360.0f, 7361.0f, -7362.0f, 7363.0f, -7364.0f, 7365.0f, -7366.0f, 7367.0f, -7368.0f, 7369.0f, -7370.0f, 7371.0f, -7372.0f, 7373.0f, -7374.0f, 7375.0f, -7376.0f, 7377.0f, -7378.0f, 7379.0f, -7380.0f, 7381.0f, -7382.0f, 7383.0f, -7384.0f, 7385.0f, -7386.0f, 7387.0f, -7388.0f, 7389.0f, -7390.0f, 7391.0f, -7392.0f, 7393.0f, -7394.0f, 7395.0f, -7396.0f, 7397.0f, -7398.0f, 7399.0f, -7400.0f, 7401.0f, -7402.0f, 7403.0f, -7404.0f, 7405.0f, -7406.0f, 7407.0f, -7408.0f, 7409.0f, -7410.0f, 7411.0f, -7412.0f, 7413.0f, -7414.0f, 7415.0f, -7416.0f, 7417.0f, -7418.0f, 7419.0f, -7420.0f, 7421.0f, -7422.0f, 7423.0f, -7424.0f, 7425.0f, -7426.0f, 7427.0f, -7428.0f, 7429.0f, -7430.0f, 7431.0f, -7432.0f, 7433.0f, -7434.0f, 7435.0f, -7436.0f, 7437.0f, -7438.0f, 7439.0f, -7440.0f, 7441.0f, -7442.0f, 7443.0f, -7444.0f, 7445.0f, -7446.0f, 7447.0f, -7448.0f, 7449.0f, -7450.0f, 7451.0f, -7452.0f, 7453.0f, -7454.0f, 7455.0f, -7456.0f, 7457.0f, -7458.0f, 7459.0f, -7460.0f, 7461.0f, -7462.0f, 7463.0f, -7464.0f, 7465.0f, -7466.0f, 7467.0f, -7468.0f, 7469.0f, -7470.0f, 7471.0f, -7472.0f, 7473.0f, -7474.0f, 7475.0f, -7476.0f, 7477.0f, -7478.0f, 7479.0f, -7480.0f, 7481.0f, -7482.0f, 7483.0f, -7484.0f, 7485.0f, -7486.0f, 7487.0f, -7488.0f, 7489.0f, -7490.0f, 7491.0f, -7492.0f, 7493.0f, -7494.0f, 7495.0f, -7496.0f, 7497.0f, -7498.0f, 7499.0f, -7500.0f, 7501.0f, -7502.0f, 7503.0f, -7504.0f, 7505.0f, -7506.0f, 7507.0f, -7508.0f, 7509.0f, -7510.0f, 7511.0f, -7512.0f, 7513.0f, -7514.0f, 7515.0f, -7516.0f, 7517.0f, -7518.0f, 7519.0f, -7520.0f, 7521.0f, -7522.0f, 7523.0f, -7524.0f, 7525.0f, -7526.0f, 7527.0f, -7528.0f, 7529.0f, -7530.0f, 7531.0f, -7532.0f, 7533.0f, -7534.0f, 7535.0f, -7536.0f, 7537.0f, -7538.0f, 7539.0f, -7540.0f, 7541.0f, -7542.0f, 7543.0f, -7544.0f, 7545.0f, -7546.0f, 7547.0f, -7548.0f, 7549.0f, -7550.0f, 7551.0f, -7552.0f, 7553.0f, -7554.0f, 7555.0f, -7556.0f, 7557.0f, -7558.0f, 7559.0f, -7560.0f, 7561.0f, -7562.0f, 7563.0f, -7564.0f, 7565.0f, -7566.0f, 7567.0f, -7568.0f, 7569.0f, -7570.0f, 7571.0f, -7572.0f, 7573.0f, -7574.0f, 7575.0f, -7576.0f, 7577.0f, -7578.0f, 7579.0f, -7580.0f, 7581.0f, -7582.0f, 7583.0f, -7584.0f, 7585.0f, -7586.0f, 7587.0f, -7588.0f, 7589.0f, -7590.0f, 7591.0f, -7592.0f, 7593.0f, -7594.0f, 7595.0f, 
-7596.0f, 7597.0f, -7598.0f, 7599.0f, -7600.0f, 7601.0f, -7602.0f, 7603.0f, -7604.0f, 7605.0f, -7606.0f, 7607.0f, -7608.0f, 7609.0f, -7610.0f, 7611.0f, -7612.0f, 7613.0f, -7614.0f, 7615.0f, -7616.0f, 7617.0f, -7618.0f, 7619.0f, -7620.0f, 7621.0f, -7622.0f, 7623.0f, -7624.0f, 7625.0f, -7626.0f, 7627.0f, -7628.0f, 7629.0f, -7630.0f, 7631.0f, -7632.0f, 7633.0f, -7634.0f, 7635.0f, -7636.0f, 7637.0f, -7638.0f, 7639.0f, -7640.0f, 7641.0f, -7642.0f, 7643.0f, -7644.0f, 7645.0f, -7646.0f, 7647.0f, -7648.0f, 7649.0f, -7650.0f, 7651.0f, -7652.0f, 7653.0f, -7654.0f, 7655.0f, -7656.0f, 7657.0f, -7658.0f, 7659.0f, -7660.0f, 7661.0f, -7662.0f, 7663.0f, -7664.0f, 7665.0f, -7666.0f, 7667.0f, -7668.0f, 7669.0f, -7670.0f, 7671.0f, -7672.0f, 7673.0f, -7674.0f, 7675.0f, -7676.0f, 7677.0f, -7678.0f, 7679.0f, -7680.0f, 7681.0f, -7682.0f, 7683.0f, -7684.0f, 7685.0f, -7686.0f, 7687.0f, -7688.0f, 7689.0f, -7690.0f, 7691.0f, -7692.0f, 7693.0f, -7694.0f, 7695.0f, -7696.0f, 7697.0f, -7698.0f, 7699.0f, -7700.0f, 7701.0f, -7702.0f, 7703.0f, -7704.0f, 7705.0f, -7706.0f, 7707.0f, -7708.0f, 7709.0f, -7710.0f, 7711.0f, -7712.0f, 7713.0f, -7714.0f, 7715.0f, -7716.0f, 7717.0f, -7718.0f, 7719.0f, -7720.0f, 7721.0f, -7722.0f, 7723.0f, -7724.0f, 7725.0f, -7726.0f, 7727.0f, -7728.0f, 7729.0f, -7730.0f, 7731.0f, -7732.0f, 7733.0f, -7734.0f, 7735.0f, -7736.0f, 7737.0f, -7738.0f, 7739.0f, -7740.0f, 7741.0f, -7742.0f, 7743.0f, -7744.0f, 7745.0f, -7746.0f, 7747.0f, -7748.0f, 7749.0f, -7750.0f, 7751.0f, -7752.0f, 7753.0f, -7754.0f, 7755.0f, -7756.0f, 7757.0f, -7758.0f, 7759.0f, -7760.0f, 7761.0f, -7762.0f, 7763.0f, -7764.0f, 7765.0f, -7766.0f, 7767.0f, -7768.0f, 7769.0f, -7770.0f, 7771.0f, -7772.0f, 7773.0f, -7774.0f, 7775.0f, -7776.0f, 7777.0f, -7778.0f, 7779.0f, -7780.0f, 7781.0f, -7782.0f, 7783.0f, -7784.0f, 7785.0f, -7786.0f, 7787.0f, -7788.0f, 7789.0f, -7790.0f, 7791.0f, -7792.0f, 7793.0f, -7794.0f, 7795.0f, -7796.0f, 7797.0f, -7798.0f, 7799.0f, -7800.0f, 7801.0f, -7802.0f, 7803.0f, -7804.0f, 7805.0f, -7806.0f, 7807.0f, -7808.0f, 7809.0f, -7810.0f, 7811.0f, -7812.0f, 7813.0f, -7814.0f, 7815.0f, -7816.0f, 7817.0f, -7818.0f, 7819.0f, -7820.0f, 7821.0f, -7822.0f, 7823.0f, -7824.0f, 7825.0f, -7826.0f, 7827.0f, -7828.0f, 7829.0f, -7830.0f, 7831.0f, -7832.0f, 7833.0f, -7834.0f, 7835.0f, -7836.0f, 7837.0f, -7838.0f, 7839.0f, -7840.0f, 7841.0f, -7842.0f, 7843.0f, -7844.0f, 7845.0f, -7846.0f, 7847.0f, -7848.0f, 7849.0f, -7850.0f, 7851.0f, -7852.0f, 7853.0f, -7854.0f, 7855.0f, -7856.0f, 7857.0f, -7858.0f, 7859.0f, -7860.0f, 7861.0f, -7862.0f, 7863.0f, -7864.0f, 7865.0f, -7866.0f, 7867.0f, -7868.0f, 7869.0f, -7870.0f, 7871.0f, -7872.0f, 7873.0f, -7874.0f, 7875.0f, -7876.0f, 7877.0f, -7878.0f, 7879.0f, -7880.0f, 7881.0f, -7882.0f, 7883.0f, -7884.0f, 7885.0f, -7886.0f, 7887.0f, -7888.0f, 7889.0f, -7890.0f, 7891.0f, -7892.0f, 7893.0f, -7894.0f, 7895.0f, -7896.0f, 7897.0f, -7898.0f, 7899.0f, -7900.0f, 7901.0f, -7902.0f, 7903.0f, -7904.0f, 7905.0f, -7906.0f, 7907.0f, -7908.0f, 7909.0f, -7910.0f, 7911.0f, -7912.0f, 7913.0f, -7914.0f, 7915.0f, -7916.0f, 7917.0f, -7918.0f, 7919.0f, -7920.0f, 7921.0f, -7922.0f, 7923.0f, -7924.0f, 7925.0f, -7926.0f, 7927.0f, -7928.0f, 7929.0f, -7930.0f, 7931.0f, -7932.0f, 7933.0f, -7934.0f, 7935.0f, -7936.0f, 7937.0f, -7938.0f, 7939.0f, -7940.0f, 7941.0f, -7942.0f, 7943.0f, -7944.0f, 7945.0f, -7946.0f, 7947.0f, -7948.0f, 7949.0f, -7950.0f, 7951.0f, -7952.0f, 7953.0f, -7954.0f, 7955.0f, -7956.0f, 7957.0f, -7958.0f, 7959.0f, -7960.0f, 7961.0f, -7962.0f, 7963.0f, -7964.0f, 7965.0f, -7966.0f, 7967.0f, -7968.0f, 7969.0f, 
-7970.0f, 7971.0f, -7972.0f, 7973.0f, -7974.0f, 7975.0f, -7976.0f, 7977.0f, -7978.0f, 7979.0f, -7980.0f, 7981.0f, -7982.0f, 7983.0f, -7984.0f, 7985.0f, -7986.0f, 7987.0f, -7988.0f, 7989.0f, -7990.0f, 7991.0f, -7992.0f, 7993.0f, -7994.0f, 7995.0f, -7996.0f, 7997.0f, -7998.0f, 7999.0f, -8000.0f, 8001.0f, -8002.0f, 8003.0f, -8004.0f, 8005.0f, -8006.0f, 8007.0f, -8008.0f, 8009.0f, -8010.0f, 8011.0f, -8012.0f, 8013.0f, -8014.0f, 8015.0f, -8016.0f, 8017.0f, -8018.0f, 8019.0f, -8020.0f, 8021.0f, -8022.0f, 8023.0f, -8024.0f, 8025.0f, -8026.0f, 8027.0f, -8028.0f, 8029.0f, -8030.0f, 8031.0f, -8032.0f, 8033.0f, -8034.0f, 8035.0f, -8036.0f, 8037.0f, -8038.0f, 8039.0f, -8040.0f, 8041.0f, -8042.0f, 8043.0f, -8044.0f, 8045.0f, -8046.0f, 8047.0f, -8048.0f, 8049.0f, -8050.0f, 8051.0f, -8052.0f, 8053.0f, -8054.0f, 8055.0f, -8056.0f, 8057.0f, -8058.0f, 8059.0f, -8060.0f, 8061.0f, -8062.0f, 8063.0f, -8064.0f, 8065.0f, -8066.0f, 8067.0f, -8068.0f, 8069.0f, -8070.0f, 8071.0f, -8072.0f, 8073.0f, -8074.0f, 8075.0f, -8076.0f, 8077.0f, -8078.0f, 8079.0f, -8080.0f, 8081.0f, -8082.0f, 8083.0f, -8084.0f, 8085.0f, -8086.0f, 8087.0f, -8088.0f, 8089.0f, -8090.0f, 8091.0f, -8092.0f, 8093.0f, -8094.0f, 8095.0f, -8096.0f, 8097.0f, -8098.0f, 8099.0f, -8100.0f, 8101.0f, -8102.0f, 8103.0f, -8104.0f, 8105.0f, -8106.0f, 8107.0f, -8108.0f, 8109.0f, -8110.0f, 8111.0f, -8112.0f, 8113.0f, -8114.0f, 8115.0f, -8116.0f, 8117.0f, -8118.0f, 8119.0f, -8120.0f, 8121.0f, -8122.0f, 8123.0f, -8124.0f, 8125.0f, -8126.0f, 8127.0f, -8128.0f, 8129.0f, -8130.0f, 8131.0f, -8132.0f, 8133.0f, -8134.0f, 8135.0f, -8136.0f, 8137.0f, -8138.0f, 8139.0f, -8140.0f, 8141.0f, -8142.0f, 8143.0f, -8144.0f, 8145.0f, -8146.0f, 8147.0f, -8148.0f, 8149.0f, -8150.0f, 8151.0f, -8152.0f, 8153.0f, -8154.0f, 8155.0f, -8156.0f, 8157.0f, -8158.0f, 8159.0f, -8160.0f, 8161.0f, -8162.0f, 8163.0f, -8164.0f, 8165.0f, -8166.0f, 8167.0f, -8168.0f, 8169.0f, -8170.0f, 8171.0f, -8172.0f, 8173.0f, -8174.0f, 8175.0f, -8176.0f, 8177.0f, -8178.0f, 8179.0f, -8180.0f, 8181.0f, -8182.0f, 8183.0f, -8184.0f, 8185.0f, -8186.0f, 8187.0f, -8188.0f, 8189.0f, -8190.0f, 8191.0f, -8192.0f, 8193.0f, -8194.0f, 8195.0f, -8196.0f, 8197.0f, -8198.0f, 8199.0f, -8200.0f, 8201.0f, -8202.0f, 8203.0f, -8204.0f, 8205.0f, -8206.0f, 8207.0f, -8208.0f, 8209.0f, -8210.0f, 8211.0f, -8212.0f, 8213.0f, -8214.0f, 8215.0f, -8216.0f, 8217.0f, -8218.0f, 8219.0f, -8220.0f, 8221.0f, -8222.0f, 8223.0f, -8224.0f, 8225.0f, -8226.0f, 8227.0f, -8228.0f, 8229.0f, -8230.0f, 8231.0f, -8232.0f, 8233.0f, -8234.0f, 8235.0f, -8236.0f, 8237.0f, -8238.0f, 8239.0f, -8240.0f, 8241.0f, -8242.0f, 8243.0f, -8244.0f, 8245.0f, -8246.0f, 8247.0f, -8248.0f, 8249.0f, -8250.0f, 8251.0f, -8252.0f, 8253.0f, -8254.0f, 8255.0f, -8256.0f, 8257.0f, -8258.0f, 8259.0f, -8260.0f, 8261.0f, -8262.0f, 8263.0f, -8264.0f, 8265.0f, -8266.0f, 8267.0f, -8268.0f, 8269.0f, -8270.0f, 8271.0f, -8272.0f, 8273.0f, -8274.0f, 8275.0f, -8276.0f, 8277.0f, -8278.0f, 8279.0f, -8280.0f, 8281.0f, -8282.0f, 8283.0f, -8284.0f, 8285.0f, -8286.0f, 8287.0f, -8288.0f, 8289.0f, -8290.0f, 8291.0f, -8292.0f, 8293.0f, -8294.0f, 8295.0f, -8296.0f, 8297.0f, -8298.0f, 8299.0f, -8300.0f, 8301.0f, -8302.0f, 8303.0f, -8304.0f, 8305.0f, -8306.0f, 8307.0f, -8308.0f, 8309.0f, -8310.0f, 8311.0f, -8312.0f, 8313.0f, -8314.0f, 8315.0f, -8316.0f, 8317.0f, -8318.0f, 8319.0f, -8320.0f, 8321.0f, -8322.0f, 8323.0f, -8324.0f, 8325.0f, -8326.0f, 8327.0f, -8328.0f, 8329.0f, -8330.0f, 8331.0f, -8332.0f, 8333.0f, -8334.0f, 8335.0f, -8336.0f, 8337.0f, -8338.0f, 8339.0f, -8340.0f, 8341.0f, -8342.0f, 8343.0f, 
-8344.0f, 8345.0f, -8346.0f, 8347.0f, -8348.0f, 8349.0f, -8350.0f, 8351.0f, -8352.0f, 8353.0f, -8354.0f, 8355.0f, -8356.0f, 8357.0f, -8358.0f, 8359.0f, -8360.0f, 8361.0f, -8362.0f, 8363.0f, -8364.0f, 8365.0f, -8366.0f, 8367.0f, -8368.0f, 8369.0f, -8370.0f, 8371.0f, -8372.0f, 8373.0f, -8374.0f, 8375.0f, -8376.0f, 8377.0f, -8378.0f, 8379.0f, -8380.0f, 8381.0f, -8382.0f, 8383.0f, -8384.0f, 8385.0f, -8386.0f, 8387.0f, -8388.0f, 8389.0f, -8390.0f, 8391.0f, -8392.0f, 8393.0f, -8394.0f, 8395.0f, -8396.0f, 8397.0f, -8398.0f, 8399.0f, -8400.0f, 8401.0f, -8402.0f, 8403.0f, -8404.0f, 8405.0f, -8406.0f, 8407.0f, -8408.0f, 8409.0f, -8410.0f, 8411.0f, -8412.0f, 8413.0f, -8414.0f, 8415.0f, -8416.0f, 8417.0f, -8418.0f, 8419.0f, -8420.0f, 8421.0f, -8422.0f, 8423.0f, -8424.0f, 8425.0f, -8426.0f, 8427.0f, -8428.0f, 8429.0f, -8430.0f, 8431.0f, -8432.0f, 8433.0f, -8434.0f, 8435.0f, -8436.0f, 8437.0f, -8438.0f, 8439.0f, -8440.0f, 8441.0f, -8442.0f, 8443.0f, -8444.0f, 8445.0f, -8446.0f, 8447.0f, -8448.0f, 8449.0f, -8450.0f, 8451.0f, -8452.0f, 8453.0f, -8454.0f, 8455.0f, -8456.0f, 8457.0f, -8458.0f, 8459.0f, -8460.0f, 8461.0f, -8462.0f, 8463.0f, -8464.0f, 8465.0f, -8466.0f, 8467.0f, -8468.0f, 8469.0f, -8470.0f, 8471.0f, -8472.0f, 8473.0f, -8474.0f, 8475.0f, -8476.0f, 8477.0f, -8478.0f, 8479.0f, -8480.0f, 8481.0f, -8482.0f, 8483.0f, -8484.0f, 8485.0f, -8486.0f, 8487.0f, -8488.0f, 8489.0f, -8490.0f, 8491.0f, -8492.0f, 8493.0f, -8494.0f, 8495.0f, -8496.0f, 8497.0f, -8498.0f, 8499.0f, -8500.0f, 8501.0f, -8502.0f, 8503.0f, -8504.0f, 8505.0f, -8506.0f, 8507.0f, -8508.0f, 8509.0f, -8510.0f, 8511.0f, -8512.0f, 8513.0f, -8514.0f, 8515.0f, -8516.0f, 8517.0f, -8518.0f, 8519.0f, -8520.0f, 8521.0f, -8522.0f, 8523.0f, -8524.0f, 8525.0f, -8526.0f, 8527.0f, -8528.0f, 8529.0f, -8530.0f, 8531.0f, -8532.0f, 8533.0f, -8534.0f, 8535.0f, -8536.0f, 8537.0f, -8538.0f, 8539.0f, -8540.0f, 8541.0f, -8542.0f, 8543.0f, -8544.0f, 8545.0f, -8546.0f, 8547.0f, -8548.0f, 8549.0f, -8550.0f, 8551.0f, -8552.0f, 8553.0f, -8554.0f, 8555.0f, -8556.0f, 8557.0f, -8558.0f, 8559.0f, -8560.0f, 8561.0f, -8562.0f, 8563.0f, -8564.0f, 8565.0f, -8566.0f, 8567.0f, -8568.0f, 8569.0f, -8570.0f, 8571.0f, -8572.0f, 8573.0f, -8574.0f, 8575.0f, -8576.0f, 8577.0f, -8578.0f, 8579.0f, -8580.0f, 8581.0f, -8582.0f, 8583.0f, -8584.0f, 8585.0f, -8586.0f, 8587.0f, -8588.0f, 8589.0f, -8590.0f, 8591.0f, -8592.0f, 8593.0f, -8594.0f, 8595.0f, -8596.0f, 8597.0f, -8598.0f, 8599.0f, -8600.0f, 8601.0f, -8602.0f, 8603.0f, -8604.0f, 8605.0f, -8606.0f, 8607.0f, -8608.0f, 8609.0f, -8610.0f, 8611.0f, -8612.0f, 8613.0f, -8614.0f, 8615.0f, -8616.0f, 8617.0f, -8618.0f, 8619.0f, -8620.0f, 8621.0f, -8622.0f, 8623.0f, -8624.0f, 8625.0f, -8626.0f, 8627.0f, -8628.0f, 8629.0f, -8630.0f, 8631.0f, -8632.0f, 8633.0f, -8634.0f, 8635.0f, -8636.0f, 8637.0f, -8638.0f, 8639.0f, -8640.0f, 8641.0f, -8642.0f, 8643.0f, -8644.0f, 8645.0f, -8646.0f, 8647.0f, -8648.0f, 8649.0f, -8650.0f, 8651.0f, -8652.0f, 8653.0f, -8654.0f, 8655.0f, -8656.0f, 8657.0f, -8658.0f, 8659.0f, -8660.0f, 8661.0f, -8662.0f, 8663.0f, -8664.0f, 8665.0f, -8666.0f, 8667.0f, -8668.0f, 8669.0f, -8670.0f, 8671.0f, -8672.0f, 8673.0f, -8674.0f, 8675.0f, -8676.0f, 8677.0f, -8678.0f, 8679.0f, -8680.0f, 8681.0f, -8682.0f, 8683.0f, -8684.0f, 8685.0f, -8686.0f, 8687.0f, -8688.0f, 8689.0f, -8690.0f, 8691.0f, -8692.0f, 8693.0f, -8694.0f, 8695.0f, -8696.0f, 8697.0f, -8698.0f, 8699.0f, -8700.0f, 8701.0f, -8702.0f, 8703.0f, -8704.0f, 8705.0f, -8706.0f, 8707.0f, -8708.0f, 8709.0f, -8710.0f, 8711.0f, -8712.0f, 8713.0f, -8714.0f, 8715.0f, -8716.0f, 8717.0f, 
-8718.0f, 8719.0f, -8720.0f, 8721.0f, -8722.0f, 8723.0f, -8724.0f, 8725.0f, -8726.0f, 8727.0f, -8728.0f, 8729.0f, -8730.0f, 8731.0f, -8732.0f, 8733.0f, -8734.0f, 8735.0f, -8736.0f, 8737.0f, -8738.0f, 8739.0f, -8740.0f, 8741.0f, -8742.0f, 8743.0f, -8744.0f, 8745.0f, -8746.0f, 8747.0f, -8748.0f, 8749.0f, -8750.0f, 8751.0f, -8752.0f, 8753.0f, -8754.0f, 8755.0f, -8756.0f, 8757.0f, -8758.0f, 8759.0f, -8760.0f, 8761.0f, -8762.0f, 8763.0f, -8764.0f, 8765.0f, -8766.0f, 8767.0f, -8768.0f, 8769.0f, -8770.0f, 8771.0f, -8772.0f, 8773.0f, -8774.0f, 8775.0f, -8776.0f, 8777.0f, -8778.0f, 8779.0f, -8780.0f, 8781.0f, -8782.0f, 8783.0f, -8784.0f, 8785.0f, -8786.0f, 8787.0f, -8788.0f, 8789.0f, -8790.0f, 8791.0f, -8792.0f, 8793.0f, -8794.0f, 8795.0f, -8796.0f, 8797.0f, -8798.0f, 8799.0f, -8800.0f, 8801.0f, -8802.0f, 8803.0f, -8804.0f, 8805.0f, -8806.0f, 8807.0f, -8808.0f, 8809.0f, -8810.0f, 8811.0f, -8812.0f, 8813.0f, -8814.0f, 8815.0f, -8816.0f, 8817.0f, -8818.0f, 8819.0f, -8820.0f, 8821.0f, -8822.0f, 8823.0f, -8824.0f, 8825.0f, -8826.0f, 8827.0f, -8828.0f, 8829.0f, -8830.0f, 8831.0f, -8832.0f, 8833.0f, -8834.0f, 8835.0f, -8836.0f, 8837.0f, -8838.0f, 8839.0f, -8840.0f, 8841.0f, -8842.0f, 8843.0f, -8844.0f, 8845.0f, -8846.0f, 8847.0f, -8848.0f, 8849.0f, -8850.0f, 8851.0f, -8852.0f, 8853.0f, -8854.0f, 8855.0f, -8856.0f, 8857.0f, -8858.0f, 8859.0f, -8860.0f, 8861.0f, -8862.0f, 8863.0f, -8864.0f, 8865.0f, -8866.0f, 8867.0f, -8868.0f, 8869.0f, -8870.0f, 8871.0f, -8872.0f, 8873.0f, -8874.0f, 8875.0f, -8876.0f, 8877.0f, -8878.0f, 8879.0f, -8880.0f, 8881.0f, -8882.0f, 8883.0f, -8884.0f, 8885.0f, -8886.0f, 8887.0f, -8888.0f, 8889.0f, -8890.0f, 8891.0f, -8892.0f, 8893.0f, -8894.0f, 8895.0f, -8896.0f, 8897.0f, -8898.0f, 8899.0f, -8900.0f, 8901.0f, -8902.0f, 8903.0f, -8904.0f, 8905.0f, -8906.0f, 8907.0f, -8908.0f, 8909.0f, -8910.0f, 8911.0f, -8912.0f, 8913.0f, -8914.0f, 8915.0f, -8916.0f, 8917.0f, -8918.0f, 8919.0f, -8920.0f, 8921.0f, -8922.0f, 8923.0f, -8924.0f, 8925.0f, -8926.0f, 8927.0f, -8928.0f, 8929.0f, -8930.0f, 8931.0f, -8932.0f, 8933.0f, -8934.0f, 8935.0f, -8936.0f, 8937.0f, -8938.0f, 8939.0f, -8940.0f, 8941.0f, -8942.0f, 8943.0f, -8944.0f, 8945.0f, -8946.0f, 8947.0f, -8948.0f, 8949.0f, -8950.0f, 8951.0f, -8952.0f, 8953.0f, -8954.0f, 8955.0f, -8956.0f, 8957.0f, -8958.0f, 8959.0f, -8960.0f, 8961.0f, -8962.0f, 8963.0f, -8964.0f, 8965.0f, -8966.0f, 8967.0f, -8968.0f, 8969.0f, -8970.0f, 8971.0f, -8972.0f, 8973.0f, -8974.0f, 8975.0f, -8976.0f, 8977.0f, -8978.0f, 8979.0f, -8980.0f, 8981.0f, -8982.0f, 8983.0f, -8984.0f, 8985.0f, -8986.0f, 8987.0f, -8988.0f, 8989.0f, -8990.0f, 8991.0f, -8992.0f, 8993.0f, -8994.0f, 8995.0f, -8996.0f, 8997.0f, -8998.0f, 8999.0f, -9000.0f, 9001.0f, -9002.0f, 9003.0f, -9004.0f, 9005.0f, -9006.0f, 9007.0f, -9008.0f, 9009.0f, -9010.0f, 9011.0f, -9012.0f, 9013.0f, -9014.0f, 9015.0f, -9016.0f, 9017.0f, -9018.0f, 9019.0f, -9020.0f, 9021.0f, -9022.0f, 9023.0f, -9024.0f, 9025.0f, -9026.0f, 9027.0f, -9028.0f, 9029.0f, -9030.0f, 9031.0f, -9032.0f, 9033.0f, -9034.0f, 9035.0f, -9036.0f, 9037.0f, -9038.0f, 9039.0f, -9040.0f, 9041.0f, -9042.0f, 9043.0f, -9044.0f, 9045.0f, -9046.0f, 9047.0f, -9048.0f, 9049.0f, -9050.0f, 9051.0f, -9052.0f, 9053.0f, -9054.0f, 9055.0f, -9056.0f, 9057.0f, -9058.0f, 9059.0f, -9060.0f, 9061.0f, -9062.0f, 9063.0f, -9064.0f, 9065.0f, -9066.0f, 9067.0f, -9068.0f, 9069.0f, -9070.0f, 9071.0f, -9072.0f, 9073.0f, -9074.0f, 9075.0f, -9076.0f, 9077.0f, -9078.0f, 9079.0f, -9080.0f, 9081.0f, -9082.0f, 9083.0f, -9084.0f, 9085.0f, -9086.0f, 9087.0f, -9088.0f, 9089.0f, -9090.0f, 9091.0f, 
-9092.0f, 9093.0f, -9094.0f, 9095.0f, -9096.0f, 9097.0f, -9098.0f, 9099.0f, -9100.0f, 9101.0f, -9102.0f, 9103.0f, -9104.0f, 9105.0f, -9106.0f, 9107.0f, -9108.0f, 9109.0f, -9110.0f, 9111.0f, -9112.0f, 9113.0f, -9114.0f, 9115.0f, -9116.0f, 9117.0f, -9118.0f, 9119.0f, -9120.0f, 9121.0f, -9122.0f, 9123.0f, -9124.0f, 9125.0f, -9126.0f, 9127.0f, -9128.0f, 9129.0f, -9130.0f, 9131.0f, -9132.0f, 9133.0f, -9134.0f, 9135.0f, -9136.0f, 9137.0f, -9138.0f, 9139.0f, -9140.0f, 9141.0f, -9142.0f, 9143.0f, -9144.0f, 9145.0f, -9146.0f, 9147.0f, -9148.0f, 9149.0f, -9150.0f, 9151.0f, -9152.0f, 9153.0f, -9154.0f, 9155.0f, -9156.0f, 9157.0f, -9158.0f, 9159.0f, -9160.0f, 9161.0f, -9162.0f, 9163.0f, -9164.0f, 9165.0f, -9166.0f, 9167.0f, -9168.0f, 9169.0f, -9170.0f, 9171.0f, -9172.0f, 9173.0f, -9174.0f, 9175.0f, -9176.0f, 9177.0f, -9178.0f, 9179.0f, -9180.0f, 9181.0f, -9182.0f, 9183.0f, -9184.0f, 9185.0f, -9186.0f, 9187.0f, -9188.0f, 9189.0f, -9190.0f, 9191.0f, -9192.0f, 9193.0f, -9194.0f, 9195.0f, -9196.0f, 9197.0f, -9198.0f, 9199.0f, -9200.0f, 9201.0f, -9202.0f, 9203.0f, -9204.0f, 9205.0f, -9206.0f, 9207.0f, -9208.0f, 9209.0f, -9210.0f, 9211.0f, -9212.0f, 9213.0f, -9214.0f, 9215.0f, -9216.0f, 9217.0f, -9218.0f, 9219.0f, -9220.0f, 9221.0f, -9222.0f, 9223.0f, -9224.0f, 9225.0f, -9226.0f, 9227.0f, -9228.0f, 9229.0f, -9230.0f, 9231.0f, -9232.0f, 9233.0f, -9234.0f, 9235.0f, -9236.0f, 9237.0f, -9238.0f, 9239.0f, -9240.0f, 9241.0f, -9242.0f, 9243.0f, -9244.0f, 9245.0f, -9246.0f, 9247.0f, -9248.0f, 9249.0f, -9250.0f, 9251.0f, -9252.0f, 9253.0f, -9254.0f, 9255.0f, -9256.0f, 9257.0f, -9258.0f, 9259.0f, -9260.0f, 9261.0f, -9262.0f, 9263.0f, -9264.0f, 9265.0f, -9266.0f, 9267.0f, -9268.0f, 9269.0f, -9270.0f, 9271.0f, -9272.0f, 9273.0f, -9274.0f, 9275.0f, -9276.0f, 9277.0f, -9278.0f, 9279.0f, -9280.0f, 9281.0f, -9282.0f, 9283.0f, -9284.0f, 9285.0f, -9286.0f, 9287.0f, -9288.0f, 9289.0f, -9290.0f, 9291.0f, -9292.0f, 9293.0f, -9294.0f, 9295.0f, -9296.0f, 9297.0f, -9298.0f, 9299.0f, -9300.0f, 9301.0f, -9302.0f, 9303.0f, -9304.0f, 9305.0f, -9306.0f, 9307.0f, -9308.0f, 9309.0f, -9310.0f, 9311.0f, -9312.0f, 9313.0f, -9314.0f, 9315.0f, -9316.0f, 9317.0f, -9318.0f, 9319.0f, -9320.0f, 9321.0f, -9322.0f, 9323.0f, -9324.0f, 9325.0f, -9326.0f, 9327.0f, -9328.0f, 9329.0f, -9330.0f, 9331.0f, -9332.0f, 9333.0f, -9334.0f, 9335.0f, -9336.0f, 9337.0f, -9338.0f, 9339.0f, -9340.0f, 9341.0f, -9342.0f, 9343.0f, -9344.0f, 9345.0f, -9346.0f, 9347.0f, -9348.0f, 9349.0f, -9350.0f, 9351.0f, -9352.0f, 9353.0f, -9354.0f, 9355.0f, -9356.0f, 9357.0f, -9358.0f, 9359.0f, -9360.0f, 9361.0f, -9362.0f, 9363.0f, -9364.0f, 9365.0f, -9366.0f, 9367.0f, -9368.0f, 9369.0f, -9370.0f, 9371.0f, -9372.0f, 9373.0f, -9374.0f, 9375.0f, -9376.0f, 9377.0f, -9378.0f, 9379.0f, -9380.0f, 9381.0f, -9382.0f, 9383.0f, -9384.0f, 9385.0f, -9386.0f, 9387.0f, -9388.0f, 9389.0f, -9390.0f, 9391.0f, -9392.0f, 9393.0f, -9394.0f, 9395.0f, -9396.0f, 9397.0f, -9398.0f, 9399.0f, -9400.0f, 9401.0f, -9402.0f, 9403.0f, -9404.0f, 9405.0f, -9406.0f, 9407.0f, -9408.0f, 9409.0f, -9410.0f, 9411.0f, -9412.0f, 9413.0f, -9414.0f, 9415.0f, -9416.0f, 9417.0f, -9418.0f, 9419.0f, -9420.0f, 9421.0f, -9422.0f, 9423.0f, -9424.0f, 9425.0f, -9426.0f, 9427.0f, -9428.0f, 9429.0f, -9430.0f, 9431.0f, -9432.0f, 9433.0f, -9434.0f, 9435.0f, -9436.0f, 9437.0f, -9438.0f, 9439.0f, -9440.0f, 9441.0f, -9442.0f, 9443.0f, -9444.0f, 9445.0f, -9446.0f, 9447.0f, -9448.0f, 9449.0f, -9450.0f, 9451.0f, -9452.0f, 9453.0f, -9454.0f, 9455.0f, -9456.0f, 9457.0f, -9458.0f, 9459.0f, -9460.0f, 9461.0f, -9462.0f, 9463.0f, -9464.0f, 9465.0f, 
-9466.0f, 9467.0f, -9468.0f, 9469.0f, -9470.0f, 9471.0f, -9472.0f, 9473.0f, -9474.0f, 9475.0f, -9476.0f, 9477.0f, -9478.0f, 9479.0f, -9480.0f, 9481.0f, -9482.0f, 9483.0f, -9484.0f, 9485.0f, -9486.0f, 9487.0f, -9488.0f, 9489.0f, -9490.0f, 9491.0f, -9492.0f, 9493.0f, -9494.0f, 9495.0f, -9496.0f, 9497.0f, -9498.0f, 9499.0f, -9500.0f, 9501.0f, -9502.0f, 9503.0f, -9504.0f, 9505.0f, -9506.0f, 9507.0f, -9508.0f, 9509.0f, -9510.0f, 9511.0f, -9512.0f, 9513.0f, -9514.0f, 9515.0f, -9516.0f, 9517.0f, -9518.0f, 9519.0f, -9520.0f, 9521.0f, -9522.0f, 9523.0f, -9524.0f, 9525.0f, -9526.0f, 9527.0f, -9528.0f, 9529.0f, -9530.0f, 9531.0f, -9532.0f, 9533.0f, -9534.0f, 9535.0f, -9536.0f, 9537.0f, -9538.0f, 9539.0f, -9540.0f, 9541.0f, -9542.0f, 9543.0f, -9544.0f, 9545.0f, -9546.0f, 9547.0f, -9548.0f, 9549.0f, -9550.0f, 9551.0f, -9552.0f, 9553.0f, -9554.0f, 9555.0f, -9556.0f, 9557.0f, -9558.0f, 9559.0f, -9560.0f, 9561.0f, -9562.0f, 9563.0f, -9564.0f, 9565.0f, -9566.0f, 9567.0f, -9568.0f, 9569.0f, -9570.0f, 9571.0f, -9572.0f, 9573.0f, -9574.0f, 9575.0f, -9576.0f, 9577.0f, -9578.0f, 9579.0f, -9580.0f, 9581.0f, -9582.0f, 9583.0f, -9584.0f, 9585.0f, -9586.0f, 9587.0f, -9588.0f, 9589.0f, -9590.0f, 9591.0f, -9592.0f, 9593.0f, -9594.0f, 9595.0f, -9596.0f, 9597.0f, -9598.0f, 9599.0f, -9600.0f, 9601.0f, -9602.0f, 9603.0f, -9604.0f, 9605.0f, -9606.0f, 9607.0f, -9608.0f, 9609.0f, -9610.0f, 9611.0f, -9612.0f, 9613.0f, -9614.0f, 9615.0f, -9616.0f, 9617.0f, -9618.0f, 9619.0f, -9620.0f, 9621.0f, -9622.0f, 9623.0f, -9624.0f, 9625.0f, -9626.0f, 9627.0f, -9628.0f, 9629.0f, -9630.0f, 9631.0f, -9632.0f, 9633.0f, -9634.0f, 9635.0f, -9636.0f, 9637.0f, -9638.0f, 9639.0f, -9640.0f, 9641.0f, -9642.0f, 9643.0f, -9644.0f, 9645.0f, -9646.0f, 9647.0f, -9648.0f, 9649.0f, -9650.0f, 9651.0f, -9652.0f, 9653.0f, -9654.0f, 9655.0f, -9656.0f, 9657.0f, -9658.0f, 9659.0f, -9660.0f, 9661.0f, -9662.0f, 9663.0f, -9664.0f, 9665.0f, -9666.0f, 9667.0f, -9668.0f, 9669.0f, -9670.0f, 9671.0f, -9672.0f, 9673.0f, -9674.0f, 9675.0f, -9676.0f, 9677.0f, -9678.0f, 9679.0f, -9680.0f, 9681.0f, -9682.0f, 9683.0f, -9684.0f, 9685.0f, -9686.0f, 9687.0f, -9688.0f, 9689.0f, -9690.0f, 9691.0f, -9692.0f, 9693.0f, -9694.0f, 9695.0f, -9696.0f, 9697.0f, -9698.0f, 9699.0f, -9700.0f, 9701.0f, -9702.0f, 9703.0f, -9704.0f, 9705.0f, -9706.0f, 9707.0f, -9708.0f, 9709.0f, -9710.0f, 9711.0f, -9712.0f, 9713.0f, -9714.0f, 9715.0f, -9716.0f, 9717.0f, -9718.0f, 9719.0f, -9720.0f, 9721.0f, -9722.0f, 9723.0f, -9724.0f, 9725.0f, -9726.0f, 9727.0f, -9728.0f, 9729.0f, -9730.0f, 9731.0f, -9732.0f, 9733.0f, -9734.0f, 9735.0f, -9736.0f, 9737.0f, -9738.0f, 9739.0f, -9740.0f, 9741.0f, -9742.0f, 9743.0f, -9744.0f, 9745.0f, -9746.0f, 9747.0f, -9748.0f, 9749.0f, -9750.0f, 9751.0f, -9752.0f, 9753.0f, -9754.0f, 9755.0f, -9756.0f, 9757.0f, -9758.0f, 9759.0f, -9760.0f, 9761.0f, -9762.0f, 9763.0f, -9764.0f, 9765.0f, -9766.0f, 9767.0f, -9768.0f, 9769.0f, -9770.0f, 9771.0f, -9772.0f, 9773.0f, -9774.0f, 9775.0f, -9776.0f, 9777.0f, -9778.0f, 9779.0f, -9780.0f, 9781.0f, -9782.0f, 9783.0f, -9784.0f, 9785.0f, -9786.0f, 9787.0f, -9788.0f, 9789.0f, -9790.0f, 9791.0f, -9792.0f, 9793.0f, -9794.0f, 9795.0f, -9796.0f, 9797.0f, -9798.0f, 9799.0f, -9800.0f, 9801.0f, -9802.0f, 9803.0f, -9804.0f, 9805.0f, -9806.0f, 9807.0f, -9808.0f, 9809.0f, -9810.0f, 9811.0f, -9812.0f, 9813.0f, -9814.0f, 9815.0f, -9816.0f, 9817.0f, -9818.0f, 9819.0f, -9820.0f, 9821.0f, -9822.0f, 9823.0f, -9824.0f, 9825.0f, -9826.0f, 9827.0f, -9828.0f, 9829.0f, -9830.0f, 9831.0f, -9832.0f, 9833.0f, -9834.0f, 9835.0f, -9836.0f, 9837.0f, -9838.0f, 9839.0f, 
-9840.0f, 9841.0f, -9842.0f, 9843.0f, -9844.0f, 9845.0f, -9846.0f, 9847.0f, -9848.0f, 9849.0f, -9850.0f, 9851.0f, -9852.0f, 9853.0f, -9854.0f, 9855.0f, -9856.0f, 9857.0f, -9858.0f, 9859.0f, -9860.0f, 9861.0f, -9862.0f, 9863.0f, -9864.0f, 9865.0f, -9866.0f, 9867.0f, -9868.0f, 9869.0f, -9870.0f, 9871.0f, -9872.0f, 9873.0f, -9874.0f, 9875.0f, -9876.0f, 9877.0f, -9878.0f, 9879.0f, -9880.0f, 9881.0f, -9882.0f, 9883.0f, -9884.0f, 9885.0f, -9886.0f, 9887.0f, -9888.0f, 9889.0f, -9890.0f, 9891.0f, -9892.0f, 9893.0f, -9894.0f, 9895.0f, -9896.0f, 9897.0f, -9898.0f, 9899.0f, -9900.0f, 9901.0f, -9902.0f, 9903.0f, -9904.0f, 9905.0f, -9906.0f, 9907.0f, -9908.0f, 9909.0f, -9910.0f, 9911.0f, -9912.0f, 9913.0f, -9914.0f, 9915.0f, -9916.0f, 9917.0f, -9918.0f, 9919.0f, -9920.0f, 9921.0f, -9922.0f, 9923.0f, -9924.0f, 9925.0f, -9926.0f, 9927.0f, -9928.0f, 9929.0f, -9930.0f, 9931.0f, -9932.0f, 9933.0f, -9934.0f, 9935.0f, -9936.0f, 9937.0f, -9938.0f, 9939.0f, -9940.0f, 9941.0f, -9942.0f, 9943.0f, -9944.0f, 9945.0f, -9946.0f, 9947.0f, -9948.0f, 9949.0f, -9950.0f, 9951.0f, -9952.0f, 9953.0f, -9954.0f, 9955.0f, -9956.0f, 9957.0f, -9958.0f, 9959.0f, -9960.0f, 9961.0f, -9962.0f, 9963.0f, -9964.0f, 9965.0f, -9966.0f, 9967.0f, -9968.0f, 9969.0f, -9970.0f, 9971.0f, -9972.0f, 9973.0f, -9974.0f, 9975.0f, -9976.0f, 9977.0f, -9978.0f, 9979.0f, -9980.0f, 9981.0f, -9982.0f, 9983.0f, -9984.0f, 9985.0f, -9986.0f, 9987.0f, -9988.0f, 9989.0f, -9990.0f, 9991.0f, -9992.0f, 9993.0f, -9994.0f, 9995.0f, -9996.0f, 9997.0f, -9998.0f, 9999.0f, -10000.0f, 10001.0f, -10002.0f, 10003.0f, -10004.0f, 10005.0f, -10006.0f, 10007.0f, -10008.0f, 10009.0f, -10010.0f, 10011.0f, -10012.0f, 10013.0f, -10014.0f, 10015.0f, -10016.0f, 10017.0f, -10018.0f, 10019.0f, -10020.0f, 10021.0f, -10022.0f, 10023.0f, -10024.0f, 10025.0f, -10026.0f, 10027.0f, -10028.0f, 10029.0f, -10030.0f, 10031.0f, -10032.0f, 10033.0f, -10034.0f, 10035.0f, -10036.0f, 10037.0f, -10038.0f, 10039.0f, -10040.0f, 10041.0f, -10042.0f, 10043.0f, -10044.0f, 10045.0f, -10046.0f, 10047.0f, -10048.0f, 10049.0f, -10050.0f, 10051.0f, -10052.0f, 10053.0f, -10054.0f, 10055.0f, -10056.0f, 10057.0f, -10058.0f, 10059.0f, -10060.0f, 10061.0f, -10062.0f, 10063.0f, -10064.0f, 10065.0f, -10066.0f, 10067.0f, -10068.0f, 10069.0f, -10070.0f, 10071.0f, -10072.0f, 10073.0f, -10074.0f, 10075.0f, -10076.0f, 10077.0f, -10078.0f, 10079.0f, -10080.0f, 10081.0f, -10082.0f, 10083.0f, -10084.0f, 10085.0f, -10086.0f, 10087.0f, -10088.0f, 10089.0f, -10090.0f, 10091.0f, -10092.0f, 10093.0f, -10094.0f, 10095.0f, -10096.0f, 10097.0f, -10098.0f, 10099.0f, -10100.0f, 10101.0f, -10102.0f, 10103.0f, -10104.0f, 10105.0f, -10106.0f, 10107.0f, -10108.0f, 10109.0f, -10110.0f, 10111.0f, -10112.0f, 10113.0f, -10114.0f, 10115.0f, -10116.0f, 10117.0f, -10118.0f, 10119.0f, -10120.0f, 10121.0f, -10122.0f, 10123.0f, -10124.0f, 10125.0f, -10126.0f, 10127.0f, -10128.0f, 10129.0f, -10130.0f, 10131.0f, -10132.0f, 10133.0f, -10134.0f, 10135.0f, -10136.0f, 10137.0f, -10138.0f, 10139.0f, -10140.0f, 10141.0f, -10142.0f, 10143.0f, -10144.0f, 10145.0f, -10146.0f, 10147.0f, -10148.0f, 10149.0f, -10150.0f, 10151.0f, -10152.0f, 10153.0f, -10154.0f, 10155.0f, -10156.0f, 10157.0f, -10158.0f, 10159.0f, -10160.0f, 10161.0f, -10162.0f, 10163.0f, -10164.0f, 10165.0f, -10166.0f, 10167.0f, -10168.0f, 10169.0f, -10170.0f, 10171.0f, -10172.0f, 10173.0f, -10174.0f, 10175.0f, -10176.0f, 10177.0f, -10178.0f, 10179.0f, -10180.0f, 10181.0f, -10182.0f, 10183.0f, -10184.0f, 10185.0f, -10186.0f, 10187.0f, -10188.0f, 10189.0f, -10190.0f, 10191.0f, -10192.0f, 
10193.0f, -10194.0f, 10195.0f, -10196.0f, 10197.0f, -10198.0f, 10199.0f, -10200.0f, 10201.0f, -10202.0f, 10203.0f, -10204.0f, 10205.0f, -10206.0f, 10207.0f, -10208.0f, 10209.0f, -10210.0f, 10211.0f, -10212.0f, 10213.0f, -10214.0f, 10215.0f, -10216.0f, 10217.0f, -10218.0f, 10219.0f, -10220.0f, 10221.0f, -10222.0f, 10223.0f, -10224.0f, 10225.0f, -10226.0f, 10227.0f, -10228.0f, 10229.0f, -10230.0f, 10231.0f, -10232.0f, 10233.0f, -10234.0f, 10235.0f, -10236.0f, 10237.0f, -10238.0f, 10239.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 1, 0, 3, 0, 5, 0, 7, 0, 9, 0, 11, 0, 13, 0, 15, 0, 17, 0, 19, 0, 21, 0, 23, 0, 25, 0, 27, 0, 29, 0, 31, 0, 33, 0, 35, 0, 37, 0, 39, 0, 41, 0, 43, 0, 45, 0, 47, 0, 49, 0, 51, 0, 53, 0, 55, 0, 57, 0, 59, 0, 61, 0, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 0, 87, 0, 89, 0, 91, 0, 93, 0, 95, 0, 97, 0, 99, 0, 101, 0, 103, 0, 105, 0, 107, 0, 109, 0, 111, 0, 113, 0, 115, 0, 117, 0, 119, 0, 121, 0, 123, 0, 125, 0, 127, 0, 129, 0, 131, 0, 133, 0, 135, 0, 137, 0, 139, 0, 141, 0, 143, 0, 145, 0, 147, 0, 149, 0, 151, 0, 153, 0, 155, 0, 157, 0, 159, 0, 161, 0, 163, 0, 165, 0, 167, 0, 169, 0, 171, 0, 173, 0, 175, 0, 177, 0, 179, 0, 181, 0, 183, 0, 185, 0, 187, 0, 189, 0, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 0, 205, 0, 207, 0, 209, 0, 211, 0, 213, 0, 215, 0, 217, 0, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 0, 235, 0, 237, 0, 239, 0, 241, 0, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 0, 255, 0, 257, 0, 259, 0, 261, 0, 263, 0, 265, 0, 267, 0, 269, 0, 271, 0, 273, 0, 275, 0, 277, 0, 279, 0, 281, 0, 283, 0, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 0, 303, 0, 305, 0, 307, 0, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 0, 321, 0, 323, 0, 325, 0, 327, 0, 329, 0, 331, 0, 333, 0, 335, 0, 337, 0, 339, 0, 341, 0, 343, 0, 345, 0, 347, 0, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 0, 363, 0, 365, 0, 367, 0, 369, 0, 371, 0, 373, 0, 375, 0, 377, 0, 379, 0, 381, 0, 383, 0, 385, 0, 387, 0, 389, 0, 391, 0, 393, 0, 395, 0, 397, 0, 399, 0, 401, 0, 403, 0, 405, 0, 407, 0, 409, 0, 411, 0, 413, 0, 415, 0, 417, 0, 419, 0, 421, 0, 423, 0, 425, 0, 427, 0, 429, 0, 431, 0, 433, 0, 435, 0, 437, 0, 439, 0, 441, 0, 443, 0, 445, 0, 447, 0, 449, 0, 451, 0, 453, 0, 455, 0, 457, 0, 459, 0, 461, 0, 463, 0, 465, 0, 467, 0, 469, 0, 471, 0, 473, 0, 475, 0, 477, 0, 479, 0, 481, 0, 483, 0, 485, 0, 487, 0, 489, 0, 491, 0, 493, 0, 495, 0, 497, 0, 499, 0, 501, 0, 503, 0, 505, 0, 507, 0, 509, 0, 511, 0, 513, 0, 515, 0, 517, 0, 519, 0, 521, 0, 523, 0, 525, 0, 527, 0, 529, 0, 531, 0, 533, 0, 535, 0, 537, 0, 539, 0, 541, 0, 543, 0, 545, 0, 547, 0, 549, 0, 551, 0, 553, 0, 555, 0, 557, 0, 559, 0, 561, 0, 563, 0, 565, 0, 567, 0, 569, 0, 571, 0, 573, 0, 575, 0, 577, 0, 579, 0, 581, 0, 583, 0, 585, 0, 587, 0, 589, 0, 591, 0, 593, 0, 595, 0, 597, 0, 599, 0, 601, 0, 603, 0, 605, 0, 607, 0, 609, 0, 611, 0, 613, 0, 615, 0, 617, 0, 619, 0, 621, 0, 623, 0, 625, 0, 627, 0, 629, 0, 631, 0, 633, 0, 635, 0, 637, 0, 639, 0, 641, 0, 643, 0, 645, 0, 647, 0, 649, 0, 651, 0, 653, 0, 655, 0, 657, 0, 659, 0, 661, 0, 663, 0, 665, 0, 667, 0, 669, 0, 671, 0, 673, 0, 675, 0, 677, 0, 679, 0, 681, 0, 683, 0, 685, 0, 687, 0, 689, 0, 691, 0, 693, 0, 695, 0, 697, 0, 699, 0, 701, 0, 703, 0, 705, 0, 707, 0, 709, 0, 711, 0, 713, 0, 715, 0, 717, 0, 719, 0, 721, 0, 723, 0, 725, 0, 727, 0, 729, 0, 731, 0, 733, 0, 735, 0, 737, 0, 739, 0, 741, 0, 743, 0, 745, 0, 747, 0, 749, 0, 751, 0, 753, 0, 755, 0, 757, 0, 759, 0, 761, 0, 763, 0, 765, 0, 767, 0, 769, 0, 771, 0, 773, 0, 775, 0, 777, 0, 779, 0, 781, 0, 783, 0, 785, 0, 787, 0, 789, 0, 791, 0, 793, 0, 795, 0, 797, 0, 799, 0, 801, 0, 803, 0, 805, 0, 807, 0, 809, 0, 811, 0, 813, 0, 815, 0, 817, 0, 819, 0, 821, 0, 823, 0, 825, 0, 827, 0, 829, 0, 831, 0, 833, 0, 835, 0, 837, 0, 839, 0, 841, 0, 843, 0, 845, 0, 847, 0, 849, 0, 851, 0, 853, 0, 855, 0, 857, 0, 859, 0, 861, 0, 863, 0, 865, 0, 867, 0, 869, 0, 871, 0, 873, 0, 875, 0, 877, 0, 879, 0, 881, 0, 883, 0, 885, 0, 887, 0, 889, 0, 891, 0, 893, 0, 895, 0, 897, 0, 899, 
0, 901, 0, 903, 0, 905, 0, 907, 0, 909, 0, 911, 0, 913, 0, 915, 0, 917, 0, 919, 0, 921, 0, 923, 0, 925, 0, 927, 0, 929, 0, 931, 0, 933, 0, 935, 0, 937, 0, 939, 0, 941, 0, 943, 0, 945, 0, 947, 0, 949, 0, 951, 0, 953, 0, 955, 0, 957, 0, 959, 0, 961, 0, 963, 0, 965, 0, 967, 0, 969, 0, 971, 0, 973, 0, 975, 0, 977, 0, 979, 0, 981, 0, 983, 0, 985, 0, 987, 0, 989, 0, 991, 0, 993, 0, 995, 0, 997, 0, 999, 0, 1001, 0, 1003, 0, 1005, 0, 1007, 0, 1009, 0, 1011, 0, 1013, 0, 1015, 0, 1017, 0, 1019, 0, 1021, 0, 1023, 0, 1025, 0, 1027, 0, 1029, 0, 1031, 0, 1033, 0, 1035, 0, 1037, 0, 1039, 0, 1041, 0, 1043, 0, 1045, 0, 1047, 0, 1049, 0, 1051, 0, 1053, 0, 1055, 0, 1057, 0, 1059, 0, 1061, 0, 1063, 0, 1065, 0, 1067, 0, 1069, 0, 1071, 0, 1073, 0, 1075, 0, 1077, 0, 1079, 0, 1081, 0, 1083, 0, 1085, 0, 1087, 0, 1089, 0, 1091, 0, 1093, 0, 1095, 0, 1097, 0, 1099, 0, 1101, 0, 1103, 0, 1105, 0, 1107, 0, 1109, 0, 1111, 0, 1113, 0, 1115, 0, 1117, 0, 1119, 0, 1121, 0, 1123, 0, 1125, 0, 1127, 0, 1129, 0, 1131, 0, 1133, 0, 1135, 0, 1137, 0, 1139, 0, 1141, 0, 1143, 0, 1145, 0, 1147, 0, 1149, 0, 1151, 0, 1153, 0, 1155, 0, 1157, 0, 1159, 0, 1161, 0, 1163, 0, 1165, 0, 1167, 0, 1169, 0, 1171, 0, 1173, 0, 1175, 0, 1177, 0, 1179, 0, 1181, 0, 1183, 0, 1185, 0, 1187, 0, 1189, 0, 1191, 0, 1193, 0, 1195, 0, 1197, 0, 1199, 0, 1201, 0, 1203, 0, 1205, 0, 1207, 0, 1209, 0, 1211, 0, 1213, 0, 1215, 0, 1217, 0, 1219, 0, 1221, 0, 1223, 0, 1225, 0, 1227, 0, 1229, 0, 1231, 0, 1233, 0, 1235, 0, 1237, 0, 1239, 0, 1241, 0, 1243, 0, 1245, 0, 1247, 0, 1249, 0, 1251, 0, 1253, 0, 1255, 0, 1257, 0, 1259, 0, 1261, 0, 1263, 0, 1265, 0, 1267, 0, 1269, 0, 1271, 0, 1273, 0, 1275, 0, 1277, 0, 1279, 0, 1281, 0, 1283, 0, 1285, 0, 1287, 0, 1289, 0, 1291, 0, 1293, 0, 1295, 0, 1297, 0, 1299, 0, 1301, 0, 1303, 0, 1305, 0, 1307, 0, 1309, 0, 1311, 0, 1313, 0, 1315, 0, 1317, 0, 1319, 0, 1321, 0, 1323, 0, 1325, 0, 1327, 0, 1329, 0, 1331, 0, 1333, 0, 1335, 0, 1337, 0, 1339, 0, 1341, 0, 1343, 0, 1345, 0, 1347, 0, 1349, 0, 1351, 0, 1353, 0, 1355, 0, 1357, 0, 1359, 0, 1361, 0, 1363, 0, 1365, 0, 1367, 0, 1369, 0, 1371, 0, 1373, 0, 1375, 0, 1377, 0, 1379, 0, 1381, 0, 1383, 0, 1385, 0, 1387, 0, 1389, 0, 1391, 0, 1393, 0, 1395, 0, 1397, 0, 1399, 0, 1401, 0, 1403, 0, 1405, 0, 1407, 0, 1409, 0, 1411, 0, 1413, 0, 1415, 0, 1417, 0, 1419, 0, 1421, 0, 1423, 0, 1425, 0, 1427, 0, 1429, 0, 1431, 0, 1433, 0, 1435, 0, 1437, 0, 1439, 0, 1441, 0, 1443, 0, 1445, 0, 1447, 0, 1449, 0, 1451, 0, 1453, 0, 1455, 0, 1457, 0, 1459, 0, 1461, 0, 1463, 0, 1465, 0, 1467, 0, 1469, 0, 1471, 0, 1473, 0, 1475, 0, 1477, 0, 1479, 0, 1481, 0, 1483, 0, 1485, 0, 1487, 0, 1489, 0, 1491, 0, 1493, 0, 1495, 0, 1497, 0, 1499, 0, 1501, 0, 1503, 0, 1505, 0, 1507, 0, 1509, 0, 1511, 0, 1513, 0, 1515, 0, 1517, 0, 1519, 0, 1521, 0, 1523, 0, 1525, 0, 1527, 0, 1529, 0, 1531, 0, 1533, 0, 1535, 0, 1537, 0, 1539, 0, 1541, 0, 1543, 0, 1545, 0, 1547, 0, 1549, 0, 1551, 0, 1553, 0, 1555, 0, 1557, 0, 1559, 0, 1561, 0, 1563, 0, 1565, 0, 1567, 0, 1569, 0, 1571, 0, 1573, 0, 1575, 0, 1577, 0, 1579, 0, 1581, 0, 1583, 0, 1585, 0, 1587, 0, 1589, 0, 1591, 0, 1593, 0, 1595, 0, 1597, 0, 1599, 0, 1601, 0, 1603, 0, 1605, 0, 1607, 0, 1609, 0, 1611, 0, 1613, 0, 1615, 0, 1617, 0, 1619, 0, 1621, 0, 1623, 0, 1625, 0, 1627, 0, 1629, 0, 1631, 0, 1633, 0, 1635, 0, 1637, 0, 1639, 0, 1641, 0, 1643, 0, 1645, 0, 1647, 0, 1649, 0, 1651, 0, 1653, 0, 1655, 0, 1657, 0, 1659, 0, 1661, 0, 1663, 0, 1665, 0, 1667, 0, 1669, 0, 1671, 0, 1673, 0, 1675, 0, 1677, 0, 1679, 0, 1681, 0, 1683, 0, 1685, 0, 1687, 0, 1689, 0, 1691, 0, 1693, 0, 1695, 0, 1697, 0, 1699, 0, 
1701, 0, 1703, 0, 1705, 0, 1707, 0, 1709, 0, 1711, 0, 1713, 0, 1715, 0, 1717, 0, 1719, 0, 1721, 0, 1723, 0, 1725, 0, 1727, 0, 1729, 0, 1731, 0, 1733, 0, 1735, 0, 1737, 0, 1739, 0, 1741, 0, 1743, 0, 1745, 0, 1747, 0, 1749, 0, 1751, 0, 1753, 0, 1755, 0, 1757, 0, 1759, 0, 1761, 0, 1763, 0, 1765, 0, 1767, 0, 1769, 0, 1771, 0, 1773, 0, 1775, 0, 1777, 0, 1779, 0, 1781, 0, 1783, 0, 1785, 0, 1787, 0, 1789, 0, 1791, 0, 1793, 0, 1795, 0, 1797, 0, 1799, 0, 1801, 0, 1803, 0, 1805, 0, 1807, 0, 1809, 0, 1811, 0, 1813, 0, 1815, 0, 1817, 0, 1819, 0, 1821, 0, 1823, 0, 1825, 0, 1827, 0, 1829, 0, 1831, 0, 1833, 0, 1835, 0, 1837, 0, 1839, 0, 1841, 0, 1843, 0, 1845, 0, 1847, 0, 1849, 0, 1851, 0, 1853, 0, 1855, 0, 1857, 0, 1859, 0, 1861, 0, 1863, 0, 1865, 0, 1867, 0, 1869, 0, 1871, 0, 1873, 0, 1875, 0, 1877, 0, 1879, 0, 1881, 0, 1883, 0, 1885, 0, 1887, 0, 1889, 0, 1891, 0, 1893, 0, 1895, 0, 1897, 0, 1899, 0, 1901, 0, 1903, 0, 1905, 0, 1907, 0, 1909, 0, 1911, 0, 1913, 0, 1915, 0, 1917, 0, 1919, 0, 1921, 0, 1923, 0, 1925, 0, 1927, 0, 1929, 0, 1931, 0, 1933, 0, 1935, 0, 1937, 0, 1939, 0, 1941, 0, 1943, 0, 1945, 0, 1947, 0, 1949, 0, 1951, 0, 1953, 0, 1955, 0, 1957, 0, 1959, 0, 1961, 0, 1963, 0, 1965, 0, 1967, 0, 1969, 0, 1971, 0, 1973, 0, 1975, 0, 1977, 0, 1979, 0, 1981, 0, 1983, 0, 1985, 0, 1987, 0, 1989, 0, 1991, 0, 1993, 0, 1995, 0, 1997, 0, 1999, 0, 2001, 0, 2003, 0, 2005, 0, 2007, 0, 2009, 0, 2011, 0, 2013, 0, 2015, 0, 2017, 0, 2019, 0, 2021, 0, 2023, 0, 2025, 0, 2027, 0, 2029, 0, 2031, 0, 2033, 0, 2035, 0, 2037, 0, 2039, 0, 2041, 0, 2043, 0, 2045, 0, 2047, 0, 2049, 0, 2051, 0, 2053, 0, 2055, 0, 2057, 0, 2059, 0, 2061, 0, 2063, 0, 2065, 0, 2067, 0, 2069, 0, 2071, 0, 2073, 0, 2075, 0, 2077, 0, 2079, 0, 2081, 0, 2083, 0, 2085, 0, 2087, 0, 2089, 0, 2091, 0, 2093, 0, 2095, 0, 2097, 0, 2099, 0, 2101, 0, 2103, 0, 2105, 0, 2107, 0, 2109, 0, 2111, 0, 2113, 0, 2115, 0, 2117, 0, 2119, 0, 2121, 0, 2123, 0, 2125, 0, 2127, 0, 2129, 0, 2131, 0, 2133, 0, 2135, 0, 2137, 0, 2139, 0, 2141, 0, 2143, 0, 2145, 0, 2147, 0, 2149, 0, 2151, 0, 2153, 0, 2155, 0, 2157, 0, 2159, 0, 2161, 0, 2163, 0, 2165, 0, 2167, 0, 2169, 0, 2171, 0, 2173, 0, 2175, 0, 2177, 0, 2179, 0, 2181, 0, 2183, 0, 2185, 0, 2187, 0, 2189, 0, 2191, 0, 2193, 0, 2195, 0, 2197, 0, 2199, 0, 2201, 0, 2203, 0, 2205, 0, 2207, 0, 2209, 0, 2211, 0, 2213, 0, 2215, 0, 2217, 0, 2219, 0, 2221, 0, 2223, 0, 2225, 0, 2227, 0, 2229, 0, 2231, 0, 2233, 0, 2235, 0, 2237, 0, 2239, 0, 2241, 0, 2243, 0, 2245, 0, 2247, 0, 2249, 0, 2251, 0, 2253, 0, 2255, 0, 2257, 0, 2259, 0, 2261, 0, 2263, 0, 2265, 0, 2267, 0, 2269, 0, 2271, 0, 2273, 0, 2275, 0, 2277, 0, 2279, 0, 2281, 0, 2283, 0, 2285, 0, 2287, 0, 2289, 0, 2291, 0, 2293, 0, 2295, 0, 2297, 0, 2299, 0, 2301, 0, 2303, 0, 2305, 0, 2307, 0, 2309, 0, 2311, 0, 2313, 0, 2315, 0, 2317, 0, 2319, 0, 2321, 0, 2323, 0, 2325, 0, 2327, 0, 2329, 0, 2331, 0, 2333, 0, 2335, 0, 2337, 0, 2339, 0, 2341, 0, 2343, 0, 2345, 0, 2347, 0, 2349, 0, 2351, 0, 2353, 0, 2355, 0, 2357, 0, 2359, 0, 2361, 0, 2363, 0, 2365, 0, 2367, 0, 2369, 0, 2371, 0, 2373, 0, 2375, 0, 2377, 0, 2379, 0, 2381, 0, 2383, 0, 2385, 0, 2387, 0, 2389, 0, 2391, 0, 2393, 0, 2395, 0, 2397, 0, 2399, 0, 2401, 0, 2403, 0, 2405, 0, 2407, 0, 2409, 0, 2411, 0, 2413, 0, 2415, 0, 2417, 0, 2419, 0, 2421, 0, 2423, 0, 2425, 0, 2427, 0, 2429, 0, 2431, 0, 2433, 0, 2435, 0, 2437, 0, 2439, 0, 2441, 0, 2443, 0, 2445, 0, 2447, 0, 2449, 0, 2451, 0, 2453, 0, 2455, 0, 2457, 0, 2459, 0, 2461, 0, 2463, 0, 2465, 0, 2467, 0, 2469, 0, 2471, 0, 2473, 0, 2475, 0, 2477, 0, 2479, 0, 2481, 0, 2483, 0, 2485, 0, 2487, 0, 2489, 0, 
2491, 0, 2493, 0, 2495, 0, 2497, 0, 2499, 0, 2501, 0, 2503, 0, 2505, 0, 2507, 0, 2509, 0, 2511, 0, 2513, 0, 2515, 0, 2517, 0, 2519, 0, 2521, 0, 2523, 0, 2525, 0, 2527, 0, 2529, 0, 2531, 0, 2533, 0, 2535, 0, 2537, 0, 2539, 0, 2541, 0, 2543, 0, 2545, 0, 2547, 0, 2549, 0, 2551, 0, 2553, 0, 2555, 0, 2557, 0, 2559, 0, 2561, 0, 2563, 0, 2565, 0, 2567, 0, 2569, 0, 2571, 0, 2573, 0, 2575, 0, 2577, 0, 2579, 0, 2581, 0, 2583, 0, 2585, 0, 2587, 0, 2589, 0, 2591, 0, 2593, 0, 2595, 0, 2597, 0, 2599, 0, 2601, 0, 2603, 0, 2605, 0, 2607, 0, 2609, 0, 2611, 0, 2613, 0, 2615, 0, 2617, 0, 2619, 0, 2621, 0, 2623, 0, 2625, 0, 2627, 0, 2629, 0, 2631, 0, 2633, 0, 2635, 0, 2637, 0, 2639, 0, 2641, 0, 2643, 0, 2645, 0, 2647, 0, 2649, 0, 2651, 0, 2653, 0, 2655, 0, 2657, 0, 2659, 0, 2661, 0, 2663, 0, 2665, 0, 2667, 0, 2669, 0, 2671, 0, 2673, 0, 2675, 0, 2677, 0, 2679, 0, 2681, 0, 2683, 0, 2685, 0, 2687, 0, 2689, 0, 2691, 0, 2693, 0, 2695, 0, 2697, 0, 2699, 0, 2701, 0, 2703, 0, 2705, 0, 2707, 0, 2709, 0, 2711, 0, 2713, 0, 2715, 0, 2717, 0, 2719, 0, 2721, 0, 2723, 0, 2725, 0, 2727, 0, 2729, 0, 2731, 0, 2733, 0, 2735, 0, 2737, 0, 2739, 0, 2741, 0, 2743, 0, 2745, 0, 2747, 0, 2749, 0, 2751, 0, 2753, 0, 2755, 0, 2757, 0, 2759, 0, 2761, 0, 2763, 0, 2765, 0, 2767, 0, 2769, 0, 2771, 0, 2773, 0, 2775, 0, 2777, 0, 2779, 0, 2781, 0, 2783, 0, 2785, 0, 2787, 0, 2789, 0, 2791, 0, 2793, 0, 2795, 0, 2797, 0, 2799, 0, 2801, 0, 2803, 0, 2805, 0, 2807, 0, 2809, 0, 2811, 0, 2813, 0, 2815, 0, 2817, 0, 2819, 0, 2821, 0, 2823, 0, 2825, 0, 2827, 0, 2829, 0, 2831, 0, 2833, 0, 2835, 0, 2837, 0, 2839, 0, 2841, 0, 2843, 0, 2845, 0, 2847, 0, 2849, 0, 2851, 0, 2853, 0, 2855, 0, 2857, 0, 2859, 0, 2861, 0, 2863, 0, 2865, 0, 2867, 0, 2869, 0, 2871, 0, 2873, 0, 2875, 0, 2877, 0, 2879, 0, 2881, 0, 2883, 0, 2885, 0, 2887, 0, 2889, 0, 2891, 0, 2893, 0, 2895, 0, 2897, 0, 2899, 0, 2901, 0, 2903, 0, 2905, 0, 2907, 0, 2909, 0, 2911, 0, 2913, 0, 2915, 0, 2917, 0, 2919, 0, 2921, 0, 2923, 0, 2925, 0, 2927, 0, 2929, 0, 2931, 0, 2933, 0, 2935, 0, 2937, 0, 2939, 0, 2941, 0, 2943, 0, 2945, 0, 2947, 0, 2949, 0, 2951, 0, 2953, 0, 2955, 0, 2957, 0, 2959, 0, 2961, 0, 2963, 0, 2965, 0, 2967, 0, 2969, 0, 2971, 0, 2973, 0, 2975, 0, 2977, 0, 2979, 0, 2981, 0, 2983, 0, 2985, 0, 2987, 0, 2989, 0, 2991, 0, 2993, 0, 2995, 0, 2997, 0, 2999, 0, 3001, 0, 3003, 0, 3005, 0, 3007, 0, 3009, 0, 3011, 0, 3013, 0, 3015, 0, 3017, 0, 3019, 0, 3021, 0, 3023, 0, 3025, 0, 3027, 0, 3029, 0, 3031, 0, 3033, 0, 3035, 0, 3037, 0, 3039, 0, 3041, 0, 3043, 0, 3045, 0, 3047, 0, 3049, 0, 3051, 0, 3053, 0, 3055, 0, 3057, 0, 3059, 0, 3061, 0, 3063, 0, 3065, 0, 3067, 0, 3069, 0, 3071, 0, 3073, 0, 3075, 0, 3077, 0, 3079, 0, 3081, 0, 3083, 0, 3085, 0, 3087, 0, 3089, 0, 3091, 0, 3093, 0, 3095, 0, 3097, 0, 3099, 0, 3101, 0, 3103, 0, 3105, 0, 3107, 0, 3109, 0, 3111, 0, 3113, 0, 3115, 0, 3117, 0, 3119, 0, 3121, 0, 3123, 0, 3125, 0, 3127, 0, 3129, 0, 3131, 0, 3133, 0, 3135, 0, 3137, 0, 3139, 0, 3141, 0, 3143, 0, 3145, 0, 3147, 0, 3149, 0, 3151, 0, 3153, 0, 3155, 0, 3157, 0, 3159, 0, 3161, 0, 3163, 0, 3165, 0, 3167, 0, 3169, 0, 3171, 0, 3173, 0, 3175, 0, 3177, 0, 3179, 0, 3181, 0, 3183, 0, 3185, 0, 3187, 0, 3189, 0, 3191, 0, 3193, 0, 3195, 0, 3197, 0, 3199, 0, 3201, 0, 3203, 0, 3205, 0, 3207, 0, 3209, 0, 3211, 0, 3213, 0, 3215, 0, 3217, 0, 3219, 0, 3221, 0, 3223, 0, 3225, 0, 3227, 0, 3229, 0, 3231, 0, 3233, 0, 3235, 0, 3237, 0, 3239, 0, 3241, 0, 3243, 0, 3245, 0, 3247, 0, 3249, 0, 3251, 0, 3253, 0, 3255, 0, 3257, 0, 3259, 0, 3261, 0, 3263, 0, 3265, 0, 3267, 0, 3269, 0, 3271, 0, 3273, 0, 3275, 0, 3277, 0, 3279, 0, 
3281, 0, 3283, 0, 3285, 0, 3287, 0, 3289, 0, 3291, 0, 3293, 0, 3295, 0, 3297, 0, 3299, 0, 3301, 0, 3303, 0, 3305, 0, 3307, 0, 3309, 0, 3311, 0, 3313, 0, 3315, 0, 3317, 0, 3319, 0, 3321, 0, 3323, 0, 3325, 0, 3327, 0, 3329, 0, 3331, 0, 3333, 0, 3335, 0, 3337, 0, 3339, 0, 3341, 0, 3343, 0, 3345, 0, 3347, 0, 3349, 0, 3351, 0, 3353, 0, 3355, 0, 3357, 0, 3359, 0, 3361, 0, 3363, 0, 3365, 0, 3367, 0, 3369, 0, 3371, 0, 3373, 0, 3375, 0, 3377, 0, 3379, 0, 3381, 0, 3383, 0, 3385, 0, 3387, 0, 3389, 0, 3391, 0, 3393, 0, 3395, 0, 3397, 0, 3399, 0, 3401, 0, 3403, 0, 3405, 0, 3407, 0, 3409, 0, 3411, 0, 3413, 0, 3415, 0, 3417, 0, 3419, 0, 3421, 0, 3423, 0, 3425, 0, 3427, 0, 3429, 0, 3431, 0, 3433, 0, 3435, 0, 3437, 0, 3439, 0, 3441, 0, 3443, 0, 3445, 0, 3447, 0, 3449, 0, 3451, 0, 3453, 0, 3455, 0, 3457, 0, 3459, 0, 3461, 0, 3463, 0, 3465, 0, 3467, 0, 3469, 0, 3471, 0, 3473, 0, 3475, 0, 3477, 0, 3479, 0, 3481, 0, 3483, 0, 3485, 0, 3487, 0, 3489, 0, 3491, 0, 3493, 0, 3495, 0, 3497, 0, 3499, 0, 3501, 0, 3503, 0, 3505, 0, 3507, 0, 3509, 0, 3511, 0, 3513, 0, 3515, 0, 3517, 0, 3519, 0, 3521, 0, 3523, 0, 3525, 0, 3527, 0, 3529, 0, 3531, 0, 3533, 0, 3535, 0, 3537, 0, 3539, 0, 3541, 0, 3543, 0, 3545, 0, 3547, 0, 3549, 0, 3551, 0, 3553, 0, 3555, 0, 3557, 0, 3559, 0, 3561, 0, 3563, 0, 3565, 0, 3567, 0, 3569, 0, 3571, 0, 3573, 0, 3575, 0, 3577, 0, 3579, 0, 3581, 0, 3583, 0, 3585, 0, 3587, 0, 3589, 0, 3591, 0, 3593, 0, 3595, 0, 3597, 0, 3599, 0, 3601, 0, 3603, 0, 3605, 0, 3607, 0, 3609, 0, 3611, 0, 3613, 0, 3615, 0, 3617, 0, 3619, 0, 3621, 0, 3623, 0, 3625, 0, 3627, 0, 3629, 0, 3631, 0, 3633, 0, 3635, 0, 3637, 0, 3639, 0, 3641, 0, 3643, 0, 3645, 0, 3647, 0, 3649, 0, 3651, 0, 3653, 0, 3655, 0, 3657, 0, 3659, 0, 3661, 0, 3663, 0, 3665, 0, 3667, 0, 3669, 0, 3671, 0, 3673, 0, 3675, 0, 3677, 0, 3679, 0, 3681, 0, 3683, 0, 3685, 0, 3687, 0, 3689, 0, 3691, 0, 3693, 0, 3695, 0, 3697, 0, 3699, 0, 3701, 0, 3703, 0, 3705, 0, 3707, 0, 3709, 0, 3711, 0, 3713, 0, 3715, 0, 3717, 0, 3719, 0, 3721, 0, 3723, 0, 3725, 0, 3727, 0, 3729, 0, 3731, 0, 3733, 0, 3735, 0, 3737, 0, 3739, 0, 3741, 0, 3743, 0, 3745, 0, 3747, 0, 3749, 0, 3751, 0, 3753, 0, 3755, 0, 3757, 0, 3759, 0, 3761, 0, 3763, 0, 3765, 0, 3767, 0, 3769, 0, 3771, 0, 3773, 0, 3775, 0, 3777, 0, 3779, 0, 3781, 0, 3783, 0, 3785, 0, 3787, 0, 3789, 0, 3791, 0, 3793, 0, 3795, 0, 3797, 0, 3799, 0, 3801, 0, 3803, 0, 3805, 0, 3807, 0, 3809, 0, 3811, 0, 3813, 0, 3815, 0, 3817, 0, 3819, 0, 3821, 0, 3823, 0, 3825, 0, 3827, 0, 3829, 0, 3831, 0, 3833, 0, 3835, 0, 3837, 0, 3839, 0, 3841, 0, 3843, 0, 3845, 0, 3847, 0, 3849, 0, 3851, 0, 3853, 0, 3855, 0, 3857, 0, 3859, 0, 3861, 0, 3863, 0, 3865, 0, 3867, 0, 3869, 0, 3871, 0, 3873, 0, 3875, 0, 3877, 0, 3879, 0, 3881, 0, 3883, 0, 3885, 0, 3887, 0, 3889, 0, 3891, 0, 3893, 0, 3895, 0, 3897, 0, 3899, 0, 3901, 0, 3903, 0, 3905, 0, 3907, 0, 3909, 0, 3911, 0, 3913, 0, 3915, 0, 3917, 0, 3919, 0, 3921, 0, 3923, 0, 3925, 0, 3927, 0, 3929, 0, 3931, 0, 3933, 0, 3935, 0, 3937, 0, 3939, 0, 3941, 0, 3943, 0, 3945, 0, 3947, 0, 3949, 0, 3951, 0, 3953, 0, 3955, 0, 3957, 0, 3959, 0, 3961, 0, 3963, 0, 3965, 0, 3967, 0, 3969, 0, 3971, 0, 3973, 0, 3975, 0, 3977, 0, 3979, 0, 3981, 0, 3983, 0, 3985, 0, 3987, 0, 3989, 0, 3991, 0, 3993, 0, 3995, 0, 3997, 0, 3999, 0, 4001, 0, 4003, 0, 4005, 0, 4007, 0, 4009, 0, 4011, 0, 4013, 0, 4015, 0, 4017, 0, 4019, 0, 4021, 0, 4023, 0, 4025, 0, 4027, 0, 4029, 0, 4031, 0, 4033, 0, 4035, 0, 4037, 0, 4039, 0, 4041, 0, 4043, 0, 4045, 0, 4047, 0, 4049, 0, 4051, 0, 4053, 0, 4055, 0, 4057, 0, 4059, 0, 4061, 0, 4063, 0, 4065, 0, 4067, 0, 4069, 0, 
4071, 0, 4073, 0, 4075, 0, 4077, 0, 4079, 0, 4081, 0, 4083, 0, 4085, 0, 4087, 0, 4089, 0, 4091, 0, 4093, 0, 4095, 0, 4097, 0, 4099, 0, 4101, 0, 4103, 0, 4105, 0, 4107, 0, 4109, 0, 4111, 0, 4113, 0, 4115, 0, 4117, 0, 4119, 0, 4121, 0, 4123, 0, 4125, 0, 4127, 0, 4129, 0, 4131, 0, 4133, 0, 4135, 0, 4137, 0, 4139, 0, 4141, 0, 4143, 0, 4145, 0, 4147, 0, 4149, 0, 4151, 0, 4153, 0, 4155, 0, 4157, 0, 4159, 0, 4161, 0, 4163, 0, 4165, 0, 4167, 0, 4169, 0, 4171, 0, 4173, 0, 4175, 0, 4177, 0, 4179, 0, 4181, 0, 4183, 0, 4185, 0, 4187, 0, 4189, 0, 4191, 0, 4193, 0, 4195, 0, 4197, 0, 4199, 0, 4201, 0, 4203, 0, 4205, 0, 4207, 0, 4209, 0, 4211, 0, 4213, 0, 4215, 0, 4217, 0, 4219, 0, 4221, 0, 4223, 0, 4225, 0, 4227, 0, 4229, 0, 4231, 0, 4233, 0, 4235, 0, 4237, 0, 4239, 0, 4241, 0, 4243, 0, 4245, 0, 4247, 0, 4249, 0, 4251, 0, 4253, 0, 4255, 0, 4257, 0, 4259, 0, 4261, 0, 4263, 0, 4265, 0, 4267, 0, 4269, 0, 4271, 0, 4273, 0, 4275, 0, 4277, 0, 4279, 0, 4281, 0, 4283, 0, 4285, 0, 4287, 0, 4289, 0, 4291, 0, 4293, 0, 4295, 0, 4297, 0, 4299, 0, 4301, 0, 4303, 0, 4305, 0, 4307, 0, 4309, 0, 4311, 0, 4313, 0, 4315, 0, 4317, 0, 4319, 0, 4321, 0, 4323, 0, 4325, 0, 4327, 0, 4329, 0, 4331, 0, 4333, 0, 4335, 0, 4337, 0, 4339, 0, 4341, 0, 4343, 0, 4345, 0, 4347, 0, 4349, 0, 4351, 0, 4353, 0, 4355, 0, 4357, 0, 4359, 0, 4361, 0, 4363, 0, 4365, 0, 4367, 0, 4369, 0, 4371, 0, 4373, 0, 4375, 0, 4377, 0, 4379, 0, 4381, 0, 4383, 0, 4385, 0, 4387, 0, 4389, 0, 4391, 0, 4393, 0, 4395, 0, 4397, 0, 4399, 0, 4401, 0, 4403, 0, 4405, 0, 4407, 0, 4409, 0, 4411, 0, 4413, 0, 4415, 0, 4417, 0, 4419, 0, 4421, 0, 4423, 0, 4425, 0, 4427, 0, 4429, 0, 4431, 0, 4433, 0, 4435, 0, 4437, 0, 4439, 0, 4441, 0, 4443, 0, 4445, 0, 4447, 0, 4449, 0, 4451, 0, 4453, 0, 4455, 0, 4457, 0, 4459, 0, 4461, 0, 4463, 0, 4465, 0, 4467, 0, 4469, 0, 4471, 0, 4473, 0, 4475, 0, 4477, 0, 4479, 0, 4481, 0, 4483, 0, 4485, 0, 4487, 0, 4489, 0, 4491, 0, 4493, 0, 4495, 0, 4497, 0, 4499, 0, 4501, 0, 4503, 0, 4505, 0, 4507, 0, 4509, 0, 4511, 0, 4513, 0, 4515, 0, 4517, 0, 4519, 0, 4521, 0, 4523, 0, 4525, 0, 4527, 0, 4529, 0, 4531, 0, 4533, 0, 4535, 0, 4537, 0, 4539, 0, 4541, 0, 4543, 0, 4545, 0, 4547, 0, 4549, 0, 4551, 0, 4553, 0, 4555, 0, 4557, 0, 4559, 0, 4561, 0, 4563, 0, 4565, 0, 4567, 0, 4569, 0, 4571, 0, 4573, 0, 4575, 0, 4577, 0, 4579, 0, 4581, 0, 4583, 0, 4585, 0, 4587, 0, 4589, 0, 4591, 0, 4593, 0, 4595, 0, 4597, 0, 4599, 0, 4601, 0, 4603, 0, 4605, 0, 4607, 0, 4609, 0, 4611, 0, 4613, 0, 4615, 0, 4617, 0, 4619, 0, 4621, 0, 4623, 0, 4625, 0, 4627, 0, 4629, 0, 4631, 0, 4633, 0, 4635, 0, 4637, 0, 4639, 0, 4641, 0, 4643, 0, 4645, 0, 4647, 0, 4649, 0, 4651, 0, 4653, 0, 4655, 0, 4657, 0, 4659, 0, 4661, 0, 4663, 0, 4665, 0, 4667, 0, 4669, 0, 4671, 0, 4673, 0, 4675, 0, 4677, 0, 4679, 0, 4681, 0, 4683, 0, 4685, 0, 4687, 0, 4689, 0, 4691, 0, 4693, 0, 4695, 0, 4697, 0, 4699, 0, 4701, 0, 4703, 0, 4705, 0, 4707, 0, 4709, 0, 4711, 0, 4713, 0, 4715, 0, 4717, 0, 4719, 0, 4721, 0, 4723, 0, 4725, 0, 4727, 0, 4729, 0, 4731, 0, 4733, 0, 4735, 0, 4737, 0, 4739, 0, 4741, 0, 4743, 0, 4745, 0, 4747, 0, 4749, 0, 4751, 0, 4753, 0, 4755, 0, 4757, 0, 4759, 0, 4761, 0, 4763, 0, 4765, 0, 4767, 0, 4769, 0, 4771, 0, 4773, 0, 4775, 0, 4777, 0, 4779, 0, 4781, 0, 4783, 0, 4785, 0, 4787, 0, 4789, 0, 4791, 0, 4793, 0, 4795, 0, 4797, 0, 4799, 0, 4801, 0, 4803, 0, 4805, 0, 4807, 0, 4809, 0, 4811, 0, 4813, 0, 4815, 0, 4817, 0, 4819, 0, 4821, 0, 4823, 0, 4825, 0, 4827, 0, 4829, 0, 4831, 0, 4833, 0, 4835, 0, 4837, 0, 4839, 0, 4841, 0, 4843, 0, 4845, 0, 4847, 0, 4849, 0, 4851, 0, 4853, 0, 4855, 0, 4857, 0, 4859, 0, 
4861, 0, 4863, 0, 4865, 0, 4867, 0, 4869, 0, 4871, 0, 4873, 0, 4875, 0, 4877, 0, 4879, 0, 4881, 0, 4883, 0, 4885, 0, 4887, 0, 4889, 0, 4891, 0, 4893, 0, 4895, 0, 4897, 0, 4899, 0, 4901, 0, 4903, 0, 4905, 0, 4907, 0, 4909, 0, 4911, 0, 4913, 0, 4915, 0, 4917, 0, 4919, 0, 4921, 0, 4923, 0, 4925, 0, 4927, 0, 4929, 0, 4931, 0, 4933, 0, 4935, 0, 4937, 0, 4939, 0, 4941, 0, 4943, 0, 4945, 0, 4947, 0, 4949, 0, 4951, 0, 4953, 0, 4955, 0, 4957, 0, 4959, 0, 4961, 0, 4963, 0, 4965, 0, 4967, 0, 4969, 0, 4971, 0, 4973, 0, 4975, 0, 4977, 0, 4979, 0, 4981, 0, 4983, 0, 4985, 0, 4987, 0, 4989, 0, 4991, 0, 4993, 0, 4995, 0, 4997, 0, 4999, 0, 5001, 0, 5003, 0, 5005, 0, 5007, 0, 5009, 0, 5011, 0, 5013, 0, 5015, 0, 5017, 0, 5019, 0, 5021, 0, 5023, 0, 5025, 0, 5027, 0, 5029, 0, 5031, 0, 5033, 0, 5035, 0, 5037, 0, 5039, 0, 5041, 0, 5043, 0, 5045, 0, 5047, 0, 5049, 0, 5051, 0, 5053, 0, 5055, 0, 5057, 0, 5059, 0, 5061, 0, 5063, 0, 5065, 0, 5067, 0, 5069, 0, 5071, 0, 5073, 0, 5075, 0, 5077, 0, 5079, 0, 5081, 0, 5083, 0, 5085, 0, 5087, 0, 5089, 0, 5091, 0, 5093, 0, 5095, 0, 5097, 0, 5099, 0, 5101, 0, 5103, 0, 5105, 0, 5107, 0, 5109, 0, 5111, 0, 5113, 0, 5115, 0, 5117, 0, 5119, 0, 5121, 0, 5123, 0, 5125, 0, 5127, 0, 5129, 0, 5131, 0, 5133, 0, 5135, 0, 5137, 0, 5139, 0, 5141, 0, 5143, 0, 5145, 0, 5147, 0, 5149, 0, 5151, 0, 5153, 0, 5155, 0, 5157, 0, 5159, 0, 5161, 0, 5163, 0, 5165, 0, 5167, 0, 5169, 0, 5171, 0, 5173, 0, 5175, 0, 5177, 0, 5179, 0, 5181, 0, 5183, 0, 5185, 0, 5187, 0, 5189, 0, 5191, 0, 5193, 0, 5195, 0, 5197, 0, 5199, 0, 5201, 0, 5203, 0, 5205, 0, 5207, 0, 5209, 0, 5211, 0, 5213, 0, 5215, 0, 5217, 0, 5219, 0, 5221, 0, 5223, 0, 5225, 0, 5227, 0, 5229, 0, 5231, 0, 5233, 0, 5235, 0, 5237, 0, 5239, 0, 5241, 0, 5243, 0, 5245, 0, 5247, 0, 5249, 0, 5251, 0, 5253, 0, 5255, 0, 5257, 0, 5259, 0, 5261, 0, 5263, 0, 5265, 0, 5267, 0, 5269, 0, 5271, 0, 5273, 0, 5275, 0, 5277, 0, 5279, 0, 5281, 0, 5283, 0, 5285, 0, 5287, 0, 5289, 0, 5291, 0, 5293, 0, 5295, 0, 5297, 0, 5299, 0, 5301, 0, 5303, 0, 5305, 0, 5307, 0, 5309, 0, 5311, 0, 5313, 0, 5315, 0, 5317, 0, 5319, 0, 5321, 0, 5323, 0, 5325, 0, 5327, 0, 5329, 0, 5331, 0, 5333, 0, 5335, 0, 5337, 0, 5339, 0, 5341, 0, 5343, 0, 5345, 0, 5347, 0, 5349, 0, 5351, 0, 5353, 0, 5355, 0, 5357, 0, 5359, 0, 5361, 0, 5363, 0, 5365, 0, 5367, 0, 5369, 0, 5371, 0, 5373, 0, 5375, 0, 5377, 0, 5379, 0, 5381, 0, 5383, 0, 5385, 0, 5387, 0, 5389, 0, 5391, 0, 5393, 0, 5395, 0, 5397, 0, 5399, 0, 5401, 0, 5403, 0, 5405, 0, 5407, 0, 5409, 0, 5411, 0, 5413, 0, 5415, 0, 5417, 0, 5419, 0, 5421, 0, 5423, 0, 5425, 0, 5427, 0, 5429, 0, 5431, 0, 5433, 0, 5435, 0, 5437, 0, 5439, 0, 5441, 0, 5443, 0, 5445, 0, 5447, 0, 5449, 0, 5451, 0, 5453, 0, 5455, 0, 5457, 0, 5459, 0, 5461, 0, 5463, 0, 5465, 0, 5467, 0, 5469, 0, 5471, 0, 5473, 0, 5475, 0, 5477, 0, 5479, 0, 5481, 0, 5483, 0, 5485, 0, 5487, 0, 5489, 0, 5491, 0, 5493, 0, 5495, 0, 5497, 0, 5499, 0, 5501, 0, 5503, 0, 5505, 0, 5507, 0, 5509, 0, 5511, 0, 5513, 0, 5515, 0, 5517, 0, 5519, 0, 5521, 0, 5523, 0, 5525, 0, 5527, 0, 5529, 0, 5531, 0, 5533, 0, 5535, 0, 5537, 0, 5539, 0, 5541, 0, 5543, 0, 5545, 0, 5547, 0, 5549, 0, 5551, 0, 5553, 0, 5555, 0, 5557, 0, 5559, 0, 5561, 0, 5563, 0, 5565, 0, 5567, 0, 5569, 0, 5571, 0, 5573, 0, 5575, 0, 5577, 0, 5579, 0, 5581, 0, 5583, 0, 5585, 0, 5587, 0, 5589, 0, 5591, 0, 5593, 0, 5595, 0, 5597, 0, 5599, 0, 5601, 0, 5603, 0, 5605, 0, 5607, 0, 5609, 0, 5611, 0, 5613, 0, 5615, 0, 5617, 0, 5619, 0, 5621, 0, 5623, 0, 5625, 0, 5627, 0, 5629, 0, 5631, 0, 5633, 0, 5635, 0, 5637, 0, 5639, 0, 5641, 0, 5643, 0, 5645, 0, 5647, 0, 5649, 0, 
5651, 0, 5653, 0, 5655, 0, 5657, 0, 5659, 0, 5661, 0, 5663, 0, 5665, 0, 5667, 0, 5669, 0, 5671, 0, 5673, 0, 5675, 0, 5677, 0, 5679, 0, 5681, 0, 5683, 0, 5685, 0, 5687, 0, 5689, 0, 5691, 0, 5693, 0, 5695, 0, 5697, 0, 5699, 0, 5701, 0, 5703, 0, 5705, 0, 5707, 0, 5709, 0, 5711, 0, 5713, 0, 5715, 0, 5717, 0, 5719, 0, 5721, 0, 5723, 0, 5725, 0, 5727, 0, 5729, 0, 5731, 0, 5733, 0, 5735, 0, 5737, 0, 5739, 0, 5741, 0, 5743, 0, 5745, 0, 5747, 0, 5749, 0, 5751, 0, 5753, 0, 5755, 0, 5757, 0, 5759, 0, 5761, 0, 5763, 0, 5765, 0, 5767, 0, 5769, 0, 5771, 0, 5773, 0, 5775, 0, 5777, 0, 5779, 0, 5781, 0, 5783, 0, 5785, 0, 5787, 0, 5789, 0, 5791, 0, 5793, 0, 5795, 0, 5797, 0, 5799, 0, 5801, 0, 5803, 0, 5805, 0, 5807, 0, 5809, 0, 5811, 0, 5813, 0, 5815, 0, 5817, 0, 5819, 0, 5821, 0, 5823, 0, 5825, 0, 5827, 0, 5829, 0, 5831, 0, 5833, 0, 5835, 0, 5837, 0, 5839, 0, 5841, 0, 5843, 0, 5845, 0, 5847, 0, 5849, 0, 5851, 0, 5853, 0, 5855, 0, 5857, 0, 5859, 0, 5861, 0, 5863, 0, 5865, 0, 5867, 0, 5869, 0, 5871, 0, 5873, 0, 5875, 0, 5877, 0, 5879, 0, 5881, 0, 5883, 0, 5885, 0, 5887, 0, 5889, 0, 5891, 0, 5893, 0, 5895, 0, 5897, 0, 5899, 0, 5901, 0, 5903, 0, 5905, 0, 5907, 0, 5909, 0, 5911, 0, 5913, 0, 5915, 0, 5917, 0, 5919, 0, 5921, 0, 5923, 0, 5925, 0, 5927, 0, 5929, 0, 5931, 0, 5933, 0, 5935, 0, 5937, 0, 5939, 0, 5941, 0, 5943, 0, 5945, 0, 5947, 0, 5949, 0, 5951, 0, 5953, 0, 5955, 0, 5957, 0, 5959, 0, 5961, 0, 5963, 0, 5965, 0, 5967, 0, 5969, 0, 5971, 0, 5973, 0, 5975, 0, 5977, 0, 5979, 0, 5981, 0, 5983, 0, 5985, 0, 5987, 0, 5989, 0, 5991, 0, 5993, 0, 5995, 0, 5997, 0, 5999, 0, 6001, 0, 6003, 0, 6005, 0, 6007, 0, 6009, 0, 6011, 0, 6013, 0, 6015, 0, 6017, 0, 6019, 0, 6021, 0, 6023, 0, 6025, 0, 6027, 0, 6029, 0, 6031, 0, 6033, 0, 6035, 0, 6037, 0, 6039, 0, 6041, 0, 6043, 0, 6045, 0, 6047, 0, 6049, 0, 6051, 0, 6053, 0, 6055, 0, 6057, 0, 6059, 0, 6061, 0, 6063, 0, 6065, 0, 6067, 0, 6069, 0, 6071, 0, 6073, 0, 6075, 0, 6077, 0, 6079, 0, 6081, 0, 6083, 0, 6085, 0, 6087, 0, 6089, 0, 6091, 0, 6093, 0, 6095, 0, 6097, 0, 6099, 0, 6101, 0, 6103, 0, 6105, 0, 6107, 0, 6109, 0, 6111, 0, 6113, 0, 6115, 0, 6117, 0, 6119, 0, 6121, 0, 6123, 0, 6125, 0, 6127, 0, 6129, 0, 6131, 0, 6133, 0, 6135, 0, 6137, 0, 6139, 0, 6141, 0, 6143, 0, 6145, 0, 6147, 0, 6149, 0, 6151, 0, 6153, 0, 6155, 0, 6157, 0, 6159, 0, 6161, 0, 6163, 0, 6165, 0, 6167, 0, 6169, 0, 6171, 0, 6173, 0, 6175, 0, 6177, 0, 6179, 0, 6181, 0, 6183, 0, 6185, 0, 6187, 0, 6189, 0, 6191, 0, 6193, 0, 6195, 0, 6197, 0, 6199, 0, 6201, 0, 6203, 0, 6205, 0, 6207, 0, 6209, 0, 6211, 0, 6213, 0, 6215, 0, 6217, 0, 6219, 0, 6221, 0, 6223, 0, 6225, 0, 6227, 0, 6229, 0, 6231, 0, 6233, 0, 6235, 0, 6237, 0, 6239, 0, 6241, 0, 6243, 0, 6245, 0, 6247, 0, 6249, 0, 6251, 0, 6253, 0, 6255, 0, 6257, 0, 6259, 0, 6261, 0, 6263, 0, 6265, 0, 6267, 0, 6269, 0, 6271, 0, 6273, 0, 6275, 0, 6277, 0, 6279, 0, 6281, 0, 6283, 0, 6285, 0, 6287, 0, 6289, 0, 6291, 0, 6293, 0, 6295, 0, 6297, 0, 6299, 0, 6301, 0, 6303, 0, 6305, 0, 6307, 0, 6309, 0, 6311, 0, 6313, 0, 6315, 0, 6317, 0, 6319, 0, 6321, 0, 6323, 0, 6325, 0, 6327, 0, 6329, 0, 6331, 0, 6333, 0, 6335, 0, 6337, 0, 6339, 0, 6341, 0, 6343, 0, 6345, 0, 6347, 0, 6349, 0, 6351, 0, 6353, 0, 6355, 0, 6357, 0, 6359, 0, 6361, 0, 6363, 0, 6365, 0, 6367, 0, 6369, 0, 6371, 0, 6373, 0, 6375, 0, 6377, 0, 6379, 0, 6381, 0, 6383, 0, 6385, 0, 6387, 0, 6389, 0, 6391, 0, 6393, 0, 6395, 0, 6397, 0, 6399, 0, 6401, 0, 6403, 0, 6405, 0, 6407, 0, 6409, 0, 6411, 0, 6413, 0, 6415, 0, 6417, 0, 6419, 0, 6421, 0, 6423, 0, 6425, 0, 6427, 0, 6429, 0, 6431, 0, 6433, 0, 6435, 0, 6437, 0, 6439, 0, 
6441, 0, 6443, 0, 6445, 0, 6447, 0, 6449, 0, 6451, 0, 6453, 0, 6455, 0, 6457, 0, 6459, 0, 6461, 0, 6463, 0, 6465, 0, 6467, 0, 6469, 0, 6471, 0, 6473, 0, 6475, 0, 6477, 0, 6479, 0, 6481, 0, 6483, 0, 6485, 0, 6487, 0, 6489, 0, 6491, 0, 6493, 0, 6495, 0, 6497, 0, 6499, 0, 6501, 0, 6503, 0, 6505, 0, 6507, 0, 6509, 0, 6511, 0, 6513, 0, 6515, 0, 6517, 0, 6519, 0, 6521, 0, 6523, 0, 6525, 0, 6527, 0, 6529, 0, 6531, 0, 6533, 0, 6535, 0, 6537, 0, 6539, 0, 6541, 0, 6543, 0, 6545, 0, 6547, 0, 6549, 0, 6551, 0, 6553, 0, 6555, 0, 6557, 0, 6559, 0, 6561, 0, 6563, 0, 6565, 0, 6567, 0, 6569, 0, 6571, 0, 6573, 0, 6575, 0, 6577, 0, 6579, 0, 6581, 0, 6583, 0, 6585, 0, 6587, 0, 6589, 0, 6591, 0, 6593, 0, 6595, 0, 6597, 0, 6599, 0, 6601, 0, 6603, 0, 6605, 0, 6607, 0, 6609, 0, 6611, 0, 6613, 0, 6615, 0, 6617, 0, 6619, 0, 6621, 0, 6623, 0, 6625, 0, 6627, 0, 6629, 0, 6631, 0, 6633, 0, 6635, 0, 6637, 0, 6639, 0, 6641, 0, 6643, 0, 6645, 0, 6647, 0, 6649, 0, 6651, 0, 6653, 0, 6655, 0, 6657, 0, 6659, 0, 6661, 0, 6663, 0, 6665, 0, 6667, 0, 6669, 0, 6671, 0, 6673, 0, 6675, 0, 6677, 0, 6679, 0, 6681, 0, 6683, 0, 6685, 0, 6687, 0, 6689, 0, 6691, 0, 6693, 0, 6695, 0, 6697, 0, 6699, 0, 6701, 0, 6703, 0, 6705, 0, 6707, 0, 6709, 0, 6711, 0, 6713, 0, 6715, 0, 6717, 0, 6719, 0, 6721, 0, 6723, 0, 6725, 0, 6727, 0, 6729, 0, 6731, 0, 6733, 0, 6735, 0, 6737, 0, 6739, 0, 6741, 0, 6743, 0, 6745, 0, 6747, 0, 6749, 0, 6751, 0, 6753, 0, 6755, 0, 6757, 0, 6759, 0, 6761, 0, 6763, 0, 6765, 0, 6767, 0, 6769, 0, 6771, 0, 6773, 0, 6775, 0, 6777, 0, 6779, 0, 6781, 0, 6783, 0, 6785, 0, 6787, 0, 6789, 0, 6791, 0, 6793, 0, 6795, 0, 6797, 0, 6799, 0, 6801, 0, 6803, 0, 6805, 0, 6807, 0, 6809, 0, 6811, 0, 6813, 0, 6815, 0, 6817, 0, 6819, 0, 6821, 0, 6823, 0, 6825, 0, 6827, 0, 6829, 0, 6831, 0, 6833, 0, 6835, 0, 6837, 0, 6839, 0, 6841, 0, 6843, 0, 6845, 0, 6847, 0, 6849, 0, 6851, 0, 6853, 0, 6855, 0, 6857, 0, 6859, 0, 6861, 0, 6863, 0, 6865, 0, 6867, 0, 6869, 0, 6871, 0, 6873, 0, 6875, 0, 6877, 0, 6879, 0, 6881, 0, 6883, 0, 6885, 0, 6887, 0, 6889, 0, 6891, 0, 6893, 0, 6895, 0, 6897, 0, 6899, 0, 6901, 0, 6903, 0, 6905, 0, 6907, 0, 6909, 0, 6911, 0, 6913, 0, 6915, 0, 6917, 0, 6919, 0, 6921, 0, 6923, 0, 6925, 0, 6927, 0, 6929, 0, 6931, 0, 6933, 0, 6935, 0, 6937, 0, 6939, 0, 6941, 0, 6943, 0, 6945, 0, 6947, 0, 6949, 0, 6951, 0, 6953, 0, 6955, 0, 6957, 0, 6959, 0, 6961, 0, 6963, 0, 6965, 0, 6967, 0, 6969, 0, 6971, 0, 6973, 0, 6975, 0, 6977, 0, 6979, 0, 6981, 0, 6983, 0, 6985, 0, 6987, 0, 6989, 0, 6991, 0, 6993, 0, 6995, 0, 6997, 0, 6999, 0, 7001, 0, 7003, 0, 7005, 0, 7007, 0, 7009, 0, 7011, 0, 7013, 0, 7015, 0, 7017, 0, 7019, 0, 7021, 0, 7023, 0, 7025, 0, 7027, 0, 7029, 0, 7031, 0, 7033, 0, 7035, 0, 7037, 0, 7039, 0, 7041, 0, 7043, 0, 7045, 0, 7047, 0, 7049, 0, 7051, 0, 7053, 0, 7055, 0, 7057, 0, 7059, 0, 7061, 0, 7063, 0, 7065, 0, 7067, 0, 7069, 0, 7071, 0, 7073, 0, 7075, 0, 7077, 0, 7079, 0, 7081, 0, 7083, 0, 7085, 0, 7087, 0, 7089, 0, 7091, 0, 7093, 0, 7095, 0, 7097, 0, 7099, 0, 7101, 0, 7103, 0, 7105, 0, 7107, 0, 7109, 0, 7111, 0, 7113, 0, 7115, 0, 7117, 0, 7119, 0, 7121, 0, 7123, 0, 7125, 0, 7127, 0, 7129, 0, 7131, 0, 7133, 0, 7135, 0, 7137, 0, 7139, 0, 7141, 0, 7143, 0, 7145, 0, 7147, 0, 7149, 0, 7151, 0, 7153, 0, 7155, 0, 7157, 0, 7159, 0, 7161, 0, 7163, 0, 7165, 0, 7167, 0, 7169, 0, 7171, 0, 7173, 0, 7175, 0, 7177, 0, 7179, 0, 7181, 0, 7183, 0, 7185, 0, 7187, 0, 7189, 0, 7191, 0, 7193, 0, 7195, 0, 7197, 0, 7199, 0, 7201, 0, 7203, 0, 7205, 0, 7207, 0, 7209, 0, 7211, 0, 7213, 0, 7215, 0, 7217, 0, 7219, 0, 7221, 0, 7223, 0, 7225, 0, 7227, 0, 7229, 0, 
7231, 0, 7233, 0, 7235, 0, 7237, 0, 7239, 0, 7241, 0, 7243, 0, 7245, 0, 7247, 0, 7249, 0, 7251, 0, 7253, 0, 7255, 0, 7257, 0, 7259, 0, 7261, 0, 7263, 0, 7265, 0, 7267, 0, 7269, 0, 7271, 0, 7273, 0, 7275, 0, 7277, 0, 7279, 0, 7281, 0, 7283, 0, 7285, 0, 7287, 0, 7289, 0, 7291, 0, 7293, 0, 7295, 0, 7297, 0, 7299, 0, 7301, 0, 7303, 0, 7305, 0, 7307, 0, 7309, 0, 7311, 0, 7313, 0, 7315, 0, 7317, 0, 7319, 0, 7321, 0, 7323, 0, 7325, 0, 7327, 0, 7329, 0, 7331, 0, 7333, 0, 7335, 0, 7337, 0, 7339, 0, 7341, 0, 7343, 0, 7345, 0, 7347, 0, 7349, 0, 7351, 0, 7353, 0, 7355, 0, 7357, 0, 7359, 0, 7361, 0, 7363, 0, 7365, 0, 7367, 0, 7369, 0, 7371, 0, 7373, 0, 7375, 0, 7377, 0, 7379, 0, 7381, 0, 7383, 0, 7385, 0, 7387, 0, 7389, 0, 7391, 0, 7393, 0, 7395, 0, 7397, 0, 7399, 0, 7401, 0, 7403, 0, 7405, 0, 7407, 0, 7409, 0, 7411, 0, 7413, 0, 7415, 0, 7417, 0, 7419, 0, 7421, 0, 7423, 0, 7425, 0, 7427, 0, 7429, 0, 7431, 0, 7433, 0, 7435, 0, 7437, 0, 7439, 0, 7441, 0, 7443, 0, 7445, 0, 7447, 0, 7449, 0, 7451, 0, 7453, 0, 7455, 0, 7457, 0, 7459, 0, 7461, 0, 7463, 0, 7465, 0, 7467, 0, 7469, 0, 7471, 0, 7473, 0, 7475, 0, 7477, 0, 7479, 0, 7481, 0, 7483, 0, 7485, 0, 7487, 0, 7489, 0, 7491, 0, 7493, 0, 7495, 0, 7497, 0, 7499, 0, 7501, 0, 7503, 0, 7505, 0, 7507, 0, 7509, 0, 7511, 0, 7513, 0, 7515, 0, 7517, 0, 7519, 0, 7521, 0, 7523, 0, 7525, 0, 7527, 0, 7529, 0, 7531, 0, 7533, 0, 7535, 0, 7537, 0, 7539, 0, 7541, 0, 7543, 0, 7545, 0, 7547, 0, 7549, 0, 7551, 0, 7553, 0, 7555, 0, 7557, 0, 7559, 0, 7561, 0, 7563, 0, 7565, 0, 7567, 0, 7569, 0, 7571, 0, 7573, 0, 7575, 0, 7577, 0, 7579, 0, 7581, 0, 7583, 0, 7585, 0, 7587, 0, 7589, 0, 7591, 0, 7593, 0, 7595, 0, 7597, 0, 7599, 0, 7601, 0, 7603, 0, 7605, 0, 7607, 0, 7609, 0, 7611, 0, 7613, 0, 7615, 0, 7617, 0, 7619, 0, 7621, 0, 7623, 0, 7625, 0, 7627, 0, 7629, 0, 7631, 0, 7633, 0, 7635, 0, 7637, 0, 7639, 0, 7641, 0, 7643, 0, 7645, 0, 7647, 0, 7649, 0, 7651, 0, 7653, 0, 7655, 0, 7657, 0, 7659, 0, 7661, 0, 7663, 0, 7665, 0, 7667, 0, 7669, 0, 7671, 0, 7673, 0, 7675, 0, 7677, 0, 7679, 0, 7681, 0, 7683, 0, 7685, 0, 7687, 0, 7689, 0, 7691, 0, 7693, 0, 7695, 0, 7697, 0, 7699, 0, 7701, 0, 7703, 0, 7705, 0, 7707, 0, 7709, 0, 7711, 0, 7713, 0, 7715, 0, 7717, 0, 7719, 0, 7721, 0, 7723, 0, 7725, 0, 7727, 0, 7729, 0, 7731, 0, 7733, 0, 7735, 0, 7737, 0, 7739, 0, 7741, 0, 7743, 0, 7745, 0, 7747, 0, 7749, 0, 7751, 0, 7753, 0, 7755, 0, 7757, 0, 7759, 0, 7761, 0, 7763, 0, 7765, 0, 7767, 0, 7769, 0, 7771, 0, 7773, 0, 7775, 0, 7777, 0, 7779, 0, 7781, 0, 7783, 0, 7785, 0, 7787, 0, 7789, 0, 7791, 0, 7793, 0, 7795, 0, 7797, 0, 7799, 0, 7801, 0, 7803, 0, 7805, 0, 7807, 0, 7809, 0, 7811, 0, 7813, 0, 7815, 0, 7817, 0, 7819, 0, 7821, 0, 7823, 0, 7825, 0, 7827, 0, 7829, 0, 7831, 0, 7833, 0, 7835, 0, 7837, 0, 7839, 0, 7841, 0, 7843, 0, 7845, 0, 7847, 0, 7849, 0, 7851, 0, 7853, 0, 7855, 0, 7857, 0, 7859, 0, 7861, 0, 7863, 0, 7865, 0, 7867, 0, 7869, 0, 7871, 0, 7873, 0, 7875, 0, 7877, 0, 7879, 0, 7881, 0, 7883, 0, 7885, 0, 7887, 0, 7889, 0, 7891, 0, 7893, 0, 7895, 0, 7897, 0, 7899, 0, 7901, 0, 7903, 0, 7905, 0, 7907, 0, 7909, 0, 7911, 0, 7913, 0, 7915, 0, 7917, 0, 7919, 0, 7921, 0, 7923, 0, 7925, 0, 7927, 0, 7929, 0, 7931, 0, 7933, 0, 7935, 0, 7937, 0, 7939, 0, 7941, 0, 7943, 0, 7945, 0, 7947, 0, 7949, 0, 7951, 0, 7953, 0, 7955, 0, 7957, 0, 7959, 0, 7961, 0, 7963, 0, 7965, 0, 7967, 0, 7969, 0, 7971, 0, 7973, 0, 7975, 0, 7977, 0, 7979, 0, 7981, 0, 7983, 0, 7985, 0, 7987, 0, 7989, 0, 7991, 0, 7993, 0, 7995, 0, 7997, 0, 7999, 0, 8001, 0, 8003, 0, 8005, 0, 8007, 0, 8009, 0, 8011, 0, 8013, 0, 8015, 0, 8017, 0, 8019, 0, 
8021, 0, 8023, 0, 8025, 0, 8027, 0, 8029, 0, 8031, 0, 8033, 0, 8035, 0, 8037, 0, 8039, 0, 8041, 0, 8043, 0, 8045, 0, 8047, 0, 8049, 0, 8051, 0, 8053, 0, 8055, 0, 8057, 0, 8059, 0, 8061, 0, 8063, 0, 8065, 0, 8067, 0, 8069, 0, 8071, 0, 8073, 0, 8075, 0, 8077, 0, 8079, 0, 8081, 0, 8083, 0, 8085, 0, 8087, 0, 8089, 0, 8091, 0, 8093, 0, 8095, 0, 8097, 0, 8099, 0, 8101, 0, 8103, 0, 8105, 0, 8107, 0, 8109, 0, 8111, 0, 8113, 0, 8115, 0, 8117, 0, 8119, 0, 8121, 0, 8123, 0, 8125, 0, 8127, 0, 8129, 0, 8131, 0, 8133, 0, 8135, 0, 8137, 0, 8139, 0, 8141, 0, 8143, 0, 8145, 0, 8147, 0, 8149, 0, 8151, 0, 8153, 0, 8155, 0, 8157, 0, 8159, 0, 8161, 0, 8163, 0, 8165, 0, 8167, 0, 8169, 0, 8171, 0, 8173, 0, 8175, 0, 8177, 0, 8179, 0, 8181, 0, 8183, 0, 8185, 0, 8187, 0, 8189, 0, 8191, 0, 8193, 0, 8195, 0, 8197, 0, 8199, 0, 8201, 0, 8203, 0, 8205, 0, 8207, 0, 8209, 0, 8211, 0, 8213, 0, 8215, 0, 8217, 0, 8219, 0, 8221, 0, 8223, 0, 8225, 0, 8227, 0, 8229, 0, 8231, 0, 8233, 0, 8235, 0, 8237, 0, 8239, 0, 8241, 0, 8243, 0, 8245, 0, 8247, 0, 8249, 0, 8251, 0, 8253, 0, 8255, 0, 8257, 0, 8259, 0, 8261, 0, 8263, 0, 8265, 0, 8267, 0, 8269, 0, 8271, 0, 8273, 0, 8275, 0, 8277, 0, 8279, 0, 8281, 0, 8283, 0, 8285, 0, 8287, 0, 8289, 0, 8291, 0, 8293, 0, 8295, 0, 8297, 0, 8299, 0, 8301, 0, 8303, 0, 8305, 0, 8307, 0, 8309, 0, 8311, 0, 8313, 0, 8315, 0, 8317, 0, 8319, 0, 8321, 0, 8323, 0, 8325, 0, 8327, 0, 8329, 0, 8331, 0, 8333, 0, 8335, 0, 8337, 0, 8339, 0, 8341, 0, 8343, 0, 8345, 0, 8347, 0, 8349, 0, 8351, 0, 8353, 0, 8355, 0, 8357, 0, 8359, 0, 8361, 0, 8363, 0, 8365, 0, 8367, 0, 8369, 0, 8371, 0, 8373, 0, 8375, 0, 8377, 0, 8379, 0, 8381, 0, 8383, 0, 8385, 0, 8387, 0, 8389, 0, 8391, 0, 8393, 0, 8395, 0, 8397, 0, 8399, 0, 8401, 0, 8403, 0, 8405, 0, 8407, 0, 8409, 0, 8411, 0, 8413, 0, 8415, 0, 8417, 0, 8419, 0, 8421, 0, 8423, 0, 8425, 0, 8427, 0, 8429, 0, 8431, 0, 8433, 0, 8435, 0, 8437, 0, 8439, 0, 8441, 0, 8443, 0, 8445, 0, 8447, 0, 8449, 0, 8451, 0, 8453, 0, 8455, 0, 8457, 0, 8459, 0, 8461, 0, 8463, 0, 8465, 0, 8467, 0, 8469, 0, 8471, 0, 8473, 0, 8475, 0, 8477, 0, 8479, 0, 8481, 0, 8483, 0, 8485, 0, 8487, 0, 8489, 0, 8491, 0, 8493, 0, 8495, 0, 8497, 0, 8499, 0, 8501, 0, 8503, 0, 8505, 0, 8507, 0, 8509, 0, 8511, 0, 8513, 0, 8515, 0, 8517, 0, 8519, 0, 8521, 0, 8523, 0, 8525, 0, 8527, 0, 8529, 0, 8531, 0, 8533, 0, 8535, 0, 8537, 0, 8539, 0, 8541, 0, 8543, 0, 8545, 0, 8547, 0, 8549, 0, 8551, 0, 8553, 0, 8555, 0, 8557, 0, 8559, 0, 8561, 0, 8563, 0, 8565, 0, 8567, 0, 8569, 0, 8571, 0, 8573, 0, 8575, 0, 8577, 0, 8579, 0, 8581, 0, 8583, 0, 8585, 0, 8587, 0, 8589, 0, 8591, 0, 8593, 0, 8595, 0, 8597, 0, 8599, 0, 8601, 0, 8603, 0, 8605, 0, 8607, 0, 8609, 0, 8611, 0, 8613, 0, 8615, 0, 8617, 0, 8619, 0, 8621, 0, 8623, 0, 8625, 0, 8627, 0, 8629, 0, 8631, 0, 8633, 0, 8635, 0, 8637, 0, 8639, 0, 8641, 0, 8643, 0, 8645, 0, 8647, 0, 8649, 0, 8651, 0, 8653, 0, 8655, 0, 8657, 0, 8659, 0, 8661, 0, 8663, 0, 8665, 0, 8667, 0, 8669, 0, 8671, 0, 8673, 0, 8675, 0, 8677, 0, 8679, 0, 8681, 0, 8683, 0, 8685, 0, 8687, 0, 8689, 0, 8691, 0, 8693, 0, 8695, 0, 8697, 0, 8699, 0, 8701, 0, 8703, 0, 8705, 0, 8707, 0, 8709, 0, 8711, 0, 8713, 0, 8715, 0, 8717, 0, 8719, 0, 8721, 0, 8723, 0, 8725, 0, 8727, 0, 8729, 0, 8731, 0, 8733, 0, 8735, 0, 8737, 0, 8739, 0, 8741, 0, 8743, 0, 8745, 0, 8747, 0, 8749, 0, 8751, 0, 8753, 0, 8755, 0, 8757, 0, 8759, 0, 8761, 0, 8763, 0, 8765, 0, 8767, 0, 8769, 0, 8771, 0, 8773, 0, 8775, 0, 8777, 0, 8779, 0, 8781, 0, 8783, 0, 8785, 0, 8787, 0, 8789, 0, 8791, 0, 8793, 0, 8795, 0, 8797, 0, 8799, 0, 8801, 0, 8803, 0, 8805, 0, 8807, 0, 8809, 0, 
8811, 0, 8813, 0, 8815, 0, 8817, 0, 8819, 0, 8821, 0, 8823, 0, 8825, 0, 8827, 0, 8829, 0, 8831, 0, 8833, 0, 8835, 0, 8837, 0, 8839, 0, 8841, 0, 8843, 0, 8845, 0, 8847, 0, 8849, 0, 8851, 0, 8853, 0, 8855, 0, 8857, 0, 8859, 0, 8861, 0, 8863, 0, 8865, 0, 8867, 0, 8869, 0, 8871, 0, 8873, 0, 8875, 0, 8877, 0, 8879, 0, 8881, 0, 8883, 0, 8885, 0, 8887, 0, 8889, 0, 8891, 0, 8893, 0, 8895, 0, 8897, 0, 8899, 0, 8901, 0, 8903, 0, 8905, 0, 8907, 0, 8909, 0, 8911, 0, 8913, 0, 8915, 0, 8917, 0, 8919, 0, 8921, 0, 8923, 0, 8925, 0, 8927, 0, 8929, 0, 8931, 0, 8933, 0, 8935, 0, 8937, 0, 8939, 0, 8941, 0, 8943, 0, 8945, 0, 8947, 0, 8949, 0, 8951, 0, 8953, 0, 8955, 0, 8957, 0, 8959, 0, 8961, 0, 8963, 0, 8965, 0, 8967, 0, 8969, 0, 8971, 0, 8973, 0, 8975, 0, 8977, 0, 8979, 0, 8981, 0, 8983, 0, 8985, 0, 8987, 0, 8989, 0, 8991, 0, 8993, 0, 8995, 0, 8997, 0, 8999, 0, 9001, 0, 9003, 0, 9005, 0, 9007, 0, 9009, 0, 9011, 0, 9013, 0, 9015, 0, 9017, 0, 9019, 0, 9021, 0, 9023, 0, 9025, 0, 9027, 0, 9029, 0, 9031, 0, 9033, 0, 9035, 0, 9037, 0, 9039, 0, 9041, 0, 9043, 0, 9045, 0, 9047, 0, 9049, 0, 9051, 0, 9053, 0, 9055, 0, 9057, 0, 9059, 0, 9061, 0, 9063, 0, 9065, 0, 9067, 0, 9069, 0, 9071, 0, 9073, 0, 9075, 0, 9077, 0, 9079, 0, 9081, 0, 9083, 0, 9085, 0, 9087, 0, 9089, 0, 9091, 0, 9093, 0, 9095, 0, 9097, 0, 9099, 0, 9101, 0, 9103, 0, 9105, 0, 9107, 0, 9109, 0, 9111, 0, 9113, 0, 9115, 0, 9117, 0, 9119, 0, 9121, 0, 9123, 0, 9125, 0, 9127, 0, 9129, 0, 9131, 0, 9133, 0, 9135, 0, 9137, 0, 9139, 0, 9141, 0, 9143, 0, 9145, 0, 9147, 0, 9149, 0, 9151, 0, 9153, 0, 9155, 0, 9157, 0, 9159, 0, 9161, 0, 9163, 0, 9165, 0, 9167, 0, 9169, 0, 9171, 0, 9173, 0, 9175, 0, 9177, 0, 9179, 0, 9181, 0, 9183, 0, 9185, 0, 9187, 0, 9189, 0, 9191, 0, 9193, 0, 9195, 0, 9197, 0, 9199, 0, 9201, 0, 9203, 0, 9205, 0, 9207, 0, 9209, 0, 9211, 0, 9213, 0, 9215, 0, 9217, 0, 9219, 0, 9221, 0, 9223, 0, 9225, 0, 9227, 0, 9229, 0, 9231, 0, 9233, 0, 9235, 0, 9237, 0, 9239, 0, 9241, 0, 9243, 0, 9245, 0, 9247, 0, 9249, 0, 9251, 0, 9253, 0, 9255, 0, 9257, 0, 9259, 0, 9261, 0, 9263, 0, 9265, 0, 9267, 0, 9269, 0, 9271, 0, 9273, 0, 9275, 0, 9277, 0, 9279, 0, 9281, 0, 9283, 0, 9285, 0, 9287, 0, 9289, 0, 9291, 0, 9293, 0, 9295, 0, 9297, 0, 9299, 0, 9301, 0, 9303, 0, 9305, 0, 9307, 0, 9309, 0, 9311, 0, 9313, 0, 9315, 0, 9317, 0, 9319, 0, 9321, 0, 9323, 0, 9325, 0, 9327, 0, 9329, 0, 9331, 0, 9333, 0, 9335, 0, 9337, 0, 9339, 0, 9341, 0, 9343, 0, 9345, 0, 9347, 0, 9349, 0, 9351, 0, 9353, 0, 9355, 0, 9357, 0, 9359, 0, 9361, 0, 9363, 0, 9365, 0, 9367, 0, 9369, 0, 9371, 0, 9373, 0, 9375, 0, 9377, 0, 9379, 0, 9381, 0, 9383, 0, 9385, 0, 9387, 0, 9389, 0, 9391, 0, 9393, 0, 9395, 0, 9397, 0, 9399, 0, 9401, 0, 9403, 0, 9405, 0, 9407, 0, 9409, 0, 9411, 0, 9413, 0, 9415, 0, 9417, 0, 9419, 0, 9421, 0, 9423, 0, 9425, 0, 9427, 0, 9429, 0, 9431, 0, 9433, 0, 9435, 0, 9437, 0, 9439, 0, 9441, 0, 9443, 0, 9445, 0, 9447, 0, 9449, 0, 9451, 0, 9453, 0, 9455, 0, 9457, 0, 9459, 0, 9461, 0, 9463, 0, 9465, 0, 9467, 0, 9469, 0, 9471, 0, 9473, 0, 9475, 0, 9477, 0, 9479, 0, 9481, 0, 9483, 0, 9485, 0, 9487, 0, 9489, 0, 9491, 0, 9493, 0, 9495, 0, 9497, 0, 9499, 0, 9501, 0, 9503, 0, 9505, 0, 9507, 0, 9509, 0, 9511, 0, 9513, 0, 9515, 0, 9517, 0, 9519, 0, 9521, 0, 9523, 0, 9525, 0, 9527, 0, 9529, 0, 9531, 0, 9533, 0, 9535, 0, 9537, 0, 9539, 0, 9541, 0, 9543, 0, 9545, 0, 9547, 0, 9549, 0, 9551, 0, 9553, 0, 9555, 0, 9557, 0, 9559, 0, 9561, 0, 9563, 0, 9565, 0, 9567, 0, 9569, 0, 9571, 0, 9573, 0, 9575, 0, 9577, 0, 9579, 0, 9581, 0, 9583, 0, 9585, 0, 9587, 0, 9589, 0, 9591, 0, 9593, 0, 9595, 0, 9597, 0, 9599, 0, 
9601, 0, 9603, 0, 9605, 0, 9607, 0, 9609, 0, 9611, 0, 9613, 0, 9615, 0, 9617, 0, 9619, 0, 9621, 0, 9623, 0, 9625, 0, 9627, 0, 9629, 0, 9631, 0, 9633, 0, 9635, 0, 9637, 0, 9639, 0, 9641, 0, 9643, 0, 9645, 0, 9647, 0, 9649, 0, 9651, 0, 9653, 0, 9655, 0, 9657, 0, 9659, 0, 9661, 0, 9663, 0, 9665, 0, 9667, 0, 9669, 0, 9671, 0, 9673, 0, 9675, 0, 9677, 0, 9679, 0, 9681, 0, 9683, 0, 9685, 0, 9687, 0, 9689, 0, 9691, 0, 9693, 0, 9695, 0, 9697, 0, 9699, 0, 9701, 0, 9703, 0, 9705, 0, 9707, 0, 9709, 0, 9711, 0, 9713, 0, 9715, 0, 9717, 0, 9719, 0, 9721, 0, 9723, 0, 9725, 0, 9727, 0, 9729, 0, 9731, 0, 9733, 0, 9735, 0, 9737, 0, 9739, 0, 9741, 0, 9743, 0, 9745, 0, 9747, 0, 9749, 0, 9751, 0, 9753, 0, 9755, 0, 9757, 0, 9759, 0, 9761, 0, 9763, 0, 9765, 0, 9767, 0, 9769, 0, 9771, 0, 9773, 0, 9775, 0, 9777, 0, 9779, 0, 9781, 0, 9783, 0, 9785, 0, 9787, 0, 9789, 0, 9791, 0, 9793, 0, 9795, 0, 9797, 0, 9799, 0, 9801, 0, 9803, 0, 9805, 0, 9807, 0, 9809, 0, 9811, 0, 9813, 0, 9815, 0, 9817, 0, 9819, 0, 9821, 0, 9823, 0, 9825, 0, 9827, 0, 9829, 0, 9831, 0, 9833, 0, 9835, 0, 9837, 0, 9839, 0, 9841, 0, 9843, 0, 9845, 0, 9847, 0, 9849, 0, 9851, 0, 9853, 0, 9855, 0, 9857, 0, 9859, 0, 9861, 0, 9863, 0, 9865, 0, 9867, 0, 9869, 0, 9871, 0, 9873, 0, 9875, 0, 9877, 0, 9879, 0, 9881, 0, 9883, 0, 9885, 0, 9887, 0, 9889, 0, 9891, 0, 9893, 0, 9895, 0, 9897, 0, 9899, 0, 9901, 0, 9903, 0, 9905, 0, 9907, 0, 9909, 0, 9911, 0, 9913, 0, 9915, 0, 9917, 0, 9919, 0, 9921, 0, 9923, 0, 9925, 0, 9927, 0, 9929, 0, 9931, 0, 9933, 0, 9935, 0, 9937, 0, 9939, 0, 9941, 0, 9943, 0, 9945, 0, 9947, 0, 9949, 0, 9951, 0, 9953, 0, 9955, 0, 9957, 0, 9959, 0, 9961, 0, 9963, 0, 9965, 0, 9967, 0, 9969, 0, 9971, 0, 9973, 0, 9975, 0, 9977, 0, 9979, 0, 9981, 0, 9983, 0, 9985, 0, 9987, 0, 9989, 0, 9991, 0, 9993, 0, 9995, 0, 9997, 0, 9999, 0, 10001, 0, 10003, 0, 10005, 0, 10007, 0, 10009, 0, 10011, 0, 10013, 0, 10015, 0, 10017, 0, 10019, 0, 10021, 0, 10023, 0, 10025, 0, 10027, 0, 10029, 0, 10031, 0, 10033, 0, 10035, 0, 10037, 0, 10039, 0, 10041, 0, 10043, 0, 10045, 0, 10047, 0, 10049, 0, 10051, 0, 10053, 0, 10055, 0, 10057, 0, 10059, 0, 10061, 0, 10063, 0, 10065, 0, 10067, 0, 10069, 0, 10071, 0, 10073, 0, 10075, 0, 10077, 0, 10079, 0, 10081, 0, 10083, 0, 10085, 0, 10087, 0, 10089, 0, 10091, 0, 10093, 0, 10095, 0, 10097, 0, 10099, 0, 10101, 0, 10103, 0, 10105, 0, 10107, 0, 10109, 0, 10111, 0, 10113, 0, 10115, 0, 10117, 0, 10119, 0, 10121, 0, 10123, 0, 10125, 0, 10127, 0, 10129, 0, 10131, 0, 10133, 0, 10135, 0, 10137, 0, 10139, 0, 10141, 0, 10143, 0, 10145, 0, 10147, 0, 10149, 0, 10151, 0, 10153, 0, 10155, 0, 10157, 0, 10159, 0, 10161, 0, 10163, 0, 10165, 0, 10167, 0, 10169, 0, 10171, 0, 10173, 0, 10175, 0, 10177, 0, 10179, 0, 10181, 0, 10183, 0, 10185, 0, 10187, 0, 10189, 0, 10191, 0, 10193, 0, 10195, 0, 10197, 0, 10199, 0, 10201, 0, 10203, 0, 10205, 0, 10207, 0, 10209, 0, 10211, 0, 10213, 0, 10215, 0, 10217, 0, 10219, 0, 10221, 0, 10223, 0, 10225, 0, 10227, 0, 10229, 0, 10231, 0, 10233, 0, 10235, 0, 10237, 0, 10239}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/resize_bilinear_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/resize_bilinear_2_relaxed.example.cpp
new file mode 100644
index 000000000..2139411aa
--- /dev/null
+++ b/nn/runtime/test/generated/examples/resize_bilinear_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: resize_bilinear_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {3, 4, 6, 10, 9, 10, 12, 16}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {3, 4, 5, 8, 6, 10, 7, 8, 9, 12, 10, 14, 9, 10, 11, 14, 12, 16}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/space_to_batch_float_1_relaxed.example.cpp b/nn/runtime/test/generated/examples/space_to_batch_float_1_relaxed.example.cpp
new file mode 100644
index 000000000..aab8779a9
--- /dev/null
+++ b/nn/runtime/test/generated/examples/space_to_batch_float_1_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: space_to_batch_float_1_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 3, 9, 11, 2, 4, 10, 12, 5, 7, 13, 15, 6, 8, 14, 16}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/space_to_batch_float_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/space_to_batch_float_2_relaxed.example.cpp
new file mode 100644
index 000000000..9c4f649f0
--- /dev/null
+++ b/nn/runtime/test/generated/examples/space_to_batch_float_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: space_to_batch_float_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 0, 0, 5, 0, 0, 0, 6, 0, 1, 0, 7, 0, 2, 0, 8, 0, 3, 0, 9, 0, 4, 0, 10}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/space_to_batch_float_3_relaxed.example.cpp b/nn/runtime/test/generated/examples/space_to_batch_float_3_relaxed.example.cpp
new file mode 100644
index 000000000..6fa614b1c
--- /dev/null
+++ b/nn/runtime/test/generated/examples/space_to_batch_float_3_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: space_to_batch_float_3_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6, 7, 8}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 1, 0, 0, 0, 7, 0, 0, 0, 2, 0, 0, 0, 8, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/space_to_batch_relaxed.example.cpp b/nn/runtime/test/generated/examples/space_to_batch_relaxed.example.cpp
new file mode 100644
index 000000000..aad722fcc
--- /dev/null
+++ b/nn/runtime/test/generated/examples/space_to_batch_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: space_to_batch_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.4f, 2.3f, 3.2f, 4.1f, 5.4f, 6.3f, 7.2f, 8.1f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.4f, 2.3f, 3.2f, 4.1f, 5.4f, 6.3f, 7.2f, 8.1f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/squeeze_float_1_relaxed.example.cpp b/nn/runtime/test/generated/examples/squeeze_float_1_relaxed.example.cpp
new file mode 100644
index 000000000..135d04e31
--- /dev/null
+++ b/nn/runtime/test/generated/examples/squeeze_float_1_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: squeeze_float_1_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/squeeze_relaxed.example.cpp b/nn/runtime/test/generated/examples/squeeze_relaxed.example.cpp
new file mode 100644
index 000000000..b73e0058f
--- /dev/null
+++ b/nn/runtime/test/generated/examples/squeeze_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: squeeze_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.4f, 2.3f, 3.2f, 4.1f, 5.4f, 6.3f, 7.2f, 8.1f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.4f, 2.3f, 3.2f, 4.1f, 5.4f, 6.3f, 7.2f, 8.1f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_10_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_10_relaxed.example.cpp
new file mode 100644
index 000000000..6db5d97b0
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_10_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_10_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {4, 5, 6}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_11_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_11_relaxed.example.cpp
new file mode 100644
index 000000000..9780e412e
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_11_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_11_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_1_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_1_relaxed.example.cpp
new file mode 100644
index 000000000..79324898e
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_1_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_1_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {2, 3}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_2_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_2_relaxed.example.cpp
new file mode 100644
index 000000000..fd1e19aed
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_2_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_2_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {2, 3}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_3_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_3_relaxed.example.cpp
new file mode 100644
index 000000000..3328aa6f2
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_3_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_3_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_4_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_4_relaxed.example.cpp
new file mode 100644
index 000000000..bd455137e
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_4_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_4_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {2}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_5_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_5_relaxed.example.cpp
new file mode 100644
index 000000000..3ceaedb7f
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_5_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_5_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_6_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_6_relaxed.example.cpp
new file mode 100644
index 000000000..4ad87ee99
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_6_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_6_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {2, 3, 4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_7_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_7_relaxed.example.cpp
new file mode 100644
index 000000000..cd2d40ce4
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_7_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_7_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {3, 2, 1}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_8_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_8_relaxed.example.cpp
new file mode 100644
index 000000000..bab6a744a
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_8_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_8_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {6, 5, 4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_float_9_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_float_9_relaxed.example.cpp
new file mode 100644
index 000000000..4c3b80ea5
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_float_9_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_float_9_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 3, 4, 5, 6}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2, 4, 5}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/strided_slice_relaxed.example.cpp b/nn/runtime/test/generated/examples/strided_slice_relaxed.example.cpp
new file mode 100644
index 000000000..b6054ea7f
--- /dev/null
+++ b/nn/runtime/test/generated/examples/strided_slice_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: strided_slice_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/sub_broadcast_float_relaxed.example.cpp b/nn/runtime/test/generated/examples/sub_broadcast_float_relaxed.example.cpp
new file mode 100644
index 000000000..762d7c3cc
--- /dev/null
+++ b/nn/runtime/test/generated/examples/sub_broadcast_float_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: sub_broadcast_float_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1, 2}}, {1, {1, 2, 3, 4}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 0, -2, -2}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/sub_relaxed.example.cpp b/nn/runtime/test/generated/examples/sub_relaxed.example.cpp
new file mode 100644
index 000000000..f1575f129
--- /dev/null
+++ b/nn/runtime/test/generated/examples/sub_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: sub_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {2.0f, -4.0f, 8.0f, -16.0f}}, {1, {2.0f, -2.0f, -4.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0.0f, -2.0f, 12.0f, -20.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/transpose_float_1_relaxed.example.cpp b/nn/runtime/test/generated/examples/transpose_float_1_relaxed.example.cpp
new file mode 100644
index 000000000..38412fd3b
--- /dev/null
+++ b/nn/runtime/test/generated/examples/transpose_float_1_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: transpose_float_1_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {0, 1, 2, 3, 4, 20, 21, 22, 23, 24, 40, 41, 42, 43, 44, 60, 61, 62, 63, 64, 80, 81, 82, 83, 84, 100, 101, 102, 103, 104, 5, 6, 7, 8, 9, 25, 26, 27, 28, 29, 45, 46, 47, 48, 49, 65, 66, 67, 68, 69, 85, 86, 87, 88, 89, 105, 106, 107, 108, 109, 10, 11, 12, 13, 14, 30, 31, 32, 33, 34, 50, 51, 52, 53, 54, 70, 71, 72, 73, 74, 90, 91, 92, 93, 94, 110, 111, 112, 113, 114, 15, 16, 17, 18, 19, 35, 36, 37, 38, 39, 55, 56, 57, 58, 59, 75, 76, 77, 78, 79, 95, 96, 97, 98, 99, 115, 116, 117, 118, 119}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/examples/transpose_relaxed.example.cpp b/nn/runtime/test/generated/examples/transpose_relaxed.example.cpp
new file mode 100644
index 000000000..b4da88feb
--- /dev/null
+++ b/nn/runtime/test/generated/examples/transpose_relaxed.example.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: transpose_relaxed.mod.py). Do not edit
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 2.0f, 3.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+ // int -> FLOAT32 map
+ {{0, {1.0f, 3.0f, 2.0f, 4.0f}}},
+ // int -> INT32 map
+ {},
+ // int -> QUANT8_ASYMM map
+ {}
+}
+}, // End of an example
diff --git a/nn/runtime/test/generated/models/avg_pool_float_5_relaxed.model.cpp b/nn/runtime/test/generated/models/avg_pool_float_5_relaxed.model.cpp
new file mode 100644
index 000000000..0002efd89
--- /dev/null
+++ b/nn/runtime/test/generated/models/avg_pool_float_5_relaxed.model.cpp
@@ -0,0 +1,32 @@
+// Generated file (from: avg_pool_float_5_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type1(Type::INT32, {});
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 1, 2, 1});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 4, 1});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto cons2 = model->addOperand(&type1);
+ auto pad_same = model->addOperand(&type1);
+ auto act_none = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t cons2_init[] = {2};
+ model->setOperandValue(cons2, cons2_init, sizeof(int32_t) * 1);
+ static int32_t pad_same_init[] = {1};
+ model->setOperandValue(pad_same, pad_same_init, sizeof(int32_t) * 1);
+ static int32_t act_none_init[] = {0};
+ model->setOperandValue(act_none, act_none_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_AVERAGE_POOL_2D, {op1, pad_same, cons2, cons2, cons2, cons2, act_none}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/batch_to_space_float_1_relaxed.model.cpp b/nn/runtime/test/generated/models/batch_to_space_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..c53f0a04c
--- /dev/null
+++ b/nn/runtime/test/generated/models/batch_to_space_float_1_relaxed.model.cpp
@@ -0,0 +1,26 @@
+// Generated file (from: batch_to_space_float_1_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 4, 4, 1});
+ OperandType type0(Type::TENSOR_FLOAT32, {4, 2, 2, 1});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto block_size = model->addOperand(&type1);
+ auto output = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t block_size_init[] = {2, 2};
+ model->setOperandValue(block_size, block_size_init, sizeof(int32_t) * 2);
+ model->addOperation(ANEURALNETWORKS_BATCH_TO_SPACE_ND, {input, block_size}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/batch_to_space_relaxed.model.cpp b/nn/runtime/test/generated/models/batch_to_space_relaxed.model.cpp
new file mode 100644
index 000000000..d3180cc0c
--- /dev/null
+++ b/nn/runtime/test/generated/models/batch_to_space_relaxed.model.cpp
@@ -0,0 +1,26 @@
+// Generated file (from: batch_to_space_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type0(Type::TENSOR_FLOAT32, {4, 1, 1, 2});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto block_size = model->addOperand(&type1);
+ auto output = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t block_size_init[] = {2, 2};
+ model->setOperandValue(block_size, block_size_init, sizeof(int32_t) * 2);
+ model->addOperation(ANEURALNETWORKS_BATCH_TO_SPACE_ND, {input, block_size}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/conv_float_2_relaxed.model.cpp b/nn/runtime/test/generated/models/conv_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..84c5f6c42
--- /dev/null
+++ b/nn/runtime/test/generated/models/conv_float_2_relaxed.model.cpp
@@ -0,0 +1,39 @@
+// Generated file (from: conv_float_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type3(Type::INT32, {});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 3, 3, 1});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 3, 4, 1});
+ OperandType type2(Type::TENSOR_FLOAT32, {1});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto pad_same = model->addOperand(&type3);
+ auto act_relu = model->addOperand(&type3);
+ auto stride = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type0);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 4.0f, 7.0f, 2.0f, 5.0f, 8.0f, 3.0f, 6.0f, 9.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 9);
+ static float op3_init[] = {-200.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 1);
+ static int32_t pad_same_init[] = {1};
+ model->setOperandValue(pad_same, pad_same_init, sizeof(int32_t) * 1);
+ static int32_t act_relu_init[] = {1};
+ model->setOperandValue(act_relu, act_relu_init, sizeof(int32_t) * 1);
+ static int32_t stride_init[] = {1};
+ model->setOperandValue(stride, stride_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op1, op2, op3, pad_same, stride, stride, act_relu}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/depthwise_conv2d_float_2_relaxed.model.cpp b/nn/runtime/test/generated/models/depthwise_conv2d_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..5875093e3
--- /dev/null
+++ b/nn/runtime/test/generated/models/depthwise_conv2d_float_2_relaxed.model.cpp
@@ -0,0 +1,43 @@
+// Generated file (from: depthwise_conv2d_float_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_FLOAT32, {1, 2, 1, 4});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 2, 2, 4});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 3, 2, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {4});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto pad_valid = model->addOperand(&type3);
+ auto act_none = model->addOperand(&type3);
+ auto stride = model->addOperand(&type3);
+ auto channelMultiplier = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type4);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, -9.0f, 10.0f, -11.0f, 12.0f, 5.0f, 6.0f, 7.0f, 8.0f, 13.0f, -14.0f, 15.0f, -16.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {1.0f, 2.0f, 3.0f, 4.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 4);
+ static int32_t pad_valid_init[] = {2};
+ model->setOperandValue(pad_valid, pad_valid_init, sizeof(int32_t) * 1);
+ static int32_t act_none_init[] = {0};
+ model->setOperandValue(act_none, act_none_init, sizeof(int32_t) * 1);
+ static int32_t stride_init[] = {1};
+ model->setOperandValue(stride, stride_init, sizeof(int32_t) * 1);
+ static int32_t channelMultiplier_init[] = {2};
+ model->setOperandValue(channelMultiplier, channelMultiplier_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_DEPTHWISE_CONV_2D, {op1, op2, op3, pad_valid, stride, stride, channelMultiplier, act_none}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/depthwise_conv2d_float_large_2_relaxed.model.cpp b/nn/runtime/test/generated/models/depthwise_conv2d_float_large_2_relaxed.model.cpp
new file mode 100644
index 000000000..f736d8f83
--- /dev/null
+++ b/nn/runtime/test/generated/models/depthwise_conv2d_float_large_2_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: depthwise_conv2d_float_large_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {1, 1, 1, 4});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 4});
+ OperandType type1(Type::TENSOR_FLOAT32, {4});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type0);
+ auto op3 = model->addOperand(&type1);
+ auto pad0 = model->addOperand(&type2);
+ auto act = model->addOperand(&type2);
+ auto stride = model->addOperand(&type2);
+ auto channelMultiplier = model->addOperand(&type2);
+ auto op4 = model->addOperand(&type3);
+ // Phase 2, operations
+ static float op2_init[] = {0.25f, 0.0f, 10.0f, 100.0f, 0.25f, 1.0f, 20.0f, 100.0f, 0.25f, 0.0f, 30.0f, 100.0f, 0.25f, 1.0f, 40.0f, 100.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {600000.0f, 700000.0f, 800000.0f, 900000.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 4);
+ static int32_t pad0_init[] = {0};
+ model->setOperandValue(pad0, pad0_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t stride_init[] = {1};
+ model->setOperandValue(stride, stride_init, sizeof(int32_t) * 1);
+ static int32_t channelMultiplier_init[] = {1};
+ model->setOperandValue(channelMultiplier, channelMultiplier_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_DEPTHWISE_CONV_2D, {op1, op2, op3, pad0, pad0, pad0, pad0, stride, stride, channelMultiplier, act}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.model.cpp b/nn/runtime/test/generated/models/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.model.cpp
new file mode 100644
index 000000000..d24074c7b
--- /dev/null
+++ b/nn/runtime/test/generated/models/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.model.cpp
@@ -0,0 +1,39 @@
+// Generated file (from: depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type3(Type::INT32, {});
+ OperandType type4(Type::TENSOR_FLOAT32, {1, 1, 1, 4});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 3});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 2, 2, 4});
+ OperandType type2(Type::TENSOR_FLOAT32, {4});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto pad0 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto stride = model->addOperand(&type3);
+ auto channelMultiplier = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type4);
+ // Phase 2, operations
+ static int32_t pad0_init[] = {0};
+ model->setOperandValue(pad0, pad0_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t stride_init[] = {1};
+ model->setOperandValue(stride, stride_init, sizeof(int32_t) * 1);
+ static int32_t channelMultiplier_init[] = {1};
+ model->setOperandValue(channelMultiplier, channelMultiplier_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_DEPTHWISE_CONV_2D, {op1, op2, op3, pad0, pad0, pad0, pad0, stride, stride, channelMultiplier, act}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2, op3},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/depthwise_conv2d_float_relaxed.model.cpp b/nn/runtime/test/generated/models/depthwise_conv2d_float_relaxed.model.cpp
new file mode 100644
index 000000000..33eb3d341
--- /dev/null
+++ b/nn/runtime/test/generated/models/depthwise_conv2d_float_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: depthwise_conv2d_float_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type3(Type::INT32, {});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 2, 2, 4});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 3, 3, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {4});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto pad0 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto stride = model->addOperand(&type3);
+ auto channelMultiplier = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type1);
+ // Phase 2, operations
+ static float op2_init[] = {0.25f, 0.0f, 0.2f, 0.0f, 0.25f, 0.0f, 0.0f, 0.3f, 0.25f, 0.0f, 0.0f, 0.0f, 0.25f, 0.1f, 0.0f, 0.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 16);
+ static float op3_init[] = {1.0f, 2.0f, 3.0f, 4.0f};
+ model->setOperandValue(op3, op3_init, sizeof(float) * 4);
+ static int32_t pad0_init[] = {0};
+ model->setOperandValue(pad0, pad0_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t stride_init[] = {1};
+ model->setOperandValue(stride, stride_init, sizeof(int32_t) * 1);
+ static int32_t channelMultiplier_init[] = {2};
+ model->setOperandValue(channelMultiplier, channelMultiplier_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_DEPTHWISE_CONV_2D, {op1, op2, op3, pad0, pad0, pad0, pad0, stride, stride, channelMultiplier, act}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/depthwise_conv2d_float_weights_as_inputs_relaxed.model.cpp b/nn/runtime/test/generated/models/depthwise_conv2d_float_weights_as_inputs_relaxed.model.cpp
new file mode 100644
index 000000000..281add460
--- /dev/null
+++ b/nn/runtime/test/generated/models/depthwise_conv2d_float_weights_as_inputs_relaxed.model.cpp
@@ -0,0 +1,38 @@
+// Generated file (from: depthwise_conv2d_float_weights_as_inputs_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type3(Type::INT32, {});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 2, 2, 4});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 3, 3, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {4});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto pad0 = model->addOperand(&type3);
+ auto act = model->addOperand(&type3);
+ auto stride = model->addOperand(&type3);
+ auto channelMultiplier = model->addOperand(&type3);
+ auto op4 = model->addOperand(&type1);
+ // Phase 2, operations
+ static int32_t pad0_init[] = {0};
+ model->setOperandValue(pad0, pad0_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ static int32_t stride_init[] = {1};
+ model->setOperandValue(stride, stride_init, sizeof(int32_t) * 1);
+ static int32_t channelMultiplier_init[] = {2};
+ model->setOperandValue(channelMultiplier, channelMultiplier_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_DEPTHWISE_CONV_2D, {op1, op2, op3, pad0, pad0, pad0, pad0, stride, stride, channelMultiplier, act}, {op4});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2, op3},
+ {op4});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/dequantize_relaxed.model.cpp b/nn/runtime/test/generated/models/dequantize_relaxed.model.cpp
new file mode 100644
index 000000000..e1f924c37
--- /dev/null
+++ b/nn/runtime/test/generated/models/dequantize_relaxed.model.cpp
@@ -0,0 +1,22 @@
+// Generated file (from: dequantize_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 2, 2, 1});
+ OperandType type0(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 1}, 1.f, 0);
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ // Phase 2, operations
+ model->addOperation(ANEURALNETWORKS_DEQUANTIZE, {op1}, {op2});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op2});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
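
For context on what every one of these generated `*_relaxed` variants is exercising: the `Model` object in the test harness wraps the public NNAPI API, and the single call `relaxComputationFloat32toFloat16(true)` is what distinguishes a `_relaxed` test from its ordinary V1_0/V1_1 counterpart. Below is a minimal, hedged sketch (not part of this change, and not generated code) of the same idea expressed directly against the NNAPI C API; the tiny two-element ADD model and the operand indices are illustrative assumptions chosen only to keep the example short.

// Hedged sketch: build a small TENSOR_FLOAT32 ADD model and opt in to relaxed
// fp16 computation, which is the behaviour the *_relaxed tests above verify.
#include <android/NeuralNetworks.h>

static int BuildRelaxedAddModel(ANeuralNetworksModel** outModel) {
    ANeuralNetworksModel* model = nullptr;
    int status = ANeuralNetworksModel_create(&model);
    if (status != ANEURALNETWORKS_NO_ERROR) return status;

    // ANeuralNetworksOperandType fields, in order: type, dimensionCount,
    // dimensions, scale, zeroPoint.
    uint32_t dims[1] = {2};
    ANeuralNetworksOperandType tensorType = {ANEURALNETWORKS_TENSOR_FLOAT32, 1, dims, 0.0f, 0};
    ANeuralNetworksOperandType scalarType = {ANEURALNETWORKS_INT32, 0, nullptr, 0.0f, 0};

    // Phase 1, operands: two inputs, a fused-activation scalar, one output (indices 0..3).
    ANeuralNetworksModel_addOperand(model, &tensorType);  // 0: input A
    ANeuralNetworksModel_addOperand(model, &tensorType);  // 1: input B
    ANeuralNetworksModel_addOperand(model, &scalarType);  // 2: activation
    ANeuralNetworksModel_addOperand(model, &tensorType);  // 3: output

    // Phase 2, operations.
    int32_t actNone = ANEURALNETWORKS_FUSED_NONE;
    ANeuralNetworksModel_setOperandValue(model, 2, &actNone, sizeof(actNone));
    uint32_t addInputs[3] = {0, 1, 2};
    uint32_t addOutputs[1] = {3};
    ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_ADD, 3, addInputs, 1, addOutputs);

    // Phase 3, inputs and outputs.
    uint32_t modelInputs[2] = {0, 1};
    uint32_t modelOutputs[1] = {3};
    ANeuralNetworksModel_identifyInputsAndOutputs(model, 2, modelInputs, 1, modelOutputs);

    // Phase 4: the one-line difference between a plain test and its _relaxed
    // variant -- allow drivers to evaluate TENSOR_FLOAT32 operations with the
    // range and precision of IEEE 754 fp16.
    ANeuralNetworksModel_relaxComputationFloat32toFloat16(model, true);

    status = ANeuralNetworksModel_finish(model);
    if (status != ANEURALNETWORKS_NO_ERROR) {
        ANeuralNetworksModel_free(model);
        return status;
    }
    *outModel = model;
    return ANEURALNETWORKS_NO_ERROR;
}

The generated files that follow the same four-phase structure differ only in operand shapes, constant weights, and the operation being tested; the relaxed flag is always set in Phase 4.
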
diff --git a/nn/runtime/test/generated/models/div_broadcast_float_relaxed.model.cpp b/nn/runtime/test/generated/models/div_broadcast_float_relaxed.model.cpp
new file mode 100644
index 000000000..d8f2c1efe
--- /dev/null
+++ b/nn/runtime/test/generated/models/div_broadcast_float_relaxed.model.cpp
@@ -0,0 +1,27 @@
+// Generated file (from: div_broadcast_float_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto act = model->addOperand(&type2);
+ auto op3 = model->addOperand(&type1);
+ // Phase 2, operations
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_DIV, {op1, op2, act}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/div_relaxed.model.cpp b/nn/runtime/test/generated/models/div_relaxed.model.cpp
new file mode 100644
index 000000000..6634f18fb
--- /dev/null
+++ b/nn/runtime/test/generated/models/div_relaxed.model.cpp
@@ -0,0 +1,26 @@
+// Generated file (from: div_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type1(Type::INT32, {});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 1});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type0);
+ auto act = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type0);
+ // Phase 2, operations
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_DIV, {op1, op2, act}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/fully_connected_float_2_relaxed.model.cpp b/nn/runtime/test/generated/models/fully_connected_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..44db385b0
--- /dev/null
+++ b/nn/runtime/test/generated/models/fully_connected_float_2_relaxed.model.cpp
@@ -0,0 +1,34 @@
+// Generated file (from: fully_connected_float_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type4(Type::INT32, {});
+ OperandType type1(Type::TENSOR_FLOAT32, {16, 8});
+ OperandType type2(Type::TENSOR_FLOAT32, {16});
+ OperandType type3(Type::TENSOR_FLOAT32, {2, 16});
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 8});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto b0 = model->addOperand(&type2);
+ auto op3 = model->addOperand(&type3);
+ auto act_relu = model->addOperand(&type4);
+ // Phase 2, operations
+ static float op2_init[] = {0.091327f, 0.103366f, -0.316505f, -0.08312f, 0.149366f, -0.196636f, -0.123672f, 0.0628f, 0.063031f, 0.19167f, -0.062001f, -0.061504f, -0.275581f, 0.059388f, -0.118497f, -0.079224f, 0.109758f, 0.008307f, -0.062657f, -0.060962f, -0.049782f, -0.106719f, -0.319482f, -0.10365f, 0.266455f, 0.051517f, -0.123448f, 0.322464f, 0.043282f, -0.173782f, -0.190381f, 0.002013f, 0.096086f, 0.131157f, 0.031164f, 0.100638f, -0.312191f, -0.080923f, -0.101318f, -0.116614f, 0.142238f, 0.08654f, -0.139154f, 0.174268f, -0.073161f, 0.080072f, 0.006874f, 0.229382f, -0.104321f, -0.176035f, -0.208587f, -0.001019f, -0.162032f, 0.080824f, -0.025021f, 0.07446f, -0.252595f, -0.16175f, -0.136403f, 0.008308f, 0.00571f, 0.0966f, 0.289839f, 0.218816f, -0.304651f, -0.070958f, 0.054598f, 0.147113f, -0.139112f, -0.072798f, -0.163335f, -0.167863f, -0.128762f, -0.03578f, 0.117262f, 0.017177f, 0.263335f, -0.176612f, 0.262961f, -0.093654f, -0.339283f, 0.333071f, 0.180827f, 0.287583f, 0.06635f, -0.197947f, -0.114449f, -0.236035f, 0.103532f, -0.034284f, 0.093299f, -0.145361f, 0.054001f, 0.25057f, 0.15701f, -0.14348f, -0.139061f, -0.048873f, 0.067557f, 0.139038f, 0.324106f, 0.227041f, 0.037793f, -0.225747f, -0.241619f, 0.357835f, 0.135762f, -0.306764f, -0.125982f, 0.091916f, 0.266587f, 0.030135f, 0.265148f, 0.141627f, 0.02012f, 0.083815f, -0.124556f, -0.100124f, -0.048159f, 0.181172f, 0.302309f, -0.041084f, 0.146334f, -0.061511f, -0.232605f, 0.281324f, 0.145408f, -0.221897f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 128);
+ static float b0_init[] = {-0.160594f, 0.20577f, -0.078307f, -0.077984f, 0.001937f, 0.01586f, 0.03681f, 0.012346f, 0.001028f, 0.038551f, 0.075415f, 0.020804f, 0.048478f, -0.03227f, 0.175688f, -0.085662f};
+ model->setOperandValue(b0, b0_init, sizeof(float) * 16);
+ static int32_t act_relu_init[] = {1};
+ model->setOperandValue(act_relu, act_relu_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_FULLY_CONNECTED, {op1, op2, b0, act_relu}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/fully_connected_float_4d_simple.model.cpp b/nn/runtime/test/generated/models/fully_connected_float_4d_simple.model.cpp
index 46932f5aa..aa645d966 100644
--- a/nn/runtime/test/generated/models/fully_connected_float_4d_simple.model.cpp
+++ b/nn/runtime/test/generated/models/fully_connected_float_4d_simple.model.cpp
@@ -23,8 +23,6 @@ void CreateModel(Model *model) {
model->identifyInputsAndOutputs(
{op1},
{op3});
- // Phase 4: set relaxed execution
- model->relaxComputationFloat32toFloat16(true);
assert(model->isValid());
}
diff --git a/nn/runtime/test/generated/models/fully_connected_float_4d_simple_relaxed.model.cpp b/nn/runtime/test/generated/models/fully_connected_float_4d_simple_relaxed.model.cpp
new file mode 100644
index 000000000..496e066a2
--- /dev/null
+++ b/nn/runtime/test/generated/models/fully_connected_float_4d_simple_relaxed.model.cpp
@@ -0,0 +1,34 @@
+// Generated file (from: fully_connected_float_4d_simple_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type4(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {2, 3});
+ OperandType type1(Type::TENSOR_FLOAT32, {3, 10});
+ OperandType type2(Type::TENSOR_FLOAT32, {3});
+ OperandType type0(Type::TENSOR_FLOAT32, {4, 1, 5, 1});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto b0 = model->addOperand(&type2);
+ auto op3 = model->addOperand(&type3);
+ auto act = model->addOperand(&type4);
+ // Phase 2, operations
+ static float op2_init[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 30);
+ static float b0_init[] = {1.0f, 2.0f, 3.0f};
+ model->setOperandValue(b0, b0_init, sizeof(float) * 3);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_FULLY_CONNECTED, {op1, op2, b0, act}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/fully_connected_float_large_relaxed.model.cpp b/nn/runtime/test/generated/models/fully_connected_float_large_relaxed.model.cpp
new file mode 100644
index 000000000..a4a719f8f
--- /dev/null
+++ b/nn/runtime/test/generated/models/fully_connected_float_large_relaxed.model.cpp
@@ -0,0 +1,33 @@
+// Generated file (from: fully_connected_float_large_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type3(Type::INT32, {});
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 1});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 5});
+ OperandType type1(Type::TENSOR_FLOAT32, {1});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type0);
+ auto b0 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto act = model->addOperand(&type3);
+ // Phase 2, operations
+ static float op2_init[] = {2.0f, 3.0f, 4.0f, 5.0f, 6.0f};
+ model->setOperandValue(op2, op2_init, sizeof(float) * 5);
+ static float b0_init[] = {900000.0f};
+ model->setOperandValue(b0, b0_init, sizeof(float) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_FULLY_CONNECTED, {op1, op2, b0, act}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/fully_connected_float_large_weights_as_inputs_relaxed.model.cpp b/nn/runtime/test/generated/models/fully_connected_float_large_weights_as_inputs_relaxed.model.cpp
new file mode 100644
index 000000000..a0161d15f
--- /dev/null
+++ b/nn/runtime/test/generated/models/fully_connected_float_large_weights_as_inputs_relaxed.model.cpp
@@ -0,0 +1,29 @@
+// Generated file (from: fully_connected_float_large_weights_as_inputs_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type3(Type::INT32, {});
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 1});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 5});
+ OperandType type1(Type::TENSOR_FLOAT32, {1});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type0);
+ auto b0 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ auto act = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_FULLY_CONNECTED, {op1, op2, b0, act}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2, b0},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/l2_normalization_2_relaxed.model.cpp b/nn/runtime/test/generated/models/l2_normalization_2_relaxed.model.cpp
new file mode 100644
index 000000000..cde12578b
--- /dev/null
+++ b/nn/runtime/test/generated/models/l2_normalization_2_relaxed.model.cpp
@@ -0,0 +1,21 @@
+// Generated file (from: l2_normalization_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 1, 1, 6});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type0);
+ // Phase 2, operations
+ model->addOperation(ANEURALNETWORKS_L2_NORMALIZATION, {op1}, {op2});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op2});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/l2_pool_float_2_relaxed.model.cpp b/nn/runtime/test/generated/models/l2_pool_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..597cfeda5
--- /dev/null
+++ b/nn/runtime/test/generated/models/l2_pool_float_2_relaxed.model.cpp
@@ -0,0 +1,32 @@
+// Generated file (from: l2_pool_float_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type1(Type::INT32, {});
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 1, 2, 1});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 4, 1});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto cons2 = model->addOperand(&type1);
+ auto pad_same = model->addOperand(&type1);
+ auto act_none = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t cons2_init[] = {2};
+ model->setOperandValue(cons2, cons2_init, sizeof(int32_t) * 1);
+ static int32_t pad_same_init[] = {1};
+ model->setOperandValue(pad_same, pad_same_init, sizeof(int32_t) * 1);
+ static int32_t act_none_init[] = {0};
+ model->setOperandValue(act_none, act_none_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_L2_POOL_2D, {op1, pad_same, cons2, cons2, cons2, cons2, act_none}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/l2_pool_float_large_relaxed.model.cpp b/nn/runtime/test/generated/models/l2_pool_float_large_relaxed.model.cpp
new file mode 100644
index 000000000..11bc8cfcc
--- /dev/null
+++ b/nn/runtime/test/generated/models/l2_pool_float_large_relaxed.model.cpp
@@ -0,0 +1,41 @@
+// Generated file (from: l2_pool_float_large_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type1(Type::INT32, {});
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 1, 1, 3});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 3});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto filter_width = model->addOperand(&type1);
+ auto filter_height = model->addOperand(&type1);
+ auto stride_width = model->addOperand(&type1);
+ auto stride_height = model->addOperand(&type1);
+ auto pad0 = model->addOperand(&type1);
+ auto act = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t filter_width_init[] = {2};
+ model->setOperandValue(filter_width, filter_width_init, sizeof(int32_t) * 1);
+ static int32_t filter_height_init[] = {2};
+ model->setOperandValue(filter_height, filter_height_init, sizeof(int32_t) * 1);
+ static int32_t stride_width_init[] = {1};
+ model->setOperandValue(stride_width, stride_width_init, sizeof(int32_t) * 1);
+ static int32_t stride_height_init[] = {1};
+ model->setOperandValue(stride_height, stride_height_init, sizeof(int32_t) * 1);
+ static int32_t pad0_init[] = {0};
+ model->setOperandValue(pad0, pad0_init, sizeof(int32_t) * 1);
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_L2_POOL_2D, {op1, pad0, pad0, pad0, pad0, stride_width, stride_height, filter_width, filter_height, act}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/max_pool_float_4_relaxed.model.cpp b/nn/runtime/test/generated/models/max_pool_float_4_relaxed.model.cpp
new file mode 100644
index 000000000..0ea4461c1
--- /dev/null
+++ b/nn/runtime/test/generated/models/max_pool_float_4_relaxed.model.cpp
@@ -0,0 +1,32 @@
+// Generated file (from: max_pool_float_4_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type1(Type::INT32, {});
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 1, 2, 1});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 4, 1});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto cons2 = model->addOperand(&type1);
+ auto pad_same = model->addOperand(&type1);
+ auto act_none = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t cons2_init[] = {2};
+ model->setOperandValue(cons2, cons2_init, sizeof(int32_t) * 1);
+ static int32_t pad_same_init[] = {1};
+ model->setOperandValue(pad_same, pad_same_init, sizeof(int32_t) * 1);
+ static int32_t act_none_init[] = {0};
+ model->setOperandValue(act_none, act_none_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_MAX_POOL_2D, {op1, pad_same, cons2, cons2, cons2, cons2, act_none}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/mean_float_1_relaxed.model.cpp b/nn/runtime/test/generated/models/mean_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..d03939661
--- /dev/null
+++ b/nn/runtime/test/generated/models/mean_float_1_relaxed.model.cpp
@@ -0,0 +1,30 @@
+// Generated file (from: mean_float_1_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {2});
+ OperandType type0(Type::TENSOR_FLOAT32, {4, 3, 2});
+ OperandType type1(Type::TENSOR_INT32, {4});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto axis = model->addOperand(&type1);
+ auto keepDims = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t axis_init[] = {1, 0, -3, -3};
+ model->setOperandValue(axis, axis_init, sizeof(int32_t) * 4);
+ static int32_t keepDims_init[] = {0};
+ model->setOperandValue(keepDims, keepDims_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_MEAN, {input, axis, keepDims}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/mean_float_2_relaxed.model.cpp b/nn/runtime/test/generated/models/mean_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..80c7ba519
--- /dev/null
+++ b/nn/runtime/test/generated/models/mean_float_2_relaxed.model.cpp
@@ -0,0 +1,30 @@
+// Generated file (from: mean_float_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {1, 3, 1});
+ OperandType type0(Type::TENSOR_FLOAT32, {4, 3, 2});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto axis = model->addOperand(&type1);
+ auto keepDims = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t axis_init[] = {0, 2};
+ model->setOperandValue(axis, axis_init, sizeof(int32_t) * 2);
+ static int32_t keepDims_init[] = {1};
+ model->setOperandValue(keepDims, keepDims_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_MEAN, {input, axis, keepDims}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/mean_relaxed.model.cpp b/nn/runtime/test/generated/models/mean_relaxed.model.cpp
new file mode 100644
index 000000000..b420b6b7c
--- /dev/null
+++ b/nn/runtime/test/generated/models/mean_relaxed.model.cpp
@@ -0,0 +1,30 @@
+// Generated file (from: mean_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {1, 2, 1});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 1});
+ OperandType type1(Type::TENSOR_INT32, {1});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto axis = model->addOperand(&type1);
+ auto keepDims = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t axis_init[] = {2};
+ model->setOperandValue(axis, axis_init, sizeof(int32_t) * 1);
+ static int32_t keepDims_init[] = {0};
+ model->setOperandValue(keepDims, keepDims_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_MEAN, {input, axis, keepDims}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/mobilenet_224_gender_basic_fixed.model.cpp b/nn/runtime/test/generated/models/mobilenet_224_gender_basic_fixed.model.cpp
index e20b010cf..9e0fcd5b8 100644
--- a/nn/runtime/test/generated/models/mobilenet_224_gender_basic_fixed.model.cpp
+++ b/nn/runtime/test/generated/models/mobilenet_224_gender_basic_fixed.model.cpp
@@ -625,11 +625,11 @@ void CreateModel(Model *model) {
model->setOperandValue(op79, op79_init, sizeof(float) * 128);
static float op80_init[] = {0.143542f, -0.139317f, -0.0557127f, 0.00193462f, -0.0166539f, 0.216436f, 0.0694379f, 0.0774276f, -0.061387f, 0.193228f, -0.0234282f, 0.0303868f, -0.10475f, 0.225842f, -0.0953071f, 0.0518284f, -0.177148f, 0.0706816f, -0.116163f, 0.23116f, 0.0276252f, 0.361075f, 0.114999f, -0.179934f, 0.0362276f, 0.098386f, 0.160729f, -0.203046f, -0.0214891f, -0.0217361f, -0.0319598f, -0.0163527f, 0.0405451f, -0.0441358f, -0.0010904f, -0.219479f, -0.0129676f, 0.0840713f, -0.334326f, -0.275725f, 0.00787852f, 0.102421f, 0.0265449f, -0.0439835f, -0.160807f, 0.128618f, 0.00937286f, 0.0634051f, 0.17013f, -0.149011f, -0.148228f, -0.185353f, -0.230108f, 0.0953904f, -0.0328099f, 0.08806f, 0.164153f, -0.155663f, 0.00662038f, 0.106636f, -0.110083f, -0.123108f, -0.0422361f, 0.00699063f, 0.0286107f, 0.0872556f, 0.0372735f, 0.129417f, 0.0114595f, 0.127069f, 0.0845895f, -0.103232f, -0.084375f, 0.0507973f, 0.0924024f, -0.0443189f, -0.0373084f, -0.0547004f, -0.107631f, -0.0137342f, -0.0308677f, 0.0818465f, 0.0348359f, -0.108934f, -0.0827724f, 0.0323507f, 0.0412687f, 0.0808453f, -0.051265f, 0.0951301f, -0.144128f, 0.0754425f, -0.127725f, 0.0168919f, -0.0751399f, -0.0739095f, 0.00380989f, -0.118355f, 0.131179f, 0.123024f, 0.13949f, -0.0452833f, 0.0908636f, -0.044923f, 0.0656833f, 0.154517f, -0.0803701f, -0.0962871f, 0.0389703f, 0.208096f, -0.0663933f, -0.131471f, -0.0790224f, 0.101245f, 0.0744562f, 0.210848f, -0.0880954f, 0.28261f, -0.0400531f, -0.0429046f, 0.115284f, 0.0810933f, -0.0732279f, -0.0363539f, 0.0390622f, 0.194268f, -0.125276f, 0.0660326f, -0.0191896f, -0.107549f, -0.068292f, 0.0296083f, -0.148026f, 0.00504009f, 0.136341f, 0.0185907f, 0.0354229f, 0.0570693f, -0.0652823f, 0.053643f, -0.226353f, -0.0586207f, 0.102557f, -0.0804407f, -0.0549814f, -0.0189678f, 0.0337161f, -0.240366f, -0.0152426f, -0.032301f, 0.0503872f, -0.00353695f, 0.0145041f, -0.0507668f, 0.0484236f, 0.0752401f, 0.131723f, 0.137027f, 0.0677545f, 0.149137f, -0.136712f, -0.047893f, -0.196069f, 0.000413097f, 0.0625105f, 0.0322905f, 0.053051f, -0.0330334f, 0.00132125f, 0.113487f, -0.120027f, 0.101515f, -0.0197327f, -0.0517467f, -0.11808f, 0.0443416f, 0.0221126f, 0.114449f, -0.0283071f, -0.139215f, 0.0598481f, 0.100257f, 0.0225039f, 0.0457186f, 0.0902391f, -0.0631196f, 0.0257833f, 0.112569f, -0.19343f, -0.0384445f, 0.0758919f, -0.0712537f, 0.00513168f, 0.151857f, -0.142854f, -0.0268036f, 0.0715712f, -0.0574248f, -0.03567f, -0.166058f, 0.0211768f, -0.119797f, -0.11925f, -0.0453431f, 0.0293065f, 0.120933f, 0.00402085f, 0.109672f, 0.134472f, 0.195219f, 0.180352f, -0.0515071f, -0.0545261f, 0.0322048f, 0.012976f, 0.0343293f, -0.0513619f, -0.0167084f, -0.160956f, -0.0328522f, -0.0399779f, 0.114662f, -0.0672433f, 0.125241f, -0.102642f, 0.0559623f, 0.13515f, 0.066221f, 0.0132642f, 0.0813321f, -0.0672475f, -0.0972056f, 0.0324055f, 0.0954416f, 0.0394797f, -0.0865308f, 0.0588531f, 0.0190956f, 0.102544f, 0.0641314f, 0.05755f, 0.114106f, 0.0457606f, -0.0910149f, -0.143913f, 0.194996f, 0.0472138f, 0.0982623f, 0.0933318f, -0.0164617f, 0.167852f, 0.0858759f, 0.024742f, -0.0244132f, -0.0391241f, -0.154507f, 0.232425f, 0.0863314f, -0.0912234f, -0.0686242f, -0.0843728f, -0.0843567f, 0.0455852f, 0.0630358f, -0.092492f, -0.22545f, 0.0876258f, -0.0352342f, -0.359393f, -0.0194426f, 0.294886f, -0.0433901f, 0.0325373f, -0.324941f, -0.0427619f, 0.0448717f, -0.15146f, 0.0438511f, -0.220376f, -0.0819532f, -0.0174128f, 0.100561f, 0.0430337f, 0.0414014f, 0.121123f, -0.0788664f, -0.168092f, 0.033273f, -0.11762f, -0.0747128f, 0.0525967f, -0.150569f, 
-0.236359f, 0.278161f, 0.0885922f, 0.244919f, -0.168161f, 0.0686316f, 0.012994f, -0.0216511f, -0.230709f, -0.0114048f, 0.0636575f, -0.0553088f, -0.192013f, 0.0983257f, 0.0901875f, -0.0105777f, -0.0940917f, 0.221411f, 0.0282917f, -0.0284609f, 0.0485826f, -0.0397148f, -0.101875f, 0.0899159f, -0.114372f, -0.0717718f, -0.0650426f, 0.0715851f, 0.173865f, -0.0590554f, 0.240822f, -0.0405229f, 0.187867f, -0.160394f, -0.117823f, 0.0841448f, 0.0290731f, -0.129093f, -0.158988f, -0.0964775f, -0.106913f, -0.0510117f, -0.18104f, 0.0471442f, 0.280828f, 0.218015f, 0.0462861f, -0.0360041f, -0.0760816f, -0.101383f, 0.140904f, -0.00188858f, 0.0512563f, 0.0795458f, -0.0592014f, -0.0480013f, -0.198783f, -0.0743156f, 0.00823776f, 0.00302121f, -0.158797f, -0.337508f, -0.0621661f, -0.0796384f, 0.0180529f, 0.282215f, 0.118301f, 0.0741615f, -0.143692f, 0.091673f, -0.0364806f, -0.0114767f, 0.0780368f, -0.0897321f, -0.152821f, -0.0671889f, -0.0692471f, 0.0876068f, -0.0775014f, -0.0218678f, 0.0988145f, 0.0365032f, 0.0649689f, 0.207876f, -0.0245112f, -0.154562f, -0.10886f, 0.0325802f, -0.24719f, 0.0370411f, -0.0360635f, -0.00888619f, -0.242136f, -0.0329173f, 0.0785577f, 0.271041f, -0.0185736f, -0.108456f, -0.00761614f, -0.0269069f, -0.00430044f, 0.0505398f, -0.000320044f, 0.0520941f, -0.159526f, -0.124799f, -0.143481f, -0.183575f, -0.063532f, -0.0748322f, 0.101428f, -0.0520108f, 0.063462f, -0.00296892f, 0.101294f, 0.153199f, 0.0206717f, -0.103064f, -0.179984f, -0.177949f, 0.140065f, -0.127144f, -0.0333468f, 0.0467561f, -0.00343087f, 0.180397f, -0.0121281f, -0.0672522f, 0.0404061f, 0.167426f, -0.0522661f, -0.0227105f, 0.0889145f, -0.0707926f, 0.0423966f, -0.0184403f, 0.0206349f, -0.091981f, -0.10978f, -0.0315829f, 0.0320504f, -0.0145855f, -0.112708f, 0.0282959f, 0.0218289f, 0.136329f, 0.140791f, 0.149901f, 0.0948601f, 0.0295434f, 0.0146677f, 0.0952558f, 0.0422617f, 0.0350605f, -0.0243103f, -0.0111926f, 0.108864f, 0.179619f, 0.134015f, -0.00186874f, 0.0113527f, 0.171912f, 0.0973041f, -0.00204195f, -0.00121672f, 0.0485887f, 0.187415f, 0.122391f, 0.0741455f, -0.0586289f, -0.0784467f, 0.0178936f, 0.044156f, -0.0177429f, -0.1245f, -0.0766287f, -0.0274828f, -0.209541f, -0.107082f, -0.0448177f, 0.102689f, 0.066247f, -0.207502f, 0.201752f, -0.0682319f, 0.000157785f, -0.0624622f, 0.0829125f, 0.0572581f, 0.0324885f, -0.255542f, -0.0242865f, 0.133733f, 0.125331f, 0.0444277f, -0.0740691f, -0.305656f, -0.0979331f, 0.0137294f, -0.186448f, 0.0440386f, -0.0269379f, 0.100561f, -0.0473734f, 0.0189935f, -0.110422f, 0.0765553f, -0.0533288f, 0.0557821f, 0.15486f, -0.0696687f, 0.018034f, -0.048962f, 0.26856f, 0.0694982f, 0.0779114f, -0.0491336f, -0.0715103f, -0.073851f, -0.0788125f, 0.131468f, 0.0029189f, -0.0617308f, 0.0079196f, 0.0967586f, 0.0700419f, -0.0104907f, 0.0532375f, -0.139393f, -0.0927297f, 0.0201045f, 0.0178144f, -0.0568273f, 0.117868f, 0.178675f, 0.0967953f, -0.140062f, -0.0890961f, -0.107846f, 0.176296f, -0.041206f, 0.0692927f, 0.199868f, 0.0252243f, -0.0926268f, 0.0372654f, 0.067384f, -0.0294065f, 0.0167733f, -0.0277856f, -0.00286399f, 0.0238443f, -0.00258016f, -0.0551158f, 0.0812973f, -0.0515719f, -0.0200198f, 0.0350771f, -0.141078f, 9.13782e-05f, -0.0692944f, -0.207666f, 0.0467013f, 0.0698437f, 0.0171331f, 0.0714376f, -0.0993458f, 0.0440245f, 0.212063f, -0.144622f, 0.00450367f, 0.139606f, -0.0501133f, -0.0102619f, -0.211941f, -0.127275f, 0.253289f, -0.123098f, 0.170057f, 0.092615f, -0.163819f, 0.196475f, -0.18187f, 0.0981483f, 0.0925714f, 0.121395f, 0.0132756f, 0.143471f, -0.128311f, 0.140034f, -0.0555017f, 0.142239f, 
-0.0958642f, 0.112803f, 0.0213153f, -0.0851649f, 0.0340842f, 0.343708f, 0.178607f, -0.0793312f, -0.123594f, -0.0453247f, -0.0840823f, -0.164602f, 0.00628157f, 0.168223f, 0.0182086f, 0.220889f, 0.0619126f, -0.224789f, 0.016324f, 0.105973f, 0.133024f, -0.0163574f, 0.22635f, -0.162561f, -0.0975535f, -0.148272f, -0.0956684f, 0.0381995f, 0.0676893f, -0.335305f, -0.190542f, 0.136389f, 0.0535692f, -0.0605476f, 0.0554843f, 0.0997467f, 0.00862075f, -0.226586f, -0.16497f, 0.0201992f, -0.0772015f, 0.0788386f, -0.117927f, 0.144615f, 0.00799103f, -0.0351088f, -0.0113093f, 0.132909f, -0.0358592f, -0.0122538f, 0.155776f, 0.0282121f, -0.16227f, -0.0151059f, -0.0187413f, 0.0857813f, -0.0785807f, 0.0621579f, -0.0501869f, -0.0249879f, -0.186399f, -0.178681f, 0.221563f, -0.00791696f, -0.0576498f, 0.00587176f, 0.0268011f, -0.00445795f, -0.115657f, -0.0405289f, 0.0175926f, -0.0100064f, -0.140753f, -0.0258144f, -0.0725655f, 0.234874f, -0.0863728f, 0.139723f, -0.19378f, 0.198153f, -0.000494459f, -0.173008f, 0.0953991f, 0.169061f, -0.013775f, 0.00814875f, 0.105978f, 0.0218623f, -0.0150824f, -0.00845808f, -0.202008f, -0.152247f, 0.00751242f, 0.213069f, -0.0448261f, -0.250221f, -0.104704f, -0.0217631f, 0.168254f, 0.0315712f, -0.0451418f, 0.106817f, -0.135847f, -0.262978f, -0.113509f, 0.127917f, -0.208706f, -0.121287f, 0.0771521f, -0.0467399f, 0.127534f, -0.052607f, 0.0258406f, -0.132387f, 0.0608249f, -0.0326503f, 0.140262f, 0.0132674f, -0.176895f, 0.128123f, 0.192034f, -0.104261f, -0.12386f, -0.0531352f, -0.059036f, -0.123427f, 0.090708f, 0.115284f, -0.0551993f, 0.0106573f, 0.185406f, 0.0556199f, -0.0099576f, 0.0141045f, 0.0901436f, -0.0053439f, -0.0419934f, 0.0176783f, -0.039611f, 0.31635f, 0.0688501f, -0.00859634f, 0.0807408f, 0.0353497f, 0.0402697f, -0.0472012f, -0.226557f, 0.0506318f, -0.0369554f, 0.15113f, 0.0176626f, 0.0228614f, 0.166845f, 0.167796f, 0.084194f, -0.0459883f, 0.0610326f, -0.018784f, 0.000815405f, -0.085279f, -0.104616f, -0.00875845f, 0.0192147f, 0.0114769f, -0.047343f, -0.00482453f, 0.0356457f, -0.0655179f, 0.0709537f, -0.0603955f, 0.0863833f, 0.0999315f, 0.00705398f, 0.00362584f, -0.00647712f, 0.074964f, 0.101683f, 0.0921386f, 0.0793896f, 0.0830977f, 0.0156262f, 0.0243862f, 0.016024f, 0.0701486f, 0.284074f, 0.108565f, -0.168411f, -0.0581087f, 0.177248f, 0.118877f, -0.23371f, -0.133581f, 0.0893517f, 0.0897357f, -0.0561381f, -0.0354253f, 0.020553f, 0.078302f, -0.0117755f, -0.0433998f, -0.0104717f, -0.150325f, -0.16587f, 0.00122702f, 0.0979206f, 0.00300825f, -0.0496534f, 0.130813f, 0.0659582f, 0.182842f, 0.157746f, -0.0229205f, -0.184419f, 0.0222686f, -0.00994201f, -0.303319f, -0.0104238f, -0.0299203f, 0.0216778f, -0.146331f, 0.0913194f, 0.0219254f, -0.215296f, -0.116976f, 0.165147f, -0.0710141f, -0.0461321f, -0.139086f, 0.126077f, -0.0845732f, 0.048919f, -0.127741f, -0.0226093f, -0.029325f, 0.168392f, -0.0115994f, 0.0735731f, -0.167067f, -0.0868217f, 0.0779401f, 0.0208533f, -0.00414653f, -0.0786065f, 0.080362f, 0.114412f, -0.119166f, 0.0927742f, -0.0886966f, -0.0475087f, -0.0553333f, -0.163707f, -0.00182551f, 0.00339286f, -0.0243587f, -0.133708f, 0.0435995f, 0.122672f, 0.0222556f, 0.0719079f, 0.155192f, -0.0655534f, -0.000694311f, -0.175752f, 0.166626f, 0.109869f, -0.144333f, -0.189351f, 0.0930466f, 0.0421676f, 0.121991f, 0.0242376f, -0.24684f, 0.0192133f, -0.00721696f, 0.0515523f, 0.0182745f, -0.179138f, -0.153891f, -0.206407f, 0.109263f, -0.0665639f, -0.167768f, -0.0382564f, -0.0991432f, 0.00114097f, -0.270128f, -0.136406f, -0.0342589f, -0.085211f, -0.0985847f, 0.028126f, -0.0612121f, 
0.0226484f, -0.145462f, 0.119119f, 0.100325f, 0.131587f, -0.0651782f, 0.121331f, 0.0844396f, 0.0296183f, -0.0205646f, -0.0743535f, -0.0127071f, 0.0860608f, 0.00928115f, -0.217887f, 0.051828f, -0.0399206f, -0.19158f, -0.231826f, 0.110878f, 0.0364731f, 0.0475292f, 0.124959f, 0.125509f, -0.113549f, -0.0945701f, 0.0452462f, -0.194241f, -0.0748856f, -0.0848238f, -0.0705846f, -0.00737421f, -0.00306336f, 0.0818608f, 0.1876f, -0.0164862f, -0.0988693f, -0.0722961f, 0.0717543f, -0.0731678f, -0.0334917f, 0.0126872f, 0.0109792f, -0.0218576f, -0.02024f, 0.126492f, -0.0456014f, -0.0647152f, -0.0223219f, 0.290865f, -0.00461306f, 0.0772482f, 0.028516f, -0.145756f, 0.222435f, -0.0844253f, -0.0490738f, 0.136191f, 0.0917838f, -0.148446f, -0.0245094f, 0.0841544f, 0.0565493f, -0.0285964f, 0.11791f, 0.123028f, -0.0622476f, 0.010014f, -0.204865f, -0.0660146f, -0.0292581f, -0.0239985f, -0.183919f, 0.0633088f, 0.0354361f, -0.0278989f, -0.15381f, 0.010669f, 0.0254481f, 0.088751f, 0.18636f, -0.0357405f, 0.0163929f, 0.140702f, 0.0699074f, -0.179903f, 0.079496f, -0.0188685f, 0.0594603f, -0.0384411f, 0.124561f, 0.101308f, -0.0540631f, 0.104384f, -0.020024f, -0.0242527f, -0.0167859f, -0.156694f, -0.185201f, -0.0148136f, 0.0904471f, 0.114801f, -0.0383085f, -0.0753128f, -0.025013f, -0.00577859f, 0.00730319f, 0.0920455f, 0.00390995f, 0.0667231f, 0.00237347f, 0.0823384f, -0.0812348f, -0.11555f, 0.122286f, 0.0853818f, -0.0323922f, 0.0673012f, 0.0556096f, 0.0134518f, 0.0754678f, 0.0553623f, -0.0689427f, 0.0264757f, 9.96166e-05f, -0.12167f, 0.119696f, -0.00397614f, 0.110184f, -0.16388f, -0.144619f, 0.00994338f, 0.0653049f, 0.118631f, -0.0271103f, -0.159197f, 0.0264458f, 0.0377331f, 0.00670557f, 0.132711f, -0.146988f, 0.0410815f, 0.033654f, -0.0689339f, -0.0323166f, -0.0235119f, 0.0461265f, 0.0404163f, 0.029665f, 0.0703973f, -0.159501f, -0.0205369f, -0.0640582f, 0.0522346f, 0.134372f, -0.0632503f, -0.144296f, -0.127083f, -0.0114842f, -0.0611535f, 0.0999514f, 0.0472518f, 0.185726f, 0.0274118f, -0.0405062f, -0.168224f, 0.161777f, 0.206253f, -0.169522f, -0.015894f, 0.370152f, 0.000467996f, 0.0926897f, -0.0337247f, 0.127827f, -0.08163f, -0.0630913f, -0.0420639f, 0.0687435f, -0.0464481f, -0.0504583f, 0.0731616f, 0.06786f, 0.132546f, 0.101275f, -0.122851f, 0.0935017f, 0.00623603f, 0.0768964f, -0.00366004f, -0.156684f, 0.0801176f, 0.14025f, 0.113511f, -0.0676626f, 0.05901f, -0.0824732f, -0.180079f, -0.0657939f, -0.0619872f, -0.134686f, 0.0913632f, 0.085415f, -0.178582f, -0.184222f, -0.214777f, 0.0349062f, 0.0340991f, 0.0322368f, 0.210072f, 0.0102769f, 0.202452f, -0.0182071f, 0.00892899f, -0.279069f, -0.290953f, -0.0228499f, 0.111496f, 0.132749f, -0.0576087f, -0.0564363f, 0.0847626f, -0.0462059f, 0.0654138f, 0.0678649f, 0.101326f, -0.00636211f, -0.00278047f, -0.0505565f, -0.219111f, -0.149995f, 0.146089f, -0.0528913f, -0.0375884f, 0.358996f, -0.08679f, 0.117836f, -0.0942597f, -0.254633f, -0.00506417f, 0.0682066f, 0.121637f, 0.208151f, 0.0672187f, 0.0504415f, -0.0252887f, 0.205132f, 0.127923f, -0.0793721f, -0.0561765f, -0.156301f, -0.121047f, 0.169479f, 0.143351f, -0.158008f, 0.111533f, -0.0609109f, -0.00240044f, 0.023942f, -0.0815071f, -0.0680816f, -0.0747979f, 0.0811401f, 0.12873f, -0.205013f, 0.0311016f, 0.110573f, -0.216827f, 0.107585f, 0.0539796f, 0.152026f, -0.0463614f, 0.0130653f, -0.0105645f, -0.036467f, -0.0317826f, -0.00611716f, -0.0303259f, 0.0700739f, -0.106284f, -0.00853787f, -0.00211293f, -0.115054f, 0.0910871f, -0.12691f, -0.0485328f, 0.160897f, 0.0732904f, -0.13478f, -0.0361281f, 0.105366f, 0.159763f, 0.212194f, 
-0.0815196f, -0.0221129f, 0.0200345f, -0.06203f, 0.10234f, 0.161206f, -0.137448f, 0.0868787f, -0.144418f, 0.243887f, -0.16322f, 0.0377241f, 0.0818148f, -0.0433454f, -0.197481f, 0.155739f, 0.194721f, -0.16472f, -0.0739804f, 0.142539f, 0.108739f, 0.0184933f, 0.221583f, -0.0459288f, 0.0686786f, 0.028198f, 0.149859f, 0.251046f, 0.01805f, 0.0313838f, -0.0607054f, -0.148719f, 0.00572261f, -0.0295078f, 0.202119f, -0.0688806f, -0.0872185f, -0.0738928f, -0.0842145f, 0.0159338f, 0.0397653f, 0.123095f, -0.0581185f, -0.0994909f, -0.13393f, -0.0370074f, 0.16904f, -0.0708192f, -0.0754798f, -0.0579512f, -0.173857f, 0.106638f, -0.0342493f, -0.00758789f, -0.0617094f, -0.26003f, -0.0850962f, -0.0474492f, 0.0724282f, -0.044732f, 0.0741011f, -0.0366434f, -0.0499079f, 0.0666642f, 0.0492916f, -0.0908142f, 0.0875124f, -0.193158f, 0.0408296f, 0.115251f, -0.0375242f, -0.101852f, -0.131482f, -0.1898f, -0.0180383f, -0.00476736f, 0.00334787f, 0.00182473f, -0.0459181f, 0.0870828f, -0.0580641f, -0.149119f, -0.0227925f, 0.0657165f, -0.0277835f, -0.141894f, 0.0448996f, 0.00838565f, 0.0153908f, 0.154899f, 0.171492f, 0.0929689f, -0.0726922f, -0.0222083f, 0.0649919f, -0.0237814f, -0.128347f, -0.0895591f, 0.100279f, -0.13608f, 0.29876f, 0.217687f, -0.0138976f, 0.126944f, 0.119621f, -0.144559f, 0.102179f, -0.0180163f, 0.085345f, -0.0475189f, -0.239579f, -0.026328f, 0.152163f, -0.0102625f, -0.0112962f, 0.265043f, 0.0199936f, -0.131415f, 0.0719167f, -0.106391f, -0.125173f, -0.0266488f, -0.00830649f, -0.0198297f, -0.149058f, 0.230543f, 0.0817075f, -0.0851508f, -0.137278f, 0.0801695f, -0.0539673f, 0.17062f, -0.0650049f, 0.13379f, -0.135659f, 0.097116f, 0.0656133f, -0.069825f, -0.0169667f, -0.306699f, -0.00195387f, -0.197762f, -0.244064f, 0.194315f, 0.0406425f, -0.0846421f, -0.109702f, 0.00962155f, 0.0638243f, 0.151184f, 0.0504319f, 0.00645732f, -0.270866f, 0.0984852f, 0.0455063f, -6.03981e-05f, 0.120458f, -0.0850411f, 0.100736f, 0.0478779f, 0.221077f, -0.112596f, 0.0820581f, 0.0271296f, -0.205727f, -0.141208f, 0.0294533f, -0.106681f, -0.243474f, 0.0357164f, 0.0581802f, -0.101303f, -0.0139202f, 0.196134f, 0.107937f, -0.0544005f, -0.0661761f, 0.31173f, -0.0612939f, -0.0829115f, 0.00350939f, -0.0646918f, 0.181386f, -0.101009f, -0.0243713f, -0.0296007f, 0.0730814f, 0.0542519f, -0.208174f, 0.0203016f, -0.0407686f, 0.0385432f, -0.0674441f, -0.066084f, -0.204911f, -0.0424466f, -0.0823417f, -0.104815f, 0.00773922f, 0.00735693f, 0.0405269f, 0.0106065f, 0.132024f, -0.0192005f, -0.126572f, 0.0429317f, -0.0394362f, -0.110754f, 0.112256f, -0.173194f, -0.0574455f, 0.161248f, -0.155649f, -0.0153652f, -0.176656f, -0.0855819f, -0.0560111f, -0.094803f, -6.9748e-06f, 0.0330912f, 0.0135079f, 0.0253883f, -0.0340213f, 0.0133895f, -0.0487959f, -0.0567942f, 0.0073305f, 0.0404094f, -0.0603053f, 0.0314057f, -0.0981589f, -0.00603633f, 0.0303509f, -0.094885f, -0.101597f, 0.112478f, 0.18607f, -0.00654307f, 0.0428245f, 0.161417f, -0.0788306f, 0.000461828f, 0.121395f, 0.0005212f, -0.157768f, 0.0352908f, -0.11757f, 0.0988206f, 0.102103f, -0.0447607f, 0.164419f, 0.0334563f, 0.172619f, -0.106307f, -0.109634f, 0.0702444f, 0.0242332f, 0.0851643f, -0.12248f, -0.0470167f, 0.18609f, 0.161624f, 0.0167193f, -0.0364087f, -0.225782f, -0.0370813f, 0.0930594f, -0.0777285f, 0.0698708f, -0.279222f, -0.270969f, -0.109092f, 0.0314784f, 0.114709f, -0.00971972f, 0.058653f, 0.0344798f, 0.0160749f, -0.0732088f, -0.120663f, -0.0919176f, 0.0405344f, 0.00374037f, -0.0592213f, -0.0754498f, -0.00784496f, 0.175991f, 0.155743f, 0.322885f, 0.12644f, 0.0688629f, -0.217905f, 0.0834167f, 
0.0390054f, 0.226054f, -0.0589343f, 0.00422749f, -0.187125f, -0.0773547f, 0.161565f, -0.00691263f, 0.180562f, -0.238656f, -0.113077f, 0.0518564f, 0.112608f, -0.264156f, -0.0161887f, 0.142421f, 0.0337453f, 0.154352f, -0.0275722f, -0.10557f, 0.000596952f, -0.0285571f, -0.0929283f, 0.133982f, -0.0873064f, -0.061734f, -0.159023f, 0.077251f, 0.0277605f, -0.00845134f, -0.129084f, -0.0795942f, -0.0481981f, -0.110448f, -0.0184389f, 0.123008f, 0.107019f, -0.0860279f, 0.00372084f, 0.072156f, 0.0675655f, 0.070688f, -0.0726114f, 0.0367046f, -0.174351f, -0.0660226f, 0.182365f, 0.0898932f, 0.079161f, -0.0760877f, -0.319183f, 0.157357f, 0.0288205f, -0.0287369f, 0.0531748f, 0.0113543f, -0.00183423f, 0.0711047f, 0.159513f, 0.0342131f, -0.00487889f, -0.0817453f, 0.250463f, -0.0284279f, -0.0377289f, -0.0904096f, 0.317344f, 0.0449011f, -0.0521865f, 0.0382946f, 0.20204f, -0.0247765f, -0.149713f, -0.0358974f, -0.161146f, -0.17722f, -0.0379701f, 0.0566355f, -0.103364f, -0.0372382f, -0.0403359f, -0.044241f, -0.0155164f, 0.121921f, -0.0601127f, 0.0684083f, -0.00531953f, -0.000575791f, 0.13219f, -0.0524841f, -0.000373635f, 0.0187546f, 0.0956514f, -0.0432686f, 0.107781f, -0.0935017f, 0.0962704f, -0.166151f, 0.00697562f, 0.0362284f, -0.0268233f, -0.0616627f, 0.208481f, -0.0909199f, 0.0325374f, -0.0185765f, 0.0499102f, 0.231498f, 0.0146188f, 0.000875598f, -0.0719198f, 0.0754061f, -0.0663974f, -0.109134f, -0.118688f, 0.0247073f, -0.114238f, 0.091028f, 0.0361002f, -0.00499809f, -0.0550956f, -0.0406556f, 0.053865f, -0.00972763f, -0.0177384f, 0.0329161f, -0.0245598f, 0.00930862f, 0.215003f, 0.046014f, -0.103101f, -0.102586f, 0.172648f, 0.308194f, 0.00203503f, -0.077075f, -0.04961f, -0.160022f, -0.0682138f, 0.025242f, -0.0804685f, -0.223623f, -0.19025f, 0.0165835f, -0.0501993f, -0.0413633f, -0.145179f, -0.00213924f, 0.123645f, -0.0311368f, 0.113178f, -0.0101823f, -0.0361475f, 0.0964516f, 0.13692f, 0.118838f, -0.0359253f, 0.178834f, 0.142814f, 0.108264f, -0.0533048f, 0.0224323f, -0.0869867f, 0.1928f, 0.103927f, 0.0347344f, -0.0612816f, -0.0359572f, -0.0563679f, -0.0783742f, 0.00209833f, 0.0117178f, 0.120654f, -0.1035f, 0.115381f, -0.014589f, 0.0835337f, -0.109606f, 0.0554175f, -0.0117645f, 0.0735395f, 0.28248f, -0.00215742f, 0.0541724f, -0.0709023f, 0.05168f, 0.0336764f, 0.0829889f, -0.154695f, 0.142443f, 0.0621448f, 0.0816364f, 0.0971748f, -0.0347333f, 0.0912891f, 0.0329896f, -0.129578f, -0.0778539f, -0.111941f, -0.00476076f, 0.0490305f, -0.0046601f, 0.217866f, -0.0191019f, -0.0588361f, 0.172585f, -0.232674f, 0.10165f, -0.123439f, -0.0534576f, -0.0148523f, 0.000395795f, 0.211512f, -0.210344f, 0.469005f, -0.0246099f, 0.102876f, 0.100109f, -0.217879f, -0.0679756f, -0.0749751f, -0.0677645f, -0.0840007f, -0.027203f, -0.043259f, -0.117643f, -0.025526f, -0.0402664f, -0.05038f, -0.00527167f, 0.203404f, 0.0409284f, 0.00394163f, -0.0320391f, 0.0458514f, -0.183136f, 0.056115f, -0.123375f, 0.107744f, 0.0841868f, -0.0515625f, 0.0295452f, -0.0890182f, 0.0736166f, -0.0984962f, -0.143159f, 0.0378522f, 0.187563f, 0.0771938f, -0.117103f, -0.0674864f, -0.204715f, 0.12022f, -0.228907f, -0.000710857f, 0.0979078f, -0.0624472f, 0.187522f, 0.0821749f, 0.0949648f, 0.0565785f, 0.0628847f, 0.00677163f, -0.032064f, -0.0213019f, 0.128268f, 0.105937f, 0.269875f, 0.046035f, 0.0635842f, -0.105716f, -0.181148f, 0.0146467f, 0.0407832f, 0.0485354f, -0.0271778f, 0.142066f, 0.0953618f, -0.0412729f, -0.0725773f, 0.0248198f, -0.0495635f, 0.0979875f, 0.122572f, -0.0275678f, 0.0222445f, 0.0457587f, -0.163481f, 0.156041f, 0.00626979f, 0.0725135f, 0.0127444f, 
-0.131288f, -0.139553f, -0.0171093f, -0.0370769f, 0.0528362f, -0.251222f, -0.027922f, 0.017252f, 0.0457254f, 0.037921f, -0.0340891f, 0.133788f, 0.0858977f, -0.0118427f, -0.0657502f, -0.154473f, 0.0548952f, -0.0682875f, 0.229189f, 0.0540929f, -0.061801f, 0.00521075f, -0.0291024f, -0.0329822f, -0.0427182f, -0.0974417f, 0.0328311f, 0.145152f, -0.0293305f, 0.0746478f, -0.0179712f, 0.170347f, 0.110984f, -0.109048f, -0.0182257f, -0.0721303f, 0.0603937f, 0.0308048f, -0.0624456f, 0.0323034f, 0.0507288f, 0.0235858f, -0.0403662f, 0.0492176f, 0.124347f, -0.0753749f, 0.132866f, -0.214677f, 0.0255701f, -0.108109f, 0.0740564f, 0.178181f, 0.0812858f, -0.0166243f, 0.110087f, -0.0197837f, -0.116514f, 0.0347567f, -0.139605f, 0.151246f, 0.0593472f, 0.0560066f, -0.137867f, 0.0729474f, -0.0430025f, -0.149958f, -0.236727f, 0.162334f, -0.0541904f, -0.158737f, -0.146193f, -0.0794956f, 0.0818801f, 0.072588f, 0.103179f, -0.121259f, 0.0735243f, 0.0113247f, 0.20007f, -0.0523187f, -0.0473618f, -0.0911523f, -0.118989f, -0.0716201f, -0.121752f, 0.263995f, 0.173536f, -0.0889619f, -0.0420394f, -0.151839f, -0.0761855f, 0.0658923f, -0.0701936f, -0.177004f, -0.0547861f, -0.015202f, -0.00357922f, 0.100469f, 0.267598f, -0.151382f, 0.0224132f, -0.0588889f, 0.0870429f, -0.157965f, 0.0274763f, 0.17281f, 0.119041f, -0.223712f, 0.0291605f, -0.0565674f, 0.0498508f, -0.127293f, 0.110274f, -0.0598819f, -0.0851282f, 0.0934764f, -0.190259f, 0.145679f, 0.176189f, 0.0517863f, 0.0602086f, 0.0428395f, 0.116015f, 0.14247f, -0.173053f, 0.119458f, 0.168445f, 0.0513431f, 0.0476729f, -0.00270212f, 0.0371649f, -0.013634f, -0.00374531f, -0.187704f, 0.0154535f, 0.080569f, -0.0584067f, -0.0379629f, 0.058911f, 0.0227231f, -0.0767663f, -0.114519f, 0.0545278f, 0.0433426f, 0.115809f, -0.107319f, 0.0356108f, -0.188816f, -0.0956224f, 0.0149959f, -0.109566f, -0.101362f, 0.121146f, -0.202317f, 0.312493f, -0.192223f, 0.0533858f, 0.0389283f, -0.031656f, 0.161431f, 0.00800317f, 0.0156512f, 0.172804f, -0.0439582f, 0.160328f, 0.0681495f, -0.100676f, -0.0564418f, 0.0822967f, -0.361322f, -0.0882227f, 0.00480206f, -0.230367f, -0.238302f, 0.087099f, -0.155299f, -0.0957393f, 0.191707f, -0.129323f, -0.235908f, 0.153018f, 0.0256241f, -0.346757f, 0.0385961f, -0.00697349f, 0.117679f, 0.149925f, 0.0663061f, 0.122076f, -0.0953967f, -0.0657201f, 0.140212f, 0.138994f, -0.122792f, 0.0401238f, 0.0347921f, -0.0888757f, 0.0953544f, 0.078522f, 0.164624f, 0.213171f, 0.0679546f, -0.134505f, -0.0283343f, -0.0415735f, -0.242619f, -0.0795108f, -0.132163f, 0.153644f, 0.037888f, 0.130176f, -0.0256915f, -0.0120679f, 0.0233503f, -0.0753665f, -0.0333763f, 0.0267694f, -0.216806f, -0.178769f, 0.0322943f, 0.141386f, 0.0173458f, 0.0211252f, -0.00953213f, -0.0910179f, 0.0546482f, -0.136684f, 0.269219f, -0.0590755f, -0.13305f, 0.113944f, -0.148639f, -0.0434122f, -0.163555f, 0.191413f, 0.208434f, 0.214745f, -0.061314f, -0.0380879f, 0.0986161f, -0.0939888f, 0.00416629f, 0.185235f, -0.157235f, -0.00613423f, -0.0846416f, -0.10866f, -0.0224339f, -0.214955f, 0.0683846f, 0.175048f, 0.161348f, -0.112771f, 0.12254f, -0.00496664f, -0.0522687f, -0.168367f, 0.371266f, 0.0382643f, 0.119417f, 0.0627995f, 0.330142f, -0.0287853f, -0.14184f, 0.0899962f, 0.0371859f, 0.103928f, -0.108667f, 0.049074f, 0.058157f, 0.0391799f, 0.0870731f, 0.321793f, 0.000572261f, -0.0195541f, -0.118719f, 0.0907994f, -0.0700519f, 0.0382184f, 0.227104f, 0.212335f, 0.047627f, -0.073622f, 0.0849035f, 0.0670489f, -0.00739742f, 0.151991f, 0.0708107f, 0.176417f, 0.149568f, -0.000626854f, -0.146807f, -0.111098f, -0.0545701f, 0.0794571f, 
-0.133683f, 0.22212f, -0.00187226f, 0.16306f, 0.0498503f, 0.0916642f, 0.199446f, 0.0271136f, 0.198759f, 0.126674f, 0.132349f, 0.0359271f, -0.105986f, -0.0288404f, 0.0737737f, -0.069132f, -0.107967f, 0.0727774f, 0.298493f, 0.0704923f, -0.00433016f, -0.0543909f, -0.157999f, -0.141107f, -0.118533f, -0.143359f, 0.11661f, 0.141672f, -0.0365257f, 0.00772911f, -0.0320574f, 0.00562881f, -0.0110184f, -0.17925f, -0.164518f, 0.0910001f, -0.0500575f, -0.100417f, 0.00109036f, -0.0386041f, -0.0385887f, 0.147545f, -0.0282409f, 0.0575225f, 0.272205f, 0.144783f, 0.0266403f, -0.0449117f, 0.0282341f, -0.00183043f, -0.10506f, -0.056399f, -0.0684179f, 0.0714739f, 0.0447198f, 0.0236807f, 0.056605f, -0.0358227f, -0.0388024f, -0.109701f, 0.108465f, -0.0799841f, 0.226032f, 0.0296582f, 0.0752317f, -0.0417484f, -0.00251694f, -0.0607814f, -0.108056f, -0.0556356f, -0.130543f, -0.120291f, 0.213003f, 0.0214225f, 0.00584085f, -0.0675413f, -0.00557502f, 0.0527302f, -0.0786162f, -0.0632066f, -0.00499547f, -0.0589725f, 0.113325f, 0.0685201f, -0.0857787f, 0.0936921f, 0.081481f, 0.0513278f, -0.0569653f, -0.177657f, 0.112916f, 0.0300974f, 0.0551348f, -0.159349f, 0.133455f, -0.134287f, 0.131509f, 0.0127834f, 0.117415f, 0.0861868f, 0.188152f, -0.0596614f, 0.0445275f, -0.0284248f, 0.0855275f, 0.147834f, 0.0520453f, 0.0823926f, -0.0272626f, -0.128915f, -0.0722884f, 0.0862391f, 0.00376402f, 0.0287283f, 0.0376815f, -0.0958195f, 0.0492384f, -0.0559696f, -0.045558f, -0.115853f, 0.104045f, 0.148082f, -0.0814995f, -0.0476794f, 0.0791956f, 0.0199157f, -0.129018f, -0.0630556f, -0.0767718f, -0.0829134f, 0.238502f, -0.0916194f, -0.0726179f, -0.11654f, -0.0436059f, 0.10451f, -0.0964034f, -0.0638973f, 0.23404f, 0.0458717f, -0.0285301f, 0.287048f, -0.134132f, -0.134034f, 0.0410712f, -0.0633622f, 0.0724423f, -0.0661121f, -0.105312f, -0.36242f, -0.0823323f, -0.111609f, -0.039246f, 0.0437769f, 0.000565853f, 0.224856f, 0.0887242f, 0.2744f, -0.142815f, 0.0246727f, -0.180066f, 0.106875f, -0.138448f, 0.135613f, 0.0194182f, 0.16573f, 0.132528f, 0.0872561f, -0.0847732f, -0.0232272f, -0.00710525f, -0.0291167f, 0.116438f, 0.0569064f, -0.109541f, -0.0746416f, -0.0405521f, 0.0779969f, 0.0271285f, -0.227607f, 0.19564f, -0.0407123f, 0.0447206f, -0.0470057f, -0.173843f, -0.0619797f, -0.0539832f, -0.153993f, 0.01572f, -0.138176f, -0.137165f, -0.132166f, 0.129374f, 0.0289758f, 0.057014f, -0.0881285f, -0.00260486f, 0.00884046f, 0.0455607f, 0.139475f, 0.0248122f, -0.187112f, -0.0929973f, -0.187711f, -0.0277693f, 0.21244f, -0.0638955f, 0.00261231f, 0.257271f, -0.0393011f, 0.132236f, 0.0704923f, 0.0131538f, -0.00991646f, -0.13077f, 0.009843f, 0.0122753f, 0.264451f, -0.105269f, 0.0708374f, 0.0937325f, 0.0427365f, -0.064863f, 0.00839128f, 0.0734033f, -0.059311f, 0.0089841f, 0.0862932f, 0.274432f, 0.0630902f, 0.0546411f, 0.111582f, 0.143942f, -0.0742699f, -0.0341754f, 0.0306475f, -0.0342186f, -0.0489278f, 0.209378f, 0.0764388f, -0.177037f, -0.0277749f, 0.191848f, 0.00819012f, 0.103408f, -0.00176977f, 0.177486f, -0.00329732f, -0.0644328f, 0.18469f, 0.0564194f, 0.245244f, 0.040421f, -0.0926763f, -0.177587f, 0.10681f, 0.0639101f, -0.126571f, -0.0627459f, -0.102859f, 0.0861041f, 0.307871f, 0.102007f, 0.0383946f, 0.15336f, 0.132546f, 0.134394f, -0.110318f, -0.0674608f, -0.156828f, 0.0553648f, 0.0161165f, -0.0120321f, 0.0107209f, 0.250589f, 0.192434f, -0.0601187f, -0.0788559f, -0.0631325f, -0.0298663f, 0.145766f, 0.0991106f, -0.166258f, -0.170731f, -0.0631831f, 0.145709f, -0.0215251f, 0.0898712f, 0.0959285f, 0.0748607f, 0.0729323f, 0.000784042f, -0.090907f, 0.00316911f, 
-0.180841f, -0.0219752f, 0.0072499f, 0.190194f, 0.160161f, -0.0536875f, 0.0653102f, 0.0931338f, 0.110166f, -0.106135f, 0.0162917f, 0.0715435f, -0.0887602f, 0.149379f, -0.177369f, -0.0329502f, -0.112293f, -0.199455f, 0.0270215f, 0.222825f, -0.11387f, -0.0743753f, -0.131991f, -0.0880726f, 0.0970628f, -0.0525572f, 0.00705545f, 0.0356413f, 0.0160315f, 0.0834361f, -0.0318797f, -0.0124734f, 0.0943407f, 0.0748987f, -0.0382874f, 0.058931f, -0.0974267f, 0.0810331f, 0.118376f, -0.159692f, 0.0994759f, -0.0429683f, -0.00188228f, 0.00745748f, -0.0382275f, -0.225161f, 0.0655033f, 0.0894096f, 0.119675f, 0.0929055f, -0.200835f, 0.0859509f, 0.0150305f, 0.060959f, 0.0952061f, -0.152332f, 0.177215f, 0.0154271f, -0.0974927f, 0.0392572f, -0.0235105f, -0.0130155f, 0.0467075f, 0.0697631f, 0.15563f, 0.175952f, 0.167312f, -0.0353084f, -0.0431333f, 0.105883f, -0.0345105f, 0.0198052f, 0.0194614f, 0.0425772f, 0.0642758f, -0.135864f, 0.296038f, 0.191229f, -0.115556f, -0.166978f, -0.0112536f, 0.0555504f, -0.0476803f, 0.0492445f, 0.0459494f, 0.214071f, -0.00591361f, 0.0607222f, 0.0340495f, 0.233836f, 0.0597213f, -0.0879274f, 0.0505745f, -0.0604938f, -0.266654f, -0.153542f, -0.206078f, -0.178227f, -0.168672f, -0.138656f, 0.268077f, 0.0205752f, 0.163125f, 0.199547f, 0.125297f, -0.1968f, 0.0479744f, 0.130152f, -0.0770827f, -0.0604686f, -0.0625255f, -0.0504935f, -0.144949f, -0.0938229f, -0.0448925f, 0.222758f, -0.0681382f, -0.0660635f, -0.105244f, 0.0711717f, -0.120427f, 0.0377428f, 0.132914f, -0.137678f, 0.012007f, 0.0618f, 0.0125061f, -0.164142f, -0.012324f, -0.166922f, 0.07234f, 0.048073f, 0.255044f, -0.0207798f, -0.0835459f, 0.0463638f, 0.18979f, 0.0844308f, 0.100276f, -0.0496919f, -0.100608f, 0.0242858f, -0.0980914f, -0.0796412f, 0.178373f, 0.0410912f, 0.0175745f, 0.0288237f, -0.0394135f, -0.0424368f, -0.14236f, -0.153841f, 0.0222358f, 0.0504833f, 0.0945223f, -0.0621098f, -0.0305613f, 0.0202656f, 0.131734f, -0.118036f, 0.0196659f, 0.144094f, -0.0766275f, -0.283195f, 0.151605f, -0.0389253f, 0.0637892f, 0.193152f, -0.0209977f, 0.253582f, -0.0778264f, 0.186167f, 0.0825451f, -0.023295f, -0.0792824f, -0.0707089f, -0.178696f, 0.11186f, -0.0452025f, 0.0738612f, 0.0129579f, -0.0619412f, -0.155263f, 0.1305f, -0.147599f, -0.0799781f, -0.109112f, -0.033514f, 0.0394846f, -0.190111f, 0.111105f, -0.0255946f, 0.0589856f, -0.114607f, 0.0668425f, -0.285232f, 0.0354405f, -0.00333251f, -0.0218019f, -0.105714f, 0.126994f, -0.0438589f, -0.214264f, 0.00219057f, -0.0783345f, -0.0104942f, 0.132301f, -0.121082f, 0.0529824f, -0.0551067f, 0.00661386f, 0.00175032f, -0.0497012f, 0.220264f, -0.093263f, -0.00943525f, 0.259071f, 0.202804f, -0.0422607f, -0.161806f, -0.154168f, -0.0462766f, 0.0902625f, -0.0135326f, -0.047336f, 0.036648f, 0.11816f, -0.0805548f, 0.255133f, 0.0454735f, 0.0279021f, 0.0537928f, 0.0547063f, 0.151402f, 0.0176965f, -0.00440483f, -0.205686f, 0.0764088f, 0.021828f, -0.193544f, 0.292588f, 0.040979f, -0.0728292f, 0.148619f, 0.153826f, -0.220085f, 0.113869f, 0.246681f, -0.057884f, 0.179553f, 0.0750444f, -0.0738771f, 0.144798f, -0.118541f, 0.140011f, -0.213109f, 0.218563f, 0.294634f, 0.185895f, 0.157788f, -0.1362f, -0.157909f, 0.11174f, 0.337991f, 0.0857814f, 0.0536436f, -0.012582f, 0.0473327f, -0.0784865f, 0.115365f, 0.326177f, 0.0422627f, -0.00285842f, 0.0283941f, -0.111546f, 0.105723f, -0.0723006f, -0.0221328f, -0.0105951f, 0.0991718f, -0.0392868f, -0.0584225f, -0.0800304f, -0.037926f, 0.175987f, 0.00121246f, 0.0226899f, -0.258225f, -0.00822019f, 0.11858f, -0.0564814f, -0.0658633f, 0.00266708f, -0.128026f, -0.0693174f, 
-0.0457778f, 0.264187f, -0.162937f, -0.0414696f, 0.171257f, 0.007658f, 0.00395017f, -0.140338f, -0.00978526f, 0.0288758f, 0.03454f, -0.0764987f, -0.0155282f, 0.210575f, -0.157301f, 0.00114538f, -0.0963804f, 0.123452f, -0.0610447f, -0.125864f, 0.108137f, 0.0999536f, -0.143575f, -0.0208251f, 0.0952736f, -0.012989f, 0.0448433f, -0.0133603f, -0.135674f, 0.218456f, -0.188652f, 0.0451715f, 0.0961409f, 0.0310139f, -0.00797584f, 0.0583903f, 0.0246629f, -0.0111583f, 0.0110281f, -0.0217884f, -0.0198457f, 0.000682905f, 0.154728f, -0.155413f, -0.251753f, 0.123911f, -0.0833131f, 0.078501f, 0.0615957f, -0.047863f, 0.00988068f, 0.00311942f, -0.0749786f, -0.0849391f, 0.00837908f, 0.0824322f, -0.0391383f, -0.0394313f, -0.0633632f, 0.101734f, 0.0677399f, 0.149852f, -0.0477991f, -0.104714f, 0.0807371f, -0.0288154f, -0.19025f, 0.133226f, -0.0855922f, -0.205452f, 0.133694f, 0.2193f, -0.115268f, -0.152828f, 0.0644212f, -0.13474f, -0.00418466f, -0.0586749f, 0.214339f, 0.0201869f, -0.0569237f, 0.121868f, -0.0250844f, -0.00612182f, -0.0460443f, -0.0547093f, 0.0825133f, 0.0874537f, -0.0962602f, -0.099678f, -0.0400507f, 0.150084f, 0.055895f, -0.0866807f, 0.0167682f, 0.0895822f, -0.0683397f, 0.0406722f, 0.00901364f, -0.036952f, 0.0151821f, 0.129439f, 0.114176f, -0.130048f, 0.162025f, 0.0544178f, -0.0135192f, 0.127936f, 0.0444373f, 0.0696283f, -0.0359577f, 0.0284889f, -0.0846976f, -0.198234f, -0.016698f, 0.0645268f, 0.100102f, -0.132388f, 0.0397982f, -0.16405f, 0.0796291f, -0.172208f, -0.110677f, 0.195553f, -0.00135103f, 0.0324908f, 0.114338f, -0.146503f, -0.0364143f, -0.00698319f, 0.185327f, 0.112794f, 0.0146411f, 0.00516997f, -0.0741633f, 0.143078f, 0.105597f, 0.0494355f, -0.00616047f, 0.0566299f, -0.251283f, 0.0615356f, 0.115824f, 0.0461655f, 0.0669317f, -0.139589f, 0.0728001f, -0.0920899f, -0.0405793f, -0.13037f, -0.0463053f, 0.057631f, -0.0708467f, -0.115391f, -0.140383f, -0.0211144f, -0.0948482f, -0.0313153f, 0.109514f, -0.033576f, -0.0574729f, -0.0882786f, -0.0238447f, -0.0304008f, 0.0745921f, 0.151296f, -0.0885721f, 0.223509f, 0.107232f, 0.0862027f, 0.0918841f, -0.0124349f, 0.093146f, -0.10309f, 0.00180291f, 0.00283113f, 0.0630533f, -0.240819f, 0.053405f, 0.0748347f, 0.0527197f, -0.0253875f, -0.00482912f, 0.11326f, 0.334863f, 0.16577f, -0.14447f, 0.0127792f, -0.122786f, 0.00269574f, -0.0730285f, 0.103715f, 0.0908264f, -0.077013f, 0.0666975f, -0.105594f, -0.22978f, 0.0355962f, -0.123153f, -0.0466776f, 0.0807791f, 0.189353f, 0.149165f, 0.252758f, 0.0699468f, 0.140927f, -0.240233f, -0.0126965f, -0.153619f, 0.216384f, -0.137699f, 0.0430931f, 0.0489138f, -0.082443f, 0.0521397f, 0.0123896f, 0.0516258f, 0.0374367f, -0.0354089f, 0.135221f, -0.0564659f, -0.0512903f, -0.00888598f, -0.093107f, -0.0854671f, 0.0837814f, -0.0282706f, 0.0639345f, 0.0953691f, 0.0860188f, -0.189737f, 0.130907f, -0.0329583f, 0.0297436f, -0.0759287f, -0.0516023f, -0.0617952f, -0.0751f, 0.0209101f, -0.0432293f, 0.0330576f, -0.0728689f, 0.123781f, 0.187297f, 0.268743f, -0.0487147f, 0.144102f, -0.113081f, 0.0309442f, -0.186323f, -0.160266f, 0.0236466f, 0.0152241f, 0.0471288f, 0.126855f, 0.251027f, -0.198948f, 0.164132f, -0.087211f, -0.109323f, -0.172532f, 0.116749f, 0.0522465f, 0.00202273f, 0.102556f, 0.00617408f, -0.0625486f, 0.0351583f, 0.00167059f, 0.15782f, -0.022945f, 0.0883283f, 0.035065f, -0.0538942f, -0.197966f, -0.108187f, 0.174553f, 0.0259893f, 0.0447092f, -0.064657f, -0.0111728f, -0.223932f, 0.0200091f, 0.276156f, 0.208043f, -0.0732518f, -0.0226216f, 0.121243f, -0.00189368f, 0.226548f, -0.0268615f, -0.044084f, -0.0370458f, 0.215344f, 
-0.149062f, -0.234086f, 0.0605974f, -0.0587929f, 0.0278486f, -0.223908f, -0.0773695f, 8.37608e-06f, -0.00432288f, 0.0436819f, -0.194043f, 0.0224394f, 0.0768387f, -0.157518f, 0.138284f, -0.151267f, 0.00588833f, 0.115127f, -0.212466f, -0.0323781f, 0.154232f, -0.183373f, -0.10043f, 0.0464086f, 0.0815123f, 0.0517785f, 0.0362275f, -0.0132397f, 0.129774f, 0.0744425f, 0.000323879f, 0.265573f, 0.163705f, -0.0872868f, -0.0347583f, -0.117982f, 0.0563349f, -0.00263922f, 0.00300454f, 0.0347105f, -0.150338f, -0.0467793f, 0.0505631f, 0.156978f, 0.0291304f, 0.0448842f, -0.212697f, -0.0593771f, -0.00992316f, -0.0442035f, -0.237398f, 0.112421f, 0.0721868f, 0.0656435f, 0.11156f, -0.0818317f, -0.172975f, -0.0405748f, 0.0166645f, -0.115721f, 0.148824f, -0.0683489f, -0.13539f, -0.0192247f, 0.00897393f, 0.0141431f, -0.229807f, -0.0060753f, 0.113521f, -0.149776f, -0.113536f, -0.081281f, 0.0477445f, 0.0535184f, -0.0782879f, -0.0905782f, 0.030897f, 0.12872f, 0.0395866f, 0.410535f, -0.250751f, -0.0344598f, -0.185031f, -0.121285f, 0.00559817f, 0.0312565f, -0.0973458f, 0.00851001f, 0.0355187f, -0.00576454f, 0.20088f, 0.079856f, -0.102866f, -0.0577016f, -0.0840094f, 0.0362344f, -0.0463938f, -0.0451951f, -0.119899f, 0.0388494f, 0.0589476f, -0.12907f, -0.0070646f, 0.197235f, -0.130618f, -0.173188f, 0.023361f, -0.115665f, 0.0584612f, -0.0703209f, 0.0361746f, -0.0864356f, 0.0359699f, -0.00459243f, -0.187284f, -0.00318405f, 0.190258f, -0.0225454f, -0.0933743f, 0.0392661f, -0.0295677f, 0.0381723f, -0.0950352f, -0.0303431f, -0.18737f, 0.133563f, 0.0439595f, 0.172039f, 0.0634187f, 0.109315f, 0.00865376f, -0.0875904f, 0.00314922f, -0.0819572f, 0.0962105f, -0.147667f, 0.17414f, -0.0275186f, 0.0776494f, -0.0196294f, 0.142242f, 0.0169015f, 0.0147235f, 0.0174198f, -0.10516f, -0.124503f, -0.035037f, -0.11649f, -0.114682f, 0.00286897f, -0.040766f, 0.0537642f, 0.0804028f, -0.10209f, -0.0598771f, -0.109153f, -0.156319f, 0.0410412f, 0.097915f, 0.0841537f, -0.0196916f, 0.0615565f, 0.061058f, 0.00623674f, -0.0277692f, -0.0341571f, -0.198591f, 0.101239f, 0.186489f, -0.154104f, 0.166741f, 0.0414054f, -0.123269f, 0.0481001f, 0.0379167f, 0.0245851f, -0.0652871f, 0.0321105f, -0.131219f, 0.0149575f, -0.117208f, -0.0090091f, -0.169629f, -0.0624746f, -0.0478262f, 0.0715636f, -0.0631163f, 0.0656477f, 0.107171f, -0.0472398f, 0.0604766f, -0.111456f, -0.0633671f, 0.0449898f, -0.0150743f, -0.137471f, 0.0651104f, 0.0663957f, 0.0320031f, 0.0761733f, 0.0239094f, 0.132785f, -0.101429f, 0.131251f, 0.163274f, 0.26232f, 0.268939f, 0.0347813f, 0.0554182f, 0.0469428f, 0.0385462f, -0.0500494f, -0.2231f, 0.0324855f, -0.0344319f, -0.0744358f, -0.0631954f, -0.0106201f, 0.129262f, -0.053109f, 0.0589497f, -0.0811378f, 0.142629f, 0.379967f, 0.0286038f, 0.00757108f, -0.0874985f, -0.0832005f, 0.133687f, -0.106776f, -0.169855f, 0.00901175f, 0.117407f, 0.174377f, 0.149093f, 0.0842249f, 0.151845f, -0.0955833f, -0.167521f, 0.0153226f, 0.0516023f, -0.165503f, -0.255525f, -0.166356f, -0.0945882f, -0.0253131f, -0.354168f, -0.014988f, 0.0349702f, 0.0939307f, 0.0740683f, -0.116761f, 0.0338095f, 0.053978f, -0.0114166f, -0.00302694f, 0.0179369f, 0.110849f, 0.150416f, 0.140136f, -0.135288f, -0.0761019f, -0.145891f, -0.137998f, -0.0587339f, -0.0813218f, -0.0573154f, -0.251909f, -0.0715996f, 0.133971f, 0.157904f, -0.0631378f, -0.103957f, -0.166318f, -0.0675543f, -0.0417313f, 0.0495355f, -0.0555726f, 0.267859f, 0.0723202f, -0.0210532f, -0.172526f, 0.129043f, 0.0799852f, 0.0959523f, 0.0775354f, 0.0303392f, 0.0648846f, 0.0349828f, 0.0444663f, 0.0314597f, 0.0537239f, 0.0130542f, 
0.136871f, 0.0202238f, -0.170925f, -0.00736751f, -0.189061f, -0.0272211f, -0.123369f, 0.175804f, 0.0982394f, 0.164605f, 0.235536f, 0.196296f, 0.0680173f, 0.00290658f, 0.145698f, 0.107097f, 0.106976f, -0.0672305f, -0.0751587f, -0.0716977f, -0.214078f, -0.0526906f, 0.100345f, 0.0833854f, -0.0170819f, -0.0990284f, -0.036929f, 0.0630621f, 0.0409574f, 0.0329493f, -0.261429f, -0.0169771f, -0.036854f, 0.0540496f, 0.0156242f, 0.160673f, -0.124393f, -0.00557814f, -0.149301f, 0.120459f, -0.0839688f, 0.130115f, -0.101772f, -0.113222f, -0.112352f, -0.0886592f, 0.0780054f, 0.068618f, 0.0396648f, -0.0461262f, -0.0194843f, -0.100428f, -0.0335306f, -0.00235125f, -0.19825f, 0.187106f, 0.00816451f, -0.0911466f, 0.0231616f, 0.264079f, -0.118771f, -0.1525f, -0.048867f, 0.0319623f, 0.0991494f, -0.0402932f, -0.139238f, -0.0714109f, 0.048445f, 0.087496f, 0.0504924f, 0.0850871f, 0.26548f, 0.0236706f, -0.0815344f, 0.0504495f, -0.169064f, 0.0435302f, 0.162694f, 0.0453634f, 0.0315223f, -0.0836665f, 0.0249505f, -0.0356326f, -0.0214875f, 0.139125f, 0.10907f, 0.112547f, -0.00562706f, -0.248669f, 0.0674515f, -0.0259893f, 0.123933f, -0.177089f, 0.0390987f, 0.152091f, -0.0275251f, 0.0348462f, -0.229286f, -0.151669f, 0.077205f, -0.0830269f, 0.285756f, -0.176332f, -0.172307f, -0.130113f, 0.0344439f, 0.0277699f, -0.188295f, -0.120415f, -0.0861775f, 0.225267f, -0.06124f, -0.0413824f, 0.185665f, 0.0344884f, -0.1554f, 0.078196f, -0.0334587f, -0.0523341f, -0.0110627f, 0.062857f, -0.0992103f, 0.165915f, 0.158463f, -0.220817f, -0.0762813f, -0.138302f, -0.246817f, 0.029335f, -0.0778863f, 0.111039f, 0.0500911f, -0.111798f, -0.0811882f, -0.0132875f, 0.110374f, 0.106173f, -0.0903874f, -0.122127f, 0.191466f, 0.204684f, -0.0919555f, 0.123884f, 0.0812594f, -0.029222f, -0.139194f, -0.21588f, 0.0403488f, -0.19448f, 0.114819f, -0.160441f, -0.0481321f, 0.0273495f, -0.0498029f, -0.0278347f, -0.0194198f, 0.297201f, 0.106746f, -0.107952f, 0.1445f, 0.0200566f, -0.0729731f, -0.109618f, 0.160259f, -0.0801642f, 0.0445926f, 0.0421409f, 0.0602185f, -0.0507784f, -0.171351f, 0.0214688f, -0.130304f, -0.0557924f, 0.0276426f, 0.0129281f, 0.181451f, 0.183303f, -0.0132663f, 0.105639f, -0.0299418f, 0.233848f, 0.0489694f, 0.0721906f, 0.0602845f, -0.0540681f, -0.0387776f, -0.0121076f, 0.140361f, -0.0588857f, -0.227388f, 0.0111968f, -0.00785115f, -0.256679f, 0.191712f, 0.113356f, 0.00772008f, 0.139106f, 0.0955315f, -0.0352201f, 0.04606f, 0.00864178f, -0.0851243f, 0.062746f, 0.177216f, 0.0549666f, 0.0897557f, -0.0782678f, 0.144552f, 0.169682f, -0.0520743f, 0.225317f, 0.261248f, 0.030091f, 0.014221f, -0.0924314f, 0.0764437f, 0.151169f, -0.0977351f, -0.201742f, 0.097808f, -0.148211f, 0.218951f, -0.106093f, 0.148675f, -0.00729254f, 0.0884228f, -0.16234f, -0.0379227f, 0.0138549f, -0.0985393f, 0.188188f, -0.0378999f, 0.151389f, -0.0349545f, -0.219343f, 0.238529f, -0.194958f, -0.210776f, -0.0128597f, 0.0618125f, -0.217026f, 0.0370368f, 0.0166973f, -0.114056f, -0.0180308f, 0.0264202f, 0.0760518f, 0.017002f, 0.133002f, 0.147616f, 0.0343307f, -0.130585f, 0.0607683f, -0.000604139f, 0.135301f, 0.145507f, -0.059804f, 0.163797f, 0.153676f, 0.039206f, -0.107107f, -0.00541485f, -0.0898827f, 0.07827f, 0.107804f, 0.0920882f, -0.0430097f, 0.0114125f, 0.0637225f, 0.11898f, 0.00909611f, -0.0410687f, -0.0425009f, -0.237374f, -0.0264733f, -0.113706f, -0.0336939f, 0.0246705f, 0.0240621f, 0.00972635f, 0.0858575f, -0.0260544f, -0.055621f, -0.00459199f, 0.0752843f, 0.149771f, 0.0678032f, -0.0284409f, -0.000727898f, -0.00677528f, -0.161488f, 0.131935f, 0.0754087f, 0.0169214f, -0.157669f, 
0.156975f, 0.181095f, 0.113667f, -0.0126714f, 0.210095f, 0.00217018f, 0.079725f, 0.0986213f, 0.0330426f, -0.0303806f, -0.0232705f, 0.114698f, -0.0804841f, -0.0963906f, 0.0847222f, -0.0684975f, -0.0880388f, 0.0385345f, 0.00792878f, -0.102431f, -0.0187649f, 0.013661f, -0.0885022f, 0.115181f, 0.0942428f, 0.14799f, -0.271758f, -0.0622996f, -0.0525235f, 0.0413268f, 0.0626483f, 0.00108596f, 0.00784293f, -0.0342454f, 0.105265f, -0.0627863f, -0.028524f, 0.00379326f, 0.125653f, -0.0446292f, 0.136173f, 0.172221f, -0.129042f, 0.0837391f, 0.101685f, -0.00536954f, 0.0520788f, 0.156367f, 0.0277749f, -0.143444f, -0.0799233f, -0.00828491f, 0.223146f, -0.120549f, 0.0625486f, 0.0308417f, -0.0326997f, -0.10578f, -0.0435261f, -0.176185f, -0.0431812f, -0.095901f, -0.0862781f, 0.0481042f, -0.0482013f, 0.0298214f, -0.0250222f, -0.201045f, 0.102107f, -0.0682298f, -0.0505838f, 0.019306f, -0.146127f, 0.024426f, 0.0677878f, 0.255064f, -0.0333239f, -0.00952168f, -0.0271202f, 0.119389f, -0.0189859f, 0.0849824f, -0.146853f, 0.088561f, -0.141293f, 0.0282972f, -0.0451804f, -0.0111583f, 0.0776917f, 0.083979f, -0.0957924f, 0.00390353f, -0.230549f, -0.146468f, -0.0828452f, 0.0825708f, -0.186666f, -0.0751397f, -0.209216f, 0.0145068f, 0.0164816f, -0.00909483f, -0.00369552f, -0.0699559f, 0.252602f, -0.10942f, -0.0361187f, -0.0717638f, -0.0615458f, -0.104157f, -0.0355428f, -0.0910381f, -0.197453f, 0.0889255f, 0.0528506f, 0.00893365f, 0.0903858f, 0.0905593f, 0.0734537f, 0.00685707f, 0.0518498f, -0.025848f, 0.209048f, 0.193204f, 0.109816f, -0.00383333f, -0.039668f, 0.135598f, -0.0673444f, 0.101388f, -0.0577858f, 0.0117056f, -0.0277554f, -0.0828063f, 0.0524208f, -0.0232551f, -0.186132f, -0.0281371f, 0.218905f, -0.0154305f, -0.00411663f, 0.0620608f, 0.0637394f, -0.0995925f, -0.00337193f, 0.0896648f, 0.0200998f, 0.0598588f, -0.0151036f, -0.0114821f, 0.0140752f, 0.00838099f, 0.0754851f, 0.159125f, 0.132055f, 0.0155289f, 0.0494469f, -0.0444041f, -0.151508f, -0.128391f, 0.0358831f, -0.0288741f, -0.0573798f, -0.200453f, -0.113849f, -0.0602758f, 0.105432f, 0.0559223f, -0.149417f, -0.112894f, 0.00755768f, 0.105483f, 0.162936f, 0.090509f, 0.124528f, -0.0991133f, -0.0558572f, 0.0288998f, -0.0865133f, 0.0151584f, -0.0610425f, 0.127259f, -0.199909f, 0.0154326f, -0.0960287f, -0.212739f, -0.183287f, 0.0586773f, -0.0501916f, -0.0286151f, -0.081877f, 0.153102f, -0.242908f, -0.18602f, 0.185858f, -0.208695f, 0.106436f, -0.028256f, -0.0399761f, -0.0233843f, -0.101679f, -0.115362f, 0.0541982f, -0.211327f, -0.0051503f, -0.00603983f, 0.070405f, 0.158355f, 0.0259321f, -0.0841388f, -0.0845467f, -0.241143f, -0.188445f, -0.212685f, 0.0490743f, -0.0688726f, 0.0141847f, -0.015387f, 0.0890401f, 0.0417599f, 0.220021f, -0.158833f, -0.119809f, 0.147151f, 0.0023199f, -0.0808225f, 0.123213f, 0.152786f, -0.0674836f, -0.00128442f, 0.199138f, -0.031239f, 0.084605f, 0.0377388f, 0.0170138f, 0.0507333f, 0.022644f, 0.030301f, -0.0975418f, -0.0581584f, -0.0147322f, 0.00465851f, 0.123258f, -0.194869f, 0.164663f, 0.0282284f, 0.29669f, 0.117063f, 0.0482899f, 0.0517057f, 0.117657f, -0.10848f, -0.0148483f, 0.0881104f, -0.0446944f, 0.0511782f, 0.0281497f, -0.125451f, -0.066603f, 0.0409661f, -0.140002f, -0.0368363f, -0.127671f, 0.00340453f, 0.0838216f, -0.0551784f, 0.00061034f, -0.0549816f, -0.0302944f, 0.0294502f, -0.020267f, 0.0393088f, -0.0189309f, 0.00690822f, 0.0406713f, -0.0981707f, 0.00104771f, -0.0352848f, 0.0878607f, 0.0203256f, 0.0906847f, 0.171318f, -0.127757f, -0.200183f, -0.0628113f, -0.147333f, -0.109265f, -0.10294f, -0.0708279f, 0.214564f, 0.132434f, -0.0461598f, 
0.181728f, -0.0245736f, 0.0210963f, -0.0340841f, -0.0312356f, -0.181008f, -0.107997f, 0.151486f, -0.0776947f, 0.119884f, -0.0031553f, 0.106772f, 0.0163706f, -0.0875151f, -0.0885882f, 0.105742f, -0.155735f, 0.0638482f, -0.0187743f, 0.0921039f, -0.0410423f, -0.2897f, -0.0545549f, 0.290727f, -0.0429221f, 0.0553089f, -0.0592756f, 0.0224578f, -0.138718f, -0.140993f, 0.114599f, 0.000368313f, 0.156807f, -0.0660085f, -0.00442095f, 0.0271386f, 0.0424953f, -0.171861f, -0.0764929f, 0.127167f, -0.0884361f, -0.152904f, -0.0112842f, 0.00314154f, 0.172519f, 0.00522066f, 0.0389319f, -0.0748639f, 0.135104f, -0.103064f, -0.0195557f, -0.10481f, 0.117884f, -0.147472f, -0.0484443f, 0.118257f, 0.0763777f, -0.0645275f, -0.292527f, 0.0539097f, -0.0829154f, -0.243534f, 0.00142628f, 0.0356479f, -0.141852f, 0.199985f, -0.00340609f, -0.0325591f, 0.063092f, 0.0356474f, -0.180259f, 0.0323061f, 0.0384537f, -0.0325805f, 0.153809f, -0.0907534f, 0.152707f, -0.050294f, -0.0214203f, -0.101077f, 0.200103f, 0.00174342f, 0.0466262f, -0.167723f, -0.0815158f, 0.0946393f, -0.0184179f, 0.012282f, 0.312541f, 0.192873f, -0.133942f, -0.0212957f, 0.122371f, 0.168319f, -0.00107848f, -0.0157632f, 0.0141721f, 0.0223552f, -0.161001f, 0.104472f, -0.12698f, -0.0386618f, -0.188808f, 0.143388f, -0.150102f, -0.0329557f, 0.0464469f, -0.0118673f, -0.0615962f, -0.156501f, -0.0589524f, -0.0584158f, 0.0498758f, 0.0286623f, 0.0546496f, -0.121115f, 0.0276572f, -0.143289f, 0.117744f, -0.0466496f, 0.0248568f, -0.0407596f, 0.012102f, -0.116335f, -0.0484275f, -0.168449f, -0.05505f, 0.139444f, 0.147966f, -0.111855f, -0.0297009f, -0.171484f, 0.11586f, 0.16844f, -0.012422f, 0.0597176f, 0.234258f, 0.14872f, -0.0502805f, -0.110595f, -0.0178411f, -0.0905406f, 0.05123f, 0.145708f, -0.0138954f, -0.201726f, 0.0802725f, 0.196908f, -0.150232f, -0.0729755f, 0.0531283f, 0.027899f, 0.0895996f, -0.0453042f, 0.11351f, 0.0546207f, -0.0389722f, -0.0530513f, -0.124414f, -0.0958673f, -0.0704529f, -0.267706f, -0.072015f, -0.0341869f, -0.169094f, -0.0430783f, -0.159039f, 0.105491f, -0.0511552f, -0.0782239f, 0.017113f, -0.0446794f, -0.0171927f, -0.0741336f, 0.029292f, -0.0473893f, 0.112204f, -0.151622f, 0.0118747f, 0.0444341f, -0.104539f, 0.0425062f, 0.136133f, 0.070309f, 0.191369f, 0.26388f, 0.0319978f, 0.00507096f, -0.0729764f, -0.133869f, -0.160741f, -0.0436979f, 0.10928f, 0.0926505f, 0.0803579f, -0.109713f, -0.103395f, 0.174481f, -0.0688781f, 0.0414972f, 0.109587f, 0.103748f, 0.0268222f, 0.116618f, -0.0655083f, -0.0272787f, -0.119966f, 0.0420301f, -0.095944f, 0.199033f, -0.0460042f, -0.119803f, -0.0332279f, 0.0232258f, 0.279626f, -0.237919f, 0.00160104f, 0.00159377f, 0.0177097f, -0.0410766f, 0.0399425f, -0.00401052f, -0.0111306f, 0.12246f, -0.0971883f, -0.158824f, -0.145213f, -0.0561174f, -0.119317f, 0.0856046f, 0.0952667f, 0.106159f, 0.076056f, 0.180741f, 0.0628437f, -0.114494f, 0.0801033f, -0.0969682f, -0.117619f, -0.295821f, -0.0874622f, 0.0663144f, 0.0460557f, 0.0461456f, -0.123683f, 0.0439772f, -0.0362618f, 0.105041f, 0.0432001f, -0.00592445f, 0.170126f, 0.0830223f, 0.171433f, -0.0623503f, -0.239983f, -0.0515402f, -0.0351445f, -0.0157021f, 0.0891836f, -0.0613749f, -0.0446423f, -0.23306f, -0.22256f, 0.0961184f, 0.119572f, 0.0985964f, -0.0642032f, 0.200684f, 0.0531507f, -0.0644261f, 0.0697928f, -0.0627565f, 0.0932382f, -0.203788f, 0.016967f, 0.0489951f, 0.0834868f, -0.126223f, -0.0455413f, 0.215395f, 0.15713f, -0.0677129f, 0.11849f, -0.224099f, -0.00564901f, -0.0515676f, -0.0223984f, -0.0951502f, -0.306609f, -0.131045f, -0.0908551f, -0.0637113f, -0.0664312f, -0.120158f, 
0.0081412f, -0.0697521f, -0.179817f, -0.0363421f, -0.159363f, -0.276491f, -0.0474945f, 0.0443234f, -0.05925f, 0.0806377f, -0.0867666f, 0.175683f, -0.0577573f, -0.221521f, 0.0424225f, 0.0350486f, -0.0219766f, 0.359715f, -0.0791245f, 0.16094f, -0.053502f, -0.24656f, 0.230291f, -0.149395f, -0.0405685f, 0.0446512f, 0.0317444f, 0.278322f, -0.205548f, -0.258859f, -0.0187514f, -0.0126554f, 0.0676104f, 0.205884f, -0.142568f, -0.0172591f, 0.116454f, -0.10954f, -0.0562847f, 0.0191536f, 0.167997f, 0.0140592f, -0.00764243f, 0.0720588f, 0.06172f, -0.292667f, -0.0233591f, 0.0898345f, -0.0819293f, -0.0133489f, 0.134911f, 0.0216395f, 0.0231037f, 0.0386825f, 0.191793f, 0.204526f, -0.0529837f, 0.071734f, 0.0646656f, 0.13188f, 0.0746916f, 0.0567235f, -0.108015f, 0.122519f, 0.016008f, 0.187478f, 0.0821305f, -0.0832648f, -0.0939737f, 0.043362f, -0.0735132f, -0.132463f, -0.342839f, -0.179825f, 0.0195324f, 0.0939771f, -0.0820749f, 0.124221f, -0.141847f, -0.257998f, -0.144468f, 0.0969171f, 0.177705f, 0.0775434f, -0.272517f, 0.164924f, -0.055217f, -0.0249579f, -0.0590735f, -0.0716679f, -0.15047f, 0.0766674f, 0.00412021f, -0.109287f, -0.148353f, -0.0943131f, 0.00226437f, 0.171753f, -0.0906924f, 0.0897021f, 0.011858f, 0.0722941f, 0.00244883f, 0.189717f, 0.0656087f, 0.0198338f, -0.00154283f, 0.0524443f, 0.193007f, 0.136008f, 0.209426f, -0.233605f, -0.209436f, -0.0350347f, 0.0733655f, -0.00453009f, -0.134195f, -0.112743f, -0.078847f, 0.0146875f, -0.188191f, 0.0356071f, 0.0859781f, 0.182553f, -0.100099f, 0.00223675f, 0.0163556f, 0.13081f, 0.147073f, -0.101827f, -0.0759129f, 0.0219072f, 0.0310091f, -0.23603f, -0.00466859f, -0.163668f, 0.124721f, 0.0932285f, -0.0388255f, -0.142489f, 0.122313f, 0.032474f, 0.109185f, -0.0101782f, -0.128974f, -0.163159f, -0.0738602f, 0.106046f, -0.0179798f, -0.0798286f, -0.0600035f, -0.103388f, -0.0173794f, 0.150437f, 0.0993913f, 0.104881f, -0.0772221f, -0.170673f, 0.167237f, 0.0402524f, 0.109682f, -0.223013f, -0.0262903f, -0.105392f, 0.238909f, 0.0958375f, 0.0222019f, -0.067445f, -0.156866f, 0.0570429f, -0.0565689f, -0.147683f, 0.0872151f, -0.113916f, -0.105798f, -0.140791f, -0.126458f, 0.0566273f, -0.21033f, 0.0839152f, 0.169957f, 0.124456f, -0.1176f, -0.0333512f, 0.0960142f, -0.0704116f, -0.17301f, -0.00478061f, 0.191731f, 0.093183f, -0.043726f, -0.119546f, -0.251064f, -0.0217078f, 0.0118136f, 0.0803301f, -0.0603706f, -0.0284006f, 0.0944146f, -0.14712f, 0.0461381f, -0.218749f, -0.0842371f, -0.0848391f, 0.0643181f, 0.0300151f, -0.0444076f, 0.0507619f, -0.0835678f, 0.0331038f, -0.00275954f, -0.045894f, -0.174133f, -0.0149071f, -0.0245152f, 0.156365f, -0.0675673f, -0.0338153f, 0.0667169f, 0.0106668f, 0.0992645f, -0.0377082f, 0.0151032f, 0.101662f, -0.103056f, 0.0651068f, -0.0798478f, -0.030759f, -0.0170273f, 0.101995f, 0.0376442f, -0.0586843f, -0.0847361f, 0.0321792f, -0.226284f, -0.161394f, 0.146352f, 0.178523f, -0.107654f, -0.102787f, 0.0802861f, -0.0289662f, 0.0297838f, -0.0259187f, -0.123691f, -0.0112545f, 0.113731f, -0.00291314f, -0.253846f, 0.0343799f, 0.0733315f, -0.0749695f, 0.0124321f, -0.162748f, -0.175606f, -0.0579814f, 0.0513276f, -0.159151f, 0.0350945f, 0.0321563f, -0.0114402f, -0.0640753f, -0.0394047f, 0.0185511f, -0.14398f, 0.00879595f, -0.0296036f, -0.244379f, 0.0167521f, 0.016732f, -0.0101207f, 0.168328f, 0.138313f, -0.265654f, -0.132551f, -0.0111195f, -0.00240027f, 0.128003f, -0.00583597f, 0.0855749f, 0.0116712f, 0.0379805f, -0.0385913f, -0.0457178f, -0.0872776f, 0.0557534f, -0.000648953f, -0.0201334f, 0.0142394f, -0.0978714f, 0.0534771f, -0.152652f, 0.0463474f, 
-0.0244624f, 0.10234f, 0.0969861f, -0.0438915f, 0.042162f, 0.114185f, -0.023404f, -0.13764f, -0.018015f, 0.0415236f, 0.0242454f, -0.104537f, -0.0728245f, -0.242682f, 0.0353832f, -0.072485f, 0.102579f, -0.201913f, -0.188865f, 0.0395992f, 0.0245252f, -0.166361f, -0.0503329f, 0.23836f, 0.166731f, 0.0798325f, 0.0428123f, -0.0859694f, -0.140555f, -0.0783308f, 0.00272267f, -0.0654277f, -0.0478751f, 0.00640876f, 0.0360599f, -0.119341f, 0.133659f, 0.0334623f, -0.0239176f, -0.0457586f, 0.0920342f, 0.0460798f, -0.0519714f, -0.0897178f, 0.0885382f, -0.00393213f, -0.0314096f, 0.0618494f, -0.0723128f, 0.255447f, 0.00749907f, 0.0989676f, -0.138348f, -0.193205f, 0.0752203f, 0.00146259f, 0.00345568f, 0.148683f, -0.0433247f, 0.156943f, -0.0453991f, -0.137258f, -0.0790754f, -0.073398f, 0.03724f, -0.0407481f, 0.0748606f, -0.0275041f, -0.060414f, 0.0785068f, -0.0326368f, -0.153573f, 0.00704897f, -0.072898f, -0.0249653f, 0.0963092f, -0.0598411f, -0.0320101f, -0.148148f, -0.0083775f, -0.0293703f, 0.101685f, -0.0706839f, 0.151555f, 0.0904381f, 0.038486f, -0.293796f, -0.125123f, -0.0160274f, -0.156264f, 0.234075f, 0.0433258f, -0.0259251f, 0.059972f, 0.136438f, -0.163279f, 0.0494018f, -0.0349067f, -0.0410755f, 0.0515858f, 0.0076149f, 0.0415095f, 0.0486478f, 0.0501927f, -0.1477f, -0.211128f, -0.0850318f, -0.0920465f, -0.160542f, 0.0614634f, 0.142552f, 0.117021f, 0.0504493f, -0.0190265f, 0.294126f, 0.0805029f, 0.230605f, 0.050371f, 0.20018f, -0.144033f, -0.144304f, 0.0717571f, 0.0672925f, 0.0299704f, -0.0779885f, -0.0430933f, 0.179312f, -0.00180555f, -0.103548f, -0.0483372f, -0.209504f, 0.0494223f, 0.20363f, 0.137725f, 0.0511731f, -0.220107f, -0.0324874f, 0.151429f, -0.109922f, -0.061593f, -0.219332f, 0.0796796f, 0.0722311f, 0.0855472f, -0.113754f, 0.137077f, -0.309277f, 0.103939f, -0.126866f, -0.071245f, 0.0984532f, 0.215451f, -0.0371189f, -0.197791f, -0.0272569f, -0.00999765f, 0.0423821f, -0.12291f, 0.0826751f, -0.0627678f, 0.0708544f, 0.290949f, -0.161565f, 0.177596f, 0.125264f, -0.00369029f, 0.159427f, 0.0282618f, -0.0587159f, -0.0287543f, 0.162345f, 0.345679f, -0.105309f, -0.0654687f, -0.0405783f, -0.0296809f, -0.0552884f, 0.0975892f, -0.0191581f, -0.0497437f, -0.261679f, 0.209914f, -0.177016f, -0.197073f, 0.0076806f, -0.0629609f, 0.0123245f, 0.0937974f, 0.0479937f, 0.217542f, -0.071637f, 0.0169403f, -0.0760303f, -0.17119f, 0.288249f, -0.0517558f, 0.119818f, 0.0785349f, -0.109471f, 0.0743482f, -0.184169f, -0.127643f, 0.0474436f, 0.213437f, 0.0530124f, -0.0179431f, 0.084271f, -0.0473217f, -0.0232327f, -0.0134479f, 0.271559f, 0.0878773f, 0.20957f, 0.297467f, 0.00786742f, 0.0964503f, -0.0264547f, -0.0727342f, -0.181154f, 0.137983f, 0.338245f, 0.0940735f, 0.0304558f, -0.211413f, -0.0398992f, 0.0857381f, -0.24316f, 0.109596f, -0.009855f, 0.135898f, -0.00702097f, -0.0424251f, 0.0978528f, 0.109505f, -0.0252706f, -0.0559088f, 0.0971168f, -0.120149f, -0.0158332f, 0.102226f, 0.134883f, 0.263983f, -0.205572f, 0.158973f, 0.248976f, 0.190913f, -0.0707164f, -0.0473297f, -0.0134367f, -0.19617f, -0.0776648f, -0.0804706f, -0.0621507f, -0.128139f, 0.0861913f, -0.0667626f, -0.0933351f, 0.085164f, -0.0640133f, 0.0271569f, 0.00431443f, -0.16492f, -0.0277846f, -0.192857f, -0.069405f, -0.00971334f, 0.0136974f, -0.177793f, -0.0382278f, -0.0801822f, 0.0926707f, 0.0570204f, -0.0766761f, -0.0244724f, 0.226214f, 0.0034218f, 0.12342f, 0.0583063f, -0.00866649f, -0.168052f, 0.0294049f, -0.234313f, -0.166429f, -0.115369f, 0.0421235f, -0.120304f, -0.120782f, 0.0560781f, 0.0761294f, 0.192221f, 0.132559f, 0.130927f, 0.141902f, -0.0850186f, 
0.0386294f, -0.0586129f, 0.130883f, 0.218548f, -0.0623188f, 0.0236953f, 0.12479f, 0.304044f, 0.191109f, -0.16265f, -0.179291f, -0.128139f, 0.0511675f, 0.218045f, 0.181536f, 0.0905323f, 0.0532676f, -0.17246f, -0.0709164f, 0.144862f, -0.213331f, -0.235726f, 0.245371f, 0.0184792f, -0.14075f, -0.167106f, 0.163998f, 0.0600328f, 0.20224f, -0.0888768f, 0.0576127f, 0.0766289f, 0.0146756f, -0.0830833f, 0.082287f, 0.176898f, -0.120031f, -0.0623949f, 0.0937113f, -0.28067f, 0.0581045f, -0.0365983f, 0.0937025f, -0.170001f, -0.0496406f, -0.01878f, 0.050687f, -0.0472238f, 0.0611565f, -0.0349166f, -0.196733f, 0.0839714f, 0.141242f, -0.0141962f, -0.0600266f, 0.163483f, 0.084526f, 0.0479008f, 0.102239f, 0.176715f, -0.160759f, -0.140169f, 0.122158f, 0.16211f, 0.111234f, 0.0672217f, 0.0634676f, -0.0817171f, 0.0992053f, 0.0253314f, 0.00165232f, 0.0960391f, -0.0316036f, -0.0967862f, -0.0125182f, 0.0939067f, -0.0978939f, -0.15249f, -0.0993986f, 0.0309397f, 0.0712689f, -0.0476272f, -0.0291233f, 0.0585029f, -0.0188334f, 0.0855798f, 0.0358009f, 0.0837921f, -0.133823f, 0.0673859f, -0.0244877f, -0.0603419f, 0.0514354f, 0.0385311f, 0.0944403f, 0.0727609f, 0.123709f, -0.0565418f, -0.129561f, -0.144019f, -0.079002f, 0.0609638f, 0.0767799f, -0.042883f, -0.0768833f, 0.00519367f, 0.147202f, -0.0245255f, -0.0475613f, -0.0739563f, -0.139207f, 0.0636918f, 0.0227896f, 0.0528194f, 0.0331631f, -0.147878f, -0.0844867f, 0.0664663f, 0.0861752f, -0.0169384f, 0.0791446f, 0.0119883f, -0.0297605f, -0.11075f, 0.0395281f, -0.10849f, 0.10311f, 0.038575f, 0.0123208f, 0.147154f, 0.221508f, -0.0947877f, -0.0734871f, 0.115789f, 0.0845052f, 0.0882626f, -0.073914f, 0.0662464f, 0.196438f, 0.0531637f, 0.100317f, -0.0838218f, -0.133268f, -0.132621f, 0.05095f, -0.0518469f, -0.0189343f, 0.154335f, 0.175336f, 0.0490932f, -0.116714f, -0.0720386f, -0.134515f, -0.0308114f, 0.0632764f, 0.00974837f, -0.0238573f, 0.00195356f, 0.161251f, -0.0802542f, -0.0369155f, 0.0175843f, 0.115263f, 0.0567821f, -0.046157f, -0.0973101f, -0.130586f, -0.169732f, -0.126506f, -0.120035f, -0.0356325f, 0.101123f, 0.121764f, -0.0524125f, 0.12542f, -0.0136652f, 0.0837537f, 0.109469f, 0.0244736f, -0.192945f, -0.0362202f, 0.00650635f, -0.0403809f, -0.0840146f, 0.127304f, 0.135327f, 0.00664333f, -0.146668f, -0.0467965f, 0.0794409f, 0.0819527f, -0.0480886f, 0.0486855f, 0.0809595f, 0.0476226f, 0.0965907f, -0.151954f, -0.211885f, -0.0919035f, 0.0165932f, 0.100495f, -0.0256261f, 0.0798814f, -0.035813f, 0.0928816f, -0.080874f, 0.0157428f, -0.158356f, 0.0406484f, 0.137512f, -0.0432774f, -0.0970761f, -0.0173381f, 0.0872708f, -0.188939f, -0.0492732f, -0.0548583f, 0.0255167f, -0.109551f, -0.159603f, 0.00478809f, -0.0360562f, 0.0239488f, 0.0251315f, -0.0424566f, -0.0290303f, -0.134485f, -0.0718073f, -0.105682f, -0.0649339f, -0.0254721f, 0.0756504f, 0.0105627f, -0.0360207f, 0.105144f, -0.0640832f, 0.0747f, 0.0143695f, 0.052161f, 0.117849f, -0.0580816f, -0.0637175f, -0.0869699f, 0.0587343f, 0.158956f, 0.0195474f, 0.032326f, 0.0793255f, 0.0719585f, -0.00223175f, -0.184524f, -0.128771f, -0.0882953f, 0.0732438f, -0.0032445f, -0.0304805f, -0.0178773f, -0.0339384f, 0.158236f, -0.0487496f, 0.016933f, 0.0112062f, -0.0782969f, -0.162667f, 0.0586429f, -0.00812828f, 0.0114383f, 0.167387f, 0.00546322f, 0.0163308f, -0.111334f, -0.0812219f, -0.11669f, 0.00861207f, 0.0685736f, 0.0209076f, 0.0399496f, -0.0672537f, -0.123078f, 0.123896f, -0.1616f, 0.123452f, 0.0532213f, -0.0343441f, -0.144929f, -0.105285f, 0.23618f, 0.102064f, 0.0584476f, -0.0691786f, -0.095552f, -0.0027904f, -0.0836166f, 0.00252111f, 
0.0372544f, 0.0537237f, -0.0207287f, 0.118723f, 0.0849747f, 0.208035f, -0.0111249f, -0.0370659f, -0.127211f, 0.0770758f, 0.0564665f, -0.0264482f, 0.108892f, 0.0185015f, -0.0939939f, -0.096015f, 0.144052f, 0.0848727f, 0.115484f, 0.101108f, 0.106897f, -0.11726f, -0.101021f, -0.055208f, 0.0919943f, -0.0402092f, 0.0823158f, -0.173687f, 0.0262834f, -0.169203f, 0.0291962f, 0.161416f, -0.0222565f, 0.00806963f, 0.174466f, -0.0767426f, 0.0436967f, 0.133778f, 0.128272f, 0.0935785f, -0.0991929f, 0.0454707f, -0.0476718f, 0.182201f, 0.00817971f, -0.00889266f, -0.026149f, -0.0644335f, 0.21186f, 0.117338f, 0.000792542f, 0.0213164f, 0.0744333f, -0.0121939f, -0.0285224f, -0.0211257f, 0.0362032f, 0.225319f, 0.0353633f, 0.0677996f, 0.116871f, -0.159991f, 0.0992094f, 0.0189144f, 0.152633f, 0.107308f, 0.127172f, -0.0692259f, 0.0262822f, -0.00366592f, 0.179692f, -0.198168f, -0.0997837f, -0.144807f, 0.039853f, 0.047124f, 0.120955f, -0.0771065f, -0.103562f, -0.0651258f, -0.0531673f, -0.0619981f, -0.0731576f, 0.0199195f, -0.0169349f, 0.0821189f, 0.023271f, 0.0876386f, 0.0983463f, 0.000128572f, -0.141145f, -0.0651145f, -0.0147654f, -0.100562f, -0.0475197f, 0.0213094f, -0.00217677f, 0.0144144f, -0.102248f, -0.0687139f, 0.119284f, 0.108135f, 0.0327078f, 0.17814f, 0.0355532f, 0.0149883f, 0.0348057f, 0.0103493f, 0.136389f, -0.0778083f, 0.103086f, 0.0110015f, -0.137888f, -0.0700109f, -0.113218f, 0.0824224f, 0.110952f, 0.0284493f, 0.0644895f, 0.105552f, -0.0766362f, 0.0353689f, 0.00300272f, -0.00675409f, -0.0185473f, -0.144722f, -0.0390585f, 0.0607631f, -0.186547f, -0.133354f, 0.107808f, -0.0647819f, 0.00271049f, 0.111212f, -0.202471f, -0.0205888f, 0.0477485f, -0.162617f, -0.0783853f, 0.111147f, -0.105033f, 0.0113969f, 0.0656377f, -0.0641083f, 0.0559224f, 0.194715f, -0.0502232f, -0.0978247f, 0.00345545f, -0.140454f, 0.0652129f, 0.0894265f, -0.138477f, -0.00417201f, 0.119584f, 0.0441594f, 0.00971707f, 0.0754769f, 0.0666072f, -0.210797f, -0.15096f, 0.0882694f, 0.176745f, -0.0332011f, 0.0268707f, -0.110367f, 0.0637021f, -0.106035f, 0.0969342f, 0.00336191f, -0.0492548f, -0.145268f, -0.212099f, 0.0258385f, -0.049315f, 0.0361512f, -0.0330719f, 0.0593164f, -0.0348575f, 0.042603f, 0.0231313f, 0.174576f, -0.0634203f, 0.204092f, -0.0658941f, 0.0522331f, -0.0533762f, -0.0354594f, 0.0948335f, 0.0458306f, -0.0718856f, 0.281908f, -0.123062f, -0.0513454f, -0.0832522f, 0.155176f, 0.134888f, -0.135909f, -0.116734f, -0.0605271f, 0.113046f, -0.122005f, 0.0314084f, -0.13757f, -0.169438f, -0.0810206f, 0.0748278f, 0.0484664f, -0.13779f, 0.0355303f, -0.213031f, 0.00975126f, -0.109359f, -0.0417217f, -0.171188f, -0.208053f, -0.106154f, 0.0882189f, 0.042032f, 0.0563421f, -0.00468812f, -0.0153875f, 0.0174556f, 0.264397f, 0.132391f, 0.0376369f, -0.0722157f, -0.0329904f, -0.123504f, 0.120935f, -0.000930922f, 0.0247556f, 0.0121103f, 0.0402275f, 0.0176447f, 0.055605f, -0.197491f, -0.0408461f, 0.166147f, 0.0124153f, 0.0983871f, 0.131356f, 0.0382799f, 0.0761826f, 0.0686585f, -0.0539948f, 0.10968f, -0.161117f, 0.112203f, 0.0534579f, -0.0111837f, 0.111632f, 0.0814211f, -0.0806675f, 0.154568f, 0.00899491f, 0.098906f, -0.00204868f, 0.0949005f, -0.199919f, 0.144038f, -0.0246662f, -0.173516f, 0.275438f, -0.093574f, -0.0174857f, 0.109003f, 0.0272693f, 0.128264f, -0.0676252f, -0.0442261f, 0.107223f, 0.0430002f, -0.0470434f, 0.129835f, 0.109157f, -0.0920301f, 0.11826f, -0.220729f, 0.0375858f, -0.18664f, 0.14797f, 0.0374536f, 0.0346498f, -0.109529f, -0.0115872f, 0.0743362f, 0.180665f, 0.0984952f, -0.034864f, 0.178656f, -0.126427f, 0.0865202f, 0.128285f, 
0.0206691f, 0.0465389f, -0.103255f, -0.110313f, 0.0431721f, 0.0901045f, -0.120569f, 0.0608268f, -0.221831f, -0.0180149f, 0.206021f, -0.221668f, 0.134156f, 0.0174607f, 0.0969935f, 0.167248f, -0.0597994f, -0.0170466f, 0.0243817f, -0.146495f, 0.0496713f, -0.00274506f, 0.139291f, 0.117634f, -0.0466885f, -0.0470088f, 0.0615922f, -0.00469908f, 0.107988f, -0.212616f, 0.176661f, 0.0568089f, 0.0121125f, 0.0319384f, 0.119767f, 0.00334511f, -0.0146433f, 0.0281925f, 0.0980598f, 0.0219119f, -0.0475717f, 0.0567364f, 0.189298f, -0.127564f, -0.0676554f, -0.153621f, 0.176936f, -0.14443f, -0.0704143f, 0.167968f, 0.282004f, 0.060831f, 0.0877933f, 0.050756f, -0.0511437f, -0.0769624f, -0.0514547f, -0.127916f, -0.0640993f, -0.0833388f, -0.0641917f, 0.0654744f, -0.0586587f, 0.0591393f, -0.138396f, -0.0931565f, -0.0489671f, -0.0494166f, -0.056881f, 0.0785713f, 0.178007f, -0.0514138f, -0.0989853f, -0.0120956f, 0.096578f, -0.109647f, -0.0743485f, -0.0825694f, 0.0119665f, 0.0650571f, 0.0389372f, -0.0208462f, -0.0392945f, -0.0555843f, -0.120977f, -0.0287519f, 0.0659682f, -0.265658f, 0.128913f, -0.0295166f, 0.0441855f, -0.105853f, -0.0495585f, -0.00852319f, -0.213449f, 0.0918541f, -0.123255f, -0.0455771f, -0.0304465f, 0.125686f, 0.0277281f, -0.246698f, 0.0269226f, -0.0118558f, 0.0595722f, 0.104024f, 0.0162148f, 0.125497f, 0.0912526f, 0.255578f, 0.161356f, -0.0586317f, 0.159912f, -0.256179f, 0.0320642f, 0.0454331f, -0.0936912f, 0.0190513f, 0.169121f, 0.0377877f, -0.144895f, 0.00715338f, -0.19054f, 0.0455218f, -0.106215f, 0.19001f, 0.219418f, -0.0254529f, 0.0818676f, 0.0784976f, 0.00283358f, 0.234166f, -0.0342924f, -0.0835303f, -0.136667f, -0.160508f, -0.305654f, 0.0525073f, 0.0403464f, 0.127312f, 0.0824422f, 0.121575f, -0.207532f, -0.135517f, -0.0960406f, 0.0815916f, 0.0327979f, -0.062943f, 0.0300865f, 0.0528177f, -0.0418652f, -0.0570994f, -0.103301f, 0.223695f, -0.0870602f, 0.00904972f, 0.0670306f, -0.167899f, 0.0085447f, -0.0942417f, 0.241428f, -0.112212f, -0.256615f, -0.257956f, -0.116644f, 0.0851484f, 0.00117436f, 0.0817315f, -0.181302f, -0.0553693f, -0.112013f, -0.0772925f, -0.0820859f, 0.0502862f, 0.111918f, -0.0609315f, 0.0951362f, 0.0689764f, -0.0636572f, -0.0886974f, -0.0352235f, 0.144306f, -0.079956f, 0.137689f, -0.307117f, -0.0534141f, 0.0126667f, -0.00654405f, -0.0174781f, -0.037189f, 0.0831323f, 0.198067f, 0.0327671f, -0.105462f, 0.160614f, -0.0104439f, -0.0818153f, 0.181018f, -0.042836f, 0.134418f, -0.129177f, -0.0917917f, -0.00584106f, -0.194135f, 0.0213184f, -0.179936f, -0.0555157f, -0.000540658f, 0.185082f, 0.0418863f, 0.205037f, -0.133818f, -0.0491808f, -0.118878f, -0.0379033f, -0.0550183f, -0.0309059f, 0.101655f, 0.0197472f, -0.0287046f, 0.0221947f, -0.0387419f, 0.0705591f, -0.168198f, 0.0106428f, 0.102485f, 0.0727887f, 0.03393f, -0.0267385f, -0.0261254f, 0.0416247f, -0.0529903f, 0.0243474f, 0.196779f, -0.138567f, 0.0666022f, -0.0688454f, 0.0680961f, -0.0266971f, -0.0561386f, -0.159046f, 0.122139f, -0.050073f, 0.0910026f, -0.0574785f, -0.328669f, 0.0378877f, 0.150768f, 0.129447f, -0.00744589f, 0.0304156f, -0.27702f, 0.178747f, -0.19005f, 0.146062f, -0.0616832f, 0.0409175f, -0.164076f, 0.128163f, 0.0826385f, -0.00203388f, -0.0240326f, -0.0648058f, 0.0646042f, 0.0831431f, 0.103365f, -0.00514731f, -0.0300297f, -0.0985743f, -0.00416123f, 0.0159298f, -0.102553f, 0.00938452f, -0.0117091f, -0.0377082f, -0.0876689f, -0.0823903f, 0.0726545f, -0.0598911f, 0.104333f, 0.0956845f, -0.195119f, 0.107949f, 0.0924422f, -0.0122513f, -0.0732881f, 0.0646259f, -0.0251552f, 0.196854f, -0.0346116f, 0.02246f, -0.147126f, 
0.0499791f, -0.0276557f, -0.23886f, -0.0324634f, 0.2523f, -0.12954f, 0.0713887f, 0.237271f, 0.102429f, -0.00120775f, 0.0433416f, -0.0345823f, -0.208548f, -0.141292f, -0.0588002f, -0.113626f, 0.0132473f, -0.0325691f, -0.0786973f, 0.163899f, 0.00525945f, -0.0208978f, -0.188894f, 0.0955209f, 0.235535f, -0.0761551f, -0.035881f, 0.118797f, 0.0526795f, -0.142891f, -0.0622927f, 0.144473f, -0.113774f, 0.0491807f, 0.0414315f, -0.0126343f, -0.0875944f, 0.0264076f, -0.17136f, -0.0725468f, -0.0427449f, -0.0286398f, -0.0537285f, 0.00159443f, 0.106739f, 0.100818f, 0.0907005f, -0.0525567f, 0.0275104f, -0.137268f, 0.15402f, 0.0596651f, -0.297984f, 0.0778276f, -0.0372008f, -0.0777964f, -0.0168532f, 0.0258906f, -0.00465703f, -0.0830696f, -0.0207592f, 0.109446f, -0.110701f, 0.179498f, 0.0752319f, 0.0848247f, 0.17416f, -0.285889f, -0.251303f, 0.0100071f, 0.0101899f, 0.145078f, -0.111133f, -0.00575954f, 0.0515581f, -0.0443203f, -0.0913258f, -0.0161453f, -0.0449113f, 0.0989803f, -0.0177613f, 0.157888f, -0.0926173f, 0.0830479f, -0.109199f, 0.0886168f, -0.0450191f, 0.106226f, -0.259468f, -0.181131f, 0.0845421f, 0.0745095f, 0.0601264f, -0.0418663f, -0.0343481f, -0.0599877f, -0.180518f, 0.00622462f, 0.0582915f, 0.0943433f, 0.0449844f, -0.198057f, 0.206816f, -0.000445837f, -0.206176f, 0.119879f, 0.171306f, 0.178961f, -0.0376099f, 0.0759337f, -0.193037f, 0.159082f, -0.0454939f, 0.231529f, 0.19697f, -0.0527631f, -0.07752f, -0.0489649f, -0.0179346f, 0.0186004f, -0.113162f, 0.0458203f, -0.103527f, 0.0190073f, -0.0867345f, -0.0370142f, 0.119919f, 0.106526f, -0.0667163f, 0.0359036f, -0.0789678f, -0.138709f, -0.028857f, -0.050412f, -0.0270008f, 0.120481f, -0.140142f, -0.043569f, 0.117375f, 0.0846386f, 0.0676402f, -0.0390312f, 0.0487376f, 0.00381574f, 0.068868f, -0.142914f, 0.0537322f, -0.0135041f, 0.186685f, 0.201425f, 0.0070755f, 0.0532441f, -0.014985f, 0.00928228f, 0.152088f, -0.0812244f, 0.00655907f, 0.078549f, -0.00257587f, -0.095374f, 0.101777f, -0.0854879f, -0.133711f, -0.0296004f, -0.0875907f, 0.00963053f, 0.17282f, 0.0914867f, -0.00121463f, -0.139567f, -0.149376f, 0.129923f, -0.101361f, -0.0292206f, -0.00676972f, 0.006276f, -0.0734507f, 0.0347251f, 0.0206986f, -0.11267f, -0.125151f, 0.216722f, 0.00684697f, -0.052881f, 0.0865279f, 0.0576749f, -0.0295077f, 0.0491213f, -0.030428f, 0.0589632f, -0.0949345f, 0.0493515f, -0.0694301f, -0.163268f, -0.146357f, -0.0673626f, 0.052032f, 0.0169042f, -0.0774962f, 0.0612279f, -0.166672f, -0.0599577f, -0.0633639f, 0.0254275f, 0.0201976f, -0.0526878f, -0.0169007f, -0.035077f, -0.02462f, -0.0181858f, 0.0965988f, 0.20853f, -0.297697f, -0.00340586f, -0.17861f, 0.0250463f, -0.118272f, 0.0869688f, 0.0192564f, -0.0381018f, 0.108253f, 0.0535526f, -0.0620016f, 0.0532243f, -0.0975526f, -0.00402363f, 0.0429589f, 0.158719f, -0.0104289f, -0.0432213f, 0.0479665f, -0.0717833f, -0.0616633f, 0.0328634f, -0.0113763f, -0.0626997f, -0.0816766f, -0.0432693f, -0.00923528f, -0.0144775f, -0.0233158f, -0.0947143f, -0.115744f, -0.157524f, 0.0229645f, -0.0810907f, 0.00524696f, 0.00146699f, -0.0679898f, -0.154052f, -0.065664f, -0.0133225f, 0.144046f, 0.0653894f, -0.0684904f, -0.00210106f, 0.0202325f, -0.00935773f, -0.0554747f, 0.041597f, 0.0649542f, -0.0404586f, 0.0412038f, 0.0477054f, 0.0682841f, -0.0435034f, 0.0765214f, -0.137913f, 0.0259453f, 0.234249f, -0.0356187f, -0.0887007f, -0.0882061f, 0.0608521f, 0.143281f, -0.113073f, -0.0459355f, -0.0299839f, 0.202796f, -0.112708f, -0.164899f, -0.14343f, -0.0281979f, 0.167565f, -0.146052f, -0.187173f, -0.0750578f, 0.02184f, 0.177881f, 0.0728833f, 0.0800647f, 
-0.00750367f, 0.119847f, 0.254518f, -0.0861984f, -0.0498776f, 0.0141963f, 0.0636368f, 0.123143f, -0.0387844f, 0.125971f, -0.0417447f, 0.113657f, -0.0981219f, -0.0547367f, -0.037534f, 0.0539888f, -0.100744f, -0.0434071f, 0.0682394f, 0.0871798f, -0.0311033f, -0.0370197f, -0.0249085f, -0.00708454f, -0.0404447f, 0.110853f, 0.00923914f, 0.0787698f, 0.112387f, -0.101354f, -0.163688f, -0.0185496f, 0.0226097f, -0.152786f, -0.0726398f, 0.0676446f, 0.125094f, 0.0132284f, -0.0593779f, 0.128175f, -0.241873f, -0.0273401f, 0.0742429f, 0.19901f, 0.0919988f, -0.00836191f, 0.0348858f, 0.165104f, -0.0613898f, 0.112708f, 0.0917051f, 0.0260319f, -0.065118f, 0.0293152f, 0.0565822f, 0.0119785f, 0.19477f, -0.0146787f, -0.260057f, -0.104186f, -0.0966388f, 0.149077f, -0.173489f, 0.0939372f, 0.128021f, -0.0357684f, -0.0517028f, 0.0143647f, 0.130607f, -0.0711035f, -0.00189047f, -0.115368f, 0.263314f, 0.0167397f, -0.245477f, -0.0858798f, 0.302755f, 0.115846f, -0.0318118f, -0.0383703f, -0.198666f, -0.109806f, -0.0613273f, -0.124917f, -0.0124952f, -0.100409f, -0.0208415f, -0.101458f, 0.0152759f, -0.0859899f, 0.159824f, 0.128329f, -0.31282f, 0.0354912f, 0.000195532f, 0.135932f, -0.0713688f, -0.0506432f, -0.142672f, 0.199843f, 0.0800669f, 0.126208f, -0.180627f, -0.0130582f, -0.0219251f, 0.180833f, 0.00287334f, -0.0152245f, -0.041901f, -0.0855636f, -0.0100015f, 0.0588728f, 0.0562564f, 0.0378948f, 0.16806f, -0.0358485f, 0.0583644f, -0.104326f, 0.0378592f, -0.0258975f, -0.0892439f, -0.0513174f, 0.0957967f, 0.0177465f, -0.112847f, 0.107659f, -0.111288f, 0.210257f, 0.0097983f, -0.285422f, 0.0180447f, 0.157152f, 0.0180197f, 0.168234f, 0.105974f, 0.0618372f, 0.146627f, 0.188205f, -0.101942f, -0.247116f, 0.0768678f, 0.00380349f, -0.0161102f, -0.000461874f, -0.00881054f, -0.169328f, -0.274421f, -0.0559141f, 0.0477404f, 0.175432f, 0.0685379f, -0.00848164f, -0.119128f, 0.150294f, -0.0379938f, -0.194851f, 0.144737f, -0.153222f, 0.161334f, -0.101825f, -0.107602f, 0.11945f, 0.155657f, -0.110698f, -0.0454566f, -0.125012f, 0.190649f, -0.0551027f, -0.125038f, 0.079506f, -0.17378f, -0.0425822f, -0.0454212f, -0.0382528f, 0.0142943f, 0.0820632f, -0.193152f, 0.12707f, 0.291845f, 0.0250315f, 0.14491f, -0.0557204f, 0.131727f, -0.0752608f, 0.0194725f, -0.10702f, 0.0810582f, 0.137637f, -0.0501724f, -0.0493465f, -0.0352318f, -0.129282f, 0.0497972f, 0.0202646f, -0.0726985f, -0.104236f, 0.159151f, 0.0465456f, 0.0690551f, 0.0935469f, 0.0878223f, -0.150406f, 0.288982f, -0.0746188f, -0.0518087f, 0.229179f, -0.155393f, 0.12748f, -0.11096f, 0.147661f, 0.296666f, 0.0166346f, -0.0512565f, 0.200339f, -0.135889f, 0.243239f, 0.105383f, -0.169761f, -0.115881f, -0.0864284f, -0.10431f, 0.259267f, 0.215348f, -0.165873f, -0.0555462f, 0.346018f, 0.0761327f, -0.123868f, 0.167333f, -0.166088f, -0.100387f, 0.144344f, 0.275665f, 0.0471274f, -0.00097327f, -2.95061e-05f, 0.100579f, -0.0133436f, -0.00761577f, 0.0421832f, -0.0193875f, -0.25119f, 0.0511413f, -0.0602377f, -0.00919222f, 0.0465616f, 0.197247f, -0.0547346f, 0.0316546f, -0.0290977f, -0.02137f, 0.171355f, -0.0803177f, -0.0696796f, 0.014716f, -0.00322727f, 0.0808363f, 0.0707796f, -0.156772f, 0.0370113f, -0.0178656f, -0.151161f, 0.0736129f, 0.148416f, 0.0778711f, 0.144752f, 0.217577f, -0.0354451f, -0.0698486f, -0.000243647f, -0.276243f, 0.0881414f, 0.0291491f, -0.161759f, 0.102996f, -0.0212529f, 0.069136f, -0.030714f, 0.121583f, 0.00274517f, -0.0432696f, -0.0347584f, 0.0413975f, -0.0519171f, 0.0354985f, 0.0710298f, -0.131995f, 0.189207f, -0.110611f, 0.00185955f, -0.0136468f, -0.160632f, 0.170354f, 0.0200062f, 
0.0842623f, -0.240196f, -0.00336677f, -0.0237264f, 0.0627549f, 0.0334963f, -0.150746f, -0.0992013f, -0.0330021f, 0.015375f, 0.09146f, -0.000341288f, -0.0496006f, 0.0264943f, -0.0349599f, -0.0870973f, -0.0158846f, 0.0649118f, -0.0264069f, 0.140874f, -0.134253f, -0.182363f, -0.0480671f, 0.0162931f, 0.173416f, -0.0676093f, 0.0855051f, -0.0274659f, 0.00257764f, 0.0560504f, -0.00783401f, 0.089456f, 0.00923903f, -0.0219951f, -0.0367787f, 0.128951f, -0.0805817f, -0.0152612f, 0.126516f, -0.0535054f, 0.127436f, -0.0867403f, -0.0405207f, 0.0684804f, -0.0714253f, 0.0466667f, 0.0801769f, -0.0913664f, -0.0567048f, 0.0305216f, 0.0837042f, 0.126671f, -0.103212f, 0.0240113f, -0.0412055f, -0.0176235f, 0.049632f, -0.123632f, -0.0965269f, -0.164327f, -0.0562135f, 0.00716115f, 0.0292528f, -0.00628087f, -0.0205584f, 0.061692f, 0.161399f, -0.0627791f, -0.0147308f, -0.0197903f, -0.0213397f, -0.0531252f, 0.0743197f, -0.0379642f, 0.0353109f, -0.0593498f, 0.104684f, -0.039303f, 0.0402391f, -0.105199f, -0.106732f, -0.0153482f, -0.0435758f, 0.0210806f, 0.0562325f, 0.111882f, 0.208832f, 0.0372033f, 0.181224f, 0.0372735f, 0.0591067f, 0.0228514f, 0.0581491f, 0.0938781f, -0.0359237f, 0.0777512f, 0.124402f, 0.0326624f, 0.0749604f, 0.0447018f, -0.0667774f, -0.0396599f, 0.278345f, -0.050733f, 0.0414298f, 0.0867645f, 0.0388629f, -0.169728f, -0.0444036f, 0.0819363f, 0.0533693f, 0.0657915f, -0.017999f, -0.0814245f, -0.0799469f, -0.0129275f, -0.0530307f, 0.0696467f, 0.234941f, -0.186764f, 0.0497403f, 0.0272907f, -0.319766f, 0.0231814f, 0.090319f, 0.212207f, -0.104769f, 0.108553f, 0.00730924f, 0.12782f, 0.0359584f, 0.300987f, -0.102498f, -0.0710951f, -0.0815281f, -0.167076f, -0.00679631f, -0.0164476f, 0.193152f, -0.145179f, -0.0611794f, 0.0978545f, 0.0429408f, -0.1264f, -0.128275f, -0.0151647f, -0.119932f, 0.20639f, 0.191013f, -0.209411f, -0.0437698f, 0.0790275f, -0.0718078f, 0.174889f, -0.15512f, 0.198089f, 0.0312478f, 0.322805f, -0.0412883f, 0.149645f, -0.0262381f, -0.0757899f, 0.24154f, 0.0394889f, -0.171412f, 0.217387f, 0.126547f, -0.05483f, 0.182408f, -0.0315842f, 0.0691452f, -0.312494f, -0.049709f, 0.0391686f, -0.090463f, -0.231341f, 0.13147f, 0.0697943f, -0.122581f, 0.177777f, 0.0664141f, -0.152756f, 0.0941587f, 0.0155857f, -0.0807591f, 0.110624f, 0.0525559f, -0.00812555f, -0.251217f, 0.10456f, 0.0342443f, 0.339913f, -0.0812434f, 0.0855094f, 0.227078f, -0.139941f, -0.0275827f, 0.00566671f, 0.0275855f, -0.138074f, -0.0610872f, 0.0968856f, 0.115626f, -0.10294f, 0.183147f, -0.208446f, -0.165846f, -0.339885f, -0.157991f, 0.0622351f, 0.193735f, -0.109777f, -0.0559708f, -0.0166934f, 0.252757f, 0.0809263f, 0.0132609f, 0.0627364f, -0.0352769f, -0.0109583f, 0.183909f, -0.056895f, -0.174981f, 0.0439825f, -0.0999545f, -0.220794f, -0.153614f, 0.010093f, 0.120385f, 0.0334917f, -0.0027368f, -0.20321f, 0.147244f, -0.152629f, -0.163189f, 0.0781647f, -0.232898f, -0.065927f, 0.0478716f, 0.0193067f, 0.0489897f, -0.0335261f, -0.0345639f, 0.122515f, -0.0226949f, -0.00417744f, 0.141205f, 0.151485f, 0.0569054f, 0.0816053f, -0.108104f, 0.180673f, 0.038813f, 0.0876608f, -0.0425215f, 0.142872f, -0.133096f, -0.057236f, 0.0076226f, -0.102328f, 0.163952f, 0.0427539f, -0.0977589f, -0.127269f, 0.0555577f, 0.0841087f, -0.0554758f, -0.118995f, 0.0263244f, 0.0141408f, 0.141878f, -0.00527854f, -0.0150048f, 0.0355274f, -0.0548872f, -0.189712f, -0.214184f, 0.00964007f, -0.0586524f, 0.00905838f, -0.0571771f, -0.146775f, -0.0635174f, 0.072846f, -0.172074f, -0.0494472f, -0.0627442f, -0.151566f, -0.145567f, 0.0659817f, 0.342936f, 0.0744705f, 0.100358f, 0.2071f, 
-0.184782f, -0.180136f, 0.00758893f, 0.226953f, -0.0703637f, 0.0939868f, 0.0576693f, -0.0953817f, -0.0710142f, -0.0250063f, 0.0815378f, 0.0196303f, 0.0219979f, -0.0401528f, 0.205523f, -0.289672f, 0.128141f, 0.119088f, -0.136449f, 0.120113f, -0.0131446f, -0.156771f, -0.0338674f, -0.0382205f, 0.0895644f, 0.145418f, 0.158809f, -0.0377644f, 0.100308f, -0.0670112f, -0.0316172f, -0.082633f, -0.0605007f, 0.00175235f, 0.162351f, 0.0705741f, 0.221847f, -0.0418172f, -0.140294f, -0.13928f, 0.0970257f, -0.186233f, 0.0925076f, -0.0464865f, -0.0486217f, -0.012789f, 0.0008506f, 0.0545131f, 0.0202357f, -0.13518f, 0.0724624f, -0.00265273f, 0.121795f, 0.139651f, -0.00623427f, 0.0516746f, -0.278809f, 0.0307117f, -0.0138461f, -0.0316161f, -0.0635615f, -0.0167036f, -0.0839848f, -0.123533f, -0.188764f, 0.0690727f, 0.0700189f, -0.0454808f, 0.0761997f, -0.0657147f, -0.0330789f, 0.131101f, 0.0320996f, -0.00885046f, 0.0237182f, 0.150155f, 0.0257812f, 0.0506739f, 0.165673f, 0.0528347f, 0.0749397f, -0.128485f, -0.149093f, 0.166925f, -0.0429316f, -0.0940599f, 0.110752f, -0.117957f, 0.130362f, -0.0427425f, -0.0632804f, 0.0714312f, -0.175904f, 0.0655312f, -0.152686f, 0.0701625f, -0.186286f, 0.0537244f, 0.14171f, 0.16313f, 0.0631038f, 0.10094f, 0.0215025f, 0.0347512f, -0.00589819f, -0.00261855f, 0.0450388f, -0.0676456f, -0.041731f, 0.104283f, -0.0327162f, -0.150531f, -0.028093f, -0.165598f, 0.0765471f, -0.0820261f, -0.151437f, -0.0308188f, 0.120251f, -0.0589898f, -0.0814475f, 0.0172613f, 0.118746f, 0.0874222f, 0.0835443f, 0.152497f, 0.124371f, -0.146124f, 0.0384179f, -0.189206f, 0.155086f, 0.312175f, -0.0796643f, -0.305907f, -0.0736521f, -0.0227438f, -0.241296f, 0.0622643f, 0.00960953f, 0.120467f, 0.0995098f, 0.0850037f, 0.0866194f, -0.0570051f, 0.0693803f, -0.069417f, -0.0779219f, -0.0714367f, -0.0511432f, 0.197727f, -0.0330674f, -0.0685791f, 0.0719018f, 0.00689966f, -0.104008f, -0.292775f, 0.103057f, -0.0613242f, -0.0322103f, 0.109159f, -0.038669f, -0.136429f, 0.122571f, 0.202224f, 0.171118f, -0.224797f, 0.0143497f, -0.194573f, 0.035574f, 0.0133479f, 0.0874611f, -0.137535f, 0.182776f, 0.169329f, -0.0851196f, 0.127398f, -0.0385788f, -0.119594f, -0.0981955f, 0.0539169f, 0.0179976f, -0.075667f, -0.0220002f, 0.126424f, -0.0621554f, 0.168451f, -0.146512f, 0.0333036f, 0.333577f, -0.199602f, -0.167206f, 0.32022f, 0.00362058f, 0.0762582f, -0.0596424f, -0.00595615f, 0.0102077f, -0.0733206f, -0.106703f, 0.229529f, -0.0993421f, -0.0215923f, 0.00751318f, -0.037281f, -0.0105213f, -0.0245158f, 0.116935f, -0.0750441f, 0.106165f, -0.0584906f, 0.113367f, 0.138997f, -0.118686f, 0.0747012f, 0.0806652f, -0.0637945f, 0.072159f, -0.0520217f, 0.0760341f, -0.0310179f, -0.0209772f, 0.0515148f, -0.0106329f, -0.084966f, -0.135738f, -0.0975498f, 0.0531544f, -0.0302376f, 0.0319431f, 0.0419289f, 0.0145727f, 0.073691f, -0.0458698f, -0.14811f, 0.0864719f, 0.0694009f, 0.0957774f, -0.113633f, -0.049991f, 0.0151852f, -0.0131232f, -0.105976f, -0.121235f, -0.105547f, 0.0462909f, 0.0685215f, 0.0153091f, 0.0501654f, 0.0219969f, -0.0181243f, 0.0636983f, 0.0158325f, 0.0460223f, -0.000670258f, 0.0261072f, 0.0337185f, -0.0455574f, 0.00626718f, 0.123593f, -0.042446f, -0.0899644f, 0.151826f, -0.0402423f, 0.0673586f, -0.0679838f, 0.0369653f, -0.160131f, 0.0211449f, 0.280465f, 0.101367f, 0.0441965f, 0.0645803f, 0.105521f, 0.0968707f, -0.00496291f, 0.139755f, 0.0112006f, 0.0234996f, 0.0565801f, 0.00271005f, 0.0953157f, 0.0607829f, -0.00735964f, 0.0344101f, 0.00578495f, 0.157768f, 0.12943f, -0.0244923f, -0.00774611f, 0.0378338f, 0.006374f, 0.0459797f, -0.0023943f, 
0.0776756f, -0.130663f, -0.0616284f, -0.0373749f, 0.0461782f, 0.147859f, 0.0806398f, 0.0531128f, 0.0805751f, -0.0466261f, -0.0984892f, 0.107121f, 0.0569994f, -0.0424457f, 0.0309604f, -0.0301039f, -0.170548f, 0.186004f, -0.0316727f, 0.0646111f, 0.029736f, 0.139371f, 0.113806f, -0.0125401f, 0.00117808f, 0.00301598f, 0.0253076f, 0.0614612f, -0.000862088f, 0.124195f, -0.0675383f, 0.0432192f, -0.134983f, -0.173352f, 0.0574713f, 0.0502343f, -0.0422746f, -0.0853276f, -0.279391f, -0.103267f, -0.131245f, -0.13177f, -0.0573477f, 0.206888f, 0.0336043f, -0.00292155f, -0.0832419f, -0.0446665f, -0.0376712f, 0.100174f, -0.0735857f, 0.0122289f, -0.122218f, 0.132375f, 0.127904f, 0.233338f, -0.142078f, -0.0307978f, -0.130829f, -0.155276f, -0.000830845f, -0.0450574f, -0.211732f, 0.107058f, 0.0294231f, -0.0420133f, 0.301597f, -0.226347f, -0.0215871f, 0.231819f, 0.179751f, 0.131988f, 0.0225025f, -0.118644f, 0.12747f, 0.123244f, -0.0710275f, -0.116407f, -0.0831745f, -0.130239f, 0.0027758f, 0.0338664f, -0.133211f, 0.0977805f, -0.0311547f, 0.00225501f, -0.105702f, -0.139918f, -0.0700555f, -0.0268356f, 0.0265563f, 0.0322304f, 0.333508f, 0.27746f, -0.105972f, 0.204437f, 0.0174261f, -0.125124f, 0.097578f, -0.000971249f, -0.114268f, -0.145734f, 0.0809757f, 0.173898f, 0.0289713f, -0.0555501f, 0.205156f, -0.121679f, -0.139063f, -0.259678f, 0.1069f, 0.0286787f, 0.210054f, -0.115093f, 0.0162415f, -0.0435798f, 0.0453261f, -0.163677f, 0.2189f, -0.214846f, 0.146902f, 0.0964614f, 0.00641223f, 0.222883f, -0.0154906f, -0.00131629f, 0.00612184f, 0.102965f, 0.00762272f, -0.0764442f, -0.0740976f, 0.152941f, -0.0291966f, 0.0975746f, -0.0997908f, 0.00582824f, 0.119599f, -0.186128f, 0.0776957f, 0.189578f, 0.0211283f, 0.130569f, -0.0739394f, -0.198629f, -0.0392742f, 0.0834129f, -0.090232f, 0.0859862f, -0.0831193f, 0.0361708f, -0.123919f, -0.00786216f, -0.0678348f, 0.1805f, -0.024118f, 0.0159092f, 0.0846805f, 0.198399f, 0.123242f, 0.0253988f, 0.018767f, -0.211508f, 0.212309f, -0.0964088f, -0.113188f, -0.208419f, -0.244779f, -0.0770067f, 0.0496954f, -0.150546f, 0.00841154f, -0.0296205f, 0.0642756f, 0.00500122f, -0.0523034f, -0.0709524f, 0.165673f, 0.180753f, 0.0258572f, -0.00270847f, -0.060578f, 0.148877f, -0.132787f, 0.126505f, 0.0767535f, -0.207558f, -0.0145993f, -0.178216f, -0.141995f, 0.0953777f, 0.0263095f, -0.100894f, 0.076645f, 0.0577234f, 0.296604f, 0.151073f, 0.0678511f, 0.12685f, 0.0287726f, 0.0927748f, -0.142404f, 0.158894f, 0.235325f, 0.026567f, 0.0691311f, -0.0795059f, 0.259903f, 0.112407f, 0.0876057f, 0.0987532f, -0.156062f, -0.164544f, 0.122628f, -0.0161695f, -0.0872636f, -0.0200897f, -0.0676756f, 0.302504f, 0.130418f, 0.185497f, -0.0636215f, -0.152076f, -0.0182599f, -0.016521f, -0.00725324f, -0.0684344f, 0.00425221f, 0.108273f, -0.0436141f, -0.108968f, -0.162845f, -0.0920954f, -0.0778879f, -0.190341f, -0.0340267f, -0.15175f, -0.286598f, 0.00784783f, -0.101229f, -0.0627455f, -0.19413f, -0.144877f, 0.0786388f, 0.302923f, 0.0148747f, -0.178929f, -0.131618f, -0.058789f, 0.0817423f, 0.148466f, -0.00697995f, 0.439176f, -0.0110164f, -0.0625794f, -0.118082f, -0.194804f, -0.263674f, 0.298317f, 0.0573918f, -0.159657f, -0.156461f, 0.0401275f, 0.213389f, -0.151609f, 0.0536184f, 0.0560205f, -0.18624f, 0.300344f, -0.0380615f, -0.168366f, 0.0359582f, -0.188781f, 0.174949f, -0.0411812f, 0.0325197f, 0.053454f, -0.118273f, -0.114449f, -0.0198333f, 0.0454938f, -0.0289594f, -0.0745887f, 0.001945f, 0.218167f, -0.098585f, -0.206205f, -0.0263053f, -0.0889894f, 0.0397852f, -0.0224575f, -0.13282f, -0.117512f, -0.0907816f, 0.00598627f, 
-0.0161887f, 0.180835f, -0.0495169f, -0.0844113f, -0.068347f, -0.17579f, -0.02717f, 0.024448f, 0.00136483f, 0.103834f, 0.101668f, -0.0194171f, -0.0154055f, -0.100141f, 0.191981f, -0.0160862f, 0.142042f, 0.243844f, 0.181115f, 0.0127734f, -0.00247913f, 0.0155267f, -0.120857f, -0.189613f, -0.0465066f, 0.022456f, -0.149848f, 0.0553893f, -0.0550394f, -0.0780449f, -0.0559193f, -0.0847209f, 0.0636217f, -0.0240147f, -0.0765591f, 0.124972f, -0.0863761f, 0.122869f, 0.0944162f, -0.0544976f, 0.00207065f, 0.0507205f, 0.0783548f, 0.00630309f, 0.124784f, 0.0806319f, 0.143318f, -0.111277f, -0.0844753f, -0.0442736f, 0.181034f, 0.0896975f, 0.0792532f, 0.096939f, -0.0859035f, -0.157592f, -0.0214996f, -0.204737f, -0.0485357f, 0.00667932f, 0.247652f, -0.0358236f, -0.0450392f, 0.120724f, -0.0470001f, 0.157388f, -0.171752f, 0.0211501f, -0.0434529f, -0.240291f, 0.0259062f, 0.0650488f, -0.0789314f, 0.0407254f, 0.178145f, -0.232562f, 0.0164714f, -0.0962433f, 0.144049f, -0.0643646f, -0.23135f, 0.036033f, 0.160172f, -0.0639099f, 0.143247f, -0.00405809f, 0.0392108f, 0.105821f, -0.0117836f, 0.364862f, -0.0218085f, 0.0582941f, -0.0268128f, -0.17341f, 0.0822477f, -0.0965718f, -0.0813121f, -0.214483f, -0.0213665f, -0.0935961f, -0.0511138f, -0.0948025f, -0.0737758f, 0.0593076f, -0.00460759f, -0.020429f, -0.171676f, -0.0735104f, -0.0423072f, 0.022951f, 0.0580283f, -0.0729432f, -0.104986f, -0.118006f, -0.244905f, -0.133555f, -0.0186048f, -0.187553f, -0.0160515f, -0.0255951f, -0.170562f, 0.0676428f, 0.0219462f, -0.127552f, 0.199784f, 0.107133f, 0.0332873f, 0.0529776f, 0.00201128f, 0.0642946f, -0.0317144f, -0.0373147f, -0.180745f, 0.0254618f, -0.0889232f, 0.0566953f, 0.236658f, 0.0163783f, -0.0463298f, 0.210977f, 0.00385164f, 0.081637f, 0.171105f, -0.168025f, 0.0661184f, -0.0841871f, -0.0602157f, 0.0176702f, -0.0296257f, 0.0125458f, -0.0625908f, -0.0880055f, 0.143577f, -0.0285165f, 0.215558f, -0.00994533f, 0.411468f, -0.0962043f, -0.0192072f, 0.0887899f, -0.0697142f, -0.0986407f, -0.0868332f, 0.0438853f, -0.051023f, -0.256342f, -0.106239f, -0.11333f, -0.0903353f, 0.0967568f, -0.309541f, -0.0189138f, -0.218905f, -0.173599f, 0.120725f, 0.0449916f, 0.0317646f, -0.0340631f, -0.135762f, -0.0371541f, -0.206674f, -0.226991f, -0.059626f, -0.157564f, 0.0606819f, -0.122837f, 0.096013f, 0.0672354f, -0.0537031f, -0.0718931f, -0.199894f, -0.0548641f, 0.107513f, -0.0139111f, -0.171422f, -0.0826682f, -0.0569582f, -0.105932f, 0.0802937f, 0.151964f, 0.0131966f, 0.07648f, 0.0280713f, -0.0461546f, -0.115433f, -0.0376063f, -0.100288f, 0.073246f, -0.109359f, 0.0487129f, 0.0443031f, -0.0950756f, 0.164226f, 0.142305f, 0.000433425f, -0.0197098f, -0.152526f, 0.152461f, -0.200112f, 0.233174f, -0.0221765f, -0.025886f, -0.0804262f, 0.0785665f, 0.0923028f, 0.133491f, 0.142697f, 0.0353495f, -0.115291f, 0.0199922f, 0.0612502f, -0.0765483f, 0.0436982f, 0.0486859f, -0.0690549f, -0.109503f, 0.107462f, -0.0566332f, -0.0132047f, -0.0329169f, -0.204581f, 0.0822049f, 0.143198f, 0.06244f, 0.106203f, -0.026786f, -0.149848f, 0.0301633f, -0.117558f, 0.133783f, 0.167485f, -0.0275655f, -0.0327422f, 0.164527f, -0.000375591f, 0.0887277f, -0.157277f, 0.0355882f, 0.0368576f, 0.0672137f, -0.049742f, 0.122216f, -0.0127117f, -0.0044736f, 0.121116f, -0.176739f, 0.191013f, 0.0961458f, 0.0432433f, 0.091975f, 0.0605433f, -0.0266397f, -0.068778f, 0.0193055f, -0.0442058f, 0.263195f, -0.0548342f, -0.105645f, -0.0326775f, 0.122997f, 0.00341024f, -0.163314f, 0.0460038f, 0.0287458f, -0.127227f, 0.0990594f, 0.0658493f, 0.0467441f, 0.0147039f, -0.0139484f, 0.090247f, -0.0571602f, 
-0.0544286f, -0.010209f, -0.111872f, 0.0788802f, 0.171519f, 0.169329f, 0.0999534f, 0.00700933f, -0.0719375f, -0.0595488f, 0.0364368f, 0.0358204f, 0.098086f, 0.24935f, -0.178989f, -0.0402939f, 0.0115397f, 0.0787451f, 0.235796f, -0.156643f, 0.0121429f, -0.182488f, 0.000534596f, -0.092585f, 0.00365245f, 0.0763095f, -0.0595423f, -0.140221f, 0.171876f, -0.0526002f, 0.0765097f, 0.0359853f, -0.131491f, -0.122749f, 0.187665f, -0.0282685f, 0.0630142f, 0.242813f, 0.0343545f, -0.197126f, -0.0403493f, 0.158171f, -0.188647f, 0.0483702f, -0.0336927f, -0.249133f, 0.0196906f, -0.0444548f, 0.184136f, -0.0879479f, -0.263376f, 0.0351465f, 0.00967625f, 0.135519f, -0.0148328f, -0.048009f, -0.0790516f, -0.102249f, 0.158303f, 0.208098f, 0.127586f, 0.138344f, -0.0882175f, -0.0206738f, -0.0353707f, -0.0403802f, -0.029957f, 0.0438575f, -0.0938733f, -0.0113277f, -0.138526f, -0.0393376f, -0.0114237f, -0.0153331f, 0.158024f, -0.051859f, -0.0268225f, 0.114178f, -0.0909112f, -0.0584362f, 0.105077f, -0.00871527f, -0.0263271f, 0.134731f, 0.124487f, -0.0602837f, -0.317559f, 0.0595694f, -0.040935f, -0.0758511f, 0.153561f, -0.025558f, 0.0220983f, -0.238473f, 0.0676488f, 0.0466724f, -0.225018f, -0.134105f, -0.0579748f, -0.0439054f, 0.106927f, 0.104645f, 0.0802242f, 0.108084f, 0.179642f, -0.00713578f, -0.041249f, -0.0799491f, -0.124749f, -3.37074e-05f, -0.1252f, 0.122808f, -0.0297159f, 0.0912978f, 0.0985428f, -0.0113928f, -0.134364f, 0.135816f, -0.210747f, 0.0581764f, 0.263032f, -0.0530373f, 0.0916552f, -0.045496f, 0.0582804f, -0.00956537f, -0.0154948f, -0.0423851f, 0.136804f, -0.109962f, 0.00940771f, -0.0292943f, -0.0414003f, 0.00620048f, 0.00956379f, -0.16308f, 0.052962f, -0.177084f, -0.024391f, 0.000572143f, 0.019255f, 0.0644014f, -0.153145f, 0.0128466f, -0.0821009f, 0.0346998f, 0.0650427f, -0.0177383f, 0.0660731f, 0.123216f, -0.0833914f, 0.0679018f, -0.0681478f, 0.02281f, -0.0524921f, 0.116663f, -0.0979795f, -0.151933f, -0.0352637f, 0.0881004f, 0.0487541f, 0.00661808f, -0.0508892f, -0.0186194f, 0.0520507f, -0.0413656f, 0.146433f, -0.050049f, 0.0827483f, -0.0501086f, 0.0153644f, -0.0459721f, 0.0655622f, 0.0872876f, -0.0773443f, -0.00406444f, 0.138694f, -0.235277f, 0.148203f, -0.0704743f, -0.0322915f, -0.00143417f, -0.0982438f, -0.0880526f, -0.0759267f, 0.0810061f, -0.0172714f, -0.150085f, -0.0612483f, -0.0500993f, 0.00923557f, -0.0583429f, -0.00595263f, 0.0896776f, 0.0220401f, -0.0565514f, -0.310119f, 0.0819224f, 0.122018f, -0.0788263f, 0.0777749f, 0.0979377f, -0.0639043f, 0.00928077f, -0.0499973f, -0.0400224f, -0.000574911f, -0.110339f, -0.0948323f, 0.122082f, 0.0561886f, 0.1508f, 0.251017f, -0.0138699f, 0.024624f, 0.0169676f, 0.136544f, 0.0479448f, -0.109695f, -0.080051f, -0.0163915f, -0.167296f, 0.0575209f, -0.249151f, 0.0944675f, -0.0781063f, 0.375681f, 0.0394529f, 0.0618735f, -0.089519f, 0.118682f, -0.0253652f, 0.184129f, 0.297229f, -0.191492f, 0.0505591f, -0.0913989f, 0.259753f, -0.00750962f, 0.234687f, 0.157358f, -0.0707787f, 0.124578f, -0.0755218f, 0.0786754f, 0.0353833f, 0.103605f, 0.0830876f, 0.0424528f, -0.231562f, -0.17194f, 0.0643963f, -0.0140621f, -0.0211199f, -0.0049944f, -0.0737082f, -0.035092f, -0.0545289f, -0.0290527f, -0.0643643f, 0.11262f, -0.191746f, -0.0775751f, 0.00088546f, 0.0690892f, -0.25274f, -0.156711f, 0.102495f, 0.079138f, 0.155971f, 0.0640578f, 0.19935f, -0.0377256f, -0.246248f, -0.0104044f, 0.163365f, -0.00444166f, -0.0071001f, -0.124159f, 0.0493446f, -0.0850798f, 0.135526f, -0.00830749f, 0.148943f, -0.198971f, 0.13768f, 0.111081f, -0.049332f, 0.16852f, -0.215226f, -0.0367846f, 0.0321961f, 
-0.0437618f, 0.000728186f, -0.128975f, -0.15799f, -0.160072f, 0.0678405f, 0.281432f, -0.19657f, -0.0383664f, 0.248991f, -0.106262f, 0.292048f, 0.0339603f, -0.287998f, -0.0871172f, -0.180279f, 0.0977158f, -0.19148f, -0.28444f, 0.177285f, 0.039786f, 0.019712f, 0.0729741f, 0.0719424f, -0.0385185f, -0.0253956f, 0.0385404f, 0.221119f, 0.0895462f, -0.203467f, -0.147307f, -0.130687f, -0.0412938f, 0.232114f, -0.169174f, 0.0337914f, 0.0460482f, 0.0991183f, -0.0201279f, 0.0666047f, -0.196172f, -0.0853529f, -0.320088f, 0.0994365f, -0.151252f, -0.296701f, 0.00891138f, 0.0887634f, -0.207757f, -0.138991f, -0.157515f, -0.0990999f, -0.049215f, -0.00359244f, 0.0375753f, -0.13283f, 0.214505f, -0.14148f, 0.0117754f, -0.0264148f, 0.0690092f, 0.0975677f, -0.0878925f, -0.0515724f, 0.129437f, 0.143636f, 0.0310838f, -0.0442985f, -0.172708f, 0.181611f, 0.000661019f, 0.139161f, -0.134996f, -0.0690357f, -0.180024f, 0.0707661f, -0.119782f, -0.21067f, 0.291987f, 0.0581316f, -0.0336682f, -0.127663f, -0.168299f, 0.15631f, 0.078031f, -0.280874f, 0.285433f, -0.0866227f, 0.0441013f, -0.099833f, -0.0951391f, 0.061341f, -0.139349f, -0.0267476f, -0.207449f, 0.153061f, 0.227412f, -0.0938632f, 0.0815401f, -0.0653588f, 0.177069f, -0.052655f, 0.00816404f, -0.0242939f, 0.0352447f, 0.145852f, -0.0111049f, -0.0305896f, 0.0378056f, -0.114642f, 0.0500372f, 0.256098f, -0.0730372f, -0.0345089f, -0.214501f, 0.088835f, 0.270163f, 0.0364656f, 0.106849f, -0.147953f, 0.0747268f, 0.0936846f, -0.00268404f, -0.0594785f, -0.0375037f, -0.0887921f, 0.103873f, -0.108598f, 0.0407106f, -0.100062f, -0.170551f, 0.125359f, 0.0146713f, -0.0571015f, -0.194618f, 0.171913f, 0.0539628f, -0.0323952f, 0.231512f, -0.252168f, -0.118866f, -0.083549f, -0.177682f, 0.107113f, 0.0781758f, 0.0594684f, 0.00499479f, 0.0729393f, -0.0537143f, -0.0523934f, -0.082592f, -0.126924f, 0.165956f, -0.269973f, -0.0459364f, -0.121452f, -0.135581f, 0.223677f, -0.121832f, -0.0102091f, -0.117992f, -0.1182f, -0.042425f, -0.0122301f, -0.00365751f, 0.00509835f, -0.140769f, 0.022116f, -0.117889f, 0.150793f, 0.279977f, 0.0207496f, 0.116917f, 0.17914f, -0.0867594f, 0.0018449f, 0.101424f, 0.105308f, -0.154097f, -0.0837564f, 0.0710214f, -0.098975f, 0.141926f, -0.231457f, 0.0262595f, -0.152914f, -0.0976511f, 0.0494525f, 0.125201f, -0.357862f, 0.189771f, 0.184475f, 0.377177f, -0.0180502f, -0.181088f, 0.208415f, 0.0124353f, 0.175047f, 0.0634869f, 0.176655f, -0.160972f, -0.251638f, -0.0465624f, 0.1851f, 0.198393f, 0.0739704f, 0.0627861f, -0.157248f, 0.0945963f, 0.169016f, -0.198817f, 0.0335858f, 0.18541f, -0.171907f, -0.193336f, -0.0974043f, 0.0662577f, -0.0289654f, -0.0996697f, 0.039504f, 0.155125f, -0.00389195f, -0.0639551f, -0.0500292f, -0.0244204f, 0.0112994f, -0.00909482f, -0.124192f, 0.164573f, 0.126937f, 0.102731f, 0.163111f, -0.122477f, 0.05775f, 0.00755021f, -0.0673032f, 0.0299418f, -0.127382f, 0.204485f, -0.0424081f, -0.0660717f, -0.0015198f, 0.0728257f, 0.0590629f, 0.310328f, 0.0234367f, -0.01669f, -0.0272065f, 0.0184384f, 0.270189f, -0.024391f, -0.226581f, 0.0332318f, -0.0683126f, -0.0213689f, -0.149375f, -0.176875f, 0.233165f, 0.0860945f, -0.203347f, 0.174001f, -0.0505966f, -0.148272f, 0.0629295f, 0.0526873f, 0.0399599f, 0.160626f, 0.0281925f, -0.026179f, -0.0391977f, 0.0836354f, -0.0980322f, -0.0438703f, 0.102913f, 0.0394862f, 0.366043f, -0.0753206f, 0.122993f, -0.121856f, -0.0916316f, 0.0468668f, 0.102665f, -0.18753f, -0.125031f, 0.174206f, -0.166477f, 0.257371f, 0.108198f, -0.152632f, -0.0549908f, -0.00445209f, 0.177751f, 0.0774868f, -0.223178f, -0.0314952f, -0.0588019f, 
-0.0577483f, 0.190325f, 0.104597f, 0.0704718f, -0.121883f, 0.15023f, -0.0730659f, 0.0659461f, 0.221952f, 0.00475319f, -0.0253628f, -0.0093409f, 0.227499f, -0.0254921f, 0.0915292f, 0.139914f, 0.0585727f, 0.13218f, -0.0640147f, 0.0986692f, -0.0565669f, 0.304242f, -0.104631f, 0.213945f, -0.0427173f, -0.0984624f, 0.154443f, 0.209982f, 0.0079323f, -0.0335943f, 0.0523011f, 0.0577859f, 0.0389239f, 0.171507f, -0.0351128f, -0.143558f, 0.0353017f, 0.200947f, 0.056613f, 0.0323922f, 0.165315f, -0.0282567f, -0.146359f, 0.0627438f, 0.0319588f, 0.0339514f, -0.0545956f, -0.0719747f, -0.0157826f, 0.0115707f, -0.223508f, 0.170779f, -0.261247f, 0.161725f, -0.0474512f, -0.0817654f, 0.103288f, -0.0848836f, -0.00576106f, -0.0939211f, 0.0421044f, 0.0293124f, 0.0392874f, -0.0861634f, 0.0506589f, 0.133116f, 0.150643f, 0.213563f, 0.148135f, 0.240643f, -0.263292f, 0.215457f, 0.13152f, -0.132204f, 0.175195f, -0.14678f, 0.123917f, -0.355653f, 0.221528f, -0.335998f, 0.0591199f, 0.152389f, -0.0569674f, 0.0251812f, -0.106788f, 0.141392f, 0.000663555f, 0.00554093f, -0.0630895f, 0.207504f, -0.0542251f, 0.0736436f, 0.061985f, 0.252209f, -0.276209f, -0.0712424f, -0.127771f, 0.0517661f, -0.0573801f, -0.0300092f, -0.0284713f, -0.110957f, -0.0661945f, 0.122647f, -0.122942f, 0.0301846f, -0.155941f, -0.0807976f, 0.039458f, -0.0316346f, -0.0325458f, -0.0554089f, 0.110666f, 0.0830738f, 0.00363627f, 0.119053f, -0.0204632f, 0.262043f, 0.159036f, 0.0571317f, -0.181915f, -0.0979988f, -0.14703f, -0.154228f, -0.0139271f, -0.0708364f, 0.139381f, -0.2362f, 0.0193619f, 0.0453204f, 0.0567484f, 0.173824f, 0.117773f, -0.00332873f, -0.0112279f, -0.0722681f, -0.0289657f, -0.0507285f, 0.118787f, 0.0315706f, 0.0212674f, 0.0206731f, -0.0236192f, -0.0573769f, -0.0332449f, 0.0112735f, 0.0615494f, 0.113297f, 0.161885f, -0.115966f, 0.0421901f, -0.0385358f, -0.110057f, 0.0429052f, -0.0712834f, 0.0976452f, -0.064218f, 0.0751013f, 0.214687f, 0.0542966f, 0.0443563f, -0.0769756f, -0.0373999f, 0.164008f, -0.0505326f, 0.00125737f, -0.0126467f, -0.00355373f, -0.0411733f, 0.153456f, -0.237199f, 0.0290545f, -0.112688f, 0.14473f, -0.0529883f, 0.0724369f, -0.0404337f, 0.0212136f, -0.0422881f, -0.027609f, -0.10796f, 0.125772f, 0.0765661f, 0.0228904f, -0.0181031f, 0.00124019f, -0.140476f, 0.10999f, -0.173882f, -0.126255f, 0.0418209f, -0.179944f, -0.191607f, -0.020302f, 0.0614274f, -0.0863173f, 0.00445859f, 0.0516228f, -0.0579874f, -0.0654816f, 0.0133511f, -0.0236542f, -0.0837031f, 0.19486f, -0.13387f, -0.028467f, -0.0363815f, 0.0159868f, 0.202969f, -0.107041f, -0.24144f, -0.0138798f, 0.0505556f, -0.0553807f, 0.01119f, -0.00691395f, 0.00866526f, -0.00603107f, -0.108696f, -0.0475683f, -0.102675f, 0.0581881f, 0.00247019f, 0.221338f, 0.0282824f, -0.250254f, 0.0314065f, -0.150588f, 0.0244129f, -0.00735956f, 0.0392754f, 0.138102f, 0.0150318f, 0.0456039f, 0.0544637f, -0.00308071f, 0.0481664f, 0.0666403f, 0.0253438f, -0.10434f, 0.0338339f, 0.111899f, 0.0273458f, -0.122546f, -0.0668817f, -0.225542f, 0.05416f, 0.101241f, 0.000406677f, 0.102811f, -0.0601314f, 0.039399f, 0.0308256f, -0.0921805f, 0.0178523f, 0.198057f, 0.195732f, 0.203292f, 0.131383f, 0.0862096f, -0.0102639f, -0.0843691f, 0.0210823f, -0.00482735f, -0.000907958f, -0.113243f, 0.0467233f, 0.0309043f, 0.146057f, 0.212456f, -0.0932336f, -0.242933f, 0.100431f, -0.0720508f, -0.101171f, -0.0855f, -0.0188432f, 0.0459342f, 0.156714f, -0.0811427f, -0.0326259f, 0.186238f, 0.0923666f, -0.00817366f, 0.0282681f, 0.0881631f, -0.0834928f, -0.0164617f, -0.0496607f, 0.218332f, 0.00411857f, 0.131525f, 0.0991355f, 0.0270095f, 
0.0246061f, -0.0719871f, 0.0878847f, -0.0392614f, -0.140324f, -0.131136f, 0.0499142f, -0.0339919f, -0.0375233f, -0.0890882f, 8.53245e-05f, 0.0551538f, -0.106908f, -0.0520464f, -0.00888123f, -0.11121f, 0.0620103f, 0.02852f, 0.00360382f, -0.100427f, 0.0432809f, 0.0889561f, -0.146118f, 0.13726f, -0.0639197f, 0.0401705f, 0.0265976f, -0.171236f, 0.0321678f, 0.0224811f, -0.0301821f, 0.171515f, -0.0470559f, -0.0426517f, -0.16323f, -0.0604493f, -0.034069f, -0.0972682f, 0.0228597f, 0.0883019f, -0.136297f, 0.160625f, 0.0749949f, 0.0894854f, -0.0319722f, 0.0663027f, 0.0817774f, 0.0163954f, 0.0903092f, -0.057734f, 0.0474958f, 0.102466f, 0.0303416f, -0.0479542f, -0.0631701f, -0.00230907f, -0.0777043f, 0.121863f, -0.0166429f, -0.0999836f, -0.13273f, 0.0800447f, -0.110862f, 0.0974836f, -0.0628038f, -0.0271585f, 0.0782667f, 0.0348656f, 0.0545337f, 0.143262f, 0.0748304f, 0.0712561f, -0.0664998f, -0.168497f, -0.101424f, 0.0974512f, 0.0252471f, -0.0867533f, -0.0656633f, -0.166401f, -0.109735f, -0.098683f, 0.0389852f, -0.0308158f, -0.138058f, 0.0885635f, -0.0492763f, -0.0404245f, -0.218716f, -0.0098714f, -0.182133f, 0.0218515f, 0.0435324f, 0.144572f, 0.141958f, -0.113712f, -0.0386648f, 0.180394f, 0.0515006f, -0.120037f, 0.0847489f, 0.0713453f, -0.0113679f, -0.0583359f, -0.0920653f, 0.0933766f, 0.0476242f, 0.0307557f, -0.163229f, -0.143127f, 0.00845675f, 0.0598008f, -0.0335867f, -0.0126779f, -0.281854f, -0.0437504f, 0.0176548f, 0.0524415f, 0.00556486f, 0.156353f, -0.117685f, 0.0605237f, 0.195363f, -0.0567216f, 0.00102997f, -0.0264323f, -0.00372358f, -0.0646244f, -0.0453075f, -0.166247f, 0.281282f, 0.17668f, -0.208788f, -0.142048f, -0.104778f, 0.109376f, -0.140984f, 0.12344f, 0.162422f, 0.0161071f, 0.117405f, -0.0344586f, -0.0492684f, 0.204228f, -0.0682354f, -0.169862f, -0.0145701f, 0.213315f, 0.0548976f, -0.0115088f, -0.169776f, 0.118558f, -0.0933496f, -0.0044707f, -0.141336f, -0.30965f, -0.0270276f, -0.222323f, 0.0159243f, 0.0588809f, 0.177776f, -0.126862f, 0.123038f, -0.203747f, 0.0544854f, 0.0938431f, 0.0423699f, -0.146566f, 0.153805f, -0.144095f, -0.0456819f, -0.200621f, -0.134878f, 0.28065f, -0.00443472f, 0.0279263f, 0.0820288f, -0.0894597f, 0.0721688f, 0.0774442f, -0.189078f, -0.0349292f, 0.13507f, 0.00840802f, -0.0826386f, 0.0738777f, 0.0206416f, 0.113751f, 0.0498769f, -0.117093f, 0.0130043f, 0.168696f, 0.0784696f, -0.15385f, -0.0693485f, -0.084018f, -0.365608f, -0.125739f, -0.176172f, 0.0950291f, -0.0214553f, 0.137151f, 0.0207239f, 0.12605f, 0.0842234f, 0.0268108f, 0.0131902f, -0.0362878f, -0.0859567f, 0.0159591f, 0.133899f, -0.0343238f, -0.172363f, 0.0752871f, -0.124453f, 0.080041f, 0.156957f, 0.0199755f, 0.0715357f, -0.0807205f, -0.185395f, 0.0886297f, 0.0506201f, -0.0778519f, -0.23308f, 0.0659566f, -0.104742f, 0.00464306f, -0.0196824f, -0.114372f, 0.0915493f, -0.0115746f, -0.17966f, -0.0192289f, -0.15546f, 0.100115f, -0.00505412f, -0.127364f, 0.0424053f, 0.0784288f, 0.104405f, 0.115772f, -0.0134272f, 0.198713f, -0.0219105f, 0.0683327f, -0.100121f, 0.0612386f, 0.118496f, 0.0640191f, -0.0749607f, -0.125005f, -0.135778f, 0.15251f, -0.0755532f, 0.106249f, -0.129481f, -0.0234071f, -0.121551f, -0.0824015f, 0.0580827f, 0.0676968f, -0.22937f, -0.160847f, -0.0465591f, -0.0645434f, -0.123939f, 0.135866f, -0.138445f, -0.063113f, -0.0114802f, -0.0524438f, 0.0661694f, -0.0053462f, -0.00282241f, -0.194824f, -0.0327054f, -0.04437f, -0.100807f, 0.0698429f, -0.0748573f, -0.0472383f, 0.0205898f, 0.0212699f, 0.124688f, 0.0770573f, 0.117578f, -0.00815512f, 0.0936562f, 0.124921f, -0.0601859f, -0.0955667f, -0.135149f, 
-0.0542988f, -0.0608268f, -0.197428f, -0.0650044f, 0.0704519f, -0.0164466f, -0.0986307f, 0.00715452f, 0.0767934f, 0.00412818f, 0.0905304f, 0.0396319f, 0.00837817f, -0.0267244f, 0.0930721f, 0.108028f, 0.00744507f, -0.0493471f, -0.0195623f, -0.177904f, -0.135552f, 0.0737591f, -0.178602f, -0.00670441f, 0.0754992f, -0.0378846f, -0.0629313f, -0.122933f, 0.0839281f, 0.0749091f, 0.0797577f, -0.175295f, -0.0168336f, -0.109948f, 0.0334345f, 0.083586f, 0.045774f, 0.0362349f, -0.202326f, 0.0418665f, 0.16225f, -0.00662578f, -0.116256f, -0.185444f, -0.0623462f, 0.0850885f, 0.108743f, 0.162511f, -0.154857f, -0.0573133f, -0.0633981f, 0.00990949f, -0.0896302f, 0.0157241f, -0.0902843f, -0.00159114f, 0.0701454f, 0.0728213f, -0.0906549f, -0.0224363f, -0.0464723f, -0.0144856f, -0.126268f, 0.107614f, -0.0468405f, -0.096787f, 0.0874614f, 0.0926338f, -0.152835f, 0.0176823f, -0.111429f, -0.0950984f, -0.0115219f, -0.0771794f, 0.0681982f, -0.111016f, 0.203482f, -0.00714296f, 0.0586067f, -0.0329465f, -0.0845449f, 0.0486549f, -0.0104111f, -0.199626f, -0.0892381f, -0.0629865f, 0.155612f, 0.0191639f, -0.0838307f, 0.209533f, 0.0316975f, -0.100128f, -0.00201803f, -0.194165f, -0.0726416f, -0.0313286f, 0.0242704f, -0.0801909f, 0.17155f, 0.145251f, -0.108382f, 0.314601f, -0.00656433f, -0.0320169f, -0.0816271f, 0.0489008f, 0.0992959f, -0.143571f, -0.00608044f, 0.172211f, -0.079396f, -0.177926f, -0.0300531f, 0.137206f, -0.149952f, -0.0553588f, 0.0345417f, -0.143267f, 0.0474634f, 0.0218015f, 0.0522585f, -0.141634f, -0.239816f, -0.0069167f, 0.26758f, 0.0171523f, -0.0734612f, -0.125769f, -0.0793968f, 0.291079f, -0.185657f, -0.0370256f, 0.121864f, 0.0162035f, 0.184935f, 0.259483f, -0.0395243f, -0.0944665f, -0.212575f, -0.172647f, -0.0665187f, -0.0145952f, 0.0767734f, -0.0227692f, 0.0859191f, 0.202254f, 0.0923423f, 0.0723978f, -0.10578f, 0.293047f, -0.0462116f, 0.0436361f, 0.0695043f, -0.152992f, -0.0416715f, -0.108272f, 0.0295012f, 0.0255692f, -0.161628f, -0.106685f, -0.0210847f, -0.154647f, 0.0204896f, -0.0587513f, 0.230551f, 0.0497908f, -0.0475957f, -0.0210439f, -0.155053f, -0.118814f, 0.0511831f, 0.054593f, 0.169512f, -0.0378522f, 0.143163f, 0.030325f, -0.0339543f, 0.0213306f, -0.164466f, -0.0934754f, -0.22833f, 0.0858077f, 0.0461933f, -0.118319f, 0.0767028f, 0.136021f, 0.0729791f, -0.0380858f, -0.0714416f, -0.117463f, 0.0150731f, 0.0284947f, -0.225468f, -0.237645f, -0.00766115f, -0.239746f, 0.248227f, -0.146335f, 0.137973f, 0.0281188f, 0.144324f, 0.0976707f, 0.0770879f, 0.0571518f, -0.104919f, 0.0866104f, -0.0190444f, 0.0697817f, 0.0870388f, -0.0719153f, 0.163755f, 0.331883f, 0.113305f, 0.120221f, -0.0100012f, -0.0332775f, 0.065859f, -0.0416822f, 0.197175f, 0.0936962f, 0.0247296f, -0.0276575f, -0.0286851f, -0.0589173f, 0.149987f, 0.185468f, 0.155365f, 0.188355f, 0.00237859f, 0.0676103f, 0.0251445f, 0.193678f, 0.0409631f, -0.0366925f, -0.154589f, -0.0389411f, -0.158109f, -0.169772f, -0.280884f, 0.0398991f, 0.139555f, -0.0110791f, 0.0485936f, 0.0385639f, 0.0519579f, -0.0920265f, 0.161738f, 0.065721f, -0.255336f, 0.101532f, 0.129778f, -0.0604631f, 0.070988f, 0.352874f, -0.0629856f, 0.291217f, -0.0543547f, 0.090589f, 0.00334543f, 0.00252746f, -0.141186f, -0.166335f, -0.121629f, 0.0590552f, -0.0850281f, -0.211745f, -0.172293f, -0.159549f, 0.123006f, 0.135679f, 0.0267605f, -0.152193f, -0.036335f, -0.214873f, -0.121877f, -0.170854f, -0.121973f, -0.113269f, 0.141356f, 0.0256631f, -0.0148008f, 0.151741f, -0.117047f, 0.0251949f, -0.0919051f, -0.112196f, -0.0214006f, -0.0821592f, -0.10234f, -0.0829865f, 0.180194f, 0.0215716f, 0.111666f, 
-0.0636419f, -0.215688f, 0.109072f, 0.199377f, 0.00757224f, 0.190048f, -0.253411f, 0.000319625f, -0.0757468f, -0.138268f, -0.276189f, 0.00548186f, -0.133266f, 0.182946f, 0.0393198f, 0.0600505f, 0.0116044f, 0.0239095f, 0.00707852f, 0.163426f, 0.0131742f, 0.145158f, 0.00283553f, 0.202782f, -0.031349f, 0.106123f, 0.0317581f, -0.294969f, 0.000338775f, 0.134041f, -0.0600072f, -0.0730347f, 0.119654f, -0.0980097f, -0.0241402f, -0.0668989f, -0.0310573f, -0.0581586f, 0.0820751f, -0.128971f, -0.0695226f, 0.0135357f, -0.123606f, 0.129523f, -0.0894331f, 0.0764786f, 0.195178f, -0.0130679f, -0.134301f, 0.145709f, -0.193029f, 0.161094f, 0.0701801f, -0.0707612f, -0.0958126f, -0.0118173f, 0.0684425f, -0.265877f, 0.0542163f, 0.0964676f, -0.0822466f, 0.0521333f, -0.117361f, 0.138503f, -0.131198f, -0.0831384f, -0.0514136f, -0.0502016f, -0.036244f, 0.0218823f, 0.0782719f, -0.0640802f, -0.0553641f, -0.221093f, -0.104808f, -0.0545768f, -0.0462723f, -0.017221f, 0.0993997f, 0.0120203f, 0.0898902f, -0.167782f, 0.149125f, 0.0787422f, -0.180152f, 0.160089f, 0.0780007f, 0.0823145f, 0.135117f, -0.0797384f, -0.0980863f, -0.177234f, 0.107759f, 0.0383026f, -0.0674362f, -0.00488177f, -0.057259f, -0.191928f, -0.202768f, 0.154951f, -0.0455602f, 0.0811795f, 0.053733f, 0.103396f, 0.12294f, 0.0911659f, 0.0843187f, -0.100549f, -0.109884f, 0.0839741f, 0.161174f, -0.11418f, 0.18197f, 0.287072f, -0.0938153f, -0.076604f, -0.185878f, 0.0672537f, -0.125767f, 0.0368364f, 0.0157694f, -0.0407388f, -0.0308107f, 0.112735f, -0.0396355f, -0.189153f, -0.0608741f, -0.129302f, 0.00478799f, -0.0368768f, 0.0510359f, 0.0907217f, -0.218272f, -0.101936f, -0.14216f, -0.00560582f, -0.0288911f, 0.0433238f, -0.119338f, -0.11291f, 0.0748263f, 0.147806f, 0.0880856f, -0.0552198f, -0.113234f, -0.0892637f, 0.0768048f, 0.307189f, 0.0245638f, 0.0316409f, 0.0402566f, 0.237725f, -0.0399341f, -0.0530584f, -0.0883524f, 0.0202731f, -0.0616137f, -0.166939f, 0.151039f, 0.140738f, -0.069355f, 0.0384919f, -0.00550695f, -0.0909114f, -0.0805798f, -0.0342301f, -0.115917f, -0.0618465f, -0.0146205f, 0.0675873f, 0.128991f, -0.11117f, -0.045966f, -0.0391435f, 0.129061f, 0.120178f, 0.0693377f, -0.0946104f, -0.142355f, 0.20063f, -0.0818652f, -0.00298243f, -0.0801682f, 0.0631885f, 0.01779f, -0.00135591f, 0.00272314f, 0.111527f, -0.131859f, -0.0075387f, -0.0554573f, 0.125767f, 0.112316f, -0.0343979f, 0.00222635f, -0.126533f, -0.0102839f, 0.00983106f, -0.074818f, 0.0852295f, 0.0767417f, 0.00988669f, 0.0636016f, -0.0303379f, -0.126553f, 0.0820762f, -0.189013f, -0.20051f, 0.0630236f, -0.242048f, -0.0865404f, 0.0687962f, 0.00123627f, -0.0103948f, -0.0663098f, -0.0931299f, 0.110547f, 0.0751829f, 0.0581922f, -0.113103f, 0.0361607f, -0.022175f, -0.301193f, 0.00355331f, 0.0283203f, -0.0707817f, 0.0468476f, -0.00397366f, 0.0291346f, 0.139848f, 0.0667173f, 0.0156195f, 0.113167f, -0.036104f, 0.00608596f, -0.0224088f, -0.0376919f, -0.0394271f, -0.0852909f, 0.0475666f, 0.00465422f, 0.00660843f, 0.169551f, 0.101554f, 0.00150177f, -0.179223f, -0.0468258f, -0.0876736f, 0.0390746f, 0.0242532f, -0.00118479f, -0.0421474f, 0.000581151f, 0.123448f, 0.0719542f, -0.087029f, 0.0380085f, 0.103385f, 0.00126884f, 0.126489f, 0.0424233f, -0.107618f, 0.185452f, -0.0627246f, -0.0584536f, -0.0934283f, 0.0652069f, 0.0906484f, 0.0510872f, -0.0573928f, 0.130892f, -0.0487467f, 0.0758483f, -0.108435f, -0.0120764f, -0.0467645f, 0.12436f, -0.0997391f, 0.0868841f, 0.00323449f, -0.195749f, -0.138854f, -0.0121493f, 0.0263368f, -0.122166f, 0.0323598f, -0.0193836f, 0.144236f, -0.0628916f, 0.00428236f, -0.12099f, 0.266247f, 
-0.183494f, 0.132809f, -0.167129f, -0.0508275f, 0.135888f, -0.0418371f, -0.0962804f, -0.0951941f, -0.114821f, 0.132069f, -0.128282f, -0.175875f, -0.198912f, 0.116171f, 0.107752f, -0.143536f, 0.087323f, 0.0323613f, 0.033975f, 0.0165699f, -0.0331944f, -0.0551878f, -0.0777927f, 0.179104f, -0.0700769f, 0.113302f, 0.110442f, -0.00930085f, -0.0759301f, 0.0293114f, -0.039091f, 0.0321688f, -0.0759916f, -0.0762207f, 0.215916f, 0.139452f, 0.0126151f, -0.0031113f, -0.127195f, -0.191948f, 0.230031f, -0.0381873f, -0.085138f, -0.100308f, -0.0454466f, 0.118482f, 0.072371f, 0.109701f, 0.0742852f, -0.243044f, -0.0148899f, 0.224943f, -0.0997938f, -0.073007f, -0.18312f, -0.0306144f, -0.0105626f, 0.00213062f, -0.0233494f, 0.130955f, 0.135267f, -0.0703666f, 0.175057f, 0.0790237f, 0.032288f, 0.0524363f, 0.247834f, -0.0374616f, 0.106501f, -0.131993f, 0.170161f, 0.0398851f, 0.203499f, -0.150741f, 0.311905f, 0.0788658f, -0.0282965f, -0.0872977f, 0.0739423f, -0.0334502f, 0.0262f, -0.130304f, 0.180065f, -0.115533f, -0.103926f, -0.0512683f, -0.199867f, -0.163173f, 0.0671355f, 0.0176537f, -0.0971016f, -0.125815f, 0.108367f, 0.0138161f, 0.168023f, 0.101629f, -0.054382f, 0.0140505f, 0.0327086f, 0.263393f, 0.0663371f, 0.336929f, 0.405278f, 0.0314086f, 0.0184855f, -0.084812f, -0.0709576f, -0.0326737f, -0.0280696f, -0.142288f, -0.098986f, -0.136917f, -0.0553295f, 0.0676655f, 0.013853f, 0.112344f, 0.125811f, -0.0767639f, 0.0439987f, 0.0228987f, -0.02137f, -0.0244697f, -0.147876f, -0.0251752f, -0.0379284f, -0.0647226f, 0.0341937f, -0.121735f, -0.100174f, -0.0598986f, -0.0196131f, -0.3472f, -0.0743812f, 0.0627455f, -0.114211f, 0.217759f, 0.158175f, -0.151913f, 0.0859349f, -0.0558778f, -0.245536f, 0.0773829f, -0.231388f, -0.100464f, -0.060115f, 0.0676159f, 0.118111f, -0.106342f, -0.12934f, 0.0371733f, -0.0581194f, -0.176012f, 0.215812f, 0.190505f, -0.163736f, 0.000507007f, -0.135493f, -0.0830093f, 0.071609f, -0.0371225f, -0.076906f, -0.11375f, 0.0239612f, 0.299057f, -0.0412237f, -0.121527f, 0.0044028f, -0.044546f, 0.0753698f, 0.164639f, 0.00228523f, 0.093221f, 0.0214547f, 0.0600803f, -0.0622577f, -0.0752353f, -0.051275f, -0.0383169f, -0.00795754f, 0.0252119f, -0.0126401f, -0.00567422f, -0.042262f, 0.083442f, -0.0586978f, -0.147768f, -0.0794564f, -0.0244015f, 0.051853f, 0.109868f, -0.0793144f, -0.0393269f, 0.0608404f, 0.0890723f, -0.0710212f, -0.0982764f, 0.0727868f, 0.13005f, -0.0119973f, 0.171012f, 0.171988f, 0.0211775f, 0.146145f, 0.0269672f, 0.0412364f, 0.115178f, -0.0423957f, 0.00905616f, -0.0838831f, -0.12676f, 0.152691f, -0.0199984f, 0.013342f, 0.165197f, -0.0625832f, -0.0520675f, -0.15181f, 0.0443335f, 0.113525f, -0.142989f, 0.0336039f, -0.00989765f, -0.131299f, -0.131239f, -0.00198667f, 0.106465f, 0.0741258f, -0.0136108f, -0.137813f, 0.0631925f, -0.179853f, -0.033683f, -0.0348575f, -0.0298155f, 0.0852177f, 0.0936267f, -0.0144655f, 0.0203772f, 0.145535f, 0.203976f, -0.0141789f, 0.0858781f, -0.0255209f, 0.109895f, 0.118723f, 0.0282528f, 0.016933f, -0.0639188f, 0.239534f, 0.00771305f, 0.0788866f, -0.359206f, 0.158087f, -0.065598f, 0.0692041f, -0.0462564f, -0.0307944f, 0.171246f, 0.112872f, 0.0671335f, 0.0626649f, -0.0382015f, -0.133939f, 0.0248772f, 0.0688373f, 0.122861f, 0.0577861f, -0.0359108f, -0.041648f, 0.0717909f, -0.174273f, 0.111512f, 0.0159893f, -0.0322574f, 0.178031f, -0.299461f, 0.0360735f, 0.0737417f, 0.0469753f, -0.0236047f, -0.0806754f, -0.0576051f, -0.0715724f, 0.364909f, -0.0898077f, -0.140996f, 0.0390871f, -0.0536726f, -0.0362228f, 0.180279f, -0.0575788f, -0.21351f, -0.112418f, 0.0206654f, 0.0852196f, 
-0.264722f, 0.137786f, -0.215293f, 0.0462995f, 0.0248057f, -0.171608f, -0.136671f, -0.0646528f, 0.120517f, -0.00781697f, -0.0950003f, -0.178635f, 0.233572f, 0.0616323f, -0.137745f, 0.000359951f, -0.182259f, -0.0305628f, 0.0951776f, 0.0861118f, 0.311919f, -0.0272912f, 0.169004f, -0.221897f, -0.139324f, 0.109422f, -0.0389392f, -0.050584f, -0.0306887f, -0.141575f, 0.107044f, -0.0541275f, -0.0915775f, 1.90883e-05f, -0.0540671f, -0.1248f, -0.0594223f, -0.1174f, -0.200067f, -0.0601236f, -0.0282521f, -0.0727905f, -0.0693876f, -0.153171f, 0.108937f, -0.0105873f, 0.0131787f, -0.148446f, 0.15995f, 0.0739947f, 0.0635453f, 0.0554233f, 0.22983f, -0.153248f, -0.0487372f, -0.0417181f, -0.0533395f, -0.0373368f, 0.104692f, 0.123382f, 0.0786966f, 0.0821922f, -0.00352446f, 0.0130089f, 0.0106247f, 0.00358571f, 0.0766206f, -0.0054139f, 0.12976f, 0.0834133f, 0.157372f, 0.0152423f, -0.0426947f, 0.0300059f, -0.20194f, -0.0200452f, -0.122784f, 0.227948f, -0.142901f, 0.0209322f, 0.056382f, 0.1699f, 0.0549787f, 0.0493221f, -0.0563672f, -0.115473f, 0.0850725f, 0.0169803f, 0.0493528f, 0.142897f, -0.0446541f, -0.0767163f, 0.024522f, 0.0243657f, 0.00812796f, 0.0901421f, -0.224886f, 0.0856588f, -0.0561756f, -0.00401722f, -0.104798f, -0.0289472f, 0.0493577f, 0.172639f, -0.0644f, -0.000980458f, 0.0368067f, 0.0589687f, -0.106533f, 0.133992f, -0.0776493f, -0.125717f, -0.0728068f, -0.0182634f, 0.0735045f, 0.0525062f, 0.00368344f, 0.0373791f, -0.143519f, -0.172335f, -0.108627f, -0.042079f, 0.0781261f, 0.0915418f, 0.00644886f, 0.113893f, 0.141698f, -0.0124521f, 0.142577f, -0.0129428f, -0.0301781f, 0.109445f, 0.158906f, 0.0158273f, -0.0906456f, 0.0412154f, -0.131566f, -0.0845796f, 0.0329961f, -0.00858371f, -0.0827552f, -0.128911f, -0.143573f, 0.0215006f, -0.247756f, -0.175999f, -0.0166564f, -0.0774325f, 0.00988666f, -0.0456803f, -0.0119353f, -0.164879f, 0.0481019f, -0.186182f, -0.246335f, 0.0300121f, -0.0655653f, -0.103862f, 0.0176684f, -0.0171149f, 0.102215f, -0.183757f, 0.0595879f, 0.265491f, 0.0481233f, -0.0763418f, 0.0866587f, -0.117466f, 0.0687183f, 0.00452964f, 0.130429f, -0.00565681f, -0.112608f, 0.0394847f, 0.0539918f, -0.0666722f, 0.0471328f, 0.194936f, -0.0127938f, -0.0114168f, -0.262257f, 0.0753083f, 0.215486f, 0.0772158f, -0.0450628f, 0.00289644f, -0.180984f, 0.209547f, -0.0633849f, 0.0265503f, 0.0121725f, -0.046239f, -0.0913831f, 0.0356748f, -0.0423551f, -0.0302324f, 0.0999703f, -0.0632859f, -0.167072f, 0.0703653f, 0.160756f, -0.075644f, -0.00553132f, 0.00583935f, 0.122859f, -0.168266f, -0.0846613f, -0.0584543f, -0.0940396f, -0.103338f, 0.152793f, -0.143324f, -0.0275598f, 0.0956532f, 0.0562156f, -0.0362134f, 0.213211f, -0.00973861f, 0.135226f, -0.174589f, -0.00329751f, -0.108182f, -0.113654f, -0.0662413f, 0.131774f, -0.0133282f, 0.0584787f, 0.00645135f, 0.0372075f, 0.0306904f, -0.0302557f, 0.0498232f, 0.042666f, 0.205024f, -0.0758641f, -0.0294425f, -0.0889366f, 0.0269959f, 0.0380038f, 0.0398266f, 0.0528198f, -0.0904563f, -0.0978281f, -0.068313f, 0.0457333f, 0.0795943f, 0.0948405f, 0.0200469f, 0.0548833f, 0.0636191f, -0.0799197f, 0.0612335f, -0.150282f, 0.0247016f, -0.050293f, -0.0426422f, -0.137472f, 0.155789f, 0.0456863f, 0.0591184f, 0.00261422f, 0.0555748f, 0.0937126f, -0.0109287f, -0.0912026f, 0.079417f, 0.0389771f, 0.0954299f, 0.120018f, 0.0929776f, -0.0487948f, 0.107617f, 0.0918833f, -0.046635f, 0.0347915f, 0.0977946f, 0.0753953f, -0.0611924f, 0.21114f, 0.116284f, 0.0365512f, -0.0132882f, -0.0210345f, -0.0169423f, -0.17591f, 0.0592985f, -0.139507f, 0.0330528f, -0.0665453f, 0.0111887f, 0.105084f, 0.0137584f, 
0.0190691f, 0.0323157f, 0.0113699f, 0.00319627f, 0.11794f, -0.0311242f, -0.0749491f, -0.0944589f, -0.0847734f, 0.0646025f, 0.0401871f, 0.0454185f, -0.10769f, -0.0152885f, -0.0738717f, 0.024917f, 0.0632017f, -0.0650402f, -0.107415f, -0.0461601f, -0.0739395f, -0.0616036f, 0.099588f, 0.10589f, 0.175195f, -0.00573559f, 0.0891543f, -0.17376f, 0.197099f, -0.116157f, -0.0230408f, -0.14788f, -0.12097f, -0.032844f, -0.0941979f, 0.0742182f, 0.0945046f, 0.11055f, -0.215483f, 0.0605058f, -0.0866538f, -0.110748f, 0.0185428f, 0.23512f, -0.0259306f, 0.065226f, -0.218019f, 0.0561802f, 0.0709543f, -0.0303998f, 0.0386827f, 0.126517f, 0.0140852f, -0.203561f, 0.122919f, -0.0988643f, 0.13082f, 0.0114748f, -0.044433f, 0.106286f, 0.0455707f, -0.0492215f, -0.0681314f, -0.0411076f, -0.0592518f, 0.022264f, 0.0483298f, -0.160539f, 0.00204136f, -0.0791445f, 0.00221954f, -0.0864776f, 0.0226643f, 0.0719411f, 0.210257f, -0.0210368f, -0.0484101f, -0.0671174f, -0.0913305f, 0.0770722f, 0.0877918f, -0.109657f, 0.0328532f, -0.28558f, 0.21467f, -0.0802692f, -0.0333188f, 0.0428537f, -0.0723209f, 0.0649066f, 0.107654f, 0.0982478f, 0.115625f, -0.171343f, -0.120226f, -0.00309913f, 0.050965f, -0.00228574f, 0.0623146f, 0.0810032f, -0.150137f, 0.00222692f, -0.0166594f, 0.170252f, -0.312464f, 0.203978f, 0.017799f, 0.0790203f, 0.0015032f, 0.00860595f, 0.0149137f, -0.200581f, 0.0429273f, 0.048673f, 0.00148765f, -0.218366f, -0.0852597f, -0.147702f, 0.211552f, 0.0214932f, 0.074382f, -0.0120731f, -0.248212f, -0.086573f, 0.0452987f, -0.0577113f, 0.0607527f, -0.14452f, -0.0397579f, -0.169824f, -0.011252f, -0.0133552f, 0.0330437f, -0.118153f, 0.0489353f, -0.133889f, 0.0366741f, 0.0280261f, -0.0574276f, -0.144657f, 0.238303f, 0.206255f, -0.103662f, -0.0910226f, 0.00352447f, -0.0164529f, 0.0166113f, -0.043883f, -0.0273704f, 0.131583f, -0.0873214f, 0.161439f, 0.0380599f, 0.094898f, -0.144901f, -0.0316275f, -0.000449612f, 0.12693f, -0.0989803f, -0.0509203f, -0.0297262f, -0.0124091f, 0.00662072f, 0.07389f, 0.0869624f, 0.0284625f, 0.0159207f, -0.121852f, -0.0735496f, -0.162634f, -0.122031f, -0.14721f, -0.0395819f, 0.0332046f, -0.0376581f, 0.0930084f, 0.0293073f, 0.0449609f, 0.0712641f, 0.023479f, 0.0733267f, -0.0771408f, 0.203868f, -0.0100238f, -0.0713184f, -0.053772f, -0.228332f, 0.096183f, -0.000586273f, 0.0798726f, 0.0271422f, -0.11139f, -0.080159f, -0.100096f, 0.158267f, -0.0304058f, 0.109574f, 0.0273384f, -0.180382f, -0.0129705f, -0.00449165f, 0.0162746f, -0.0109055f, 0.218799f, -0.0560683f, -0.0714457f, 0.0588829f, 0.0284399f, 0.0610956f, -0.0780087f, -0.158301f, -0.153325f, -0.294042f, 0.0963786f, 0.0587289f, -0.157122f, -0.0960356f, -0.0716606f, -0.00437045f, -0.00715929f, -0.0188494f, 0.139635f, -0.0345328f, -0.181105f, -0.0159563f, 0.0276014f, 0.0201848f, -0.0393259f, 0.0417377f, -0.145298f, -0.0695349f, -0.0820722f, -0.0493121f, 0.0371987f, -0.00562376f, -0.0578931f, -0.0909422f, -0.162283f, -0.000605049f, 0.222216f, 0.00240155f, -0.178902f, -0.0455435f, 0.116903f, 0.0450361f, -0.01398f, -0.162968f, 0.025879f, -0.014392f, 0.0766986f, -0.0489045f, -0.0474465f, 0.0489962f, -0.152654f, 0.218469f, 0.156301f, 0.0500277f, 0.0797548f, -0.118716f, -0.0474145f, -0.000864115f, -0.0247415f, -0.0827129f, 0.0873829f, -0.0219059f, -0.0871082f, 0.100327f, -0.13832f, -0.0165597f, 0.145661f, -0.0821696f, -0.147556f, 0.0192553f, 0.204414f, 0.00112382f, -0.0398974f, 0.0613577f, 0.223857f, -0.218919f, 0.144116f, 0.0986556f, -0.0441144f, -0.114117f, 0.0672281f, 0.120759f, 0.202168f, -0.0537319f, -0.0243972f, -0.0340832f, -0.0419258f, 0.0473834f, -0.101516f, 
0.0392689f, -0.0823353f, -0.204043f, -0.0848256f, -0.161437f, 0.128847f, 0.0674325f, 0.0322743f, -0.109569f, 0.0489494f, -0.114716f, -0.099169f, 0.0664302f, -0.00184453f, -0.00578211f, -0.146132f, 0.00227202f, -0.0742572f, 0.0806709f, 0.0877887f, -0.0154718f, -0.0610823f, 0.0172351f, 0.0739758f, 0.147027f, -0.00639858f, 0.051417f, -0.127549f, 0.179905f, -0.0221607f, -0.0561042f, 0.0480268f, -0.0219837f, -0.169972f, -0.125453f, -0.211313f, 0.179642f, 0.0896357f, -0.037611f, -0.138123f, -0.126171f, 0.140264f, -0.0425931f, -0.000123576f, 0.0297793f, -0.127861f, 0.0172955f, -0.0919533f, -0.0737757f, -0.158576f, -0.129847f, -0.0216638f, 0.162461f, 0.0578183f, 0.0672344f, 0.0323345f, 0.0714533f, -0.240807f, 0.0866002f, -0.124947f, 0.0883711f, 0.1538f, -0.0360892f, -0.0237766f, 0.267349f, -0.097481f, -0.0988234f, 0.0459941f, -0.0153057f, 0.194028f, -0.126622f, -0.00536853f, 0.0640994f, 0.144056f, 0.13606f, 0.177487f, 0.148337f, 0.0962032f, 0.257488f, -0.027079f, -0.00302076f, -0.0289131f, 0.0349572f, 0.0408395f, -0.111576f, -0.0273783f, -0.0839572f, 0.155334f, 0.133845f, 0.0963186f, 0.0788487f, 0.100227f, 0.112126f, -0.114955f, 0.119812f, -0.149146f, -0.010438f, -0.236915f, 0.263617f, 0.0547058f, 0.0794508f, 0.13985f, -0.0521213f, -0.165117f, -0.0517254f, 0.103716f, 0.141786f, 0.107149f, 0.151315f, 0.0242284f, 0.105166f, 0.0694475f, -0.194349f, 0.142696f, -0.0446672f, -0.025278f, 0.0688978f, -0.0644529f, -0.114916f, -0.050345f, 0.0632172f, 0.0468515f, -0.139235f, -0.175502f, 0.0372038f, -0.00606444f, 0.0438848f, 0.0931778f, 0.13727f, -0.0790641f, -0.0288281f, -0.00854381f, -0.106295f, 0.238316f, -0.0457221f, 0.189015f, 0.22545f, -0.135396f, 0.0454621f, -0.0248203f, -0.0933177f, -0.158999f, -0.0780454f, -0.16016f, 0.120078f, -0.0169928f, -0.0144272f, -0.0307134f, 0.256497f, -0.0247971f, 0.13928f, -0.0837646f, 0.109796f, -0.110754f, 0.185297f, 0.0123165f, 0.136713f, 0.255131f, 0.133605f, -0.0586806f, -0.0496904f, 0.0134735f, 0.015594f, -0.0607103f, -0.0634061f, 0.0350112f, -0.0356687f, -0.0549249f, 0.0907385f, -0.150614f, -0.153818f, 0.0478335f, 0.0483349f, -0.087461f, -0.0720636f, 0.179102f, 0.0730291f, -0.251164f, 0.182906f, -0.0471581f, -0.189788f, -0.0849958f, 0.188372f, -0.00271709f, 0.0513947f, -0.125499f, 0.0779445f, 0.000222669f, 0.0725459f, -0.0113365f, -0.0951412f, 0.110982f, -0.0657272f, 0.0468602f, -0.0521199f, 0.193719f, -0.199871f, -0.00269381f, 0.0978099f, 0.105297f, -0.108208f, 0.111672f, -0.0402458f, -0.0495727f, 0.0101632f, 0.0296603f, 0.0231331f, 0.0639629f, -0.00589024f, -0.0112265f, -0.142519f, 0.184097f, 0.170799f, -0.129636f, -0.00502621f, -0.0207182f, 0.132493f, 0.0690643f, -0.100455f, 0.0380391f, -0.0875112f, 0.0277012f, 0.0342734f, 0.112756f, -0.0250227f, -0.0221532f, 0.086932f, -0.0869034f, -0.111286f, 0.029825f, 0.0859659f, -0.00465861f, 0.00562891f, -0.124829f, -0.0904298f, -0.105986f, -0.0587141f, 0.0337069f, -0.178597f, -0.0839138f, -0.0325981f, -0.119738f, -0.0377287f, 0.0614227f, 0.171248f, -0.129788f, -0.058292f, 0.00583078f, 0.0181502f, -0.0968664f, -0.0750171f, 0.00214694f, -0.181049f, 0.289168f, -0.00308913f, 0.0318213f, 0.058329f, -0.195396f, -0.0940282f, -0.162397f, -0.0634562f, -0.0531168f, -0.161095f, 0.0427339f, -0.043042f, -0.154553f, -0.102211f, 0.0491242f, 0.0111924f, -0.047476f, -0.142178f, 0.102246f, -0.0621268f, -0.143292f, -0.207453f, 0.0636621f, -0.0350397f, -0.0804742f, 0.014769f, 0.0729948f, 0.0127519f, -0.195178f, 0.200197f, 0.00375432f, -0.00624014f, 0.123584f, -0.0383331f, 0.012285f, -0.110208f, 0.0731114f, 0.00880996f, -0.0822221f, 
-0.0284906f, 0.106397f, 0.174703f, 0.00411917f, -0.047814f, -0.259528f, 0.035195f, -0.0471244f, -0.102322f, 0.0981426f, 0.0681561f, 0.0739306f, 0.137213f, 0.0634007f, 0.088386f, 0.0625578f, -0.0693105f, 0.0345739f, 0.0895836f, 0.132281f, 0.181831f, -0.0305308f, 0.0882188f, -0.11804f, -0.0644841f, -0.0373792f, -0.114423f, 0.0536577f, -0.0338129f, -0.0691534f, -0.07293f, -0.0231796f, -0.102025f, -0.193936f, 0.0381616f, -0.0769748f, -0.0199439f, 0.115287f, -0.0221551f, -0.062492f, 0.155696f, -0.0760195f, -0.138965f, -0.122147f, 0.0170207f, -0.132155f, -0.197658f, 0.056561f, -0.147395f, -0.0331016f, -0.0158362f, -0.0839234f, -0.146546f, 0.0568423f, 0.123853f, 0.125969f, -0.0188812f, -0.0580131f, 0.0843678f, 0.0372052f, -0.0498497f, -0.0532892f, -0.094139f, -0.0663339f, -0.112045f, 0.133358f, 0.0885907f, 0.0970352f, 0.102026f, 0.015308f, -0.0270658f, 0.16547f, -0.0560272f, -0.0964968f, -0.0988407f, 0.0679886f, 0.103749f, 0.170564f, 0.195813f, -0.0312866f, 0.0211601f, -0.017865f, -0.0489293f, -0.0585939f, -0.0276946f, -0.0234492f, 0.0636504f, -0.0129645f, -0.058264f, 0.018051f, 0.089033f, 0.00866929f, 0.031056f, -0.121356f, -0.0140389f, -0.0201874f, 0.0133468f, -0.0441331f, -0.010492f, 0.00685425f, -0.127882f, 0.0329712f, -0.0563903f, -0.126271f, 0.0915666f, 0.0569118f, -0.0162105f, 0.301888f, -0.00577669f, -0.0644699f, 0.286688f, -0.135074f, 0.198701f, 0.13374f, 0.0768465f, 0.221807f, 0.0002473f, 0.252372f, -0.0250196f, 0.119749f, -0.22422f, -0.0938131f, 0.130647f, 0.168878f, -0.145989f, 0.286856f, 0.019002f, 0.0817282f, -0.0497936f, 0.207759f, -0.15028f, 0.162403f, -0.124052f, 0.00808952f, -0.0171731f, 0.0820959f, -0.16853f, -0.0479926f, -0.200043f, -0.00261546f, 0.0301762f, -0.233062f, -0.0234549f, -0.0929557f, 0.0629822f, 0.0794051f, 0.0342013f, 0.144896f, -0.00813711f, -0.227273f, -0.141694f, -0.27169f, 0.224392f, 0.0984629f, -0.133572f, 0.0468921f, -0.0351526f, -0.0331094f, 0.0661988f, -0.0724967f, 0.0524242f, -0.00412046f, -0.0376472f, -0.0658608f, -0.0456447f, -0.0845839f, -0.0361741f, -0.0601928f, 0.163131f, -0.106233f, -0.366038f, 0.00797637f, -0.0231015f, -0.0131786f, -0.0775315f, -0.0988734f, -0.217208f, -0.10994f, 0.132245f, -0.132815f, 0.0103053f, -0.102288f, -0.00909063f, -0.119759f, -0.210456f, 0.138341f, -0.0341217f, 0.20882f, -0.0502009f, -0.0483512f, -0.0456487f, 0.100956f, 0.158118f, -0.0915728f, -0.224534f, 0.0363542f, -0.0866533f, -0.0352749f, -0.109428f, 0.127289f, -0.142936f, -0.11762f, 0.0228804f, -0.188088f, -0.114416f, 0.0256583f, 0.0693748f, 0.119776f, 0.0546991f, 0.00115148f, 0.125768f, -0.169633f, 0.110888f, -0.118791f, -0.0384767f, -0.158849f, -0.0264956f, -0.00601355f, 0.154039f, 0.000259476f, -0.0967734f, 0.0324582f, 0.103897f, 0.0409492f, 0.0590243f, -0.0596796f, -0.0718197f, 0.252925f, 0.0787568f, -0.0240129f, -0.0594853f, 0.175524f, 0.110802f, 0.0590543f, 0.101557f, -0.0840849f, -0.106357f, -0.166777f, 0.0203275f, 0.175153f, 0.115441f, -0.0446554f, 0.0730614f, -0.139594f, -0.0827482f, -0.132672f, -0.234653f, 0.0611508f, 0.0523823f, 0.045676f, 0.204303f, 0.0905313f, -0.0223278f, -0.0565026f, -0.030769f, 0.221435f, -0.0478355f, -0.121569f, -0.0662583f, -0.181544f, 0.0645318f, -0.0454806f, 0.0108579f, -0.118606f, 0.0770029f, -0.131012f, -0.0203915f, -0.0716018f, -0.186557f, -0.0117691f, -0.0685991f, -0.141035f, 0.0432303f, -0.0923682f, 0.126433f, -0.130911f, 0.0259088f, -0.0535197f, -0.0940918f, 0.0221463f, -0.00560294f, -0.0478526f, 0.00605898f, -0.131151f, 0.0931443f, -0.299442f, -0.0721862f, 0.140847f, 0.344307f, -0.116993f, -0.150308f, 0.191258f, 0.00269117f, 
0.0180435f, 0.0888292f, -0.0469906f, 0.0528578f, 0.00993826f, -0.0956561f, 0.0138077f, -0.163916f, 0.0316806f, -0.0202339f, -0.0940167f, -0.162324f, -0.083094f, 0.133542f, -0.109024f, 0.0927877f, -0.0270035f, 0.121229f, 0.156598f, -0.01322f, -0.0257915f, 0.100085f, 0.130846f, -0.021948f, 0.169491f, -0.0448827f, 0.115516f, -0.0715343f, 0.100839f, 0.0229607f, 0.00926584f, 0.0123816f, 0.159302f, -0.00795994f, -0.0258342f, 0.0807523f, -0.0837323f, 0.0875775f, -0.00891723f, -0.215346f, 0.0401299f, -0.175561f, 0.132453f, 0.0845686f, -0.127178f, -0.0274246f, -0.123372f, 0.0384384f, 0.0455284f, 0.0468858f, 0.096834f, -0.0328079f, -0.0805607f, 0.0838779f, -0.164932f, -0.00130207f, 0.0649114f, 0.140843f, -0.25098f, 0.0439315f, -0.0507408f, -0.076574f, -0.216583f, 0.0963339f, -0.0978792f, -0.112221f, 0.0554837f, 0.0286793f, -0.0572025f, 0.100998f, 0.0914439f, 0.221541f, 0.080418f, 0.236234f, 0.163571f, -0.0113608f, -0.0287685f, -0.0459155f, 0.0950004f, -0.09279f, -0.0689555f, -0.00219202f, 0.00570634f, 0.000771681f, 0.031695f, -0.171196f, 0.0020497f, 0.147911f, 0.0257572f, -0.0266174f, 0.038167f, -0.0241347f, 0.0990397f, 0.132176f, -0.140331f, 0.0897711f, -0.0379647f, 0.00312934f, -0.205916f, -0.0822618f, -0.111707f, 0.0265303f, -0.0561398f, -0.119398f, 0.0748019f, 0.146458f, 0.172327f, 0.0978847f, 0.0358156f, -0.0365916f, 0.0284308f, -0.262982f, 0.023788f, -0.0409537f, -0.0266172f, 0.116654f, -0.0801663f, 0.150359f, 0.2243f, 0.023124f, 0.00684813f, -0.122193f, -0.216644f, 0.147103f, -0.0392872f, 0.10548f, 0.0265301f, -0.069324f, -0.0537202f, -0.0927089f, 0.022324f, -0.0628312f, -0.0479794f, 0.0939621f, -0.0267998f, 0.286743f, 0.0355109f, -0.145163f, -0.161679f, 0.0523757f, -0.0555209f, 0.0807361f, 0.19329f, 0.101239f, 0.123913f, 0.0285046f, -0.172291f, -0.0323532f, -0.109452f, -0.107016f, -0.1759f, 0.187811f, -0.300859f, -0.0147082f, -0.127482f, 0.0470463f, 0.0033916f, -0.107317f, -0.0784184f, 0.151408f, 0.056294f, -0.0475757f, 0.0314982f, 0.190411f, 0.0680653f, -0.130252f, 0.171775f, -0.064087f, 0.127858f, -0.0922019f, 0.117674f, -0.021808f, -0.0716439f, 0.0160781f, -0.025839f, -0.0234358f, 0.185187f, -0.0768174f, -0.0831532f, 0.0809498f, 0.0520408f, -0.0804712f, -0.0394929f, 0.188541f, 0.0182502f, 0.0110734f, -0.0681633f, 0.130992f, 0.109402f, -0.115693f, 0.0803883f, 0.00290294f, -0.160807f, 0.0610858f, -0.06444f, -0.0566136f, -0.101081f, -0.00724675f, 0.0530997f, -0.110278f, -0.0506532f, -0.0429683f, 0.0685958f, -0.0152539f, 0.0346204f, -0.218453f, -0.0535177f, -0.100268f, -0.0453986f, -0.0231579f, 0.165242f, -0.100784f, -0.0242327f, -0.202543f, -0.0469504f, 0.272136f, 0.156335f, 0.0696931f, -0.172909f, -0.0211901f, 0.141482f, -0.068282f, 0.0422313f, -0.00154235f, -0.0785404f, -0.184431f, 0.181534f, 0.0576326f, 0.107513f, -0.0800749f, 0.0341329f, -0.276723f, -0.149026f, 0.0804883f, -0.00251943f, 0.112362f, -0.0201509f, 0.000184098f, -0.1176f, -0.110368f, 0.0206901f, 0.0600989f, 0.0521686f, -0.127748f, 0.033219f, -0.0672267f, 0.0559829f, -0.0376113f, -0.193247f, -0.113583f, 0.287461f, 0.0416636f, -0.135019f, -0.0500053f, 0.087671f, 0.0582614f, -0.0934358f, 0.0747007f, -0.151836f, -0.0291564f, 0.0556662f, -0.0663011f, -0.0584755f, 0.0121648f, -0.017574f, -0.117097f, 0.119486f, 0.134686f, 0.113338f, -0.0840501f, 0.207543f, -0.0661853f, -0.21308f, 0.000827598f, 0.0284056f, -0.100293f, -0.0607275f, -0.0364274f, 0.144572f, -0.0262426f, 0.132025f, -0.183442f, -0.0807243f, -0.0268476f, 0.202466f, -0.00591555f, -0.164143f, 0.0838183f, -0.0120463f, 0.0583207f, 0.0123631f, 0.0402421f, 0.0372003f, 
-0.109976f, 0.106957f, 0.0870421f, 0.0556528f, -0.0788584f, -0.130307f, 0.0762227f, -0.0663722f, 0.080449f, 0.154444f, 0.0114595f, -0.110727f, -0.285198f, 0.413742f, 0.0378572f, 0.0549009f, -0.275438f, 0.094386f, 0.0927528f, 0.124614f, -0.151312f, -0.0866337f, 0.0384032f, 0.0256686f, -0.119182f, -0.0972024f, 0.110824f, 0.0831841f, -0.0952631f, -0.0661435f, 0.0120204f, 0.127869f, -0.0468275f, -0.119752f, 0.0454494f, -0.115688f, -0.110008f, -0.0749505f, -0.00583533f, -0.0244968f, -0.0629447f, -0.0849419f, 0.0149222f, -0.0921093f, 0.0569924f, 0.053806f, -0.129071f, 0.0658975f, 0.136648f, 0.0418901f, -0.0312137f, 0.0830578f, -0.0789125f, -0.104186f, 0.0452759f, -0.0533351f, 0.0717283f, -0.0408574f, 0.0510284f, -0.13924f, -0.0407522f, -0.0318421f, 0.0494084f, 0.199022f, 0.0930713f, 0.208101f, -0.0740214f, 0.0500993f, -0.0609634f, 0.0677094f, -0.0229159f, -0.0725639f, 0.0514411f, 0.181402f, 0.0551718f, -0.0923629f, -0.117449f, -0.0963486f, -0.0791569f, -0.127033f, 0.0986236f, -0.155317f, -0.181355f, -0.00998766f, -0.0764074f, 0.0296038f, -0.0189728f, -0.0413061f, 0.047893f, 0.258885f, 0.0634384f, 0.224255f, 0.179427f, -0.153857f, -0.1248f, -0.035627f, -0.0961631f, -0.208684f, 0.216583f, 0.127871f, 0.0557714f, 0.0138025f, -0.0466323f, 0.0865939f, -0.0340532f, 0.0830937f, 0.0841876f, 0.00950125f, -0.0193555f, -0.0166588f, 0.216322f, -0.0740391f, -0.167198f, 0.102434f, -0.0476818f, -0.117612f, -0.0268429f, -0.0345731f, -0.0621415f, -0.12924f, 0.0350662f, 0.121563f, -0.122633f, -0.101134f, -0.0588185f, -0.103627f, 0.0489434f, -0.00632062f, -0.0155719f, 0.162799f, -0.00837424f, -0.110807f, -0.0532327f, 0.0945447f, -0.0414527f, -0.045626f, 0.0370292f, -0.183139f, -0.219701f, -0.0674682f, 0.0179123f, -0.00137924f, -0.0317929f, 0.146843f, -0.135342f, -0.0226036f, -0.0155227f, 0.088055f, -0.26204f, 0.0460458f, -0.0441857f, -0.0733038f, -0.0665089f, -0.178976f, -0.178454f, 0.166583f, 0.0297695f, -0.0120972f, 0.177051f, -0.048093f, -0.00250263f, -0.0736178f, -0.264038f, 0.201294f, 0.101955f, -0.0187858f, -0.0972331f, -0.265311f, 0.0431423f, -0.0628686f, 0.114896f, -0.0258723f, -0.20376f, -0.00122948f, -0.122846f, -0.0310558f, -0.0117862f, -0.0272454f, 0.0818012f, -0.0309882f, 0.0214322f, 0.226392f, 0.124972f, -0.112849f, -0.0544989f, -0.0356543f, 0.0502794f, 0.0476455f, 0.0344233f, -0.139078f, -0.0219407f, -0.0870147f, -0.102295f, -0.128166f, 0.00265093f, 0.15427f, -0.1488f, 0.0885249f, -0.190977f, 0.00653755f, -0.108244f, -0.0461647f, -0.0567829f, 0.225126f, -0.0622709f, 0.0746069f, 0.0284896f, -0.209861f, -0.0241492f, -0.0354917f, -0.150776f, -0.0119533f, -0.0253076f, -0.078324f, 0.103285f, 0.0380043f, -0.166961f, -0.108259f, 0.126914f, 0.209137f, -0.00575113f, 0.174594f, -0.044097f, 0.0687706f, 0.00120079f, 0.331485f, -0.135418f, -0.0705993f, -0.00476595f, 0.0212155f, -0.144963f, -0.0971553f, -0.0166222f, 0.0765248f, -0.117799f, 0.141515f, 0.233095f, -0.0456617f, -0.184627f, 0.000341058f, 0.193156f, -0.0582067f, -0.0631333f, 0.115042f, -0.0340653f, -0.070347f, -0.17293f, -0.0752822f, -0.0926911f, 0.171605f, -0.00210489f, -0.180202f, -0.0539088f, 0.192287f, -0.0619876f, 0.151636f, -0.151695f, -0.007274f, 0.092059f, 0.153835f, 0.199597f, -0.17388f, 0.0659664f, -0.0175764f, -0.0460245f, 0.264525f, -0.100137f, 0.00330768f, 0.0823767f, 0.0990917f, 0.156619f, 0.069622f, -0.078627f, 3.33741e-05f, 0.11696f, 0.0625006f, 0.00106364f, -0.0822393f, 0.118716f, 0.0644794f, 0.0284429f, 0.261367f, -0.0200286f, -0.0441847f, -0.0391195f, -0.118246f, -0.0448402f, -0.172288f, 0.0627179f, 0.0532854f, -0.0268378f, 
0.0500942f, -0.202119f, -0.102956f, -0.030983f, 0.204023f, -0.0167333f, -0.142507f, 0.0696142f, -0.0907102f, -0.113511f, 0.181735f, -0.0414162f, -0.151163f, -0.197607f, -0.294715f, 0.194818f, 0.0447227f, -0.0845549f, 0.000446896f, 0.0324615f, -0.0146769f, -0.16138f, 0.0424589f, -0.00853703f, -0.148327f, -0.00376977f, 0.159337f, -0.177481f, 0.0404892f, 0.11599f, -0.0291687f, -0.00729663f, -0.072457f, -0.0861569f, -0.147166f, -0.00359691f, 0.0100766f, 0.0814809f, -0.0016804f, -0.292146f, 0.0337088f, 0.0670551f, -0.101796f, -0.0490747f, -0.182828f, -0.0516549f, -0.00496494f, -0.0345553f, 0.136531f, 0.170005f, -0.0778066f, 0.00860977f, 0.162308f, 0.0314563f, -0.0906035f, 0.142333f, -0.0477308f, 0.0498817f, 0.0203438f, -0.135002f, -0.060149f, -0.164977f, -0.159914f, -0.154439f, -0.182108f, -0.124411f, 0.00878263f, -0.10109f, -0.124342f, 0.235977f, 0.0252979f, -0.118639f, -0.032281f, -0.00948904f, 0.0751957f, -0.0254581f, -0.0876938f, 0.258546f, -0.0447662f, 0.189695f, -0.141869f, -0.0198459f, -0.081758f, -0.0877936f, 0.105717f, 0.171741f, 0.249455f, 0.0105537f, 0.0711035f, 0.0496381f, 0.00856438f, -0.0559322f, 0.0673262f, 8.24554e-05f, -0.08786f, 0.035945f, 0.196217f, 0.126371f, -0.0813913f, -0.201842f, 0.164194f, -0.137666f, -0.0147913f, -0.0139483f, -0.0839548f, 0.0448914f, 0.0975126f, 0.0501371f, 0.0499948f, 0.0322864f, 0.0952114f, 0.101062f, -0.05593f, -0.0119803f, -0.0304045f, -0.160986f, -0.18139f, 0.0343838f, -0.147475f, 0.0161662f, -0.121343f, 0.0794716f, -0.0211018f, -0.0252479f, -0.125145f, 0.0654196f, -0.00989245f, -0.0350131f, -0.0595337f, -0.0157397f, -0.130756f, 0.0295524f, 0.0724131f, -0.0428336f, 0.0137997f, 0.0572493f, 0.00937429f, 0.163981f, 0.0255233f, -0.110694f, 0.0502783f, 0.310631f, -0.189034f, -0.124126f, 0.0459643f, -0.0252854f, 0.146522f, -0.149928f, -0.121116f, 0.041718f, -0.097977f, 0.170547f, 0.124892f, 0.22247f, 0.106992f, -0.00643407f, 0.0286726f, -0.0618412f, -0.0165896f, 0.198988f, -0.0515238f, 0.219947f, -0.12288f, 0.301123f, -0.059546f, 0.0497198f, 0.0603091f, -0.115947f, 0.0120238f, 0.00475328f, 0.152704f, -0.0748359f, -0.0547798f, -0.0109254f, 0.0587732f, -0.0470967f, 0.107676f, -0.147906f, 0.0389335f, 0.00923218f, 0.153507f, 0.123108f, 0.134881f, -0.0269816f, 0.0172862f, 0.0456983f, 0.106014f, -0.0798172f, -0.10801f, -0.00512682f, 0.112286f, 0.0667568f, 0.0530622f, -0.0174935f, -0.296172f, -0.0229701f, -0.145664f, 0.0148432f, -0.167926f, -0.114491f, 0.050146f, 0.0738624f, 0.000883195f, 0.018622f, 0.128987f, 0.206308f, 0.219465f, 0.180042f, 0.0389317f, -0.142232f, 0.129695f, 0.131922f, -0.116924f, -0.0381372f, -0.075169f, -0.162921f, -0.295322f, 0.00272688f, -0.1241f, 0.118035f, -0.0555825f, 0.0738209f, -0.000890261f, 0.170495f, -0.0104928f, -0.118044f, 0.0952553f, 0.0152802f, 0.098469f, 0.0269081f, 0.0304095f, -0.175478f, -0.0933294f, -0.00951543f, 0.120319f, -0.0256024f, -0.0571786f, -0.0580948f, 0.0112159f, 0.00898838f, -0.0429338f, 0.00138374f, 0.141736f, 0.179593f, 0.0689599f, 0.138638f, -0.0213535f, 0.0925715f, 0.112653f, 0.0100879f, -0.135327f, -0.0761553f, -0.205939f, -0.118116f, 0.0145743f, 0.0963195f, 0.10418f, 0.192853f, -0.108119f, -0.0551882f, -0.118123f, 0.0444288f, -0.0940051f, 0.0008902f, -0.123441f, 0.131081f, -0.0841662f, -0.212635f, -0.132143f, 0.246695f, 0.14875f, -0.0621554f, 0.0644998f, -0.0812276f, -0.15437f, 0.14992f, 0.183818f, -0.21665f, 0.0243834f, -0.0436365f, 0.267649f, 0.0762967f, -0.0913864f, 0.243118f, -0.115798f, -0.161716f, 0.180914f, -0.0414196f, 0.0197701f, -0.032065f, -0.0140121f, 0.0142414f, 0.149433f, -0.0434218f, 
0.162488f, 0.0388419f, -0.0829379f, -0.174202f, 0.190183f, -0.127198f, 0.176703f, -0.12439f, -0.133778f, 0.0376584f, -0.0833935f, 0.0331047f, 0.157363f, -0.0884928f, -0.331471f, -0.0103832f, 0.0185702f, 0.00180358f, -0.047143f, 0.0940995f, 0.0552036f, -0.0308903f, -0.0979566f, 0.092021f, 0.0475873f, -0.0787908f, 0.0619735f, -0.162661f, -0.0712164f, -0.0313521f, -0.0853252f, 0.1336f, 0.103107f, -0.0113819f, -0.023578f, 0.0317859f, -0.133723f, 0.00183594f, 0.011278f, -0.118521f, -0.226179f, 0.267113f, -0.106938f, 0.059034f, -0.152981f, 0.144182f, -0.0622802f, 0.0452729f, 0.0647528f, -0.0305703f, 0.18942f, 0.125856f, 0.060026f, -0.101473f, 0.22643f, -0.145714f, -0.052292f, -0.210142f, 0.0614877f, -0.0517462f, 0.0524913f, -0.0728767f, -0.190416f, -0.0966048f, -0.183528f, -0.0509043f, 0.0488684f, -0.0738124f, -0.0423975f, -0.206571f, -0.0898551f, 0.197522f, 0.0272785f, -0.00859614f, 0.304257f, -0.178542f, -0.030836f, -0.202739f, 0.25374f, 0.123483f, -0.0835555f, -0.223911f, 0.256327f, 0.0261439f, 0.0272462f, -0.079918f, 0.0463708f, -0.0382426f, 0.0250968f, 0.0750829f, 0.146631f, -0.0028671f, 0.367397f, -0.153537f, 0.0142552f, -0.130524f, -0.149762f, 0.00917445f, 0.284289f, 0.0414515f, 0.150337f, 0.0574794f, -0.174154f, -0.0651482f, 0.0157276f, 0.220227f, 0.0855927f, 0.167794f, -0.045045f, -0.0247924f, 0.140008f, 0.180162f, 0.0534937f, -0.342088f, 0.0726904f, -0.0969014f, -0.0416665f, -0.0179782f, -0.154066f, -0.0638126f, -0.0984411f, -0.169677f, 0.134382f, -0.140244f, 0.167786f, 0.119754f, -0.0478107f, -0.152725f, -0.232042f, -0.0343375f, 0.0617236f, -0.0129119f, 0.044775f, -0.0465971f, -0.136438f, -0.0199089f, 0.0796089f, -0.0132068f, -0.188589f, 0.0697174f, -0.0120818f, 0.00524542f, -0.149591f, -0.00902931f, 0.123728f, 0.0313669f, 0.276525f, -0.0269883f, -0.293889f, -0.033967f, 0.128998f, 0.0426487f, -0.072631f, 0.0420613f, -0.0228561f, -0.0616836f, -0.00963289f, -0.142734f, -0.0130697f, -0.150318f, -0.131931f, -0.0517048f, -0.0869106f, -0.115812f, 0.00647047f, 0.215707f, -0.0713472f, -0.12028f, -0.118481f, 0.0407334f, -0.0986782f, 0.0710171f, 0.161216f, -0.125361f, 0.0020411f, 0.0986792f, 0.175617f, -0.235746f, -0.0945418f, -0.0887824f, 0.0122411f, -0.0118314f, -0.175363f, 0.0535823f, 0.0292668f, -0.126305f, 0.0427503f, 0.124513f, 0.261667f, 0.0142618f, -0.114591f, 0.0539734f, -0.110589f, 0.0274366f, -0.294013f, -0.0759431f, 0.0834387f, -0.0629129f, -0.0964626f, 0.13772f, 0.0793417f, 0.207738f, -0.0148147f, 0.0945678f, 0.236599f, 0.325046f, -0.0115739f, 0.115173f, 0.293077f, 0.207232f, 0.198167f, 0.190213f, -0.012586f, -0.0843916f, 0.112677f, -0.00340349f, 0.0915956f, -0.0312061f, -0.178245f, -0.0711494f, 0.235604f, 0.0711315f, 0.0613761f, -0.0882837f, 0.0357169f, -0.121777f, 0.179189f, 0.139817f, -0.133691f, 0.036203f, 0.105639f, 0.172916f, 0.0205647f, -0.162003f, -0.0976792f, -0.0890894f, 0.0415793f, 0.122302f, -0.034495f, 0.191882f, -0.220922f, 0.0836556f, 0.0956715f, 0.0775133f, 0.0926215f, 0.192959f, 0.0640509f, 0.0759727f, -0.127438f, 0.0338757f, -0.0867821f, 0.132475f, -0.0735088f, -0.0309119f, 0.0284463f, 0.0310688f, 0.0747977f, 0.150878f, 0.0586536f, 0.0568884f, -0.000374948f, 0.158279f, 0.123853f, -0.106534f, -0.0115304f, 0.202805f, 0.198232f, 0.228068f, -0.186664f, -0.0666965f, -0.216452f, 0.0970679f, -0.0302432f, 0.00934743f, 0.130089f, 0.173028f, 0.0993083f, -0.243625f, -0.0620919f, -0.0782342f, 0.066171f, -0.101588f, -0.113473f, -0.0587797f, 0.0521048f, 0.0915347f, 0.00285832f, -0.0711215f, -0.0991172f, 0.0754687f, -0.00881336f, -0.182498f, 0.160201f, 0.10812f, -0.0958625f, 
-0.102869f, -0.154278f, 0.169123f, 0.280065f, 0.0778973f, -0.0259474f, -0.0306253f, -0.00458834f, -0.038694f, -0.205975f, 0.0304589f, 0.089418f, -0.00359111f, 0.0834823f, 0.0469852f, 0.211635f, 0.153898f, -0.0340516f, -0.0778895f, -0.045573f, -0.181518f, 0.141783f, -0.070765f, -0.113321f, -0.0926663f, -0.0611437f, -0.10559f, -0.06136f, 0.00576792f, 0.179099f, 0.0490123f, 0.0382916f, 0.0102474f, 0.104101f, 0.130055f, 0.0326271f, 0.00117686f, -0.127408f, 0.132335f, -0.173075f, 0.0413512f, 0.0595237f, -0.0319169f, -0.125928f, 0.0790645f, -0.190213f, 0.0786231f, -0.0254424f, 0.078559f, 0.105516f, 0.0302581f, 0.0607161f, -0.0618543f, 0.135543f, -0.154344f, 0.0282685f, -0.0556947f, 0.00219797f, -0.100956f, 0.0438901f, -0.0345228f, -0.0190139f, 0.0124859f, -0.155125f, 0.0221977f, 0.103791f, 0.119654f, -0.12209f, -0.0661289f, -0.210783f, 0.0946152f, -0.00771204f, 0.0251201f, -0.0101425f, -0.100511f, 0.00982923f, -0.172337f, 0.0187309f, -0.0121853f, -0.05736f, -0.105573f, 0.0623509f, -0.0307114f, 0.0633378f, -0.0112864f, 0.0170103f, -0.153245f, 0.0464127f, 0.0565105f, 0.0310833f, -0.0778412f, 0.0932749f, 0.0792456f, -0.177013f, 0.0190007f, 0.0785465f, 0.0265335f, 0.115821f, 0.0470261f, 0.062036f, 0.0295261f, 0.0893393f, -0.0253812f, 0.0789343f, -0.0503431f, 0.013657f, 0.181182f, -0.0854078f, 0.0949825f, 0.01508f, -0.0531158f, 0.0998334f, 0.0301811f, -0.0167208f, 0.100272f, 0.103981f, -0.154253f, -0.103543f, -0.165599f, 0.0973987f, -0.124227f, 0.027499f, 0.0251118f, 0.00518567f, -0.236505f, -0.133146f, -0.2521f, -0.113527f, 0.00596217f, -0.0276467f, -0.0991293f, 0.00624565f, 0.0108509f, 0.105118f, 0.00939116f, 0.0679117f, 0.130588f, -0.214799f, 0.0231474f, 0.144498f, 0.122746f, 0.0330317f, 0.0394939f, 0.10216f, 0.0178169f, -0.0431689f, 0.0152703f, -0.0923901f, 0.074839f, 0.0997356f, -0.177636f, -0.104968f, 0.01351f, -0.0942675f, 0.0839423f, 0.137864f, -0.0448634f, 0.0117194f, 0.0848778f, 0.197623f, 0.0696365f, 0.174923f, 0.0336992f, -0.0665797f, -0.0162588f, -0.0695512f, 0.0023802f, 0.203861f, -0.0487839f, 0.160064f, 0.141641f, -0.0806738f, -0.00651877f, -0.117639f, 0.068388f, -0.0463156f, -0.0983185f, -0.159903f, -0.184173f, -0.0439536f, 0.0937831f, -0.0243605f, -0.187385f, -0.0881159f, 0.135717f, 0.00610548f, -0.108267f, -0.0112765f, 0.011077f, 0.0281299f, 0.0258125f, 0.0702131f, 0.00995715f, 0.168076f, -0.123016f, -0.017689f, 0.214862f, -0.168199f, -0.0410651f, -0.0640263f, -0.03534f, 0.0206561f, -0.0391884f, -0.21132f, 0.136741f, -0.00866583f, 0.216935f, 0.0675429f, -0.0344618f, -0.000327164f, -0.0888234f, -0.0631006f, 0.119289f, -0.106489f, -0.0653558f, -0.0648284f, 0.0357283f, -0.156316f, 0.12275f, -0.0118378f, 0.0326235f, 0.0911354f, 0.186131f, 0.125147f, 0.136384f, -0.122628f, 0.0253395f, -0.0896912f, 0.00475646f, -0.0413952f, 0.200544f, -0.118489f, -0.115816f, -0.00088158f, 0.193806f, 0.204797f, -0.0989526f, -0.163561f, -0.0446976f, 0.0319141f, 0.309673f, -0.0433591f, -0.282107f, -0.132429f, -0.0928028f, 0.102329f, 0.114603f, -0.179977f, 0.0812027f, 0.0104497f, 0.110859f, 0.161328f, -0.0330919f, 0.237393f, 0.0374836f, -0.0709295f, 0.05637f, 0.063726f, 0.204909f, -0.124904f, -0.0678783f, 0.104272f, 0.0850258f, -0.12423f, 0.0497491f, 0.012049f, -0.0664524f, -0.0387143f, 0.17346f, 0.054451f, 0.133654f, 0.0841246f, 0.0805135f, 0.0574905f, 0.0479757f, -0.12195f, 0.06703f, 0.113929f, 0.0371968f, 0.0882491f, 0.0962221f, -0.0980939f, -0.0301576f, 0.0211478f, 0.0481364f, 0.00149206f, 0.109121f, 0.0600876f, 0.00424928f, 0.181823f, -0.0176237f, 0.159743f, 0.228803f, 0.0297563f, 0.0173895f, 0.0262509f, 
0.0271609f, 0.0381225f, -0.102688f, 0.0979121f, 0.0129589f, 0.29025f, -0.0870115f, 0.122996f, -0.212877f, -0.143569f, 0.10431f, 0.0596165f, 0.00652985f, 0.163829f, -0.0315745f, -0.0507846f, -0.106299f, -0.0542598f, 0.0446323f, 0.0869534f, 0.00285702f, -0.131972f, 0.106759f, -0.00757924f, -0.0443523f, 0.151231f, 0.143231f, 0.123846f, -0.106466f, 0.0865932f, -0.0418223f, 0.0697526f, 0.236489f, 0.135609f, -0.0129149f, -0.211804f, 0.00220671f, 0.0649882f, -0.0234657f, 0.104777f, -0.194886f, 0.132696f, -0.0107263f, 0.184057f, 0.113698f, -0.203956f, 0.0239961f, -0.0426956f, -0.10859f, -0.0944976f, -0.0275666f, 0.102759f, -0.0548122f, -0.0484598f, 0.0776276f, 0.0715995f, -0.133896f, -0.115386f, -0.0112427f, 0.110959f, 0.229496f, -0.231564f, -0.0603242f, 0.0370974f, -0.119638f, -0.0502784f, -0.186808f, 0.0254269f, 0.138665f, -0.190163f, 0.158242f, 0.0408017f, 0.0681737f, 0.0340627f, -0.0165987f, -0.0471619f, 0.0176238f, -0.0167147f, -0.0720574f, -0.175855f, 0.019165f, 0.096485f, -0.0636065f, 0.0559269f, 0.105007f, 0.0460049f, 0.0327097f, -0.171095f, 0.100533f, -0.0661421f, -0.0727171f, -0.0391203f, 0.0313387f, -0.117881f, 0.107694f, 0.117669f, -0.154694f, 0.0612347f, 0.0203976f, -0.138683f, 0.0405877f, -0.115486f, 0.103155f, 0.00251827f, -0.0565817f, -0.0449883f, 0.147169f, 0.00124512f, -0.175298f, 0.0400523f, 0.0241771f, 0.183906f, 0.138561f, 0.0246843f, 0.101158f, 0.0108304f, -0.0364029f, -0.0346576f, -0.00711996f, -0.0740881f, -0.142724f, -0.0260355f, 0.117348f, -0.126261f, -0.169023f, 0.109965f, -0.0548418f, -0.0282443f, -0.0065788f, -0.168918f, 0.000494506f, 0.0406972f, 0.0979397f, -0.184287f, -0.100467f, -0.182486f, -0.122636f, 0.071415f, 0.00972081f, 0.11731f, 0.0129146f, -0.0776944f, -0.255279f, 0.0232243f, -0.163658f, -0.160424f, -0.0858704f, 0.123559f, 0.0140929f, 0.0145527f, -0.0952696f, 0.0338395f, 0.0702864f, -0.2119f, 0.0612608f, -0.0136494f, 0.0154534f, 0.0397866f, -0.19134f, -0.0378334f, -0.144064f, -0.0123331f, -0.014455f, 0.0860542f, -0.0446385f, 0.0359628f, 0.0240252f, 0.112996f, 0.094632f, -0.135482f, -0.00365424f, 0.0494169f, 0.185901f, -0.0513099f, -0.000944781f, -0.0541423f, -0.130556f, -0.233869f, -0.185643f, -0.00570781f, 0.0545927f, -0.054457f, 0.00955505f, 0.0386987f, 0.0384507f, -0.00104959f, 0.0531672f, -0.0766589f, -0.230745f, 0.14505f, 0.101353f, -0.0844657f, -0.0233638f, -0.0379312f, -0.00945792f, -0.202558f, -0.119602f, 0.0226143f, -0.0453615f, -0.071951f, 0.223703f, 0.344455f, -0.0623883f, -0.052098f, 0.0520586f, -0.140091f, -0.0596239f, -0.141676f, -0.013343f, -0.0524427f, -0.225276f, -0.112335f, -0.0850935f, -0.0892301f, 0.065496f, 0.137938f, 0.0560576f, 0.154093f, 0.173391f, 0.313445f, -0.0242388f, -0.068445f, 0.146597f, 0.0180209f, 0.178989f, -0.118049f, 0.106193f, -0.0328056f, -0.0267389f, 0.0901965f, -0.129721f, 0.0159628f, -0.0995975f, 0.0968064f, 0.0998746f, -0.0919719f, -0.053416f, -0.0821707f, -0.0483384f, 0.180465f, 0.0187676f, 0.0146237f, -0.112684f, 0.046035f, 0.153405f, -1.19614e-05f, -0.0421427f, 0.0821237f, -0.0308363f, -0.0324135f, 0.0147209f, -0.146685f, 0.121743f, -0.11718f, -0.00445091f, -0.0395488f, -0.121531f, -0.0694454f, 0.172886f, 0.0835573f, 0.265926f, 0.124949f, 0.0290115f, 0.125356f, 0.0961226f, 0.457451f, -0.0958567f, -0.00350148f, 0.147809f, -0.101534f, 0.104398f, -0.0537287f, 0.140411f, 0.0823862f, -0.0815362f, 0.0902028f, -0.174247f, -0.00646692f, 0.221576f, -0.0295718f, -0.102456f, 0.0384796f, 0.0802743f, 0.0163816f, -0.0162759f, -0.124626f, 0.0881398f, 0.0876266f, 0.22802f, 0.140473f, 0.0333231f, -0.118547f, -0.144139f, 
0.0570663f, 0.0619606f, 0.0187579f, -0.0412077f, -0.0888968f, -0.0349804f, 0.318855f, -0.173824f, 0.0944477f, 0.0180668f, -0.148883f, -0.0851942f, -0.104603f, -0.0790042f, -0.0950323f, -0.0133843f, 0.0108449f, -0.351859f, -0.157695f, -0.108701f, -0.0294391f, -0.0939628f, -0.164028f, -0.0467405f, -0.0874406f, -0.094034f, 0.033249f, -0.272803f, 0.0498752f, -0.00824117f, -0.11591f, 0.0286613f, -0.197608f, 0.231257f, -0.00626354f, -0.0702874f, -0.0237348f, 0.0400383f, -0.171625f, 0.0357015f, 0.157899f, 0.165711f, -0.0219558f, 0.0856638f, -0.0723221f, 0.0535722f, 0.0578089f, 0.023314f, 0.0251418f, -0.0508246f, -0.00593728f, -0.0174724f, 0.0567787f, -0.230358f, 0.035812f, -0.185908f, 0.0863396f, -0.0601688f, -0.0980393f, 0.0140485f, -0.255149f, -0.000731179f, 0.217879f, 0.0743183f, -0.104901f, 0.216769f, -0.0482919f, -0.113859f, -0.03032f, -0.219758f, -0.0229453f, -0.133051f, 0.0627194f, -0.0223488f, -0.124755f, -0.017402f, -0.0484685f, 0.018337f, 0.0849907f, -0.0609281f, 0.0659621f, -0.0488192f, -0.139866f, -0.144223f, -0.0241202f, 0.0504802f, 0.0430913f, 0.0357202f, -0.149649f, 0.0820261f, 0.0318202f, 0.15027f, 0.0661016f, 0.0747123f, 0.00231465f, 0.201447f, -0.0660263f, -0.252116f, 0.0573818f, -0.0447926f, 0.0668174f, 0.00301092f, -0.178422f, 0.137892f, 0.0228328f, -0.0884502f, -0.114293f, 0.0213452f, 0.080643f, 0.0656067f, 0.035317f, 0.103264f, -0.00559141f, -0.160602f, -0.127731f, -0.0977164f, -0.0263403f, 0.155015f, 0.0644729f, 0.027809f, 0.13749f, 0.103534f, 0.0733221f, -0.0648505f, -0.00191882f, -0.0582054f, 0.0326567f, -0.164168f, -0.0242265f, -0.072144f, 0.0491359f, -0.0679505f, -0.195372f, 0.136091f, 0.0756408f, -0.174394f, 0.0211665f, 0.222092f, -0.0373786f, 0.0672465f, -0.00890659f, 0.0876757f, 0.197998f, -0.16732f, 0.0723107f, 0.0915797f, 0.0259003f, 0.0703658f, -0.0204355f, -0.117171f, 0.108487f, 0.00341093f, 0.0792466f, 0.128769f, -0.0751425f, 0.0470003f, -0.0449792f, -0.106271f, 0.0310195f, 0.0542083f, -0.200501f, 0.0464742f, 0.0116021f, 0.224922f, -0.028577f, -0.156118f, 0.0448646f, -0.149147f, -0.10016f, -0.0607476f, -0.144491f, 0.0459414f, -0.0582541f, -0.0265905f, -0.0502976f, 0.103042f, -0.25723f, 0.0336249f, 0.163841f, 0.130921f, 0.0749012f, -0.0680818f, -0.0507867f, -0.0100499f, -0.0568986f, 0.058327f, -0.0650027f, 0.0144343f, -0.0649182f, -0.108125f, 0.108384f, 0.127116f, 0.0579867f, -0.0479345f, -0.00511946f, 0.0384187f, 0.141223f, 0.0817245f, 0.0239134f, 0.0247064f, -0.221317f, -0.0161081f, 0.0159264f, 0.0183802f, 0.116362f, -0.0336379f, 0.0608195f, -0.0270331f, -0.0352726f, 0.148712f, -0.0709746f, 0.0291384f, -0.053869f, 0.0276907f, -0.0791907f, 0.137876f, 0.116221f, -0.103664f, 0.22063f, 0.17176f, 0.0678584f, -0.14821f, 0.0915509f, 0.0660162f, -0.074385f, -0.0151079f, -0.0220113f, 0.0820531f, 0.0523038f, 0.0281034f, -0.117479f, -0.109957f, -0.0337358f, 0.134553f, 0.079631f, -0.0421651f, 0.0662477f, -0.152794f, -0.016276f, 0.0677339f, 0.0504747f, -0.00110145f, 0.18123f, -0.101361f, 0.134697f, -0.109118f, 0.0068025f, 0.103569f, -0.0517373f, -0.164347f, 0.012871f, -0.00192389f, 0.0111564f, -0.132545f, 0.0229439f, -0.0275043f, 0.0274763f, 0.0198057f, 0.112087f, 0.0878935f, 0.0770695f, -0.171981f, -0.0492992f, 0.137301f, 0.0433335f, 0.042413f, 0.0155003f, -0.198834f, 0.0533409f, 0.0191029f, -0.0806243f, 0.0561305f, -0.0183593f, 0.0164316f, -0.00199239f, 0.169095f, -0.00859554f, 0.0397453f, 0.0419361f, 0.0285273f, -0.0120394f, 0.105144f, -0.0894821f, 0.08916f, -0.0430217f, -0.13751f, 0.0639039f, 0.0352381f, 0.0647933f, 0.194021f, 0.0896462f, -0.0358372f, 0.143915f, 
0.0738918f, -0.0461328f, 0.0807701f, 0.148194f, -0.016117f, -0.0459193f, -0.0820022f, -0.00202246f, 0.191711f, 0.0661316f, -0.0855849f, 0.128798f, 0.0364059f, 0.0726977f, -0.0876825f, -0.105728f, -0.254595f, 0.168192f, 0.117645f, -0.0568892f, 0.16199f, -0.0357818f, -0.122085f, 0.124143f, 0.0774778f, -0.0390939f, -0.031585f, 0.112117f, 0.0120238f, -0.0610142f, 0.0313709f, 0.042984f, -0.138822f, 0.0607247f, 0.0229658f, 0.0641858f, 0.0151037f, 0.00767282f, 0.060004f, 0.144489f, -0.0374878f, 0.0827156f, -0.0932814f, 0.0875921f, -0.0531331f, 0.0803721f, -0.0132492f, -0.0938988f, -0.103993f, -0.0493939f, -0.0377761f, 0.251373f, 0.142677f, -0.032715f, 0.152048f, -0.00124129f, 0.0716068f, 0.00956766f, -0.0369916f, 0.00519432f, -0.126172f, 0.00172916f, -0.176537f, 0.0774523f, -0.170386f, -0.154203f, 0.190352f, 0.0255686f, -0.145291f, 0.108137f, -0.189781f, 0.0629136f, -0.0926047f, 0.0312405f, -0.255381f, -0.0679597f, 0.00922947f, -0.168558f, 0.187777f, 0.0255726f, 0.20613f, -0.136775f, -0.177f, 0.0244819f, 0.0781556f, 0.0197977f, -0.0290061f, -0.103558f, 0.0399512f, -0.0236349f, -0.0131369f, -0.0336583f, -0.0577976f, -0.130875f, 0.0702828f, 0.058145f, 0.0671021f, -0.0577003f, -0.0503693f, 0.0168152f, -0.282809f, 0.0912611f, 0.0531633f, -0.131444f, -0.0662757f, 0.133138f, 0.0223595f, -0.0327804f, 0.0268208f, 0.0587338f, 0.0174441f, -0.0180882f, -0.0163224f, 0.0599692f, -0.114655f, 0.361815f, 0.00322661f, 0.111076f, 0.101775f, -0.13068f, 0.302714f, 0.00500131f, -0.086191f, -0.0396698f, 0.0817408f, -0.0436462f, 0.0970575f, 0.20586f, 0.167716f, 0.0681541f, -0.103938f, -0.0637077f, -0.142524f, 0.0611986f, 0.0635891f, 0.0273219f, -0.179359f, -0.101431f, -0.0933721f, 0.159005f, -0.0459315f, -0.0390213f, -0.0233101f, -0.0870889f, 0.00149782f, -0.201415f, 0.00712625f, 0.231972f, -0.0350238f, 0.0293994f, 0.0412555f, -0.198868f, 0.0278495f, -0.0182273f, 0.107299f, -0.0131631f, -0.106871f, -0.159949f, -0.0861416f, 0.0412769f, 0.0853726f, 0.015589f, 0.0418062f, -0.188743f, -0.277988f, 0.0897232f, -0.0318778f, -0.142695f, 0.0586345f, 0.0323883f, 0.158931f, -0.109818f, 0.0508577f, -0.0842704f, -0.0225752f, 0.0602861f, 0.0987234f, -0.1846f, 0.12959f, -0.0365815f, 0.0901566f, 0.0870746f, 0.0853966f, -0.00431902f, 0.114822f, 0.0954549f, -0.0545084f, -0.104401f, -0.120637f, 0.090998f, -0.130909f, -0.0126249f, -0.148913f, 0.163382f, -0.0123444f, -0.129673f, 0.0515782f, 0.130497f, 0.180933f, -0.0632129f, -0.0403979f, -0.063006f, -0.0681135f, -0.20217f, 0.128892f, 0.0408059f, -0.0721418f, 0.152005f, -0.118831f, -0.172918f, -0.131598f, 0.232324f, 0.060085f, 0.0141131f, 0.0219422f, 0.107767f, 0.0640985f, -0.0623485f, -0.0412897f, -0.133879f, 0.104109f, -0.100383f, -0.117235f, -0.137473f, 0.1149f, -0.140438f, -0.0515366f, 0.0124563f, -0.133195f, 0.0898727f, 0.130228f, 0.0727901f, 0.0701116f, 0.236203f, 0.0227065f, -0.137993f, -0.0494905f, 0.0439939f, 0.142278f, -0.113859f, -0.160368f, -0.143282f, 0.123583f, -0.0535099f, -0.111852f, 0.0730124f, -0.23005f, 0.13827f, -0.0226136f, 0.084057f, 0.180386f, 0.00839323f, 0.234718f, 0.11713f, -0.147188f, -0.158055f, -0.0200178f, -0.123074f, 0.0683321f, 0.0449613f, 0.117177f, 0.170573f, 0.0446663f, -0.108605f, 0.0840917f, 0.0497646f, 0.0336182f, 0.139355f, 0.0360185f, 0.138814f, -0.0231408f, -0.0278952f, -0.0270706f, -0.0168267f, -0.145094f, -0.124716f, 0.0586625f, -0.0251274f, 0.0606853f, -0.147423f, 0.0854995f, 0.210241f, 0.0497896f, 0.0537564f, 0.00912126f, 0.0882285f, -0.017194f, -0.0138386f, 0.0700378f, 0.0319312f, 0.0221053f, -0.0144568f, -0.0852547f, -0.0283141f, -0.0256961f, 
0.176971f, 0.0986009f, -0.0707535f, -0.127756f, 0.116596f, -0.124882f, -0.0255554f, 0.028969f, 0.137651f, 0.0728256f, -0.0796759f, 0.19251f, -0.0862567f, 0.018589f, 0.0863452f, -0.0692152f, 0.0392511f, 0.0446263f, -0.279956f, -0.0665377f, -0.06419f, -0.0676936f, 0.242758f, -0.0693788f, 0.047965f, -0.0116581f, -0.0110953f, -0.0835732f, 0.056325f, -0.089675f, -0.0040357f, -0.157509f, -0.140831f, -0.0331557f, 0.186644f, -0.0909078f, 0.0842588f, -0.163509f, 0.0398502f, -0.0188574f, 0.0889098f, 0.0138491f, 0.144131f, -0.162036f, 0.0445894f, 0.100251f, 0.0916798f, 0.166395f, -0.137134f, 0.214517f, -0.109735f, -0.111622f, 0.163103f, -0.0927315f, 0.0967638f, -0.115608f, -0.108905f, -0.222569f, -0.0768542f, 0.0249024f, 0.0330236f, -0.0829412f, 0.00766906f, 0.143308f, 0.0712682f, 0.0154826f, -0.136792f, -0.0169766f, 0.121836f, -0.0416192f, 0.0099032f, -0.128129f, -0.100827f, -0.0634154f, -0.0228757f, -0.160834f, -0.0820097f, -0.178114f, 0.0662788f, -0.000941255f, 0.0309269f, 0.0509444f, 0.13034f, 0.0258725f, -0.131666f, 0.108277f, 0.0395937f, -0.00811404f, 0.0786393f, -0.135662f, 0.0133231f, 0.173499f, -0.163284f, -0.0224906f, -0.00665345f, -0.0533379f, -0.131522f, -0.103984f, -0.0206072f, 0.00912571f, -0.0724774f, -0.0291832f, 0.0557172f, 0.149422f, -0.0457896f, -0.144702f, -0.0773116f, -0.0765428f, 0.0896067f, 0.0358179f, -0.0897252f, 0.117653f, -0.0364655f, 0.166535f, 0.01834f, 0.105553f, 0.057172f, 0.0401444f, -0.0837494f, 0.219608f, -0.174112f, 0.231012f, -0.148596f, -0.0183971f, 0.0213983f, -0.0622388f, 0.0100916f, 0.12334f, 0.234837f, 0.0664872f, 0.137154f, 0.00734591f, -0.062415f, 0.205686f, 0.113047f, 0.000575385f, -0.116718f, -0.0165835f, -0.081594f, -0.095452f, -0.0749323f, -0.122417f, 0.0677335f, 0.105289f, -0.022653f, -0.103429f, -0.0606251f, 0.00102075f, -0.126195f, 0.0608821f, 0.195855f, 0.0921915f, -0.159083f, -0.113269f, 0.10901f, 0.219588f, -0.0220762f, 0.0551116f, 0.18059f, -0.199499f, -0.0130613f, -0.00138426f, -0.074641f, 0.0814856f, 0.150209f, -0.190257f, -0.0152563f, -0.0511047f, -0.158146f, 0.189442f, 0.0833627f, -0.0404029f, -0.117733f, -0.26819f, 0.061939f, 0.109989f, -0.0540537f, -0.0652968f, -0.198018f, -0.0550514f, -0.26938f, 0.0748025f, 0.0871921f, -0.0992483f, -0.1471f, 0.0151256f, 0.131382f, -0.124709f, 0.0790433f, 0.0336372f, 0.0016725f, -0.124021f, 0.14385f, -0.177045f, -0.128637f, -0.016355f, 0.059851f, -0.00881128f, 0.158796f, 0.0328201f, 0.11785f, 0.082339f, -0.154766f, -0.0932948f, 0.0213303f, 0.0908482f, -0.103333f, -0.235841f, -0.0418065f, 0.0522141f, -0.00671328f, 0.0405856f, -0.118595f, 0.24031f, 0.0636851f, -0.119322f, -0.0349749f, 0.0146042f, -0.0756338f, -0.0797207f, -0.114637f, -0.192398f, -0.0449169f, 0.0144687f, 0.103353f, -0.198404f, -0.165665f, 0.305795f, 0.182551f, -9.44947e-05f, -0.188019f, 0.145859f, -0.0540762f, -0.0422273f, 0.079418f, -0.113134f, -0.0331273f, 0.0413852f, 0.0186806f, -0.134649f, -0.156647f, 0.0379808f, -0.0336731f, 0.123864f, 0.0149338f, 0.00657353f, -0.0419852f, -0.0957983f, 0.113604f, -0.00177105f, -0.0377948f, -0.0729322f, -0.0795412f, 0.018301f, 0.0501318f, -0.150514f, 0.0114589f, 0.194727f, -0.067147f, -0.114292f, 0.0578186f, 0.0348688f, -0.120958f, 0.0488574f, 0.0615167f, 0.0919836f, -0.0458583f, -0.0620742f, -0.235883f, 0.0716301f, -0.0573557f, -0.0335477f, 0.0332895f, 0.225656f, 0.111733f, -0.183892f, 0.096668f, -0.0172189f, 0.00316638f, -0.0248811f, 0.0408698f, 0.106239f, 0.120735f, -0.198324f, 0.179693f, -0.0869977f, 0.0919217f, 0.0513973f, 0.0840604f, 0.0290311f, -0.0281886f, 0.134455f, 0.117689f, -0.00212076f, 
0.0472969f, -0.186076f, -0.203949f, 0.117962f, -0.00672446f, 0.0338923f, -0.19415f, -0.0798959f, 0.000549323f, -0.0499712f, 0.0791756f, -0.1247f, -0.203568f, -0.112774f, 0.105253f, -0.175439f, 0.0782389f, -0.0801494f, -0.0391737f, -0.0959358f, 0.0278142f, 0.081632f, -0.0858438f, 0.0603921f, -0.0914197f, -0.0116903f, -0.0168063f, -0.0998898f, -0.242943f, 0.000282315f, -0.0978673f, -0.181201f, -0.0418647f, -0.09246f, 0.130123f, -0.123f, 0.0854438f, 0.0409114f, -0.143904f, -0.0636778f, -0.105364f, 0.0406558f, 0.0023983f, -0.0976656f, 0.297788f, -0.200958f, -0.146904f, -0.227999f, 0.105231f, -0.0863152f, 0.186681f, 0.108009f, -0.0941487f, 0.0270565f, -0.0587463f, -0.0234597f, 0.0341337f, 0.0129784f, -0.0342323f, -0.0341861f, -0.164396f, 0.0601617f, 0.0199468f, -0.133455f, -0.109145f, -0.0117225f, 0.0220773f, -0.197391f, 0.139427f, -0.0728889f, 0.214279f, -0.164944f, 0.0369613f, -0.102114f, -0.185125f, -0.151873f, -0.00815908f, -0.0264697f, 0.00552361f, -0.156568f, 0.0592205f, 0.0311511f, 0.156493f, -0.00147048f, -0.171883f, -0.0962116f, 0.0902965f, 0.0563549f, 0.151594f, -0.0710129f, -0.0608283f, -0.152365f, 0.0426497f, 0.030058f, -0.115398f, 0.0787851f, -0.0380599f, -0.0300186f, -0.0918778f, 0.0722943f, 0.120473f, -0.0820042f, 0.169956f, 0.210586f, -0.0996093f, -0.0907196f, 0.000182023f, 0.0958698f, -0.162933f, 0.261805f, 0.0974647f, -0.136107f, 0.18988f, 0.125426f, -0.0366771f, -0.0348714f, -0.207521f, 0.00821073f, -0.0364474f, -0.0236695f, 0.151169f, 0.109363f, -0.117262f, 0.152242f, -0.0532562f, -0.00794913f, -0.0394138f, -0.0833678f, -0.0973465f, -0.00489541f, -0.0657901f, 0.201121f, 0.0696424f, -0.158346f, 0.215502f, 0.0627025f, 0.100304f, -0.00965824f, -0.0108773f, -0.0634982f, 0.162704f, -0.00708153f, 0.00731834f, -0.205403f, 0.0358317f, -0.129931f, 0.0845198f, 0.0927553f, -0.109328f, 0.0820288f, 0.0942257f, -0.0505503f, 0.0501674f, -0.00154132f, -0.0752623f, -0.0683064f, 0.0471702f, -0.0292958f, 0.0902095f, -0.138911f, 0.00477327f, -0.0754712f, 0.0463532f, 0.0414021f, -0.0300801f, 0.0141348f, 0.116683f, 0.124225f, 0.156021f, 0.155924f, 0.0038362f, -0.0565635f, 0.0605547f, 0.157843f, -7.7438e-05f, -4.01037e-05f, 0.105596f, 0.066878f, -0.104036f, 0.0175286f, -0.0669236f, 0.00825679f, -0.00457559f, 0.000918178f, -0.0109067f, 0.22797f, 0.0139599f, -0.092134f, -0.0777743f, -0.0937891f, -0.149157f, 0.0560046f, -0.0824797f, 0.0152864f, 0.181075f, 0.235977f, 0.187241f, 0.0522941f, 0.28482f, 0.169329f, -0.153408f, -0.142364f, 0.0739156f, -0.155945f, -0.029791f, -0.0390462f, -0.0330425f, 0.112735f, -0.0281493f, 0.0438765f, 0.145099f, 0.0904452f, -0.00968479f, -0.242941f, 0.17713f, 0.0550459f, 0.0826542f, 0.0873909f, -0.112603f, 0.0155304f, 0.0478797f, -0.0978957f, -0.236865f, -0.0587736f, -0.176084f, 0.118368f, -0.18309f, -0.0888975f, -0.0828919f, 0.278778f, -0.133874f, -0.0447204f, -0.0540337f, -0.067233f, -0.0708834f, 0.13262f, -0.0391171f, 0.0531088f, 0.148939f, -0.114385f, 0.181326f, -0.0141271f, -0.220534f, -0.0709256f, -0.242878f, 0.1084f, 0.246424f, -0.0597596f, -0.0450697f, -0.0967601f, -0.0582045f, -0.00437411f, -0.00567494f, 0.0800313f, -0.0221475f, 0.0221533f, 0.0923932f, -0.168607f, 0.0729527f, -0.110152f, -0.0455547f, -0.0380862f, -0.184675f, 0.146205f, 0.0347786f, 0.204044f, 0.0228161f, 0.0861639f, -0.294512f, -0.0446524f, 0.0105801f, 0.123291f, 0.026764f, 0.13687f, 0.110976f, 0.0115626f, -0.0192648f, 0.0325269f, -0.0424162f, -0.069614f, 0.0262381f, 0.0309285f, -0.0829241f, -0.0567973f, 0.0532956f, -0.170089f, 0.00834287f, 0.0721328f, -0.154345f, -0.13655f, 0.175838f, -0.033523f, 
0.0249502f, 0.104262f, -0.123745f, 0.00598759f, -0.0874215f, -0.0350418f, -0.0135892f, 0.0332784f, -0.0792378f, -0.129224f, 0.0941027f, 0.0856813f, -0.154563f, 0.0701456f, -0.0798687f, 0.0172937f, 0.159772f, 0.0225921f, -0.0950126f, -0.059751f, -0.0427217f, 0.08318f, -0.0924692f, -0.114117f, -0.162264f, 0.0396529f, -0.133909f, -0.0371591f, -0.0465814f, 0.0630274f, 0.0513138f, 0.00463356f, -0.128431f, -0.0327311f, 0.0126202f, 0.217387f, -0.0492987f, 0.115991f, 0.0920301f, 0.308363f, -0.0760752f, 0.0959534f, 0.100822f, 0.117107f, -0.0781828f, 0.0521731f, -0.0421392f, 0.0978752f, 0.141379f, 0.109865f, 0.085368f, 0.115113f, 0.0261236f, -0.0683196f, 0.148308f, -0.0708932f, 0.0368186f, 0.152533f, 0.158547f, 0.0459096f, 0.0306562f, -0.0423953f, -0.072179f, -0.0729912f, -0.00781077f, 0.0063416f, -0.0051019f, -0.0715874f, 0.25341f, -0.0529974f, -0.0128047f, -0.0833249f, 0.0805988f, -0.129613f, -0.226091f, 0.133378f, 0.00953771f, -0.114086f, 0.0497221f, 0.0144519f, -0.173494f, 0.0521217f, -0.0482086f, -0.0422902f, -0.0557038f, -0.0682033f, 0.0102125f, -0.14366f, -0.0697444f, 0.154547f, -0.0110372f, -0.0211736f, -0.150309f, -0.123191f, -0.0425382f, -0.171605f, 0.155415f, -0.166307f, 0.122377f, 0.0116144f, -0.0539998f, -0.0222417f, 0.115696f, -0.049952f, -0.098371f, -0.0495835f, -0.0182399f, -0.0209928f, -0.0737246f, -0.0128612f, -0.0902179f, 0.0468175f, -0.065977f, -0.0934368f, 0.132834f, 0.0453857f, -0.0353219f, -0.226671f, 0.0750195f, 0.114393f, 0.139013f, -0.160871f, 0.108138f, 0.0270014f, 0.0463732f, -0.0537463f, -0.049449f, 0.128639f, -0.0350334f, 0.00048251f, -0.0125652f, 0.0523853f, -0.016491f, 0.0324093f, 0.0703661f, -0.199708f, 0.147244f, -0.265639f, -0.15994f, -0.0965098f, -0.161525f, 0.105471f, 0.334023f, 0.0532776f, -0.0912264f, -0.0993632f, -0.0280201f, -0.0997241f, 0.0207262f, -0.279589f, 0.0620953f, -0.139224f, 0.201923f, 0.223502f, 0.0117948f, 0.0800099f, -0.0281361f, 0.119896f, -0.154101f, 0.0686489f, 0.23895f, 0.0565735f, -0.00390363f, -0.108806f, -0.0184156f, -0.0952719f, 0.0555698f, -0.0834902f, -0.0271915f, 0.134121f, -0.0685145f, -0.088725f, -0.0443093f, -0.0480688f, 0.0027292f, -0.0516291f, 0.0195303f, -0.0489191f, -0.107815f, -0.176361f, -0.00391101f, -0.147003f, 0.060434f, 0.170413f, 0.0561194f, -0.131237f, 0.0726091f, -0.0843326f, 0.117474f, -0.0263671f, -0.252128f, 0.134363f, 0.156991f, 0.194593f, 0.0275572f, 0.0985565f, -0.0493994f, -0.0366721f, -0.0850576f, 0.209169f, -0.212784f, -0.228355f, 0.232156f, -0.013431f, -0.0345989f, 0.0129247f, -0.132096f, 0.150248f, -0.0600087f, -0.0593894f, 0.0500004f, -0.0771591f, -0.101367f, -0.0637289f, -0.00658742f, 0.0595553f, -0.024883f, 0.14593f, 0.0796428f, -0.140004f, 0.0487683f, -0.0853453f, -0.00578926f, -0.152167f, -0.0790761f, -0.0115236f, 0.101415f, 0.0400251f, 0.0865559f, 0.041953f, 0.0578223f, -0.0704437f, 0.0992961f, -0.0750895f, 0.0499278f, -0.0472411f, 0.0641343f, 0.0212827f, -0.147518f, 0.179523f, -0.0217679f, -0.0477648f, 0.0591903f, 0.0359961f, 0.088601f, 0.00351872f, 0.0138916f, 0.0402664f, 0.177149f, 0.00824283f, -0.0556286f, 0.0552217f, 0.0923312f, 0.0869264f, 0.0234759f, 0.032284f, -0.121977f, 0.125437f, 0.0256046f, 0.0472181f, 0.137565f, -0.0639655f, 0.0270246f, 0.067476f, -0.0202386f, 0.104383f, -0.0717281f, 0.0806002f, -0.166602f, 0.0998034f, 0.000647092f, -0.160719f, 0.0497379f, 0.0384154f, -0.0633026f, -0.0520781f, 0.139229f, -0.00294187f, -0.0901295f, -0.0135821f, -0.0053321f, 0.0298723f, -0.126232f, 0.0875468f, -0.0429418f, -0.105753f, -0.0178799f, 0.0229084f, 0.0993183f, 0.0577022f, 0.122253f, 0.04647f, 
-0.0176805f, -0.146407f, -0.0709359f, 0.00450018f, 0.0375465f, -0.00241717f, 0.0293403f, -0.068674f, -0.049723f, 0.0559693f, 0.057845f, 0.063096f, 0.00327565f, -0.00592193f, -0.0914547f, -0.052915f, 0.011102f, -0.00567772f, 0.133686f, 0.0674825f, -0.00363402f, -0.0414556f, 0.0609897f, -0.149365f, 0.0478243f, -0.177468f, 0.118778f, -0.014799f, -0.0871947f, 0.0689947f, 0.155482f, -0.0369854f, 0.0987065f, -0.071845f, -0.116411f, 0.0738949f, 0.141181f, -0.0790999f, -0.0754749f, 0.02829f, -0.0185175f, 0.00333218f, -0.0469422f, 0.109825f, -0.0707927f, -0.0336781f, 0.143263f, -0.0310005f, 0.0320288f, 0.0763625f, 0.0599478f, 0.157965f, 0.041396f, -0.14055f, 0.0972761f, -0.044779f, -0.0373882f, -0.0289342f, 0.0622346f, -0.0699207f, 0.00331574f, -0.0269189f, 0.0158214f, -0.0408447f, -0.093728f, 0.0917196f, -0.029065f, -0.149718f, 0.190454f, -0.064957f, -0.0974734f, -0.0702975f, -0.220812f, -0.0354488f, -0.146935f, -0.0893557f, -0.0410884f, -0.00240213f, 0.0855496f, -0.0700753f, 0.0310124f, -0.0899561f, -0.00018987f, 0.0193527f, 0.128614f, -0.0112893f, -0.0371573f, -0.0869235f, 0.102819f, -0.0986784f, 0.0763631f, -0.149149f, 0.0356725f, -0.0216257f, -0.0770793f, -0.0353079f, -0.287739f, 0.00743026f, -0.0143154f, -0.0153983f, 0.00505462f, -0.0240134f, 0.143812f, 0.0749231f, -0.0155015f, -0.0709973f, 0.017508f, -0.0312257f, 0.00915957f, 0.0896561f, 0.0267885f, -0.0266156f, -0.121392f, 0.0479986f, 0.0118704f, -0.0859582f, -0.057174f, 0.0377036f, 0.0228706f, 0.0202221f, -0.0391891f, 0.0629417f, 0.0304549f, -0.0941607f, 0.0373528f, -0.0860005f, -0.0236795f, -0.0897805f, 0.057088f, -0.127161f, -0.0595527f, 0.0423498f, 0.0212371f, 0.121061f, -0.105472f, 0.0604746f, 0.0791208f, 0.0138513f, 0.0690477f, 0.0775052f, 0.176592f, 0.00122752f, 0.0764635f, -0.0231328f, 0.115145f, 0.0496446f, 0.119988f, -0.00777896f, 0.115816f, -0.0169815f, -0.00778283f, -0.0195645f, 0.00298207f, -0.0127201f, -0.0872951f, -0.0373492f, -0.0138868f, 0.0418982f, -0.0148116f, -0.0763153f, -0.175304f, 0.0548373f, -0.0797324f, -0.079444f, 0.0145185f, -0.0092995f, 0.055826f, 0.00844336f, 0.178536f, -0.107245f, -0.0581695f, -0.0678855f, 0.0633352f, -0.262325f, 0.085923f, -0.153607f, -0.00526622f, 0.00714791f, 0.0085191f, -0.248135f, 0.14321f, 0.167815f, 0.0552139f, -0.155006f, 0.129658f, -0.10089f, -0.0653027f, -0.086797f, -0.0440034f, 0.155333f, -0.11702f, -0.075842f, 0.129593f, 0.0716502f, 0.0162046f, 0.102726f, 0.0563099f, 0.149793f, 0.0607657f, -0.039657f, 0.106518f, -0.00802f, 0.101859f, 0.0467591f, -0.188312f, -0.101944f, 0.249702f, -0.138609f, -0.0485763f, -0.101048f, 0.170413f, -0.10571f, 0.102673f, -0.0252557f, -0.146038f, -0.0110023f, -0.00639719f, -0.196252f, -0.0473422f, 0.222497f, 0.208774f, -0.122519f, -0.0492664f, 0.138949f, -0.00175908f, -0.0346246f, -0.278625f, 0.0744329f, 0.119306f, 0.1148f, 0.125998f, -0.0779714f, -0.0531994f, 0.102464f, 0.131047f, 0.235741f, 0.0160704f, -0.253698f, -0.117303f, 0.104103f, 0.173462f, 0.0788535f, -0.126948f, 0.226542f, 0.00785182f, -0.138863f, -0.204345f, -0.148693f, 0.0393945f, -0.0589496f, 0.0476139f, -0.00891774f, 0.054528f, -0.0274388f, 0.259764f, -0.0184982f, 0.0464081f, -0.0727399f, 0.0228946f, -0.140925f, -0.250482f, -0.117357f, 0.00328573f, -0.067149f, -0.19396f, -0.271407f, 0.082063f, 0.289259f, -0.203952f, -0.0917052f, -0.154962f, -0.0851835f, 0.0841598f, 0.106935f, -0.0570404f, 0.0096849f, -0.072624f, 0.170382f, 0.0172678f, 0.08511f, -0.0488175f, 0.0709689f, 0.00488072f, 0.186835f, -0.200469f, -0.0672107f, -0.211064f, 0.0241919f, -0.140238f, -0.124912f, 0.0127984f, 0.00100562f, 
0.220982f, 0.0204989f, -0.0408498f, -0.0283247f, 0.0060206f, 0.134312f, -0.105003f, 0.0845336f, 0.0525455f, -0.130418f, -0.0201187f, -0.0716866f, -0.0582721f, 0.132725f, 0.0722155f, 0.141747f, 0.0205966f, 0.0967698f, -0.0254348f, -0.0275047f, -0.0538092f, -0.145773f, -0.112313f, -0.0790292f, 0.164409f, -0.171226f, -0.0330763f, -0.0433401f, 0.0137043f, -0.0226061f, -0.0752884f, -0.0118946f, -0.0606388f, 7.34772e-05f, 0.119924f, 0.13994f, 0.294623f, 0.0614132f, 0.267669f, 0.144075f, -0.026085f, -0.0386709f, -0.032511f, -0.0330909f, -0.110424f, -0.0559874f, -0.196786f, -0.0731073f, 0.103483f, 0.115845f, 0.20273f, -0.0435478f, 0.137333f, 0.0125484f, -0.035009f, -0.131347f, 0.185652f, 0.0915615f, 0.00988606f, -0.0423943f, -0.401112f, -0.168895f, 0.00194771f, -0.0320317f, 0.138003f, -0.00130969f, -0.0100481f, -0.0807856f, 0.0185314f, 0.0741704f, -0.0306238f, -0.0679718f, 0.0228838f, 0.229097f, 0.0531397f, -0.00957188f, -0.00225994f, 0.00525389f, 0.0218199f, 0.00731805f, -0.032041f, 0.121642f, 0.110984f, 0.057602f, 0.0138104f, -0.00488061f, -0.0492192f, 0.33436f, -0.0717361f, 0.0863138f, -0.0299386f, 0.000459265f, 0.0720175f, 0.0525023f, -0.142907f, -0.138931f, -0.128703f, 0.0366998f, 0.0837451f, -0.109776f, -0.0232616f, 0.170063f, 0.106598f, -0.126342f, -0.043239f, -0.0804324f, -0.110025f, -0.0771995f, 0.0981935f, 0.047013f, -0.0553584f, 0.0606691f, -0.074927f, -0.103423f, 0.028745f, -0.00984605f, -0.00385532f, 0.130327f, -0.0510355f, -0.0222256f, 0.122566f, 0.148499f, 0.100782f, 0.0574509f, -0.0515715f, 0.0438364f, -0.0380833f, -0.084709f, -0.131321f, -0.149418f, -0.0797491f, 0.316875f, -0.0321077f, 0.0150161f, 0.0376917f, 0.00989962f, -0.00131329f, 0.0673171f, 0.131532f, -0.045139f, 0.0319606f, 0.186066f, 0.136718f, 0.0906833f, 0.117223f, -0.06861f, -0.0319113f, 0.0400192f, 0.0378854f, 0.137974f, -0.0287174f, -0.009404f, -0.12745f, -0.0869295f, 0.114729f, -0.092402f, 0.00709146f, -0.0187893f, 0.0200674f, 0.0234794f, -0.0151676f, -0.184399f, 0.00469702f, -0.119847f, -0.0205459f, 0.0516747f, -0.0641841f, 0.118213f, -0.0523815f, 0.0979883f, 0.0207389f, -0.115183f, -0.228034f, -0.0137611f, 0.0681063f, 0.0702805f, 0.0274666f, 0.0534819f, -0.0292959f, -0.0329717f, -0.150681f, 0.205796f, -0.0144994f, 0.0324701f, 0.0102282f, -0.0277505f, -0.0934108f, -0.0984583f, 0.128595f, -0.0321262f, -0.0255906f, 0.0659542f, 0.0817577f, 0.117923f, 0.14978f, 0.12422f, -0.00923952f, -0.0540291f, -0.0321929f, 0.12542f, -0.195662f, 0.172576f, -0.00639092f, -0.136681f, -0.0330373f, -0.0103617f, -0.0148055f, 0.111327f, 0.096027f, -0.125357f, -0.118886f, -0.0144077f, -0.0523968f, 0.0992575f, 0.0141645f, 0.0687468f, -0.11347f, -0.0388204f, 0.0601631f, -0.0303265f, -0.012095f, 0.0319426f, -0.0165613f, 0.0750046f, 0.0288958f, 0.035477f, 0.00832511f, -0.118146f, -0.0589281f, -0.0528171f, 0.104375f, -0.0268535f, -0.0506696f, -0.134348f, 0.0186287f, -0.00206817f, -0.0630259f, 0.125283f, 0.203537f, -0.0299646f, -0.131177f, -0.0329357f, -0.0578531f, 0.131692f, -0.0460174f, -0.016195f, 0.122469f, -0.0405868f, 0.105545f, 0.0852513f, 0.0189091f, 0.175174f, -0.116372f, -0.0693871f, 0.125618f, 0.246107f, 0.0375574f, -0.0965322f, 0.0790914f, -0.162107f, 0.0220359f, -0.0780321f, -0.076183f, -0.120754f, 0.237783f, -0.170032f, 0.0326567f, 0.103191f, -0.0519771f, -0.0265962f, 0.011515f, -0.00909036f, -0.0143241f, -0.0307952f, 0.0112673f, -0.0250672f, -0.288445f, -0.0113077f, -0.166532f, 0.0247074f, 0.119729f, -0.245325f, 0.231557f, -0.274329f, 0.118456f, 0.0677861f, 0.135092f, 0.139018f, -0.0750762f, -0.00201163f, 0.0895766f, -0.00837478f, 
0.0604441f, 0.113694f, -0.118207f, 0.313374f, 0.0333207f, -0.126162f, -0.100909f, -0.0388675f, -0.123029f, -0.0443524f, -0.223742f, -0.00604542f, 0.205039f, -0.144927f, -0.110742f, 0.0335538f, 0.0948325f, -0.212891f, -0.187954f, -0.100213f, 0.0272423f, -0.0710597f, 0.0732789f, 0.0610045f, 0.108682f, 0.122772f, 0.00643831f, 0.247717f, -0.0546628f, 0.0696219f, -0.023871f, 0.0836353f, 0.208345f, 0.0168878f, 0.18471f, 0.175703f, -0.0418388f, 0.0904419f, 0.0375463f, -0.00183775f, 0.0188257f, 0.0381886f, 0.00642325f, 0.0504449f, -0.0873899f, 0.0108448f, -0.112058f, -0.09519f, -0.0175302f, -0.221398f, -0.101487f, 0.100871f, 0.0957263f, -0.321578f, -0.119312f, -0.0510102f, -0.142558f, 0.110062f, -0.151288f, 0.0119669f, 0.166802f, 0.0767755f, 0.0762053f, 0.0301646f, -0.18271f, -0.100106f, -0.255979f, 0.135908f, -0.0653769f, -0.0125916f, 0.0761682f, 0.0373979f, 0.0284757f, 0.0401612f, -0.0855745f, -0.101468f, 0.0163791f, -0.043152f, -0.0121247f, 0.0513979f, -0.0187096f, 0.0229504f, -0.0662335f, 0.147263f, 0.0651022f, -0.0916602f, 0.112749f, -0.0772877f, -0.0888211f, -0.00696278f, -0.189441f, 0.00206075f, -0.114579f, -0.0472339f, -0.178951f, 0.235627f, 0.10784f, 0.0884853f, -0.0329543f, -0.0216831f, 0.0170108f, -0.0582196f, -0.0871027f, 0.0534678f, 0.0201976f, -0.0487449f, 0.105007f, 0.121275f, 0.0922554f, -0.154662f, -0.0702815f, -0.0623452f, -0.147625f, 0.144134f, -0.0461159f, 0.0983194f, 0.0545413f, -0.10015f, 0.175962f, 0.111006f, -0.109084f, 0.10717f, 0.0350171f, 0.0058561f, 0.086362f, 0.0294488f, -0.0417202f, -0.100819f, 0.0810403f, 0.0223905f, -0.0956501f, -0.018441f, -0.0939113f, 0.00128937f, 0.00569654f, 0.0883394f, 0.0149627f, 0.102346f, -0.0852892f, -0.0482466f, -0.00277497f, -0.0492442f, -0.109828f, -0.0437587f, -0.0260116f, 0.0801255f, -0.111308f, -0.0749189f, -0.0947581f, -0.0735604f, -0.191144f, 0.290969f, 0.0845039f, 0.0529046f, 0.0430621f, -0.054001f, 0.118409f, 0.056695f, 0.0548862f, -0.0524673f, 0.0912314f, 0.347554f, 0.132448f, -0.134279f, 0.00853797f, -0.128478f, -0.0180606f, -0.0967837f, -0.0394576f, 0.157508f, 0.0478013f, -0.0828822f, 0.0803439f, 0.0835446f, -0.00532606f, -0.0175792f, 0.0103274f, -0.123493f, 0.0581489f, -0.12862f, -0.207775f, 0.062414f, 0.0818249f, 0.0959275f, -0.0316041f, 0.0858107f, -0.0551252f, -0.0490641f, 0.139381f, 0.0146474f, 0.1039f, -0.218854f, 0.0272468f, 0.0506245f, 0.00464062f, -0.0846287f, 0.0728544f, 0.318067f, -0.311513f, -0.204957f, -0.0389929f, 0.0102801f, -0.219184f, -0.111264f, 0.154059f, -0.0476869f, -0.0750924f, 0.071651f, -0.130862f, 0.144135f, 0.019162f, 0.00512851f, -0.00827058f, 0.126799f, -0.0106238f, 0.0203421f, 0.267873f, 0.0374636f, -0.103828f, 0.00471261f, -0.0285048f, 0.0355229f, -0.0706798f, -0.0831381f, 0.150404f, -0.0214477f, -0.170757f, -0.224895f, -0.182033f, -0.112039f, 0.0450631f, -0.182452f, 0.152831f, 0.199954f, 0.00159238f, 0.0302488f, 0.10697f, 0.0774123f, -0.17725f, 0.0647174f, 0.134133f, 0.102763f, 0.0891834f, -0.121965f, 0.00378413f, 0.0208035f, -0.183268f, -0.0820034f, -0.00936298f, 0.621632f, -0.0832426f, 0.184116f, -0.250643f, 0.159862f, -0.0418621f, 0.0409018f, -0.0271931f, 0.162438f, 0.0627052f, -0.0314378f, 0.0184759f, -0.147638f, 0.159765f, 0.0493645f, 0.0385918f, 0.127691f, 0.115073f, -0.1988f, -0.212631f, -0.121627f, 0.0436398f, 0.0574111f, 0.0342922f, 0.0594333f, -0.0848946f, -0.118027f, -0.0679305f, -0.110688f, -0.118819f, -0.00404798f, 0.365826f, 0.126281f, 0.163799f, -0.155878f, -0.0198013f, 0.0732958f, 0.025033f, 0.00878617f, -0.0288794f, -0.0340888f, -0.0700362f, 0.135505f, 0.200345f, -0.27182f, 
0.0421049f, 0.154914f, 0.322302f, -0.0355231f, 0.0250193f, -0.00381215f, 0.0130464f, 0.0894829f, -0.111475f, 0.000428428f, -0.131001f, 0.0586491f, 0.122083f, -0.0493182f, 0.188195f, 0.196667f, -0.303141f, -0.157354f, 0.118368f, -0.117998f, -0.114425f, 0.155291f, 0.0385336f, 0.197487f, -0.169868f, 0.246375f, -0.119066f, -0.0294555f, 0.00219155f, -0.126549f, 0.0880655f, 0.0644223f, 0.139199f, 0.0960026f, -0.0913918f, 0.0321977f, -0.0326568f, 0.132348f, -0.004017f, 0.143387f, -0.141734f, -0.0449425f, -0.0333623f, 0.125576f, -0.0995561f, -0.0103207f, -0.00238451f, 0.080405f, -0.269285f, 0.169342f, -0.0635095f, 0.263021f, -0.058456f, 0.0429495f, 0.120703f, 0.128975f, -0.233373f, 0.0719359f, 0.0992526f, 0.0598764f, 0.213152f, -0.139591f, 0.0111714f, 0.0244643f, -0.0729422f, -0.011408f, 0.130125f, -0.190623f, -0.03431f, 0.0630346f, -0.10769f, 0.0557043f, -0.0566449f, -0.0250544f, -0.022595f, -0.0427255f, 0.115632f, 0.11945f, 0.0430716f, -0.00753059f, -0.0264045f, 0.0189508f, -0.0368513f, 0.0594046f, 0.0847702f, 0.0520682f, -0.292042f, -0.0817949f, 0.114707f, 0.0144646f, 0.0496192f, 0.162581f, -0.000967547f, -0.160933f, -0.0640755f, -0.0227151f, -0.163904f, 0.03495f, -0.0516485f, 0.131296f, -0.0555282f, 0.04541f, -0.203044f, -0.0677995f, -0.0282836f, -0.0157647f, 0.0274509f, -0.00323077f, 0.0166242f, 0.0142531f, -0.0608151f, -0.0155284f, 0.00388386f, 0.0799918f, 0.00890975f, -0.0173836f, -0.0455636f, -0.136733f, 0.0276027f, 0.0437832f, 0.0316663f, 0.0881703f, 0.0465492f, 0.0519603f, -0.193578f, -0.0496555f, 0.0931399f, -0.0317259f, 0.0215178f, 0.024087f, 0.0543716f, 0.148835f, -0.0451403f, -0.130573f, -0.141463f, 0.251243f, -0.26975f, -0.045159f, -0.103599f, -0.108257f, 0.0120926f, 0.0162302f, 0.208273f, -0.117997f, -0.10228f, 0.110643f, 0.0297101f, -0.122294f, -0.139888f, -0.056092f, 0.0302582f, -0.00741781f, 0.203093f, 0.143503f, -0.0837938f, -0.0116276f, -0.00653999f, -0.133326f, 0.0569109f, -0.0812485f, 0.0351529f, -0.0447137f, -0.0853242f, -0.0430114f, -0.120941f, -0.0207683f, -0.130794f, -0.0681127f, -0.062079f, -0.0473481f, -0.0590327f, -0.0690426f, 0.116073f, -0.156337f, -0.0698394f, 0.0278422f, -0.186195f, 0.0850451f, 0.11294f, 0.145819f, -0.0959118f, -0.12598f, -0.0695861f, 0.216485f, 0.1839f, -0.243558f, -0.118465f, -0.106648f, 0.127068f, -0.127256f, 0.0653781f, -0.0455378f, -0.0581912f, 0.145361f, -0.0866306f, -0.142291f, 0.0526277f, 0.00160335f, -0.0599653f, 0.0961954f, 0.118676f, 0.0253811f, 0.0132581f, -0.112909f, 0.0391463f, 0.0684525f, 0.0949133f, -0.112881f, 0.079263f, -0.0703176f, -0.0694554f, -0.167346f, 0.0924824f, -0.0405912f, 0.0235699f, -0.0701803f, -0.0826208f, -0.0202728f, 0.0895154f, 0.0166184f, -0.254056f, 0.123371f, -0.117096f, -0.0597966f, 0.0675939f, 0.127635f, -0.0702422f, -0.0660869f, 0.0327793f, 0.117506f, 0.153872f, -0.021569f, 0.0607402f, -0.0355423f, 0.000446082f, -0.124814f, -0.0243035f, -0.13999f, -0.185608f, -0.0244495f, 0.165696f, -0.0417009f, -0.0462815f, -0.171717f, 0.0540185f, 0.105983f, 0.00400495f, 0.0617924f, 0.0953484f, -0.119981f, -0.0478153f, 0.124151f, -0.0580362f, -0.251137f, -0.0670799f, -0.0207918f, -0.0895491f, 0.0549684f, -0.101297f, 0.103393f, 0.16728f, 0.11157f, 0.0276304f, 0.0539408f, 0.0171824f, 0.135427f, -0.089379f, 0.00543987f, 0.00557666f, 0.189756f, 0.0213836f, -0.0246621f, 0.0809472f, -0.0340424f, 0.0491174f, -0.17131f, -0.197256f, -0.0410798f, 0.0300607f, -0.0763111f, -0.0790089f, 0.0441864f, 0.00250824f, 0.0679835f, -0.0521625f, -0.0488477f, 0.0338113f, 0.121874f, -0.284957f, -0.00764361f, 0.0874511f, 0.0209528f, -0.0541673f, 
-0.0646459f, 0.0933228f, -0.0609366f, 0.0359632f, -0.133078f, 0.115852f, -0.152472f, 0.0245674f, 0.0947455f, 0.168584f, -0.12835f, 0.196528f, 0.00736223f, 0.0236253f, -0.107442f, 0.0193074f, 0.00405551f, -0.188684f, -0.0705586f, -0.171386f, -0.0724126f, -0.127796f, -0.039175f, -0.139119f, -0.0190728f, 0.015776f, 0.0342653f, -0.122628f, -0.259812f, 0.0672887f, 0.134135f, 0.0262439f, 0.11908f, 0.0386907f, -0.027765f, 0.0688058f, 0.118998f, -0.0255169f, 0.0742543f, 0.0118295f, -0.0481748f, 0.0443727f, -0.0556238f, 0.0925499f, 0.156088f, 0.10911f, -0.0700683f, 0.0239504f, -0.0554307f, 0.380625f, 0.114306f, 0.171927f, -0.00470277f, 0.172147f, -0.163248f, 0.171447f, -0.0392949f, 0.20687f, -0.0467468f, -0.0364172f, -0.0607831f, -0.117176f, 0.0241643f, -0.122143f, 0.0331288f, 0.0445563f, 0.0415315f, 0.136307f, -0.145522f, 0.199695f, -0.0969085f, -0.12511f, -0.0965969f, -0.216429f, -0.0556251f, 0.0581658f, 0.239878f, 0.0503737f, -0.0581753f, 0.0964971f, 0.211981f, -0.091845f, -0.0141091f, 0.123538f, -0.0224233f, 0.09202f, -0.0208294f, 0.023048f, -0.168299f, 0.145559f, 0.085805f, -0.107464f, -0.0151204f, -0.0682115f, -0.110997f, 0.139676f, -0.198148f, -0.0244348f, 0.125171f, -0.178802f, -0.00394587f, 0.0693092f, 0.0744418f, 0.00431382f, -0.0236217f, -0.084621f, 0.0533957f, 0.197542f, -0.0397467f, 0.0119584f, -0.133915f, -0.00463094f, 0.0726216f, 0.116142f, 0.0927343f, -0.166937f, 0.0648558f, 0.117658f, 0.149842f, -0.0162273f, -0.0192975f, 0.115933f, 0.0554775f, 0.0364003f, -0.201476f, -0.222112f, -0.135981f, 0.0539566f, 0.146376f, -0.0651559f, 0.0200376f, 0.0703236f, -0.0165641f, -0.165693f, 0.0934821f, -0.0559198f, 0.0114938f, -0.0087794f, 0.0343226f, -0.0059043f, 0.0592818f, -0.121724f, 0.0488199f, 0.0149378f, 0.206129f, 0.187237f, -0.125548f, -0.031094f, 0.0835973f, 0.173406f, 0.3418f, -0.124081f, 0.0816775f, -0.109981f, -0.358008f, 0.133712f, -0.067144f, 0.203191f, -0.0457458f, -0.115089f, -0.170217f, -0.157483f, -0.0262286f, 0.0910993f, 0.0756705f, -0.131403f, -0.0598638f, -0.109005f, 0.216911f, 0.0832318f, -0.0175269f, 0.12017f, 0.305358f, 0.103844f, -0.026503f, -0.181148f, -0.0127285f, -0.0132412f, -0.00162869f, 0.0207631f, 0.202382f, -0.11838f, 0.0787106f, 0.0310042f, 0.0802855f, -0.135553f, -0.00581206f, -0.130546f, 0.14268f, -0.175324f, -0.00351506f, 0.0260055f, 0.0905363f, -0.0829958f, 0.10293f, 0.140962f, -0.138289f, -0.0337323f, -0.0941957f, -0.0287273f, -0.0890916f, 0.104141f, 0.163246f, -0.0345933f, -0.017931f, -0.0575784f, -0.11534f, -0.0218979f, 0.267272f, -0.0320888f, 0.115516f, -0.118214f, 0.0576934f, -0.00534889f, -0.11597f, -0.053527f, 0.112966f, 0.151295f, 0.0399955f, -0.254703f, -0.0661813f, 0.0729021f, -0.0553189f, 0.00701938f, -0.189838f, 0.0651111f, 0.134032f, -0.12525f, 0.24228f, 0.141132f, -0.0260896f, -0.070044f, -0.0526634f, -0.207561f, -0.0676873f, -0.143111f, -0.204514f, 0.0983934f, -0.0906274f, 0.0547451f, 0.0981515f, -0.103553f, -0.0185959f, 0.0756669f, 0.0207471f, -0.129061f, 0.156464f, -0.141991f, 0.109558f, -0.0627573f, -0.0641959f, -0.00789138f, -0.0107114f, -0.0115048f, -0.046329f, -0.224054f, -0.137206f, 0.0859283f, 0.131616f, 0.2777f, 0.055557f, -0.0708088f, -0.0538557f, 0.0987677f, -0.142932f, -0.0848473f, 0.0576767f, 0.112043f, -0.0689157f, 0.0540192f, -0.0624084f, -0.0948496f, 0.0586048f, 0.334999f, 0.0472563f, 0.0337087f, 0.0295298f, 0.156338f, -0.0337418f, 0.0578684f, -0.132013f, -0.0491207f, -0.000238756f, 0.193152f, 0.181843f, 0.10289f, 0.0659528f, 0.00414669f, -0.0721822f, 0.0942316f, -0.118434f, -0.0940982f, -0.0463031f, 0.0416631f, -0.148412f, 
-0.0393706f, 0.0349973f, 0.0294582f, -0.040009f, -0.00473275f, 0.136518f, 0.0550601f, 0.0140333f, -0.128485f, -0.0415799f, -0.0654132f, -0.064578f, 0.171196f, 0.111095f, -0.105509f, -0.136925f, -0.184257f, -0.156951f, 0.228061f, 0.114534f, -0.0119688f, 0.0530868f, -0.0197708f, -0.134919f, -0.220247f, -0.025252f, -0.22761f, 0.0414286f, 0.124117f, 0.0630268f, -0.0562616f, 0.225075f, -0.0493476f, 0.142481f, 0.0903304f, -0.10004f, -0.0811495f, -0.0299285f, -0.110955f, -0.0512479f, -0.143554f, 0.00671361f, -0.0443956f, 0.221817f, -0.00278022f, 0.0954209f, -0.00390204f, 0.0179967f, -0.162746f, 0.0567472f, -0.0244207f, -0.148689f, 0.095182f, 0.0172504f, 0.0152493f, 0.0553119f, 0.00691494f, 0.178258f, 0.0219367f, 0.153237f, 0.0664088f, -0.0316592f, -0.0157545f, -0.113491f, -0.234241f, -0.138673f, 0.026283f, -0.0553677f, 0.100685f, 0.0179657f, -0.00613625f, 0.12503f, -0.113112f, 0.00141486f, 0.0237294f, 0.151618f, -0.0547273f, -0.00264617f, 0.144756f, 0.0561577f, 0.0441063f, 0.104541f, -0.00823987f, 0.0346045f, -0.065161f, 0.245564f, -0.0426414f, -0.030484f, 0.249241f, 0.0692827f, 0.0610749f, -0.121067f, -0.153006f, -0.108968f, -0.131698f, 0.132474f, 0.0769292f, -0.0621536f, 0.0379712f, 0.142774f, -0.128423f, 0.107447f, 0.14869f, 0.212285f, 0.116663f, 0.0406976f, -0.114244f, -0.139742f, 0.0992123f, 0.106052f, 0.0236159f, 0.0532751f, 0.0722184f, 0.101856f, -0.0568198f, -0.0203083f, -0.249563f, 0.0314278f, 0.24554f, 0.192253f, 0.105292f, -0.176553f, 0.0182953f, -0.162484f, -0.0159771f, 0.248126f, -0.201752f, -0.0597972f, 0.163662f, 0.0359592f, -0.0370592f, 0.0493369f, 0.062471f, 0.0293029f, -0.192226f, -0.155816f, -0.0173566f, -0.0501594f, 0.333196f, -0.0302933f, -0.0662921f, 0.0321787f, -0.175027f, 0.0637224f, 0.0609763f, -0.137408f, -0.0497158f, 0.0256529f, 0.115951f, -0.0715037f, 0.0456869f, 0.170257f, -0.102079f, -0.0797223f, -0.0115923f, -0.028239f, -0.0489953f, 0.0308667f, -0.0392215f, 0.0091498f, 0.119341f, -0.0271525f, 0.0952384f, 0.0975866f, -0.122738f, -0.0201918f, -0.136831f, -0.168559f, -0.0655316f, 0.050682f, -0.116124f, -0.127519f, -0.0353531f, 0.0662269f, -0.0671971f, 0.0249307f, -0.0822884f, 0.0464896f, -0.112299f, 0.129869f, 0.00180486f, -0.216736f, -0.151765f, -0.0296738f, 0.0305862f, 0.100682f, 0.0411443f, -0.0171006f, 0.00587758f, -0.146638f, 0.00855318f, 0.033708f, -0.116999f, -0.0514464f, -0.140214f, -0.0964713f, -0.0269176f, -0.101548f, -0.283636f, 0.140532f, 0.0858806f, -0.230783f, -0.0286378f, -0.106056f, -0.126099f, -0.256643f, 0.119343f, -0.0545925f, -0.0159382f, -0.029783f, 0.0701516f, -0.0498638f, 0.105904f, -0.0602673f, 0.0725477f, 0.0725609f, -0.152357f, -0.085373f, 0.218802f, 0.173298f, 0.0803967f, -0.167979f, -0.162678f, 0.0305768f, -0.164647f, 0.0215681f, 0.000463574f, 0.270715f, -0.00295866f, -0.114551f, 0.0584949f, 0.111544f, 0.123542f, 0.0484252f, -0.23995f, 0.0917101f, 0.152924f, -0.114919f, -0.0429841f, 0.026999f, 0.0856462f, 0.0115069f, -0.0426149f, -0.0464711f, 0.01034f, 0.238899f, 0.0577966f, 0.21144f, 0.0991494f, -0.00483824f, 0.0567827f, -0.154261f, 0.130994f, 0.102288f, -0.0678653f, -0.0706912f, -0.210289f, -0.118426f, -0.0343207f, -0.154916f, 0.161049f, -0.0478522f, 0.102661f, 0.0108906f, -0.103758f, 0.0390385f, 0.174271f, 0.109843f, 0.00671668f, -0.0135555f, -0.206802f, 0.107195f, 0.0661205f, -0.127171f, 0.103611f, -0.0224681f, -0.134187f, 0.0901285f, -0.197876f, -0.0520321f, 0.14459f, -0.118563f, 0.0252278f, -0.139859f, 0.07151f, 0.0289102f, -0.163204f, -0.0518494f, -0.181234f, -0.00925548f, 0.2346f, -0.152022f, 0.139208f, 0.00444786f, -0.0317226f, 
-0.157432f, -0.0528321f, -0.0208838f, -0.0255997f, 0.0842752f, 0.0203938f, -0.0684608f, -0.16607f, -0.224833f, 0.0663524f, 0.353126f, 0.179083f, 0.0404662f, -0.130839f, -0.128233f, -0.147074f, -0.00700458f, -0.0141646f, 0.20008f, 0.0419057f, 0.103872f, -0.151002f, 0.040467f, 0.114995f, 0.0836601f, -0.00566969f, -0.115449f, -0.0117605f, 0.144741f, 0.0804304f, 0.0600017f, 0.0769862f, -0.0668935f, -0.117889f, 0.0407159f, -0.0519966f, 0.0453245f, 0.0232327f, -0.0353576f, 0.0974864f, -0.114842f, 0.107827f, -0.0124007f, -0.116903f, 0.0017538f, -0.192595f, -0.0845089f, -0.145635f, 0.0625003f, 0.130272f, -0.0813601f, -0.000628703f, 0.064344f, 0.117979f, 0.0748343f, -0.0425438f, -0.0745272f, -0.0356093f, 0.0961124f, 0.247422f, -0.0111715f, 0.0639781f, -0.00396973f, -0.0835036f, -0.0467534f, -0.173479f, -0.191194f, -0.0682115f, -0.181376f, 0.0291904f, -0.106075f, 0.0924545f, 0.13188f, -0.204534f, -0.0582763f, -0.063006f, -0.0191295f, 0.118408f, -0.155103f, -0.208561f, -0.00257655f, -0.0278397f, -0.0349798f, 0.193708f, 0.206104f, 0.238367f, -0.0511592f, -0.0259264f, -0.0512514f, -0.140731f, -0.140718f, 0.0625166f, 0.0154084f, 0.0463342f, 0.0331415f, -0.0151346f, 0.0538995f, -0.0521382f, -0.0901504f, 0.323236f, -0.127595f, 0.158692f, 0.165618f, 0.0321485f, -0.235276f, -0.0467389f, -0.0478533f, 0.0267783f, -0.0112504f, 0.12955f, -0.131759f, 0.0250798f, 0.0297737f, -0.123351f, -0.0462916f, -0.0601691f, -0.169514f, 0.0768223f, -0.172547f, 0.00863833f, -0.132059f, 0.109882f, -0.0541994f, 0.108905f, -0.0221439f, 0.136297f, -0.0268997f, 0.150023f, -0.0864496f, 0.025512f, 0.148027f, 0.139395f, 0.122416f, 0.249048f, 0.0819006f, -0.017772f, -0.125767f, -0.016484f, 0.0395336f, -0.0548037f, -0.223122f, -0.166761f, -0.0757889f, -0.0226887f, -0.105259f, -0.0352583f, -0.0432748f, -0.0944241f, 0.0690278f, -0.0961888f, 0.129761f, -0.165683f, -0.0763722f, -0.0451281f, 0.217425f, 0.0696649f, 0.0708892f, 0.0798329f, 0.0748843f, 0.0769466f, -0.0871541f, 0.139047f, -0.0325753f, -0.0597439f, -0.0811437f, -0.0832682f, -0.239208f, -0.0524307f, -0.0291281f, -0.0396967f, -0.167074f, 0.0869051f, -0.160389f, 0.0236742f, -0.0792648f, 0.172199f, 0.258201f, -0.109859f, -0.00524742f, -0.0147915f, -0.0684649f, 0.085636f, 0.0422699f, -0.0719259f, -0.0400511f, 0.110565f, -0.0275946f, 0.173568f, -0.0434963f, 0.231463f, 0.029332f, 0.0503372f, 0.17586f, 0.0107855f, 0.0701134f, 0.0489747f, -0.0516045f, -0.169565f, 0.187467f, -0.0587943f, -0.258745f, -0.0295657f, -0.171497f, 0.089733f, 0.123308f, -0.0844785f, 0.106371f, -0.00838618f, 0.0526407f, -0.240864f, 0.0891924f, -0.116663f, -0.103753f, 0.153124f, 0.081109f, -0.0454884f, -0.0369505f, -0.0974666f, 0.161862f, -0.15868f, -0.0577927f, -0.148928f, 0.0614618f, -0.129369f, -0.0587742f, -0.0393234f, -0.00244397f, 0.307357f, -0.101969f, 0.126875f, 0.0187606f, -0.0560896f, 0.0477976f, -0.272754f, 0.0133458f, 0.00948088f, 0.0818334f, -0.0158353f, 0.0495313f, 0.167124f, -0.229534f, -0.0461687f, -0.117349f, -0.00241059f, -0.0124886f, 0.067986f, 0.0263838f, 0.0843775f, -0.0437962f, -0.0732725f, 0.081337f, -0.0575028f, -0.176248f, 0.0246008f, -0.00573236f, 0.0340705f, -0.114427f, 0.136732f, 0.160702f, 0.0235913f, 0.0148508f, 0.147296f, -0.220212f, -0.0690821f, -0.018742f, -0.0814082f, 0.000462293f, -0.260802f, -0.106888f, -0.0567304f, 0.0586716f, -0.0342567f, 0.0584384f, 0.0444304f, 0.0114965f, -0.0701725f, 0.0600115f, 0.0741278f, 0.0735552f, 0.143475f, 0.115074f, 0.0681775f, 0.0776265f, -0.0174075f, 0.0840016f, 0.0752029f, 0.0179655f, 0.026884f, 0.182286f, -0.114825f, -0.066792f, 0.0889464f, 
0.0858338f, -0.0113572f, -0.0881827f, 0.0822213f, 0.149808f, -0.0455913f, -0.0176346f, 0.0940022f, -0.0147701f, -0.144023f, 0.0855528f, -0.159248f, -0.124493f, 0.104408f, 0.0140008f, 0.0675851f, -0.0766063f, -0.116592f, 0.00655453f, -0.0886846f, 0.0209535f, -0.0210895f, 0.186624f, -0.0244961f, 0.0234838f, -0.224046f, -0.0801416f, 0.027588f, -0.0312838f, -0.10964f, -0.210306f, -0.0155024f, -0.168403f, -0.0946913f, 0.218398f, -0.0739848f, -0.00671045f, -0.0673331f, -0.106194f, -0.101885f, 0.206145f, -0.0444965f, 0.0931645f, -0.0368602f, -0.196886f, -0.130361f, 0.243789f, -0.00487948f, 0.198048f, -0.134132f, -0.125914f, -0.119715f, -0.0176049f, -0.388635f, -0.0317848f, 0.136989f, 0.0218202f, 0.121081f, 0.159851f, -0.0394718f, 0.0907094f, -0.150073f, 0.0709025f, 0.0719346f, -0.0621058f, -0.1778f, 0.189268f, 0.066535f, 0.0966753f, 0.125545f, 0.0455094f, -0.0985138f, 0.00208439f, -0.00763695f, 0.00639482f, 0.00322567f, 0.0865544f, 0.140985f, -0.0244202f, -0.0112032f, 0.0269489f, 0.0425887f, -0.0346553f, -0.117193f, 0.214534f, 0.0251244f, -0.0472421f, -0.0116764f, 0.213914f, -0.227229f, 0.1367f, -0.191525f, -0.077311f, -0.025126f, -0.146094f, -0.213257f, 0.016354f, 0.116038f, -0.0756774f, -0.112287f, -0.0956394f, 0.181322f, -0.084329f, 0.0831649f, -0.124323f, -0.0744675f, 0.0734857f, 0.0309128f, -0.151217f, 0.0040248f, -0.0974644f, 0.0475116f, -0.00806207f, 0.0677711f, 0.000488559f, 0.31419f, 0.0527593f, 0.112494f, 0.162696f, -0.0929699f, -0.0472898f, -0.0577593f, 0.0723963f, 0.117685f, 0.22069f, -0.0677903f, 0.0246052f, 0.0048672f, 0.0832389f, 0.00378989f, 0.179081f, -0.00879816f, 0.0225637f, 0.0158046f, 0.143528f, 0.0584804f, 0.0262781f, -0.00846017f, -0.104611f, -0.0221982f, 0.0252035f, 0.112715f, -0.199726f, 0.0350441f, -0.0686712f, 0.0434019f, -0.151195f, 0.0837621f, -0.130202f, -0.000811349f, -0.103073f, -0.0403161f, -0.0594069f, -0.0877509f, -0.00590993f, 0.0231722f, -0.121088f, -0.391103f, -0.019431f, 0.0817602f, 0.0527755f, 0.0992755f, 0.161425f, 0.0168443f, -0.0854238f, 0.10592f, -0.0542574f, -0.0479623f, -0.0679175f, -0.0501829f, -0.180061f, 0.0489264f, 0.126825f, 0.0269393f, 0.0674842f, 0.261283f, -0.113903f, 0.0516758f, -0.0185179f, 0.0888017f, -0.000139483f, 0.0807915f, -0.0108554f, -0.109691f, -0.0669186f, 0.121351f, 0.0566793f, -0.0167819f, 0.0822138f, 0.0704941f, -0.123956f, 0.0560745f, 0.19168f, -0.0295509f, 0.0395484f, 0.0906514f, 0.0670029f, -0.0354302f, 0.0351355f, 0.0676405f, -0.138673f, 0.0776129f, -0.0398722f, 0.0820865f, 0.0506494f, -0.0799403f, -0.0933255f, -0.0256913f, 0.0692704f, -0.132138f, 0.202244f, 0.151858f, 0.100586f, 0.115581f, -0.0117862f, 7.80844e-05f, 0.150684f, -0.0777769f, -0.0555746f, -0.209447f, -0.10494f, -0.0131148f, 0.0288863f, -0.0699032f, -0.0931896f, 0.0344133f, 0.00340128f, -0.0865655f, 0.130542f, 0.0761449f, 0.144283f, -0.0683641f, 0.0789137f, 0.0561416f, -0.097311f, 0.0548578f, 0.0422248f, 0.152139f, -0.0514651f, -0.0409513f, -0.0272965f, -0.0898351f, 0.0581419f, -0.0617364f, -0.0887362f, 0.119367f, -0.100116f, -0.176512f, 0.118884f, 0.0727275f, 0.0999158f, -0.00386374f, 0.0909081f, -0.0694219f, 0.0626571f, 0.0481447f, -0.0528535f, 0.0258354f, -0.0049836f, 0.0158656f, -0.0640526f, 0.00115454f, -0.00160752f, 0.045457f, -0.148332f, -0.168811f, -0.0513546f, -0.291317f, -0.00405431f, 0.0911846f, 0.0679518f, 0.0631274f, 0.160422f, 0.0543078f, -0.0356404f, -0.0199723f, -0.0491793f, 0.0300288f, -0.0123906f, 0.151868f, -0.0722189f, -0.0430919f, -0.0814115f, 0.0169131f, -0.0294944f, 0.111215f, -0.318497f, 0.13022f, -0.0527701f, 0.0636834f, -0.26041f, 
-0.0127658f, -0.0701126f, 0.00306886f, 0.21473f, -0.145459f, -0.079127f, 0.0994758f, -0.198761f, -0.00324043f, 0.106227f, -0.129003f, -0.206704f, 0.231621f, -0.160573f, 0.0855566f, 0.122829f, 0.0580241f, -0.0406969f, -0.0103319f, -0.220221f, 0.0424369f, 0.0871511f, 0.138163f, 0.194249f, 0.133906f, -0.0312908f, -0.086838f, -0.00202064f, -0.0939109f, 0.127476f, 0.00445663f, -0.0581692f, -0.155761f, 0.0404655f, 0.250563f, -0.00296771f, -0.00765122f, -0.0184977f, 0.0603397f, -0.0109015f, 0.141809f, -0.0170466f, 0.0246634f, -0.129598f, 0.0129825f, 0.0411581f, -0.122828f, -0.060903f, 0.0259857f, 0.13336f, 0.0596888f, -0.135078f, -0.0349223f, 0.0726095f, 0.0224667f, -0.247647f, -0.0856904f, 0.0555935f, 0.18741f, 0.133913f, -0.0126827f, 0.168418f, 0.13299f, 0.0292542f, 0.158248f, -0.0686903f, 0.00461184f, -0.148378f, 0.0430469f, 0.0463822f, 0.168026f, 0.00714021f, 0.0543366f, 0.0100729f, -0.0413269f, 0.0563973f, -0.0940585f, -0.0440974f, -0.216315f, -0.0652418f, 0.073364f, 0.285117f, -0.158906f, -0.232667f, 0.0734506f, 0.100076f, -0.021626f, -0.0475986f, 0.0766044f, -0.1133f, 0.031718f, 0.0114878f, 0.0387261f, -0.111513f, 0.251868f, -0.0321698f, -0.0163415f, -0.17881f, 0.0841402f, -0.120465f, 0.0686581f, -0.0289429f, -0.184975f, -0.136793f, 0.029196f, 0.224135f, 0.079326f, -0.0904605f, 0.115588f, 0.22912f, -0.0954724f, 0.155689f, 0.0461432f, -0.0328915f, 0.0100643f, 0.0483369f, -0.159618f, 0.131261f, 0.181069f, -0.111786f, 0.00678087f, 0.100649f, 0.16814f, 0.165055f, -0.0708342f, 0.114481f, -0.0747f, -0.114252f, -0.0769982f, -0.0460174f, 0.0728612f, -0.13306f, -0.0932621f, -0.019805f, 0.0200708f, -0.212277f, -0.138804f, 0.007889f, 0.328629f, -0.0594214f, 0.0403117f, -0.116803f, 0.109223f, 0.0604751f, -0.0515518f, 0.00232291f, 0.114706f, 0.227697f, 0.0966261f, -0.177347f, -0.134866f, -0.0814991f, 0.192626f, 0.0869204f, 0.0329994f, -0.162612f, -0.150023f, -0.0716221f, 0.259024f, 0.0160795f, 0.0360771f, 0.0459445f, 0.0688876f, 0.0756439f, -0.0865408f, 0.127239f, -0.0440397f, -0.011109f, 0.00759728f, 0.355613f, -0.0127272f, 0.151964f, 0.116613f, 0.0114587f, -0.128119f, -0.0993558f, -0.0549541f, -0.00706599f, 0.0289976f, 0.125323f, -0.053071f, 0.0550674f, -0.0443678f, -0.0726763f, -0.0886086f, -0.116334f, -0.0209192f, 0.063885f, 0.077968f, -0.104398f, -0.179266f, -0.194832f, -0.0639122f, -0.0518158f, 0.256032f, -0.108148f, -0.137127f, -0.104448f, 0.033398f, -0.0646876f, 0.0763196f, -0.0495609f, -0.100906f, -0.134571f, -0.161793f, 0.153519f, 0.149706f, -0.222311f, 0.0359835f, -0.00287598f, -0.160517f, 0.00736621f, 0.039841f, -0.000592701f, -0.056578f, 0.131453f, 0.0138361f, -0.134636f, -0.06908f, -0.0854272f, 0.0779619f, -0.0139243f, -0.0182492f, -0.0930189f, -0.00353997f, 0.0685898f, -0.0102112f, 0.0606511f, 0.063219f, -0.0267228f, 0.00053972f, -0.181391f, 0.0220468f, 0.202779f, 0.228042f, -0.191374f, -0.0386773f, -0.0833115f, 0.132745f, -0.0764369f, -0.0667909f, 0.0471235f, -0.0951605f, 0.0251998f, 0.109606f, -0.0551212f, 0.0423476f, -0.0914923f, 0.104139f, -0.166891f, -0.144794f, 0.00263478f, 0.0549706f, -0.0192015f, -0.094959f, -0.0470626f};
model->setOperandValue(op80, op80_init, sizeof(float) * 16384);
model->addOperation(ANEURALNETWORKS_CONV_2D, {op86, op2, op1, b87, b88, b89, b90}, {op0});
static float op83_init[] = {-0.0293549f, -0.0835053f, -0.228576f, -0.0980885f, -0.0370638f, -0.195879f, -0.0452258f, 0.0371641f, -0.0585841f, -0.0984154f, -0.141293f};
model->setOperandValue(op83, op83_init, sizeof(float) * 11);
static float op84_init[] = {0.0984852f, 0.672424f, 0.392549f, -0.262182f, 0.303914f, -0.0118188f, 0.027943f, 0.0164078f, 0.200583f, -0.808626f, 0.234772f, -0.0253635f, -0.198519f, -0.176824f, -0.580674f, 0.0681573f, -0.0134279f, 0.172173f, -0.284882f, -0.0895141f, 0.0142356f, -0.0479431f, 0.0736678f, 0.00298977f, 0.152355f, -0.0370715f, 0.463201f, 0.0146613f, 0.0971624f, -0.0791196f, 0.556621f, -0.00950762f, 0.0160531f, 0.091037f, 0.376353f, -0.0996081f, -0.0418334f, -0.427482f, -0.202679f, -0.197079f, 0.021873f, -0.105617f, 0.36447f, 0.389277f, 0.0429815f, 0.0480496f, -0.170086f, -0.191548f, -0.237921f, 0.155838f, -0.100796f, 0.0539355f, 0.103154f, 0.0441985f, -0.20672f, 0.358565f, -0.105794f, -0.635905f, 0.193301f, 0.112419f, -0.184668f, 0.157954f, -0.301095f, -0.153072f, -0.0535615f, -0.0661999f, -0.197056f, -0.0835003f, -0.074932f, -0.111766f, -0.356266f, 0.649165f, -0.0527003f, -0.0597135f, 0.109839f, -0.270809f, 0.0333183f, -0.211454f, 0.0594729f, -0.166949f, 0.21057f, 0.224925f, -0.222835f, -0.0178217f, 0.127268f, 0.229248f, 0.262987f, 0.0318244f, 0.293201f, -0.361254f, -0.0624992f, -0.0696259f, 0.0456531f, -0.0287401f, 0.0863351f, -0.106142f, 0.81137f, 0.305728f, 0.398482f, -0.0190696f, -0.133965f, -0.223757f, -0.153242f, -0.261303f, 0.111363f, -0.113733f, 0.0028724f, -0.0878969f, 0.0498853f, -0.000613516f, -0.0819123f, -0.0154599f, -0.0938842f, -0.108295f, 0.340323f, -0.139579f, -0.117066f, 0.145283f, -0.106254f, 0.201248f, -0.152479f, 0.162457f, -0.0751263f, 0.00127508f, -0.0218281f, 0.126278f, -0.100075f, 0.426783f, -0.108719f, 0.207569f, -0.327427f, 0.277309f, 0.0404061f, -0.334901f, 0.154047f, -0.287619f, 0.0161922f, -0.00054208f, -0.233675f, 0.564603f, 0.201628f, 0.0510375f, -0.16502f, -0.0155493f, -0.125359f, -0.0996153f, 0.0133961f, -0.492208f, 0.109118f, -0.136327f, 0.0252329f, 0.0556799f, -0.196804f, -0.0612012f, -0.0392273f, 0.133385f, 0.253763f, -0.208136f, -0.00507434f, -0.0584744f, 0.0855089f, -0.00321895f, -0.209376f, 0.0618401f, 0.0129248f, -0.130721f, -0.168413f, 0.122652f, 0.0927544f, -0.180775f, -0.0463842f, -0.626248f, -0.00596579f, 0.0822374f, -0.254325f, -0.361624f, 0.778701f, -0.0705549f, 0.40832f, 0.0932269f, 0.10348f, 0.258843f, -0.117135f, 0.131713f, -0.457018f, -0.364692f, 0.0741725f, 0.168267f, 0.0904773f, -0.333243f, 0.18358f, -0.0407786f, -0.0115824f, 0.304328f, 0.177285f, 0.206312f, -0.503914f, 0.310439f, 0.533919f, 0.0925376f, 0.449889f, -0.45417f, 0.89017f, -0.00580558f, 0.317744f, 0.0176692f, -0.0267303f, -0.0657997f, -0.333455f, -0.0895455f, -0.0203959f, -0.329956f, 0.0542947f, -0.03533f, 0.0496151f, 0.145015f, 0.135449f, -0.239986f, -0.442413f, -0.0922021f, 0.396803f, 0.0695849f, -0.00921835f, 0.405834f, 0.477558f, 0.08952f, 0.101425f, -0.0264703f, -0.124621f, 0.070554f, -0.101953f, 0.224768f, 0.021384f, 0.293433f, -0.297231f, 0.0841252f, 0.0290684f, -0.211267f, -0.116215f, 0.433678f, -0.626231f, -0.139838f, 0.0290375f, -0.24486f, 0.282119f, -0.486426f, -0.402424f, -0.561959f, -0.450933f, 0.0501238f, -0.194682f, -0.231145f, -0.210372f, -0.0802564f, -0.170723f, -0.248902f, -0.0122576f, 0.0776341f, 0.197615f, 0.094212f, 0.0318287f, -0.237544f, 0.135516f, -0.537321f, -0.0906906f, 0.172587f, 0.179816f, 0.0792088f, 0.354531f, 0.0801259f, 0.0145845f, -0.14874f, 0.0367363f, -0.0733148f, -0.125755f, -0.252037f, -0.101672f, -0.14809f, -0.188341f, -0.264003f, -0.201581f, -0.0605842f, 0.0142779f, -0.322517f, -0.130978f, 0.301363f, -0.276394f, 0.0248554f, -0.168732f, 0.158651f, 0.150037f, -0.0472578f, 0.241238f, -0.109832f, -0.500172f, -0.0574687f, 
0.143137f, 0.177313f, 0.0489008f, 0.24142f, -0.0742049f, -0.103464f, -0.0383113f, -0.0148622f, -0.101849f, 0.0425005f, 0.0543708f, 0.0710147f, 0.169901f, 0.304119f, 0.180413f, -0.330647f, -0.250029f, 0.0651902f, 0.173465f, -0.475872f, 0.393697f, 0.147345f, -0.00802343f, -0.0545821f, -0.119f, -0.0282713f, 0.0414947f, 0.0618215f, -0.132909f, 0.480818f, -0.124287f, -0.0484199f, -0.344362f, 0.071471f, 0.267047f, -0.279627f, -0.289336f, 0.0609794f, 0.339502f, -0.0956702f, -0.361749f, -0.0153208f, -0.102628f, 0.0936787f, -0.130392f, 0.348396f, 0.200636f, -0.249164f, -0.177583f, -0.0716032f, 0.118703f, 0.123365f, -0.0366422f, 0.231096f, 0.0022177f, 0.128202f, 0.222367f, -0.176409f, -0.153065f, -0.0287899f, -0.355792f, -0.543125f, 0.177245f, 0.116598f, 0.0451388f, -0.0286715f, -0.174033f, 0.476808f, 0.298325f, -0.0593149f, -0.0491401f, 0.0263619f, 0.0565123f, 0.0500395f, -0.40961f, -0.0481743f, -0.0744737f, -0.050528f, -0.428685f, -0.0457881f, -0.105794f, 0.0951161f, -0.299268f, -0.229566f, -0.206985f, -0.0780657f, -0.0322681f, 0.266195f, -0.0781984f, -0.598814f, -0.280207f, 0.0516518f, -0.0447187f, 0.0980521f, 0.0216666f, 0.038809f, 0.147272f, -0.357397f, 0.0504251f, 0.126596f, -0.0935991f, -0.142778f, 0.0864683f, -0.116768f, -0.164657f, -0.380078f, 0.00184015f, -0.0684899f, -0.134349f, 0.184285f, -0.281853f, -0.185581f, 0.347765f, 0.301739f, -0.17311f, -0.0586592f, -0.253355f, 0.135704f, -0.025141f, -0.398732f, 0.176819f, 0.164295f, -0.0964961f, 0.235867f, -0.162969f, -0.365092f, 0.0342f, 0.305977f, 0.192868f, -0.150942f, 0.132645f, 0.220341f, -0.158242f, -0.168888f, 0.103491f, -0.1672f, 0.0127892f, -0.0176947f, 0.230234f, -0.129157f, -0.319789f, -0.188887f, 0.469657f, 0.0599872f, 0.173128f, 0.207658f, -0.257826f, 0.422512f, 0.0304435f, -0.0700446f, 0.00292699f, -0.254277f, -0.0987592f, 0.0906241f, -0.234816f, 0.030083f, -0.00973596f, 0.120037f, -0.317601f, -0.12708f, 0.102184f, 0.0740557f, 0.191923f, 0.215419f, 0.090792f, -0.416807f, -0.211088f, -0.0667573f, -0.042666f, 0.00698668f, -0.187608f, 0.11397f, 0.0282127f, -0.0646227f, -0.0786383f, 0.338181f, -0.158486f, -0.0404435f, -0.148313f, 0.129857f, 0.036822f, 0.214085f, 0.0271965f, 0.0712011f, -0.0142654f, 0.21793f, -0.101845f, -0.0134659f, -0.386899f, -0.253225f, -0.201138f, -0.168f, -0.111886f, 0.149919f, -0.252716f, -0.312013f, -0.494531f, 0.20132f, 0.1455f, -0.0390248f, -0.2497f, 0.0187322f, 0.212352f, 0.176346f, -0.0186768f, -0.0587664f, 0.140535f, 0.130711f, -0.048937f, -0.0333832f, 0.146999f, -0.0536035f, -0.210655f, 0.277771f, 0.136683f, -0.458041f, 0.106529f, -0.152398f, -0.0336699f, 0.151721f, -0.0533765f, -0.168961f, 0.175815f, -0.24888f, 0.0907924f, -0.0133408f, 0.175644f, -0.0246879f, -0.00687254f, 0.185182f, -0.256385f, -0.163355f, -0.256984f, -0.315761f, -0.181824f, -0.0306672f, 0.152588f, -0.0713595f, -0.0721906f, -0.332328f, -0.322698f, -0.00929737f, 0.0818944f, 0.0742352f, -0.166805f, 0.0944738f, -0.167636f, 0.0871255f, 0.0792785f, 0.0354259f, 0.293364f, 0.215322f, 0.272799f, -0.0492312f, -0.269483f, -0.220346f, -0.0881883f, -0.105395f, 0.170322f, 0.0396378f, 0.0702321f, 0.0164758f, -0.0229642f, -0.120222f, -0.00534489f, 0.138123f, -0.141178f, 0.00600586f, 0.0114309f, 0.160046f, -0.0782422f, -0.221657f, -0.222359f, -0.0160572f, -0.0427344f, -0.0939852f, 0.19013f, 0.128755f, 0.0826387f, 0.0959137f, -0.121338f, 0.116419f, -0.0815084f, -0.148231f, -0.102396f, -0.302046f, -0.0136386f, 0.146457f, -0.273797f, -0.0766018f, 0.103427f, -0.0941844f, -0.236219f, -0.106905f, 0.188707f, -0.119065f, -0.109619f, -0.376718f, -0.250552f, 
-0.119213f, -0.0698239f, 0.0548951f, -0.0984231f, -0.274015f, 0.0116218f, -0.0560431f, -0.0176495f, 0.106143f, 0.191658f, -0.291245f, 0.198666f, -0.1415f, 0.121305f, 0.00787936f, -0.161106f, -0.0559996f, -0.025235f, -0.227444f, 0.124586f, 0.153714f, 0.0339968f, -0.0791643f, -0.204395f, -0.139891f, -0.136988f, -0.182275f, 0.059441f, -0.135392f, -0.0206536f, -0.177236f, -0.0461415f, 0.0707632f, 0.279827f, -0.00538458f, -0.0227107f, -0.0780397f, 0.0654234f, -0.00893195f, -0.111956f, -0.298613f, -0.35016f, 0.0515563f, -0.257037f, 0.139683f, -0.0568245f, -0.18912f, 0.054686f, 0.230304f, 0.0682762f, -0.104554f, -0.267018f, -0.00695182f, -0.42745f, -0.118246f, 0.240312f, -0.0283745f, -0.0410208f, -0.204045f, 0.0536799f, 0.158019f, -0.217282f, -0.255996f, -0.130733f, -0.0754242f, -0.205957f, -0.042236f, -0.237091f, -0.0547223f, 0.318243f, 0.114416f, -0.135642f, -0.0316242f, -0.347453f, 0.101281f, 0.012845f, -0.212307f, 0.135502f, -0.217902f, -0.0520036f, -0.169676f, 0.0155753f, -0.378887f, -0.120698f, 0.278682f, -0.208085f, 0.0188473f, -0.167479f, 0.3823f, -0.262327f, 0.0653896f, 0.0837105f, -0.175588f, -0.172008f, 0.279217f, 0.109674f, -0.0610411f, -0.261709f, -0.12329f, -0.214598f, 0.0449085f, 0.0995378f, 0.123743f, -0.20637f, 0.0336271f, 0.179009f, -0.103686f, -0.0319235f, 0.0991055f, -0.15149f, 0.11167f, -0.0458526f, -0.216373f, 0.0944096f, 0.257391f, -0.138348f, -0.0792016f, 0.236858f, -0.177544f, 0.00179313f, -0.0475954f, -0.325425f, -0.443611f, 0.269018f, 0.0823181f, -0.189893f, -0.00310759f, 0.38809f, -0.0297613f, -0.0772569f, 0.117555f, -0.0146545f, 0.24652f, -0.124915f, -0.0226053f, -0.00351846f, 0.123489f, 0.374272f, 0.00411916f, -0.0530559f, -0.459548f, -0.068397f, 0.351112f, 0.20717f, -0.169705f, -0.191568f, -0.0149611f, -0.200327f, -0.0366789f, -0.000831896f, 0.0329813f, 0.0928899f, -0.217083f, -0.1015f, -0.108356f, -0.155276f, -0.224902f, -0.161009f, -0.195741f, -0.196345f, 0.0696936f, -0.0903938f, 0.0346839f, 0.0342342f, 0.108802f, 0.0224264f, -0.116966f, -0.0868056f, 0.41173f, -0.139741f, 0.0816925f, 0.0206459f, -0.0857387f, -0.0889723f, 0.0252684f, 0.122225f, 0.281325f, -0.0975601f, -0.0890313f, -0.202703f, -0.232747f, -0.16356f, -0.109103f, -0.000627448f, -0.281988f, 0.133017f, 0.199669f, -0.305566f, -0.298914f, -0.120265f, -0.0757179f, -0.298619f, 0.183222f, -0.142981f, 0.0896671f, 0.175904f, 0.0175519f, -0.16538f, -0.0520677f, -0.0670482f, -0.00336189f, -0.223379f, -0.0609024f, -0.27571f, -0.0763393f, 0.295597f, 0.00951529f, 0.127656f, 0.323394f, 0.321615f, 0.184786f, 0.120165f, 0.0270615f, 0.232585f, -0.378135f, 0.00705762f, -0.152686f, -0.25289f, 0.0996134f, 0.0515323f, 0.0147273f, -0.746546f, -0.161453f, 0.0907721f, 0.015299f, -0.0842891f, -0.0432424f, -0.523789f, -0.271467f, 0.0367782f, -0.24899f, 0.207861f, 0.0755162f, 0.173391f, 0.222453f, -0.113516f, -0.24137f, 0.100824f, -0.0606065f, 0.00548546f, 0.0558509f, -0.0575758f, 0.245029f, 0.178345f, 0.143839f, -0.244105f, -0.172561f, -0.338056f, -0.127348f, 0.31021f, -0.115489f, -0.0672434f, -0.0625748f, -0.180578f, -0.227379f, 0.11236f, 0.10313f, 0.166569f, 0.158167f, -0.0638876f, 0.161796f, 0.0371649f, -0.328319f, -0.336786f, -0.211983f, 0.0293737f, -0.115773f, 0.00937545f, -0.246018f, 0.35231f, 0.195708f, 0.0478146f, -0.103948f, -0.106301f, 0.211148f, 0.379093f, 0.416716f, -0.174341f, -0.0187881f, -0.510292f, 0.0914475f, 0.0227487f, -0.100022f, -0.141782f, -0.0911218f, 0.0475971f, -0.244332f, -0.0995312f, -0.209683f, 0.0118146f, -0.333827f, 0.0784702f, 0.152256f, -0.0219116f, 0.138452f, -0.0222356f, -0.0565779f, 0.158486f, 
-0.24482f, -0.00680468f, 0.197839f, 0.0154492f, -0.00997484f, -0.221046f, -0.0717462f, -0.174674f, -0.121365f, -0.225961f, 0.0249583f, -0.012674f, -0.0461503f, 0.326105f, 0.159991f, 0.0172039f, -0.33672f, -0.0282964f, 0.340149f, -0.102354f, -0.32463f, 0.0968813f, 0.142316f, -0.0457009f, -0.449412f, 0.010723f, 0.234789f, -0.0556804f, 0.13699f, 0.346469f, 0.0485624f, 0.158279f, -0.064993f, -0.103656f, -0.058024f, -0.160934f, -0.154483f, -0.208516f, 0.171658f, -0.105681f, -0.0694062f, -0.430509f, 0.0281458f, -0.145734f, 0.00672611f, -0.263346f, 0.398998f, -0.107815f, 0.0612669f, 0.229766f, -0.0120696f, 0.221093f, -0.172262f, 0.0251312f, -0.0730561f, -0.316371f, 0.188185f, -0.046221f, -0.199885f, 0.119867f, 0.218638f, -0.329465f, -0.324384f, -0.141421f, 0.0441414f, 0.0694141f, 0.255176f, 0.0668514f, -0.0346173f, -0.00232405f, 0.194615f, 0.281005f, -0.0199741f, 0.035436f, 0.130112f, -0.0913306f, 0.329646f, -0.0752686f, 0.109595f, 0.0791733f, -0.0692778f, 0.305223f, -0.203791f, 0.124741f, 0.235692f, 0.0366247f, 0.0102351f, 0.0518547f, -0.0949171f, 0.149521f, -0.0588182f, -0.0129089f, -0.232551f, -0.0145967f, -0.0175136f, -0.0871548f, 0.0947253f, 0.0243044f, -0.0628619f, -0.0492656f, -0.299999f, -0.217482f, -0.140209f, -0.0874081f, 0.0812857f, 0.0233994f, -0.389155f, 0.200308f, -0.131029f, 0.299059f, -0.110117f, -0.289113f, -0.0365339f, -0.233167f, -0.108743f, -0.261932f, -0.159673f, -0.106053f, 0.199852f, -0.106121f, 0.0759607f, 0.472064f, -0.163932f, -0.31763f, 0.0104898f, -0.0210451f, -0.0787518f, 0.155917f, 0.102614f, -0.0425018f, 0.104758f, 0.0857415f, -0.155914f, 0.239264f, -0.144245f, 0.0138479f, -0.196582f, -0.225119f, 0.119061f, 0.0667646f, 0.0661826f, -0.190163f, 0.146226f, 0.0857013f, -0.39394f, 0.00735058f, 0.17735f, 0.244409f, 0.06301f, 0.169556f, -0.178062f, 0.12862f, 0.416925f, 0.0967157f, -0.00742805f, -0.000430865f, 0.151077f, -0.135911f, -0.259045f, -0.367174f, -0.13922f, 0.23333f, -0.219153f, -0.101108f, -0.108457f, -0.0457349f, -0.0666834f, 0.222968f, 0.0223704f, 0.0866147f, 0.0902093f, 0.141006f, 0.230202f, 0.0586954f, 0.26749f, 0.0443342f, 0.424975f, -0.159726f, -0.16713f, -0.10332f, 0.126135f, 0.125221f, 0.220837f, -0.121812f, -0.20649f, 0.161173f, -0.0608088f, 0.751833f, 0.177478f, -0.107548f, 0.0103489f, -0.212986f, 0.177713f, -0.353158f, -0.0872167f, 0.126602f, 0.0343864f, 0.0116791f, 0.0520713f, 0.00361525f, 0.194245f, -0.114742f, 0.020037f, -0.114726f, 0.126897f, 0.039019f, 0.445555f, -0.0193091f, 0.0637067f, -0.128501f, -0.0345904f, 0.0988956f, 0.178154f, -0.0259671f, -0.0257689f, -0.091025f, 0.0684302f, 0.131971f, 0.0459931f, 0.278118f, -0.0376653f, -0.156248f, -0.0789752f, -0.160455f, 0.353474f, 0.0503084f, -0.194132f, 0.124681f, -0.0915903f, 0.117273f, 0.0232574f, -0.0337332f, 0.0175596f, -0.203004f, 0.132872f, -0.200533f, 0.111507f, 0.452312f, 0.0770053f, 0.201455f, -0.267448f, 0.0539831f, -0.187271f, -0.0896206f, -0.0906231f, 0.174122f, 0.00151794f, -0.44301f, -0.038296f, -0.179995f, -0.0717158f, -0.136493f, -0.163935f, -0.0208884f, 0.361374f, 0.219308f, -0.0691815f, 0.20319f, -0.0567725f, 0.272091f, 0.228685f, 0.0701021f, -0.122392f, -0.280011f, 0.0584825f, -0.054271f, 0.00700558f, 0.0727541f, 0.0566045f, -0.197892f, 0.024467f, -0.192888f, -0.0819263f, -0.0201281f, 0.248612f, 0.0373216f, 0.0864792f, 0.283391f, 0.189835f, 0.0781828f, -0.0364776f, -0.00516293f, -0.136433f, -0.0563264f, 0.184467f, -0.103843f, 0.143026f, 0.153189f, -0.0523581f, 0.213201f, 0.144222f, -0.368817f, 0.150695f, 0.0357488f, 0.44351f, -0.167891f, 0.289154f, -0.227813f, -0.321075f, 
0.0209248f, 0.00428332f, 0.0969976f, -0.108528f, 0.0284129f, 0.0762366f, 0.107821f, 0.119178f, 0.213134f, -0.061735f, -0.172152f, 0.161251f, -0.0093495f, 0.32946f, 0.219039f, -0.287137f, -0.0450728f, -0.0452836f, -0.212494f, -0.107495f, -0.188338f, 0.0459348f, -0.0377559f, -0.0839975f, -0.00428969f, -0.0232576f, 0.0289588f, 0.164926f, -0.0425852f, -0.0543849f, 0.11673f, 0.158114f, 0.159165f, 0.0941762f, -0.0546047f, 0.237165f, -0.0486095f, -0.146102f, -0.196763f, -0.300198f, 0.0103576f, -0.309314f, -0.122579f, -0.147076f, -0.252579f, -0.00101733f, -0.288208f, -0.22112f, 0.311517f, -0.112453f, 0.129476f, -0.324617f, -0.122931f, -0.123137f, 0.000923043f, -0.117103f, 0.0235433f, -0.271816f, 0.141558f, -0.057682f, -0.120304f, -0.106198f, 0.0265892f, 0.254805f, 0.173984f, -0.266907f, 0.0103511f, -0.0901396f, -0.164973f, -0.226945f, 0.0137655f, 0.0133529f, -0.151525f, 0.256784f, 0.132003f, 0.24828f, -0.0647662f, 0.143638f, 0.0600663f, -0.18841f, -0.0538587f, 0.293896f, -0.103811f, -0.389949f, 0.073149f, 0.102529f, 0.00501293f, 0.315232f, 0.231291f, -0.176493f, -0.140862f, -0.133106f, 0.0161411f, -0.210105f, -0.125995f, -0.0174128f, 0.00283163f, -0.16739f, -0.00931349f, -0.26984f, -0.315777f, -0.248987f, -0.144968f, 0.166966f, 0.169746f, -0.220713f, -0.0312972f, 0.156324f, -0.0407818f, -0.139328f, -0.440265f, -0.0850991f, 0.188168f, 0.106694f, 0.154731f, 0.159212f, -0.200953f, -0.037807f, 0.36218f, -0.123355f, 0.396598f, -0.036044f, -0.071492f, 0.189546f, -0.115796f, -0.0827317f, -0.0544022f, -0.222727f, 0.0347514f, -0.0295377f, 0.101372f, -0.0471416f, 0.218466f, -0.0403298f, -0.0743297f, -0.0607741f, -0.0177818f, -0.0976377f, 0.182365f, -0.26278f, 0.0619466f, 0.335466f, -0.039433f, -0.214658f, -0.00413142f, 0.118605f, -0.0871774f, -0.013047f, -0.0139049f, -0.0566686f, -0.0765434f, -0.0230406f, -0.10839f, -0.164259f, -0.110342f, -0.0567072f, 0.0359454f, 0.161352f, -0.271192f, 0.0673184f, -0.0400687f, -0.0291176f, -0.0505437f, -0.167017f, -0.244246f, 0.0127467f, -0.188325f, -0.171548f, 0.0819252f, -0.184143f, -0.0280647f, -0.175439f, -0.0298673f, 0.0928547f, -0.114129f, 0.160686f, 0.124866f, -0.0799349f, -0.0461555f, -0.0569828f, -0.07544f, -0.254674f, 0.200119f, 0.395232f, -0.104755f, -0.0705698f, -0.168159f, -0.363371f, -0.28949f, -0.157786f, 0.0803677f, 0.253256f, 0.183266f, -0.098531f, -0.217913f, -0.277753f, -0.0412087f, 0.0929791f, 0.0416587f, -0.393095f, -0.194569f, 0.115027f, 0.00374004f, -0.230992f, 0.178052f, 0.11554f, -0.112156f, -0.136296f, 0.147941f, 0.160641f, -0.0988691f, -0.156255f, -0.183889f, -0.198891f, 0.0487718f, -0.10064f, 0.0618672f, 0.129453f, 0.245253f, -0.0609817f, -0.0423283f, 0.209125f, -0.00764558f, -0.207093f, 0.090427f, 0.344761f, -0.210035f, 0.0190305f, 0.177226f, -0.478754f, 0.102217f, -0.0815951f, 0.184152f, -0.0708748f, -0.288034f, 0.212553f, -0.00799922f, 0.0402337f, -0.0634731f, -0.0157662f, 0.0380505f, 0.297157f, -0.102219f, 0.270945f, -0.0364033f, -0.223053f, -0.313967f, -0.256362f, 0.00947424f, 0.1584f, 0.0508195f, 0.127063f, 0.161099f, -0.176547f, -0.06178f, 0.28597f, 0.0661753f, 0.115497f, -0.266217f, 0.207641f, 0.288968f, -0.147556f, 0.00127605f, 0.25902f, 0.0888035f, -0.172818f, 0.0106958f, -0.259761f, -0.0210704f, 0.11259f, 0.118585f, -0.131654f, 0.0889418f, -0.141959f, 0.0686276f, 0.119914f, -0.315549f, -0.106624f, 0.356014f, 0.0856996f, -0.121974f, -0.0188067f, -0.150179f, -0.0971979f, -0.15594f, 0.15098f, -0.111329f, -0.258716f, -0.390928f, 0.105128f, -0.170122f, -0.114675f, -0.119159f, 0.0893049f, 0.0829629f, -0.174787f, -0.020651f, 0.059119f, 
-0.120192f, -0.192243f, 0.22854f, 0.0524963f, -0.17855f, 0.129937f, 0.0181097f, 0.151171f, -0.104886f, -0.195503f, 0.166139f, -0.132779f, -0.0952646f, -0.238117f, -0.120478f, 0.250843f, 0.0198936f, -0.16349f, 0.00793157f, -0.139775f, 0.0621653f, 0.102649f, 0.0159358f, -0.173693f, 0.000424589f, 0.0499097f, -0.213681f, 0.000829991f, 0.0470139f, -0.104087f, -0.104971f, 0.154429f, -0.0514045f, 0.021679f, 0.0637851f, 0.0263575f, -0.0773971f, 0.0792207f, 0.0289109f, -0.190421f, -0.114429f, -0.0980095f, 0.0697401f, -0.128251f, 0.0884518f, 0.215688f, -0.503879f, -0.0634976f, -0.0256412f, 0.26015f, -0.082886f, 0.0134682f, -0.1982f, 0.203755f, 0.237095f, -0.178199f, -0.110421f, -0.123333f, 0.0505219f, 0.0872408f, 0.134674f, -0.151414f, -0.20904f, 0.0162698f, -0.0281258f, -0.0696107f, 0.0384256f, -0.316446f, -0.0999238f, -0.0215575f, -0.16317f, -0.422117f, -0.401993f, 0.0318225f, 0.179985f, 0.0327708f, 0.237595f, 0.00156168f, 0.190076f, 0.0242173f, -0.149916f, -0.0292071f, -0.0634601f, -0.353369f, 0.191598f, 0.268846f, 0.0919142f, -0.0838139f, 0.041469f, 0.195228f, -0.304184f, -0.0524774f, 0.0257366f, -0.0669865f, 0.0712212f, -0.165418f, -0.0485386f, 0.135066f, 0.178966f, -0.315931f, -0.160149f, 0.198644f, 0.117106f, -0.130927f, -0.254406f, -0.151422f, 0.0451171f, 0.0421164f, -0.120035f, 0.0517401f, 0.0150269f, 0.0749926f, 0.268662f, -0.213943f, -0.0568393f, 0.122747f, 0.154528f, -0.0203424f, -0.0819281f, -0.201227f, 0.155029f, -0.285458f, -0.081893f, 0.141846f, 0.12811f, 0.17107f, -0.262672f, -0.112772f, -0.186101f, -0.257387f, -0.169401f, -0.263488f, 0.370405f, -0.462936f, -0.188147f, -0.332351f, 0.0125391f, 0.215156f, -0.513405f, -0.289543f, -0.443262f, -0.0851796f, -0.157583f, -0.22628f, 0.0640168f, 0.0691075f, 0.169624f, -0.0885214f, 0.0678881f, -0.178388f, 0.11724f, -0.0459048f, 0.0283356f, 0.135743f, 0.21108f, 0.197132f, -0.298021f, -0.127577f, -0.0454851f, -0.295987f, -0.113867f, 0.0862119f, -0.0201072f, -0.290276f, 0.0147507f, -0.247042f, 0.420167f, -0.376847f, 0.203432f, -0.158043f, 0.0810597f, -0.566199f, 0.218187f, -0.318247f, -0.400209f, -0.219316f, -0.0448023f, -0.357235f, -0.26102f, -0.303588f, 0.00072887f, -0.205802f, -0.175228f, -0.0968084f, -0.0754828f, 0.047413f, 0.131296f, -0.112247f, 0.183774f, 0.0840453f, -0.0239575f, 0.0597386f, 0.0678879f, 0.208753f, -0.381256f, 0.0543436f, 0.0230677f, -0.275275f, 0.197361f, 0.318349f, 0.230976f, -0.0475114f, 0.0923948f, 0.270554f, 0.0193927f, -0.0845898f, -0.074267f, -0.185875f, 0.329959f, -0.00671641f, -0.19907f, -0.208328f, 0.089362f, 0.0418336f, -0.054819f, 0.138547f, 0.318673f, 0.300046f, -0.149823f, -0.146389f, -0.178329f, 0.260826f, -0.0446269f, 0.22329f, 0.0233915f, -0.408598f, -0.210239f, -0.0839846f, -0.210073f, -0.203917f, 0.333065f, 0.0654963f, -0.110438f, 0.0976637f, -0.171706f, -0.0396424f, 0.196927f, 0.107167f, -0.526091f, -0.272819f, -0.0621517f, -0.360691f, -0.0803204f, -0.0894648f, -0.215345f, 0.0738301f, -0.165395f, -0.505362f, -0.510371f, 0.495546f, 0.281085f, -0.349988f, -0.102217f, 0.29955f, 0.101695f, 0.216987f, 0.220804f, -0.264158f, 0.208857f, 0.490646f, -0.235616f, 0.0697848f, -0.0828848f, -0.0676367f, -0.137579f, 0.0101326f, -0.0646971f, -0.245946f, -0.0958766f, -0.274682f, -0.467907f, 0.0970127f, -0.254426f, 0.03253f, 0.0122821f, -0.0339391f, -0.364834f, 0.164962f, -0.180429f, -0.378582f, -0.00960021f, -0.228418f, -0.0264938f, 0.0259812f, -0.295185f, -0.357585f, -0.380096f, 0.0525056f, -0.233331f, 0.13387f, 0.105961f, 0.243387f, 0.258494f, 0.0371437f, 0.0632561f, 0.110992f, -0.208983f, -0.185678f, 0.292418f, 
0.0286353f, -0.00408131f, 0.102217f, -0.136994f, 0.0622825f, 0.395963f, -0.348133f, -0.223302f, 0.273627f, -0.193556f, 0.338264f, -0.159462f, -0.491361f, 0.161778f, 0.156135f, 0.0641617f, 0.0999903f, -0.529532f, -0.285966f, -0.135576f, 0.236579f, -0.130519f, -0.0764042f, 0.493032f, -0.0883978f, 0.150384f, 0.106229f, 0.02975f, 0.318695f, 0.265394f, 0.130223f, -0.0455514f, -0.115114f, 0.107133f, -0.250837f, -0.0966183f, -0.123644f, 0.342727f, -0.0986773f, -0.0127951f, -0.434297f, -0.0685123f, 0.0869741f, -0.269507f, 0.396272f, 0.305987f, 0.145169f, -0.250147f, 0.0425825f, -0.27173f, -0.0943471f, -0.401917f, -0.0518213f, 0.220465f, -0.00776957f, -0.308669f, 0.151246f, 0.040435f, -0.246938f, 0.161326f, -0.657021f, -0.029663f, -0.156154f, -0.0231731f, -0.0567502f, -0.149723f, -0.157589f, -0.0150168f, 0.143093f, 0.0119803f, -0.282194f, 0.00609295f, 0.133509f, -0.238658f, 0.469585f, -0.15437f, 0.123749f, -0.438739f, -0.235357f, 0.196981f, -0.178078f, 0.179464f, -0.360465f, 0.146581f, -0.0722637f, -0.359168f, -0.0213761f, -0.0719016f, 0.228349f, 0.00872679f, -0.0720084f, 0.0129347f, -0.0606057f, 0.209901f, 0.261428f, 0.318637f, 0.0668506f, 0.262152f, -0.188527f, 0.017398f, 0.238802f, -0.119243f, -0.335925f, -0.0708997f, 0.0131007f, -0.183616f, 0.139393f, 0.229401f, -0.0356139f, 0.117969f, -0.0359544f, -0.0976415f, -0.261919f, -0.132652f, 0.0511542f, 0.0250922f, -0.202336f, 0.156581f, -0.21006f, -0.164616f, 0.49608f, -0.143283f, 0.0167009f, 0.0382558f, -0.192059f, -0.0298086f, 0.16408f, 0.0327906f, -0.0112998f, 0.107964f, -0.805638f, 0.341425f, 0.104876f, -0.379418f, -0.16812f, 0.0873235f, -0.591176f, 0.347932f, -0.092094f, -0.0951583f, -0.079231f, -0.102f, 0.430467f, -0.0629909f, 0.103386f, -0.394243f, 0.0921294f, -0.303268f, -0.0878409f, 0.0222568f, 0.177541f, 0.05269f, -0.245371f, -0.394972f, 0.169095f, -0.0322228f, 0.0854907f, -0.277685f, 0.169834f, -0.157112f, -0.125601f, -0.123642f, 0.287326f, -0.11461f, -0.0400871f, 0.0935002f, -0.239499f, -0.00406349f, 0.116467f, 0.195647f, 0.0169376f, 0.108949f, -0.256211f, 0.199251f, -0.22503f, 0.183724f, -0.0459538f, -0.0573185f, -0.135267f, -0.17563f, -0.105615f, -0.216777f, 0.136895f, -0.131041f, 0.143448f, 0.116321f, 0.341659f, 0.04663f, -0.138582f, 0.113484f, 0.000281706f, 0.183075f, -0.205364f, 0.217528f, -0.0325774f, -0.0481017f, -0.00686094f, -0.13989f, 0.0995296f, -0.476637f, 0.120914f, 0.178213f, 0.11095f, -0.154424f, 0.169363f, 0.288232f, 0.105104f, 0.440652f, 0.0404736f, -0.163574f, -0.0724218f, -0.174028f, 0.137715f, 0.255176f, -0.133188f, -0.10359f, -0.150963f, -0.0850369f, 0.162774f, -0.00694466f, -0.523244f, -0.400547f, -0.11478f, 0.0923003f, 0.00922158f, 0.165169f, 0.114364f, 0.396211f, 0.0621255f, 0.413189f, 0.0759307f, -0.148507f, 0.243803f, 0.066523f, -0.0649491f, 0.0867938f, 0.134912f, -0.44741f, 0.133082f, 0.0237098f, -0.327549f, -0.0172026f, -0.104394f, -0.204443f, 0.0804548f, -0.25669f, -0.280141f, 0.184742f, -0.182915f, -0.301567f, -0.132653f, -0.362342f, -0.0867399f, -0.248574f, 0.018783f, -0.0144377f, -0.193732f, -0.0568637f, 0.0212203f, 0.145462f, -0.04467f, 0.188485f, -0.0192423f, -0.162427f, -0.431459f, -0.316196f, -0.0197834f, 0.142554f, 0.161446f, -0.204556f, 0.10123f, 0.136505f, -0.0421437f, 0.0382004f, -0.0105015f, 0.26352f, 0.128504f, 0.220373f, -0.0459283f, -0.0794771f, 0.126873f, 0.102329f, 0.160555f, -0.344226f, 0.11844f, -0.152884f, -0.369259f, -0.732194f, -0.285659f, 0.27297f, 0.0434638f, -0.115029f, -0.178296f, -0.010171f, -0.108856f, 0.243398f, -0.120003f, 0.0617609f, -0.0377697f, 0.0882623f, 0.317397f, 
-0.142634f, 0.0613519f, 0.0625693f, 0.29804f, -0.276065f, -0.283755f, -0.0586926f, 0.0609932f, 0.172328f, 0.380084f, 0.0817355f, -0.0889897f, 0.16975f, -0.0727911f, 0.558122f, 0.129139f, 0.0967012f, -0.00808779f, -0.281368f, 0.229454f, -0.0657459f, 0.110639f, 0.0990761f, -0.0734602f, -0.124961f, 0.120193f, 0.0117927f, -0.00164934f, -0.068704f, 0.0934271f, -0.150389f, 0.267866f, 0.111924f, 0.22073f, -0.0826743f, 0.0181881f, 0.164808f, 0.08553f, 0.0064627f, -0.100066f, -0.196847f, -0.260685f, -0.161078f, -0.0889612f, 0.267343f, -0.183189f, 0.099878f, 0.206179f, -0.134037f, -0.0753274f, 0.073361f, 0.123856f, -0.11014f, -0.23651f, -0.079332f, -0.179564f, -0.0953625f, 0.0816014f, -0.0153009f, 0.0216921f, -0.214616f, 0.0721763f, -0.337629f, 0.113998f, 0.30383f, 0.213949f, 0.0748996f, -0.154083f, 0.082343f, 0.0915755f, -0.165324f, -0.161256f, -0.0732527f, -0.0771391f, -0.179746f, 0.148814f, -0.229269f, -0.00684043f, -0.0877735f, -0.232043f, 0.0358457f, 0.0860737f, -0.016937f, 0.0052483f, 0.203986f, -0.0327027f, 0.0828824f, 0.0515511f, -0.0446207f, 0.0495584f, 0.06504f, -0.0502581f, -0.0989093f, -0.242931f, -0.161322f, 0.0412978f, 0.0882053f, -0.0868244f, 0.0333411f, 0.0033292f, 0.0956053f, 0.224343f, -0.0605414f, 0.200487f, 0.139677f, 0.0741737f, 0.131144f, -0.0156217f, 0.119855f, -0.0672591f, 0.0646749f, 0.0212678f, -0.0612522f, 0.127438f, 0.165742f, 0.149455f, 0.120228f, 0.245928f, -0.536011f, -0.0221017f, 0.0210271f, 0.196356f, 0.0401149f, -0.00733165f, -0.270396f, -0.00968083f, -0.0709557f, -0.120717f, 0.140489f, 0.0935343f, -0.172696f, 0.301435f, -0.0935873f, -0.0353977f, 0.0539549f, -0.0338224f, -0.239903f, -0.0209894f, -0.17114f, 0.267786f, 0.20251f, -0.0980189f, -0.04852f, -0.207071f, -0.253257f, -0.0564701f, -0.0518127f, -0.0537929f, -0.390881f, 0.0470064f, 0.0306878f, 0.104422f, 0.150282f, 0.0117885f, -0.093087f, -0.0377776f, -0.0618607f, -0.0869537f, 0.137726f, 0.0903727f, 0.0346921f, 0.0111f, -0.241767f, -0.201946f, 0.09471f, -0.156048f, -0.0978701f, -0.239229f, -0.0308635f, -0.122071f, -0.433478f, -0.0514787f, -0.182472f, -0.181954f, 0.0416541f, -0.0883368f, 0.157402f, -0.462445f, -0.103609f, -0.160994f, -0.0133393f, -0.096508f, 0.100438f, 0.00418135f, -0.0122206f, 0.172408f, 0.0437795f, -0.172367f, -0.0189107f, -0.0304423f, 0.0780768f, -0.116228f, -0.0305065f, -0.0440305f, 0.00286725f, -0.157059f, 0.132452f, -0.101883f, -0.138483f, 0.00723927f, 0.0342281f, 0.206677f, -0.0770022f, 0.0227105f, -0.111016f, -0.170921f, 0.055846f, 0.246527f, -0.142554f, -0.380108f, -0.0346903f, 0.138706f, -0.176424f, 0.112018f, 0.0435032f, -0.127998f, -0.169885f, -0.0509104f, -0.0870096f, -0.535699f, -0.0638343f, -0.0311837f, 0.078099f, -0.0342351f, 0.0749799f, 0.3883f, -0.154977f, 0.224178f, 0.0550229f, 0.107375f, 0.33049f, 0.0969202f, 0.0756623f, -0.233299f, -0.104361f, 0.442374f, 0.0844492f, 0.0705411f, -0.140545f, -0.0663961f, -0.0728755f, -0.0621244f, -0.0819853f, -0.112193f, -0.176114f, -0.0938139f, -0.214228f, 0.0190762f, -0.213562f, -0.190233f, 0.133314f, -0.148665f, 0.0915799f, 0.187216f, -0.284974f, 0.00733069f, 0.0156916f, 0.015107f, 0.0318654f, 0.346104f, -0.124227f, 0.137341f, 0.0592528f, -0.387351f, -0.221991f, 0.360592f, -0.0931174f, -0.0492834f, 0.199867f, -0.0852204f, 0.150399f, 0.0413833f, 0.235906f, -0.0706518f, -0.166653f, -0.0586646f, -0.109711f, -0.0823073f, 0.257342f, -0.224644f, -0.430506f, -0.105588f, 0.0250296f, -0.042311f, -0.0996558f, -0.115579f, -0.286667f, -0.154598f, -0.137322f, 0.176363f, 0.088216f, 0.161978f, 0.255623f, -0.0123169f, -0.00387241f, -0.318043f, -0.21894f, 
-0.412465f, -0.415855f, 0.255024f, 0.361044f, 0.0300423f, -0.119439f, 0.0657428f, -0.238206f, 0.340391f, 0.201176f, 0.102395f, 0.216324f, -0.121531f, 0.265799f, 0.0327802f, 0.194072f, -0.0792337f, 0.456093f, 0.0971469f, -0.0170099f, -0.0294468f, -0.318039f, -0.242527f, -0.1083f, 0.295943f, -0.0284033f, -0.156199f, -0.20311f, -0.075091f, 0.528829f, -0.165604f, 0.0532403f, 0.0505752f, -0.413034f, 0.175453f, -0.0970195f, -0.029351f, 0.103333f, 0.271092f, 0.0511197f, -0.182135f, 0.112932f, -0.32439f, 0.294457f, -0.0818895f, 0.0914322f, 0.185025f, 0.0543957f, -0.0167575f, 0.504046f, -0.0647153f, -0.166975f, 0.0248059f, 0.0379442f, 0.0980366f, -0.178135f, 0.143822f, 0.45732f, -0.0912428f, -0.179338f, 0.349726f, -0.0596313f, -0.299861f, 0.112567f, 0.0666395f, 0.345303f, 0.164124f, -0.00265316f, -0.0732412f, 0.348079f, -0.249414f, 0.0465329f, 0.0693596f, 0.0799214f, 0.000123214f, 0.180679f, 0.0912923f, -0.300121f, -0.288428f, 0.150135f, 0.112936f, 0.104813f, -0.0555879f, -0.00205972f, -0.0251151f, -0.0788264f, -0.016778f, -0.110796f, -0.083048f, -0.212734f, 0.288568f, -0.114228f, -0.113358f, 0.110789f, 0.118645f, 0.133466f, -0.0298552f, -0.241374f, 0.157257f, 0.0861554f, -0.0909277f, 0.00156177f, 0.106539f, -0.209104f, -0.106974f, 0.0203283f, -0.18111f, -0.311602f, -0.00371812f, 0.0711113f, -0.206721f, 0.286076f, 0.139713f, 0.116621f, 0.182792f, 0.0246107f, -0.17972f, 0.041917f, 0.0724635f, 0.266344f, 0.0989191f, 0.0723898f, 0.0257298f, 0.104898f, 0.0681826f, -0.0704781f, 0.00212139f, -0.363547f, 0.0274255f, -0.106295f, -0.363965f, 0.127051f, -0.0575343f, -0.200952f, -0.0666189f, -0.139465f, -0.0171747f, 0.253794f, -0.258602f, -0.166356f, -0.107649f, 0.267331f, 0.104521f, -0.020921f, -0.0780469f, 0.125002f, 0.0202556f, -0.0899181f, -0.126559f, -0.297855f, 0.121539f, -0.0671643f, -0.0444782f, 0.334408f, 0.0882725f, -0.0879492f, -0.00277655f, -0.0616985f, 0.0564236f, -0.11618f, -0.22836f, 0.112953f, 0.176082f, 0.09988f, -0.00635589f, -0.114234f, 0.241135f, 0.0966775f, -0.0961065f, 0.137214f, -0.0832349f, -0.54299f, -0.2335f, -0.033801f, -0.11505f, -0.366386f, -0.238099f, -0.0951656f, 0.263106f, 0.129292f, -0.14762f, 0.0700404f, 0.0195349f, -0.286227f, -0.273371f, 0.0587288f, -0.257152f, -0.136248f, -0.13336f, -0.248086f, 0.273973f, -0.302625f, -0.085841f, -0.0839808f, -0.130464f, 0.252972f, -0.0415149f, -0.0695038f, -0.091557f, -0.262375f, -0.0645785f, 0.188566f, -0.202261f, -0.112712f, 0.00631479f, 0.0132917f, -0.0130675f, -0.302285f, 0.0556928f, -0.0211812f, -0.0555546f, 0.0291112f, 0.168815f, 0.143654f, -0.00564186f, -0.0614248f, -0.0939664f, 0.0959667f, -0.209823f, -0.103889f, -0.206011f, -0.0394793f, 0.0545815f, -0.0348762f, -0.132075f, -0.0489917f, -0.177563f, -0.164591f, -0.0174372f, -0.276844f, -0.132214f, -0.236278f, -0.0614254f, -0.230962f, -0.409367f, -0.08959f, 0.182197f, -0.341314f, -0.0645579f, -0.0161434f, -0.166644f, -0.0784324f, -0.387537f, 0.236617f, -0.115318f, -0.11315f, -0.109817f, -0.0949309f, -0.253715f, -0.254404f, -0.0876592f, -0.243118f, -0.219172f, 0.0341202f, 0.0203343f, 0.0435131f, -0.0266338f, 0.140304f, -0.20669f, -0.130739f, 0.0213059f, 0.182793f, -0.0711616f, -0.165651f, -0.212373f, -0.0972764f, -0.284464f, -0.0834676f, -0.129573f, -0.133945f, 0.0684521f, -0.133913f, 0.165726f, -0.176839f, -0.0940447f, -0.145421f, -0.0471074f, 0.00950449f, 0.0308656f, -0.00761046f, -0.19397f, -0.161623f, 0.10975f, -0.0398157f, 0.00168868f, 0.0626417f, -0.118388f, -0.134741f, -0.243707f, 0.146451f, -0.165854f, 0.0585878f, 0.0269307f, 0.163195f, -0.197056f, 0.0438799f, -0.152668f, 
-0.178631f, -0.167278f, 0.0258257f, -0.22958f, -0.101918f, 0.0360034f, -0.165612f, -0.112482f, -0.419959f, -0.369384f, 0.0468117f, 0.202511f, 0.161559f, 0.0360435f, -0.211843f, 0.0480519f, -0.252478f, -0.0951382f, 0.100791f, -0.379245f, -0.129869f, -0.036501f, 0.0685223f, 0.0247177f, -0.0751386f, -0.12451f, 0.244585f, -0.0103249f, -0.346383f, -0.300614f, 0.230366f, -0.187795f, -0.0326416f, 0.0735751f, -0.0136039f, -0.0219528f, 0.0629145f, -0.0308739f, -0.101514f, -0.169444f, 0.058706f, -0.133274f, -0.200294f, -0.372511f, -0.214898f, -0.184366f, 0.253648f, -0.0362453f, 0.0618937f, 0.0838244f, -0.0386255f, 0.129191f, -0.147435f, -0.180809f, -0.0797491f, -0.286544f, -0.273005f, 0.116222f, -0.255255f, -0.0504643f, -0.0567216f, -0.0204081f, 0.206331f, -0.225266f, -0.211665f, -0.259216f, -0.0676753f, -0.176153f, 0.285802f, -0.00560349f, -0.0253936f, -0.182537f, -0.344487f, -0.341246f, -0.171879f, 0.24462f, 0.015354f, -0.0255803f, -0.0855239f, -0.151488f, -0.0329621f, 0.311794f, 0.0889872f, -0.142655f, -0.00124048f, 0.0175189f, 0.0459686f, 0.279491f, -0.237445f, 0.0570048f, -0.00665275f, -0.0558817f, 0.0731352f, 0.0291331f, 0.0918153f, 0.0276626f, -0.135103f, -0.303909f, 0.0283329f, -0.203482f, -0.0849922f, -0.284485f, -0.214908f, 0.0836636f, -0.219738f, 0.136157f, 0.0332432f, -0.143305f, 0.0283252f, -0.178703f, -0.0742534f, -0.153174f, 0.02235f, -0.0753622f, -0.210102f, -0.0915751f, -0.0189732f, -0.239039f, -0.135349f, -0.104589f, -0.0658414f, -0.183206f, -0.123006f, 0.0835748f, -0.0703047f, -0.207461f, -0.274129f, -0.225327f, -0.113485f, 0.13316f, 0.0295303f, -0.0958281f};
model->setOperandValue(op84, op84_init, sizeof(float) * 2816);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op86, op2, op1, b87, b88, b89, b90}, {op0});
model->addOperation(ANEURALNETWORKS_DEPTHWISE_CONV_2D, {op0, op29, op28, b91, b92, b93, b94, b95}, {op27});
model->addOperation(ANEURALNETWORKS_CONV_2D, {op27, op32, op31, b96, b97, b98, b99}, {op30});
model->addOperation(ANEURALNETWORKS_DEPTHWISE_CONV_2D, {op30, op35, op34, b100, b101, b102, b103, b104}, {op33});
diff --git a/nn/runtime/test/generated/models/mobilenet_quantized.model.cpp b/nn/runtime/test/generated/models/mobilenet_quantized.model.cpp
index 33d4ddfc6..0f7692336 100644
--- a/nn/runtime/test/generated/models/mobilenet_quantized.model.cpp
+++ b/nn/runtime/test/generated/models/mobilenet_quantized.model.cpp
@@ -617,7 +617,6 @@ void CreateModel(Model *model) {
model->setOperandValue(op40, op40_init, sizeof(int32_t) * 128);
static uint8_t op41_init[] = {130, 106, 119, 96, 123, 111, 103, 111, 123, 112, 130, 112, 136, 143, 130, 132, 125, 107, 116, 123, 123, 115, 111, 139, 135, 143, 115, 95, 134, 119, 108, 111, 108, 128, 128, 95, 107, 182, 115, 96, 125, 113, 106, 96, 128, 81, 121, 113, 101, 118, 134, 119, 111, 111, 139, 99, 129, 112, 117, 92, 123, 128, 120, 109, 115, 108, 108, 111, 118, 91, 120, 136, 121, 91, 111, 138, 113, 146, 136, 87, 117, 115, 111, 94, 117, 112, 70, 118, 109, 92, 82, 107, 133, 93, 113, 109, 113, 158, 104, 115, 111, 120, 114, 128, 121, 115, 114, 108, 104, 103, 123, 86, 112, 117, 103, 117, 143, 123, 104, 105, 80, 118, 96, 110, 115, 152, 106, 114, 101, 109, 126, 86, 113, 99, 99, 118, 110, 111, 116, 105, 74, 83, 110, 143, 136, 124, 124, 106, 95, 61, 109, 104, 76, 116, 118, 116, 80, 111, 125, 112, 113, 142, 105, 110, 128, 58, 112, 129, 163, 108, 86, 65, 129, 47, 135, 80, 123, 101, 158, 133, 114, 115, 83, 103, 101, 130, 110, 141, 74, 108, 106, 113, 114, 112, 95, 122, 154, 15, 100, 128, 123, 154, 116, 106, 108, 113, 98, 104, 106, 127, 99, 117, 130, 134, 132, 134, 125, 140, 54, 106, 158, 93, 109, 107, 104, 64, 109, 101, 80, 104, 118, 79, 176, 110, 116, 116, 158, 125, 136, 149, 127, 117, 114, 119, 113, 143, 109, 138, 50, 127, 72, 103, 110, 118, 94, 120, 111, 108, 103, 110, 113, 111, 109, 110, 120, 120, 98, 117, 131, 117, 113, 99, 118, 117, 113, 115, 120, 109, 107, 107, 125, 88, 110, 134, 131, 107, 111, 111, 114, 119, 114, 126, 110, 106, 112, 113, 117, 117, 137, 106, 103, 104, 101, 114, 115, 124, 115, 106, 123, 113, 118, 127, 112, 113, 108, 114, 123, 115, 114, 113, 116, 113, 106, 113, 114, 103, 113, 115, 116, 116, 115, 99, 122, 107, 139, 150, 120, 115, 112, 131, 102, 102, 142, 123, 114, 114, 102, 103, 123, 118, 120, 113, 114, 120, 106, 124, 124, 114, 112, 126, 73, 119, 113, 114, 111, 113, 104, 114, 103, 114, 107, 116, 88, 115, 108, 112, 84, 114, 108, 112, 119, 76, 109, 112, 171, 111, 123, 109, 139, 119, 126, 121, 108, 111, 172, 112, 81, 76, 103, 143, 111, 116, 121, 137, 104, 126, 106, 252, 103, 172, 118, 70, 155, 113, 104, 109, 109, 98, 161, 129, 93, 54, 117, 110, 98, 113, 108, 120, 116, 124, 87, 117, 122, 158, 113, 123, 113, 103, 122, 101, 149, 140, 124, 139, 111, 177, 103, 110, 109, 113, 106, 116, 111, 118, 150, 116, 123, 138, 120, 107, 110, 117, 117, 153, 106, 126, 122, 78, 129, 90, 149, 99, 83, 149, 119, 110, 113, 87, 110, 112, 115, 87, 111, 121, 122, 109, 125, 142, 86, 109, 123, 120, 127, 99, 123, 149, 119, 118, 142, 121, 174, 106, 106, 90, 104, 123, 118, 122, 111, 208, 125, 121, 78, 120, 137, 108, 113, 163, 147, 140, 87, 95, 100, 108, 159, 155, 77, 144, 106, 149, 134, 91, 105, 183, 172, 72, 146, 111, 146, 118, 52, 145, 143, 103, 155, 85, 95, 108, 146, 161, 140, 105, 84, 110, 87, 121, 127, 120, 138, 151, 140, 70, 112, 160, 94, 98, 95, 94, 77, 101, 142, 75, 149, 65, 79, 112, 90, 105, 106, 147, 80, 120, 102, 111, 129, 82, 139, 117, 97, 64, 0, 100, 105, 123, 147, 118, 137, 121, 92, 89, 143, 83, 120, 119, 109, 115, 107, 179, 148, 147, 112, 85, 155, 97, 138, 74, 108, 106, 138, 150, 77, 137, 134, 84, 144, 139, 229, 88, 111, 96, 255, 129, 98, 72, 120, 133, 91, 109, 152, 141, 100, 111, 111, 119, 114, 110, 116, 114, 112, 119, 76, 120, 108, 117, 134, 101, 110, 97, 111, 120, 118, 112, 103, 18, 104, 59, 112, 156, 149, 111, 104, 115, 102, 109, 114, 130, 106, 121, 114, 116, 107, 116, 121, 119, 110, 117, 125, 117, 106, 108, 115, 110, 118, 114, 135, 115, 122, 96, 110, 127, 111, 101, 119, 114, 114, 114, 107, 113, 114, 124, 116, 114, 111, 115, 111, 129, 117, 127, 148, 95, 116, 115, 116, 150, 107, 144, 101, 110, 
119, 113, 120, 112, 113, 117, 116, 91, 114, 115, 111, 133, 105, 116, 112, 146, 130, 115, 114, 113, 116, 110, 107, 112, 107, 113, 119, 125, 61, 109, 106, 97, 116, 132, 114, 117, 118, 24, 118, 112, 121, 106, 104, 137, 115, 111, 113, 110, 119, 121, 117, 118, 122, 117, 115, 96, 106, 117, 114, 121, 104, 110, 111, 135, 110, 125, 112, 107, 120, 106, 112, 113, 118, 119, 110, 128, 109, 109, 111, 141, 112, 117, 133, 130, 108, 140, 110, 114, 116, 116, 99, 106, 122, 108, 118, 131, 120, 111, 101, 116, 111, 122, 116, 116, 118, 114, 117, 114, 117, 139, 117, 112, 114, 119, 114, 104, 121, 110, 133, 114, 119, 113, 108, 108, 105, 120, 117, 128, 118, 116, 143, 109, 98, 118, 117, 112, 113, 106, 106, 116, 111, 114, 112, 115, 118, 119, 113, 117, 117, 112, 106, 113, 107, 114, 114, 116, 123, 111, 110, 115, 155, 112, 131, 113, 119, 143, 109, 112, 108, 112, 109, 148, 115, 103, 106, 110, 128, 119, 115, 114, 117, 114, 137, 112, 112, 110, 112, 108, 125, 100, 103, 108, 122, 115, 116, 115, 103, 114, 116, 115, 99, 121, 114, 113, 121, 126, 112, 107, 113, 115, 131, 158, 106, 178, 110, 115, 108, 116, 73, 96, 116, 110, 101, 117, 108, 102, 116, 125, 121, 109, 118, 112, 118, 115, 99, 113, 108, 204, 109, 113, 114, 110, 115, 119, 117, 126, 153, 110, 117, 115, 105, 116, 108, 99, 122, 129, 112, 114, 171, 113, 79, 118, 117, 93, 115, 135, 112, 112, 117, 115, 117, 103, 81, 115, 116, 104, 108, 117, 99, 112, 110, 104, 127, 118, 116, 116, 104, 140, 211, 133, 153, 110, 115, 113, 105, 108, 112, 107, 111, 121, 113, 114, 114, 111, 117, 116, 112, 119, 107, 113, 116, 103, 110, 114, 113, 114, 113, 119, 114, 103, 114, 104, 109, 123, 121, 114, 113, 114, 116, 113, 115, 93, 113, 110, 113, 115, 111, 117, 117, 121, 112, 124, 116, 112, 111, 115, 114, 108, 117, 114, 119, 123, 116, 129, 112, 107, 110, 116, 115, 114, 120, 114, 113, 108, 117, 125, 111, 114, 116, 114, 113, 118, 120, 122, 122, 115, 117, 113, 113, 120, 113, 115, 111, 115, 112, 115, 125, 108, 109, 114, 116, 119, 113, 104, 110, 117, 114, 119, 114, 121, 130, 117, 113, 111, 115, 114, 116, 117, 109, 111, 122, 116, 106, 112, 111, 105, 138, 113, 122, 115, 117, 91, 111, 112};
model->setOperandValue(op41, op41_init, sizeof(uint8_t) * 1152);
- model->addOperation(ANEURALNETWORKS_CONV_2D, {op88, op2, op1, b104, b105, b106, b107}, {op0});
static int32_t op43_init[] = {-9119, 406, 8026, -2442, 22035, 7830, 5239, -16418, 3066, 244, 17626, -11691, -4541, 14457, 8384, 3859, -8220, -7972, -3881, -3717, 2242, -11349, -13582, 486, 3455, -2435, -1912, 3185, -10701, 3372, -3150, 6759, 10970, 6665, 16016, 2817, 14058, -556, 7218, -8752, 11568, -9512, -11502, -4894, 4890, -1483, 7517, 9618, 3061, 12316, 19276, 10143, -23717, -5637, 13399, -2423, 4260, 799, 2376, 3188, 4820, 7535, -3916, 13854, 14245, -704, -770, 1514, 3930, 745, 1798, -5644, -3217, 19698, -265, -26449, 11816, -20746, -2139, 1080, -13117, -2232, 4019, -4479, -6283, 1058, 16636, 16248, 3525, -2806, -2207, 3284, 952, 6117, 15768, -10433, 14577, -8859, -11789, 7781, 16966, 18525, -3438, 7560, 211, 3327, -807, 17839, -2552, 9905, 2938, -3158, 5764, 7175, 4135, -10571, 16909, 17722, 16820, 1561, 9099, 7644, 21318, 1901, 22449, 60, -7215, -9300};
model->setOperandValue(op43, op43_init, sizeof(int32_t) * 128);
static uint8_t op44_init[] = {117, 122, 124, 128, 124, 104, 122, 132, 111, 133, 115, 130, 122, 119, 110, 126, 129, 110, 114, 115, 128, 111, 113, 124, 128, 126, 126, 125, 121, 115, 115, 132, 118, 105, 130, 136, 125, 113, 120, 122, 117, 118, 125, 123, 103, 117, 107, 122, 104, 116, 118, 125, 119, 132, 118, 125, 121, 125, 155, 120, 125, 127, 117, 120, 112, 121, 126, 121, 121, 120, 120, 121, 112, 108, 173, 136, 126, 117, 134, 114, 131, 120, 104, 124, 124, 128, 122, 116, 116, 114, 123, 125, 132, 129, 131, 156, 116, 119, 121, 133, 120, 125, 118, 126, 125, 123, 113, 121, 116, 119, 122, 119, 113, 117, 118, 124, 123, 143, 121, 137, 142, 133, 136, 105, 127, 121, 88, 100, 127, 125, 122, 116, 119, 131, 112, 127, 112, 114, 126, 124, 123, 135, 125, 122, 106, 117, 123, 136, 138, 119, 107, 123, 126, 125, 121, 129, 128, 133, 108, 137, 85, 106, 105, 97, 125, 132, 98, 122, 115, 122, 125, 122, 124, 126, 145, 143, 107, 123, 116, 123, 114, 123, 137, 118, 116, 138, 141, 119, 128, 122, 131, 133, 120, 117, 122, 135, 119, 123, 108, 122, 126, 139, 108, 115, 123, 118, 125, 133, 125, 131, 116, 126, 125, 103, 124, 102, 120, 119, 119, 115, 122, 128, 117, 128, 105, 94, 127, 135, 121, 131, 118, 99, 126, 120, 119, 146, 115, 128, 120, 127, 127, 149, 118, 135, 131, 132, 121, 112, 121, 109, 124, 117, 130, 120, 115, 123, 144, 129, 124, 122, 122, 125, 126, 120, 115, 122, 68, 121, 115, 115, 128, 125, 130, 127, 115, 122, 117, 116, 137, 114, 122, 128, 130, 126, 129, 120, 114, 119, 121, 127, 123, 118, 119, 122, 124, 119, 119, 120, 117, 117, 119, 112, 118, 121, 121, 121, 114, 124, 128, 120, 123, 123, 132, 137, 120, 117, 124, 87, 98, 134, 120, 124, 130, 114, 124, 120, 130, 120, 123, 118, 119, 119, 110, 127, 120, 123, 114, 121, 123, 160, 111, 128, 126, 120, 117, 117, 122, 120, 122, 120, 118, 131, 125, 124, 126, 123, 122, 126, 124, 125, 120, 134, 117, 122, 125, 130, 119, 125, 132, 121, 143, 124, 132, 117, 122, 109, 128, 138, 108, 120, 125, 58, 128, 120, 139, 128, 116, 118, 131, 130, 106, 131, 97, 129, 132, 132, 118, 116, 125, 123, 108, 115, 119, 132, 120, 113, 124, 122, 119, 133, 118, 135, 122, 117, 127, 133, 125, 110, 126, 114, 137, 122, 115, 125, 95, 125, 121, 117, 124, 114, 118, 127, 119, 121, 110, 119, 124, 120, 140, 127, 124, 121, 125, 117, 122, 123, 135, 98, 140, 120, 138, 131, 114, 121, 133, 119, 111, 115, 105, 109, 128, 116, 117, 126, 122, 105, 102, 126, 125, 119, 115, 143, 121, 107, 116, 124, 106, 147, 123, 140, 112, 124, 132, 135, 127, 133, 111, 135, 117, 124, 117, 123, 107, 128, 122, 111, 122, 124, 115, 125, 129, 99, 114, 119, 117, 125, 115, 109, 141, 124, 118, 117, 121, 110, 107, 114, 119, 128, 112, 123, 116, 109, 113, 121, 126, 124, 95, 117, 125, 118, 133, 122, 108, 126, 132, 127, 125, 101, 145, 99, 134, 121, 116, 129, 139, 118, 117, 97, 124, 113, 132, 128, 128, 129, 139, 110, 122, 121, 117, 131, 109, 131, 121, 115, 116, 121, 134, 123, 115, 118, 101, 127, 125, 107, 92, 129, 126, 126, 120, 120, 124, 121, 119, 118, 116, 131, 95, 99, 119, 126, 130, 131, 113, 121, 124, 106, 134, 115, 120, 140, 122, 128, 112, 125, 100, 120, 122, 107, 130, 117, 115, 131, 126, 112, 103, 108, 140, 107, 128, 120, 132, 110, 117, 109, 149, 131, 126, 121, 102, 115, 128, 109, 125, 79, 109, 124, 122, 114, 142, 132, 120, 125, 116, 116, 122, 116, 111, 123, 117, 131, 127, 119, 125, 130, 102, 121, 130, 122, 127, 135, 124, 132, 116, 123, 123, 111, 135, 107, 106, 125, 113, 128, 119, 135, 121, 157, 126, 114, 126, 114, 120, 128, 135, 110, 127, 116, 130, 129, 126, 126, 123, 119, 115, 122, 100, 122, 116, 112, 120, 125, 122, 101, 118, 133, 133, 124, 125, 121, 122, 
119, 113, 117, 118, 126, 115, 148, 111, 122, 133, 110, 113, 118, 131, 115, 129, 117, 114, 93, 122, 124, 111, 135, 111, 127, 117, 106, 121, 115, 125, 135, 123, 117, 123, 123, 118, 124, 126, 123, 136, 120, 118, 112, 131, 126, 129, 112, 112, 117, 127, 148, 124, 92, 112, 122, 124, 118, 130, 119, 132, 123, 123, 119, 131, 122, 118, 124, 132, 115, 129, 121, 124, 126, 122, 122, 118, 123, 123, 132, 122, 125, 122, 124, 125, 131, 119, 133, 130, 126, 131, 118, 121, 122, 130, 115, 126, 128, 122, 126, 123, 118, 122, 122, 125, 125, 125, 127, 119, 120, 124, 120, 121, 119, 124, 118, 127, 122, 125, 122, 125, 119, 124, 111, 130, 125, 119, 130, 122, 122, 129, 120, 118, 131, 118, 116, 127, 125, 122, 125, 118, 118, 120, 129, 125, 124, 124, 124, 120, 123, 122, 119, 118, 116, 118, 115, 120, 128, 122, 123, 121, 123, 121, 124, 125, 130, 119, 125, 122, 131, 118, 123, 124, 124, 124, 124, 130, 119, 128, 128, 123, 122, 122, 121, 122, 126, 122, 120, 129, 124, 119, 123, 131, 160, 145, 160, 107, 136, 126, 124, 122, 107, 124, 123, 120, 119, 118, 134, 122, 125, 124, 121, 116, 126, 120, 125, 123, 119, 135, 131, 110, 111, 136, 121, 118, 123, 118, 108, 133, 120, 118, 119, 107, 125, 120, 85, 124, 113, 122, 113, 131, 120, 155, 121, 122, 117, 125, 121, 100, 119, 136, 122, 145, 119, 124, 130, 90, 120, 127, 108, 118, 101, 103, 130, 116, 126, 133, 133, 118, 110, 97, 127, 127, 126, 129, 96, 120, 104, 120, 110, 123, 133, 123, 145, 105, 108, 121, 135, 125, 120, 101, 103, 115, 114, 114, 120, 92, 139, 119, 104, 126, 109, 112, 142, 128, 83, 102, 129, 120, 130, 121, 119, 144, 118, 89, 116, 123, 125, 116, 129, 126, 123, 115, 116, 126, 116, 131, 114, 119, 121, 128, 128, 149, 125, 121, 123, 125, 126, 110, 123, 112, 119, 130, 115, 101, 118, 122, 122, 125, 118, 117, 120, 118, 134, 123, 115, 145, 118, 126, 113, 115, 121, 115, 117, 120, 117, 11, 123, 126, 118, 121, 117, 130, 118, 120, 119, 119, 138, 124, 115, 125, 125, 126, 129, 128, 115, 124, 112, 119, 125, 103, 131, 117, 125, 130, 123, 108, 123, 125, 115, 124, 128, 130, 141, 122, 123, 121, 122, 161, 137, 119, 131, 126, 131, 127, 121, 119, 123, 105, 115, 142, 120, 116, 122, 124, 129, 116, 122, 120, 117, 101, 119, 126, 113, 113, 121, 83, 122, 122, 123, 124, 121, 111, 123, 121, 121, 129, 128, 119, 121, 120, 114, 129, 139, 124, 124, 121, 123, 124, 125, 128, 116, 114, 118, 119, 130, 113, 125, 126, 130, 122, 131, 121, 116, 119, 126, 121, 131, 119, 108, 123, 126, 120, 126, 135, 124, 126, 138, 121, 130, 113, 123, 126, 129, 140, 116, 129, 128, 126, 117, 127, 121, 107, 129, 122, 124, 123, 119, 107, 111, 115, 123, 124, 124, 114, 121, 120, 120, 141, 119, 121, 123, 126, 131, 121, 123, 120, 131, 127, 121, 119, 124, 126, 134, 134, 114, 138, 121, 114, 121, 121, 123, 120, 116, 123, 107, 130, 126, 125, 124, 125, 124, 127, 122, 127, 131, 122, 123, 132, 121, 128, 128, 128, 121, 128, 125, 128, 128, 131, 125, 128, 126, 125, 124, 128, 123, 113, 116, 116, 103, 125, 120, 125, 122, 117, 114, 111, 120, 107, 125, 123, 124, 132, 119, 120, 121, 123, 127, 116, 131, 128, 122, 122, 116, 127, 103, 115, 129, 124, 118, 117, 119, 127, 126, 118, 121, 123, 118, 114, 127, 127, 127, 119, 126, 125, 120, 114, 123, 123, 121, 117, 118, 124, 117, 127, 129, 113, 117, 126, 114, 119, 121, 130, 130, 153, 113, 109, 123, 122, 120, 110, 116, 120, 129, 108, 126, 119, 122, 116, 122, 125, 114, 121, 104, 117, 117, 122, 118, 115, 119, 121, 121, 145, 124, 109, 122, 129, 133, 139, 118, 117, 121, 119, 115, 122, 121, 118, 124, 126, 98, 119, 126, 120, 120, 119, 117, 109, 124, 115, 170, 103, 121, 132, 135, 101, 122, 124, 106, 193, 125, 114, 119, 119, 114, 
124, 124, 113, 109, 122, 98, 133, 127, 124, 123, 124, 104, 121, 111, 131, 112, 103, 129, 126, 123, 100, 96, 113, 123, 97, 137, 120, 116, 120, 130, 121, 128, 122, 111, 125, 113, 115, 125, 119, 122, 119, 115, 114, 122, 125, 120, 118, 132, 124, 120, 94, 133, 126, 102, 119, 122, 110, 125, 122, 116, 133, 107, 124, 120, 119, 124, 118, 113, 128, 116, 131, 125, 122, 168, 114, 109, 129, 126, 133, 139, 118, 157, 99, 128, 126, 109, 132, 117, 135, 107, 134, 94, 131, 110, 125, 111, 126, 120, 130, 97, 112, 139, 106, 123, 104, 135, 125, 134, 133, 124, 116, 117, 130, 108, 123, 135, 119, 115, 123, 136, 121, 115, 121, 133, 109, 130, 111, 127, 135, 107, 123, 124, 128, 113, 125, 91, 111, 126, 96, 75, 123, 89, 119, 118, 118, 117, 127, 117, 126, 133, 95, 121, 129, 127, 125, 129, 135, 92, 107, 72, 135, 126, 120, 119, 118, 117, 129, 121, 123, 110, 118, 124, 124, 95, 126, 125, 115, 120, 119, 116, 110, 125, 124, 123, 119, 123, 122, 119, 120, 142, 117, 116, 127, 120, 114, 122, 124, 115, 124, 127, 122, 114, 128, 125, 83, 123, 118, 146, 132, 137, 118, 123, 148, 116, 127, 141, 120, 118, 120, 127, 122, 123, 120, 117, 104, 123, 110, 122, 104, 119, 77, 126, 117, 140, 115, 112, 112, 109, 115, 120, 124, 129, 135, 121, 119, 123, 110, 133, 97, 147, 92, 93, 129, 118, 123, 133, 128, 124, 119, 108, 154, 117, 120, 122, 125, 125, 133, 132, 124, 130, 114, 122, 115, 116, 124, 94, 112, 138, 130, 136, 136, 120, 112, 124, 126, 125, 118, 122, 123, 138, 114, 108, 124, 130, 122, 126, 113, 120, 120, 128, 125, 130, 127, 119, 126, 145, 108, 116, 121, 115, 133, 122, 126, 56, 41, 141, 124, 131, 120, 125, 136, 119, 119, 148, 98, 129, 120, 117, 131, 116, 117, 142, 121, 138, 121, 132, 125, 120, 140, 128, 120, 117, 85, 125, 122, 117, 139, 131, 64, 138, 139, 112, 124, 143, 120, 143, 113, 116, 127, 101, 116, 122, 122, 118, 129, 94, 118, 119, 96, 85, 123, 125, 124, 124, 126, 108, 135, 107, 130, 113, 119, 120, 123, 141, 136, 124, 110, 133, 121, 120, 121, 125, 138, 122, 124, 120, 128, 121, 115, 133, 114, 119, 104, 103, 119, 115, 113, 135, 115, 125, 122, 119, 121, 120, 126, 124, 111, 123, 115, 119, 114, 120, 109, 122, 133, 118, 120, 105, 119, 121, 118, 118, 126, 112, 140, 122, 121, 110, 119, 118, 115, 120, 112, 114, 112, 105, 128, 118, 126, 113, 121, 119, 129, 121, 121, 142, 128, 138, 113, 116, 121, 135, 124, 126, 119, 126, 120, 118, 123, 114, 119, 122, 119, 130, 120, 125, 116, 109, 117, 119, 120, 118, 121, 117, 127, 129, 125, 135, 132, 110, 113, 99, 119, 124, 109, 125, 129, 131, 120, 120, 118, 130, 136, 122, 126, 140, 113, 138, 114, 125, 115, 111, 122, 119, 125, 126, 155, 102, 118, 126, 114, 122, 118, 125, 118, 125, 118, 121, 103, 130, 119, 126, 141, 115, 119, 104, 124, 120, 126, 124, 127, 127, 120, 128, 121, 132, 113, 126, 129, 125, 119, 120, 132, 116, 107, 126, 117, 113, 144, 129, 115, 90, 149, 132, 124, 132, 126, 120, 113, 134, 137, 107, 121, 124, 122, 118, 123, 128, 127, 113, 121, 121, 118, 120, 119, 124, 122, 103, 129, 124, 128, 123, 127, 116, 122, 109, 118, 108, 119, 119, 130, 129, 122, 146, 137, 125, 111, 115, 123, 106, 119, 128, 118, 125, 123, 125, 117, 126, 119, 120, 126, 126, 132, 123, 129, 102, 114, 124, 131, 120, 115, 132, 151, 140, 113, 141, 133, 120, 124, 131, 121, 128, 135, 126, 122, 123, 117, 116, 114, 120, 117, 126, 114, 123, 127, 117, 135, 105, 150, 124, 122, 120, 122, 95, 139, 114, 123, 122, 118, 115, 116, 115, 110, 106, 136, 113, 119, 127, 135, 125, 94, 132, 140, 128, 130, 120, 114, 126, 133, 119, 123, 117, 126, 114, 127, 119, 105, 121, 117, 117, 135, 181, 119, 128, 114, 163, 127, 118, 93, 117, 129, 112, 110, 94, 123, 126, 116, 121, 
115, 126, 124, 139, 138, 110, 121, 118, 104, 123, 108, 110, 121, 128, 108, 102, 119, 132, 124, 119, 124, 102, 116, 114, 125, 92, 119, 126, 120, 146, 122, 110, 115, 120, 101, 118, 118, 116, 117, 119, 120, 121, 139, 124, 119, 115, 119, 125, 114, 128, 136, 116, 108, 118, 106, 107, 129, 122, 132, 132, 123, 125, 127, 121, 135, 124, 153, 132, 126, 111, 116, 133, 131, 111, 139, 147, 126, 127, 117, 119, 124, 117, 119, 109, 112, 136, 112, 121, 126, 112, 134, 123, 165, 188, 129, 124, 112, 127, 118, 119, 117, 130, 123, 130, 124, 114, 135, 126, 117, 119, 118, 127, 121, 118, 125, 135, 130, 106, 121, 124, 114, 110, 112, 116, 127, 172, 121, 127, 116, 125, 124, 115, 120, 117, 124, 130, 119, 127, 112, 116, 129, 114, 124, 112, 113, 136, 112, 189, 114, 113, 123, 110, 122, 131, 132, 138, 119, 96, 120, 126, 119, 120, 131, 125, 131, 117, 118, 122, 126, 134, 169, 114, 124, 113, 120, 131, 115, 145, 165, 118, 92, 119, 95, 127, 128, 134, 133, 123, 124, 123, 127, 123, 119, 128, 124, 138, 114, 190, 118, 135, 119, 122, 102, 97, 163, 131, 125, 117, 129, 125, 108, 102, 131, 129, 124, 118, 122, 120, 129, 124, 101, 127, 106, 127, 108, 127, 116, 137, 124, 118, 106, 128, 104, 118, 105, 131, 115, 105, 142, 113, 118, 116, 134, 132, 134, 124, 193, 123, 117, 134, 130, 131, 132, 164, 125, 121, 121, 103, 118, 108, 120, 100, 113, 104, 103, 117, 124, 124, 114, 113, 109, 112, 128, 112, 115, 147, 116, 107, 118, 108, 119, 200, 119, 127, 111, 112, 135, 171, 121, 128, 191, 104, 132, 123, 125, 122, 126, 116, 121, 116, 109, 127, 128, 127, 122, 128, 118, 126, 129, 123, 130, 134, 117, 132, 126, 122, 121, 126, 151, 129, 116, 120, 134, 117, 125, 132, 121, 116, 123, 119, 125, 128, 126, 121, 121, 126, 122, 125, 124, 125, 127, 122, 125, 130, 120, 123, 113, 121, 132, 120, 120, 118, 123, 125, 135, 114, 130, 116, 120, 130, 127, 122, 129, 133, 131, 130, 130, 118, 118, 123, 114, 116, 119, 120, 132, 125, 133, 128, 121, 131, 123, 130, 126, 120, 125, 140, 120, 106, 125, 128, 118, 136, 130, 117, 135, 114, 122, 128, 134, 126, 134, 123, 127, 132, 136, 128, 133, 120, 127, 135, 135, 115, 124, 131, 129, 127, 117, 125, 129, 137, 122, 113, 125, 116, 124, 132, 125, 115, 120, 122, 114, 132, 118, 121, 141, 112, 121, 120, 123, 121, 125, 119, 114, 158, 121, 117, 120, 115, 124, 122, 104, 119, 134, 119, 116, 121, 113, 111, 122, 124, 120, 136, 102, 108, 121, 115, 123, 123, 168, 119, 123, 140, 121, 112, 128, 129, 138, 123, 124, 122, 128, 110, 110, 124, 129, 127, 119, 121, 124, 157, 116, 116, 120, 116, 96, 125, 119, 54, 122, 132, 130, 123, 154, 117, 119, 132, 121, 115, 119, 133, 118, 123, 109, 118, 127, 105, 121, 126, 115, 136, 123, 110, 122, 118, 125, 138, 123, 120, 125, 131, 126, 112, 124, 123, 120, 117, 118, 132, 122, 132, 106, 114, 129, 106, 125, 100, 115, 118, 124, 116, 121, 139, 130, 133, 133, 110, 129, 120, 153, 121, 124, 119, 128, 125, 121, 115, 121, 116, 128, 111, 107, 120, 117, 132, 96, 125, 120, 118, 114, 129, 127, 121, 123, 134, 126, 122, 122, 110, 120, 129, 113, 111, 126, 127, 113, 108, 110, 117, 157, 125, 130, 110, 128, 150, 118, 112, 111, 117, 113, 86, 66, 133, 126, 112, 116, 130, 124, 122, 119, 119, 127, 115, 91, 143, 143, 120, 131, 132, 139, 110, 117, 140, 131, 119, 123, 107, 114, 109, 127, 113, 108, 113, 128, 144, 116, 118, 116, 117, 127, 117, 79, 134, 115, 146, 123, 110, 94, 122, 136, 104, 139, 116, 123, 132, 126, 108, 120, 111, 124, 142, 131, 129, 118, 125, 114, 132, 127, 117, 124, 126, 125, 122, 124, 132, 128, 119, 114, 119, 92, 88, 116, 119, 125, 100, 117, 122, 119, 130, 124, 124, 100, 127, 121, 120, 120, 122, 131, 154, 122, 134, 124, 120, 128, 116, 
107, 117, 128, 117, 126, 127, 125, 125, 131, 78, 130, 82, 123, 119, 128, 127, 121, 121, 128, 124, 136, 94, 43, 119, 133, 109, 110, 160, 127, 120, 87, 118, 117, 127, 121, 94, 136, 121, 123, 123, 113, 120, 117, 107, 123, 123, 123, 133, 117, 134, 85, 94, 124, 123, 120, 130, 123, 111, 98, 115, 114, 134, 118, 124, 134, 131, 104, 135, 126, 118, 103, 120, 127, 111, 134, 93, 120, 169, 124, 135, 122, 121, 121, 109, 131, 124, 123, 121, 127, 135, 130, 127, 114, 138, 125, 105, 130, 120, 118, 127, 115, 129, 157, 147, 113, 128, 120, 123, 109, 130, 127, 124, 119, 158, 122, 110, 126, 111, 129, 64, 113, 124, 123, 129, 123, 116, 128, 123, 124, 118, 129, 125, 150, 133, 121, 126, 116, 127, 113, 125, 105, 119, 93, 145, 129, 123, 126, 118, 159, 124, 120, 110, 127, 121, 120, 120, 118, 127, 127, 78, 108, 123, 124, 124, 127, 122, 109, 136, 138, 121, 139, 125, 107, 112, 122, 138, 111, 116, 117, 118, 121, 124, 120, 128, 111, 133, 126, 83, 130, 134, 127, 118, 96, 124, 122, 126, 118, 127, 125, 132, 103, 125, 132, 117, 131, 126, 110, 125, 122, 131, 127, 122, 111, 125, 121, 108, 135, 115, 129, 123, 120, 119, 125, 124, 116, 125, 112, 128, 122, 116, 118, 118, 113, 120, 105, 114, 122, 135, 83, 116, 107, 123, 124, 127, 113, 126, 128, 110, 134, 122, 156, 108, 114, 130, 126, 132, 154, 114, 118, 122, 124, 120, 119, 124, 106, 129, 125, 114, 128, 122, 135, 119, 136, 117, 106, 97, 109, 122, 121, 119, 124, 131, 123, 138, 121, 107, 139, 121, 108, 131, 125, 111, 125, 110, 122, 126, 128, 125, 123, 108, 107, 126, 124, 132, 123, 110, 117, 122, 123, 115, 111, 123, 119, 119, 144, 136, 122, 122, 126, 115, 116, 114, 115, 122, 122, 105, 131, 117, 124, 123, 117, 113, 119, 132, 125, 128, 118, 123, 119, 128, 124, 114, 110, 124, 117, 123, 126, 120, 123, 121, 127, 104, 114, 87, 139, 148, 126, 126, 119, 118, 112, 118, 111, 128, 107, 128, 123, 124, 124, 119, 115, 119, 127, 123, 114, 126, 133, 130, 125, 107, 124, 125, 119, 123, 110, 129, 135, 122, 125, 124, 119, 124, 123, 122, 129, 123, 132, 117, 120, 121, 119, 115, 132, 120, 127, 118, 118, 112, 123, 124, 121, 127, 117, 120, 117, 119, 122, 109, 110, 121, 107, 119, 114, 118, 135, 129, 112, 114, 115, 123, 115, 119, 124, 131, 125, 114, 121, 110, 124, 132, 110, 112, 115, 123, 123, 124, 135, 116, 109, 136, 144, 75, 96, 119, 119, 124, 118, 126, 122, 114, 124, 130, 121, 110, 113, 130, 125, 118, 127, 119, 125, 123, 119, 222, 120, 137, 126, 118, 135, 123, 130, 121, 118, 121, 122, 128, 144, 156, 119, 123, 118, 143, 115, 130, 123, 129, 123, 125, 131, 120, 130, 115, 130, 140, 130, 90, 119, 123, 133, 134, 129, 110, 114, 115, 136, 119, 121, 119, 122, 119, 129, 135, 122, 121, 113, 129, 111, 125, 117, 127, 106, 127, 125, 110, 123, 130, 116, 109, 137, 128, 124, 137, 113, 119, 129, 126, 128, 157, 122, 128, 124, 119, 123, 128, 129, 112, 132, 119, 112, 125, 122, 129, 115, 88, 118, 122, 120, 113, 118, 129, 97, 135, 128, 126, 124, 127, 116, 124, 141, 124, 130, 147, 93, 140, 124, 94, 122, 133, 123, 134, 119, 129, 123, 135, 119, 132, 116, 130, 131, 121, 135, 123, 118, 122, 127, 126, 131, 107, 137, 100, 135, 137, 117, 121, 119, 119, 115, 123, 107, 114, 122, 118, 114, 125, 115, 124, 123, 121, 127, 122, 112, 111, 115, 123, 167, 108, 121, 101, 116, 133, 124, 123, 123, 122, 124, 126, 125, 125, 125, 120, 124, 136, 119, 121, 130, 121, 126, 120, 124, 147, 117, 125, 121, 115, 117, 119, 106, 131, 119, 126, 115, 122, 111, 117, 117, 118, 124, 101, 119, 124, 126, 151, 122, 126, 124, 122, 122, 111, 114, 137, 130, 123, 126, 132, 120, 133, 118, 112, 118, 127, 118, 112, 116, 113, 115, 125, 124, 117, 116, 120, 121, 124, 127, 138, 120, 124, 
124, 105, 121, 136, 128, 117, 113, 110, 121, 127, 119, 123, 131, 123, 122, 120, 117, 124, 121, 150, 100, 112, 115, 122, 128, 122, 122, 117, 146, 123, 138, 122, 116, 122, 133, 118, 112, 121, 122, 125, 115, 126, 119, 119, 106, 112, 120, 126, 126, 124, 121, 126, 100, 118, 113, 115, 116, 124, 121, 120, 124, 126, 174, 109, 131, 127, 122, 114, 120, 122, 119, 132, 113, 123, 127, 125, 113, 119, 120, 124, 127, 126, 107, 124, 109, 115, 115, 120, 108, 133, 127, 120, 127, 114, 119, 110, 115, 119, 124, 124, 120, 116, 114, 125, 120, 127, 124, 123, 127, 126, 103, 125, 124, 129, 115, 124, 151, 129, 124, 130, 117, 122, 107, 124, 116, 112, 109, 125, 124, 135, 127, 111, 104, 130, 119, 134, 133, 119, 110, 119, 117, 119, 112, 124, 118, 125, 96, 116, 127, 119, 152, 129, 113, 120, 116, 121, 125, 124, 132, 136, 134, 88, 130, 136, 127, 123, 116, 112, 142, 108, 101, 120, 94, 104, 133, 136, 100, 129, 81, 112, 120, 120, 134, 123, 123, 131, 123, 131, 110, 136, 126, 127, 128, 121, 121, 121, 123, 124, 95, 129, 118, 115, 132, 131, 119, 164, 98, 105, 127, 104, 125, 124, 105, 136, 147, 123, 133, 124, 121, 118, 114, 113, 125, 135, 129, 107, 114, 126, 124, 110, 126, 121, 129, 121, 131, 127, 128, 134, 120, 122, 112, 95, 123, 119, 128, 95, 117, 126, 140, 117, 128, 100, 123, 122, 122, 127, 122, 123, 109, 135, 124, 107, 118, 146, 131, 132, 94, 124, 123, 127, 112, 86, 124, 138, 116, 119, 121, 114, 121, 130, 117, 155, 112, 120, 136, 119, 120, 120, 133, 126, 121, 126, 129, 129, 145, 148, 121, 116, 128, 123, 123, 118, 119, 127, 118, 120, 91, 126, 127, 123, 117, 94, 134, 86, 135, 145, 123, 132, 136, 113, 117, 118, 131, 37, 117, 65, 123, 114, 117, 88, 121, 129, 131, 126, 122, 124, 176, 125, 125, 139, 122, 125, 120, 132, 121, 115, 116, 125, 132, 119, 129, 120, 130, 128, 141, 119, 127, 121, 124, 125, 157, 127, 119, 127, 112, 108, 120, 127, 185, 122, 112, 114, 129, 115, 115, 126, 121, 120, 121, 122, 97, 124, 115, 121, 120, 122, 123, 124, 110, 116, 106, 120, 93, 112, 117, 123, 122, 123, 133, 124, 122, 119, 102, 128, 123, 121, 111, 127, 113, 121, 123, 108, 125, 131, 118, 121, 111, 143, 130, 121, 119, 128, 130, 123, 137, 124, 118, 124, 107, 121, 125, 120, 105, 131, 118, 116, 120, 116, 127, 117, 120, 118, 114, 123, 127, 123, 133, 117, 123, 120, 117, 121, 119, 117, 107, 101, 151, 124, 124, 119, 130, 139, 111, 107, 123, 111, 120, 119, 71, 120, 125, 103, 120, 126, 138, 123, 124, 114, 130, 121, 123, 123, 124, 91, 125, 114, 121, 111, 126, 125, 123, 112, 125, 128, 131, 112, 122, 159, 125, 123, 121, 111, 126, 123, 120, 124, 115, 107, 125, 116, 131, 118, 139, 123, 120, 132, 118, 119, 110, 114, 125, 129, 115, 132, 125, 112, 118, 128, 124, 125, 125, 106, 108, 105, 132, 108, 118, 120, 124, 125, 131, 121, 129, 145, 116, 114, 113, 134, 116, 122, 124, 121, 127, 126, 107, 133, 119, 120, 103, 131, 125, 108, 111, 138, 113, 121, 126, 118, 129, 128, 124, 127, 134, 119, 119, 118, 124, 116, 104, 121, 143, 113, 124, 136, 134, 119, 117, 115, 117, 114, 141, 120, 119, 111, 118, 123, 122, 129, 115, 135, 122, 85, 124, 116, 133, 103, 113, 122, 133, 121, 120, 110, 93, 137, 119, 119, 123, 134, 121, 128, 116, 120, 113, 125, 127, 112, 122, 126, 117, 130, 127, 117, 104, 113, 115, 114, 119, 123, 120, 111, 120, 135, 125, 105, 124, 122, 125, 97, 118, 123, 105, 124, 131, 135, 103, 127, 124, 117, 108, 114, 117, 121, 111, 121, 122, 121, 128, 112, 117, 125, 134, 130, 121, 122, 119, 129, 118, 119, 125, 121, 107, 126, 130, 125, 130, 129, 114, 130, 113, 115, 125, 129, 120, 117, 130, 111, 122, 121, 121, 123, 115, 128, 109, 123, 87, 121, 117, 124, 129, 119, 132, 131, 100, 125, 127, 86, 
123, 136, 118, 123, 121, 120, 122, 125, 105, 128, 128, 128, 114, 122, 131, 120, 127, 127, 108, 124, 136, 132, 126, 97, 113, 118, 122, 123, 120, 120, 128, 123, 115, 133, 121, 136, 127, 127, 127, 127, 137, 114, 141, 123, 111, 143, 123, 94, 120, 124, 115, 129, 113, 121, 116, 120, 126, 118, 114, 113, 135, 119, 130, 141, 120, 119, 178, 121, 132, 109, 111, 131, 122, 126, 104, 117, 131, 107, 132, 131, 115, 121, 147, 124, 118, 117, 123, 126, 117, 131, 121, 122, 115, 106, 120, 124, 112, 116, 131, 119, 119, 125, 124, 136, 124, 112, 132, 109, 118, 114, 135, 124, 117, 132, 126, 129, 105, 126, 126, 119, 117, 120, 109, 119, 126, 125, 125, 141, 114, 122, 115, 114, 125, 126, 123, 123, 118, 123, 123, 128, 106, 115, 108, 114, 121, 119, 104, 124, 125, 98, 121, 128, 128, 115, 122, 93, 108, 123, 114, 140, 114, 108, 125, 119, 127, 115, 126, 122, 126, 112, 121, 120, 124, 127, 117, 116, 117, 112, 120, 129, 111, 123, 114, 118, 128, 110, 86, 119, 137, 124, 124, 41, 113, 119, 147, 122, 122, 119, 125, 113, 127, 117, 118, 127, 103, 120, 119, 128, 131, 122, 130, 122, 123, 97, 134, 120, 112, 120, 124, 117, 130, 112, 103, 121, 93, 128, 123, 111, 121, 131, 119, 116, 112, 115, 126, 126, 121, 126, 114, 121, 94, 116, 153, 117, 120, 123, 118, 117, 114, 125, 117, 132, 147, 124, 120, 117, 122, 123, 112, 126, 128, 118, 112, 118, 104, 125, 129, 112, 100, 126, 128, 123, 130, 119, 115, 123, 117, 122, 119, 141, 123, 117, 123, 124, 128, 118, 128, 128, 120, 128, 108, 113, 125, 120, 132, 124, 112, 124, 123, 116, 122, 108, 124, 115, 122, 115, 105, 108, 105, 122, 116, 117, 109, 111, 119, 129, 110, 120, 110, 62, 116, 116, 110, 119, 112, 102, 124, 118, 109, 112, 115, 125, 108, 126, 121, 127, 125, 128, 130, 122, 121, 112, 126, 124, 122, 124, 120, 125, 129, 129, 121, 120, 118, 115, 125, 123, 121, 123, 117, 114, 120, 115, 124, 123, 124, 100, 118, 103, 121, 122, 126, 121, 80, 115, 123, 124, 119, 119, 116, 123, 122, 122, 124, 102, 123, 123, 117, 116, 124, 124, 130, 118, 112, 125, 135, 125, 128, 117, 121, 125, 128, 127, 123, 125, 122, 129, 119, 118, 131, 129, 121, 129, 105, 118, 122, 127, 129, 128, 124, 121, 123, 118, 123, 118, 123, 129, 121, 114, 124, 119, 128, 119, 125, 130, 116, 120, 124, 124, 122, 129, 115, 126, 116, 137, 104, 120, 117, 128, 123, 134, 112, 129, 116, 124, 117, 119, 161, 108, 121, 115, 129, 118, 123, 178, 119, 122, 166, 138, 107, 107, 121, 128, 101, 123, 126, 136, 126, 122, 109, 134, 106, 126, 121, 189, 108, 132, 124, 124, 119, 124, 162, 124, 134, 125, 119, 120, 147, 104, 115, 124, 123, 123, 121, 121, 126, 113, 120, 124, 134, 113, 96, 135, 109, 118, 109, 145, 113, 100, 106, 128, 127, 145, 133, 178, 130, 113, 121, 120, 147, 121, 118, 98, 171, 122, 118, 88, 80, 113, 118, 106, 127, 124, 109, 106, 125, 114, 104, 145, 124, 113, 118, 119, 126, 120, 139, 114, 140, 120, 116, 124, 127, 118, 123, 108, 124, 130, 129, 108, 117, 133, 144, 128, 109, 156, 136, 119, 174, 105, 109, 118, 128, 140, 113, 116, 124, 126, 123, 122, 127, 130, 116, 121, 100, 133, 110, 123, 117, 125, 117, 110, 124, 73, 126, 121, 129, 124, 129, 121, 131, 118, 126, 117, 120, 121, 119, 112, 122, 124, 119, 128, 144, 139, 123, 128, 124, 129, 59, 123, 106, 126, 115, 132, 119, 125, 122, 117, 127, 123, 116, 146, 120, 126, 123, 118, 129, 121, 130, 67, 134, 117, 115, 122, 130, 125, 106, 180, 133, 115, 134, 120, 117, 132, 129, 113, 123, 123, 120, 110, 129, 126, 118, 123, 118, 140, 124, 128, 121, 117, 117, 123, 95, 115, 126, 130, 120, 115, 116, 113, 108, 127, 115, 125, 119, 122, 114, 141, 123, 122, 130, 96, 103, 113, 118, 143, 128, 114, 126, 131, 121, 131, 102, 130, 121, 123, 117, 
128, 118, 123, 129, 97, 138, 126, 122, 101, 114, 135, 133, 119, 131, 124, 121, 126, 112, 124, 120, 122, 148, 132, 117, 115, 117, 140, 122, 112, 116, 148, 116, 125, 122, 121, 120, 119, 136, 127, 130, 115, 117, 124, 128, 117, 117, 129, 118, 124, 123, 117, 130, 117, 123, 135, 106, 126, 120, 138, 115, 121, 110, 120, 129, 165, 120, 115, 131, 119, 120, 118, 119, 114, 119, 118, 126, 126, 116, 121, 117, 118, 121, 110, 126, 125, 122, 117, 109, 169, 130, 100, 134, 126, 118, 123, 125, 119, 130, 108, 119, 88, 122, 113, 118, 149, 119, 124, 121, 116, 117, 116, 121, 116, 117, 115, 134, 117, 123, 124, 81, 127, 119, 123, 119, 122, 134, 115, 126, 120, 103, 133, 119, 126, 106, 123, 102, 126, 120, 128, 112, 117, 128, 116, 123, 120, 117, 120, 125, 122, 139, 117, 120, 127, 126, 126, 122, 115, 119, 117, 111, 123, 119, 118, 104, 120, 117, 124, 95, 120, 158, 131, 125, 127, 117, 123, 118, 113, 125, 120, 128, 71, 119, 129, 129, 124, 126, 118, 122, 116, 137, 119, 121, 121, 138, 113, 125, 128, 130, 143, 125, 122, 113, 127, 124, 124, 124, 110, 120, 124, 121, 128, 111, 110, 118, 118, 122, 129, 120, 117, 112, 121, 109, 122, 112, 126, 118, 130, 110, 115, 134, 130, 123, 130, 117, 120, 117, 137, 125, 115, 142, 110, 105, 118, 119, 155, 139, 124, 117, 129, 125, 128, 119, 122, 123, 123, 123, 126, 133, 123, 120, 140, 125, 115, 104, 116, 119, 109, 117, 111, 139, 119, 121, 122, 124, 123, 117, 133, 116, 121, 125, 93, 128, 128, 121, 108, 115, 122, 117, 116, 113, 117, 119, 121, 104, 121, 120, 117, 134, 124, 115, 115, 113, 120, 114, 113, 135, 120, 114, 131, 117, 114, 118, 117, 126, 115, 124, 124, 125, 121, 112, 116, 115, 119, 118, 119, 114, 140, 123, 120, 100, 120, 114, 167, 147, 125, 124, 117, 126, 122, 118, 120, 124, 136, 120, 121, 127, 129, 116, 127, 129, 120, 123, 131, 108, 110, 117, 115, 123, 123, 118, 113, 117, 128, 124, 143, 106, 131, 115, 137, 131, 111, 127, 125, 103, 120, 112, 119, 136, 144, 114, 156, 152, 142, 128, 107, 124, 117, 118, 114, 145, 126, 105, 116, 115, 120, 129, 121, 109, 125, 116, 111, 133, 109, 116, 126, 128, 127, 105, 115, 125, 111, 118, 127, 114, 121, 121, 115, 112, 125, 105, 123, 123, 110, 125, 116, 217, 142, 127, 115, 101, 131, 114, 130, 139, 121, 111, 119, 137, 116, 123, 114, 135, 122, 112, 125, 116, 105, 106, 113, 96, 148, 115, 124, 90, 113, 131, 124, 109, 115, 124, 128, 127, 137, 115, 119, 114, 118, 108, 122, 123, 158, 108, 120, 120, 117, 107, 122, 109, 125, 124, 110, 104, 118, 111, 121, 134, 118, 114, 119, 119, 133, 109, 112, 113, 126, 123, 158, 124, 123, 125, 116, 88, 96, 132, 128, 122, 132, 121, 134, 116, 121, 117, 121, 127, 123, 116, 125, 138, 128, 126, 120, 121, 119, 119, 108, 129, 79, 127, 126, 131, 123, 123, 130, 101, 117, 129, 124, 120, 125, 126, 107, 118, 119, 126, 120, 134, 124, 131, 135, 112, 124, 161, 124, 114, 105, 133, 121, 124, 120, 130, 129, 128, 115, 126, 116, 102, 132, 128, 117, 124, 121, 126, 96, 117, 114, 121, 118, 99, 117, 125, 173, 120, 129, 122, 120, 128, 120, 123, 126, 116, 121, 118, 126, 120, 120, 115, 112, 117, 114, 117, 120, 117, 128, 115, 137, 115, 124, 121, 128, 121, 127, 109, 125, 105, 124, 115, 128, 117, 115, 121, 108, 112, 119, 108, 101, 124, 146, 131, 102, 116, 120, 130, 125, 116, 122, 174, 124, 122, 124, 102, 114, 116, 124, 112, 111, 121, 112, 117, 96, 145, 190, 92, 112, 105, 128, 125, 113, 138, 127, 174, 116, 182, 142, 110, 120, 155, 131, 151, 115, 164, 146, 120, 122, 124, 121, 126, 119, 100, 149, 120, 97, 128, 125, 125, 115, 125, 121, 175, 119, 120, 121, 143, 123, 137, 84, 125, 132, 119, 161, 159, 122, 138, 136, 127, 121, 113, 142, 117, 122, 146, 116, 131, 127, 114, 
117, 110, 128, 118, 103, 90, 126, 119, 142, 120, 116, 125, 95, 145, 122, 128, 157, 124, 127, 115, 129, 117, 125, 137, 143, 130, 183, 116, 125, 114, 150, 116, 127, 110, 113, 119, 138, 117, 120, 123, 114, 119, 120, 115, 96, 125, 124, 142, 113, 120, 122, 122, 126, 119, 117, 127, 130, 124, 124, 120, 146, 120, 123, 121, 117, 121, 125, 120, 124, 123, 124, 123, 123, 125, 122, 120, 124, 124, 119, 127, 120, 124, 122, 122, 124, 122, 136, 108, 122, 131, 124, 123, 123, 119, 126, 122, 115, 123, 118, 122, 136, 122, 118, 123, 122, 123, 120, 122, 122, 122, 126, 123, 124, 122, 118, 125, 123, 123, 122, 123, 121, 193, 126, 118, 126, 118, 121, 119, 116, 157, 118, 120, 124, 124, 122, 119, 121, 129, 135, 119, 122, 122, 120, 123, 111, 122, 122, 117, 123, 125, 124, 120, 122, 121, 125, 133, 121, 139, 121, 136, 122, 122, 122, 124, 122, 123, 119, 123, 119, 126, 119, 124, 128, 138, 114, 121, 126, 124, 124, 120, 131, 144, 118, 124, 123, 124, 167, 109, 127, 99, 121, 121, 191, 112, 126, 97, 122, 134, 126, 120, 127, 158, 122, 177, 135, 131, 109, 123, 129, 150, 126, 130, 136, 116, 119, 136, 128, 123, 129, 156, 106, 107, 114, 113, 122, 122, 126, 129, 124, 134, 96, 121, 119, 114, 109, 106, 106, 117, 139, 128, 117, 109, 124, 72, 124, 121, 167, 114, 126, 119, 132, 139, 116, 127, 110, 114, 122, 117, 70, 100, 118, 116, 128, 128, 134, 120, 107, 123, 121, 114, 117, 118, 158, 114, 124, 107, 145, 144, 115, 135, 131, 127, 156, 128, 141, 132, 102, 121, 126, 126, 117, 120, 129, 116, 108, 131, 128, 123, 129, 121, 113, 121, 130, 136, 108, 122, 125, 124, 126, 109, 120, 122, 126, 116, 122, 118, 126, 122, 115, 156, 119, 125, 111, 130, 124, 114, 115, 130, 113, 124, 116, 122, 124, 124, 125, 123, 114, 107, 121, 121, 122, 117, 123, 113, 113, 126, 114, 116, 122, 125, 116, 116, 124, 123, 124, 124, 134, 115, 124, 128, 117, 130, 122, 118, 121, 115, 115, 127, 118, 124, 124, 121, 117, 119, 121, 121, 120, 106, 121, 123, 126, 108, 122, 126, 111, 119, 122, 122, 126, 118, 119, 120, 124, 115, 122, 117, 121, 121, 124, 135, 116, 123, 136, 121, 121, 134, 157, 118, 117, 119, 118, 129, 125, 118, 122, 124, 129, 118, 126, 124, 120, 121, 129, 123, 122, 116, 123, 126, 138, 81, 123, 120, 112, 120, 131, 120, 117, 126, 118, 105, 121, 123, 118, 123, 122, 164, 121, 126, 121, 127, 117, 117, 137, 120, 113, 107, 122, 126, 120, 123, 114, 123, 130, 123, 122, 124, 127, 121, 113, 134, 120, 122, 123, 117, 134, 130, 123, 116, 122, 91, 122, 121, 122, 122, 122, 124, 123, 98, 119, 130, 106, 126, 136, 123, 115, 124, 120, 125, 123, 138, 117, 120, 115, 102, 115, 133, 118, 126, 121, 122, 118, 128, 138, 122, 111, 113, 130, 120, 115, 118, 123, 121, 128, 123, 134, 123, 116, 124, 124, 122, 127, 127, 120, 128, 132, 115, 125, 118, 123, 118, 120, 118, 119, 125, 116, 121, 113, 111, 128, 64, 119, 148, 130, 118, 174, 111, 126, 130, 120, 120, 66, 125, 117, 146, 114, 124, 118, 129, 126, 132, 125, 127, 115, 100, 126, 129, 131, 122, 136, 133, 123, 126, 132, 141, 129, 115, 132, 128, 120, 132, 133, 119, 112, 128, 121, 119, 120, 107, 128, 125, 49, 113, 58, 127, 121, 125, 127, 82, 119, 114, 124, 110, 121, 121, 119, 115, 134, 120, 124, 125, 117, 126, 121, 132, 121, 127, 109, 121, 134, 132, 124, 112, 114, 130, 129, 112, 125, 117, 126, 144, 134, 124, 125, 129, 145, 126, 128, 54, 132, 54, 133, 121, 111, 123, 121, 118, 123, 119, 101, 94, 123, 112, 126, 131, 118, 132, 126, 124, 130, 123, 121, 107, 120, 131, 137, 114, 126, 110, 137, 56, 134, 119, 119, 136, 115, 122, 102, 120, 122, 125, 123, 122, 128, 121, 124, 124, 88, 128, 122, 116, 129, 122, 121, 124, 123, 124, 130, 124, 125, 114, 129, 128, 138, 121, 125, 
122, 119, 125, 123, 121, 119, 119, 121, 121, 123, 122, 155, 95, 140, 124, 138, 119, 122, 123, 121, 126, 123, 117, 93, 125, 103, 125, 124, 126, 127, 123, 120, 121, 122, 123, 120, 122, 120, 125, 120, 121, 124, 122, 126, 125, 123, 110, 124, 121, 119, 119, 120, 121, 123, 125, 124, 120, 124, 123, 122, 81, 123, 127, 121, 129, 128, 124, 127, 155, 122, 120, 122, 128, 120, 122, 124, 124, 124, 125, 123, 77, 124, 117, 124, 124, 121, 107, 124, 121, 121, 123, 114, 115, 122, 118, 124, 118, 121, 140, 124, 113, 116, 129, 107, 93, 75, 85, 127, 125, 133, 122, 114, 120, 117, 117, 126, 101, 136, 125, 92, 117, 123, 115, 136, 127, 136, 138, 110, 129, 107, 117, 94, 128, 121, 131, 111, 113, 105, 94, 130, 127, 109, 117, 110, 118, 99, 118, 127, 119, 126, 135, 125, 110, 124, 132, 119, 111, 116, 105, 122, 138, 118, 140, 111, 120, 133, 110, 118, 129, 123, 132, 104, 114, 131, 110, 121, 127, 109, 126, 127, 99, 128, 120, 126, 118, 129, 125, 110, 119, 112, 109, 120, 126, 120, 116, 108, 136, 140, 138, 139, 120, 105, 100, 111, 122, 114, 111, 131, 123, 102, 112, 110, 118, 119, 131, 111, 121, 132, 119, 140, 117, 98, 130, 126, 92, 164, 110, 120, 121, 126, 101, 129, 124, 121, 146, 125, 138, 124, 124, 120, 132, 122, 122, 125, 118, 90, 160, 113, 129, 123, 129, 124, 126, 124, 121, 113, 103, 117, 112, 126, 112, 152, 130, 122, 116, 107, 109, 123, 119, 115, 117, 120, 114, 107, 128, 120, 113, 123, 121, 119, 119, 126, 116, 142, 117, 126, 118, 127, 117, 132, 127, 118, 126, 105, 109, 121, 134, 133, 127, 115, 124, 164, 123, 122, 120, 111, 127, 117, 124, 127, 125, 131, 130, 108, 117, 123, 123, 123, 119, 121, 123, 146, 107, 123, 121, 117, 128, 119, 119, 153, 129, 115, 138, 109, 116, 104, 124, 119, 120, 112, 120, 134, 121, 116, 111, 128, 120, 143, 117, 123, 121, 118, 132, 116, 129, 125, 132, 128, 116, 143, 115, 115, 118, 136, 120, 122, 124, 105, 120, 124, 130, 115, 123, 115, 120, 115, 122, 122, 121, 122, 113, 124, 107, 114, 121, 124, 128, 124, 112, 131, 115, 103, 125, 121, 123, 117, 114, 118, 121, 121, 152, 113, 115, 119, 130, 127, 128, 121, 110, 128, 121, 122, 85, 120, 118, 123, 143, 120, 117, 113, 125, 107, 119, 124, 141, 113, 114, 114, 120, 136, 120, 120, 122, 127, 123, 126, 121, 125, 122, 109, 131, 123, 127, 120, 122, 125, 128, 120, 122, 134, 126, 130, 130, 102, 113, 127, 152, 127, 122, 126, 119, 103, 120, 114, 126, 118, 118, 116, 118, 119, 118, 115, 120, 127, 129, 135, 129, 123, 133, 133, 119, 124, 99, 130, 124, 113, 126, 122, 119, 113, 121, 129, 113, 114, 109, 121, 114, 117, 122, 119, 124, 130, 123, 113, 120, 193, 115, 169, 119, 119, 118, 104, 119, 116, 127, 120, 112, 110, 122, 123, 116, 120, 123, 112, 124, 109, 128, 117, 123, 120, 129, 121, 116, 116, 93, 125, 118, 112, 118, 120, 120, 102, 116, 120, 82, 129, 124, 118, 109, 111, 123, 124, 116, 116, 113, 117, 111, 119, 107, 84, 128, 126, 126, 122, 123, 119, 132, 122, 119, 122, 124, 130, 113, 121, 122, 107, 129, 113, 124, 127, 132, 122, 122, 99, 114, 102, 120, 118, 125, 117, 123, 122, 113, 121, 116, 127, 118, 118, 151, 118, 128, 119, 118, 114, 112, 116, 123, 104, 117, 129, 144, 125, 120, 113, 127, 123, 119, 126, 118, 116, 142, 125, 126, 129, 116, 118, 120, 125, 118, 133, 113, 128, 126, 121, 123, 145, 123, 148, 143, 124, 132, 121, 128, 122, 129, 123, 137, 121, 129, 125, 124, 118, 126, 117, 126, 115, 128, 124, 126, 121, 116, 119, 125, 118, 131, 111, 136, 122, 123, 127, 120, 111, 130, 113, 125, 127, 125, 132, 123, 119, 130, 123, 118, 129, 118, 118, 122, 124, 129, 127, 117, 123, 120, 140, 124, 126, 121, 129, 123, 128, 115, 124, 118, 129, 121, 122, 127, 126, 125, 126, 120, 127, 119, 129, 122, 
120, 122, 128, 126, 136, 116, 127, 119, 124, 128, 124, 146, 119, 133, 115, 118, 114, 117, 121, 129, 130, 125, 114, 115, 128, 115, 113, 124, 104, 131, 90, 100, 149, 127, 123, 123, 128, 100, 121, 117, 128, 129, 129, 125, 125, 128, 122, 124, 121, 127, 125, 124, 127, 138, 127, 135, 119, 126, 145, 125, 126, 137, 122, 124, 127, 129, 125, 127, 121, 115, 131, 116, 127, 124, 116, 123, 125, 107, 120, 122, 126, 116, 121, 130, 104, 107, 130, 135, 124, 121, 133, 127, 122, 124, 128, 133, 133, 110, 130, 129, 125, 145, 124, 119, 109, 136, 113, 125, 118, 117, 91, 128, 126, 124, 134, 116, 127, 127, 95, 137, 114, 124, 120, 129, 122, 116, 114, 120, 142, 128, 133, 132, 135, 118, 122, 115, 134, 122, 101, 123, 129, 125, 123, 117, 66, 113, 130, 112, 115, 137, 139, 123, 121, 126, 130, 122, 122, 127, 120, 136, 116, 128, 123, 119, 126, 124, 121, 127, 128, 114, 124, 125, 132, 128, 120, 126, 123, 126, 124, 125, 129, 123, 119, 122, 131, 122, 120, 122, 119, 117, 118, 139, 122, 124, 129, 124, 130, 128, 120, 118, 119, 122, 122, 116, 122, 120, 124, 121, 125, 129, 120, 118, 122, 120, 127, 138, 126, 121, 122, 115, 122, 124, 141, 142, 122, 125, 127, 123, 119, 123, 122, 124, 110, 132, 125, 121, 120, 124, 121, 115, 110, 127, 119, 128, 123, 122, 121, 116, 124, 119, 124, 130, 123, 124, 125, 126, 113, 134, 120, 115, 125, 122, 113, 125, 119, 123, 128, 123, 132, 125, 131, 123, 120, 132, 128, 115, 118, 132, 126, 114, 121, 122, 131, 106, 135, 135, 114, 128, 123, 107, 129, 118, 123, 118, 116, 113, 117, 121, 114, 123, 117, 133, 123, 112, 120, 138, 119, 120, 109, 104, 123, 135, 120, 117, 122, 118, 139, 134, 126, 102, 125, 116, 112, 114, 118, 126, 122, 145, 127, 146, 122, 126, 118, 132, 122, 137, 152, 122, 125, 126, 79, 119, 117, 123, 122, 126, 113, 120, 128, 112, 116, 117, 126, 103, 119, 120, 125, 119, 122, 124, 117, 116, 131, 116, 112, 107, 111, 119, 126, 122, 125, 96, 116, 119, 115, 117, 119, 131, 138, 124, 107, 127, 121, 109, 130, 116, 122, 109, 115, 126, 117, 130, 127, 117, 115, 128, 121, 125, 109, 124, 122, 125, 115, 129, 119, 103, 133, 126, 120, 131, 121, 114, 113, 128, 127, 116, 119, 123, 125, 122, 115, 107, 125, 119, 123, 116, 120, 136, 118, 129, 120, 119, 121, 115, 120, 106, 112, 128, 119, 111, 112, 116, 123, 112, 133, 106, 120, 124, 120, 123, 122, 110, 119, 126, 121, 60, 113, 125, 130, 127, 131, 121, 121, 124, 113, 109, 120, 129, 127, 145, 119, 111, 123, 126, 108, 110, 124, 131, 124, 125, 120, 135, 136, 113, 124, 128, 104, 120, 115, 115, 122, 125, 112, 120, 113, 110, 103, 124, 120, 120, 131, 122, 157, 99, 121, 125, 89, 129, 128, 100, 127, 131, 126, 114, 125, 125, 107, 126, 119, 120, 122, 121, 147, 124, 135, 113, 124, 116, 139, 117, 116, 103, 131, 112, 111, 153, 124, 126, 133, 123, 122, 129, 114, 117, 128, 119, 133, 121, 110, 123, 124, 99, 121, 123, 127, 125, 115, 127, 125, 119, 122, 119, 113, 116, 125, 121, 131, 112, 123, 120, 123, 123, 130, 129, 129, 109, 119, 117, 117, 110, 109, 121, 116, 120, 124, 142, 125, 113, 125, 124, 121, 134, 127, 143, 128, 122, 129, 108, 124, 114, 122, 127, 124, 132, 115, 111, 112, 125, 117, 139, 121, 114, 121, 118, 121, 123, 128, 106, 107, 122, 124, 118, 113, 125, 126, 121, 126, 128, 122, 118, 126, 131, 116, 127, 122, 119, 126, 100, 117, 116, 128, 129, 123, 124, 132, 130, 125, 115, 124, 119, 116, 118, 136, 124, 123, 157, 103, 114, 120, 122, 93, 135, 118, 122, 111, 119, 126, 162, 152, 146, 139, 99, 120, 119, 121, 113, 107, 120, 125, 121, 112, 99, 151, 116, 107, 117, 129, 112, 110, 107, 117, 162, 125, 152, 135, 121, 119, 152, 110, 141, 112, 120, 117, 142, 122, 122, 116, 110, 118, 110, 106, 107, 110, 
120, 120, 125, 123, 139, 125, 135, 120, 124, 120, 118, 119, 119, 125, 137, 123, 127, 148, 110, 128, 138, 117, 121, 106, 96, 107, 113, 124, 117, 117, 128, 113, 103, 120, 123, 115, 121, 82, 117, 108, 116, 94, 120, 125, 126, 129, 112, 104, 115, 130, 124, 123, 102, 125, 105, 120, 100, 118, 115, 165, 122, 106, 119, 125, 116, 138, 114, 99, 116, 152, 126, 128, 123, 116, 132, 121, 98, 118, 117, 126, 132, 128, 126, 121, 125, 127, 130, 119, 129, 94, 119, 127, 120, 118, 116, 124, 116, 122, 119, 121, 158, 126, 120, 122, 129, 120, 127, 119, 120, 122, 125, 119, 127, 123, 124, 114, 122, 122, 128, 94, 141, 127, 157, 125, 112, 122, 125, 123, 120, 118, 125, 118, 120, 110, 124, 129, 120, 121, 121, 120, 118, 123, 132, 129, 125, 123, 123, 123, 124, 121, 120, 129, 120, 118, 88, 122, 117, 121, 89, 123, 128, 127, 128, 128, 123, 122, 122, 124, 122, 120, 123, 149, 95, 112, 127, 124, 126, 121, 126, 127, 112, 124, 121, 115, 122, 125, 125, 128, 95, 127, 138, 124, 123, 120, 119, 124, 138, 127, 117, 120, 130, 123, 129, 125, 125, 111, 145, 105, 124, 120, 118, 120, 122, 119, 121, 121, 107, 115, 120, 122, 123, 120, 127, 118, 126, 126, 118, 134, 129, 76, 133, 103, 129, 142, 132, 125, 129, 119, 125, 121, 117, 127, 126, 126, 121, 120, 123, 121, 116, 112, 124, 130, 116, 120, 133, 122, 121, 127, 123, 115, 124, 115, 116, 125, 122, 121, 120, 127, 115, 119, 121, 121, 132, 130, 122, 132, 117, 118, 120, 119, 123, 128, 119, 122, 119, 123, 122, 125, 121, 106, 127, 120, 122, 129, 117, 117, 127, 122, 127, 110, 123, 114, 121, 120, 118, 126, 119, 128, 118, 126, 125, 114, 117, 120, 121, 127, 126, 117, 128, 122, 124, 124, 99, 120, 120, 129, 127, 128, 117, 127, 124, 107, 128, 118, 123, 123, 123, 120, 120, 124, 124, 121, 114, 88, 123, 119, 132, 124, 118, 120, 128, 137, 117, 123, 140, 120, 126, 122, 131, 125, 122, 124, 119, 124, 128, 80, 101, 127, 120, 127, 120, 114, 88, 116, 123, 125, 121, 123, 123, 120, 117, 133, 127, 104, 120, 122, 121, 125, 122, 121, 121, 125, 113, 106, 138, 124, 126, 126, 139, 144, 125, 130, 96, 121, 125, 116, 111, 130, 127, 117, 117, 119, 122, 126, 125, 92, 121, 124, 125, 119, 121, 122, 103, 97, 121, 129, 120, 132, 123, 129, 88, 121, 124, 124, 110, 127, 124, 120, 136, 117, 109, 141, 97, 126, 121, 103, 120, 141, 119, 148, 123, 121, 116, 128, 126, 113, 125, 120, 68, 123, 120, 125, 88, 122, 115, 122, 118, 110, 95, 116, 145, 125, 116, 116, 119, 112, 106, 129, 161, 126, 121, 93, 125, 130, 101, 123, 122, 115, 127, 114, 117, 120, 121, 122, 126, 140, 114, 114, 115, 125, 122, 125, 127, 117, 125, 129, 124, 126, 119, 123, 131, 115, 116, 118, 114, 129, 105, 120, 129, 134, 132, 119, 129, 120, 114, 109, 113, 121, 121, 115, 126, 121, 140, 128, 114, 121, 124, 109, 120, 122, 122, 114, 114, 124, 147, 116, 123, 115, 110, 151, 115, 117, 123, 137, 117, 126, 125, 139, 116, 129, 120, 103, 120, 129, 129, 119, 117, 134, 118, 113, 126, 112, 119, 115, 121, 121, 123, 124, 115, 103, 121, 142, 126, 120, 124, 123, 116, 111, 127, 107, 108, 124, 125, 113, 122, 136, 113, 127, 128, 124, 129, 131, 94, 103, 108, 117, 123, 123, 120, 124, 128, 132, 117, 57, 124, 113, 127, 117, 123, 159, 133, 121, 123, 113, 120, 114, 117, 126, 128, 123, 126, 127, 120, 107, 122, 118, 135, 118, 112, 126, 129, 144, 160, 117, 121, 120, 118, 119, 75, 105, 123, 132, 121, 131, 125, 114, 124, 112, 119, 152, 142, 118, 117, 123, 107, 116, 122, 136, 119, 113, 136, 125, 146, 129, 117, 104, 125, 108, 127, 119, 115, 115, 110, 122, 124, 122, 122, 143, 119, 96, 116, 125, 126, 121, 120, 128, 113, 116, 128, 110, 119, 120, 123, 125, 117, 126, 120, 130, 122, 119, 103, 104, 116, 102, 125, 145, 
124, 120, 111, 130, 130, 130, 117, 120, 127, 106, 160, 121, 114, 121, 118, 117, 122, 126, 129, 128, 122, 110, 122, 140, 123, 121, 122, 110, 129, 112, 116, 126, 110, 126, 128, 132, 109, 104, 113, 125, 112, 122, 135, 120, 120, 135, 125, 133, 127, 122, 122, 117, 137, 119, 115, 127, 130, 119, 114, 117, 128, 117, 120, 129, 116, 130, 123, 127, 124, 111, 111, 125, 114, 126, 128, 116, 125, 88, 125, 117, 116, 121, 115, 115, 136, 121, 120, 119, 125, 145, 118, 108, 133, 118, 134, 137, 122, 135, 112, 124, 118, 114, 129, 118, 132, 111, 126, 118, 124, 112, 114, 120, 122, 116, 121, 108, 119, 130, 108, 128, 122, 130, 125, 124, 139, 129, 108, 120, 137, 121, 118, 129, 128, 115, 118, 122, 125, 119, 125, 121, 109, 153, 107, 130, 118, 127, 123, 125, 124, 121, 125, 114, 121, 85, 97, 69, 123, 126, 126, 122, 121, 114, 127, 117, 121, 117, 136, 122, 125, 124, 120, 136, 134, 139, 115, 75, 126, 128, 121, 126, 122, 101, 124, 134, 122, 147, 128, 126, 130, 123, 123, 124, 112, 117, 117, 115, 100, 123, 129, 77, 125, 119, 120, 132, 108, 117, 125, 157, 126, 121, 132, 125, 126, 113, 117, 120, 126, 118, 118, 120, 147, 120, 114, 120, 129, 116, 125, 129, 109, 126, 119, 116, 116, 120, 120, 131, 123, 121, 123, 117, 109, 118, 113, 124, 111, 125, 149, 115, 126, 115, 116, 114, 127, 128, 110, 123, 119, 122, 99, 119, 118, 118, 132, 123, 131, 123, 104, 119, 141, 118, 142, 117, 108, 115, 139, 113, 111, 117, 115, 128, 145, 139, 141, 53, 142, 50, 136, 124, 130, 128, 136, 128, 134, 122, 125, 146, 131, 127, 128, 131, 121, 120, 110, 118, 112, 119, 119, 118, 127, 120, 122, 116, 114, 120, 125, 125, 104, 132, 134, 177, 111, 139, 129, 119, 138, 118, 122, 120, 130, 123, 117, 128, 114, 114, 116, 123, 129, 111, 106, 123, 136, 69, 132, 113, 116, 116, 122, 124, 108, 111, 113, 132, 118, 107, 119, 132, 128, 118, 126, 112, 112, 98, 122, 114, 126, 121, 119, 137, 118, 126, 125, 130, 133, 120, 57, 125, 112, 108, 129, 105, 108, 123, 140, 190, 128, 121, 146, 121, 112, 130, 131, 118, 134, 115, 133, 125, 175, 125, 115, 120, 117, 125, 113, 129, 119, 122, 115, 113, 99, 122, 123, 120, 115, 126, 123, 137, 126, 127, 101, 123, 128, 125, 122, 126, 119, 131, 90, 124, 125, 114, 108, 111, 131, 113, 119, 131, 163, 128, 116, 125, 138, 120, 117, 124, 114, 124, 114, 190, 109, 125, 124, 130, 122, 122, 119, 125, 119, 126, 121, 122, 122, 112, 125, 115, 125, 121, 115, 123, 115, 126, 119, 115, 118, 105, 130, 121, 123, 128, 101, 137, 120, 116, 115, 121, 118, 110, 111, 122, 116, 129, 112, 120, 120, 115, 118, 128, 119, 120, 116, 120, 108, 124, 116, 119, 110, 109, 126, 99, 115, 116, 114, 80, 116, 119, 125, 119, 124, 123, 115, 130, 109, 130, 107, 116, 134, 124, 121, 124, 143, 123, 108, 118, 115, 121, 119, 105, 110, 125, 127, 128, 114, 130, 116, 111, 116, 121, 128, 120, 112, 91, 132, 136, 122, 122, 106, 121, 123, 115, 119, 115, 119, 118, 118, 123, 109, 128, 129, 123, 177, 123, 120, 118, 121, 126, 120, 144, 129, 133, 116, 118, 122, 115, 115, 121, 120, 126, 127, 119, 127, 97, 119, 125, 110, 147, 115, 129, 114, 128, 125, 122, 122, 113, 123, 119, 119, 108, 119, 138, 121, 148, 121, 121, 123, 125, 122, 121, 131, 142, 133, 114, 118, 114, 116, 122, 122, 125, 117, 115, 114, 133, 129, 125, 103, 92, 120, 151, 116, 117, 124, 120, 107, 118, 136, 118, 126, 133, 122, 118, 124, 113, 122, 125, 138, 124, 125, 127, 125, 125, 128, 122, 122, 126, 125, 128, 128, 123, 125, 139, 126, 142, 128, 125, 125, 117, 123, 125, 121, 120, 128, 127, 124, 122, 131, 121, 125, 136, 123, 136, 123, 124, 125, 126, 132, 121, 123, 118, 124, 122, 126, 122, 127, 126, 124, 115, 129, 118, 115, 117, 117, 126, 124, 124, 124, 124, 
127, 126, 123, 126, 125, 124, 125, 125, 121, 127, 127, 134, 131, 121, 122, 129, 126, 130, 133, 114, 134, 124, 118, 121, 124, 125, 115, 125, 126, 115, 122, 129, 125, 124, 129, 123, 128, 123, 121, 128, 127, 122, 127, 122, 136, 125, 133, 122, 127, 129, 137, 126, 125, 130, 126, 130, 118, 122, 122, 121, 124, 118, 109, 114, 109, 123, 121, 127, 133, 136, 112, 128, 108, 148, 106, 94, 114, 123, 123, 120, 135, 119, 143, 121, 111, 122, 98, 91, 129, 99, 124, 124, 119, 117, 144, 121, 126, 122, 118, 121, 117, 123, 111, 148, 120, 143, 129, 123, 115, 116, 116, 119, 101, 122, 108, 136, 121, 121, 109, 118, 140, 106, 124, 134, 149, 122, 113, 126, 132, 129, 102, 132, 123, 119, 132, 115, 120, 145, 100, 118, 125, 114, 115, 122, 144, 138, 119, 125, 119, 130, 119, 128, 154, 115, 121, 96, 119, 121, 108, 125, 118, 118, 131, 146, 134, 105, 123, 124, 110, 143, 123, 105, 119, 94, 115, 127, 122, 128, 120, 113, 117, 123, 107, 125, 108, 118, 129, 131, 119, 130, 121, 126, 124, 117, 123, 112, 122, 108, 103, 123, 201, 121, 123, 115, 126, 122, 101, 122, 115, 125, 107, 126, 128, 117, 122, 133, 101, 150, 124, 129, 127, 130, 108, 104, 131, 129, 129, 136, 120, 128, 125, 115, 125, 111, 136, 122, 125, 120, 126, 122, 114, 62, 137, 118, 125, 125, 110, 126, 130, 114, 117, 123, 112, 121, 133, 119, 116, 108, 109, 135, 114, 118, 122, 107, 121, 118, 131, 123, 127, 131, 122, 108, 126, 124, 123, 125, 115, 111, 122, 132, 119, 122, 124, 111, 117, 121, 117, 117, 118, 121, 112, 119, 142, 133, 121, 123, 120, 124, 114, 138, 124, 86, 112, 130, 145, 136, 116, 120, 119, 114, 119, 119, 121, 107, 112, 104, 114, 161, 113, 147, 140, 108, 121, 107, 126, 114, 136, 112, 115, 114, 116, 97, 115, 128, 124, 125, 128, 115, 126, 119, 119, 101, 92, 123, 126, 95, 115, 111, 125, 120, 99, 118, 121, 110, 136, 118, 124, 107, 120, 110, 115, 112, 112, 112, 140, 113, 139, 107, 102, 121, 129, 124, 109, 146, 105, 129, 111, 121, 123, 107, 120, 126, 111, 115, 114, 113, 92, 114, 122, 125, 100, 135, 119, 130, 94, 111, 121, 113, 127, 148, 114, 117, 120, 115, 119, 129, 121, 162, 102, 121, 105, 97, 120, 121, 96, 122, 128, 119, 118, 116, 114, 108, 113, 115, 137, 108, 137, 164, 114, 94, 148, 109, 122, 133, 116, 99, 128, 127, 112, 124, 105, 117, 122, 129, 109, 126, 124, 110, 122, 134, 104, 122, 117, 114, 125, 123, 129, 124, 124, 111, 126, 124, 120, 120, 122, 109, 122, 109, 109, 113, 117, 127, 130, 131, 110, 133, 114, 115, 124, 121, 122, 125, 121, 128, 120, 127, 113, 118, 125, 127, 128, 124, 126, 125, 116, 130, 119, 119, 115, 121, 120, 124, 123, 143, 98, 113, 128, 114, 126, 119, 111, 132, 134, 113, 133, 128, 119, 124, 116, 118, 127, 135, 126, 113, 121, 118, 122, 117, 122, 126, 125, 118, 124, 132, 127, 130, 128, 122, 120, 114, 124, 118, 129, 122, 110, 109, 128, 117, 130, 114, 128, 128, 121, 118, 120, 112, 107, 127, 113, 122, 124, 125, 130, 128, 118, 124, 127, 133, 128, 123, 134, 119, 127, 157, 120, 117, 118, 123, 133, 126, 125, 113, 139, 116, 129, 119, 115, 105, 123, 127, 131, 118, 124, 116, 96, 114, 118, 114, 92, 117, 139, 111, 121, 130, 120, 117, 105, 125, 124, 116, 112, 117, 109, 111, 117, 110, 105, 117, 116, 122, 127, 122, 128, 129, 102, 132, 120, 125, 118, 136, 108, 116, 112, 118, 124, 113, 119, 126, 121, 116, 123, 118, 106, 124, 112, 118, 114, 136, 126, 121, 102, 123, 125, 121, 132, 155, 119, 114, 115, 120, 126, 136, 133, 199, 97, 118, 118, 109, 124, 129, 106, 117, 121, 135, 123, 112, 118, 113, 127, 111, 116, 114, 78, 140, 114, 107, 80, 113, 121, 154, 121, 108, 114, 122, 103, 125, 120, 126, 109, 150, 122, 124, 131, 110, 129, 130, 111, 120, 127, 115, 130, 130, 114, 117, 123, 
118, 127, 132, 147, 124, 123, 119, 129, 116, 112, 107, 123, 125, 131, 126, 119, 120, 116, 124, 120, 126, 126, 137, 144, 135, 129, 141, 137, 118, 147, 120, 124, 80, 113, 137, 111, 142, 122, 131, 93, 110, 119, 123, 122, 118, 122, 120, 119, 122, 124, 77, 122, 126, 124, 117, 90, 121, 125, 135, 123, 120, 110, 120, 124, 113, 118, 128, 126, 118, 115, 120, 132, 119, 83, 118, 124, 118, 120, 131, 118, 128, 121, 120, 134, 118, 129, 128, 119, 120, 120, 116, 105, 126, 110, 126, 123, 118, 113, 122, 119, 120, 91, 111, 144, 130, 125, 121, 107, 124, 129, 130, 116, 116, 119, 110, 122, 119, 118, 130, 120, 116, 102, 130, 123, 123, 118, 120, 116, 128, 130, 134, 120, 65, 118, 121, 134, 126, 122, 154, 108, 122, 126, 122, 122, 118, 136, 127, 119, 123, 119, 124, 120, 103, 102, 119, 104, 124, 136, 122, 121, 106, 168, 115, 122, 125, 121, 125, 157, 121, 124, 122, 108, 123, 121, 115, 120, 123, 124, 153, 114, 116, 123, 117, 132, 116, 116, 120, 129, 120, 118, 125, 112, 118, 127, 124, 107, 111, 119, 120, 113, 123, 125, 104, 123, 130, 124, 96, 117, 125, 125, 122, 105, 119, 118, 122, 120, 128, 122, 127, 117, 127, 118, 118, 115, 120, 129, 125, 125, 113, 126, 115, 105, 120, 111, 109, 137, 119, 137, 122, 126, 109, 125, 133, 80, 148, 101, 122, 120, 127, 124, 121, 151, 123, 109, 123, 127, 130, 116, 137, 133, 121, 125, 120, 101, 116, 121, 123, 129, 125, 124, 102, 134, 142, 124, 122, 117, 124, 167, 124, 119, 118, 124, 117, 121, 122, 117, 113, 120, 116, 129, 127, 104, 121, 118, 129, 114, 125, 126, 138, 128, 128, 121, 122, 122, 127, 121, 121, 129, 130, 127, 119, 100, 112, 134, 121, 115, 105, 123, 119, 112, 119, 112, 116, 124, 114, 118, 128, 120, 111, 114, 124, 120, 137, 122, 118, 125, 131, 126, 128, 109, 124, 131, 128, 111, 123, 124, 117, 123, 121, 121, 121, 115, 122, 121, 142, 109, 146, 119, 131, 116, 118, 129, 122, 162, 110, 117, 126, 126, 124, 119, 95, 137, 124, 121, 128, 118, 121, 122, 118, 113, 127, 119, 115, 113, 118, 123, 121, 128, 120, 118, 124, 128, 123, 124, 122, 111, 122, 126, 123, 119, 129, 121, 126, 115, 120, 121, 119, 123, 123, 122, 122, 125, 122, 119, 173, 119, 122, 121, 128, 118, 124, 108, 129, 112, 120, 123, 124, 114, 120, 102, 139, 126, 120, 118, 121, 130, 123, 126, 125, 130, 122, 112, 121, 125, 111, 124, 104, 104, 122, 124, 99, 147, 123, 118, 108, 122, 123, 122, 129, 126, 132, 115, 130, 127, 130, 111, 125, 120, 124, 128, 120, 95, 123, 136, 127, 126, 119, 121, 122, 122, 120, 125, 119, 118, 124, 119, 128, 123, 120, 139, 121, 130, 118, 119, 124, 125, 120, 132, 126, 131, 130, 109, 117, 113, 122, 126, 128, 125, 126, 114, 127, 130, 127, 120, 121, 125, 120, 120, 121, 126, 122, 122, 132, 127, 113, 119, 128, 132, 130, 126, 115, 125, 123, 127, 122, 116, 120, 116, 120, 129, 127, 117, 118, 141, 124, 114, 119, 126, 131, 121, 124, 129, 119, 126, 116, 116, 106, 139, 122, 118, 132, 123, 127, 123, 124, 126, 129, 120, 126, 116, 122, 125, 126, 122, 119, 122, 125, 118, 123, 124, 126, 116, 120, 117, 123, 121, 112, 121, 117, 119, 125, 127, 127, 120, 120, 124, 128, 125, 128, 126, 133, 115, 128, 128, 123, 124, 131, 125, 126, 118, 123, 130, 130, 130, 130, 123, 123, 125, 129, 125, 124, 116, 125, 127, 116, 125, 113, 129, 122, 109, 129, 123, 114, 123, 126, 109, 122, 119, 121, 124, 122, 128, 124, 141, 121, 122, 118, 122, 116, 116, 121, 130, 120, 121, 112, 128, 167, 123, 121, 126, 115, 124, 117, 122, 117, 123, 116, 112, 123, 121, 120, 124, 116, 124, 119, 97, 115, 115, 128, 120, 122, 131, 113, 115, 133, 131, 122, 124, 127, 123, 119, 121, 124, 121, 119, 122, 160, 127, 121, 126, 111, 119, 122, 119, 119, 123, 126, 127, 93, 118, 129, 115, 133, 
116, 125, 112, 116, 125, 123, 122, 120, 126, 120, 114, 126, 119, 122, 123, 125, 124, 113, 114, 127, 119, 123, 130, 117, 124, 127, 132, 152, 119, 97, 118, 124, 121, 125, 131, 124, 117, 127, 124, 118, 121, 106, 129, 126, 133, 113, 120, 111, 119, 119, 116, 119, 108, 117, 120, 134, 122, 116, 125, 124, 118, 118, 129, 122, 110, 121, 122, 123, 123, 119, 113, 124, 120, 120, 124, 118, 108, 117, 128, 115, 118, 129, 121, 125, 103, 119, 103, 133, 127, 117, 130, 117, 114, 125, 119, 121, 120, 126, 112, 124, 122, 114, 115, 120, 120, 126, 121, 132, 127, 123, 141, 121, 111, 115, 127, 130, 136, 128, 109, 115, 124, 133, 126, 115, 121, 114, 129, 121, 177, 126, 122, 120, 121, 122, 118, 111, 120, 132, 94, 126, 123, 125, 118, 119, 120, 124, 125, 112, 131, 119, 129, 126, 122, 132, 125, 115, 230, 136, 96, 117, 124, 129, 136, 124, 128, 123, 115, 127, 115, 150, 129, 123, 128, 124, 124, 124, 119, 119, 114, 127, 136, 113, 119, 124, 131, 113, 107, 122, 114, 125, 110, 121, 109, 103, 111, 113, 137, 124, 121, 109, 118, 158, 102, 115, 124, 125, 122, 120, 106, 125, 102, 121, 121, 120, 131, 129, 133, 123, 132, 123, 102, 82, 119, 128, 123, 112, 122, 106, 119, 119, 125, 123, 115, 123, 106, 113, 115, 116, 117, 136, 122, 123, 109, 89, 129, 117, 112, 123, 126, 136, 125, 115, 123, 128, 126, 122, 123, 110, 129, 151, 113, 119, 124, 124, 113, 113, 119, 122, 116, 125, 118, 120, 122, 117, 131, 112, 128, 125, 102, 129, 126, 82, 124, 105, 129, 148, 123, 111, 115, 127, 95, 122, 106, 121, 114, 119, 116, 110, 123, 119, 121, 124, 119, 119, 117, 125, 99, 128, 115, 120, 121, 123, 117, 126, 138, 159, 118, 117, 114, 99, 127, 126, 137, 114, 116, 120, 125, 121, 134, 117, 122, 120, 124, 125, 119, 93, 110, 120, 126, 121, 81, 119, 119, 110, 128, 124, 126, 125, 130, 109, 116, 113, 126, 132, 136, 128, 114, 124, 113, 125, 120, 164, 125, 117, 121, 123, 137, 122, 119, 60, 118, 124, 113, 121, 112, 125, 129, 118, 118, 106, 124, 119, 109, 127, 138, 120, 118, 120, 121, 118, 112, 110, 114, 93, 102, 118, 122, 125, 118, 128, 110, 118, 128, 135, 106, 123, 116, 123, 116, 135, 132, 130, 129, 119, 107, 134, 113, 127, 134, 118, 124, 122, 124, 125, 111, 125, 124, 122, 112, 115, 126, 124, 126, 140, 127, 121, 123, 134, 113, 123, 127, 119, 115, 121, 127, 121, 112, 121, 121, 122, 123, 100, 122, 118, 110, 127, 118, 134, 126, 131, 119, 121, 122, 123, 123, 123, 131, 131, 124, 138, 126, 122, 128, 122, 120, 121, 125, 122, 124, 126, 126, 123, 125, 132, 112, 105, 125, 115, 132, 120, 125, 101, 92, 142, 127, 122, 123, 124, 121, 132, 123, 139, 119, 120, 121, 122, 126, 116, 137, 150, 123, 127, 126, 115, 120, 117, 132, 135, 124, 121, 92, 127, 126, 124, 126, 123, 94, 127, 137, 117, 122, 146, 113, 123, 120, 109, 123, 120, 122, 120, 122, 113, 123, 116, 114, 121, 117, 134, 85, 120, 127, 132, 138, 121, 121, 130, 97, 112, 49, 125, 127, 127, 121, 114, 135, 119, 118, 139, 117, 130, 121, 118, 132, 45, 134, 50, 119, 129, 143, 118, 129, 130, 126, 116, 134, 126, 123, 131, 119, 128, 127, 135, 121, 134, 107, 123, 112, 117, 133, 120, 122, 118, 132, 129, 132, 124, 110, 129, 119, 153, 91, 127, 138, 112, 128, 130, 121, 128, 131, 120, 124, 129, 120, 119, 121, 127, 111, 123, 120, 128, 129, 64, 117, 122, 120, 114, 125, 131, 130, 122, 142, 126, 124, 115, 125, 127, 120, 125, 122, 116, 128, 108, 120, 116, 126, 131, 130, 131, 127, 132, 119, 130, 109, 124, 45, 127, 117, 114, 121, 132, 135, 124, 127, 95, 123, 141, 114, 122, 131, 119, 117, 117, 110, 139, 128, 109, 135, 119, 125, 125, 136, 119, 127, 123, 123, 126, 130, 122, 131, 122, 116, 112, 124, 124, 117, 114, 125, 127, 138, 131, 127, 132, 127, 113, 121, 115, 
104, 123, 114, 74, 128, 99, 115, 119, 135, 128, 142, 135, 124, 123, 132, 125, 110, 130, 116, 129, 124, 124, 113, 116, 132, 123, 104, 126, 117, 139, 130, 125, 126, 122, 106, 134, 117, 128, 123, 124, 137, 129, 118, 114, 118, 121, 123, 126, 122, 132, 100, 116, 69, 128, 131, 138, 128, 120, 127, 110, 120, 125, 125, 133, 121, 119, 123, 128, 126, 124, 131, 124, 141, 125, 147, 120, 122, 110, 120, 129, 0, 110, 154, 116, 124, 121, 123, 139, 127, 119, 130, 136, 121, 127, 127, 119, 128, 122, 130, 119, 124, 125, 127, 125, 128, 127, 117, 117, 124, 134, 128, 120, 124, 126, 126, 130, 121, 129, 124, 122, 119, 125, 122, 128, 122, 127, 122, 124, 134, 121, 123, 144, 122, 136, 133, 121, 122, 120, 130, 126, 117, 123, 120, 123, 123, 123, 130, 123, 122, 116, 123, 124, 124, 121, 122, 120, 123, 126, 122, 121, 119, 127, 126, 123, 125, 123, 120, 123, 121, 122, 125, 127, 127, 123, 124, 120, 120, 125, 135, 116, 137, 127, 118, 122, 122, 126, 115, 122, 121, 119, 130, 118, 129, 126, 120, 122, 123, 120, 122, 127, 126, 121, 130, 126, 126, 129, 129, 120, 128, 120, 136, 125, 119, 122, 127, 125, 118, 126, 118, 119, 113, 133, 119, 122, 119, 108, 118, 121, 126, 116, 133, 116, 127, 117, 122, 108, 132, 118, 132, 127, 125, 119, 125, 125, 123, 138, 126, 134, 97, 124, 122, 87, 129, 141, 99, 114, 121, 119, 121, 126, 124, 120, 123, 151, 113, 119, 121, 115, 112, 120, 127, 117, 110, 112, 131, 120, 137, 124, 120, 135, 110, 125, 119, 130, 116, 119, 150, 123, 126, 124, 108, 116, 118, 117, 128, 129, 118, 118, 115, 121, 123, 137, 115, 121, 119, 123, 123, 117, 121, 124, 117, 118, 103, 148, 128, 116, 124, 131, 118, 107, 127, 117, 123, 155, 122, 132, 113, 118, 118, 153, 124, 133, 124, 111, 124, 114, 123, 103, 117, 115, 134, 115, 105, 118, 121, 129, 117, 122, 124, 121, 115, 119, 121, 125, 114, 121, 123, 143, 102, 129, 125, 129, 120, 112, 112, 119, 103, 122, 127, 123, 122, 125, 117, 140, 102, 135, 112, 126, 124, 131, 113, 103, 114, 123, 117, 120, 119, 122, 110, 125, 152, 119, 114, 114, 125, 121, 103, 127, 118, 118, 119, 144, 145, 116, 120, 124, 120, 119, 115, 122, 124, 107, 119, 119, 105, 126, 121, 145, 124, 110, 117, 112, 117, 150, 120, 129, 118, 124, 115, 92, 93, 137, 120, 116, 127, 119, 127, 128, 126, 149, 104, 126, 125, 115, 122, 134, 111, 110, 119, 130, 111, 117, 120, 121, 129, 126, 120, 107, 112, 140, 123, 94, 114, 120, 122, 122, 127, 109, 130, 121, 107, 126, 134, 115, 123, 123, 118, 126, 120, 128, 114, 134, 133, 122, 117, 120, 125, 124, 121, 119, 132, 124, 103, 115, 123, 122, 125, 127, 119, 128, 119, 104, 122, 134, 136, 119, 115, 122, 119, 134, 141, 129, 114, 118, 116, 125, 125, 120, 108, 121, 132, 128, 128, 118, 133, 121, 130, 123, 138, 118, 115, 133, 119, 114, 125, 130, 103, 107, 128, 105, 136, 120, 113, 105, 104, 111, 130, 127, 119, 128, 126, 124, 121, 145, 114, 129, 124, 120, 125, 121, 129, 146, 125, 131, 124, 138, 120, 136, 146, 120, 121, 117, 113, 118, 117, 115, 114, 123, 110, 103, 143, 122, 125, 142, 123, 108, 126, 110, 127, 115, 119, 118, 124, 112, 127, 114, 126, 124, 117, 115, 118, 123, 120, 112, 124, 122, 122, 122, 124, 116, 113, 120, 122, 121, 125, 122, 122, 124, 123, 117, 129, 121, 124, 123, 120, 110, 123, 111, 116, 123, 117, 120, 121, 120, 122, 124, 118, 124, 119, 118, 118, 120, 120, 114, 123, 117, 123, 123, 118, 117, 117, 121, 118, 122, 121, 122, 116, 123, 125, 119, 123, 127, 118, 130, 124, 127, 124, 125, 119, 121, 119, 124, 124, 124, 119, 124, 122, 121, 117, 118, 117, 119, 125, 109, 119, 122, 121, 113, 119, 117, 115, 125, 118, 124, 122, 125, 114, 121, 127, 119, 122, 128, 124, 119, 122, 121, 124, 125, 117, 123, 117, 115, 120, 
124, 121, 125, 109, 122, 118, 121, 120, 122, 119, 123, 121, 118, 122, 123, 78, 119, 112, 112, 133, 124, 111, 106, 132, 123, 159, 124, 122, 114, 122, 121, 120, 117, 115, 121, 127, 115, 123, 132, 125, 132, 101, 141, 114, 117, 142, 123, 115, 126, 112, 131, 118, 109, 119, 115, 114, 125, 118, 119, 128, 120, 120, 125, 89, 118, 118, 116, 124, 125, 133, 121, 150, 135, 92, 113, 115, 164, 129, 132, 112, 119, 125, 117, 119, 118, 160, 117, 116, 120, 132, 111, 115, 112, 114, 128, 118, 116, 115, 109, 124, 152, 122, 111, 117, 120, 124, 128, 135, 131, 119, 133, 111, 124, 120, 115, 124, 128, 126, 134, 123, 128, 117, 112, 115, 118, 118, 123, 104, 120, 109, 130, 134, 111, 117, 118, 122, 127, 118, 125, 120, 142, 131, 111, 105, 124, 120, 125, 142, 116, 125, 106, 78, 126, 116, 123, 121, 93, 111, 129, 118, 108, 152, 108, 108, 123, 111, 124, 128, 119, 119, 121, 111, 120, 117, 112, 113, 119, 141, 119, 122, 107, 139, 122, 122, 121, 114, 128, 115, 121, 113, 152, 116, 103, 128, 123, 126, 124, 117, 129, 99, 128, 116, 118, 207, 137, 117, 109, 135, 90, 125, 148, 155, 116, 107, 111, 113, 105, 128, 117, 123, 123, 125, 113, 115, 143, 118, 120, 118, 118, 119, 136, 104, 104, 125, 120, 128, 117, 122, 113, 145, 105, 123, 108, 119, 119, 124, 129, 142, 161, 114, 125, 101, 121, 123, 94, 107, 147, 114, 136, 117, 127, 119, 137, 120, 131, 128, 123, 116, 115, 127, 159, 122, 130, 113, 110, 111, 96, 154, 79, 178, 146, 122, 126, 113, 102, 109, 122, 117, 122, 104, 122, 127, 133, 113, 129, 114, 122, 116, 127, 132, 119, 117, 140, 116, 151, 129, 116, 124, 113, 101, 114, 147, 122, 125, 121, 122, 119, 102, 107, 128, 117, 116, 113, 113, 115, 127, 117, 120, 115, 114, 111, 111, 114, 112, 122, 131, 140, 123, 111, 108, 126, 125, 176, 185, 100, 113, 107, 120, 119, 144, 129, 125, 97, 145, 126, 126, 119, 115, 146, 111, 86, 115, 145, 106, 126, 118, 120, 96, 110, 126, 127, 178, 114, 126, 145, 122, 113, 189, 106, 111, 130, 115, 97, 113, 99, 116, 136, 127, 138, 117, 134, 121, 117, 115, 153, 131, 126, 169, 116, 126, 122, 129, 109, 132, 124, 102, 141, 126, 143, 126, 119, 121, 106, 123, 117, 107, 112, 112, 135, 119, 100, 127, 120, 122, 128, 131, 135, 122, 123, 136, 136, 120, 97, 127, 112, 136, 117, 123, 135, 93, 124, 117, 101, 120, 103, 122, 120, 126, 111, 124, 116, 123, 118, 157, 115, 129, 121, 131, 111, 122, 118, 126, 99, 143, 122, 118, 127, 122, 116, 135, 106, 100, 113, 109, 130, 119, 121, 114, 134, 123, 102, 99, 128, 130, 120, 121, 155, 119, 109, 111, 132, 124, 126, 116, 140, 109, 132, 123, 131, 116, 120, 123, 130, 117, 121, 103, 120, 102, 133, 119, 111, 125, 118, 118, 130, 127, 101, 122, 112, 117, 128, 102, 102, 142, 129, 104, 124, 120, 122, 120, 124, 120, 122, 130, 114, 133, 123, 117, 122, 121, 126, 117, 122, 122, 121, 120, 120, 128, 127, 125, 124, 120, 117, 124, 116, 126, 124, 121, 120, 120, 129, 120, 131, 121, 121, 118, 118, 133, 118, 124, 123, 120, 126, 124, 120, 121, 126, 122, 120, 121, 119, 121, 122, 116, 121, 119, 122, 121, 125, 121, 138, 117, 120, 120, 119, 121, 124, 120, 132, 139, 122, 123, 126, 120, 123, 125, 116, 120, 122, 128, 114, 116, 126, 122, 112, 125, 123, 122, 116, 123, 123, 116, 120, 118, 120, 124, 120, 128, 124, 117, 116, 120, 114, 136, 120, 123, 123, 122, 122, 122, 121, 122, 120, 115, 129, 119, 126, 124, 121, 124, 131, 121, 122, 132, 134, 119, 117, 118, 123, 124, 121, 127, 121, 120, 126, 109, 123, 120, 116, 119, 126, 123, 121, 129, 114, 123, 119, 121, 120, 122, 105, 126, 101, 106, 116, 118, 125, 126, 126, 118, 133, 120, 114, 123, 123, 122, 122, 118, 119, 123, 118, 116, 118, 123, 123, 121, 125, 121, 122, 115, 123, 117, 125, 119, 
125, 119, 129, 119, 130, 115, 122, 128, 115, 124, 118, 113, 124, 127, 114, 129, 125, 119, 125, 113, 119, 121, 131, 124, 104, 120, 120, 121, 113, 121, 128, 118, 119, 125, 129, 124, 128, 126, 113, 119, 114, 124, 121, 123, 120, 112, 117, 124, 113, 124, 118, 123, 125, 122, 117, 121, 121, 103, 128, 113, 121, 124, 123, 126, 123, 113, 121, 127, 128, 124, 45, 135, 131, 122, 125, 121, 125, 115, 121, 119, 148, 127, 122, 116, 141, 123, 121, 119, 121, 122, 157, 134, 137, 119, 109, 121, 121, 120, 137, 122, 119, 127, 118, 120, 118, 109, 124, 126, 117, 124, 106, 127, 117, 109, 100, 121, 120, 124, 119, 125, 118, 125, 109, 119, 94, 120, 125, 122, 116, 118, 123, 121, 117, 116, 122, 99, 126, 124, 149, 122, 119, 121, 122, 118, 124, 114, 119, 119, 116, 113, 120, 121, 120, 116, 115, 121, 126, 121, 124, 121, 120, 124, 123, 154, 129, 122, 123, 164, 118, 121, 114, 134, 121, 119, 121, 126, 120, 119, 123, 127, 119, 123, 118, 126, 119, 124, 125, 121, 120, 115, 110, 111, 122, 120, 122, 122, 121, 123, 126, 121, 123, 120, 127, 122, 119, 128, 145, 126, 121, 114, 109, 115, 121, 120, 104, 133, 126, 108, 123, 123, 120, 117, 125, 128, 121, 121, 117, 126, 155, 165, 128, 119, 131, 118, 119, 149, 118, 128, 121, 126, 122, 119, 125, 120, 103, 107, 131, 119, 120, 126, 123, 125, 118, 125, 123, 140, 131, 103, 118, 119, 117, 74, 77, 123, 110, 137, 126, 125, 118, 116, 108, 116, 122, 120, 123, 133, 116, 120, 153, 129, 123, 120, 128, 119, 112, 133, 121, 126, 126, 126, 128, 125, 118, 152, 116, 120, 127, 110, 121, 121, 126, 124, 117, 115, 91, 122, 128, 124, 114, 114, 90, 116, 102, 125, 119, 114, 124, 121, 109, 120, 122, 181, 123, 121, 111, 138, 119, 127, 125, 127, 125, 122, 125, 120, 139, 145, 124, 123, 118, 120, 125, 118, 121, 117, 131, 105, 114, 122, 123, 121, 138, 116, 145, 133, 119, 134, 122, 129, 120, 126, 122, 129, 123, 134, 135, 124, 117, 123, 130, 126, 124, 113, 121, 130, 125, 123, 122, 132, 123, 129, 111, 139, 120, 113, 130, 119, 111, 134, 107, 108, 116, 115, 121, 131, 127, 123, 119, 114, 117, 133, 111, 123, 125, 133, 134, 123, 128, 116, 136, 125, 123, 122, 135, 126, 129, 133, 116, 124, 123, 115, 117, 134, 123, 118, 126, 121, 110, 117, 131, 121, 131, 112, 113, 128, 129, 123, 140, 121, 113, 133, 113, 142, 120, 134, 119, 120, 111, 124, 117, 115, 130, 128, 111, 149, 126, 116, 121, 129, 115, 126, 116, 148, 115, 127, 119, 121, 116, 125, 120, 116, 129, 121, 112, 137, 125, 108, 124, 128, 122, 113, 122, 110, 118, 93, 123, 107, 116, 114, 136, 130, 121, 103, 114, 130, 117, 117, 122, 112, 117, 120, 102, 130, 111, 120, 123, 118, 118, 113, 117, 96, 103, 120, 126, 107, 131, 123, 130, 121, 126, 120, 105, 119, 122, 97, 123, 127, 120, 125, 144, 120, 117, 111, 114, 117, 124, 131, 122, 121, 108, 122, 105, 115, 124, 124, 121, 124, 123, 123, 146, 98, 119, 127, 108, 133, 127, 130, 162, 128, 114, 122, 108, 130, 128, 121, 110, 124, 105, 110, 135, 128, 121, 114, 141, 120, 130, 128, 120, 115, 125, 117, 117, 109, 127, 125, 108, 129, 123, 123, 119, 127, 123, 128, 137, 115, 117, 119, 127, 122, 118, 125, 127, 128, 136, 118, 158, 133, 162, 116, 125, 120, 123, 126, 121, 130, 124, 122, 119, 116, 123, 123, 118, 118, 122, 144, 137, 121, 55, 124, 121, 124, 124, 123, 106, 86, 134, 123, 138, 121, 119, 110, 126, 121, 126, 127, 122, 123, 119, 86, 126, 123, 75, 127, 120, 120, 121, 109, 117, 116, 121, 126, 120, 131, 121, 120, 129, 111, 132, 119, 117, 123, 128, 165, 125, 98, 127, 119, 115, 122, 125, 131, 115, 121, 121, 112, 122, 123, 124, 121, 124, 122, 122, 112, 129, 114, 125, 109, 115, 71, 120, 126, 125, 116, 123, 124, 135, 120, 113, 123, 121, 116, 127, 125, 122, 124, 
123, 179, 107, 141, 116, 119, 129, 120, 124, 116, 121, 103, 125, 125, 121, 126, 118, 135, 122, 123, 121, 125, 120, 119, 131, 130, 120, 121, 107, 127, 108, 120, 134, 123, 123, 92, 122, 119, 112, 121, 120, 124, 123, 132, 125, 127, 130, 113, 120, 128, 128, 118, 124, 105, 120, 122, 122, 121, 124, 128, 122, 110, 127, 122, 122, 121, 103, 117, 121, 114, 121, 129, 124, 122, 124, 118, 120, 156, 116, 121, 119, 124, 119, 124, 122, 118, 131, 108, 119, 120, 117, 123, 130, 123, 126, 114, 123, 130, 126, 104, 128, 127, 120, 114, 117, 121, 117, 135, 127, 125, 117, 122, 116, 127, 115, 110, 91, 132, 127, 131, 129, 92, 127, 126, 132, 127, 116, 124, 122, 122, 127, 135, 118, 125, 129, 107, 135, 120, 130, 124, 131, 98, 122, 116, 110, 123, 125, 107, 118, 127, 116, 121, 114, 123, 110, 122, 125, 119, 120, 111, 121, 129, 127, 124, 115, 115, 114, 120, 115, 121, 127, 127, 131, 121, 124, 125, 125, 126, 136, 121, 122, 126, 109, 119, 114, 127, 106, 131, 124, 126, 127, 132, 121, 122, 132, 133, 134, 125, 117, 122, 119, 123, 125, 119, 124, 121, 113, 106, 125, 122, 122, 127, 136, 122, 127, 122, 107, 128, 129, 130, 127, 139, 122, 117, 142, 128, 127, 118, 90, 135, 131, 132, 124, 130, 132, 121, 127, 124, 116, 120, 127, 127, 121, 122, 114, 128, 121, 124, 122, 123, 93, 140, 119, 127, 134, 122, 123, 127, 125, 122, 116, 139, 124, 123, 125, 123, 126, 126, 123, 119, 125, 119, 126, 125, 125, 124, 143, 123, 143, 131, 123, 124, 123, 125, 128, 123, 125, 126, 128, 126, 124, 133, 122, 121, 134, 126, 132, 126, 121, 128, 124, 133, 126, 125, 119, 125, 128, 131, 120, 121, 123, 121, 118, 125, 115, 122, 122, 114, 125, 122, 123, 127, 120, 119, 128, 127, 123, 134, 126, 122, 131, 130, 123, 125, 141, 133, 122, 121, 125, 125, 128, 130, 119, 139, 110, 124, 118, 124, 127, 111, 124, 119, 114, 121, 125, 128, 119, 123, 124, 125, 125, 132, 129, 132, 124, 130, 118, 143, 125, 131, 130, 123, 122, 128, 124, 124, 126, 123, 116, 123, 129, 119, 120, 113, 127, 113, 120, 119, 106, 118, 121, 127, 130, 126, 119, 115, 131, 121, 120, 129, 116, 123, 123, 127, 124, 119, 124, 118, 132, 120, 107, 120, 111, 115, 115, 122, 125, 122, 119, 103, 116, 118, 123, 119, 123, 137, 122, 115, 117, 119, 125, 122, 125, 122, 125, 121, 123, 131, 121, 118, 123, 130, 128, 129, 122, 125, 118, 119, 124, 125, 116, 108, 131, 118, 121, 123, 117, 118, 131, 119, 117, 117, 126, 114, 124, 127, 124, 120, 114, 125, 126, 119, 126, 129, 130, 116, 127, 132, 126, 117, 138, 123, 114, 123, 124, 110, 123, 115, 117, 124, 116, 119, 122, 117, 125, 123, 113, 118, 112, 125, 128, 122, 112, 107, 124, 107, 127, 133, 117, 158, 121, 123, 117, 126, 125, 122, 126, 128, 138, 120, 122, 120, 89, 124, 120, 122, 120, 121, 118, 119, 123, 124, 116, 133, 120, 122, 125, 123, 121, 125, 125, 124, 119, 115, 126, 124, 121, 123, 125, 86, 119, 106, 117, 124, 124, 120, 123, 121, 149, 120, 154, 124, 99, 125, 122, 123, 121, 122, 120, 125, 120, 123, 121, 121, 121, 123, 126, 126, 125, 122, 121, 119, 118, 126, 122, 125, 123, 92, 120, 123, 120, 148, 124, 119, 121, 121, 121, 119, 126, 128, 126, 215, 127, 124, 121, 128, 122, 125, 128, 126, 123, 125, 124, 123, 122, 123, 126, 124, 120, 117, 125, 117, 120, 115, 123, 119, 123, 124, 118, 128, 115, 120, 122, 125, 124, 116, 121, 122, 124, 118, 126, 124, 136, 111, 132, 116, 116, 125, 124, 119, 118, 130, 121, 130, 103, 95, 121, 127, 121, 125, 122, 121, 120, 129, 111, 128, 126, 150, 122, 122, 131, 132, 123, 155, 172, 123, 121, 124, 125, 124, 121, 116, 120, 120, 121, 123, 117, 128, 123, 138, 115, 147, 136, 119, 119, 121, 121, 119, 138, 125, 132, 123, 106, 140, 120, 127, 123, 129, 124, 116, 106, 124, 122, 
127, 123, 123, 111, 116, 122, 123, 118, 126, 132, 112, 113, 122, 115, 123, 131, 122, 119, 147, 110, 123, 111, 130, 118, 119, 108, 122, 122, 124, 102, 110, 131, 117, 131, 120, 107, 124, 113, 121, 107, 125, 122, 126, 114, 117, 117, 113, 120, 112, 117, 108, 133, 110, 124, 122, 123, 119, 119, 128, 123, 117, 120, 114, 123, 122, 125, 124, 120, 126, 123, 123, 107, 116, 142, 110, 89, 119, 76, 119, 101, 127, 117, 118, 129, 120, 126, 133, 118, 121, 120, 123, 132, 108, 142, 122, 150, 126, 123, 125, 119, 119, 131, 111, 105, 124, 123, 122, 115, 109, 114, 123, 129, 120, 126, 116, 115, 127, 123, 120, 91, 126, 121, 115, 122, 118, 124, 124, 123, 118, 128, 119, 126, 118, 117, 122, 126, 118, 120, 122, 125, 54, 120, 113, 127, 133, 114, 126, 118, 121, 125, 120, 121, 120, 128, 120, 127, 118, 120, 127, 123, 118, 121, 122, 122, 115, 120, 171, 126, 124, 124, 117, 129, 109, 128, 116, 114, 127, 120, 122, 132, 112, 110, 123, 110, 110, 123, 131, 124, 120, 120, 255, 113, 117, 130, 117, 105, 104, 129, 115, 120, 123, 126, 226, 97, 117, 93, 110, 104, 129, 132, 128, 121, 132, 126, 121, 133, 111, 127, 114, 113, 116, 103, 116, 108, 123, 111, 127, 108, 121, 111, 123, 120, 105, 122, 119, 125, 125, 253, 134, 118, 114, 124, 127, 139, 110, 119, 106, 121, 117, 134, 122, 117, 122, 117, 119, 119, 108, 130, 125, 108, 124, 115, 128, 129, 120, 123, 109, 120, 108, 113, 121, 110, 119, 127, 109, 126, 125, 124, 146, 118, 123, 118, 144, 121, 123, 113, 116, 114, 107, 135, 113, 102, 113, 121, 114, 130, 127, 112, 123, 109, 110, 122, 127, 115, 114, 107, 109, 123, 125, 111, 112, 122, 97, 118, 112, 134, 90, 126, 123, 126, 130, 123, 111, 116, 124, 112, 123, 121, 137, 127, 125, 116, 136, 116, 130, 127, 100, 127, 129, 104, 116, 117, 114, 121, 119, 115, 145, 126, 127, 130, 134, 114, 98, 123, 116, 121, 69, 118, 114, 126, 126, 116, 124, 117, 138, 126, 127, 80, 119, 127, 120, 113, 120, 126, 125, 121, 122, 122, 124, 115, 129, 119, 118, 121, 133, 124, 129, 123, 103, 123, 126, 137, 125, 128, 123, 121, 121, 126, 142, 111, 118, 122, 130, 122, 125, 132, 121, 123, 111, 117, 129, 126, 120, 124, 130, 123, 118, 127, 130, 129, 115, 126, 116, 133, 116, 113, 120, 118, 120, 117, 118, 121, 142, 129, 122, 118, 118, 113, 122, 114, 120, 126, 133, 127, 117, 119, 119, 145, 130, 117, 118, 114, 123, 113, 143, 122, 160, 121, 120, 122, 132, 119, 138, 124, 134, 124, 125, 111, 123, 115, 123, 156, 117, 105, 124, 123, 116, 115, 143, 126, 104, 125, 122, 119, 115, 111, 121, 116, 120, 116, 122, 117, 107, 122, 123, 126, 115, 132, 123, 124, 125, 127, 112, 118, 165, 117, 115, 114, 128, 118, 115, 131, 125, 107, 122, 111, 128, 127, 123, 190, 147, 112, 111, 120, 125, 139, 110, 112, 111, 104, 119, 136, 118, 107, 122, 127, 126, 109, 128, 121, 122, 133, 116, 153, 110, 124, 119, 123, 127, 117, 134, 119, 150, 126, 123, 109, 116, 120, 119, 125, 127, 126, 123, 89, 122, 110, 122, 121, 119, 123, 120, 125, 125, 140, 117, 120, 114, 127, 121, 125, 123, 123, 119, 124, 127, 121, 120, 122, 122, 135, 122, 120, 122, 121, 122, 121, 120, 118, 113, 122, 126, 125, 118, 109, 128, 136, 124, 116, 121, 125, 126, 123, 121, 108, 119, 102, 120, 121, 122, 125, 127, 112, 122, 118, 126, 121, 120, 119, 104, 122, 122, 88, 123, 121, 121, 123, 121, 122, 122, 116, 122, 123, 124, 123, 123, 116, 121, 119, 120, 124, 122, 119, 134, 120, 122, 120, 107, 126, 122, 126, 135, 123, 123, 132, 123, 120, 120, 119, 121, 120, 121, 121, 130, 118, 121, 122, 120, 124, 123, 127, 129, 124, 125, 115, 127, 118, 120, 123, 123, 121, 99, 123, 121, 125, 95, 123, 121, 119, 119, 127, 109, 123, 117, 114, 123, 126, 122, 126, 112, 117, 128, 124, 122, 123, 
122, 115, 123, 120, 122, 123, 114, 123, 116, 122, 102, 127, 116, 111, 120, 115, 124, 126, 121, 119, 122, 119, 117, 121, 121, 120, 113, 123, 122, 126, 120, 134, 114, 123, 121, 118, 121, 121, 123, 136, 113, 123, 122, 108, 118, 123, 118, 131, 129, 110, 124, 126, 122, 118, 125, 121, 127, 119, 124, 121, 130, 122, 121, 122, 121, 116, 120, 122, 128, 128, 128, 120, 121, 116, 125, 129, 123, 122, 125, 119, 126, 112, 125, 121, 117, 110, 128, 115, 124, 120, 122, 124, 112, 119, 120, 116, 118, 122, 122, 121, 122, 119, 122, 123, 124, 97, 127, 118, 120, 119, 125, 127, 124, 123, 125, 110, 121, 120, 118, 126, 120, 121, 122, 126, 122, 127, 128, 118, 128, 119, 126, 126, 124, 124, 125, 123, 129, 120, 124, 123, 126, 122, 119, 120, 131, 124, 114, 158, 125, 115, 122, 122, 124, 120, 129, 126, 118, 119, 124, 123, 115, 118, 121, 126, 121, 124, 119, 124, 126, 126, 126, 125, 119, 87, 121, 122, 124, 118, 116, 123, 127, 112, 120, 122, 122, 121, 123, 124, 133, 122, 122, 114, 120, 123, 97, 121, 123, 117, 112, 127, 124, 117, 120, 118, 122, 128, 130, 123, 122, 120, 121, 120, 118, 121, 135, 126, 121, 121, 124, 123, 125, 123, 116, 113, 120, 120, 137, 128, 122, 121, 123, 120, 131, 125, 123, 120, 113, 121, 114, 114, 121, 93, 127, 117, 130, 137, 119, 120, 122, 131, 126, 117, 131, 126, 134, 124, 132, 125, 131, 131, 116, 133, 119, 103, 121, 118, 117, 125, 118, 134, 110, 122, 125, 118, 121, 123, 124, 123, 143, 132, 119, 114, 125, 122, 114, 121, 124, 128, 109, 126, 139, 119, 126, 123, 121, 134, 127, 123, 122, 119, 120, 121, 125, 111, 110, 146, 117, 122, 122, 121, 127, 129, 121, 121, 124, 128, 122, 126, 126, 123, 121, 120, 121, 128, 124, 128, 124, 127, 116, 135, 126, 119, 117, 136, 128, 119, 135, 129, 106, 133, 118, 117, 118, 116, 121, 131, 125, 127, 125, 120, 126, 115, 124, 115, 128, 117, 105, 123, 116, 133, 120, 124, 119, 122, 121, 123, 122, 125, 125, 120, 120, 123, 124, 120, 114, 124, 123, 121, 122, 123, 122, 124, 121, 93, 115, 101, 122, 134, 124, 123, 122, 121, 126, 121, 122, 123, 117, 126, 123, 122, 118, 119, 132, 111, 121, 131, 121, 124, 121, 123, 129, 123, 119, 119, 126, 122, 120, 123, 125, 126, 124, 126, 119, 123, 121, 124, 120, 121, 122, 138, 128, 124, 123, 121, 121, 121, 121, 121, 120, 126, 122, 121, 120, 121, 122, 121, 120, 115, 120, 125, 112, 122, 129, 117, 125, 122, 122, 121, 123, 123, 125, 124, 123, 118, 121, 124, 123, 122, 126, 119, 125, 126, 120, 122, 118, 120, 136, 123, 126, 115, 117, 118, 116, 120, 120, 131, 123, 125, 137, 125, 123, 117, 128, 113, 111, 122, 111, 126, 124, 117, 123, 122, 126, 124, 112, 121, 117, 131, 112, 118, 108, 123, 122, 119, 106, 120, 133, 107, 106, 124, 142, 124, 118, 114, 118, 102, 116, 122, 115, 119, 125, 122, 123, 124, 123, 119, 154, 116, 108, 122, 129, 121, 115, 116, 112, 103, 122, 120, 126, 135, 122, 139, 120, 144, 124, 128, 124, 124, 114, 121, 126, 118, 119, 100, 127, 123, 120, 118, 113, 122, 125, 120, 114, 127, 119, 112, 123, 145, 121, 120, 127, 132, 116, 123, 129, 126, 122, 111, 131, 125, 120, 102, 126, 125, 120, 113, 151, 142, 118, 103, 124, 115, 120, 111, 122, 116, 123, 120, 118, 104, 126, 118, 106, 114, 115, 120, 118, 117, 174, 117, 120, 124, 121, 119, 130, 125, 116, 127, 128, 119, 125, 135, 119, 122, 113, 126, 122, 133, 124, 120, 133, 125, 114, 126, 120, 125, 121, 122, 121, 121, 118, 122, 128, 117, 120, 121, 120, 134, 98, 126, 119, 126, 118, 121, 122, 120, 122, 126, 121, 119, 122, 135, 123, 122, 117, 126, 120, 120, 115, 123, 124, 123, 125, 121, 118, 119, 120, 123, 123, 119, 110, 125, 110, 127, 121, 119, 97, 122, 122, 123, 137, 124, 119, 119, 122, 122, 126, 122, 118, 121, 107, 124, 
121, 117, 114, 123, 120, 114, 117, 121, 119, 116, 121, 121, 119, 120, 133, 121, 104, 120, 110, 122, 127, 118, 118, 122, 117, 120, 116, 124, 124, 122, 118, 108, 117, 122, 123, 124, 128, 104, 96, 111, 134, 130, 117, 124, 120, 120, 106, 122, 130, 117, 120, 113, 125, 128, 105, 119, 117, 124, 115, 120, 117, 111, 121, 123, 154, 132, 132, 110, 116, 123, 113, 127, 126, 120, 124, 127, 127, 126, 113, 124, 140, 136, 121, 120, 133, 123, 115, 116, 117, 93, 111, 122, 122, 124, 114, 116, 109, 133, 118, 126, 117, 123, 127, 111, 110, 121, 132, 109, 114, 127, 127, 153, 119, 121, 132, 122, 126, 133, 139, 131, 137, 128, 121, 134, 120, 124, 119, 112, 125, 112, 125, 111, 125, 116, 125, 127, 126, 118, 122, 120, 132, 111, 119, 115, 116, 117, 130, 111, 123, 98, 119, 120, 125, 113, 121, 124, 122, 122, 137, 121, 117, 119, 122, 121, 104, 119, 113, 109, 134, 121, 126, 114, 125, 120, 122, 117, 113, 123, 119, 117, 130, 120, 123, 122, 124, 121, 127, 117, 110, 120, 117, 120, 122, 117, 107, 123, 113, 122, 115, 121, 118, 121, 118, 119, 119, 118, 122, 121, 125, 127, 129, 128, 120, 124, 110, 122, 118, 124, 116, 124, 120, 121, 126, 146, 101, 121, 123, 107, 121, 120, 122, 125, 126, 120, 120, 116, 122, 122, 118, 123, 122, 133, 114, 120, 119, 125, 121, 120, 128, 125, 115, 122, 116, 121, 125, 129, 120, 123, 120, 117, 128, 122, 116, 114, 121, 115, 121, 116, 120, 114, 120, 123, 123, 119, 119, 119, 120, 120, 119, 114, 123, 126, 113, 116, 112, 124, 114, 140, 126, 117, 121, 119, 128, 127, 117, 111, 122, 122, 129, 123, 123, 123, 116, 123, 120, 118, 110, 127, 132, 122, 124, 119, 121, 120, 128, 123, 124, 118, 115, 128, 126, 103, 123, 138, 118, 114, 105, 113, 117, 125, 121, 123, 120, 122, 121, 123, 115, 94, 121, 122, 129, 116, 123, 122, 106, 133, 124, 120, 121, 117, 126, 120, 102, 105, 125, 128, 120, 121, 116, 109, 99, 116, 120, 127, 119, 120, 127, 133, 123, 114, 125, 122, 124, 122, 97, 133, 179, 133, 120, 131, 118, 127, 126, 123, 114, 120, 124, 120, 116, 127, 126, 128, 125, 116, 158, 129, 96, 126, 120, 128, 123, 129, 125, 105, 123, 133, 119, 118, 124, 123, 116, 121, 135, 121, 117, 125, 119, 126, 121, 125, 123, 149, 95, 123, 128, 131, 115, 131, 126, 88, 100, 126, 109, 188, 122, 122, 126, 116, 121, 127, 120, 122, 114, 116, 127, 119, 120, 137, 127, 113, 124, 112, 121, 127, 121, 123, 128, 128, 118, 118, 124, 123, 114, 113, 114, 125, 121, 118, 128, 116, 125, 124, 124, 122, 113, 121, 119, 125, 130, 124, 126, 122, 122, 120, 117, 120, 129, 83, 114, 114, 114, 112, 122, 120, 115, 122, 120, 122, 132, 122, 127, 100, 184, 121, 105, 124, 120, 129, 120, 125, 122, 126, 123, 139, 118, 128, 113, 131, 130, 118, 117, 127, 136, 112, 116, 130, 116, 127, 125, 119, 122, 117, 117, 143, 117, 127, 128, 121, 115, 135, 107, 117, 98, 126, 105, 118, 117, 122, 128, 128, 111, 118, 127, 132, 141, 126, 116, 113, 143, 116, 109, 117, 120, 98, 129, 120, 129, 117, 122, 122, 113, 124, 115, 106, 115, 129, 134, 106, 134, 109, 114, 120, 112, 126, 118, 124, 126, 123, 131, 122, 117, 117, 113, 118, 105, 117, 138, 129, 123, 115, 137, 150, 115, 123, 121, 139, 118, 117, 112, 133, 122, 120, 117, 119, 118, 117, 113, 123, 130, 116, 112, 127, 117, 110, 123, 122, 101, 123, 126, 113, 118, 123, 121, 149, 121, 123, 135, 138, 125, 144, 114, 123, 121, 114, 113, 115, 106, 117, 123, 125, 113, 117, 128, 120, 117, 150, 123, 121, 120, 123, 122, 124, 105, 121, 120, 138, 123, 116, 108, 111, 124, 123};
@@ -676,6 +675,7 @@ void CreateModel(Model *model) {
model->setOperandValue(op84, op84_init, sizeof(uint8_t) * 1025024);
static int32_t op87_init[] = {1, 1001};
model->setOperandValue(op87, op87_init, sizeof(int32_t) * 2);
+ model->addOperation(ANEURALNETWORKS_CONV_2D, {op88, op2, op1, b104, b105, b106, b107}, {op0});
model->addOperation(ANEURALNETWORKS_DEPTHWISE_CONV_2D, {op0, op29, op28, b108, b109, b110, b111, b112}, {op27});
model->addOperation(ANEURALNETWORKS_CONV_2D, {op27, op32, op31, b113, b114, b115, b116}, {op30});
model->addOperation(ANEURALNETWORKS_DEPTHWISE_CONV_2D, {op30, op35, op34, b117, b118, b119, b120, b121}, {op33});
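An aside on the hunk above, not part of the patch itself: the added line gives the model a first convolution so that op0, which the following DEPTHWISE_CONV_2D consumes, is actually produced by an operation. The reading below assumes the usual NNAPI implicit-padding operand orders; it is my gloss, not something stated in the diff.

// Hedged reading of the operand lists, assuming the implicit-padding forms:
// CONV_2D, 7 inputs:           {input, filter, bias, padding_scheme, stride_w, stride_h, fuse_code}
//   i.e. {op88, op2, op1, b104, b105, b106, b107} -> {op0}
// DEPTHWISE_CONV_2D, 8 inputs: {input, filter, bias, padding_scheme, stride_w, stride_h, depth_multiplier, fuse_code}
//   i.e. {op0, op29, op28, b108, b109, b110, b111, b112} -> {op27}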
diff --git a/nn/runtime/test/generated/models/pad_float_1_relaxed.model.cpp b/nn/runtime/test/generated/models/pad_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..79e94e52a
--- /dev/null
+++ b/nn/runtime/test/generated/models/pad_float_1_relaxed.model.cpp
@@ -0,0 +1,26 @@
+// Generated file (from: pad_float_1_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 3, 1});
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 4, 7, 1});
+ OperandType type1(Type::TENSOR_INT32, {4, 2});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t op2_init[] = {0, 0, 0, 2, 1, 3, 0, 0};
+ model->setOperandValue(op2, op2_init, sizeof(int32_t) * 8);
+ model->addOperation(ANEURALNETWORKS_PAD, {op1, op2}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/pad_relaxed.model.cpp b/nn/runtime/test/generated/models/pad_relaxed.model.cpp
new file mode 100644
index 000000000..bfdd87fc1
--- /dev/null
+++ b/nn/runtime/test/generated/models/pad_relaxed.model.cpp
@@ -0,0 +1,26 @@
+// Generated file (from: pad_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 1});
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 4, 4, 1});
+ OperandType type1(Type::TENSOR_INT32, {4, 2});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t op2_init[] = {0, 0, 1, 1, 1, 1, 0, 0};
+ model->setOperandValue(op2, op2_init, sizeof(int32_t) * 8);
+ model->addOperation(ANEURALNETWORKS_PAD, {op1, op2}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
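An aside for readers of this patch (not part of the generated files): the two PAD models above share one shape rule. The {4, 2} paddings operand holds a (before, after) pair per input dimension, the output operand is the input grown by those pads, and relaxComputationFloat32toFloat16(true) is what makes these the _relaxed variants, allowing drivers to evaluate the TENSOR_FLOAT32 math at float16 range and precision. A minimal sketch of the shape arithmetic, assuming standard PAD semantics; padOutputShape is a hypothetical helper, not NNAPI code:

#include <cstdint>
#include <vector>

// Output dim i = input dim i + before_i + after_i; paddings is stored
// row-major as {before_0, after_0, before_1, after_1, ...}.
std::vector<uint32_t> padOutputShape(const std::vector<uint32_t>& inDims,
                                     const std::vector<int32_t>& paddings) {
    std::vector<uint32_t> outDims(inDims.size());
    for (size_t i = 0; i < inDims.size(); ++i) {
        outDims[i] = inDims[i] + paddings[2 * i] + paddings[2 * i + 1];
    }
    return outDims;
}
// pad_float_1_relaxed: padOutputShape({1, 2, 3, 1}, {0, 0, 0, 2, 1, 3, 0, 0}) == {1, 4, 7, 1}
// pad_relaxed:         padOutputShape({1, 2, 2, 1}, {0, 0, 1, 1, 1, 1, 0, 0}) == {1, 4, 4, 1}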
diff --git a/nn/runtime/test/generated/models/relu_float_2_relaxed.model.cpp b/nn/runtime/test/generated/models/relu_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..ac0c412be
--- /dev/null
+++ b/nn/runtime/test/generated/models/relu_float_2_relaxed.model.cpp
@@ -0,0 +1,21 @@
+// Generated file (from: relu_float_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 64, 40, 2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto output = model->addOperand(&type0);
+ // Phase 2, operations
+ model->addOperation(ANEURALNETWORKS_RELU, {input}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/resize_bilinear_2_relaxed.model.cpp b/nn/runtime/test/generated/models/resize_bilinear_2_relaxed.model.cpp
new file mode 100644
index 000000000..4d196b023
--- /dev/null
+++ b/nn/runtime/test/generated/models/resize_bilinear_2_relaxed.model.cpp
@@ -0,0 +1,29 @@
+// Generated file (from: resize_bilinear_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {1, 3, 3, 2});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto height = model->addOperand(&type2);
+ auto width = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t height_init[] = {3};
+ model->setOperandValue(height, height_init, sizeof(int32_t) * 1);
+ static int32_t width_init[] = {3};
+ model->setOperandValue(width, width_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_RESIZE_BILINEAR, {op1, height, width}, {op2});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1},
+ {op2});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
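A short aside on the model above, again outside the generated file: RESIZE_BILINEAR takes the target height and width as scalar INT32 operands and leaves batch and depth untouched, so the declared shapes line up as:

// {1, 2, 2, 2} input, height = 3, width = 3  ->  {1, 3, 3, 2} (the type1 output operand above)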
diff --git a/nn/runtime/test/generated/models/space_to_batch_float_1_relaxed.model.cpp b/nn/runtime/test/generated/models/space_to_batch_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..dfd2d6035
--- /dev/null
+++ b/nn/runtime/test/generated/models/space_to_batch_float_1_relaxed.model.cpp
@@ -0,0 +1,30 @@
+// Generated file (from: space_to_batch_float_1_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 4, 4, 1});
+ OperandType type3(Type::TENSOR_FLOAT32, {4, 2, 2, 1});
+ OperandType type2(Type::TENSOR_INT32, {2, 2});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto block_size = model->addOperand(&type1);
+ auto paddings = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t block_size_init[] = {2, 2};
+ model->setOperandValue(block_size, block_size_init, sizeof(int32_t) * 2);
+ static int32_t paddings_init[] = {0, 0, 0, 0};
+ model->setOperandValue(paddings, paddings_init, sizeof(int32_t) * 4);
+ model->addOperation(ANEURALNETWORKS_SPACE_TO_BATCH_ND, {input, block_size, paddings}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/space_to_batch_float_2_relaxed.model.cpp b/nn/runtime/test/generated/models/space_to_batch_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..09c98af93
--- /dev/null
+++ b/nn/runtime/test/generated/models/space_to_batch_float_2_relaxed.model.cpp
@@ -0,0 +1,30 @@
+// Generated file (from: space_to_batch_float_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 5, 2, 1});
+ OperandType type3(Type::TENSOR_FLOAT32, {6, 2, 2, 1});
+ OperandType type2(Type::TENSOR_INT32, {2, 2});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto block_size = model->addOperand(&type1);
+ auto paddings = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t block_size_init[] = {3, 2};
+ model->setOperandValue(block_size, block_size_init, sizeof(int32_t) * 2);
+ static int32_t paddings_init[] = {1, 0, 2, 0};
+ model->setOperandValue(paddings, paddings_init, sizeof(int32_t) * 4);
+ model->addOperation(ANEURALNETWORKS_SPACE_TO_BATCH_ND, {input, block_size, paddings}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/space_to_batch_float_3_relaxed.model.cpp b/nn/runtime/test/generated/models/space_to_batch_float_3_relaxed.model.cpp
new file mode 100644
index 000000000..7c75a611d
--- /dev/null
+++ b/nn/runtime/test/generated/models/space_to_batch_float_3_relaxed.model.cpp
@@ -0,0 +1,30 @@
+// Generated file (from: space_to_batch_float_3_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 4, 2, 1});
+ OperandType type3(Type::TENSOR_FLOAT32, {6, 2, 4, 1});
+ OperandType type2(Type::TENSOR_INT32, {2, 2});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto block_size = model->addOperand(&type1);
+ auto paddings = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t block_size_init[] = {3, 2};
+ model->setOperandValue(block_size, block_size_init, sizeof(int32_t) * 2);
+ static int32_t paddings_init[] = {1, 1, 2, 4};
+ model->setOperandValue(paddings, paddings_init, sizeof(int32_t) * 4);
+ model->addOperation(ANEURALNETWORKS_SPACE_TO_BATCH_ND, {input, block_size, paddings}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
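Another aside, not part of the generated files: the three SPACE_TO_BATCH_ND models above, and the plain space_to_batch_relaxed one that follows, use one shape rule for NHWC input. Each spatial dimension is padded and then divided by its block size, the batch is multiplied by the product of the block sizes, and channels pass through. A minimal sketch under those standard semantics; spaceToBatchOutputShape is a hypothetical helper:

#include <cstdint>
#include <vector>

// inDims = {N, H, W, C}, block = {blockH, blockW},
// paddings = {padBeforeH, padAfterH, padBeforeW, padAfterW} (the row-major {2, 2} tensor).
std::vector<uint32_t> spaceToBatchOutputShape(const std::vector<uint32_t>& inDims,
                                              const std::vector<int32_t>& block,
                                              const std::vector<int32_t>& paddings) {
    uint32_t paddedH = inDims[1] + paddings[0] + paddings[1];
    uint32_t paddedW = inDims[2] + paddings[2] + paddings[3];
    return {inDims[0] * block[0] * block[1], paddedH / block[0], paddedW / block[1], inDims[3]};
}
// space_to_batch_float_1_relaxed: {1, 4, 4, 1}, block {2, 2}, no padding         -> {4, 2, 2, 1}
// space_to_batch_float_2_relaxed: {1, 5, 2, 1}, block {3, 2}, paddings {1, 0, 2, 0} -> {6, 2, 2, 1}
// space_to_batch_float_3_relaxed: {1, 4, 2, 1}, block {3, 2}, paddings {1, 1, 2, 4} -> {6, 2, 4, 1}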
diff --git a/nn/runtime/test/generated/models/space_to_batch_relaxed.model.cpp b/nn/runtime/test/generated/models/space_to_batch_relaxed.model.cpp
new file mode 100644
index 000000000..fa24e8f91
--- /dev/null
+++ b/nn/runtime/test/generated/models/space_to_batch_relaxed.model.cpp
@@ -0,0 +1,30 @@
+// Generated file (from: space_to_batch_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 2});
+ OperandType type3(Type::TENSOR_FLOAT32, {4, 1, 1, 2});
+ OperandType type2(Type::TENSOR_INT32, {2, 2});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto block_size = model->addOperand(&type1);
+ auto paddings = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t block_size_init[] = {2, 2};
+ model->setOperandValue(block_size, block_size_init, sizeof(int32_t) * 2);
+ static int32_t paddings_init[] = {0, 0, 0, 0};
+ model->setOperandValue(paddings, paddings_init, sizeof(int32_t) * 4);
+ model->addOperation(ANEURALNETWORKS_SPACE_TO_BATCH_ND, {input, block_size, paddings}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/squeeze_float_1_relaxed.model.cpp b/nn/runtime/test/generated/models/squeeze_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..57ba0b31a
--- /dev/null
+++ b/nn/runtime/test/generated/models/squeeze_float_1_relaxed.model.cpp
@@ -0,0 +1,26 @@
+// Generated file (from: squeeze_float_1_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 24, 1});
+ OperandType type2(Type::TENSOR_FLOAT32, {1, 24});
+ OperandType type1(Type::TENSOR_INT32, {1});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto squeezeDims = model->addOperand(&type1);
+ auto output = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t squeezeDims_init[] = {2};
+ model->setOperandValue(squeezeDims, squeezeDims_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_SQUEEZE, {input, squeezeDims}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/squeeze_relaxed.model.cpp b/nn/runtime/test/generated/models/squeeze_relaxed.model.cpp
new file mode 100644
index 000000000..9a4108944
--- /dev/null
+++ b/nn/runtime/test/generated/models/squeeze_relaxed.model.cpp
@@ -0,0 +1,26 @@
+// Generated file (from: squeeze_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {4, 1, 1, 2});
+ OperandType type2(Type::TENSOR_FLOAT32, {4, 2});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto squeezeDims = model->addOperand(&type1);
+ auto output = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t squeezeDims_init[] = {1, 2};
+ model->setOperandValue(squeezeDims, squeezeDims_init, sizeof(int32_t) * 2);
+ model->addOperation(ANEURALNETWORKS_SQUEEZE, {input, squeezeDims}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_float_10_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_10_relaxed.model.cpp
new file mode 100644
index 000000000..bb119d6b8
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_10_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_float_10_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {1, 3});
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 3});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {1, 0};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 2);
+ static int32_t ends_init[] = {2, 2};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 2);
+ static int32_t strides_init[] = {1, 1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 2);
+ static int32_t beginMask_init[] = {0};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {2};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
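The {1, 3} output in strided_slice_float_10 only works out because endMask = 2 sets bit 1, telling STRIDED_SLICE to ignore ends[1] and run dimension 1 to its full extent. A rough sketch of that per-dimension arithmetic under the usual TensorFlow-style mask semantics, covering only the non-negative begins/ends and positive strides used in this particular test (helper name invented; the negative-index cases in the other strided_slice tests are not handled here):

#include <cstdint>
#include <vector>

// Per-dimension extent of a strided slice with begin/end masks. A set bit in
// beginMask/endMask replaces begins[d]/ends[d] with the start/end of the full
// dimension (for positive strides).
std::vector<int32_t> stridedSliceShape(const std::vector<int32_t>& dims,
                                       std::vector<int32_t> begins,
                                       std::vector<int32_t> ends,
                                       const std::vector<int32_t>& strides,
                                       int32_t beginMask, int32_t endMask) {
    std::vector<int32_t> out(dims.size());
    for (size_t d = 0; d < dims.size(); ++d) {
        if (beginMask & (1 << d)) begins[d] = 0;
        if (endMask & (1 << d)) ends[d] = dims[d];
        out[d] = (ends[d] - begins[d] + strides[d] - 1) / strides[d];  // ceil division
    }
    return out;
}

// strided_slice_float_10_relaxed: dims {2, 3}, begins {1, 0}, ends {2, 2},
// strides {1, 1}, beginMask 0, endMask 2 -> ends[1] becomes 3 -> shape {1, 3}.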
diff --git a/nn/runtime/test/generated/models/strided_slice_float_11_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_11_relaxed.model.cpp
new file mode 100644
index 000000000..6ef5523fd
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_11_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_float_11_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 3});
+ OperandType type3(Type::TENSOR_FLOAT32, {3});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {0, 0};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 2);
+ static int32_t ends_init[] = {2, 3};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 2);
+ static int32_t strides_init[] = {1, 1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 2);
+ static int32_t beginMask_init[] = {0};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {0};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {1};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_float_1_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..af53508bf
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_1_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_float_1_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {2});
+ OperandType type0(Type::TENSOR_FLOAT32, {4});
+ OperandType type1(Type::TENSOR_INT32, {1});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {1};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 1);
+ static int32_t ends_init[] = {3};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 1);
+ static int32_t strides_init[] = {1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 1);
+ static int32_t beginMask_init[] = {0};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {0};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_float_2_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..94e14aafa
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_2_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_float_2_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {2});
+ OperandType type0(Type::TENSOR_FLOAT32, {4});
+ OperandType type1(Type::TENSOR_INT32, {1});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {-3};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 1);
+ static int32_t ends_init[] = {3};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 1);
+ static int32_t strides_init[] = {1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 1);
+ static int32_t beginMask_init[] = {0};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {0};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_float_3_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_3_relaxed.model.cpp
new file mode 100644
index 000000000..54fe6b707
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_3_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_float_3_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {3});
+ OperandType type0(Type::TENSOR_FLOAT32, {4});
+ OperandType type1(Type::TENSOR_INT32, {1});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {-5};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 1);
+ static int32_t ends_init[] = {3};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 1);
+ static int32_t strides_init[] = {1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 1);
+ static int32_t beginMask_init[] = {0};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {0};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_float_4_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_4_relaxed.model.cpp
new file mode 100644
index 000000000..ceaae5a40
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_4_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_float_4_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {1});
+ OperandType type0(Type::TENSOR_FLOAT32, {4});
+ OperandType type1(Type::TENSOR_INT32, {1});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {1};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 1);
+ static int32_t ends_init[] = {-2};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 1);
+ static int32_t strides_init[] = {1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 1);
+ static int32_t beginMask_init[] = {0};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {0};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_float_5_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_5_relaxed.model.cpp
new file mode 100644
index 000000000..133ce3de7
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_5_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_float_5_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {3});
+ OperandType type0(Type::TENSOR_FLOAT32, {4});
+ OperandType type1(Type::TENSOR_INT32, {1});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {1};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 1);
+ static int32_t ends_init[] = {3};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 1);
+ static int32_t strides_init[] = {1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 1);
+ static int32_t beginMask_init[] = {1};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {0};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_float_6_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_6_relaxed.model.cpp
new file mode 100644
index 000000000..52a95d526
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_6_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_float_6_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {3});
+ OperandType type0(Type::TENSOR_FLOAT32, {4});
+ OperandType type1(Type::TENSOR_INT32, {1});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {1};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 1);
+ static int32_t ends_init[] = {3};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 1);
+ static int32_t strides_init[] = {1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 1);
+ static int32_t beginMask_init[] = {0};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {1};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_float_7_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_7_relaxed.model.cpp
new file mode 100644
index 000000000..f9e3187a6
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_7_relaxed.model.cpp
@@ -0,0 +1,41 @@
+// Generated file (from: strided_slice_float_7_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type0(Type::TENSOR_FLOAT32, {3});
+ OperandType type1(Type::TENSOR_INT32, {1});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type0);
+ // Phase 2, operations
+ static int32_t begins_init[] = {-1};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 1);
+ static int32_t ends_init[] = {-4};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 1);
+ static int32_t strides_init[] = {-1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 1);
+ static int32_t beginMask_init[] = {0};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {0};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_float_8_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_8_relaxed.model.cpp
new file mode 100644
index 000000000..9e15b708b
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_8_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_float_8_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {1, 3});
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 3});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {1, -1};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 2);
+ static int32_t ends_init[] = {2, -4};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 2);
+ static int32_t strides_init[] = {2, -1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 2);
+ static int32_t beginMask_init[] = {0};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {0};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_float_9_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_float_9_relaxed.model.cpp
new file mode 100644
index 000000000..f5a27bc6a
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_float_9_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_float_9_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {2, 2});
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 3});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {1, 0};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 2);
+ static int32_t ends_init[] = {2, 2};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 2);
+ static int32_t strides_init[] = {1, 1};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 2);
+ static int32_t beginMask_init[] = {1};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {0};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/strided_slice_relaxed.model.cpp b/nn/runtime/test/generated/models/strided_slice_relaxed.model.cpp
new file mode 100644
index 000000000..1dcb838b4
--- /dev/null
+++ b/nn/runtime/test/generated/models/strided_slice_relaxed.model.cpp
@@ -0,0 +1,42 @@
+// Generated file (from: strided_slice_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type3(Type::TENSOR_FLOAT32, {1, 2});
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 3});
+ OperandType type1(Type::TENSOR_INT32, {2});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto begins = model->addOperand(&type1);
+ auto ends = model->addOperand(&type1);
+ auto strides = model->addOperand(&type1);
+ auto beginMask = model->addOperand(&type2);
+ auto endMask = model->addOperand(&type2);
+ auto shrinkAxisMask = model->addOperand(&type2);
+ auto output = model->addOperand(&type3);
+ // Phase 2, operations
+ static int32_t begins_init[] = {0, 0};
+ model->setOperandValue(begins, begins_init, sizeof(int32_t) * 2);
+ static int32_t ends_init[] = {2, 3};
+ model->setOperandValue(ends, ends_init, sizeof(int32_t) * 2);
+ static int32_t strides_init[] = {2, 2};
+ model->setOperandValue(strides, strides_init, sizeof(int32_t) * 2);
+ static int32_t beginMask_init[] = {0};
+ model->setOperandValue(beginMask, beginMask_init, sizeof(int32_t) * 1);
+ static int32_t endMask_init[] = {0};
+ model->setOperandValue(endMask, endMask_init, sizeof(int32_t) * 1);
+ static int32_t shrinkAxisMask_init[] = {0};
+ model->setOperandValue(shrinkAxisMask, shrinkAxisMask_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_STRIDED_SLICE, {input, begins, ends, strides, beginMask, endMask, shrinkAxisMask}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/sub_broadcast_float_relaxed.model.cpp b/nn/runtime/test/generated/models/sub_broadcast_float_relaxed.model.cpp
new file mode 100644
index 000000000..610b8e369
--- /dev/null
+++ b/nn/runtime/test/generated/models/sub_broadcast_float_relaxed.model.cpp
@@ -0,0 +1,27 @@
+// Generated file (from: sub_broadcast_float_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type2(Type::INT32, {});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2});
+ OperandType type1(Type::TENSOR_FLOAT32, {2, 2});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type1);
+ auto act = model->addOperand(&type2);
+ auto op3 = model->addOperand(&type1);
+ // Phase 2, operations
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_SUB, {op1, op2, act}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/sub_relaxed.model.cpp b/nn/runtime/test/generated/models/sub_relaxed.model.cpp
new file mode 100644
index 000000000..440de653c
--- /dev/null
+++ b/nn/runtime/test/generated/models/sub_relaxed.model.cpp
@@ -0,0 +1,26 @@
+// Generated file (from: sub_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type1(Type::INT32, {});
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 1});
+ // Phase 1, operands
+ auto op1 = model->addOperand(&type0);
+ auto op2 = model->addOperand(&type0);
+ auto act = model->addOperand(&type1);
+ auto op3 = model->addOperand(&type0);
+ // Phase 2, operations
+ static int32_t act_init[] = {0};
+ model->setOperandValue(act, act_init, sizeof(int32_t) * 1);
+ model->addOperation(ANEURALNETWORKS_SUB, {op1, op2, act}, {op3});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {op1, op2},
+ {op3});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/transpose_float_1_relaxed.model.cpp b/nn/runtime/test/generated/models/transpose_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..c73b4bb0c
--- /dev/null
+++ b/nn/runtime/test/generated/models/transpose_float_1_relaxed.model.cpp
@@ -0,0 +1,26 @@
+// Generated file (from: transpose_float_1_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {2, 3, 4, 5});
+ OperandType type2(Type::TENSOR_FLOAT32, {4, 2, 3, 5});
+ OperandType type1(Type::TENSOR_INT32, {4});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto perms = model->addOperand(&type1);
+ auto output = model->addOperand(&type2);
+ // Phase 2, operations
+ static int32_t perms_init[] = {2, 0, 1, 3};
+ model->setOperandValue(perms, perms_init, sizeof(int32_t) * 4);
+ model->addOperation(ANEURALNETWORKS_TRANSPOSE, {input, perms}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/models/transpose_relaxed.model.cpp b/nn/runtime/test/generated/models/transpose_relaxed.model.cpp
new file mode 100644
index 000000000..af74ac488
--- /dev/null
+++ b/nn/runtime/test/generated/models/transpose_relaxed.model.cpp
@@ -0,0 +1,25 @@
+// Generated file (from: transpose_relaxed.mod.py). Do not edit
+void CreateModel(Model *model) {
+ OperandType type0(Type::TENSOR_FLOAT32, {1, 2, 2, 1});
+ OperandType type1(Type::TENSOR_INT32, {4});
+ // Phase 1, operands
+ auto input = model->addOperand(&type0);
+ auto perms = model->addOperand(&type1);
+ auto output = model->addOperand(&type0);
+ // Phase 2, operations
+ static int32_t perms_init[] = {0, 2, 1, 3};
+ model->setOperandValue(perms, perms_init, sizeof(int32_t) * 4);
+ model->addOperation(ANEURALNETWORKS_TRANSPOSE, {input, perms}, {output});
+ // Phase 3, inputs and outputs
+ model->identifyInputsAndOutputs(
+ {input},
+ {output});
+ // Phase 4: set relaxed execution
+ model->relaxComputationFloat32toFloat16(true);
+ assert(model->isValid());
+}
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
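Every CTS model above ends with the same Phase 4 call, model->relaxComputationFloat32toFloat16(true), which is what distinguishes these _relaxed variants from the original V1_0/V1_1 specs: drivers may then evaluate TENSOR_FLOAT32 operands with the range and precision of IEEE 754 fp16. The VTS models that follow carry the equivalent .relaxComputationFloat32toFloat16 = true field. Purely to illustrate why the relaxed tests need a looser comparison, here is a sketch with an assumed tolerance; the real bound is chosen by the test harness, not by these generated files:

#include <algorithm>
#include <cmath>

// Illustrative relaxed comparison: fp16 has a 10-bit mantissa, so per-operation
// error on the order of 2^-10 is plausible. The constant below is an assumption
// made for this sketch, not the harness's actual threshold.
bool closeEnoughRelaxed(float expected, float actual) {
    const float kAssumedRelaxedTolerance = 5.0f * 0.0009765625f;  // 5 * 2^-10
    return std::fabs(expected - actual) <=
           kAssumedRelaxedTolerance * std::max(1.0f, std::fabs(expected));
}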
diff --git a/nn/runtime/test/generated/vts_models/avg_pool_float_5_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/avg_pool_float_5_relaxed.model.cpp
new file mode 100644
index 000000000..fc6032777
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/avg_pool_float_5_relaxed.model.cpp
@@ -0,0 +1,82 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 4, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 1, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::AVERAGE_POOL_2D,
+ .inputs = {0, 2, 1, 1, 1, 1, 3},
+ .outputs = {4},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {4};
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
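Unlike the CTS models, the VTS models flatten every scalar constant into the operandValues byte vector, and each CONSTANT_COPY operand finds its bytes through its .location field. As a small illustrative helper (assuming the little-endian int32 layout these generated vectors use), this is how the avg_pool_float_5_relaxed values above decode:

#include <cstdint>
#include <cstring>
#include <vector>

// Reads the int32 stored for a CONSTANT_COPY operand at the given offset
// inside operandValues.
int32_t readInt32(const std::vector<uint8_t>& operandValues, uint32_t offset) {
    int32_t value;
    std::memcpy(&value, operandValues.data() + offset, sizeof(value));
    return value;  // little-endian bytes on the platforms these tests target
}

// avg_pool_float_5_relaxed: operandValues = {2,0,0,0, 1,0,0,0, 0,0,0,0}
//   operand 1 (offset 0, consumed 4 times) -> 2
//   operand 2 (offset 4)                   -> 1
//   operand 3 (offset 8)                   -> 0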
diff --git a/nn/runtime/test/generated/vts_models/batch_to_space_float_1_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/batch_to_space_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..94852f138
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/batch_to_space_float_1_relaxed.model.cpp
@@ -0,0 +1,64 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 4, 4, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::BATCH_TO_SPACE_ND,
+ .inputs = {0, 1},
+ .outputs = {2},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {2};
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0, 2, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/batch_to_space_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/batch_to_space_relaxed.model.cpp
new file mode 100644
index 000000000..5ea7313d9
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/batch_to_space_relaxed.model.cpp
@@ -0,0 +1,64 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 1, 1, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::BATCH_TO_SPACE_ND,
+ .inputs = {0, 1},
+ .outputs = {2},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {2};
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0, 2, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/conv_float_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/conv_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..19a9ff550
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/conv_float_2_relaxed.model.cpp
@@ -0,0 +1,100 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3, 4, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3, 3, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 36},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 36, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 40, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 44, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 2,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 48, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3, 4, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::CONV_2D,
+ .inputs = {0, 1, 2, 3, 5, 5, 4},
+ .outputs = {6},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {6};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 128, 64, 0, 0, 224, 64, 0, 0, 0, 64, 0, 0, 160, 64, 0, 0, 0, 65, 0, 0, 64, 64, 0, 0, 192, 64, 0, 0, 16, 65, 0, 0, 72, 195, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..aae5378a2
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_2_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 16},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 2,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 88, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 92, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 1, 4},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::DEPTHWISE_CONV_2D,
+ .inputs = {0, 1, 2, 3, 5, 5, 6, 4},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 16, 193, 0, 0, 32, 65, 0, 0, 48, 193, 0, 0, 64, 65, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 80, 65, 0, 0, 96, 193, 0, 0, 112, 65, 0, 0, 128, 193, 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 2, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_large_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_large_2_relaxed.model.cpp
new file mode 100644
index 000000000..81fc5524b
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_large_2_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 16},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 2,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 88, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 92, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 1, 1, 4},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::DEPTHWISE_CONV_2D,
+ .inputs = {0, 1, 2, 3, 3, 3, 3, 5, 5, 6, 4},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 62, 0, 0, 0, 0, 0, 0, 32, 65, 0, 0, 200, 66, 0, 0, 128, 62, 0, 0, 128, 63, 0, 0, 160, 65, 0, 0, 200, 66, 0, 0, 128, 62, 0, 0, 0, 0, 0, 0, 240, 65, 0, 0, 200, 66, 0, 0, 128, 62, 0, 0, 128, 63, 0, 0, 32, 66, 0, 0, 200, 66, 0, 124, 18, 73, 0, 230, 42, 73, 0, 80, 67, 73, 0, 186, 91, 73, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.model.cpp
new file mode 100644
index 000000000..e205e2dbb
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_large_2_weights_as_inputs_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 3},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 2,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 1, 1, 4},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::DEPTHWISE_CONV_2D,
+ .inputs = {0, 1, 2, 3, 3, 3, 3, 5, 5, 6, 4},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1, 2};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_relaxed.model.cpp
new file mode 100644
index 000000000..4af71d6c3
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3, 3, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 64, .length = 16},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 80, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 84, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 2,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 88, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 92, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 4},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::DEPTHWISE_CONV_2D,
+ .inputs = {0, 1, 2, 3, 3, 3, 3, 5, 5, 6, 4},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 62, 0, 0, 0, 0, 205, 204, 76, 62, 0, 0, 0, 0, 0, 0, 128, 62, 0, 0, 0, 0, 0, 0, 0, 0, 154, 153, 153, 62, 0, 0, 128, 62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 62, 205, 204, 204, 61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_weights_as_inputs_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_weights_as_inputs_relaxed.model.cpp
new file mode 100644
index 000000000..ddec86c3f
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/depthwise_conv2d_float_weights_as_inputs_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3, 3, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 2,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 4},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::DEPTHWISE_CONV_2D,
+ .inputs = {0, 1, 2, 3, 3, 3, 3, 5, 5, 6, 4},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1, 2};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/dequantize_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/dequantize_relaxed.model.cpp
new file mode 100644
index 000000000..c08e16c41
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/dequantize_relaxed.model.cpp
@@ -0,0 +1,53 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_QUANT8_ASYMM,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 1.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::DEQUANTIZE,
+ .inputs = {0},
+ .outputs = {1},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {1};
+ std::vector<uint8_t> operandValues = {};
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/div_broadcast_float_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/div_broadcast_float_relaxed.model.cpp
new file mode 100644
index 000000000..9f6d6b7de
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/div_broadcast_float_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::DIV,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/div_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/div_relaxed.model.cpp
new file mode 100644
index 000000000..c875b57cd
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/div_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::DIV,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/fully_connected_float_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/fully_connected_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..ef3d655b9
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/fully_connected_float_2_relaxed.model.cpp
@@ -0,0 +1,82 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 8},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {16, 8},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 512},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {16},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 512, .length = 64},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 16},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 576, .length = 4},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::FULLY_CONNECTED,
+ .inputs = {0, 1, 2, 4},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 166, 9, 187, 61, 142, 177, 211, 61, 242, 12, 162, 190, 210, 58, 170, 189, 103, 243, 24, 62, 243, 90, 73, 190, 191, 71, 253, 189, 73, 157, 128, 61, 102, 22, 129, 61, 36, 69, 68, 62, 195, 244, 125, 189, 158, 235, 123, 189, 244, 24, 141, 190, 213, 64, 115, 61, 142, 174, 242, 189, 49, 64, 162, 189, 205, 200, 224, 61, 21, 26, 8, 60, 80, 82, 128, 189, 74, 179, 121, 189, 54, 232, 75, 189, 126, 143, 218, 189, 37, 147, 163, 190, 116, 70, 212, 189, 202, 108, 136, 62, 125, 3, 83, 61, 78, 210, 252, 189, 0, 26, 165, 62, 119, 72, 49, 61, 233, 243, 49, 190, 61, 243, 66, 190, 137, 236, 3, 59, 189, 200, 196, 61, 5, 78, 6, 62, 165, 75, 255, 60, 76, 27, 206, 61, 128, 215, 159, 190, 245, 186, 165, 189, 208, 127, 207, 189, 82, 211, 238, 189, 215, 166, 17, 62, 226, 59, 177, 61, 99, 126, 14, 190, 80, 115, 50, 62, 111, 213, 149, 189, 202, 252, 163, 61, 75, 63, 225, 59, 29, 227, 106, 62, 64, 166, 213, 189, 133, 66, 52, 190, 213, 151, 85, 190, 247, 143, 133, 186, 183, 235, 37, 190, 14, 135, 165, 61, 215, 248, 204, 188, 124, 126, 152, 61, 34, 84, 129, 190, 203, 161, 37, 190, 58, 173, 11, 190, 71, 30, 8, 60, 244, 26, 187, 59, 57, 214, 197, 61, 199, 101, 148, 62, 77, 17, 96, 62, 55, 251, 155, 190, 110, 82, 145, 189, 39, 162, 95, 61, 202, 164, 22, 62, 96, 115, 14, 190, 30, 23, 149, 189, 74, 65, 39, 190, 71, 228, 43, 190, 48, 218, 3, 190, 13, 142, 18, 189, 15, 39, 240, 61, 200, 182, 140, 60, 216, 211, 134, 62, 199, 217, 52, 190, 211, 162, 134, 62, 171, 205, 191, 189, 128, 182, 173, 190, 72, 136, 170, 62, 183, 42, 57, 62, 20, 62, 147, 62, 130, 226, 135, 61, 158, 178, 74, 190, 61, 100, 234, 189, 41, 179, 113, 190, 150, 8, 212, 61, 97, 109, 12, 189, 140, 19, 191, 61, 132, 217, 20, 190, 39, 48, 93, 61, 182, 74, 128, 62, 59, 199, 32, 62, 108, 236, 18, 190, 2, 102, 14, 190, 14, 47, 72, 189, 83, 91, 138, 61, 250, 95, 14, 62, 57, 241, 165, 62, 112, 125, 104, 62, 213, 204, 26, 61, 57, 42, 103, 190, 249, 106, 119, 190, 38, 54, 183, 62, 50, 5, 11, 62, 44, 16, 157, 190, 109, 1, 1, 190, 117, 62, 188, 61, 23, 126, 136, 62, 173, 221, 246, 60, 123, 193, 135, 62, 171, 6, 17, 62, 179, 210, 164, 60, 51, 167, 171, 61, 55, 23, 255, 189, 208, 13, 205, 189, 95, 66, 69, 189, 39, 133, 57, 62, 63, 200, 154, 62, 178, 71, 40, 189, 149, 216, 21, 62, 245, 242, 123, 189, 1, 48, 110, 190, 179, 9, 144, 62, 214, 229, 20, 62, 248, 56, 99, 190, 193, 114, 36, 190, 95, 181, 82, 62, 108, 95, 160, 189, 19, 182, 159, 189, 239, 226, 253, 58, 213, 236, 129, 60, 21, 198, 22, 61, 225, 70, 74, 60, 245, 189, 134, 58, 167, 231, 29, 61, 46, 115, 154, 61, 38, 109, 170, 60, 222, 144, 70, 61, 140, 45, 4, 189, 142, 231, 51, 62, 143, 111, 175, 189, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/fully_connected_float_4d_simple.model.cpp b/nn/runtime/test/generated/vts_models/fully_connected_float_4d_simple.model.cpp
index fcf290060..5ad81c96c 100644
--- a/nn/runtime/test/generated/vts_models/fully_connected_float_4d_simple.model.cpp
+++ b/nn/runtime/test/generated/vts_models/fully_connected_float_4d_simple.model.cpp
@@ -71,7 +71,6 @@ Model createTestModel() {
.outputIndexes = outputIndexes,
.operandValues = operandValues,
.pools = pools,
- .relaxComputationFloat32toFloat16 = true,
};
}
diff --git a/nn/runtime/test/generated/vts_models/fully_connected_float_4d_simple_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/fully_connected_float_4d_simple_relaxed.model.cpp
new file mode 100644
index 000000000..fcf290060
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/fully_connected_float_4d_simple_relaxed.model.cpp
@@ -0,0 +1,82 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 1, 5, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {3, 10},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 120},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {3},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 120, .length = 12},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 3},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 132, .length = 4},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::FULLY_CONNECTED,
+ .inputs = {0, 1, 2, 4},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 16, 65, 0, 0, 32, 65, 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 16, 65, 0, 0, 32, 65, 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 0, 224, 64, 0, 0, 0, 65, 0, 0, 16, 65, 0, 0, 32, 65, 0, 0, 128, 63, 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/fully_connected_float_large_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/fully_connected_float_large_relaxed.model.cpp
new file mode 100644
index 000000000..9a3314046
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/fully_connected_float_large_relaxed.model.cpp
@@ -0,0 +1,82 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 5},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 5},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 20},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::FULLY_CONNECTED,
+ .inputs = {0, 1, 2, 4},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 64, 0, 0, 64, 64, 0, 0, 128, 64, 0, 0, 160, 64, 0, 0, 192, 64, 0, 186, 91, 73, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/fully_connected_float_large_weights_as_inputs_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/fully_connected_float_large_weights_as_inputs_relaxed.model.cpp
new file mode 100644
index 000000000..63801367d
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/fully_connected_float_large_weights_as_inputs_relaxed.model.cpp
@@ -0,0 +1,82 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 5},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 5},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::FULLY_CONNECTED,
+ .inputs = {0, 1, 2, 4},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1, 2};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/l2_normalization_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/l2_normalization_2_relaxed.model.cpp
new file mode 100644
index 000000000..8be70f221
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/l2_normalization_2_relaxed.model.cpp
@@ -0,0 +1,53 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 1, 1, 6},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 1, 1, 6},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::L2_NORMALIZATION,
+ .inputs = {0},
+ .outputs = {1},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {1};
+ std::vector<uint8_t> operandValues = {};
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/l2_pool_float_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/l2_pool_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..afa7aa3aa
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/l2_pool_float_2_relaxed.model.cpp
@@ -0,0 +1,82 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 4, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 1, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::L2_POOL_2D,
+ .inputs = {0, 2, 1, 1, 1, 1, 3},
+ .outputs = {4},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {4};
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/l2_pool_float_large_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/l2_pool_float_large_relaxed.model.cpp
new file mode 100644
index 000000000..453d1b9d7
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/l2_pool_float_large_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 3},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 1, 1, 3},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::L2_POOL_2D,
+ .inputs = {0, 5, 5, 5, 5, 3, 4, 1, 2, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/max_pool_float_4_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/max_pool_float_4_relaxed.model.cpp
new file mode 100644
index 000000000..33cac4917
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/max_pool_float_4_relaxed.model.cpp
@@ -0,0 +1,82 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 4, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 4,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 1, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::MAX_POOL_2D,
+ .inputs = {0, 2, 1, 1, 1, 1, 3},
+ .outputs = {4},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {4};
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/mean_float_1_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/mean_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..436a063f9
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/mean_float_1_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 3, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::MEAN,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 1, 0, 0, 0, 0, 0, 0, 0, 253, 255, 255, 255, 253, 255, 255, 255, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/mean_float_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/mean_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..bae3fd2ff
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/mean_float_2_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 3, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::MEAN,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/mean_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/mean_relaxed.model.cpp
new file mode 100644
index 000000000..8896acf05
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/mean_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::MEAN,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/pad_float_1_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/pad_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..6f04ef501
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/pad_float_1_relaxed.model.cpp
@@ -0,0 +1,64 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 3, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {4, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 32},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 4, 7, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::PAD,
+ .inputs = {0, 1},
+ .outputs = {2},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {2};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/pad_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/pad_relaxed.model.cpp
new file mode 100644
index 000000000..3aa8599a0
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/pad_relaxed.model.cpp
@@ -0,0 +1,64 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {4, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 32},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 4, 4, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::PAD,
+ .inputs = {0, 1},
+ .outputs = {2},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {2};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/relu_float_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/relu_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..b4ba428c9
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/relu_float_2_relaxed.model.cpp
@@ -0,0 +1,53 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 64, 40, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 64, 40, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::RELU,
+ .inputs = {0},
+ .outputs = {1},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {1};
+ std::vector<uint8_t> operandValues = {};
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/resize_bilinear_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/resize_bilinear_2_relaxed.model.cpp
new file mode 100644
index 000000000..6c4c7f32b
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/resize_bilinear_2_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3, 3, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::RESIZE_BILINEAR,
+ .inputs = {0, 2, 3},
+ .outputs = {1},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {1};
+ std::vector<uint8_t> operandValues = {
+ 3, 0, 0, 0, 3, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/space_to_batch_float_1_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/space_to_batch_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..39d8e5c39
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/space_to_batch_float_1_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 4, 4, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 2, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::SPACE_TO_BATCH_ND,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/space_to_batch_float_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/space_to_batch_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..3871335cd
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/space_to_batch_float_2_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 5, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {6, 2, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::SPACE_TO_BATCH_ND,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 3, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/space_to_batch_float_3_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/space_to_batch_float_3_relaxed.model.cpp
new file mode 100644
index 000000000..819de5262
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/space_to_batch_float_3_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 4, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {6, 2, 4, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::SPACE_TO_BATCH_ND,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 3, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 4, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/space_to_batch_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/space_to_batch_relaxed.model.cpp
new file mode 100644
index 000000000..51bf2a9d3
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/space_to_batch_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 1, 1, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::SPACE_TO_BATCH_ND,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/squeeze_float_1_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/squeeze_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..8ebd2805d
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/squeeze_float_1_relaxed.model.cpp
@@ -0,0 +1,64 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 24, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 24},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::SQUEEZE,
+ .inputs = {0, 1},
+ .outputs = {2},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {2};
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/squeeze_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/squeeze_relaxed.model.cpp
new file mode 100644
index 000000000..3f7769a59
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/squeeze_relaxed.model.cpp
@@ -0,0 +1,64 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 1, 1, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::SQUEEZE,
+ .inputs = {0, 1},
+ .outputs = {2},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {2};
+ std::vector<uint8_t> operandValues = {
+ 1, 0, 0, 0, 2, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_10_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_10_relaxed.model.cpp
new file mode 100644
index 000000000..28789fe46
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_10_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 3},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 8},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
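+ // Constant payload, read as little-endian int32: begin = {1, 0}, end = {2, 2}, strides = {1, 1}, beginMask = 0, endMask = 2, shrinkAxisMask = 0.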
+ std::vector<uint8_t> operandValues = {
+ 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_11_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_11_relaxed.model.cpp
new file mode 100644
index 000000000..33e24b6ae
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_11_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 3},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 8},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {3},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_1_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..a271a1098
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_1_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_2_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_2_relaxed.model.cpp
new file mode 100644
index 000000000..490ead682
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_2_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
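+ // Constant payload, read as little-endian int32: begin = {-3} (encoded as 253, 255, 255, 255), end = {3}, strides = {1}, all masks 0.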
+ std::vector<uint8_t> operandValues = {
+ 253, 255, 255, 255, 3, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_3_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_3_relaxed.model.cpp
new file mode 100644
index 000000000..a89cb2a6e
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_3_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {3},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 251, 255, 255, 255, 3, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_4_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_4_relaxed.model.cpp
new file mode 100644
index 000000000..7a669c854
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_4_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 1, 0, 0, 0, 254, 255, 255, 255, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_5_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_5_relaxed.model.cpp
new file mode 100644
index 000000000..54eb217d9
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_5_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {3},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_6_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_6_relaxed.model.cpp
new file mode 100644
index 000000000..d35d04e41
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_6_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {3},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 1, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_7_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_7_relaxed.model.cpp
new file mode 100644
index 000000000..6103b3bca
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_7_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {3},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 4, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 12, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 20, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {3},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 255, 255, 255, 255, 252, 255, 255, 255, 255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_8_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_8_relaxed.model.cpp
new file mode 100644
index 000000000..6a1f53287
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_8_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 3},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 8},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 3},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 1, 0, 0, 0, 255, 255, 255, 255, 2, 0, 0, 0, 252, 255, 255, 255, 2, 0, 0, 0, 255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_float_9_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_float_9_relaxed.model.cpp
new file mode 100644
index 000000000..1f52adae2
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_float_9_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 3},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 8},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/strided_slice_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/strided_slice_relaxed.model.cpp
new file mode 100644
index 000000000..c0ddf4499
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/strided_slice_relaxed.model.cpp
@@ -0,0 +1,109 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 3},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 8, .length = 8},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 16, .length = 8},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 24, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 28, .length = 4},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 32, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::STRIDED_SLICE,
+ .inputs = {0, 1, 2, 3, 4, 5, 6},
+ .outputs = {7},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {7};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/sub_broadcast_float_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/sub_broadcast_float_relaxed.model.cpp
new file mode 100644
index 000000000..3a2c09b3b
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/sub_broadcast_float_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 2},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::SUB,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {3};
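+ // The single INT32 constant is the fused activation input of SUB; 0 selects no fused activation.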
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/sub_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/sub_relaxed.model.cpp
new file mode 100644
index 000000000..03fb88f73
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/sub_relaxed.model.cpp
@@ -0,0 +1,73 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::INT32,
+ .dimensions = {},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 4},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::SUB,
+ .inputs = {0, 1, 2},
+ .outputs = {3},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0, 1};
+ const std::vector<uint32_t> outputIndexes = {3};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/transpose_float_1_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/transpose_float_1_relaxed.model.cpp
new file mode 100644
index 000000000..40b09965c
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/transpose_float_1_relaxed.model.cpp
@@ -0,0 +1,64 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {2, 3, 4, 5},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {4, 2, 3, 5},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::TRANSPOSE,
+ .inputs = {0, 1},
+ .outputs = {2},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {2};
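+ // The CONSTANT_COPY payload is the permutation {2, 0, 1, 3}, mapping input {2, 3, 4, 5} to output {4, 2, 3, 5}.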
+ std::vector<uint8_t> operandValues = {
+ 2, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}
diff --git a/nn/runtime/test/generated/vts_models/transpose_relaxed.model.cpp b/nn/runtime/test/generated/vts_models/transpose_relaxed.model.cpp
new file mode 100644
index 000000000..11d963925
--- /dev/null
+++ b/nn/runtime/test/generated/vts_models/transpose_relaxed.model.cpp
@@ -0,0 +1,64 @@
+// Generated code. Do not edit
+// Create the model
+Model createTestModel() {
+ const std::vector<Operand> operands = {
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_INPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ },
+ {
+ .type = OperandType::TENSOR_INT32,
+ .dimensions = {4},
+ .numberOfConsumers = 1,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::CONSTANT_COPY,
+ .location = {.poolIndex = 0, .offset = 0, .length = 16},
+ },
+ {
+ .type = OperandType::TENSOR_FLOAT32,
+ .dimensions = {1, 2, 2, 1},
+ .numberOfConsumers = 0,
+ .scale = 0.0f,
+ .zeroPoint = 0,
+ .lifetime = OperandLifeTime::MODEL_OUTPUT,
+ .location = {.poolIndex = 0, .offset = 0, .length = 0},
+ }
+ };
+
+ const std::vector<Operation> operations = {
+ {
+ .type = OperationType::TRANSPOSE,
+ .inputs = {0, 1},
+ .outputs = {2},
+ }
+ };
+
+ const std::vector<uint32_t> inputIndexes = {0};
+ const std::vector<uint32_t> outputIndexes = {2};
+ std::vector<uint8_t> operandValues = {
+ 0, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0
+ };
+ const std::vector<hidl_memory> pools = {};
+
+ return {
+ .operands = operands,
+ .operations = operations,
+ .inputIndexes = inputIndexes,
+ .outputIndexes = outputIndexes,
+ .operandValues = operandValues,
+ .pools = pools,
+ .relaxComputationFloat32toFloat16 = true,
+ };
+}
+
+
+bool is_ignored(int i) {
+ static std::set<int> ignore = {};
+ return ignore.find(i) != ignore.end();
+}