
Searched refs:results (Results 1 – 25 of 109) sorted by relevance

/AliOS-Things-master/solutions/tflite_micro_speech_demo/micro_speech/train/speech_commands/
recognize_commands_test.cc
28 Tensor results(DT_FLOAT, {3}); in TEST() local
29 test::FillValues<float>(&results, {1.0f, 0.0f, 0.0f}); in TEST()
35 results, 0, &found_command, &score, &is_new_command)); in TEST()
41 Tensor results(DT_FLOAT, {3}); in TEST() local
43 test::FillValues<float>(&results, {0.0f, 1.0f, 0.0f}); in TEST()
52 results, current_time_ms, &found_command, &score, &is_new_command)); in TEST()
62 test::FillValues<float>(&results, {0.0f, 0.0f, 1.0f}); in TEST()
71 results, current_time_ms, &found_command, &score, &is_new_command)); in TEST()
100 Tensor results(DT_FLOAT, {3}); in TEST() local
101 test::FillValues<float>(&results, {1.0f, 0.0f, 0.0f}); in TEST()
[all …]
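
Note: the recognize_commands_test.cc hits above build a small tensor of per-label scores and hand it, together with a timestamp, to the command recognizer. A minimal sketch of the idea being tested, using illustrative names and thresholds rather than the tflite-micro API, is to average the score frames seen in a short time window and report the top label only once its averaged score clears a threshold:

    #include <cstdint>
    #include <deque>
    #include <string>
    #include <utility>
    #include <vector>

    // Illustrative only: keep a short history of score frames, average them
    // per label, and report the best label once the average is confident.
    // scores.size() is assumed to equal labels_.size().
    class SimpleCommandRecognizer {
     public:
      SimpleCommandRecognizer(std::vector<std::string> labels,
                              int32_t window_ms = 1000, float threshold = 0.8f)
          : labels_(std::move(labels)), window_ms_(window_ms), threshold_(threshold) {}

      std::string Process(int32_t now_ms, const std::vector<float>& scores) {
        history_.push_back({now_ms, scores});
        // Drop frames that have fallen out of the averaging window.
        while (history_.front().first < now_ms - window_ms_) history_.pop_front();
        std::vector<float> avg(labels_.size(), 0.0f);
        for (const auto& frame : history_) {
          for (size_t i = 0; i < avg.size(); ++i) avg[i] += frame.second[i];
        }
        size_t best = 0;
        for (size_t i = 1; i < avg.size(); ++i) {
          if (avg[i] > avg[best]) best = i;
        }
        if (avg[best] / history_.size() < threshold_) return "_unknown_";
        return labels_[best];
      }

     private:
      std::vector<std::string> labels_;
      int32_t window_ms_;
      float threshold_;
      std::deque<std::pair<int32_t, std::vector<float>>> history_;
    };

The real tests additionally cover suppression of repeated detections and rejection of stale inputs, which this sketch leaves out.
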
wav_to_features.py
73 results = audio_processor.get_features_for_wav(input_wav, model_settings,
75 features = results[0]
/AliOS-Things-master/components/py_engine/tests/extmod/
utime_res.py
52 results = results_map.get(func_name)
53 if results is None:
55 elif func_name == "ticks_cpu" and results == {0}:
58 elif len(results) < min_len:
61 % (func_name, len(results), "s"[: len(results) != 1], TEST_TIME, min_len)
/AliOS-Things-master/components/ucloud_ai/src/model/
ocr.cc
192 cout << "results size: " << outcome.result().getData().results.size() << endl; in recognizeCharacter()
194 for (i = 0; i < outcome.result().getData().results.size(); i++) { in recognizeCharacter()
195 cout << i << "text: " << outcome.result().getData().results[i].text << endl; in recognizeCharacter()
196 cout << i << "probability: " << outcome.result().getData().results[i].probability << endl; in recognizeCharacter()
202 text = outcome.result().getData().results[i].text; in recognizeCharacter()
204 result.character.probability = outcome.result().getData().results[i].probability; in recognizeCharacter()
205 result.character.left = outcome.result().getData().results[i].textRectangles.left; in recognizeCharacter()
206 result.character.angle = outcome.result().getData().results[i].textRectangles.angle; in recognizeCharacter()
207 result.character.top = outcome.result().getData().results[i].textRectangles.top; in recognizeCharacter()
208 result.character.height = outcome.result().getData().results[i].textRectangles.height; in recognizeCharacter()
[all …]
/AliOS-Things-master/solutions/tflite_micro_speech_demo/micro_speech/
recognize_commands_test.cc
80 TfLiteTensor results = tflite::testing::CreateQuantizedTensor( in TF_LITE_MICRO_TEST() local
89 &results, 0, &found_command, &score, &is_new_command)); in TF_LITE_MICRO_TEST()
177 TfLiteTensor results = tflite::testing::CreateQuantizedTensor( in TF_LITE_MICRO_TEST() local
186 &results, 100, &found_command, &score, &is_new_command)); in TF_LITE_MICRO_TEST()
189 &results, 0, &found_command, &score, &is_new_command)); in TF_LITE_MICRO_TEST()
199 TfLiteTensor results = tflite::testing::CreateQuantizedTensor( in TF_LITE_MICRO_TEST() local
208 &results, 100, &found_command, &score, &is_new_command)); in TF_LITE_MICRO_TEST()
/AliOS-Things-master/components/py_engine/engine/tools/
gen-cpydiff.py
98 results = []
138 results.append(output)
140 results.sort(key=lambda x: x.class_)
141 return results
186 def gen_rst(results): argument
198 for output in results:
254 results = run_tests(files)
255 gen_rst(results)
/AliOS-Things-master/components/ai_agent/src/engine/tflite-micro/third_party/ruy/ruy/
benchmark.cc
87 return std::move(test_set.results); in Benchmark()
164 const auto& results = Benchmark<TestSetType>(shape); in Benchmark() local
168 for (const auto& result : results) { in Benchmark()
169 if (results.size() > 1) { in Benchmark()
188 for (const auto& result : results) { in Benchmark()
202 for (const auto& result : results) { in Benchmark()
kernel_avx2_fma.cc
455 auto rounding_right_shift = [=](__m256i& results,
468 const __m256i r_plus_nudge = _mm256_add_epi32(results, nudge);
479 results = intrin_utils::mm256_blendv_epi32(
501 __m256i results =
503 results = _mm256_permutevar8x32_epi32(results, repack_perm);
505 rounding_right_shift(results, right_shift);
506 accum = _mm256_add_epi32(results, post_scaling_offset);
916 results = _mm256_permutevar8x32_epi32(results, repack_perm);
929 const __m256i r_plus_nudge = _mm256_add_epi32(results, nudge);
941 results = intrin_utils::mm256_blendv_epi32(
[all …]
kernel_avx.cc
834 auto rounding_right_shift = [=](__m256i& results,
860 results, intrin_utils::mm256_sub_epi32<path>(
864 results = intrin_utils::mm256_blendv_epi32(
891 __m256i results;
895 results = intrin_utils::PermuteEpi32EvenOdds<path>(results);
897 rounding_right_shift(results, right_shift);
1371 __m256i results;
1375 results = intrin_utils::PermuteEpi32EvenOdds<path>(results);
1391 intrin_utils::mm256_add_epi32<path>(results, nudge);
1402 results, intrin_utils::mm256_sub_epi32<path>(
[all …]
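
Note: both ruy kernels above define a rounding_right_shift lambda over whole __m256i vectors: the accumulators are divided by a power of two with round-to-nearest behaviour by adding a nudge of 2^(shift-1) before shifting, plus blended corrections for negative values and for inputs near INT32_MAX. A scalar sketch of the basic idea, not the ruy implementation and with the overflow handling reduced to a 64-bit widening, is:

    #include <cstdint>

    // Divide 'value' by 2^shift, rounding to nearest with ties away from zero.
    // Widening to 64 bits stands in for the overflow correction the vector
    // code performs with an extra nudge/blend step.
    int32_t RoundingRightShift(int32_t value, int shift) {
      if (shift <= 0) return value;
      const int64_t nudge = (int64_t{1} << (shift - 1)) - (value < 0 ? 1 : 0);
      return static_cast<int32_t>((static_cast<int64_t>(value) + nudge) >> shift);
    }

For example, RoundingRightShift(-6, 2) gives -2: -1.5 rounded to the nearest integer, away from zero.
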
/AliOS-Things-master/components/SDL2/src/image/external/libwebp-1.0.2/src/enc/
picture_psnr_enc.c
180 int type, float results[5]) { in WebPPictureDistortion()
187 results == NULL) { in WebPPictureDistortion()
209 w, h, 4, type, &distortion, results + c)) { in WebPPictureDistortion()
216 results[4] = (type == 1) ? (float)GetLogSSIM(total_distortion, total_size) in WebPPictureDistortion()
248 int type, float results[5]) { in WebPPictureDistortion()
253 if (results == NULL) return 0; in WebPPictureDistortion()
254 for (i = 0; i < 5; ++i) results[i] = 0.f; in WebPPictureDistortion()
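
Note: WebPPictureDistortion above fills a five-element results array, one value per channel plus an aggregate in results[4], computed as PSNR or log-SSIM depending on type. As a reference point only, and not libwebp's code, per-plane PSNR for 8-bit samples reduces to:

    #include <cmath>
    #include <cstddef>
    #include <cstdint>
    #include <limits>

    // Reference PSNR of one 8-bit plane of n samples, in decibels:
    // 10 * log10(peak^2 / MSE), with identical planes reported as +infinity.
    double PlanePSNR(const uint8_t* a, const uint8_t* b, size_t n) {
      double sse = 0.0;
      for (size_t i = 0; i < n; ++i) {
        const double d = static_cast<double>(a[i]) - static_cast<double>(b[i]);
        sse += d * d;
      }
      if (sse == 0.0) return std::numeric_limits<double>::infinity();
      return 10.0 * std::log10(255.0 * 255.0 * static_cast<double>(n) / sse);
    }
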
/AliOS-Things-master/components/amp/libjs/init/
promise.js
267 var results = [];
273 results[index] = value;
275 resolve(results);
285 results[i] = promise;
289 resolve(results);
/AliOS-Things-master/components/ai_agent/src/engine/tflite-micro/tensorflow/lite/kernels/internal/optimized/
sse_tensor_utils.cc
328 float* __restrict__ const results) { in SseSparseMatrix4VectorsMultiplyAccumulate() argument
336 float* __restrict__ result0 = results + 0 * m_rows; in SseSparseMatrix4VectorsMultiplyAccumulate()
337 float* __restrict__ result1 = results + 1 * m_rows; in SseSparseMatrix4VectorsMultiplyAccumulate()
338 float* __restrict__ result2 = results + 2 * m_rows; in SseSparseMatrix4VectorsMultiplyAccumulate()
339 float* __restrict__ result3 = results + 3 * m_rows; in SseSparseMatrix4VectorsMultiplyAccumulate()
396 float* __restrict__ results) { in SseSparseMatrixBatchVectorMultiplyAccumulate() argument
403 matrix, ledger, m_rows, m_cols, vectors, scaling_factors_fx4, results); in SseSparseMatrixBatchVectorMultiplyAccumulate()
406 results += kBatchSize4 * m_rows; in SseSparseMatrixBatchVectorMultiplyAccumulate()
411 results); in SseSparseMatrixBatchVectorMultiplyAccumulate()
414 results += m_rows; in SseSparseMatrixBatchVectorMultiplyAccumulate()
/AliOS-Things-master/components/SDL2/src/video/winrt/
SDL_winrtmessagebox.cpp
107 IntPtr results = safe_cast<IntPtr>((int)(operation->GetResults()->Id)); in WINRT_ShowMessageBox()
108 int clicked_index = results.ToInt32(); in WINRT_ShowMessageBox()
/AliOS-Things-master/components/py_engine/engine/py/
objslice.c
55 mp_obj_t results[3] = { in slice_indices() local
60 return mp_obj_new_tuple(3, results); in slice_indices()
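
Note: slice_indices above packs normalized (start, stop, step) values into a three-element tuple, the C counterpart of Python's slice.indices(len). A simplified sketch of that clamping, ignoring the missing-bound defaults MicroPython also handles, could be:

    #include <algorithm>
    #include <array>
    #include <cstdint>

    // Clamp an explicit (start, stop, step) triple to a sequence of length
    // 'len', the way slice(start, stop, step).indices(len) behaves in Python.
    // step == 0 is an error in Python; this sketch simply treats it as 1.
    std::array<int64_t, 3> SliceIndices(int64_t start, int64_t stop,
                                        int64_t step, int64_t len) {
      if (step == 0) step = 1;
      const int64_t lo = step < 0 ? -1 : 0;         // lowest reachable bound
      const int64_t hi = step < 0 ? len - 1 : len;  // highest reachable bound
      start = start < 0 ? std::max(len + start, lo) : std::min(start, hi);
      stop = stop < 0 ? std::max(len + stop, lo) : std::min(stop, hi);
      return {start, stop, step};
    }

For example, SliceIndices(100, -100, -1, 5) yields {4, -1, -1}, matching slice(100, -100, -1).indices(5).
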
/AliOS-Things-master/components/ucloud_ai/src/model/aliyun-openapi/ocr/include/alibabacloud/ocr/model/
RecognizeCharacterResult.h
51 std::vector<Result> results; member
RecognizeQrCodeResult.h
47 std::vector<Element::Result> results; member
RecognizeStampResult.h
60 std::vector<ResultsItem> results; member
/AliOS-Things-master/components/ucloud_ai/src/model/aliyun-openapi/facebody/include/alibabacloud/facebody/model/
DetectLivingFaceResult.h
52 std::vector<Element::Result> results; member
BodyPostureResult.h
58 std::vector<Output::Result> results; member
RecognizePublicFaceResult.h
61 std::vector<Element::Result> results; member
HandPostureResult.h
72 std::vector<Output::Result> results; member
/AliOS-Things-master/components/SDL2/src/image/external/zlib-1.2.11/doc/
txtvsbin.txt
12 It is, however, possible to obtain satisfactory results by employing
82 consistent results, regardless what alphabet encoding is being used.
84 results on a text encoded, say, using ISO-8859-16 versus UTF-8.)
100 formatted office documents, compiled object code, etc. The results
/AliOS-Things-master/components/ucloud_ai/src/model/aliyun-openapi/imagerecog/include/alibabacloud/imagerecog/model/
RecognizeLogoResult.h
56 std::vector<Element::Result> results; member
/AliOS-Things-master/components/ucloud_ai/src/model/aliyun-openapi/ocr/src/model/
RecognizeQrCodeResult.cc
64 elementObject.results.push_back(resultsObject); in parse()
RecognizeCharacterResult.cc
62 data_.results.push_back(resultObject); in parse()
