Searched refs:InferenceInOut (Results 1 – 8 of 8) sorted by relevance

/test/mlts/benchmark/src/com/android/nn/benchmark/core/
InferenceInOut.java
    22  public class InferenceInOut {  [class]
    37  public InferenceInOut(byte[] input, byte[][] expectedOutputs, int expectedClass) {  [in InferenceInOut()]
    43  public InferenceInOut(InputCreatorInterface inputCreator, byte[][] expectedOutputs,  [in InferenceInOut()]
InferenceInOutSequence.java
    46  private List<InferenceInOut> mInputOutputs;
    60  public InferenceInOut get(int i) {  [in get()]
    112 sequence.mInputOutputs.add(new InferenceInOut(  [in readAssets()]
    236 InferenceInOut.InputCreatorInterface creator =  [in readDataset()]
    237 new InferenceInOut.InputCreatorInterface() {  [in readDataset()]
    251 sequence.mInputOutputs.add(new InferenceInOut(creator, null,  [in readDataset()]
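
The two constructors at InferenceInOut.java:37 and :43 suggest that each entry pairs one model input, given either as an eager byte[] or as a lazily evaluated InputCreatorInterface, with its golden outputs. Below is a minimal sketch of the eager form only; it assumes the benchmark core classes are on the classpath, and the wrapper class name and byte values are illustrative, not taken from the tree.

    import com.android.nn.benchmark.core.InferenceInOut;

    public class InferenceInOutSketch {
        public static void main(String[] args) {
            // Eagerly materialized input bytes and golden output bytes (placeholder values).
            byte[] input = new byte[] {1, 2, 3, 4};
            byte[][] expectedOutputs = new byte[][] { {0, 1} };
            // Presumably the index of the expected top-1 class, judging by the parameter name.
            int expectedClass = 1;
            InferenceInOut entry = new InferenceInOut(input, expectedOutputs, expectedClass);
        }
    }

The second constructor (line 43) accepts an InputCreatorInterface so the input can be generated on demand, which is how readDataset() builds entries at lines 236-251; its remaining parameters are truncated in the hit above, so the sketch does not exercise it.
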
/test/mlts/benchmark/src/com/android/nn/benchmark/evaluators/
BaseSequenceEvaluator.java
    4   import com.android.nn.benchmark.core.InferenceInOut;
    56  InferenceInOut inOut = inferenceInOuts.get(result.mInputOutputSequenceIndex)  [in EvaluateAccuracy()]
TopK.java
    22  import com.android.nn.benchmark.core.InferenceInOut;
    64  InferenceInOut io = sequence.get(0);  [in EvaluateAccuracy()]
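
Both evaluators read entries back through InferenceInOutSequence, which wraps a List<InferenceInOut> (line 46) behind get(int i) (line 60). The fragment below sketches that access pattern under the same classpath assumption; the helper name firstEntry is made up for illustration.

    import com.android.nn.benchmark.core.InferenceInOut;
    import com.android.nn.benchmark.core.InferenceInOutSequence;

    public class EvaluatorAccessSketch {
        // TopK.java:64 pulls the first input/output pair of a sequence in exactly this way.
        static InferenceInOut firstEntry(InferenceInOutSequence sequence) {
            return sequence.get(0);
        }
    }
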
/test/mlts/benchmark/jni/
run_tflite.h
    32  struct InferenceInOut {  [struct]
    44  using InferenceInOutSequence = std::vector<InferenceInOut>;
run_tflite.cpp
    281 const InferenceInOut& data = seq[i];  [in benchmark()]
    591 const InferenceInOut& data = seq[i];  [in dumpAllLayers()]
/test/mlts/benchmark/src/com/android/nn/crashtest/core/test/
RunModelsInMultipleProcesses.java
    23  import com.android.nn.benchmark.core.InferenceInOut;
    184 final InferenceInOut.InputCreatorInterface creator =  [in writeModelInput()]
/test/mlts/benchmark/native/
multi_process_test.cpp
    99  InferenceInOut entry{  [in readInputData()]