Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
- InferenceEngine::CNNNetwork passed_network = ie.ReadNetwork("/data/local/tmp/network.xml");
- auto executable_network = ie.LoadNetwork(passed_network, "GNA", config);
- auto inferRequest = executable_network.CreateInferRequest();
- std::vector<InferenceEngine::Blob::Ptr> ptrInputBlobs;
- for (auto& input : passed_network.getInputsInfo()) {
- ptrInputBlobs.push_back(inferRequest.GetBlob(input.first)); // Get the blob pointer from the loaded model. (network.xml)
- }
- std::cout << "ptrInBlob = " << ptrInputBlobs[0] << " size = " << ptrInputBlobs.size() << std::endl;
- float* dest = ptrInputBlobs[0]->buffer().as<float*>();
- for (uint8_t i = 0; i < ptrInputBlobs[0]->byteSize()/4; i++) {
- *(dest + i) = (i + 2); // I set the values to a random value but you can copy from your pointer data. SetBlob also does the same.
- }
- for (int i = 0;i < 300; i++) {
- inferRequest.StartAsync(); //for async infer
- // can also use inferRequest.infer like in the example hello_classification
- inferRequest.Wait(100); //check right value to infer
- }
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement