Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
#!/usr/bin/env python
- # -*- coding: utf-8 -*-
- # encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
- """
- /***************************************************************************
- *
- * Copyright (c) 2019 Baidu.com, Inc. All Rights Reserved
- * @file model.py
- * @author lipeihan01(lipeihan01@baidu.com)
- * @date 2019/5/23 10:31:21
- * @brief this file is DEMARK CE WITH TENSORRT CONFIG
- *
- **************************************************************************/
- """
- import threading
- import numpy as np
- import paddle
- import paddle.fluid as fluid
- import time
def Set_Config(model_path):
    """Build a predictor AnalysisConfig that enables the TensorRT INT8 engine.

    Args:
        model_path: directory holding the serialized ``model`` and ``params`` files.

    Returns:
        A ``fluid.core.AnalysisConfig`` configured for GPU + TensorRT INT8.
    """
    prog_file = "{}/model".format(model_path)
    params_file = "{}/params".format(model_path)
    config = fluid.core.AnalysisConfig(prog_file, params_file)
    # 100 MB initial GPU memory pool on device 0
    config.enable_use_gpu(100, 0)
    # TensorRT INT8 with calibration: 1 GB workspace, max batch size 1
    config.enable_tensorrt_engine(
        1 << 30,
        1,
        precision_mode=fluid.core.AnalysisConfig.Precision.Int8,
        use_static=False,
        use_calib_mode=True,
    )
    # To benchmark TensorRT FP32 instead, swap in:
    # config.enable_tensorrt_engine(workspace_size=1 << 30, max_batch_size=1,
    #     precision_mode=fluid.core.AnalysisConfig.Precision.Float32)
    return config
def load_fake_data(batch_size=1):
    """Build a synthetic all-ones input tensor for latency benchmarking.

    Args:
        batch_size: number of images in the batch (default 1).

    Returns:
        A single-element list containing one ``PaddleTensor`` of shape
        ``[batch_size, 3, 300, 300]`` filled with 1s.
    """
    channels = 3
    height = 300
    width = 300
    input_num = channels * height * width * batch_size
    # The original filled a nested list element-by-element and kept a debug
    # checksum (`sum_i`); that is equivalent to this single flat ones buffer.
    the_data = [1] * input_num
    input_tensor = fluid.core.PaddleTensor()
    input_tensor.shape = [batch_size, channels, height, width]
    input_tensor.data = fluid.core.PaddleBuf(the_data)
    return [input_tensor]
def run_inference(model_path, repeat_times=100):
    """Run the TensorRT predictor on fake data and print average latency.

    Args:
        model_path: directory with the serialized ``model``/``params`` files.
        repeat_times: number of timed prediction runs (default 100).

    Returns:
        (results, t): the float output data of the first prediction, and a
        list of per-run wall-clock latencies in seconds.
    """
    config = Set_Config(model_path)
    predict = fluid.core.create_paddle_predictor(config)
    input_value = load_fake_data()
    outputs = predict.run(input_value)
    results = outputs[0].data.float_data()
    # Warmup: the first runs absorb TensorRT engine build / calibration cost.
    for i in range(5):
        outputs = predict.run(input_value)
        print("warm_up: {0}".format(i))
    t = []  # was declared but never filled in the original; now records per-run times
    t1 = time.time()
    for i in range(repeat_times):
        start = time.time()
        outputs = predict.run(input_value)
        t.append(time.time() - start)
    t2 = time.time()
    # Average latency in milliseconds. The original line was a Python-2 print
    # statement (`print (t2 - t1) * 10, " ms"`) which raises TypeError on
    # Python 3, and its `* 10` hard-coded repeat_times == 100.
    print("{} ms".format((t2 - t1) / repeat_times * 1000))
    return results, t
if __name__ == '__main__':
    # Benchmark the MobileNet-SSD inference model in the working directory.
    results, t = run_inference("./MobileNet_SSD_infer_model")
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement