Not a member of Pastebin yet? Sign up — it unlocks many cool features!
syntax = "proto3";

package tensorflow.serving;

import "model.proto";
import "tensor.proto";

// One employee's raw features for the attrition model.
message PredictRequest {
  float satisfaction_level = 1;
  float last_evaluation = 2;
  int32 number_project = 3;
  int32 average_monthly_hours = 4;
  int32 time_spend_company = 5;
  int32 work_accident = 6;
  int32 promotion_last_5years = 7;
  string sales = 8;
  string salary = 9;
}

// Prediction result for one request.
message PredictResponse {
  // Predicted class id(s) for the `left` label.
  // NOTE(review): presumably 1 = employee predicted to leave, 0 = stay —
  // confirm against the model's label encoding.
  repeated int32 left = 1;
}

// Serves attrition predictions.
service PredictService {
  // Predicts whether an employee will leave, given their features.
  rpc Predict (PredictRequest) returns (PredictResponse);
}
// gRPC client for the attrition model using the hand-written predict.proto
// (tensorflow.serving.PredictService) against a local TF Serving instance.
const PROTO_PATH = __dirname + '/predict.proto';
const grpc = require('grpc');
const attrition = grpc.load(PROTO_PATH).tensorflow.serving;

function main() {
  const client = new attrition.PredictService(
    'localhost:8500',
    grpc.credentials.createInsecure()
  );

  const req = new attrition.PredictRequest();
  req.satisfaction_level = 0.38;
  req.last_evaluation = 0.53;
  req.number_project = 2;
  req.average_monthly_hours = 157;
  req.time_spend_company = 3;
  req.work_accident = 0;
  req.promotion_last_5years = 0;
  req.sales = 'sales';
  req.salary = 'low';

  client.predict(req, (err, predictResponse) => {
    if (err) {
      console.error(err);
    } else {
      // PredictResponse declares `repeated int32 left = 1;` — the field is
      // `left`, not `value` (which does not exist and was always undefined).
      const results = predictResponse ? predictResponse.left : [];
      console.log(results);
    }
  });
}

main();
// gRPC client for the attrition model using TF Serving's standard
// prediction_service.proto (tensorflow.PredictionService).
const PROTO_PATH = __dirname + '/protos/prediction_service.proto';
const grpc = require('grpc');
const attrition = grpc.load(PROTO_PATH).tensorflow;

// Builds a single-element, rank-1 TensorProto literal for one feature.
// `valueField` selects the typed value slot (int_val / float_val / string_val)
// matching `dtype`.
function scalarTensor(dtype, valueField, value) {
  const tensor = {
    dtype,
    tensor_shape: {
      dim: {
        size: 1,
      },
    },
  };
  tensor[valueField] = value;
  return tensor;
}

function main() {
  const client = new attrition.PredictionService(
    'localhost:8500',
    grpc.credentials.createInsecure()
  );

  const features = {
    model_spec: { name: 'attrition', signature_name: 'predict' },
    inputs: {
      average_monthly_hours: scalarTensor('DT_INT32', 'int_val', 157),
      last_evaluation: scalarTensor('DT_FLOAT', 'float_val', 0.53),
      number_project: scalarTensor('DT_INT32', 'int_val', 2),
      promotion_last_5years: scalarTensor('DT_INT32', 'int_val', 0),
      salary: scalarTensor('DT_STRING', 'string_val', 'low'),
      sales: scalarTensor('DT_STRING', 'string_val', 'sales'),
      satisfaction_level: scalarTensor('DT_FLOAT', 'float_val', 0.38),
      time_spend_company: scalarTensor('DT_INT32', 'int_val', 3),
      work_accident: scalarTensor('DT_INT32', 'int_val', 0),
    },
  };

  client.predict(features, (err, predictResponse) => {
    if (err) {
      console.error('Received error running prediction:');
      console.error(err);
    } else {
      // TF Serving's PredictResponse carries its result tensors in the
      // `outputs` map (output name -> TensorProto); `.value` does not exist
      // on the message and was always undefined.
      const results = predictResponse ? predictResponse.outputs : {};
      console.log(results);
    }
  });
}

main();
# Declare one rank-1, single-element placeholder per raw feature so the
# exported SavedModel accepts the same nine inputs the estimator was
# trained on, keyed by feature name.
_FEATURE_SPECS = [
    ('satisfaction_level', tf.float32),
    ('last_evaluation', tf.float32),
    ('number_project', tf.int32),
    ('average_monthly_hours', tf.int32),
    ('time_spend_company', tf.int32),
    ('work_accident', tf.int32),
    ('promotion_last_5years', tf.int32),
    ('sales', tf.string),
    ('salary', tf.string),
]

feature_inputs = {
    feature_name: tf.placeholder(dtype=feature_dtype, shape=[1], name=feature_name)
    for feature_name, feature_dtype in _FEATURE_SPECS
}

# Export a SavedModel whose serving signature takes the raw placeholders
# directly (no tf.Example parsing step).
serving_input_receiver_fn = tf.estimator.export.build_raw_serving_input_receiver_fn(feature_inputs)
model.export_savedmodel("./exported_model", serving_input_receiver_fn)
- saved_model_cli show --dir ../python/notebooks/exported_model/1510845524/ --all
- MetaGraphDef with tag-set: 'serve' contains the following SignatureDefs:
- signature_def['predict']:
- The given SavedModel SignatureDef contains the following input(s):
- inputs['average_monthly_hours'] tensor_info:
- dtype: DT_INT32
- shape: (-1, 1)
- name: average_monthly_hours_1:0
- inputs['last_evaluation'] tensor_info:
- dtype: DT_FLOAT
- shape: (-1, 1)
- name: last_evaluation_1:0
- inputs['number_project'] tensor_info:
- dtype: DT_INT32
- shape: (-1, 1)
- name: number_project_1:0
- inputs['promotion_last_5years'] tensor_info:
- dtype: DT_INT32
- shape: (-1, 1)
- name: promotion_last_5years_1:0
- inputs['salary'] tensor_info:
- dtype: DT_STRING
- shape: (-1, 1)
- name: salary_1:0
- inputs['sales'] tensor_info:
- dtype: DT_STRING
- shape: (-1, 1)
- name: sales_1:0
- inputs['satisfaction_level'] tensor_info:
- dtype: DT_FLOAT
- shape: (-1, 1)
- name: satisfaction_level:0
- inputs['time_spend_company'] tensor_info:
- dtype: DT_INT32
- shape: (-1, 1)
- name: time_spend_company_1:0
- inputs['work_accident'] tensor_info:
- dtype: DT_INT32
- shape: (-1, 1)
- name: work_accident_1:0
- The given SavedModel SignatureDef contains the following output(s):
- outputs['class_ids'] tensor_info:
- dtype: DT_INT64
- shape: (-1, 1)
- name: dnn/head/predictions/classes:0
- outputs['classes'] tensor_info:
- dtype: DT_STRING
- shape: (-1, 1)
- name: dnn/head/predictions/str_classes:0
- outputs['logistic'] tensor_info:
- dtype: DT_FLOAT
- shape: (-1, 1)
- name: dnn/head/predictions/logistic:0
- outputs['logits'] tensor_info:
- dtype: DT_FLOAT
- shape: (-1, 1)
- name: dnn/head/predictions/logits:0
- outputs['probabilities'] tensor_info:
- dtype: DT_FLOAT
- shape: (-1, 2)
- name: dnn/head/predictions/probabilities:0
- Method name is: tensorflow/serving/predict
- tensorflow_model_server --port=8500 --model_name=attrition --model_base_path=`pwd`/exported_model/
- 2017-11-20 12:18:20.380088: I external/org_tensorflow/tensorflow/cc/saved_model/loader.cc:155] Restoring SavedModel bundle.
- 2017-11-20 12:18:20.386764: I external/org_tensorflow/tensorflow/cc/saved_model/loader.cc:190] Running LegacyInitOp on SavedModel bundle.
- 2017-11-20 12:18:20.393305: I external/org_tensorflow/tensorflow/cc/saved_model/loader.cc:284] Loading SavedModel: success. Took 44036 microseconds.
- 2017-11-20 12:18:20.393473: I tensorflow_serving/core/loader_harness.cc:86] Successfully loaded servable version {name: attrition version: 1510845524}
- 2017-11-20 12:18:20.397036: I tensorflow_serving/model_servers/main.cc:288] Running ModelServer at 0.0.0.0:8500 ...
Add Comment
Please, Sign In to add comment