Skip to content

Commit

Permalink
Add end-to-end example and tutorial for serving InceptionV3 model in Kubernetes.
Browse files Browse the repository at this point in the history

Change: 117868038
  • Loading branch information
fangweili authored and tensorflower-gardener committed Mar 22, 2016
1 parent 9c90bdf commit e1c16e4
Show file tree
Hide file tree
Showing 21 changed files with 24,079 additions and 1,248 deletions.
5 changes: 5 additions & 0 deletions WORKSPACE
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,11 @@ local_repository(
path = __workspace_dir__ + "/tensorflow",
)

# Make the Inception model sources (checked out under tf_models/inception)
# available as the external repository "@inception_model" so example targets
# can depend on "@inception_model//inception".
local_repository(
    name = "inception_model",
    path = __workspace_dir__ + "/tf_models/inception",
)

# Run the TensorFlow workspace macro from the tensorflow submodule, rooting
# its paths at "tensorflow/" and exposing it as the "@tf" repository.
load('//tensorflow/tensorflow:workspace.bzl', 'tf_workspace')
tf_workspace("tensorflow/", "@tf")

Expand Down
85 changes: 70 additions & 15 deletions tensorflow_serving/example/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ licenses(["notice"]) # Apache 2.0

exports_files(["LICENSE"])

load("//tensorflow_serving:serving.bzl", "serving_proto_library")

filegroup(
name = "all_files",
srcs = glob(
Expand All @@ -23,19 +25,12 @@ filegroup(
),
)

cc_library(
name = "mnist_grpc_cc",
srcs = [
"mnist_inference.grpc.pb.cc",
"mnist_inference.pb.cc",
],
hdrs = [
"mnist_inference.grpc.pb.h",
"mnist_inference.pb.h",
],
deps = [
"@grpc//:grpc++",
],
# Proto + gRPC service stubs (C++) for the MNIST inference API, replacing the
# previously checked-in generated .pb.cc/.pb.h sources.
serving_proto_library(
    name = "mnist_inference_proto",
    srcs = ["mnist_inference.proto"],
    has_services = 1,
    cc_api_version = 2,
    cc_grpc_version = 1,
)

py_library(
Expand Down Expand Up @@ -68,7 +63,7 @@ cc_binary(
"@tf//tensorflow/core:lib",
"@tf//tensorflow/core:protos_all_cc",
"@tf//tensorflow/core:tensorflow",
":mnist_grpc_cc",
":mnist_inference_proto",
"//tensorflow_serving/session_bundle",
"//tensorflow_serving/session_bundle:manifest_proto",
"//tensorflow_serving/session_bundle:signature",
Expand All @@ -87,7 +82,7 @@ cc_binary(
"@tf//tensorflow/core:lib",
"@tf//tensorflow/core:protos_all_cc",
"@tf//tensorflow/core:tensorflow",
":mnist_grpc_cc",
":mnist_inference_proto",
"//tensorflow_serving/batching:batch_scheduler",
"//tensorflow_serving/batching:batch_scheduler_retrier",
"//tensorflow_serving/batching:streaming_batch_scheduler",
Expand All @@ -113,3 +108,63 @@ py_binary(
":mnist_input_data",
],
)

# Proto + gRPC service stubs (C++) for the Inception inference API; mirrors
# the mnist_inference_proto rule above.
serving_proto_library(
    name = "inception_inference_proto",
    srcs = ["inception_inference.proto"],
    has_services = 1,
    cc_api_version = 2,
    cc_grpc_version = 1,
)

# Tool that exports an Inception model for serving; depends on the Inception
# model code ("@inception_model") and the session_bundle exporter.
# NOTE: deps are sorted per buildifier convention; order is behavior-neutral.
py_binary(
    name = "inception_export",
    srcs = ["inception_export.py"],
    deps = [
        "//tensorflow_serving/session_bundle:exporter",
        "@inception_model//inception",
        "@tf//tensorflow:tensorflow_py",
    ],
)

# gRPC server binary for Inception inference. Links libm explicitly ("-lm")
# and pulls in the serving batching, core-manager, and session_bundle layers.
# NOTE: deps are sorted per buildifier convention (":", "//", "@"); dependency
# order has no effect on the build result.
cc_binary(
    name = "inception_inference",
    srcs = ["inception_inference.cc"],
    linkopts = ["-lm"],
    deps = [
        ":inception_inference_proto",
        "//tensorflow_serving/batching:batch_scheduler",
        "//tensorflow_serving/batching:batch_scheduler_retrier",
        "//tensorflow_serving/batching:streaming_batch_scheduler",
        "//tensorflow_serving/core:manager",
        "//tensorflow_serving/core:servable_handle",
        "//tensorflow_serving/core:servable_id",
        "//tensorflow_serving/servables/tensorflow:simple_servers",
        "//tensorflow_serving/session_bundle",
        "//tensorflow_serving/session_bundle:manifest_proto",
        "//tensorflow_serving/session_bundle:signature",
        "//tensorflow_serving/util:unique_ptr_with_deps",
        "@grpc//:grpc",
        "@tf//tensorflow/core:framework",
        "@tf//tensorflow/core:lib",
        "@tf//tensorflow/core:protos_all_cc",
        "@tf//tensorflow/core:tensorflow",
    ],
)

# Example client for the Inception inference server. Bundles the generated
# inception_inference_pb2.py stub as a source and ships the ImageNet synset
# and metadata text files as runtime data (presumably used by the client to
# label predictions — see inception_client.py).
py_binary(
    name = "inception_client",
    srcs = [
        "inception_client.py",
        "inception_inference_pb2.py",
    ],
    data = [
        "imagenet_lsvrc_2015_synsets.txt",
        "imagenet_metadata.txt",
    ],
    deps = ["@tf//tensorflow:tensorflow_py"],
)
Loading

0 comments on commit e1c16e4

Please sign in to comment.