@@ -12,6 +12,8 @@ licenses(["notice"]) # Apache 2.0
# Make the license file available to downstream packages.
exports_files(["LICENSE"])
1414
15+ load ("//tensorflow_serving:serving.bzl" , "serving_proto_library" )
16+
1517filegroup (
1618 name = "all_files" ,
1719 srcs = glob (
@@ -23,19 +25,12 @@ filegroup(
2325 ),
2426)
2527
# Proto + gRPC service library for the MNIST inference API.
# Replaces the previous hand-listed cc_library of generated .pb.cc/.pb.h
# files with the project's serving_proto_library macro.
serving_proto_library(
    name = "mnist_inference_proto",
    srcs = ["mnist_inference.proto"],
    has_services = 1,
    cc_api_version = 2,
    cc_grpc_version = 1,
)
4035
4136py_library (
@@ -68,7 +63,7 @@ cc_binary(
6863 "@tf//tensorflow/core:lib" ,
6964 "@tf//tensorflow/core:protos_all_cc" ,
7065 "@tf//tensorflow/core:tensorflow" ,
71- ":mnist_grpc_cc " ,
66+ ":mnist_inference_proto " ,
7267 "//tensorflow_serving/session_bundle" ,
7368 "//tensorflow_serving/session_bundle:manifest_proto" ,
7469 "//tensorflow_serving/session_bundle:signature" ,
@@ -87,7 +82,7 @@ cc_binary(
8782 "@tf//tensorflow/core:lib" ,
8883 "@tf//tensorflow/core:protos_all_cc" ,
8984 "@tf//tensorflow/core:tensorflow" ,
90- ":mnist_grpc_cc " ,
85+ ":mnist_inference_proto " ,
9186 "//tensorflow_serving/batching:batch_scheduler" ,
9287 "//tensorflow_serving/batching:batch_scheduler_retrier" ,
9388 "//tensorflow_serving/batching:streaming_batch_scheduler" ,
@@ -113,3 +108,63 @@ py_binary(
113108 ":mnist_input_data" ,
114109 ],
115110)
111+
# Proto + gRPC service library for the Inception inference API.
serving_proto_library(
    name = "inception_inference_proto",
    srcs = ["inception_inference.proto"],
    has_services = 1,
    cc_api_version = 2,
    cc_grpc_version = 1,
)
119+
# Exports a trained Inception model as a servable session bundle.
py_binary(
    name = "inception_export",
    srcs = ["inception_export.py"],
    deps = [
        # Buildifier ordering: workspace-local first, then external repos.
        "//tensorflow_serving/session_bundle:exporter",
        "@inception_model//inception",
        "@tf//tensorflow:tensorflow_py",
    ],
)
131+
# gRPC server binary that serves the exported Inception model.
cc_binary(
    name = "inception_inference",
    srcs = ["inception_inference.cc"],
    # libm is needed by the image-processing math in the server.
    linkopts = ["-lm"],
    deps = [
        # Buildifier ordering: ":" targets, then "//" targets, then "@" repos.
        ":inception_inference_proto",
        "//tensorflow_serving/batching:batch_scheduler",
        "//tensorflow_serving/batching:batch_scheduler_retrier",
        "//tensorflow_serving/batching:streaming_batch_scheduler",
        "//tensorflow_serving/core:manager",
        "//tensorflow_serving/core:servable_handle",
        "//tensorflow_serving/core:servable_id",
        "//tensorflow_serving/servables/tensorflow:simple_servers",
        "//tensorflow_serving/session_bundle",
        "//tensorflow_serving/session_bundle:manifest_proto",
        "//tensorflow_serving/session_bundle:signature",
        "//tensorflow_serving/util:unique_ptr_with_deps",
        "@grpc//:grpc",
        "@tf//tensorflow/core:framework",
        "@tf//tensorflow/core:lib",
        "@tf//tensorflow/core:protos_all_cc",
        "@tf//tensorflow/core:tensorflow",
    ],
)
158+
# Example client that queries the Inception inference server.
# NOTE(review): inception_inference_pb2.py is a checked-in generated stub;
# presumably serving_proto_library does not yet emit a py_api target — verify.
py_binary(
    name = "inception_client",
    srcs = [
        "inception_client.py",
        "inception_inference_pb2.py",
    ],
    # Label files needed at runtime to map class ids to human-readable names.
    data = [
        "imagenet_lsvrc_2015_synsets.txt",
        "imagenet_metadata.txt",
    ],
    deps = ["@tf//tensorflow:tensorflow_py"],
)