From ff796b10206de9a8738e792346b1b1dd22c7091c Mon Sep 17 00:00:00 2001 From: blamb Date: Wed, 7 Nov 2018 10:27:03 -0800 Subject: [PATCH] Fix book.yaml Remove 'via' PiperOrigin-RevId: 220486241 --- tensorflow_serving/g3doc/_book.yaml | 4 ---- tensorflow_serving/g3doc/custom_servable.md | 2 +- tensorflow_serving/g3doc/docker.md | 10 +++++----- tensorflow_serving/g3doc/overview.md | 2 +- tensorflow_serving/g3doc/setup.md | 2 +- 5 files changed, 8 insertions(+), 12 deletions(-) diff --git a/tensorflow_serving/g3doc/_book.yaml b/tensorflow_serving/g3doc/_book.yaml index 95f253ea06a..8f64a4108be 100644 --- a/tensorflow_serving/g3doc/_book.yaml +++ b/tensorflow_serving/g3doc/_book.yaml @@ -25,12 +25,8 @@ upper_tabs: path: /serving/setup - title: Serve a TensorFlow model path: /serving/serving_basic - - title: REST API - path: /serving/api_rest - title: Build a TensorFlow ModelServer path: /serving/serving_advanced - - title: Use TensorFlow Serving with Docker - path: /serving/docker - title: Use TensorFlow Serving with Kubernetes path: /serving/serving_kubernetes - title: Create a new kind of servable diff --git a/tensorflow_serving/g3doc/custom_servable.md b/tensorflow_serving/g3doc/custom_servable.md index 28cce071d0b..a7333871414 100644 --- a/tensorflow_serving/g3doc/custom_servable.md +++ b/tensorflow_serving/g3doc/custom_servable.md @@ -30,7 +30,7 @@ document). In addition to your `Loader`, you will need to define a `SourceAdapter` that instantiates a `Loader` from a given storage path. Most simple use-cases can -specify the two objects concisely via the `SimpleLoaderSourceAdapter` class +specify the two objects concisely with the `SimpleLoaderSourceAdapter` class (in `core/simple_loader.h`). Advanced use-cases may opt to specify `Loader` and `SourceAdapter` classes separately using the lower-level APIs, e.g. 
if the `SourceAdapter` needs to retain some state, and/or if state needs to be shared diff --git a/tensorflow_serving/g3doc/docker.md b/tensorflow_serving/g3doc/docker.md index dfbfaba70dd..456fc82ba44 100644 --- a/tensorflow_serving/g3doc/docker.md +++ b/tensorflow_serving/g3doc/docker.md @@ -1,6 +1,6 @@ -# Using TensorFlow Serving via Docker +# Using TensorFlow Serving with Docker -One of the easiest ways to get started using TensorFlow Serving is via +One of the easiest ways to get started using TensorFlow Serving is with [Docker](http://www.docker.com/). ## Installing Docker @@ -136,8 +136,8 @@ deploy and will load your model for serving on startup. ### Serving example -Let's run through a full example where we load a SavedModel and call it via the -REST API. First pull the serving image: +Let's run through a full example where we load a SavedModel and call it using +the REST API. First pull the serving image: ```shell docker pull tensorflow/serving @@ -209,7 +209,7 @@ details, see [running a serving image](#running-a-serving-image). ### GPU Serving example Let's run through a full example where we load a model with GPU-bound ops and -call it via the REST API. +call it using the REST API. First install [`nvidia-docker`](#install-nvidia-docker). 
Next you can pull the latest TensorFlow Serving GPU docker image by running: diff --git a/tensorflow_serving/g3doc/overview.md b/tensorflow_serving/g3doc/overview.md index 4a85ae1982b..c24f59b0d9a 100644 --- a/tensorflow_serving/g3doc/overview.md +++ b/tensorflow_serving/g3doc/overview.md @@ -121,7 +121,7 @@ TensorFlow Serving Managers provide a simple, narrow interface -- ### Core -**TensorFlow Serving Core** manages (via standard TensorFlow Serving APIs) the +Using the standard TensorFlow Serving APIs, **TensorFlow Serving Core** manages the following aspects of servables: * lifecycle diff --git a/tensorflow_serving/g3doc/setup.md b/tensorflow_serving/g3doc/setup.md index 70429202244..ccc419e7d45 100644 --- a/tensorflow_serving/g3doc/setup.md +++ b/tensorflow_serving/g3doc/setup.md @@ -4,7 +4,7 @@ ### Installing using Docker -The easiest and most straight-forward way of using TensorFlow Serving is via +The easiest and most straight-forward way of using TensorFlow Serving is with [Docker images](docker.md). We highly recommend this route unless you have specific needs that are not addressed by running in a container.