Skip to content

Commit 4e53037

Browse files
[release-1.16] fix ML example (#6184)
* fix ML example * fix dockerize * type variable df * fix code --------- Co-authored-by: Yasuhiro Matsumoto <mattn.jp@gmail.com>
1 parent 9de70f0 commit 4e53037

File tree

3 files changed

+121
-69
lines changed

3 files changed

+121
-69
lines changed

code-samples/community/serving/machinelearning-python-bentoml/README.md

Lines changed: 95 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -39,15 +39,18 @@ as API endpoint with Knative Serving.
3939
API, which is the entry point for accessing this machine learning service.
4040

4141
```python
42-
from bentoml import env, artifacts, api, BentoService
43-
from bentoml.handlers import DataframeHandler
44-
from bentoml.artifact import SklearnModelArtifact
42+
import bentoml
43+
import joblib
4544
46-
@env(auto_pip_dependencies=True)
47-
@artifacts([SklearnModelArtifact('model')])
48-
class IrisClassifier(BentoService):
4945
50-
@api(DataframeHandler)
46+
@bentoml.service
47+
class IrisClassifier:
48+
iris_model = bentoml.models.get("iris_classifier:latest")
49+
50+
def __init__(self):
51+
self.model = joblib.load(self.iris_model.path_of("model.pkl"))
52+
53+
@bentoml.api
5154
def predict(self, df):
5255
return self.model.predict(df)
5356
```
@@ -58,10 +61,11 @@ as API endpoint with Knative Serving.
5861
given data and then save the model with BentoML to local disk.
5962

6063
```python
64+
import joblib
6165
from sklearn import svm
6266
from sklearn import datasets
6367
64-
from iris_classifier import IrisClassifier
68+
import bentoml
6569
6670
if __name__ == "__main__":
6771
# Load training data
@@ -72,14 +76,9 @@ as API endpoint with Knative Serving.
7276
clf = svm.SVC(gamma='scale')
7377
clf.fit(X, y)
7478
75-
# Create a iris classifier service instance
76-
iris_classifier_service = IrisClassifier()
77-
78-
# Pack the newly trained model artifact
79-
iris_classifier_service.pack('model', clf)
80-
81-
# Save the prediction service to disk for model serving
82-
saved_path = iris_classifier_service.save()
79+
with bentoml.models.create("iris_classifier") as bento_model:
80+
joblib.dump(clf, bento_model.path_of("model.pkl"))
81+
print(f"Model saved: {bento_model}")
8382
```
8483
8584
1. Run the `main.py` file to train and save the model:
@@ -91,48 +90,97 @@ as API endpoint with Knative Serving.
9190
1. Use BentoML CLI to check saved model's information.
9291
9392
```bash
94-
bentoml get IrisClassifier:latest
93+
bentoml get iris_classifier:latest
9594
```
9695
9796
Example:
9897
9998
```bash
100-
> bentoml get IrisClassifier:latest
99+
> bentoml get iris_classifier:latest -o json
101100
{
102-
"name": "IrisClassifier",
103-
"version": "20200305171229_0A1411",
104-
"uri": {
105-
"type": "LOCAL",
106-
"uri": "/Users/bozhaoyu/bentoml/repository/IrisClassifier/20200305171229_0A1411"
101+
"service": "iris_classifier:IrisClassifier",
102+
"name": "iris_classifier",
103+
"version": "ar67rxqxqcrqi7ol",
104+
"bentoml_version": "1.2.16",
105+
"creation_time": "2024-05-21T14:40:20.737900+00:00",
106+
"labels": {
107+
"owner": "bentoml-team",
108+
"project": "gallery"
107109
},
108-
"bentoServiceMetadata": {
110+
"models": [],
111+
"runners": [],
112+
"entry_service": "IrisClassifier",
113+
"services": [
114+
{
115+
"name": "IrisClassifier",
116+
"service": "",
117+
"models": [
118+
{
119+
"tag": "iris_sklearn:ml5evdaxpwrqi7ol",
120+
"module": "",
121+
"creation_time": "2024-05-21T14:21:17.070059+00:00"
122+
}
123+
],
124+
"dependencies": [],
125+
"config": {}
126+
}
127+
],
128+
"envs": [],
129+
"schema": {
109130
"name": "IrisClassifier",
110-
"version": "20200305171229_0A1411",
111-
"createdAt": "2020-03-06T01:12:49.431011Z",
112-
"env": {
113-
"condaEnv": "name: bentoml-IrisClassifier\nchannels:\n- defaults\ndependencies:\n- python=3.7.3\n- pip\n",
114-
"pipDependencies": "bentoml==0.6.2\nscikit-learn",
115-
"pythonVersion": "3.7.3"
116-
},
117-
"artifacts": [
118-
{
119-
"name": "model",
120-
"artifactType": "SklearnModelArtifact"
121-
}
122-
],
123-
"apis": [
131+
"type": "service",
132+
"routes": [
124133
{
125134
"name": "predict",
126-
"handlerType": "DataframeHandler",
127-
"docs": "BentoService API",
128-
"handlerConfig": {
129-
"orient": "records",
130-
"typ": "frame",
131-
"input_dtypes": null,
132-
"output_orient": "records"
135+
"route": "/predict",
136+
"batchable": false,
137+
"input": {
138+
"properties": {
139+
"df": {
140+
"title": "Df"
141+
}
142+
},
143+
"required": [
144+
"df"
145+
],
146+
"title": "Input",
147+
"type": "object"
148+
},
149+
"output": {
150+
"title": "AnyIODescriptor"
133151
}
134152
}
135153
]
154+
},
155+
"apis": [],
156+
"docker": {
157+
"distro": "debian",
158+
"python_version": "3.11",
159+
"cuda_version": null,
160+
"env": null,
161+
"system_packages": null,
162+
"setup_script": null,
163+
"base_image": null,
164+
"dockerfile_template": null
165+
},
166+
"python": {
167+
"requirements_txt": "./requirements.txt",
168+
"packages": null,
169+
"lock_packages": true,
170+
"pack_git_packages": true,
171+
"index_url": null,
172+
"no_index": null,
173+
"trusted_host": null,
174+
"find_links": null,
175+
"extra_index_url": null,
176+
"pip_args": null,
177+
"wheels": null
178+
},
179+
"conda": {
180+
"environment_yml": null,
181+
"channels": null,
182+
"dependencies": null,
183+
"pip": null
136184
}
137185
}
138186
```
@@ -141,7 +189,7 @@ as API endpoint with Knative Serving.
141189
BentoML CLI command to start an API server locally and test it with the `curl` command.
142190
143191
```bash
144-
bentoml serve IrisClassifier:latest
192+
bentoml serve iris_classifier:latest
145193
```
146194
147195
In another terminal window, make `curl` request with sample data to the API server
@@ -164,12 +212,8 @@ a Dockerfile is automatically generated when saving the model.
164212
username and run the following commands.
165213
166214
```bash
167-
# jq might not be installed on your local system, please follow jq install
168-
# instruction at https://stedolan.github.io/jq/download/
169-
saved_path=$(bentoml get IrisClassifier:latest -q | jq -r ".uri.uri")
170-
171215
# Build and push the container on your local machine.
172-
docker buildx build --platform linux/arm64,linux/amd64 -t "{username}/iris-classifier" --push $saved_path
216+
bentoml containerize iris_classifier:latest -t "{username}/iris-classifier" --push
173217
```
174218
175219
1. In `service.yaml`, replace `{username}` with your Docker hub username:
Lines changed: 21 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,23 @@
1-
from bentoml import env, artifacts, api, BentoService
2-
from bentoml.handlers import DataframeHandler
3-
from bentoml.artifact import SklearnModelArtifact
1+
import numpy as np
2+
import bentoml
3+
from pydantic import Field
4+
from bentoml.validators import Shape
5+
from typing_extensions import Annotated
6+
import joblib
47

5-
@env(auto_pip_dependencies=True)
6-
@artifacts([SklearnModelArtifact('model')])
7-
class IrisClassifier(BentoService):
88

9-
@api(DataframeHandler)
10-
def predict(self, df):
11-
return self.artifacts.model.predict(df)
9+
@bentoml.service
10+
class IrisClassifier:
11+
iris_model = bentoml.models.get("iris_classifier:latest")
12+
13+
def __init__(self):
14+
self.model = joblib.load(self.iris_model.path_of("model.pkl"))
15+
16+
@bentoml.api
17+
def predict(
18+
self,
19+
df: Annotated[np.ndarray, Shape((-1, 4))] = Field(
20+
default=[[5.2, 2.3, 5.0, 0.7]]
21+
),
22+
) -> np.ndarray:
23+
return self.model.predict(df)
Lines changed: 5 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
1+
import joblib
12
from sklearn import svm
23
from sklearn import datasets
34

4-
from iris_classifier import IrisClassifier
5+
import bentoml
56

67
if __name__ == "__main__":
78
# Load training data
@@ -12,11 +13,6 @@
1213
clf = svm.SVC(gamma='scale')
1314
clf.fit(X, y)
1415

15-
# Create a iris classifier service instance
16-
iris_classifier_service = IrisClassifier()
17-
18-
# Pack the newly trained model artifact
19-
iris_classifier_service.pack('model', clf)
20-
21-
# Save the prediction service to disk for model serving
22-
saved_path = iris_classifier_service.save()
16+
with bentoml.models.create("iris_classifier") as bento_model:
17+
joblib.dump(clf, bento_model.path_of("model.pkl"))
18+
print(f"Model saved: {bento_model}")

0 commit comments

Comments
 (0)