@@ -39,15 +39,18 @@ as API endpoint with Knative Serving.
39
39
API, which is the entry point for accessing this machine learning service.
40
40
41
41
```python
42
- from bentoml import env, artifacts, api, BentoService
43
- from bentoml.handlers import DataframeHandler
44
- from bentoml.artifact import SklearnModelArtifact
42
+ import bentoml
43
+ import joblib
45
44
46
- @env(auto_pip_dependencies=True)
47
- @artifacts([SklearnModelArtifact('model')])
48
- class IrisClassifier(BentoService):
49
45
50
- @api(DataframeHandler)
46
+ @bentoml.service
47
+ class IrisClassifier:
48
+ iris_model = bentoml.models.get("iris_classifier:latest")
49
+
50
+ def __init__(self):
51
+ self.model = joblib.load(self.iris_model.path_of("model.pkl"))
52
+
53
+ @bentoml.api
51
54
def predict(self, df):
52
55
return self.model.predict(df)
53
56
```
@@ -58,10 +61,11 @@ as API endpoint with Knative Serving.
58
61
given data and then save the model with BentoML to local disk.
59
62
60
63
```python
64
+ import joblib
61
65
from sklearn import svm
62
66
from sklearn import datasets
63
67
64
- from iris_classifier import IrisClassifier
68
+ import bentoml
65
69
66
70
if __name__ == "__main__":
67
71
# Load training data
@@ -72,14 +76,9 @@ as API endpoint with Knative Serving.
72
76
clf = svm.SVC(gamma='scale')
73
77
clf.fit(X, y)
74
78
75
- # Create a iris classifier service instance
76
- iris_classifier_service = IrisClassifier()
77
-
78
- # Pack the newly trained model artifact
79
- iris_classifier_service.pack('model', clf)
80
-
81
- # Save the prediction service to disk for model serving
82
- saved_path = iris_classifier_service.save()
79
+ with bentoml.models.create("iris_classifier") as bento_model:
80
+ joblib.dump(clf, bento_model.path_of("model.pkl"))
81
+ print(f"Model saved: {bento_model}")
83
82
```
84
83
85
84
1. Run the `main.py` file to train and save the model:
@@ -91,48 +90,97 @@ as API endpoint with Knative Serving.
91
90
1. Use BentoML CLI to check saved model's information.
92
91
93
92
```bash
94
- bentoml get IrisClassifier:latest
93
+ bentoml get iris_classifier:latest
95
94
```
96
95
97
96
Example:
98
97
99
98
```bash
100
- > bentoml get IrisClassifier:latest
99
+ > bentoml get iris_classifier:latest -o json
101
100
{
102
- "name": "IrisClassifier",
103
- "version": "20200305171229_0A1411",
104
- "uri": {
105
- "type": "LOCAL",
106
- "uri": "/Users/bozhaoyu/bentoml/repository/IrisClassifier/20200305171229_0A1411"
101
+ "service": "iris_classifier:IrisClassifier",
102
+ "name": "iris_classifier",
103
+ "version": "ar67rxqxqcrqi7ol",
104
+ "bentoml_version": "1.2.16",
105
+ "creation_time": "2024-05-21T14:40:20.737900+00:00",
106
+ "labels": {
107
+ "owner": "bentoml-team",
108
+ "project": "gallery"
107
109
},
108
- "bentoServiceMetadata": {
110
+ "models": [],
111
+ "runners": [],
112
+ "entry_service": "IrisClassifier",
113
+ "services": [
114
+ {
115
+ "name": "IrisClassifier",
116
+ "service": "",
117
+ "models": [
118
+ {
119
+ "tag": "iris_sklearn:ml5evdaxpwrqi7ol",
120
+ "module": "",
121
+ "creation_time": "2024-05-21T14:21:17.070059+00:00"
122
+ }
123
+ ],
124
+ "dependencies": [],
125
+ "config": {}
126
+ }
127
+ ],
128
+ "envs": [],
129
+ "schema": {
109
130
"name": "IrisClassifier",
110
- "version": "20200305171229_0A1411",
111
- "createdAt": "2020-03-06T01:12:49.431011Z",
112
- "env": {
113
- "condaEnv": "name: bentoml-IrisClassifier\nchannels:\n- defaults\ndependencies:\n- python=3.7.3\n- pip\n",
114
- "pipDependencies": "bentoml==0.6.2\nscikit-learn",
115
- "pythonVersion": "3.7.3"
116
- },
117
- "artifacts": [
118
- {
119
- "name": "model",
120
- "artifactType": "SklearnModelArtifact"
121
- }
122
- ],
123
- "apis": [
131
+ "type": "service",
132
+ "routes": [
124
133
{
125
134
"name": "predict",
126
- "handlerType": "DataframeHandler",
127
- "docs": "BentoService API",
128
- "handlerConfig": {
129
- "orient": "records",
130
- "typ": "frame",
131
- "input_dtypes": null,
132
- "output_orient": "records"
135
+ "route": "/predict",
136
+ "batchable": false,
137
+ "input": {
138
+ "properties": {
139
+ "df": {
140
+ "title": "Df"
141
+ }
142
+ },
143
+ "required": [
144
+ "df"
145
+ ],
146
+ "title": "Input",
147
+ "type": "object"
148
+ },
149
+ "output": {
150
+ "title": "AnyIODescriptor"
133
151
}
134
152
}
135
153
]
154
+ },
155
+ "apis": [],
156
+ "docker": {
157
+ "distro": "debian",
158
+ "python_version": "3.11",
159
+ "cuda_version": null,
160
+ "env": null,
161
+ "system_packages": null,
162
+ "setup_script": null,
163
+ "base_image": null,
164
+ "dockerfile_template": null
165
+ },
166
+ "python": {
167
+ "requirements_txt": "./requirements.txt",
168
+ "packages": null,
169
+ "lock_packages": true,
170
+ "pack_git_packages": true,
171
+ "index_url": null,
172
+ "no_index": null,
173
+ "trusted_host": null,
174
+ "find_links": null,
175
+ "extra_index_url": null,
176
+ "pip_args": null,
177
+ "wheels": null
178
+ },
179
+ "conda": {
180
+ "environment_yml": null,
181
+ "channels": null,
182
+ "dependencies": null,
183
+ "pip": null
136
184
}
137
185
}
138
186
```
@@ -141,7 +189,7 @@ as API endpoint with Knative Serving.
141
189
BentoML CLI command to start an API server locally and test it with the `curl` command.
142
190
143
191
```bash
144
- bentoml serve IrisClassifier:latest
192
+ bentoml serve iris_classifier:latest
145
193
```
146
194
147
195
In another terminal window, make `curl` request with sample data to the API server
@@ -164,12 +212,8 @@ a Dockerfile is automatically generated when saving the model.
164
212
username and run the following commands.
165
213
166
214
```bash
167
- # jq might not be installed on your local system, please follow jq install
168
- # instruction at https://stedolan.github.io/jq/download/
169
- saved_path=$(bentoml get IrisClassifier:latest -q | jq -r ".uri.uri")
170
-
171
215
# Build and push the container on your local machine.
172
- docker buildx build --platform linux/arm64,linux/amd64 -t "{username}/iris-classifier" --push $saved_path
216
+ bentoml containerize iris_classifier:latest -t "{username}/iris-classifier" --push
173
217
```
174
218
175
219
1. In `service.yaml`, replace `{username}` with your Docker hub username:
0 commit comments