From 4a5b2ce245cc501ef7d2bf979622da3becbb4c47 Mon Sep 17 00:00:00 2001 From: jgsch Date: Tue, 25 Jan 2022 13:12:34 +0100 Subject: [PATCH 01/17] better webcam example (#29) * better print of outputs * add resize argument --- examples/webcam_stream.py | 58 +++++++++++++++++++++++++++------------ 1 file changed, 40 insertions(+), 18 deletions(-) diff --git a/examples/webcam_stream.py b/examples/webcam_stream.py index 026cac9..707227c 100644 --- a/examples/webcam_stream.py +++ b/examples/webcam_stream.py @@ -12,7 +12,10 @@ import time import cv2 +import imutils import numpy as np +from rich.live import Live +from rich.spinner import Spinner from i2_client import I2Client @@ -20,6 +23,7 @@ parser.add_argument("--url", type=str, help="", required=True) parser.add_argument("--access_uuid", type=str, help="", required=True) parser.add_argument("--frame_rate", type=int, help="", default=15) +parser.add_argument("--resize_width", type=int, help="", default=None) args = parser.parse_args() @@ -30,29 +34,47 @@ async def main(): prev = 0 async with I2Client(args.url, args.access_uuid) as client: - while True: - time_elapsed = time.time() - prev - check, frame = cam.read() - if time_elapsed < 1.0 / args.frame_rate: - # force the webcam frame rate so the bottleneck is the - # inference, not the camera performance. - continue - prev = time.time() - print("send...", end=" ") + spinner = Spinner("dots2", "connecting...") + with Live(spinner, refresh_per_second=20): - start = time.time() - outputs = await client.async_inference(frame) - end = time.time() + durations = [] - print(f"got! in {end - start:.4f} secs (send + inference + receive)") + while True: - concatenate_imgs = np.concatenate((frame, outputs[0]), axis=1) - cv2.imshow("original / inference ", concatenate_imgs) + # 1. get webcam frame - key = cv2.waitKey(1) - if key == 27: - break + time_elapsed = time.time() - prev + check, frame = cam.read() + if time_elapsed < 1.0 / args.frame_rate: + # force the webcam frame rate so the bottleneck is the + # inference, not the camera performance. + continue + prev = time.time() + + if args.resize_width is not None: + frame = imutils.resize(frame, width=args.resize_width) + + # 2. inference + + start = time.time() + outputs = await client.async_inference(frame) + durations.append(time.time() - start) + + # 3. 
show

+                spinner.text = (
+                    f"send + infer + receive: {durations[-1]:.4f} secs "
+                    + f"(mean: {np.mean(durations):.4f}, std: {np.std(durations):.4f}, "
+                    + f"min: {np.min(durations):.4f}, max: {np.max(durations):.4f})"
+                )
+
+                concatenate_imgs = np.concatenate((frame, outputs[0]), axis=1)
+                cv2.imshow("original / inference ", concatenate_imgs)
+
+                key = cv2.waitKey(1)
+                if key == 27:
+                    break
 
     cam.release()
     cv2.destroyAllWindows()

From 9c463ae75093d4c88ce97fad5c1ac839d6a004e9 Mon Sep 17 00:00:00 2001
From: jGsch
Date: Tue, 25 Jan 2022 14:12:31 +0100
Subject: [PATCH 02/17] [hot fix] add missing package to setup

---
 setup.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setup.py b/setup.py
index 915a94e..eba92e0 100644
--- a/setup.py
+++ b/setup.py
@@ -22,6 +22,7 @@
         "archipel-utils>=0.1.6",
         "click>=8.0",
         "docker>=4.4",
+        "imutils>=0.5.4",
         "msgpack>=1.0",
         "numpy>=1.19",
         "rich>=10.13",

From 2ee04ab3194e502b044eb4f64ed75ce3a72a0cfa Mon Sep 17 00:00:00 2001
From: EmilEOGG
Date: Thu, 10 Feb 2022 17:16:05 +0100
Subject: [PATCH 03/17] fixed archipel-utils version

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index eba92e0..2eb9df6 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
     name="i2_client",
     version=version,
     install_requires=[
-        "archipel-utils>=0.1.6",
+        "archipel-utils==0.1.7",
         "click>=8.0",
         "docker>=4.4",
         "imutils>=0.5.4",

From 1d284bd99822c0106e75040202d26c704693b3dc Mon Sep 17 00:00:00 2001
From: jgsch
Date: Thu, 17 Feb 2022 17:36:04 +0100
Subject: [PATCH 04/17] remove useless warnings (#32)

* remove wrong warning when builtin value in encode/decode
* update to archipel-utils 0.1.7 and remove deprecated functions

---
 i2_client/client.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/i2_client/client.py b/i2_client/client.py
index 7acf349..06c5557 100644
--- a/i2_client/client.py
+++ b/i2_client/client.py
@@ -45,11 +45,11 @@ def __init__(self, url: str, access_key: str, debug: bool = True):
 
         encode_functions = {
             "dict": lambda x: x,
-            "numpy.ndarray": utils.serialize_img,
+            "numpy.ndarray": utils.serialize_array,
         }
         decode_functions = {
             "dict": lambda x: x,
-            "numpy.ndarray": utils.deserialize_img,
+            "numpy.ndarray": utils.deserialize_array,
         }
         self.available_transforms = {
             "encode": encode_functions,
@@ -95,16 +95,16 @@ async def __aenter__(self):
         for key, value in types.items():
             arg = "encode" if key == "input_type" else "decode"
 
-            if value is None:
+            if value == "None" or value is None:
                 log.info(f"{key}: built-in")
-            elif value not in self.available_transforms[arg]:
+            elif value in self.available_transforms[arg]:
+                log.info(f"{key}: {value}")
+                self.transforms[arg] = self.available_transforms[arg][value]
+            else:
                 log.warning(
                     f"Unknown {key} provided by task ({value}). You must provide "
                     + f"one to the inference function with the '{arg}' argument."
                )
-        else:
-            log.info(f"{key}: {value}")
-            self.transforms[arg] = self.available_transforms[arg][value]
 
         return self
 

From 59cc72090d03c7a2a73029d263d51da5769d3c86 Mon Sep 17 00:00:00 2001
From: jgsch
Date: Thu, 17 Feb 2022 17:36:58 +0100
Subject: [PATCH 05/17] client inference return status and outputs (#34)

* inference returns a bool saying whether the inference succeeded + update typing
* update examples
* update testing

---
 examples/simple.py        | 15 ++++++++-------
 examples/video.py         |  6 +++++-
 examples/webcam_stream.py |  5 ++++-
 i2_client/client.py       | 22 ++++++++++++++--------
 tests/test_client.py      | 12 +++++++++---
 5 files changed, 40 insertions(+), 20 deletions(-)

diff --git a/examples/simple.py b/examples/simple.py
index 0e2ffb1..950e7ad 100644
--- a/examples/simple.py
+++ b/examples/simple.py
@@ -32,15 +32,16 @@
     raise FileNotFoundError("invalid image")
 
 start = time.time()
-outputs = i2_client.inference(img)
-end = time.time()
+success, output = i2_client.inference(img)[0]
+duration = time.time() - start
 
-print(
-    f"duration: {end - start:.4f} secs (open connection + send + inference + receive)"
-)
-print("press on any key to quit...")
+print(f"duration: {duration:.4f} secs (open connection + send + inference + receive)")
 
-concatenate_imgs = np.concatenate((img, outputs[0]), axis=1)
+if not success:
+    raise RuntimeError(output)
+
+print("press any key to quit...")
+concatenate_imgs = np.concatenate((img, output), axis=1)
 cv2.imshow("original / inference ", concatenate_imgs)
 cv2.waitKey(0)
 cv2.destroyAllWindows()
diff --git a/examples/video.py b/examples/video.py
index 37de557..65b02a2 100644
--- a/examples/video.py
+++ b/examples/video.py
@@ -77,7 +77,11 @@ async def main():
             break
 
         outputs = await client.async_inference(frame)
-        out.write(outputs[0])
+
+        success, output = outputs[0]
+        if not success:
+            raise RuntimeError(output)
+        out.write(output)
 
         count += 1
         if not bool(count % 25):
diff --git a/examples/webcam_stream.py b/examples/webcam_stream.py
index 707227c..c5b529d 100644
--- a/examples/webcam_stream.py
+++ b/examples/webcam_stream.py
@@ -69,9 +69,12 @@ async def main():
                     + f"min: {np.min(durations):.4f}, max: {np.max(durations):.4f})"
                 )
 
+                success, output = outputs[0]
+                if not success:
+                    raise RuntimeError(output)
+
                 concatenate_imgs = np.concatenate((frame, output), axis=1)
                 cv2.imshow("original / inference ", concatenate_imgs)
-
                 key = cv2.waitKey(1)
                 if key == 27:
                     break
diff --git a/i2_client/client.py b/i2_client/client.py
index 06c5557..84cc4af 100644
--- a/i2_client/client.py
+++ b/i2_client/client.py
@@ -9,6 +9,7 @@
 
 import asyncio
 import logging
+from typing import Any, Callable, List, Tuple
 
 import archipel_utils as utils
 import msgpack
@@ -41,8 +42,6 @@ def __init__(self, url: str, access_key: str, debug: bool = True):
         if debug:
             log.setLevel(logging.DEBUG)
 
-        self.websocket = None
-
         encode_functions = {
             "dict": lambda x: x,
             "numpy.ndarray": utils.serialize_array,
@@ -122,7 +121,9 @@ async def __aexit__(self, *args, **kwargs):
         """
         await self._conn.__aexit__(*args, **kwargs)
 
-    async def async_inference(self, inputs, encode=None, decode=None):
+    async def async_inference(
+        self, inputs: Any, encode: Callable = None, decode: Callable = None
+    ) -> List[Tuple[bool, Any]]:
         """Send inference to archipel in an async way.
 
         Args:
            inputs: Inputs need to be inferred.
            encode: Optional; Specify a specific input encoding.
            decode: Optional; Specify a specific output decoding.

         Returns:
-            The outputs from the inference.
+            A list of tuples, each composed of two values: a bool indicating
+            whether the inference succeeded, and the inference output on
+            success or an error message on failure.
 
         Raises:
             ValueError: There was an error encoding or packing the given
                 inputs.
             RuntimeError: There was an error during the inference (the specific
                 error message is printed).
         """
+
         if not isinstance(inputs, list):
             inputs = [inputs]
 
@@ -171,13 +174,15 @@
                 inference = decoded_msg["data"]
                 if decode is not None:
                     inference = decode(inference)
-                outputs.append(inference)
+                outputs.append((True, inference))
             else:
-                outputs.append(decoded_msg["message"])
+                outputs.append((False, decoded_msg["message"]))
 
         return outputs
 
-    def inference(self, inputs, encode=None, decode=None):
+    def inference(
+        self, inputs: Any, encode: Callable = None, decode: Callable = None
+    ) -> List[Tuple[bool, Any]]:
         """Send inference to archipel in a sync way.
 
         Args:
            inputs: Inputs need to be inferred.
            encode: Optional; Specify a specific input encoding.
            decode: Optional; Specify a specific output decoding.

         Returns:
-            The outputs from the inference.
+            A list of tuples, each composed of two values: a bool indicating
+            whether the inference succeeded, and the inference output on
+            success or an error message on failure.
 
         Raises:
             None.
diff --git a/tests/test_client.py b/tests/test_client.py
index 06b05ec..c11e099 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -62,8 +62,11 @@ async def test_archipel_client_connection_async_success(setup):
     async def fake_user():
         await asyncio.sleep(0.1)
         async with I2Client(url, "good:access_key") as client:
-            inference = await client.async_inference(fake_data)
-            assert np.equal(inference, fake_data).all()
+            outputs = await client.async_inference(fake_data)
+            assert len(outputs) == 1
+            success, output = outputs[0]
+            assert success
+            assert np.equal(output, fake_data).all()
 
     async def fake_cld(websocket, path):
         recv = await websocket.recv()
@@ -235,7 +238,10 @@
     async def fake_user():
         await asyncio.sleep(0.1)
         async with I2Client(url, "good:access_key") as client:
             outputs = await client.async_inference("coucou")
-            assert outputs[0] == fake_msg
+            assert len(outputs) == 1
+            success, output = outputs[0]
+            assert not success
+            assert output == fake_msg
 
     async def fake_cld(websocket, path):
         await websocket.recv()

From b838e16b703367bc71dd16ff2683475fc3952e64 Mon Sep 17 00:00:00 2001
From: jgsch
Date: Thu, 17 Feb 2022 17:39:11 +0100
Subject: [PATCH 06/17] handle case when cv2 is not installed during inference
 (#31)

---
 i2_client/cli/client.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/i2_client/cli/client.py b/i2_client/cli/client.py
index 7b7570e..6b22552 100644
--- a/i2_client/cli/client.py
+++ b/i2_client/cli/client.py
@@ -6,8 +6,9 @@
 permission of the copyright holders. If you encounter this file and do not have
 permission, please contact the copyright holders and delete this file.
""" + + import click -import cv2 import numpy as np from i2_client.client import I2Client @@ -46,4 +47,9 @@ def infer(data, url, access_key, save_path, debug): # pragma: no cover if not isinstance(output, np.ndarray): print(output) else: - cv2.imshow(output) + try: + import cv2 + + cv2.imshow(output) + except ImportError: + print("`cv2` module not available, can not show inference.") From 8adda5a76f05966029f03744ab0388b36258b1ac Mon Sep 17 00:00:00 2001 From: jGsch Date: Thu, 17 Feb 2022 17:02:16 +0000 Subject: [PATCH 07/17] v0.4.0 --- CHANGELOG.md | 18 ++++++++++++++++++ i2_client/__init__.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d612649..e32c548 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,24 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). + +## [0.4.1] - 2022.02.17 + +### Features + +- **client inference return tuple with a bool representing inference success/fail and inference/error msg (#34)** + +### Improvements + +- doc improvements +- better webcam example (#29) +- updated `archipel-utils` version & remove deprecated warnings (#32) + +### Fixes + +- handle case when `cv2` not installed when inference (#31) + + ## [0.3.1] - 2021-12-02 ### Improvements diff --git a/i2_client/__init__.py b/i2_client/__init__.py index 15c5062..9dc11ff 100644 --- a/i2_client/__init__.py +++ b/i2_client/__init__.py @@ -10,6 +10,6 @@ from .cli import create_cli from .client import I2Client # noqa -__version__ = "0.3.1" +__version__ = "0.4.0" i2_cli = create_cli() From d3100300cad989f177ddce83b428952fceba279c Mon Sep 17 00:00:00 2001 From: EmilEOGG Date: Fri, 24 Jun 2022 15:27:28 +0200 Subject: [PATCH 08/17] Made the readme richer --- README.md | 33 ++++++++++++++++++++++++++++----- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index fe2be37..69498b7 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@ # Isquare client for Python -This repository contains the client for [isquare](isquare.ai). It is available under the form of python classes, as well as a command-line-interface. +This repository contains the official python client for [ISquare](isquare.ai). It is available under the form of python classes which are ready to use in your code, as well as a command-line-interface. We currently support inference with image, text & json files, as well as any numpy array or python dictionnary or string, both for input and output. + +The complete documentation for ISquare can be found [here](docs.isquare.ai) ## Installation @@ -16,27 +18,30 @@ pip install --editable . ### Additional requirements -To be able to test your models, you need the following packages: +To be able to test your model builds, you need the following packages: Docker >= 19.03.13 _Note_: If you only need the client for inference, this is not required. ## Usage +The client can be used to verify your model build (e.g. checking if they will properly run on [ISquare](isquare.ai)) and to perform inference calls to your deployed models. To use this client for inference, you need to have a model up and running on [ISquare](isquare.ai). Commands and their usage are described [here](docs/commands.md). 
 
-Guidelines on the code adaptation required to deploy a model on isquare.ai can be found [here](docs/isquare_tutorial.md)
+End-to-end guidelines on the code adaptation required to deploy a model on isquare.ai can be found [here](docs/isquare_tutorial.md)
 
 ## Examples
 
-- Build your i2 compatible docker image:
+### Command line interface
+#### Test if your model repository is Isquare-compatible
+To verify if your code will run smoothly on [ISquare](isquare.ai), you can perform a local build & unit test. This will build a container image with all your specific dependencies and perform an inference test. We've included an example of a simple computer vision model which returns the mirrored image it is given, and it can be tested by running:
 
 ```bash
 i2 build examples/tasks/mirror.py
 ```
+When you deploy a model with [ISquare](isquare.ai), you will be provided a url for the model, and requested to create access keys. Using a valid url & access keys (the ones displayed are examples), you can perform an inference with an Image model (e.g. the Mirror) and a `.png` image by running:
 
-Simple inference:
 
 ```bash
 i2 infer \
     --url wss://archipel-beta1.isquare.ai/43465956-8d6f-492f-ad45-91da69da44d0 \
     --access_uuid 48c1d60a-60fd-4643-90e4-cd0187b4fd9d \
     examples/test.png
 ```
 
 Other examples can be found [here](docs/getting_started.md).
 
+### Using a model inside your python code
+As you probably want to automate your model calls by integrating them directly into your code, we've provided several python classes that you can use directly. The main class for this is the `I2Client` class. A simple inference can be performed as follows:
+
+```python
+from i2_client import I2Client
+import cv2
+
+# You need your url, access key and an image
+url = "wss://archipel-beta1.isquare.ai/43465956-8d6f-492f-ad45-91da69da44d0"
+access_key = "472f9457-072c-4a1a-800b-75ecdd6041e1"
+img = cv2.imread("test.jpg")
+
+# Initialize the client & perform inference
+inference_client = I2Client(url, access_key)
+success, output = i2_client.inference(img)[0]
+```
+
+A more complex example, showing how to stream a camera to your model, can be found [here](examples/webcam_stream.py)

From 14186081d8b34f4730aaef47b99f3761fae18e2e Mon Sep 17 00:00:00 2001
From: EmilEOGG
Date: Fri, 24 Jun 2022 15:57:16 +0200
Subject: [PATCH 09/17] Added doc for examples

---
 README.md          |  2 +-
 examples/README.md | 84 ++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 85 insertions(+), 1 deletion(-)
 create mode 100644 examples/README.md

diff --git a/README.md b/README.md
index 69498b7..17d2d55 100644
--- a/README.md
+++ b/README.md
@@ -65,7 +65,7 @@ img = cv2.imread("test.jpg")
 
 # Initialize the client & perform inference
 inference_client = I2Client(url, access_key)
-success, output = i2_client.inference(img)[0]
+success, output = inference_client.inference(img)[0]
diff --git a/examples/README.md b/examples/README.md
new file mode 100644
index 0000000..e3cb1d3
--- /dev/null
+++ b/examples/README.md
@@ -0,0 +1,84 @@
+# Examples
+This directory shows 3 sample integrations of the [ISquare](isquare.ai) client for image inference, with 3 levels of complexity:
+- How to perform inference with an image
+- How to perform inference with a video
+- How to stream a camera to your model
+
+## Simple inference
+First, we'll look at how to perform a simple inference with an image file. To start, we need to import our libraries and initialize the client:
+```python
+from i2_client import I2Client
+import cv2
+import numpy as np
+
+# You need your url, access key and an image
+url = "wss://archipel-beta1.isquare.ai/43465956-8d6f-492f-ad45-91da69da44d0"
+access_key = "472f9457-072c-4a1a-800b-75ecdd6041e1"
+
+inference_client = I2Client(url, access_key)
+
+```
+Then, we load the image using OpenCV and verify it is loaded correctly:
+```python
+img = cv2.imread("test.jpg")
+if img is None:
+    raise FileNotFoundError("invalid image")
+```
+Finally, we just have to call our model using the client. If using an image-to-image model, we can show the original and the inferred image next to each other:
+```python
+success, output = inference_client.inference(img)[0]
+concatenate_imgs = np.concatenate((img, output), axis=1)
+cv2.imshow("original / inference ", concatenate_imgs)
+```
+And that's it for the simple usage of the client. Our client currently supports strings, numpy arrays, and any python dictionary objects, as long as they are serialisable. If you have a sentiment analysis model for text, your inference could look like the following:
+
+```python
+
+success, output = inference_client.inference("It's a rainy summer day")[0]
+```
+or, for a dictionary:
+```python
+
+success, output = inference_client.inference({"key": "value"})[0]
+```
+
+## Async example
+As inference might take a couple of seconds to process (mostly depending on your model), you might want to call your model in an async way. To show how to do that, we will write a client which streams your primary webcam to your model.
+
+We first capture the camera output using OpenCV, and then send the data to the model at a certain framerate:
+```python
+import asyncio
+import time
+
+url = "wss://archipel-beta1.isquare.ai/43465956-8d6f-492f-ad45-91da69da44d0"
+access_key = "472f9457-072c-4a1a-800b-75ecdd6041e1"
+frame_rate = 15
+
+async def main():
+    """Stream a webcam to the model."""
+    cam = cv2.VideoCapture(0)
+    prev = 0
+
+    async with I2Client(url, access_key) as client:
+        while True:
+
+            time_elapsed = time.time() - prev
+            check, frame = cam.read()  # read the cam
+            if time_elapsed < 1.0 / frame_rate:
+                # force the webcam frame rate so the bottleneck is the
+                # inference, not the camera performance.
+                continue
+            prev = time.time()
+            outputs = await client.async_inference(frame)
+            success, output = outputs[0]
+
+            if not success:
+                raise RuntimeError(output)
+
+            # showing original and inference for an image-to-image model
+            concatenate_imgs = np.concatenate((frame, output), axis=1)
+            cv2.imshow("original / inference ", concatenate_imgs)
+            cv2.waitKey(1)  # needed so imshow actually refreshes
+
+    cam.release()
+    cv2.destroyAllWindows()
+asyncio.run(main())
+
+```
+You can easily stream any source to your model using this type of integration, as well as seamlessly integrate your models in an async way, so that your code is completely independent of your model inference time.

From 24131b132054b2379339c60e9fc7a46b92db7842 Mon Sep 17 00:00:00 2001
From: EmilEOGG
Date: Fri, 24 Jun 2022 16:10:13 +0200
Subject: [PATCH 10/17] Added logo to readme

---
 README.md                 |   3 ++-
 docs/imgs/deploy_logo.png | Bin 0 -> 10611 bytes
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 docs/imgs/deploy_logo.png

diff --git a/README.md b/README.md
index 17d2d55..2f0f8fc 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,7 @@
+![Isquare deploy logo](docs/imgs/deploy_logo.png)
 # Isquare client for Python
 
-This repository contains the official python client for [ISquare](isquare.ai).
It is available in the form of python classes which are ready to use in your code, as well as a command-line interface. We currently support inference with image, text & json files, as well as any numpy array, python dictionary, or string, both for input and output.
+This repository contains the official python client for [ISquare](isquare.ai) deploy. It is available in the form of python classes which are ready to use in your code, as well as a command-line interface. We currently support inference with image, text & json files, as well as any numpy array, python dictionary, or string, both for input and output.
 
 The complete documentation for ISquare can be found [here](docs.isquare.ai)
 
diff --git a/docs/imgs/deploy_logo.png b/docs/imgs/deploy_logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..b3004b229565e7a0928412754e70162d966a9ed1
GIT binary patch
literal 10611
[base85-encoded payload of docs/imgs/deploy_logo.png omitted]
literal 0
HcmV?d00001

From a3e817c646e99371ad5fb22aa5515bab92eb4355 Mon Sep 17 00:00:00 2001
From: EmilEOGG
Date: Fri, 24 Jun 2022 16:12:31 +0200
Subject: [PATCH 11/17] better punctuation

---
 README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 2f0f8fc..accf84f 100644
--- a/README.md
+++ b/README.md
@@ -3,13 +3,13 @@
 
 This repository contains the official python client for [ISquare](isquare.ai) deploy. It is available in the form of python classes which are ready to use in your code, as well as a command-line interface. We currently support inference with image, text & json files, as well as any numpy array, python dictionary, or string, both for input and output.
 
-The complete documentation for ISquare can be found [here](docs.isquare.ai)
+The complete documentation for ISquare can be found [here](docs.isquare.ai).
 
 ## Installation
 
 ### From pip
 
-TODO when public
+TODO when public.
 
 ### From source
 
@@ -29,7 +29,7 @@ The client can be used to verify your model builds (e.g. checking if they will pr
 
 Commands and their usage are described [here](docs/commands.md).
 
-End-to-end guidelines on the code adaptation required to deploy a model on isquare.ai can be found [here](docs/isquare_tutorial.md)
+End-to-end guidelines on the code adaptation required to deploy a model on isquare.ai can be found [here](docs/isquare_tutorial.md).
 
 ## Examples
 

From 1ce55713caff0e40b273fab35db7f2fac2e32cb9 Mon Sep 17 00:00:00 2001
From: EmilEOGG
Date: Tue, 5 Jul 2022 11:27:59 +0200
Subject: [PATCH 12/17] Added new workertypes as well as a section about
 advanced usage

---
 docs/isquare_tutorial.md | 22 +++++++++++++++++++++-
 1 file changed, 21 insertions(+), 1 deletion(-)

diff --git a/docs/isquare_tutorial.md b/docs/isquare_tutorial.md
index 72ade19..1fc10d4 100644
--- a/docs/isquare_tutorial.md
+++ b/docs/isquare_tutorial.md
@@ -65,8 +65,13 @@ class ArchipelFacePixelizer(ImagesToImagesWorker):
 The imports specify, along with any dependencies or additional functions or classes, which workertype we will be using. In our case, the model takes images as inputs and also returns images (the same as before but with the faces blurred), so we choose the `ImagesToImagesWorker`. The name of the workerclass always reflects inputs and outputs. For the moment, the following workers are available:
 - `ImagesToImagesWorker` (e.g. Face blurring)
 - `ImagesToDictsWorker` (e.g. classification or detection)
+- `ImagesToStringsWorker` (e.g. for image annotation)
 - `StringsToDictsWorker` (e.g. NLP model)
-More types will be added on the way. If the input outputs types you are looking for are not available, please let us know. In the meantime know thaht most data formats can be converted to strings!
+- `DictsToDictsWorker` (e.g. NLP model)
+- `StringsToImagesWorker` (e.g. Image generation from captions)
+- `DictsToImagesWorker` (e.g. Image generation from captions)
+- `DictsToStringsWorker` (e.g. NLP model)
+More types will be added on the way. If the input/output types you are looking for are not available, please let us know by opening an issue. In the meantime, know that most data formats can be converted to strings and dicts!
 
 You can specify multiple classes and functions inside your workerscript (you can even write your whole code inside it, although we do not recommend it). The `__task_class_name__` specifies which class is your workerclass, in our case `ArchipelFacePixelizer`.
 
@@ -205,6 +210,21 @@ self.log(threshold_value)
 
 This value is now logged, and you can retrieve it on your dashboard on isquare.ai. In this way, the parameters of the model can be adapted to fit the real-life data, and the real performance of the model assessed. We highly encourage monitoring appropriate metrics for your model; this way you'll always know how well your model is really performing.
 
+## Advanced usage
+
+### Defining an example input
+Isquare will automatically test your model by generating an input corresponding to your worker class, with one exception: all `DictsToXWorker` workers. It could also be that you want to test your worker with a very specific input. In either of those cases, you can override the `get_dump_input` method of your worker, for example, as follows:
+```python
+class MusicalTastePredictor(DictsToDictsWorker):
+    """Predicts musical taste from completely arbitrary information."""
+    ...
+    ...
+    def get_dump_input(self):
+        return {"Name": "John", "Origin": "Switzerland", "Age": 18}
+
+```
+
+

From e456af7e43df7fcd75e69d76f61bebf0dfd35bf6 Mon Sep 17 00:00:00 2001
From: EmilEOGG
Date: Wed, 6 Jul 2022 15:32:24 +0200
Subject: [PATCH 13/17] Replaced i2 by i2py

---
 CHANGELOG.md            |  6 ++++++
 README.md               |  4 ++--
 docs/commands.md        | 12 ++++++------
 docs/getting_started.md |  2 +-
 4 files changed, 15 insertions(+), 9 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e32c548..4bf0989 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
+## [0.4.2] - 2022-07-06
+
+### Improvements
+- doc improvement for release
+- name change to avoid confusion
+
 
 ## [0.4.0] - 2022-02-17
 
diff --git a/README.md b/README.md
index accf84f..6a67f27 100644
--- a/README.md
+++ b/README.md
@@ -39,13 +39,13 @@ End-to-end guidelines on the code adaptation required to deploy a model on isqua
 To verify if your code will run smoothly on [ISquare](isquare.ai), you can perform a local build & unit test. This will build a container image with all your specific dependencies and perform an inference test. We've included an example of a simple computer vision model which returns the mirrored image it is given, and it can be tested by running:
 
 ```bash
- i2 build examples/tasks/mirror.py
+ i2py build examples/tasks/mirror.py
 ```
 When you deploy a model with [ISquare](isquare.ai), you will be provided a url for the model, and requested to create access keys. Using a valid url & access keys (the ones displayed are examples), you can perform an inference with an Image model (e.g. the Mirror) and a `.png` image by running:
 
 ```bash
-i2 infer \
+i2py infer \
diff --git a/docs/commands.md b/docs/commands.md
index 593dfb0..a1645a8 100644
--- a/docs/commands.md
+++ b/docs/commands.md
@@ -1,9 +1,9 @@
 # i2
 
-`i2`, for isquare, is the name of the general command used for the client:
+`i2py`, for isquare python client, is the name of the general command used for the client:
 
 ```bash
-Usage: i2 [OPTIONS] COMMAND [ARGS]...
+Usage: i2py [OPTIONS] COMMAND [ARGS]...
 
   Command line interface for isquare.
 
@@ -19,7 +19,7 @@ Commands:
 ## build
 
 ```bash
-Usage: i2 build [OPTIONS] SCRIPT
+Usage: i2py build [OPTIONS] SCRIPT
 
   Build a docker image ready for isquare.
 
@@ -46,7 +46,7 @@ If you just want to test an image without rebuilding it completely you can just
 following command:
 
 ```bash
-Usage: i2 test [OPTIONS] TAG
+Usage: i2py test [OPTIONS] TAG
 
   Verify that a docker image matches the isquare standard.
 
@@ -57,10 +57,10 @@
 
 ## infer
 
-The `i2 infer` command is used to send the data to your models running on isquare:
+The `i2py infer` command is used to send the data to your models running on isquare:
 
 ```bash
-Usage: i2 infer [OPTIONS] DATA
+Usage: i2py infer [OPTIONS] DATA
 
   Send data for inference.
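
For comparison, the same inference can also be issued from Python with the client API introduced in PATCH 05. A minimal sketch; `url`, `access_key`, and `data` below are placeholders, not values taken from these patches:

```python
from i2_client import I2Client

# inference() returns a List[Tuple[bool, Any]]: one (success, payload) pair
# per input, where payload is the model output on success or an error
# message on failure.
client = I2Client(url, access_key)
success, output = client.inference(data)[0]
if not success:
    raise RuntimeError(output)
```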
 
diff --git a/docs/getting_started.md b/docs/getting_started.md
index 67d8463..1ab9690 100644
--- a/docs/getting_started.md
+++ b/docs/getting_started.md
@@ -22,7 +22,7 @@ The client allows you to build and test your model before uploading it to isquar
 you to test this feature, which we are sure will save you a lot of time. For instance, try running:
 
 ```bash
-i2 build examples/tasks/mirror.py --cpu
+i2py build examples/tasks/mirror.py --cpu
 ```
 
 You should see the following output:

From ff940054ed6c2b365528841cbd9ef824542161de Mon Sep 17 00:00:00 2001
From: EmilEOGG
Date: Wed, 6 Jul 2022 15:33:01 +0200
Subject: [PATCH 14/17] Fixed setup.py

---
 setup.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 2eb9df6..0cf7379 100644
--- a/setup.py
+++ b/setup.py
@@ -27,11 +27,12 @@
         "numpy>=1.19",
         "rich>=10.13",
         "websockets>=8.1",
+        "opencv-python==4.6.0.66",
     ],
     packages=find_packages(),
     entry_points="""
         [console_scripts]
-        i2=i2_client:i2_cli
+        i2py=i2_client:i2_cli
     """,
     python_requires=">=3.8",
 )

From 2dba1be2d236e79de4639c7b83112b0e81a1fe27 Mon Sep 17 00:00:00 2001
From: EmilEOGG
Date: Wed, 13 Jul 2022 10:30:14 +0200
Subject: [PATCH 15/17] Corrected links in readme

---
 README.md          | 10 +++++-----
 examples/README.md |  2 +-
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/README.md b/README.md
index 6a67f27..3a11305 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,9 @@
 ![Isquare deploy logo](docs/imgs/deploy_logo.png)
 # Isquare client for Python
 
-This repository contains the official python client for [ISquare](isquare.ai) deploy. It is available in the form of python classes which are ready to use in your code, as well as a command-line interface. We currently support inference with image, text & json files, as well as any numpy array, python dictionary, or string, both for input and output.
+This repository contains the official python client for [ISquare](http://isquare.ai) deploy. It is available in the form of python classes which are ready to use in your code, as well as a command-line interface. We currently support inference with image, text & json files, as well as any numpy array, python dictionary, or string, both for input and output.
 
-The complete documentation for ISquare can be found [here](docs.isquare.ai).
+The complete documentation for ISquare can be found [here](http://docs.isquare.ai).
 
 ## Installation
 
@@ -25,7 +25,7 @@ Docker >= 19.03.13
 _Note_: If you only need the client for inference, this is not required.
 
 ## Usage
-The client can be used to verify your model builds (e.g. checking if they will properly run on [ISquare](isquare.ai)) and to perform inference calls to your deployed models. To use this client for inference, you need to have a model up and running on [ISquare](isquare.ai).
+The client can be used to verify your model builds (e.g. checking if they will properly run on [ISquare](http://isquare.ai)) and to perform inference calls to your deployed models. To use this client for inference, you need to have a model up and running on [ISquare](http://isquare.ai).
 
 Commands and their usage are described [here](docs/commands.md).
 
@@ -36,12 +36,12 @@ End-to-end guidelines on the code adaptation required to deploy a model on isqua
 
 ### Command line interface
 #### Test if your model repository is Isquare-compatible
-To verify if your code will run smoothly on [ISquare](isquare.ai), you can perform a local build & unit test. This will build a container image with all your specific dependencies and perform an inference test. We've included an example of a simple computer vision model which returns the mirrored image it is given, and it can be tested by running:
+To verify if your code will run smoothly on [ISquare](http://isquare.ai), you can perform a local build & unit test. This will build a container image with all your specific dependencies and perform an inference test. We've included an example of a simple computer vision model which returns the mirrored image it is given, and it can be tested by running:
 
 ```bash
 i2py build examples/tasks/mirror.py
 ```
-When you deploy a model with [ISquare](isquare.ai), you will be provided a url for the model, and requested to create access keys. Using a valid url & access keys (the ones displayed are examples), you can perform an inference with an Image model (e.g. the Mirror) and a `.png` image by running:
+When you deploy a model with [ISquare](http://isquare.ai), you will be provided a url for the model, and requested to create access keys. Using a valid url & access keys (the ones displayed are examples), you can perform an inference with an Image model (e.g. the Mirror) and a `.png` image by running:
 
 
diff --git a/examples/README.md b/examples/README.md
index e3cb1d3..5eec539 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -1,5 +1,5 @@
 # Examples
-This directory shows 3 sample integrations of the [ISquare](isquare.ai) client for image inference, with 3 levels of complexity:
+This directory shows 3 sample integrations of the [ISquare](http://isquare.ai) client for image inference, with 3 levels of complexity:
 - How to perform inference with an image
 - How to perform inference with a video
 - How to stream a camera to your model

From 6028900c17640f86e4d5525a17996bdab00b4659 Mon Sep 17 00:00:00 2001
From: EmilEOGG
Date: Wed, 13 Jul 2022 10:32:24 +0200
Subject: [PATCH 16/17] Corrected typo

---
 docs/isquare_tutorial.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/isquare_tutorial.md b/docs/isquare_tutorial.md
index 1fc10d4..a519960 100644
--- a/docs/isquare_tutorial.md
+++ b/docs/isquare_tutorial.md
@@ -121,7 +121,7 @@ We strongly recommend that you implement batched inference for your model, since
 
 And that's all you need to get going from the code point of view. Let's proceed, set up our environment and deploy our model!
 
 ### Step 1.2: Set up your environment
-Most deep learning models are not coded from scratch and depend on external libraries (e.g. python, tensorflow). With isquare.ai, all requirements are handled by a Dockerfile, which is basically a set of instructions which sets up an environment. If you're new to Docker, check the [documentation](https://docs.docker.com/engine/reference/builder/).
We need to create a file containing Docker instructions (usually called Dockerfile):
+Most deep learning models are not coded from scratch and depend on external libraries (e.g. pytorch, tensorflow). With isquare.ai, all requirements are handled by a Dockerfile, which is basically a set of instructions which sets up an environment. If you're new to Docker, check the [documentation](https://docs.docker.com/engine/reference/builder/). We need to create a file containing Docker instructions (usually called Dockerfile):
 
 ```dockerfile
 FROM alpineintuition/archipel-base-cpu:latest

From 436c4ceb2c1eb6d78da0be020f35c57faae634ef Mon Sep 17 00:00:00 2001
From: jGsch
Date: Mon, 25 Jul 2022 15:04:33 +0000
Subject: [PATCH 17/17] increase compatibility with jupyter

---
 i2_client/client.py | 9 +++++++++
 setup.py            | 1 +
 2 files changed, 10 insertions(+)

diff --git a/i2_client/client.py b/i2_client/client.py
index 84cc4af..45cec62 100644
--- a/i2_client/client.py
+++ b/i2_client/client.py
@@ -13,6 +13,7 @@
 
 import archipel_utils as utils
 import msgpack
+import nest_asyncio
 import websockets
 
 log = logging.getLogger(__name__)
@@ -204,4 +205,12 @@ async def _inference(self, inputs):
             await self.__aexit__(exc_type=None, exc_value=None, traceback=None)
             return outputs
 
+        try:
+            loop = asyncio.get_running_loop()
+            if loop.is_running():
+                # patch the already-running loop (e.g. when executed from a
+                # jupyter notebook) so that asyncio.run() below still works
+                nest_asyncio.apply(loop)
+        except RuntimeError:
+            pass
+
         return asyncio.run(_inference(self, inputs))
diff --git a/setup.py b/setup.py
index 0cf7379..53de557 100644
--- a/setup.py
+++ b/setup.py
@@ -25,6 +25,7 @@
         "imutils>=0.5.4",
         "msgpack>=1.0",
         "numpy>=1.19",
+        "nest-asyncio>=1.5",
         "rich>=10.13",
         "websockets>=8.1",
         "opencv-python==4.6.0.66",
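
What the `nest_asyncio` change in PATCH 17 enables, as a minimal sketch: calling the synchronous `inference` wrapper from code that already runs inside an event loop, such as a Jupyter cell. The task URL, access key, and image path below are placeholders:

```python
# In a Jupyter cell an event loop is already running; without the
# nest_asyncio.apply() call added in PATCH 17, the asyncio.run() inside
# inference() would raise a RuntimeError.
import cv2
from i2_client import I2Client

client = I2Client("wss://archipel-beta1.isquare.ai/<task-uuid>", "<access-key>")
img = cv2.imread("test.jpg")  # placeholder image path
success, output = client.inference(img)[0]  # sync wrapper, safe in a notebook
if not success:
    raise RuntimeError(output)
```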