diff --git a/poetry.lock b/poetry.lock
index 787904d0..027070dd 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.

 [[package]]
 name = "annotated-types"
@@ -224,6 +224,7 @@ anyio = "*"
 certifi = "*"
 httpcore = "==1.*"
 idna = "*"
+socksio = {version = "==1.*", optional = true, markers = "extra == \"socks\""}

 [package.extras]
 brotli = ["brotli", "brotlicffi"]
@@ -558,6 +559,23 @@ files = [
 [package.dependencies]
 six = ">=1.5"

+[[package]]
+name = "python-socks"
+version = "2.7.1"
+description = "Proxy (SOCKS4, SOCKS5, HTTP CONNECT) client for Python"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+    {file = "python_socks-2.7.1-py3-none-any.whl", hash = "sha256:2603c6454eeaeb82b464ad705be188989e8cf1a4a16f0af3c921d6dd71a49cec"},
+    {file = "python_socks-2.7.1.tar.gz", hash = "sha256:f1a0bb603830fe81e332442eada96757b8f8dec02bd22d1d6f5c99a79704c550"},
+]
+
+[package.extras]
+anyio = ["anyio (>=3.3.4,<5.0.0)"]
+asyncio = ["async-timeout (>=4.0)"]
+curio = ["curio (>=1.4)"]
+trio = ["trio (>=0.24)"]
+
 [[package]]
 name = "requests"
 version = "2.32.4"
@@ -628,6 +646,17 @@ files = [
     {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
 ]

+[[package]]
+name = "socksio"
+version = "1.0.0"
+description = "Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3"},
+    {file = "socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac"},
+]
+
 [[package]]
 name = "tomli"
 version = "2.2.1"
@@ -820,4 +849,4 @@ pyaudio = ["pyaudio"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.8"
-content-hash = "b820898bc8f521807e3f8b6d5dc0323fc0b5ff4110a82646f60865bd9738d84a"
+content-hash = "816733cae8aafbebe5feb90db763f3be11dafa9b69703642cfc8253efb56deff"
diff --git a/pyproject.toml b/pyproject.toml
index 82260013..2a6eb152 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -36,7 +36,8 @@ Repository = 'https://github.com/elevenlabs/elevenlabs-python'

 [tool.poetry.dependencies]
 python = "^3.8"
-httpx = ">=0.21.2"
+httpx = {version = "^0.28.1", extras = ["socks"]}
+python-socks = ">=2.7.1"
 pyaudio = { version = ">=0.2.14", optional = true}
 pydantic = ">= 1.9.2"
 pydantic-core = "^2.18.2"
diff --git a/requirements.txt b/requirements.txt
index 90ceac30..21936c46 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
-httpx>=0.21.2
+httpx[socks]>=0.28.1
+python-socks>=2.7.1
 pyaudio>=0.2.14
 pydantic>= 1.9.2
 pydantic-core==2.18.2
diff --git a/src/elevenlabs/conversational_ai/conversation.py b/src/elevenlabs/conversational_ai/conversation.py
index 50ff1f25..af353f0c 100644
--- a/src/elevenlabs/conversational_ai/conversation.py
+++ b/src/elevenlabs/conversational_ai/conversation.py
@@ -241,7 +241,7 @@ class Conversation:
     callback_user_transcript: Optional[Callable[[str], None]]
     callback_latency_measurement: Optional[Callable[[int], None]]
     callback_end_session: Optional[Callable]
-
+    proxy_url: Optional[str]
     _thread: Optional[threading.Thread]
     _should_stop: threading.Event
     _conversation_id: Optional[str]
@@ -263,6 +263,7 @@ def __init__(
         callback_user_transcript: Optional[Callable[[str], None]] = None,
         callback_latency_measurement: Optional[Callable[[int], None]] = None,
         callback_end_session: Optional[Callable] = None,
+        proxy_url: Optional[str] = None,
     ):
         """Conversational AI session.

@@ -295,6 +296,7 @@ def __init__(
         self.callback_user_transcript = callback_user_transcript
         self.callback_latency_measurement = callback_latency_measurement
         self.callback_end_session = callback_end_session
+        self.proxy_url = proxy_url

         self.client_tools.start()

@@ -395,7 +397,7 @@ def send_contextual_update(self, text: str):
             raise

     def _run(self, ws_url: str):
-        with connect(ws_url, max_size=16 * 1024 * 1024) as ws:
+        with connect(ws_url, max_size=16 * 1024 * 1024, proxy=self.proxy_url) as ws:
            self._ws = ws
            ws.send(
                json.dumps(
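
The dependency changes above install httpx's `socks` extra (which pulls in `socksio`) and `python-socks`, the library newer `websockets` releases use for SOCKS proxying. The sketch below is not part of the diff; it only illustrates what the HTTP side gains, assuming a SOCKS5 proxy listening at `localhost:1080` (placeholder address) and a placeholder API key.

```python
# Illustrative only: with the "socks" extra installed, httpx 0.28+ accepts
# socks5:// URLs via its proxy argument; socksio handles the SOCKS protocol.
import httpx

client = httpx.Client(proxy="socks5://localhost:1080")  # placeholder proxy

# Any SDK HTTP call routed through this client now goes via the proxy.
resp = client.get(
    "https://api.elevenlabs.io/v1/models",
    headers={"xi-api-key": "YOUR_API_KEY"},  # placeholder key
)
print(resp.status_code)
```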
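The code change itself adds a `proxy_url` parameter to `Conversation` and forwards it to `connect()` as `proxy=`, so the realtime WebSocket can be tunneled through the same SOCKS proxy. A usage sketch with placeholder credentials, following the constructor arguments visible in the diff and the SDK's documented quickstart shape:

```python
# Illustrative only: API key, agent ID, and proxy address are placeholders.
from elevenlabs.client import ElevenLabs
from elevenlabs.conversational_ai.conversation import Conversation
from elevenlabs.conversational_ai.default_audio_interface import DefaultAudioInterface

client = ElevenLabs(api_key="YOUR_API_KEY")

conversation = Conversation(
    client,
    "YOUR_AGENT_ID",
    requires_auth=True,
    audio_interface=DefaultAudioInterface(),
    callback_agent_response=lambda text: print(f"Agent: {text}"),
    # New in this PR: route the conversation WebSocket through a SOCKS proxy.
    # Leaving it as None (the default) keeps the previous direct connection.
    proxy_url="socks5://localhost:1080",
)

conversation.start_session()
```

Passing `proxy_url=None` leaves `connect()` with its default behaviour, so existing callers are unaffected.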