2 changes: 2 additions & 0 deletions .code-samples.meilisearch.yaml
@@ -11,6 +11,8 @@ create_an_index_1: |-
  client.create_index('movies', {'primaryKey': 'id'})
update_an_index_1: |-
  client.index('movies').update(primary_key='id')
compact_index_1: |-
    client.index('movies').compact()
Copilot AI Nov 14, 2025

The indentation of the code sample is inconsistent with other entries in this YAML file. The code should start with 2 spaces (like other entries) but it starts with 4 spaces. This should be:

compact_index_1: |-
  client.index('movies').compact()

instead of:

compact_index_1: |-
    client.index('movies').compact()
Suggested change
-    client.index('movies').compact()
+  client.index('movies').compact()

delete_an_index_1: |-
  client.delete_index('movies')
  // OR
28 changes: 28 additions & 0 deletions meilisearch/client.py
@@ -984,6 +984,34 @@ def update_chat_workspace_settings(

        return self.http.patch(f"chats/{workspace_uid}/settings", body=settings)

    def get_experimental_features(self) -> dict:
        """
        Retrieve the current settings for all experimental features.

        Returns:
            dict: A mapping of feature names to their enabled/disabled state.

        Example:
            >>> client.get_experimental_features()
        """
        return self.http.get(self.config.paths.experimental_features)

    def update_experimental_features(self, features: dict) -> dict:
        """
        Update one or more experimental features.

        Args:
            features (dict): A dictionary mapping feature names to booleans.
                For example, {"multimodal": True} to enable multimodal.

        Returns:
            dict: The updated experimental features settings.

        Example:
            >>> client.update_experimental_features({"multimodal": True})
        """
        return self.http.patch(self.config.paths.experimental_features, body=features)

    @staticmethod
    def _base64url_encode(data: bytes) -> str:
        return base64.urlsafe_b64encode(data).decode("utf-8").replace("=", "")
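
A minimal usage sketch for the two new client methods above (it assumes a local Meilisearch instance and master key; the "multimodal" feature name comes from the tests added later in this PR):

import meilisearch

# Hypothetical connection details - adjust to your own instance.
client = meilisearch.Client("http://127.0.0.1:7700", "masterKey")

# Read the current state of all experimental features.
features = client.get_experimental_features()

# Enable multimodal, then restore whatever value was set before.
client.update_experimental_features({"multimodal": True})
client.update_experimental_features({"multimodal": features.get("multimodal", False)})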
1 change: 1 addition & 0 deletions meilisearch/config.py
@@ -47,6 +47,7 @@ class Paths:
    localized_attributes = "localized-attributes"
    edit = "edit"
    network = "network"
    experimental_features = "experimental-features"

    def __init__(
        self,
14 changes: 14 additions & 0 deletions meilisearch/index.py
@@ -2333,3 +2333,17 @@ def _build_url(
        if primary_key is None and csv_delimiter is None:
            return f"{self.config.paths.index}/{self.uid}/{self.config.paths.document}"
        return f"{self.config.paths.index}/{self.uid}/{self.config.paths.document}?{parse.urlencode(parameters)}"

    def compact(self) -> TaskInfo:
        """
        Trigger the compaction of the index.
        This is an asynchronous operation in Meilisearch.

        Returns
        -------
        task_info: TaskInfo
            Contains information to track the progress of the compaction task.
Copilot AI Nov 14, 2025

The docstring for the compact method is missing a "Raises" section. Similar methods in this file (e.g., search, update_localized_attributes) document that they can raise MeilisearchApiError. For consistency and completeness, this method should also document potential exceptions:

Raises
------
MeilisearchApiError
    An error containing details about why Meilisearch can't process your request. Meilisearch error codes are described here: https://www.meilisearch.com/docs/reference/errors/error_codes#meilisearch-errors
Suggested change
-            Contains information to track the progress of the compaction task.
+            Contains information to track the progress of the compaction task.
+
+        Raises
+        ------
+        MeilisearchApiError
+            An error containing details about why Meilisearch can't process your request. Meilisearch error codes are described here: https://www.meilisearch.com/docs/reference/errors/error_codes#meilisearch-errors

"""
path = f"{self.config.paths.index}/{self.uid}/compact"
task = self.http.post(path)
return TaskInfo(**task)
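
A minimal sketch of triggering compaction and waiting for the resulting task (it mirrors the index test added later in this PR; the index uid is illustrative):

# Compaction is asynchronous: compact() only enqueues a task.
index = client.index("movies")
task_info = index.compact()
client.wait_for_task(task_info.task_uid)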
8 changes: 8 additions & 0 deletions meilisearch/models/embedders.py
@@ -167,6 +167,12 @@ class RestEmbedder(CamelBase):
        Template defining the data Meilisearch sends to the embedder
    document_template_max_bytes: Optional[int]
        Maximum allowed size of rendered document template (defaults to 400)
    indexing_fragments: Optional[Dict[str, Dict[str, str]]]
        Defines how to fragment documents for indexing (multi-modal search)
        Example: {"text": {"value": "{{doc.title}} - {{doc.overview}}"}}
    search_fragments: Optional[Dict[str, Dict[str, str]]]
        Defines how to fragment search queries (multi-modal search)
        Example: {"text": {"value": "{{fragment}}"}}
    request: Dict[str, Any]
        A JSON value representing the request Meilisearch makes to the remote embedder
    response: Dict[str, Any]
@@ -185,6 +191,8 @@
    dimensions: Optional[int] = None
    document_template: Optional[str] = None
    document_template_max_bytes: Optional[int] = None
    indexing_fragments: Optional[Dict[str, Dict[str, str]]] = None
    search_fragments: Optional[Dict[str, Dict[str, str]]] = None
    request: Dict[str, Any]
    response: Dict[str, Any]
    headers: Optional[Dict[str, str]] = None
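
A minimal sketch of how the new fragment fields might be supplied when configuring a REST embedder through index.update_embedders(); the URL, model name, and templates are illustrative, and the camelCase keys mirror the conftest fixture added later in this PR:

task = index.update_embedders(
    {
        "default": {
            "source": "rest",
            "url": "http://localhost:8080/embed",  # hypothetical embedder endpoint
            "dimensions": 512,
            "indexingFragments": {"text": {"value": "{{doc.title}} - {{doc.overview}}"}},
            "searchFragments": {"text": {"value": "{{fragment}}"}},
            "request": {"input": ["{{fragment}}"], "model": "test-model"},
            "response": {"data": [{"embedding": "{{embedding}}"}]},
        }
    }
)
index.wait_for_task(task.task_uid)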
2 changes: 1 addition & 1 deletion meilisearch/version.py
@@ -1,6 +1,6 @@
from __future__ import annotations

__version__ = "0.37.1"
__version__ = "0.38.0"


def qualified_version() -> str:
40 changes: 40 additions & 0 deletions tests/client/test_client_experimental_features.py
@@ -0,0 +1,40 @@
"""Tests for client experimental features methods."""

def test_get_experimental_features(client):
    """Test getting experimental features returns a dict including 'multimodal'."""
    response = client.get_experimental_features()

    assert isinstance(response, dict)
    assert len(response) > 0
    assert "multimodal" in response
    assert isinstance(response["multimodal"], bool)


def test_update_experimental_features(client):
    """Test updating experimental features and verify changes persist."""
    initial = client.get_experimental_features()
    initial_multimodal = initial.get("multimodal", False)

    # Toggle multimodal
    new_value = not initial_multimodal
    response = client.update_experimental_features({"multimodal": new_value})

    assert isinstance(response, dict)
    assert response.get("multimodal") == new_value
    assert client.get_experimental_features().get("multimodal") == new_value

    # Reset to original value
    client.update_experimental_features({"multimodal": initial_multimodal})


def test_multimodal_idempotency_generic(client):
    """Test that updating multimodal via generic method is idempotent."""
    # Enable twice
    client.update_experimental_features({"multimodal": True})
    response = client.update_experimental_features({"multimodal": True})
    assert response.get("multimodal") is True

    # Disable twice
    client.update_experimental_features({"multimodal": False})
    response = client.update_experimental_features({"multimodal": False})
    assert response.get("multimodal") is False
112 changes: 112 additions & 0 deletions tests/conftest.py
@@ -152,6 +152,79 @@ def index_maker(index_uid=common.INDEX_UID, documents=small_movies):
    return index_maker


@fixture(scope="function")
def mock_embedder_server():
    """Fixture that starts a mock HTTP server to act as an embedder.

    This server responds to embedding requests with fake vectors,
    allowing us to test search_with_media without a real AI service.
    """
    from http.server import HTTPServer, BaseHTTPRequestHandler
    import threading
    import json
Copilot AI Nov 14, 2025

This import of module json is redundant, as it was previously imported on line 2.

Suggested change
-    import json


    class MockEmbedderHandler(BaseHTTPRequestHandler):
        def do_POST(self):
            # Return a fake embedding vector
            response = {"data": [{"embedding": [0.1] * 512}]}
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(json.dumps(response).encode())

        def log_message(self, format, *args):
            # Suppress logging
            pass

    # Start server in background thread
    server = HTTPServer(('localhost', 8080), MockEmbedderHandler)
    thread = threading.Thread(target=server.serve_forever, daemon=True)
    thread.start()

    yield server

    # Cleanup
    server.shutdown()


@fixture(scope="function")
def index_with_rest_embedder(empty_index, small_movies, mock_embedder_server, experimental_features):
    """Fixture for index with REST embedder configured for media search testing.

    Uses a mock HTTP server to act as the embedder, allowing real
    search_with_media() testing without external AI services.
    """
    def index_maker(index_uid=common.INDEX_UID, documents=small_movies):
        experimental_features({"multimodal": True})
        index = empty_index(index_uid)
        # Configure REST embedder pointing to mock server
        settings_update_task = index.update_embedders(
            {
                "default": {
                    "source": "rest",
                    "url": "http://localhost:8080/embed",
                    "apiKey": "test-key",
                    "dimensions": 512,
                    "indexingFragments": {
                        "text": {"value": "{{doc.title}}"}
                    },
                    "searchFragments": {
                        "text": {"value": "{{fragment}}"}
                    },
                    "request": {"input": ["{{fragment}}"], "model": "test-model"},
                    "response": {"data": [{"embedding": "{{embedding}}"}]},
                }
            }
        )
        index.wait_for_task(settings_update_task.task_uid)
        # Add documents - embedder will be called via mock server
        document_addition_task = index.add_documents(documents)
        index.wait_for_task(document_addition_task.task_uid)
        return index

    return index_maker


@fixture(scope="function")
def index_with_documents_and_facets(empty_index, small_movies):
    def index_maker(index_uid=common.INDEX_UID, documents=small_movies):
@@ -308,3 +381,42 @@ def enable_network_options():
json={"network": False},
timeout=10,
)


@fixture
def experimental_features():
    """
    Fixture to temporarily set experimental features for a test.

    Usage:
        def test_example(experimental_features):
            experimental_features({"multimodal": True, "new_ui": True})
    """
    applied: dict = {}

    def _set_features(features: dict):
        # Enable features
        requests.patch(
            f"{common.BASE_URL}/experimental-features",
            headers={"Authorization": f"Bearer {common.MASTER_KEY}"},
            json=features,
            timeout=10,
        )
        # Remember the features so they can be reset after the test
        applied.update(features)
        return features

    yield _set_features

    # Reset features after the test
    if applied:
        reset_payload = {key: False for key in applied}
        requests.patch(
            f"{common.BASE_URL}/experimental-features",
            headers={"Authorization": f"Bearer {common.MASTER_KEY}"},
            json=reset_payload,
            timeout=10,
        )


@fixture
def multimodal_enabled(experimental_features):
    """Convenience fixture: enables multimodal experimental feature."""
    experimental_features({"multimodal": True})
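
A minimal sketch of a hypothetical test that leans on these fixtures (the fixture names come from this conftest; the assertion mirrors the client tests added above):

import pytest


@pytest.mark.usefixtures("multimodal_enabled")
def test_multimodal_is_enabled(client):
    # multimodal_enabled toggles the flag through the experimental_features fixture
    assert client.get_experimental_features().get("multimodal") is True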
15 changes: 15 additions & 0 deletions tests/index/test_index.py
@@ -219,3 +219,18 @@ def test_delete_index(client):
    client.wait_for_task(deleted.task_uid)
    with pytest.raises(MeilisearchApiError):
        client.get_index(uid=common.INDEX_UID)


@pytest.mark.usefixtures("indexes_sample")
def test_index_compact(client):
    """Tests the compaction of an index."""
    index = client.index(common.INDEX_UID)
    # Get stats before compaction
    stats_before = index.get_stats()

    task_info = index.compact()
    client.wait_for_task(task_info.task_uid)
    stats_after = index.get_stats()

    assert stats_before.number_of_documents == stats_after.number_of_documents
    assert stats_after.is_indexing is False