10 changes: 10 additions & 0 deletions .code-samples.meilisearch.yaml
@@ -514,6 +514,16 @@ faceted_search_1: |-
})
post_dump_1: |-
client.create_dump()
export_post_1: |-
client.export(
url='https://remote-meilisearch-instance.com',
api_key='masterKey',
payload_size='50MiB',
indexes={
'movies*': {},
'books*': {},
},
)
phrase_search_1: |-
client.index('movies').search('"african american" horror')
sorting_guide_update_sortable_attributes_1: |-
12 changes: 12 additions & 0 deletions docker-compose.yml
@@ -6,10 +6,13 @@ services:
working_dir: /home/package
environment:
- MEILISEARCH_URL=http://meilisearch:7700
- MEILISEARCH_URL_2=http://meilisearch2:7700
depends_on:
- meilisearch
- meilisearch2
links:
- meilisearch
- meilisearch2
volumes:
- ./:/home/package

@@ -20,3 +23,12 @@ services:
environment:
- MEILI_MASTER_KEY=masterKey
- MEILI_NO_ANALYTICS=true

meilisearch2:
image: getmeili/meilisearch:latest
container_name: meili2
ports:
- "7701:7700"
environment:
- MEILI_MASTER_KEY=masterKey
- MEILI_NO_ANALYTICS=true
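
As a quick sanity check that both compose services are reachable before running the export tests, a minimal sketch (the host URLs assume the 7700/7701 port mappings above; is_healthy() is the client's existing health helper):

import meilisearch

# Connect to both instances defined in docker-compose.yml
# (URLs are assumptions based on the port mappings above).
primary = meilisearch.Client("http://127.0.0.1:7700", "masterKey")
secondary = meilisearch.Client("http://127.0.0.1:7701", "masterKey")

# Both instances should report healthy before the export tests run.
assert primary.is_healthy() and secondary.is_healthy()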
51 changes: 51 additions & 0 deletions meilisearch/client.py
@@ -627,6 +627,57 @@ def create_dump(self) -> TaskInfo:

return TaskInfo(**task)

def export(
self,
url: str,
api_key: Optional[str] = None,
payload_size: Optional[str] = None,
indexes: Optional[Mapping[str, Any]] = None,
) -> TaskInfo:
"""Trigger the creation of a Meilisearch export.

Parameters
----------
url:
The URL of the remote Meilisearch instance receiving the export, including its port if necessary.

api_key:
An API key with index.create, settings.update, and documents.add permissions
on the remote Meilisearch instance; required when that instance is secured.

payload_size:
The maximum size of each data payload, expressed in a human-readable format such as "100MiB".
Larger payloads are generally more efficient, but require significantly more powerful machines.

indexes:
A mapping whose keys are patterns matching the UIDs of the indexes you want to export, with per-pattern options as values.
By default, Meilisearch exports all documents across all indexes.

Returns
-------
task_info:
TaskInfo instance containing information about a task to track the progress of an asynchronous process.
https://www.meilisearch.com/docs/reference/api/export#create-an-export

Raises
------
MeilisearchApiError
An error containing details about why Meilisearch can't process your request.
Meilisearch error codes are described
here: https://www.meilisearch.com/docs/reference/errors/error_codes#meilisearch-errors
"""
payload: Dict[str, Any] = {"url": url}
if api_key is not None:
payload["apiKey"] = api_key
if payload_size is not None:
payload["payloadSize"] = payload_size
if indexes is not None:
payload["indexes"] = indexes

task = self.http.post(self.config.paths.exports, body=payload)

return TaskInfo(**task)

def create_snapshot(self) -> TaskInfo:
"""Trigger the creation of a Meilisearch snapshot.

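For reference, a minimal usage sketch of the new method (the target URL, key, and index pattern below are placeholders rather than values from this change; wait_for_task is the client's existing task helper):

import meilisearch

client = meilisearch.Client("http://127.0.0.1:7700", "masterKey")

# Trigger an export of every index matching "movies*" to a second instance,
# then block until the export task finishes.
task = client.export(
    url="http://127.0.0.1:7701",
    api_key="masterKey",
    payload_size="50MiB",
    indexes={"movies*": {}},
)
client.wait_for_task(task.task_uid)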
1 change: 1 addition & 0 deletions meilisearch/config.py
@@ -48,6 +48,7 @@ class Paths:
edit = "edit"
network = "network"
webhooks = "webhooks"
exports = "export"

def __init__(
self,
42 changes: 42 additions & 0 deletions tests/client/test_client_exports.py
@@ -0,0 +1,42 @@
import os

import pytest

from tests import common

pytestmark = pytest.mark.skipif(
not os.getenv("MEILISEARCH_URL_2"),
reason="Export API tests run only when second server is configured",
)


def test_export_creation(client, client2, index_with_documents):
"""Tests the creation of a Meilisearch export."""
index = index_with_documents()
export_task = client.export(common.BASE_URL_2, api_key=common.MASTER_KEY)
task_result = client.wait_for_task(export_task.task_uid)
assert task_result.status == "succeeded"

index2 = client2.get_index(index.uid)
assert index2.uid == index.uid
assert index2.primary_key == index.get_primary_key()
assert index2.get_documents().total == index.get_documents().total


def test_export_creation_with_index_filter(client, client2, index_with_documents):
"""Tests the creation of a Meilisearch export with specific index UIDs."""
index_with_documents()
index = index_with_documents(common.INDEX_UID2)

indexes = {common.INDEX_UID2: {"filter": None}}
export_task = client.export(common.BASE_URL_2, api_key=common.MASTER_KEY, indexes=indexes)
task_result = client.wait_for_task(export_task.task_uid)
assert task_result.status == "succeeded"

response = client2.get_indexes()
assert response["total"] == 1
index2 = client2.get_index(common.INDEX_UID2)

assert index2.uid == index.uid
assert index2.primary_key == index.get_primary_key()
assert index.get_documents().total == index2.get_documents().total
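
Beyond the {"filter": None} mapping used in the test above, the per-index options can restrict what gets exported; a hedged sketch (the filter expression and attribute are illustrative and assume the attribute is filterable on the source index):

# Export only a subset of documents from indexUID2 (illustrative filter).
indexes = {common.INDEX_UID2: {"filter": "genre = horror"}}
export_task = client.export(common.BASE_URL_2, api_key=common.MASTER_KEY, indexes=indexes)
client.wait_for_task(export_task.task_uid)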
1 change: 1 addition & 0 deletions tests/common.py
@@ -2,6 +2,7 @@

MASTER_KEY = "masterKey"
BASE_URL = os.getenv("MEILISEARCH_URL", "http://127.0.0.1:7700")
BASE_URL_2 = os.getenv("MEILISEARCH_URL_2", "http://127.0.0.1:7701")

INDEX_UID = "indexUID"
INDEX_UID2 = "indexUID2"
29 changes: 22 additions & 7 deletions tests/conftest.py
@@ -1,5 +1,6 @@
# pylint: disable=redefined-outer-name
import json
import os
from typing import Optional

import requests
@@ -16,19 +17,31 @@ def client():
return meilisearch.Client(common.BASE_URL, common.MASTER_KEY)


@fixture(scope="session")
def client2():
return meilisearch.Client(common.BASE_URL_2, common.MASTER_KEY)


def _clear_indexes(meilisearch_client):
"""Deletes all the indexes in the Meilisearch instance."""

indexes = meilisearch_client.get_indexes()
for index in indexes["results"]:
task = meilisearch_client.index(index.uid).delete()
meilisearch_client.wait_for_task(task.task_uid)


@fixture(autouse=True)
def clear_indexes(client):
def clear_indexes(client, client2):
"""
Auto-clears the indexes after each test function run.
Makes all the test functions independent.
"""
# Yields back to the test function.
yield
# Deletes all the indexes in the Meilisearch instance.
indexes = client.get_indexes()
for index in indexes["results"]:
task = client.index(index.uid).delete()
client.wait_for_task(task.task_uid)
_clear_indexes(client)
if os.getenv("MEILISEARCH_URL_2"):
_clear_indexes(client2)


@fixture(autouse=True)
@@ -47,12 +60,14 @@ def clear_webhooks(client):


@fixture(autouse=True)
def clear_all_tasks(client):
def clear_all_tasks(client, client2):
"""
Auto-clears the tasks after each test function run.
Makes all the test functions independent.
"""
client.delete_tasks({"statuses": ["succeeded", "failed", "canceled"]})
if os.getenv("MEILISEARCH_URL_2"):
client2.delete_tasks({"statuses": ["succeeded", "failed", "canceled"]})


@fixture(scope="function")
Expand Down