Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 1 addition & 3 deletions aixplain/utils/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,7 @@
logger = logging.getLogger(__name__)

BACKEND_URL = os.getenv("BACKEND_URL", "https://platform-api.aixplain.com")
MODELS_RUN_URL = os.getenv(
"MODELS_RUN_URL", "https://models.aixplain.com/api/v1/execute"
)
MODELS_RUN_URL = os.getenv("MODELS_RUN_URL", "https://models.aixplain.com/api/v1/execute")
# GET THE API KEY FROM CMD
TEAM_API_KEY = os.getenv("TEAM_API_KEY", "")
AIXPLAIN_API_KEY = os.getenv("AIXPLAIN_API_KEY", "")
Expand Down
20 changes: 7 additions & 13 deletions aixplain/utils/convert_datatype_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@
import json
from pydantic import BaseModel

def dict_to_metadata(metadatas: List[Union[Dict, MetaData]]) -> None:

def dict_to_metadata(metadatas: List[Union[Dict, MetaData]]) -> None:
"""Convert all the Dicts to MetaData

Args:
Expand All @@ -38,26 +38,20 @@ def dict_to_metadata(metadatas: List[Union[Dict, MetaData]]) -> None:
if isinstance(metadatas[i], dict):
metadatas[i] = MetaData(**metadatas[i])
except TypeError:
raise TypeError(f"Data Asset Onboarding Error: One or more elements in the metadata_schema are not well-structured")
raise TypeError(
f"Data Asset Onboarding Error: One or more elements in the metadata_schema are not well-structured"
)


def normalize_expected_output(obj):
    """Normalize an expected-output spec to a JSON string (or pass-through).

    Accepts a Pydantic model class, a Pydantic model instance, a dict, a str,
    or None, and returns:
      * model class    -> its JSON schema, dumped to a JSON string
      * model instance -> its JSON serialization
      * dict           -> json.dumps(dict)
      * str            -> returned unchanged (assumed already JSON/text)
      * None           -> None
      * anything else  -> json.dumps(obj)

    Supports both Pydantic v2 (``model_json_schema`` / ``model_dump_json``)
    and v1 (``schema`` / ``json``) APIs via ``hasattr`` checks.
    """
    if isinstance(obj, type) and issubclass(obj, BaseModel):
        # Class, not instance: emit its schema rather than serializing data.
        schema = obj.model_json_schema() if hasattr(obj, "model_json_schema") else obj.schema()
        return json.dumps(schema)

    if isinstance(obj, BaseModel):
        return obj.model_dump_json() if hasattr(obj, "model_dump_json") else obj.json()

    if isinstance(obj, (dict, str)) or obj is None:
        # str and None pass through untouched; dict is JSON-encoded.
        return obj if isinstance(obj, str) else json.dumps(obj) if obj is not None else obj

    return json.dumps(obj)
9 changes: 7 additions & 2 deletions aixplain/utils/file_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,12 @@ def upload_data(
url = urljoin(config.BACKEND_URL, "sdk/file/upload-url")
if tags is None:
tags = []
payload = {"contentType": content_type, "originalName": file_name, "tags": ",".join(tags), "license": license.value}
payload = {
"contentType": content_type,
"originalName": file_name,
"tags": ",".join(tags),
"license": license.value,
}

team_key = api_key or config.TEAM_API_KEY
headers = {"Authorization": "token " + team_key}
Expand Down Expand Up @@ -209,7 +214,7 @@ def upload_data(

def s3_to_csv(
s3_url: Text,
aws_credentials: Optional[Dict[Text, Text]] = {"AWS_ACCESS_KEY_ID": None, "AWS_SECRET_ACCESS_KEY": None}
aws_credentials: Optional[Dict[Text, Text]] = {"AWS_ACCESS_KEY_ID": None, "AWS_SECRET_ACCESS_KEY": None},
) -> str:
"""Convert S3 directory contents to a CSV file with file listings.

Expand Down
42 changes: 22 additions & 20 deletions aixplain/utils/validation_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,17 +84,17 @@ def dataset_onboarding_validation(
metadata_spliting_schema = list(filter(lambda md: str(md.dsubtype) == "split", metadata_schema))

# validate the input and the output of the dataset
assert (
len(input_schema) > 0 or len(input_ref_data) > 0
), "Data Asset Onboarding Error: You must specify an input data to onboard a dataset."
assert len(input_schema) > 0 or len(input_ref_data) > 0, (
"Data Asset Onboarding Error: You must specify an input data to onboard a dataset."
)

input_dtype = input_schema[0].dtype if isinstance(input_schema[0], MetaData) else input_schema[0]["dtype"]
if isinstance(input_dtype, DataType):
input_dtype = input_dtype.value

assert (
FunctionInputOutput.get(function) is not None and input_dtype in FunctionInputOutput[function]["input"]
), f"Data Asset Onboarding Error: The input data type `{input_dtype}` is not compatible with the `{function}` function.\nThe expected input data type should be one of these data type: `{FunctionInputOutput[function]['input']}`."
assert FunctionInputOutput.get(function) is not None and input_dtype in FunctionInputOutput[function]["input"], (
f"Data Asset Onboarding Error: The input data type `{input_dtype}` is not compatible with the `{function}` function.\nThe expected input data type should be one of these data type: `{FunctionInputOutput[function]['input']}`."
)

if len(output_schema) > 0:
output_dtype = output_schema[0].dtype if isinstance(output_schema[0], MetaData) else output_schema[0]["dtype"]
Expand All @@ -103,19 +103,21 @@ def dataset_onboarding_validation(

assert (
FunctionInputOutput.get(function) is not None and output_dtype in FunctionInputOutput[function]["output"]
), f"Data Asset Onboarding Error: The output data type `{output_dtype}` is not compatible with the `{function}` function.\nThe expected output data type should be one of these data type: `{FunctionInputOutput[function]['output']}`."
), (
f"Data Asset Onboarding Error: The output data type `{output_dtype}` is not compatible with the `{function}` function.\nThe expected output data type should be one of these data type: `{FunctionInputOutput[function]['output']}`."
)

# validate the splitting
assert (
len(metadata_spliting_schema) < 2
), f"Data Asset Onboarding Error: Only 0 or 1 metadata of the split subtype can be added to the `metadata_schema`."
assert all(
str(mds.dtype) == "label" for mds in metadata_spliting_schema
), f"Data Asset Onboarding Error: The `dtype` must be `label` for any splitting subtype."

assert (
content_path is not None or s3_link is not None
), "Data Asset Onboarding Error: No path to content Data was provided. Please update `context_path` or `s3_link`."
assert (split_labels is not None and split_rate is not None) or (
split_labels is None and split_rate is None
), "Data Asset Onboarding Error: Make sure you set the split labels values as well as their rates."
assert len(metadata_spliting_schema) < 2, (
f"Data Asset Onboarding Error: Only 0 or 1 metadata of the split subtype can be added to the `metadata_schema`."
)
assert all(str(mds.dtype) == "label" for mds in metadata_spliting_schema), (
f"Data Asset Onboarding Error: The `dtype` must be `label` for any splitting subtype."
)

assert content_path is not None or s3_link is not None, (
"Data Asset Onboarding Error: No path to content Data was provided. Please update `context_path` or `s3_link`."
)
assert (split_labels is not None and split_rate is not None) or (split_labels is None and split_rate is None), (
"Data Asset Onboarding Error: Make sure you set the split labels values as well as their rates."
)
5 changes: 5 additions & 0 deletions aixplain/v2/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,12 @@
from .meta_agents import Debugger, DebugResult
from .agent_progress import AgentProgressTracker, ProgressFormat
from .api_key import APIKey, APIKeyLimits, APIKeyUsageLimit, TokenType
from .issue import IssueReporter, IssueSeverity
from .exceptions import (
AixplainV2Error,
ResourceError,
APIError,
AixplainIssueError,
ValidationError,
TimeoutError,
FileUploadError,
Expand Down Expand Up @@ -85,13 +87,16 @@
"APIKeyLimits",
"APIKeyUsageLimit",
"TokenType",
"IssueReporter",
"IssueSeverity",
# Progress tracking
"AgentProgressTracker",
"ProgressFormat",
# Exceptions
"AixplainV2Error",
"ResourceError",
"APIError",
"AixplainIssueError",
"ValidationError",
"TimeoutError",
"FileUploadError",
Expand Down
4 changes: 4 additions & 0 deletions aixplain/v2/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from .meta_agents import Debugger
from .api_key import APIKey
from .rlm import RLM, RLMResult
from .issue import IssueReporter
from . import enums


Expand All @@ -28,6 +29,7 @@
DebuggerType = TypeVar("DebuggerType", bound=Debugger)
APIKeyType = TypeVar("APIKeyType", bound=APIKey)
RLMType = TypeVar("RLMType", bound=RLM)
IssueReporterType = TypeVar("IssueReporterType", bound=IssueReporter)


class Aixplain:
Expand All @@ -53,6 +55,7 @@ class Aixplain:
Debugger: DebuggerType = None
APIKey: APIKeyType = None
RLM: RLMType = None
issue: IssueReporterType = None

Function = enums.Function
Supplier = enums.Supplier
Expand Down Expand Up @@ -133,3 +136,4 @@ def init_resources(self) -> None:
self.Debugger = type("Debugger", (Debugger,), {"context": self})
self.APIKey = type("APIKey", (APIKey,), {"context": self})
self.RLM = type("RLM", (RLM,), {"context": self})
self.issue = IssueReporter(context=self)
6 changes: 6 additions & 0 deletions aixplain/v2/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,12 @@ def __init__(
)


class AixplainIssueError(APIError):
    """Raised when SDK issue reporting fails.

    Wraps transport-level ``APIError`` failures from the issue endpoint
    (and local validation failures) so callers can catch issue-reporting
    errors distinctly from other API errors.
    """

    pass


class ValidationError(AixplainV2Error):
"""Raised when validation fails."""

Expand Down
96 changes: 96 additions & 0 deletions aixplain/v2/issue.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
"""Issue reporting helpers for the V2 SDK."""

from __future__ import annotations

from enum import Enum
from typing import Any, Dict, Optional, TYPE_CHECKING

from .exceptions import APIError, AixplainIssueError

if TYPE_CHECKING:
from .core import Aixplain


class IssueSeverity(str, Enum):
    """Supported issue severity levels.

    A ``str`` subclass, so members compare equal to their plain string
    values and serialize directly into JSON payloads.

    NOTE(review): the relative ordering of SEV1..SEV4 (which end is most
    severe) is not defined here — confirm against the backend contract.
    """

    SEV1 = "SEV1"
    SEV2 = "SEV2"
    SEV3 = "SEV3"
    SEV4 = "SEV4"


class IssueReporter:
    """Submit SDK issue reports to the aiXplain backend.

    Thin client around the ``/v1/issue`` endpoint: validates the report
    locally, posts it via the context's HTTP client, and returns the
    created issue's ID.
    """

    ISSUE_PATH = "/v1/issue"

    # Optional fields accepted by report(); anything else is rejected up front.
    # Class-level frozenset so it is built once, not on every call.
    _ALLOWED_FIELDS = frozenset(
        {"title", "severity", "tags", "sdk_version", "runtime_context", "reporter_email"}
    )

    def __init__(self, context: "Aixplain") -> None:
        """Initialize the issue reporter.

        Args:
            context: The active ``Aixplain`` client, supplying ``backend_url``
                and the authenticated HTTP ``client``.
        """
        self.context = context

    def _issue_url(self) -> str:
        """Build the full issue endpoint URL for the configured backend."""
        # rstrip guards against a trailing slash on backend_url yielding "//v1/issue".
        return f"{self.context.backend_url.rstrip('/')}{self.ISSUE_PATH}"

    def report(self, description: Optional[str], **kwargs: Any) -> str:
        """Submit an issue report and return its ID.

        Args:
            description: Required free-text description of the issue.
            **kwargs: Optional fields — ``title``, ``severity`` (an
                ``IssueSeverity`` member or its string value), ``tags``,
                ``sdk_version``, ``runtime_context``, ``reporter_email``.
                ``None``-valued fields are omitted from the payload.

        Returns:
            The backend-assigned issue ID.

        Raises:
            AixplainIssueError: If validation fails, the HTTP call errors,
                or the response lacks an ``issue_id``.
        """
        self._validate_description(description)

        unexpected_fields = sorted(set(kwargs) - self._ALLOWED_FIELDS)
        if unexpected_fields:
            raise AixplainIssueError(
                f"Unsupported issue fields: {', '.join(unexpected_fields)}.",
            )

        severity = kwargs.get("severity")
        if severity is not None:
            self._validate_severity(severity)

        payload: Dict[str, Any] = {"description": description}
        for key, value in kwargs.items():
            if value is not None:
                # Enum members are flattened to their string values for JSON.
                payload[key] = value.value if isinstance(value, IssueSeverity) else value

        try:
            response = self.context.client.post(self._issue_url(), json=payload)
        except APIError as error:
            # Re-raise as the issue-specific error type, preserving HTTP details.
            raise AixplainIssueError(
                error.message,
                status_code=error.status_code,
                response_data=error.response_data,
                error=error.error,
            ) from error

        issue_id = response.get("issue_id")
        if not issue_id:
            # Backend accepted the request but returned no usable identifier.
            raise AixplainIssueError(
                "Issue report accepted but no issue_id was returned.",
                status_code=202,
                response_data=response,
                error="missing_issue_id",
            )
        return issue_id

    @staticmethod
    def _validate_description(description: Optional[str]) -> None:
        """Reject a missing description before any network traffic."""
        if description is None:
            raise AixplainIssueError("Field 'description' is required.", status_code=400)

    @staticmethod
    def _validate_severity(severity: Any) -> None:
        """Accept either an ``IssueSeverity`` member or its string value."""
        resolved = severity.value if isinstance(severity, IssueSeverity) else severity
        if resolved not in {level.value for level in IssueSeverity}:
            raise AixplainIssueError(
                "severity must be one of: SEV1, SEV2, SEV3, SEV4.",
                status_code=400,
            )
Loading
Loading