Add Secret AI as a local app #1690

Open · wants to merge 1 commit into main

46 changes: 46 additions & 0 deletions packages/tasks/src/local-apps.ts
@@ -94,6 +94,14 @@ function isMlxModel(model: ModelData)
	return model.tags.includes("mlx");
}

function isMnnModel(model: ModelData): boolean {
	// Check for MNN models using multiple criteria:
	// 1. Models from the taobao-mnn organization
	// 2. Model IDs ending with the -MNN suffix (naming convention)
	// 3. Models tagged with "mnn" (future-proofing)
	return model.id.startsWith("taobao-mnn/") || model.id.endsWith("-MNN") || model.tags.includes("mnn");
}

function getQuantTag(filepath?: string): string {
	const defaultTag = ":{{QUANT_TAG}}";

@@ -485,6 +493,44 @@ export const LOCAL_APPS = {
		displayOnModelPage: isLlamaCppGgufModel,
		snippet: snippetOllama,
	},
	secretai: {
		prettyLabel: "Secret AI",
		docsUrl: "https://secretai.io",
		mainTask: "text-generation",
		displayOnModelPage: (model) => {
			// Display for text-generation models that support GGUF, MLX, or MNN formats
			return (
				model.pipeline_tag === "text-generation" &&
				(isLlamaCppGgufModel(model) || isMlxModel(model) || isMnnModel(model))
			);
		},
		deeplink: (model, filepath) => {
			// Determine format parameter based on model type
			let format: string;
			if (isLlamaCppGgufModel(model)) {
				format = "gguf";
			} else if (isMlxModel(model)) {
				format = "mlx";
			} else if (isMnnModel(model)) {
				format = "mnn";
			} else {
				// Default to gguf for compatibility
				format = "gguf";
			}

			// Build deeplink URL following Secret AI's protocol
			// Format: secret-ai://pages/huggingface/repos/{model_id}?format={format}
			const baseUrl = `secret-ai://pages/huggingface/repos/${model.id}`;
			const params = new URLSearchParams({ format });

			// Add file parameter if a specific file is selected
			if (filepath) {
				params.append("file", filepath);
			}

			return new URL(`${baseUrl}?${params.toString()}`);
		},
	},
"docker-model-runner": {
prettyLabel: "Docker Model Runner",
docsUrl: "https://docs.docker.com/ai/model-runner/",
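
For reference, below is a minimal, self-contained sketch of how the deeplink added in this diff would resolve. The isGguf/isMlx/isMnn helpers are simplified tag- and ID-based stand-ins for the real helpers in local-apps.ts, and the repository IDs and filename are hypothetical examples; only the URL construction mirrors the change above.

// Sketch only: simplified stand-ins for the helpers used in local-apps.ts.
interface MinimalModelData {
	id: string;
	tags: string[];
}

const isGguf = (model: MinimalModelData): boolean => model.tags.includes("gguf");
const isMlx = (model: MinimalModelData): boolean => model.tags.includes("mlx");
const isMnn = (model: MinimalModelData): boolean =>
	model.id.startsWith("taobao-mnn/") || model.id.endsWith("-MNN") || model.tags.includes("mnn");

// Mirrors the deeplink construction from the diff above.
function secretAiDeeplink(model: MinimalModelData, filepath?: string): URL {
	const format = isGguf(model) ? "gguf" : isMlx(model) ? "mlx" : isMnn(model) ? "mnn" : "gguf";
	const params = new URLSearchParams({ format });
	if (filepath) {
		params.append("file", filepath);
	}
	return new URL(`secret-ai://pages/huggingface/repos/${model.id}?${params.toString()}`);
}

// Hypothetical repos, for illustration only:
// -> secret-ai://pages/huggingface/repos/taobao-mnn/Some-Model-MNN?format=mnn
console.log(secretAiDeeplink({ id: "taobao-mnn/Some-Model-MNN", tags: [] }).toString());
// -> secret-ai://pages/huggingface/repos/someuser/some-model?format=gguf&file=model.Q4_K_M.gguf
console.log(secretAiDeeplink({ id: "someuser/some-model", tags: ["gguf"] }, "model.Q4_K_M.gguf").toString());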