Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
123 changes: 64 additions & 59 deletions scripts/cmds/ai.js
Original file line number Diff line number Diff line change
@@ -1,61 +1,66 @@
const axios = require('axios');

const GPT_API_URL = 'https://sandipapi.onrender.com/gpt';
const PREFIXES = ['ai'];
const horizontalLine = "━━━━━━━━━━━━━━━";

module.exports = {
config: {
name: "ai",
version: 1.0,
author: "OtinXSandip",
longDescription: "AI",
category: "ai",
guide: {
en: "{p} questions",
},
},
onStart: async function () {
// Initialization logic if needed
},
onChat: async function ({ api, event, args, message }) {
try {
const prefix = PREFIXES.find((p) => event.body && event.body.toLowerCase().startsWith(p));

if (!prefix) {
return; // Invalid prefix, ignore the command
}

const prompt = event.body.substring(prefix.length).trim();

if (!prompt) {
const defaultMessage = getCenteredHeader("") + "\n" + horizontalLine + "\nProvide a Question\n" + horizontalLine;
await message.reply(defaultMessage);
return;
}

const answer = await getGPTResponse(prompt);

// Adding header and horizontal lines to the answer
const answerWithHeader = getCenteredHeader("") + "\n" + horizontalLine + "\n" + answer + "\n" + horizontalLine;

await message.reply(answerWithHeader);
} catch (error) {
console.error("Error:", error.message);
// Additional error handling if needed
}
}
};

/**
 * Center `header` within a fixed-width line by left-padding with spaces.
 * Headers longer than the target width are returned unchanged (padding
 * clamps at zero).
 * @param {string} header - Text to center.
 * @returns {string} The header preceded by the computed left padding.
 */
function getCenteredHeader(header) {
  const totalWidth = 32; // Target line width for centering; adjust as needed.
  const padding = Math.max(0, Math.floor((totalWidth - header.length) / 2));
  return " ".repeat(padding) + header;
}
const { getPrefix, getStreamFromURL, uploadImgbb } = global.utils;

/**
 * onChat handler: answers messages that start with the bot prefix + command
 * name, or the bare command name. Forwards the prompt (plus an uploaded image
 * URL when replying to a photo) to the remote AI endpoint and relays the
 * result, expanding {name}/{pn} placeholders and attaching any media URLs
 * the API returns in `av`.
 * @param {object} ctx - Framework context ({ message, event, args, usersData }).
 */
async function ai({ message: m, event: e, args: a, usersData: u }) {
  // Guard: an empty message has no first word to match against; the old code
  // crashed here on a[0].toLowerCase().
  if (!a || typeof a[0] !== "string") return;

  const triggers = [
    `${await getPrefix(e.threadID)}${this.config.name}`,
    `${this.config.name}`,
    /* "ai", "Athena"
     * you can add more triggers here
     */
  ];
  if (!triggers.some((t) => a[0].toLowerCase().startsWith(t))) return;

  try {
    let prompt;
    // When replying to a photo, re-host it and append its URL to the prompt
    // so the API can see the image.
    if (e.type === "message_reply" && e.messageReply.attachments && e.messageReply.attachments[0]?.type === "photo") {
      const uploaded = await uploadImgbb(e.messageReply.attachments[0].url);
      prompt = a.slice(1).join(" ") + ' ' + uploaded.image.url;
    } else {
      prompt = a.slice(1).join(" ");
    }

    const mentions = [{ id: e.senderID, tag: await u.getName(e.senderID) }];
    // NOTE(review): the API key is hard-coded; consider moving it to config
    // or an environment variable.
    const r = await require("axios").post(`https://test-ai-ihc6.onrender.com/api`, {
      prompt: prompt,
      apikey: "GayKey-oWHmMb1t8ASljhpgSSUI",
      name: mentions[0]['tag'],
      id: mentions[0]['id'],
    });

    // Expand template placeholders the API may echo back.
    const text = r.data.result.replace(/{name}/g, mentions[0]['tag']).replace(/{pn}/g, triggers[0]);

    if (r.data.av) {
      if (Array.isArray(r.data.av)) {
        // Fetch all attachment streams in parallel before replying.
        const streams = await Promise.all(r.data.av.map((url) => getStreamFromURL(url)));
        m.reply({
          body: text,
          mentions: mentions,
          attachment: streams
        });
      } else {
        m.reply({
          body: text,
          mentions: mentions,
          attachment: await getStreamFromURL(r.data.av)
        });
      }
    } else {
      m.reply({
        body: text,
        mentions: mentions
      });
    }
  } catch (error) {
    m.reply("Error " + error);
  }
}
module.exports = {
config: {
name: "ai",
aliases: ["Athena"],
version: 1.6,
author: "Jun",
role: 0,
shortDescription: "An AI that can do various tasks",
guide: "{pn} <query>",
category: "AI",
},
onStart: function() {},
onChat: ai
};