Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
81 changes: 0 additions & 81 deletions agents/ResearchAgent-soln.js

This file was deleted.

62 changes: 59 additions & 3 deletions agents/ResearchAgent.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,16 +10,72 @@ import { AgentExecutor } from "langchain/agents";
import SerpAPITool from "../tools/SerpAPI";
import WebBrowserTool from "../tools/WebBrowser";

/**
*
* WARNING: THIS IS THE SOLUTION! Please try coding before viewing this.
*
*/
/**
 * Researches a topic with a LangChain zero-shot agent that can search
 * Google (SerpAPI) and browse the web, then answers "Who is <topic>?".
 *
 * @param {string} topic - The subject to research; interpolated into the query.
 * @returns {Promise<string>} The agent's final answer, or the fallback string
 *   "Error in completing research" if any step throws.
 */
const ResearchAgent = async (topic) => {
  // Debug trace of the incoming argument.
  console.log({ topic });

  try {
    // We'll give the agent the ability to search Google and also browse the web.
    // The logs below show each tool's `returnDirect` flag — a tool with
    // returnDirect=true short-circuits the agent loop and returns its output directly.
    const SerpAPI = SerpAPITool();
    const WebBrowser = WebBrowserTool();

    console.log(SerpAPI.returnDirect);
    console.log(WebBrowser.returnDirect);
    // We put them into an array of tools.
    const tools = [SerpAPI, WebBrowser];

    // We'll use ZeroShotAgent (zero-shot-react-description), the recommended
    // agent type here:
    // https://js.langchain.com/docs/modules/agents/agents/
    // LangChain provides a method that builds a prompt specifically for this
    // agent and its tools.
    const promptTemplate = ZeroShotAgent.createPrompt(tools, {
      prefix: `Answer the following questions as best you can. You have access to the following tools:`,
      suffix: `Begin! Answer concisely. It's OK to say you don't know.`,
    });

    // Then we'll wrap that agent prompt in a chat-style prompt template.
    const chatPrompt = ChatPromptTemplate.fromPromptMessages([
      new SystemMessagePromptTemplate(promptTemplate),
      HumanMessagePromptTemplate.fromTemplate(`{input}`),
    ]);

    // Initialize the model — ChatOpenAI, since we're driving a chat agent.
    const chat = new ChatOpenAI({});
    // An LLM chain is just a prompt template plus an LLM (or chat model).
    const llmChain = new LLMChain({
      prompt: chatPrompt,
      llm: chat,
    });
    // Then we'll use that LLM chain as the basis of the agent.
    // So basically our agent is made up of: tools, an LLM, and prompt
    // templates — highly customizable to our needs!
    const agent = new ZeroShotAgent({
      llmChain,
      allowedTools: tools.map((tool) => tool.name),
    });
    // Now we'll create an Executor instance which lets us send queries to the agent.

    const executor = AgentExecutor.fromAgentAndTools({
      agent,
      tools,
      returnIntermediateSteps: false,
      // maxIterations is important — agents can get confused and loop.
      // Causes include: using the wrong agent type for a tool, or imperfect
      // prompts (see the docs on crafting prompts). Keep this low to start,
      // or whenever you won't be watching the output.
      maxIterations: 3,
      // Keep verbose on — without it, an agent stuck in a loop is easy to miss.
      verbose: true,
    });

    const result = await executor.run(`Who is ${topic}?`);

    return result;
  } catch (err) {
    // Log the failure and return a safe fallback string rather than rethrowing,
    // so callers always receive a displayable result.
    console.error(err);
    return "Error in completing research";
  }
};

export default ResearchAgent;
17 changes: 17 additions & 0 deletions app/components/Emoji.jsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import React from "react";

const Emoji = ({ color }) => {
console.log(color);
return (
<div className={`bg-${color}-500 text-center`}>
{/* <p>The color is {color}</p> */}
{/* Ternary Operator – If Else Statement ? : */}
{/* {CONDITION ? IF_VALUE : ELSE_VALUE } */}

{color === "red" ? <p>The color is red</p> : <p>The color is not red</p>}
<p>🤖</p>
</div>
);
};

export default Emoji;
4 changes: 2 additions & 2 deletions app/components/HamburgerMenu.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ const HamburgerMenu = () => {
</span>
</a>
</li>
<li className="flex flex-col gap-4">
{/* <li className="flex flex-col gap-4">
<a href="chatcompletions">
<span>
<div className=" rounded-xl overflow-hidden h-40 w-32 drop-shadow">
Expand All @@ -217,7 +217,7 @@ const HamburgerMenu = () => {
</p>
</span>
</a>
</li>
</li> */}
</ul>
</nav>
</div>
Expand Down
6 changes: 6 additions & 0 deletions app/content-generator/page.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,12 @@ const ContentGenerator = () => {
},
body: JSON.stringify({ prompt: prompt, topic: topic, firstMsg }),
});
if (!response.ok) {
console.error(`Error: ${response.status}`); // log the HTTP status code
const errorMessage = await response.text(); // get the error message from the response
console.error(`Message: ${errorMessage}`); // log the error message
throw new Error(errorMessage);
}

const searchRes = await response.json();

Expand Down
100 changes: 0 additions & 100 deletions app/memory/memory-solution.jsx

This file was deleted.

Loading