diff --git a/modus/data-fetching.mdx b/modus/data-fetching.mdx
index c715b6df..2d6c4ed2 100644
--- a/modus/data-fetching.mdx
+++ b/modus/data-fetching.mdx
@@ -157,9 +157,9 @@ export function getPerson(name: string): Person {
  const vars = new dgraph.Variables()
  vars.set("$name", name)
-  const resp = dgraph.execute(
+  const resp = dgraph.executeQuery(
    connection,
-    new dgraph.Request(new dgraph.Query(statement, vars)),
+    new dgraph.Query(statement, vars),
  )

  const persons = JSON.parse(resp.Json).persons
  return persons[0]
@@ -468,7 +468,7 @@ class Person {
 }

 @json
 class GetPersonResponse {
-  getPerson: Person | null
+  getPerson: Person | null = null
 }

 export function getPerson(name: string): Person | null {
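For reference while reviewing the `dgraph.executeQuery` change above, here is a minimal AssemblyScript sketch of the updated call shape as a complete function. Only the `executeQuery`/`Query` pattern, `Variables`, `resp.Json`, and the initialized nullable field come from the diff itself; the connection name `"dgraph"`, the `people` schema, and the `PeopleResponse` class are assumptions for illustration.

```ts
import { dgraph } from "@hypermode/modus-sdk-as"
import { JSON } from "json-as"

// Hypothetical connection name; use whatever your modus.json defines.
const connection = "dgraph"

@json
class Person {
  uid: string = ""
  name: string = ""
}

@json
class PeopleResponse {
  // Nullable fields need an explicit initializer, as in the diff above.
  people: Person[] | null = null
}

// Sketch of the new call shape: executeQuery takes the connection name and a
// dgraph.Query directly, with no dgraph.Request wrapper.
export function queryPeopleByName(name: string): Person[] {
  const statement = `
    query queryPeople($name: string) {
      people(func: eq(name, $name)) {
        uid
        name
      }
    }`

  const vars = new dgraph.Variables()
  vars.set("$name", name)

  const resp = dgraph.executeQuery(connection, new dgraph.Query(statement, vars))
  const data = JSON.parse<PeopleResponse>(resp.Json)
  if (data.people === null) {
    return []
  }
  return data.people!
}
```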
diff --git a/modus/first-modus-agent.mdx b/modus/first-modus-agent.mdx
index 795f7615..f2f884db 100644
--- a/modus/first-modus-agent.mdx
+++ b/modus/first-modus-agent.mdx
@@ -103,104 +103,162 @@ AssemblyScript if you prefer. For AssemblyScript usage, refer to the

Create a function that fetches data from an external API and uses AI for analysis:
-
-  Create `intelligence.go`:
-
-  ```go intelligence.go
-  package main
-
-  import (
-    "errors"
-    "fmt"
-    "strings"
-
-    "github.com/hypermodeinc/modus/sdk/go/pkg/http"
-    "github.com/hypermodeinc/modus/sdk/go/pkg/models"
-    "github.com/hypermodeinc/modus/sdk/go/pkg/models/openai"
-  )
-
-  type IntelReport struct {
-    Quote    string `json:"quote"`
-    Author   string `json:"author"`
-    Analysis string `json:"analysis,omitempty"`
-  }
+
+
+  Create `intelligence.go`:
+
+  ```go intelligence.go
+  package main
-
-  const modelName = "text-generator"
+  import (
+    "errors"
+    "fmt"
+    "strings"
-
-  // Fetch a random quote and provide AI analysis
-  func GatherIntelligence() (*IntelReport, error) {
-    request := http.NewRequest("https://zenquotes.io/api/random")
+    "github.com/hypermodeinc/modus/sdk/go/pkg/http"
+    "github.com/hypermodeinc/modus/sdk/go/pkg/models"
+    "github.com/hypermodeinc/modus/sdk/go/pkg/models/openai"
+  )
-
-    response, err := http.Fetch(request)
-    if err != nil {
-      return nil, err
+  type IntelReport struct {
+    Quote    string `json:"quote"`
+    Author   string `json:"author"`
+    Analysis string `json:"analysis,omitempty"`
  }
-    if !response.Ok() {
-      return nil, fmt.Errorf("request failed: %d %s", response.Status, response.StatusText)
+
+  const modelName = "text-generator"
+
+  // Fetch a random quote and provide AI analysis
+  func GatherIntelligence() (*IntelReport, error) {
+    request := http.NewRequest("https://zenquotes.io/api/random")
+
+    response, err := http.Fetch(request)
+    if err != nil {
+      return nil, err
+    }
+    if !response.Ok() {
+      return nil, fmt.Errorf("request failed: %d %s", response.Status, response.StatusText)
+    }
+
+    // Parse the API response
+    var quotes []IntelReport
+    response.JSON(&quotes)
+    if len(quotes) == 0 {
+      return nil, errors.New("no data received")
+    }
+
+    // Get the quote
+    intel := quotes[0]
+
+    // Generate AI analysis
+    analysis, err := analyzeIntelligence(intel.Quote, intel.Author)
+    if err != nil {
+      fmt.Printf("AI analysis failed for %s: %v\n", intel.Author, err)
+      intel.Analysis = "Analysis unavailable"
+    } else {
+      intel.Analysis = analysis
+    }
+
+    return &intel, nil
  }
-    // Parse the API response
-    var quotes []IntelReport
-    response.JSON(&quotes)
-    if len(quotes) == 0 {
-      return nil, errors.New("no data received")
+  // Use AI to analyze the quote
+  func analyzeIntelligence(quote, author string) (string, error) {
+    model, err := models.GetModel[openai.ChatModel](modelName)
+    if err != nil {
+      return "", err
+    }
+
+    prompt := `You are an analyst.
+      Provide a brief insight that captures the core meaning
+      and practical application of this wisdom in 1-2 sentences.`
+    content := fmt.Sprintf("Quote: \"%s\" - %s", quote, author)
+
+    input, err := model.CreateInput(
+      openai.NewSystemMessage(prompt),
+      openai.NewUserMessage(content),
+    )
+    if err != nil {
+      return "", err
+    }
+
+    input.Temperature = 0.7
+
+    output, err := model.Invoke(input)
+    if err != nil {
+      return "", err
+    }
+
+    return strings.TrimSpace(output.Choices[0].Message.Content), nil
  }
+  ```
+
+
+  Modify `index.ts`
-    // Get the quote
-    intel := quotes[0]
+  ```ts
+  import { http, models } from "@hypermode/modus-sdk-as";
+  import { OpenAIChatModel, SystemMessage, UserMessage } from "@hypermode/modus-sdk-as/models/openai/chat";
-    // Generate AI analysis
-    analysis, err := analyzeIntelligence(intel.Quote, intel.Author)
-    if err != nil {
-      fmt.Printf("AI analysis failed for %s: %v\n", intel.Author, err)
-      intel.Analysis = "Analysis unavailable"
-    } else {
-      intel.Analysis = analysis
-    }
+  export function sayHello(name: string | null = null): string {
+    return `Hello, ${name || "World"}!`;
+  }
-    return &intel, nil
+  @json
+  class IntelReport {
+    @alias("q")
+    quote!: string;
+    @alias("a")
+    author!: string;
+    analysis!: string;
  }
-  // Use AI to analyze the quote
-  func analyzeIntelligence(quote, author string) (string, error) {
-    model, err := models.GetModel[openai.ChatModel](modelName)
-    if err != nil {
-      return "", err
-    }
+  const modelName = "text-generator";
+
+  export function gatherIntelligence(): IntelReport {
+    const response = http.fetch("https://zenquotes.io/api/random");
+
+    if (response.status !== 200)
+      throw new Error("Request failed with status: " + response.status.toString() + " " + response.statusText);
+
+    const quotes = response.json<IntelReport[]>();
+
+    if (quotes.length == 0)
+      throw new Error("No data received");
+
+    const quote = quotes[0];
+    const analysis = analyzeIntelligence(quote.quote, quote.author);
+
+    quote.analysis = analysis;
-    prompt := `You are an analyst.
-      Provide a brief insight that captures the core meaning
-      and practical application of this wisdom in 1-2 sentences.`
-    content := fmt.Sprintf("Quote: \"%s\" - %s", quote, author)
+    return quote;
+  }
+
+  function analyzeIntelligence(quote: string, author: string): string {
+    const model = models.getModel<OpenAIChatModel>(modelName);
+
+    const prompt = `You are an analyst.
+      Provide a brief insight that captures the core meaning
+      and practical application of this wisdom in 1-2 sentences.`;
+    const content = "Quote: " + quote + " - " + author;
-    input, err := model.CreateInput(
-      openai.NewSystemMessage(prompt),
-      openai.NewUserMessage(content),
-    )
-    if err != nil {
-      return "", err
-    }
+    const input = model.createInput([
+      new SystemMessage(prompt),
+      new UserMessage(content)
+    ]);
-    input.Temperature = 0.7
+    input.temperature = 0.7;
-    output, err := model.Invoke(input)
-    if err != nil {
-      return "", err
-    }
+    const output = model.invoke(input);
-    return strings.TrimSpace(output.Choices[0].Message.Content), nil
+    return output.choices[0].message.content.trim();
  }
  ```
-
+
+
-  Restart your development server:
-
-  ```sh
-  modus dev
-  ```
-
Modus automatically generates a GraphQL API from your functions. Since your
function is named `GatherIntelligence()`, it becomes a GraphQL query field
called `gatherIntelligence`.
@@ -208,13 +266,13 @@ AssemblyScript if you prefer. For AssemblyScript usage, refer to the
The explorer is fully GraphQL-compatible, so you can issue this query:

 ```graphql
-  query {
-    gatherIntelligence {
-      quote
-      author
-      analysis
+query {
+  gatherIntelligence {
+    quote
+    author
+    analysis
+  }
 }
-  }
 ```

You'll receive a response like:
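To sanity-check the generated `gatherIntelligence` field outside the API explorer, a plain GraphQL POST works too. This is a minimal client-side sketch; the endpoint URL is an assumption, so substitute the local GraphQL address that `modus dev` prints when it starts.

```ts
// Minimal client sketch: POST the generated query to the local endpoint.
// The URL below is an assumption; use the endpoint printed by `modus dev`.
const endpoint = "http://localhost:8686/graphql";

async function fetchIntelligence(): Promise<void> {
  const res = await fetch(endpoint, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      query: `
        query {
          gatherIntelligence {
            quote
            author
            analysis
          }
        }`,
    }),
  });

  const { data, errors } = await res.json();
  if (errors) throw new Error(JSON.stringify(errors));
  console.log(data.gatherIntelligence);
}

fetchIntelligence();
```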
diff --git a/modus/functions.mdx b/modus/functions.mdx
index 8ad1e3dd..bd55ba2b 100644
--- a/modus/functions.mdx
+++ b/modus/functions.mdx
@@ -43,13 +43,22 @@ GraphQL API. Your functions become either **queries** (for data retrieval) or

Most functions become GraphQL queries—perfect for fetching and processing data:

-```go
+
+```go Go
// This function becomes a GraphQL query
func GatherThreatIntelligence(source string) (*ThreatReport, error) {
-  // Data gathering and processing operation
-  return fetchThreatData(source)
+  // Data gathering and processing operation
+  return fetchThreatData(source)
}
```
+```ts AssemblyScript
+// This function becomes a GraphQL query
+export function gatherThreatIntelligence(source: string): ThreatReport {
+  // Data gathering and processing operation
+  return fetchThreatData(source)
+}
+```
+

Your functions are now accessible via GraphQL:

@@ -82,13 +91,22 @@ query {

Functions that modify data automatically become GraphQL mutations. Modus
detects these by their operation prefixes:

-```go
+
+```go Go
// This becomes a GraphQL mutation
func CreateSecurityAlert(data AlertInput) (*SecurityAlert, error) {
  // Create new security alert
  return deploySecurityAlert(data)
}
```
+```ts AssemblyScript
+// This becomes a GraphQL mutation
+export function createSecurityAlert(data: AlertInput): SecurityAlert {
+  // Create new security alert
+  return deploySecurityAlert(data)
+}
+```
+

Now you can execute data modifications:
@@ -130,11 +148,13 @@ automatically become mutations.
Here's a complete example that demonstrates how functions integrate external
APIs with AI models for intelligent data processing:

-```go
+
+```go Go
package main

import (
  "fmt"
+  "net/url"
  "strings"
  "github.com/hypermodeinc/modus/sdk/go/pkg/http"
  "github.com/hypermodeinc/modus/sdk/go/pkg/models"
@@ -152,10 +172,11 @@ const modelName = "text-generator"

// Function: Gather weather data and provide tactical analysis
func GatherWeatherIntelligence(city string) (*WeatherIntel, error) {
+  city = url.QueryEscape(city)
  // Fetch weather data from OpenWeatherMap API
  url := fmt.Sprintf(
-    "https://api.openweathermap.org/data/2.5/weather?q=%s&appid={{API_KEY}}&units=metric",
-    city,
+    "https://api.openweathermap.org/data/2.5/weather?q=%s&units=metric",
+    city,
  )

  response, err := http.Fetch(url)
@@ -243,6 +264,87 @@ func analyzeTacticalConditions(city string, temp float64, conditions string) (st
  return strings.TrimSpace(output.Choices[0].Message.Content), nil
}
```
+```ts AssemblyScript
+import { http, models } from "@hypermode/modus-sdk-as";
+import { OpenAIChatModel, SystemMessage, UserMessage } from "@hypermode/modus-sdk-as/models/openai/chat";
+
+@json
+class WeatherIntel {
+  city!: string;
+  temperature!: f64;
+  conditions!: string;
+  analysis!: string;
+}
+
+@json
+class WeatherData {
+  name!: string;
+  main!: MainWeatherData;
+  weather!: WeatherDescription[];
+}
+
+@json
+class MainWeatherData {
+  temp!: f64;
+}
+
+@json
+class WeatherDescription {
+  description!: string;
+}
+
+const modelName = "text-generator";
+
+export function gatherWeatherIntelligence(city: string): WeatherIntel {
+  city = encodeURIComponent(city);
+  const url = `https://api.openweathermap.org/data/2.5/weather?q=${city}&units=metric`;
+  const response = http.fetch(url);
+
+  if (!response.ok)
+    throw new Error("Weather data retrieval failed: " + response.status.toString() + " " + response.statusText);
+
+  const weatherData = response.json<WeatherData>();
+
+  let conditions = "unknown";
+
+  if (weatherData.weather.length > 0) conditions = weatherData.weather[0].description;
+
+  const analysis = analyzeTacticalConditions(
+    weatherData.name,
+    weatherData.main.temp,
+    conditions
+  );
+
+  return {
+    city: weatherData.name,
+    temperature: weatherData.main.temp,
+    conditions: conditions,
+    analysis: analysis
+  };
+}
+
+function analyzeTacticalConditions(city: string, temperature: f64, conditions: string): string {
+  const model = models.getModel<OpenAIChatModel>(modelName);
+
+  const prompt = `You are a tactical analyst evaluating weather conditions for field operations.
+    Provide a brief tactical assessment of how these weather conditions might impact
+    outdoor activities, visibility, and operational considerations in 1-2 sentences.`;
+
+  const content = `Location: ${city}, Temperature: ${temperature}, Conditions: ${conditions}`;
+
+  const input = model.createInput([
+    new SystemMessage(prompt),
+    new UserMessage(content)
+  ]);
+  input.temperature = 0.7;
+
+  const output = model.invoke(input);
+
+  return output.choices[0].message.content.trim();
+}
+```
+

This function automatically becomes available as a GraphQL query:
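As a quick check of the `@json` classes added in the AssemblyScript tab above, the sketch below parses a trimmed sample of the OpenWeatherMap payload with the same field mapping. The sample values and the `describeSample` helper are illustrative only; the class shapes are the ones from the diff.

```ts
import { JSON } from "json-as";

// Trimmed example of the OpenWeatherMap response, limited to the fields the
// classes in the diff declare. Values are made up for illustration.
const samplePayload = `{
  "name": "Oslo",
  "main": { "temp": 4.2 },
  "weather": [{ "description": "light snow" }]
}`;

@json
class MainWeatherData {
  temp!: f64;
}

@json
class WeatherDescription {
  description!: string;
}

@json
class WeatherData {
  name!: string;
  main!: MainWeatherData;
  weather!: WeatherDescription[];
}

// Quick mapping check before wiring the classes to http.fetch.
export function describeSample(): string {
  const data = JSON.parse<WeatherData>(samplePayload);
  return data.name + ": " + data.main.temp.toString() + " C, " + data.weather[0].description;
}
```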
diff --git a/modus/model-invoking.mdx b/modus/model-invoking.mdx
index aafba0fe..0b67f0a1 100644
--- a/modus/model-invoking.mdx
+++ b/modus/model-invoking.mdx
@@ -159,7 +159,6 @@ func GenerateText(instruction, prompt string) (string, error) {
import { models } from "@hypermode/modus-sdk-as"
import {
  OpenAIChatModel,
-  ResponseFormat,
  SystemMessage,
  UserMessage,
} from "@hypermode/modus-sdk-as/models/openai/chat"
@@ -225,12 +224,8 @@ func ClassifyText(text string, threshold float32) (string, error) {
```ts AssemblyScript
import { models } from "@hypermode/modus-sdk-as"

-import {
-  ClassificationModel,
-  ClassifierResult,
-} from "@hypermode/modus-sdk-as/models/experimental/classification"
+import { ClassificationModel } from "@hypermode/modus-sdk-as/models/experimental/classification"

-// this model name should match the one defined in the modus.json manifest file
const modelName: string = "my-classifier"

// this function takes input text and a probability threshold, and returns the
@@ -241,12 +236,14 @@ export function classifyText(text: string, threshold: f32): string {
  const input = model.createInput([text])
  const output = model.invoke(input)

-  const prediction = output.predictions[0]
-  if (prediction.confidence >= threshold) {
-    return prediction.label
+  const predictions = output.predictions
+
+  const prediction = predictions[0]
+  if (prediction.confidence < threshold) {
+    return ""
  }

-  return ""
+  return prediction.label
}
```
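One follow-up worth considering for the classification refactor above: indexing `predictions[0]` assumes the model always returns at least one prediction. A defensive variant, sketched with the same imports and calls the diff keeps, could look like this. The function name `classifyTextSafe` is only for illustration.

```ts
import { models } from "@hypermode/modus-sdk-as"
import { ClassificationModel } from "@hypermode/modus-sdk-as/models/experimental/classification"

const modelName: string = "my-classifier"

// Same flow as the refactored example, but tolerant of an empty result set.
export function classifyTextSafe(text: string, threshold: f32): string {
  const model = models.getModel<ClassificationModel>(modelName)
  const input = model.createInput([text])
  const output = model.invoke(input)

  const predictions = output.predictions
  if (predictions.length == 0) {
    return ""
  }

  const prediction = predictions[0]
  if (prediction.confidence < threshold) {
    return ""
  }

  return prediction.label
}
```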