From 687fc3ed5a5083cb3ffa54d3c2a6d8399588c7ec Mon Sep 17 00:00:00 2001
From: JairusSW
Date: Mon, 30 Jun 2025 13:17:02 -0700
Subject: [PATCH] docs: add and update AssemblyScript examples

---
 modus/data-fetching.mdx     |   6 +-
 modus/first-modus-agent.mdx | 220 +++++++++++++++++----------
 modus/functions.mdx         | 296 +++++++++++++++++++++++-------------
 modus/model-invoking.mdx    |  17 +--
 4 files changed, 343 insertions(+), 196 deletions(-)

diff --git a/modus/data-fetching.mdx b/modus/data-fetching.mdx
index c715b6df..2d6c4ed2 100644
--- a/modus/data-fetching.mdx
+++ b/modus/data-fetching.mdx
@@ -157,9 +157,9 @@ export function getPerson(name: string): Person {
   const vars = new dgraph.Variables()
   vars.set("$name", name)
 
-  const resp = dgraph.execute(
+  const resp = dgraph.executeQuery(
     connection,
-    new dgraph.Request(new dgraph.Query(statement, vars)),
+    new dgraph.Query(statement, vars),
   )
   const persons = JSON.parse(resp.Json).persons
   return persons[0]
@@ -468,7 +468,7 @@ class Person {
 }
 
 @json
 class GetPersonResponse {
-  getPerson: Person | null
+  getPerson: Person | null = null
 }
 
 export function getPerson(name: string): Person | null {
diff --git a/modus/first-modus-agent.mdx b/modus/first-modus-agent.mdx
index 795f7615..f2f884db 100644
--- a/modus/first-modus-agent.mdx
+++ b/modus/first-modus-agent.mdx
@@ -103,104 +103,162 @@ AssemblyScript if you prefer. For AssemblyScript usage, refer to the
 Create a function that fetches data from an external API and uses AI for
 analysis:
 
-  Create `intelligence.go`:
-
-  ```go intelligence.go
-  package main
-
-  import (
-    "errors"
-    "fmt"
-    "strings"
-
-    "github.com/hypermodeinc/modus/sdk/go/pkg/http"
-    "github.com/hypermodeinc/modus/sdk/go/pkg/models"
-    "github.com/hypermodeinc/modus/sdk/go/pkg/models/openai"
-  )
-
-  type IntelReport struct {
-    Quote    string `json:"quote"`
-    Author   string `json:"author"`
-    Analysis string `json:"analysis,omitempty"`
-  }
-
-  const modelName = "text-generator"
-
-  // Fetch a random quote and provide AI analysis
-  func GatherIntelligence() (*IntelReport, error) {
-    request := http.NewRequest("https://zenquotes.io/api/random")
-
-    response, err := http.Fetch(request)
-    if err != nil {
-      return nil, err
-    }
-    if !response.Ok() {
-      return nil, fmt.Errorf("request failed: %d %s", response.Status, response.StatusText)
-    }
-
-    // Parse the API response
-    var quotes []IntelReport
-    response.JSON(&quotes)
-    if len(quotes) == 0 {
-      return nil, errors.New("no data received")
-    }
-
-    // Get the quote
-    intel := quotes[0]
-
-    // Generate AI analysis
-    analysis, err := analyzeIntelligence(intel.Quote, intel.Author)
-    if err != nil {
-      fmt.Printf("AI analysis failed for %s: %v\n", intel.Author, err)
-      intel.Analysis = "Analysis unavailable"
-    } else {
-      intel.Analysis = analysis
-    }
-
-    return &intel, nil
-  }
-
-  // Use AI to analyze the quote
-  func analyzeIntelligence(quote, author string) (string, error) {
-    model, err := models.GetModel[openai.ChatModel](modelName)
-    if err != nil {
-      return "", err
-    }
-
-    prompt := `You are an analyst.
+
+
+    Create `intelligence.go`:
+
+    ```go intelligence.go
+    package main
+
+    import (
+      "errors"
+      "fmt"
+      "strings"
+
+      "github.com/hypermodeinc/modus/sdk/go/pkg/http"
+      "github.com/hypermodeinc/modus/sdk/go/pkg/models"
+      "github.com/hypermodeinc/modus/sdk/go/pkg/models/openai"
+    )
+
+    type IntelReport struct {
+      Quote    string `json:"quote"`
+      Author   string `json:"author"`
+      Analysis string `json:"analysis,omitempty"`
+    }
+
+    const modelName = "text-generator"
+
+    // Fetch a random quote and provide AI analysis
+    func GatherIntelligence() (*IntelReport, error) {
+      request := http.NewRequest("https://zenquotes.io/api/random")
+
+      response, err := http.Fetch(request)
+      if err != nil {
+        return nil, err
+      }
+      if !response.Ok() {
+        return nil, fmt.Errorf("request failed: %d %s", response.Status, response.StatusText)
+      }
+
+      // Parse the API response
+      var quotes []IntelReport
+      response.JSON(&quotes)
+      if len(quotes) == 0 {
+        return nil, errors.New("no data received")
+      }
+
+      // Get the quote
+      intel := quotes[0]
+
+      // Generate AI analysis
+      analysis, err := analyzeIntelligence(intel.Quote, intel.Author)
+      if err != nil {
+        fmt.Printf("AI analysis failed for %s: %v\n", intel.Author, err)
+        intel.Analysis = "Analysis unavailable"
+      } else {
+        intel.Analysis = analysis
+      }
+
+      return &intel, nil
+    }
+
+    // Use AI to analyze the quote
+    func analyzeIntelligence(quote, author string) (string, error) {
+      model, err := models.GetModel[openai.ChatModel](modelName)
+      if err != nil {
+        return "", err
+      }
+
+      prompt := `You are an analyst.
+      Provide a brief insight that captures the core meaning
+      and practical application of this wisdom in 1-2 sentences.`
+      content := fmt.Sprintf("Quote: \"%s\" - %s", quote, author)
+
+      input, err := model.CreateInput(
+        openai.NewSystemMessage(prompt),
+        openai.NewUserMessage(content),
+      )
+      if err != nil {
+        return "", err
+      }
+
+      input.Temperature = 0.7
+
+      output, err := model.Invoke(input)
+      if err != nil {
+        return "", err
+      }
+
+      return strings.TrimSpace(output.Choices[0].Message.Content), nil
+    }
+    ```
+
+    Modify `index.ts`
+
+    ```ts
+    import { http, models } from "@hypermode/modus-sdk-as";
+    import { OpenAIChatModel, SystemMessage, UserMessage } from "@hypermode/modus-sdk-as/models/openai/chat";
+
+    export function sayHello(name: string | null = null): string {
+      return `Hello, ${name || "World"}!`;
+    }
+
+    @json
+    class IntelReport {
+      @alias("q")
+      quote!: string;
+      @alias("a")
+      author!: string;
+      analysis!: string;
+    }
+
+    const modelName = "text-generator";
+
+    export function gatherIntelligence(): IntelReport {
+      const response = http.fetch("https://zenquotes.io/api/random");
+
+      if (response.status !== 200)
+        throw new Error("Request failed with status: " + response.status.toString() + " " + response.statusText);
+
+      const quotes = response.json<IntelReport[]>();
+
+      if (!quotes.length)
+        throw new Error("No data received");
+
+      const quote = quotes[0];
+      const analysis = analyzeIntelligence(quote.quote, quote.author);
+
+      quote.analysis = analysis;
+
+      return quote;
+    }
+
+    function analyzeIntelligence(quote: string, author: string): string {
+      const model = models.getModel<OpenAIChatModel>(modelName);
+
+      const prompt = `You are an analyst.
Provide a brief insight that captures the core meaning - and practical application of this wisdom in 1-2 sentences.` - content := fmt.Sprintf("Quote: \"%s\" - %s", quote, author) + and practical application of this wisdom in 1-2 sentences.`; + const content = "Quote: " + quote + " - " + author; - input, err := model.CreateInput( - openai.NewSystemMessage(prompt), - openai.NewUserMessage(content), - ) - if err != nil { - return "", err - } + const input = model.createInput([ + new SystemMessage(prompt), + new UserMessage(content) + ]); - input.Temperature = 0.7 + input.temperature = 0.7; - output, err := model.Invoke(input) - if err != nil { - return "", err - } + const output = model.invoke(input); - return strings.TrimSpace(output.Choices[0].Message.Content), nil + return output.choices[0].message.content.trim(); } ``` - + + - Restart your development server: - - ```sh - modus dev - ``` - Modus automatically generates a GraphQL API from your functions. Since your function is named `GatherIntelligence()`, it becomes a GraphQL query field called `gatherIntelligence`. @@ -208,13 +266,13 @@ AssemblyScript if you prefer. For AssemblyScript usage, refer to the The explorer is fully GraphQL-compatible, so you can issue this query: ```graphql - query { - gatherIntelligence { - quote - author - analysis + query { + gatherIntelligence { + quote + author + analysis + } } - } ``` You'll receive a response like: diff --git a/modus/functions.mdx b/modus/functions.mdx index 8ad1e3dd..365457ce 100644 --- a/modus/functions.mdx +++ b/modus/functions.mdx @@ -43,13 +43,22 @@ GraphQL API. Your functions become either **queries** (for data retrieval) or Most functions become GraphQL queries—perfect for fetching and processing data: -```go + +```go Go // This function becomes a GraphQL query func GatherThreatIntelligence(source string) (*ThreatReport, error) { - // Data gathering and processing operation - return fetchThreatData(source) + // Data gathering and processing operation + return fetchThreatData(source) } ``` +```ts AssemblyScript +// This function becomes a GraphQL query +export function gatherThreatIntelligence(source: string): ThreatReport { + // Data gathering and processing operation + return fetchThreatData(source) +} +``` + Your functions are now accessible via GraphQL: @@ -82,13 +91,22 @@ query { Functions that modify data automatically become GraphQL mutations. Modus detects these by their operation prefixes: -```go + +```go Go // This becomes a GraphQL mutation func CreateSecurityAlert(data AlertInput) (*SecurityAlert, error) { // Create new security alert return deploySecurityAlert(data) } ``` +```ts AssemblyScript +// This becomes a GraphQL mutation +export function createSecurityAlert(data: AlertInput): SecurityAlert { + // Create new security alert + return deploySecurityAlert(data) +} +``` + Now you can execute data modifications: @@ -130,119 +148,193 @@ automatically become mutations. 
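Before the complete example below, one addition worth noting: the `createSecurityAlert` snippets above reference `AlertInput` and `SecurityAlert` without defining them. In the AssemblyScript variant these would typically be plain `@json` classes; the sketch below is illustrative only, and its field names are assumptions rather than something taken from the Modus docs.

```ts AssemblyScript
// Hypothetical shapes for the types referenced by createSecurityAlert above.
// Field names are illustrative; a real schema would define its own.
@json
class AlertInput {
  title!: string
  severity!: string
  description!: string
}

@json
class SecurityAlert {
  id!: string
  title!: string
  severity!: string
  createdAt!: string
}
```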
 Here's a complete example that demonstrates how functions integrate external
 APIs with AI models for intelligent data processing:
 
-```go
-package main
-
-import (
-  "fmt"
-  "strings"
-  "github.com/hypermodeinc/modus/sdk/go/pkg/http"
-  "github.com/hypermodeinc/modus/sdk/go/pkg/models"
-  "github.com/hypermodeinc/modus/sdk/go/pkg/models/openai"
-)
-
-type WeatherIntel struct {
-  City        string  `json:"city"`
-  Temperature float64 `json:"temperature"`
-  Conditions  string  `json:"conditions"`
-  Analysis    string  `json:"analysis"`
-}
-
-const modelName = "text-generator"
-
-// Function: Gather weather data and provide tactical analysis
-func GatherWeatherIntelligence(city string) (*WeatherIntel, error) {
-  // Fetch weather data from OpenWeatherMap API
-  url := fmt.Sprintf(
-    "https://api.openweathermap.org/data/2.5/weather?q=%s&appid={{API_KEY}}&units=metric",
-    city,
-  )
-
-  response, err := http.Fetch(url)
-  if err != nil {
-    return nil, err
-  }
-  if !response.Ok() {
-    return nil, fmt.Errorf(
-      "weather data retrieval failed: %d %s",
-      response.Status,
-      response.StatusText,
-    )
-  }
-
-  // Parse weather data
-  var weatherData struct {
-    Name string `json:"name"`
-    Main struct {
-      Temp float64 `json:"temp"`
-    } `json:"main"`
-    Weather []struct {
-      Description string `json:"description"`
-    } `json:"weather"`
-  }
-
-  response.JSON(&weatherData)
-
-  conditions := "unknown"
-  if len(weatherData.Weather) > 0 {
-    conditions = weatherData.Weather[0].Description
-  }
-
-  // Generate tactical analysis
-  analysis, err := analyzeTacticalConditions(
-    weatherData.Name,
-    weatherData.Main.Temp,
-    conditions,
-  )
-  if err != nil {
-    fmt.Printf("Analysis failed for %s: %v\n", weatherData.Name, err)
-    analysis = "Analysis unavailable - proceed with standard protocols"
-  }
-
-  return &WeatherIntel{
-    City:        weatherData.Name,
-    Temperature: weatherData.Main.Temp,
-    Conditions:  conditions,
-    Analysis:    analysis,
-  }, nil
-}
-
-// Analyze weather conditions for tactical implications
-func analyzeTacticalConditions(city string, temp float64, conditions string) (string, error) {
-  model, err := models.GetModel[openai.ChatModel](modelName)
-  if err != nil {
-    return "", err
-  }
-
-  prompt := `You are a tactical analyst evaluating weather conditions for field operations.
-  Provide a brief tactical assessment of how these weather conditions might impact
-  outdoor activities, visibility, and operational considerations in 1-2 sentences.`
-
-  content := fmt.Sprintf(
-    "Location: %s, Temperature: %.1f°C, Conditions: %s",
-    city,
-    temp,
-    conditions,
-  )
-
-  input, err := model.CreateInput(
-    openai.NewSystemMessage(prompt),
-    openai.NewUserMessage(content),
-  )
-  if err != nil {
-    return "", err
-  }
-
-  input.Temperature = 0.7
-
-  output, err := model.Invoke(input)
-  if err != nil {
-    return "", err
-  }
-
-  return strings.TrimSpace(output.Choices[0].Message.Content), nil
-}
-```
+
+```go Go
+package main
+
+import (
+  "fmt"
+  "strings"
+
+  "github.com/hypermodeinc/modus/sdk/go/pkg/http"
+  "github.com/hypermodeinc/modus/sdk/go/pkg/models"
+  "github.com/hypermodeinc/modus/sdk/go/pkg/models/openai"
+)
+
+type WeatherIntel struct {
+  City        string  `json:"city"`
+  Temperature float64 `json:"temperature"`
+  Conditions  string  `json:"conditions"`
+  Analysis    string  `json:"analysis"`
+}
+
+const modelName = "text-generator"
+
+// Function: Gather weather data and provide tactical analysis
+func GatherWeatherIntelligence(city string) (*WeatherIntel, error) {
+  // Fetch weather data from OpenWeatherMap API
+  url := fmt.Sprintf(
+    "https://api.openweathermap.org/data/2.5/weather?q=%s&appid={{API_KEY}}&units=metric",
+    city,
+  )
+
+  response, err := http.Fetch(url)
+  if err != nil {
+    return nil, err
+  }
+  if !response.Ok() {
+    return nil, fmt.Errorf(
+      "weather data retrieval failed: %d %s",
+      response.Status,
+      response.StatusText,
+    )
+  }
+
+  // Parse weather data
+  var weatherData struct {
+    Name string `json:"name"`
+    Main struct {
+      Temp float64 `json:"temp"`
+    } `json:"main"`
+    Weather []struct {
+      Description string `json:"description"`
+    } `json:"weather"`
+  }
+
+  response.JSON(&weatherData)
+
+  conditions := "unknown"
+  if len(weatherData.Weather) > 0 {
+    conditions = weatherData.Weather[0].Description
+  }
+
+  // Generate tactical analysis
+  analysis, err := analyzeTacticalConditions(
+    weatherData.Name,
+    weatherData.Main.Temp,
+    conditions,
+  )
+  if err != nil {
+    fmt.Printf("Analysis failed for %s: %v\n", weatherData.Name, err)
+    analysis = "Analysis unavailable - proceed with standard protocols"
+  }
+
+  return &WeatherIntel{
+    City:        weatherData.Name,
+    Temperature: weatherData.Main.Temp,
+    Conditions:  conditions,
+    Analysis:    analysis,
+  }, nil
+}
+
+// Analyze weather conditions for tactical implications
+func analyzeTacticalConditions(city string, temp float64, conditions string) (string, error) {
+  model, err := models.GetModel[openai.ChatModel](modelName)
+  if err != nil {
+    return "", err
+  }
+
+  prompt := `You are a tactical analyst evaluating weather conditions for field operations.
+  Provide a brief tactical assessment of how these weather conditions might impact
+  outdoor activities, visibility, and operational considerations in 1-2 sentences.`
+
+  content := fmt.Sprintf(
+    "Location: %s, Temperature: %.1f°C, Conditions: %s",
+    city,
+    temp,
+    conditions,
+  )
+
+  input, err := model.CreateInput(
+    openai.NewSystemMessage(prompt),
+    openai.NewUserMessage(content),
+  )
+  if err != nil {
+    return "", err
+  }
+
+  input.Temperature = 0.7
+
+  output, err := model.Invoke(input)
+  if err != nil {
+    return "", err
+  }
+
+  return strings.TrimSpace(output.Choices[0].Message.Content), nil
+}
+```
+
+```ts AssemblyScript
+import { http, models } from "@hypermode/modus-sdk-as";
+import { OpenAIChatModel, SystemMessage, UserMessage } from "@hypermode/modus-sdk-as/models/openai/chat";
+
+@json
+class WeatherIntel {
+  city!: string;
+  temperature!: f64;
+  conditions!: string;
+  analysis!: string;
+}
+
+@json
+class WeatherData {
+  name!: string;
+  main!: MainWeatherData;
+  weather!: WeatherDescription[];
+}
+
+@json
+class MainWeatherData {
+  temp!: f64;
+}
+
+@json
+class WeatherDescription {
+  description!: string;
+}
+
+const modelName = "text-generator";
+
+export function gatherWeatherIntelligence(city: string): WeatherIntel {
+  const url = `https://api.openweathermap.org/data/2.5/weather?q=${city}&appid={{API_KEY}}&units=metric`;
+
+  const response = http.fetch(url);
+
+  if (!response.ok)
+    throw new Error("Weather data retrieval failed: " + response.status.toString() + " " + response.statusText);
+
+  const weatherData = response.json<WeatherData>();
+
+  let conditions = "unknown";
+
+  if (weatherData.weather.length) conditions = weatherData.weather[0].description;
+
+  const analysis = analyzeTacticalConditions(
+    weatherData.name,
+    weatherData.main.temp,
+    conditions
+  );
+
+  return {
+    city: weatherData.name,
+    temperature: weatherData.main.temp,
+    conditions: conditions,
+    analysis: analysis
+  };
+}
+
+function analyzeTacticalConditions(city: string, temperature: f64, conditions: string): string {
+  const model = models.getModel<OpenAIChatModel>(modelName);
+
+  const prompt = `You are a tactical analyst evaluating weather conditions for field operations.
+    Provide a brief tactical assessment of how these weather conditions might impact
+    outdoor activities, visibility, and operational considerations in 1-2 sentences.`;
+
+  const content = `Location: ${city}, Temperature: ${temperature}°C, Conditions: ${conditions}`;
+
+  const input = model.createInput([
+    new SystemMessage(prompt),
+    new UserMessage(content)
+  ]);
+
+  input.temperature = 0.7;
+
+  const output = model.invoke(input);
+
+  return output.choices[0].message.content.trim();
+}
+```
+
 
 This function automatically becomes available as a GraphQL query:
diff --git a/modus/model-invoking.mdx b/modus/model-invoking.mdx
index aafba0fe..0b67f0a1 100644
--- a/modus/model-invoking.mdx
+++ b/modus/model-invoking.mdx
@@ -159,7 +159,6 @@ func GenerateText(instruction, prompt string) (string, error) {
 import { models } from "@hypermode/modus-sdk-as"
 import {
   OpenAIChatModel,
-  ResponseFormat,
   SystemMessage,
   UserMessage,
 } from "@hypermode/modus-sdk-as/models/openai/chat"
@@ -225,12 +224,8 @@ func ClassifyText(text string, threshold float32) (string, error) {
 ```ts AssemblyScript
 import { models } from "@hypermode/modus-sdk-as"
 
-import {
-  ClassificationModel,
-  ClassifierResult,
-} from "@hypermode/modus-sdk-as/models/experimental/classification"
+import { ClassificationModel } from "@hypermode/modus-sdk-as/models/experimental/classification"
 
-// this model name should match the one defined in the modus.json manifest file
 const modelName: string = "my-classifier"
 
 // this function takes input text and a probability threshold, and returns the
@@ -241,12 +236,14 @@ export function classifyText(text: string, threshold: f32): string {
   const input = model.createInput([text])
   const output = model.invoke(input)
 
-  const prediction = output.predictions[0]
-  if (prediction.confidence >= threshold) {
-    return prediction.label
+  const predictions = output.predictions
+
+  const prediction = predictions[0]
+  if (prediction.confidence < threshold) {
+    return ""
   }
 
-  return ""
+  return prediction.label
 }
 ```
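As a closing usage sketch (not part of the patch): with the refactored AssemblyScript `classifyText` above, an empty string now signals that no prediction met the threshold. A caller might consume that contract along these lines — the `isSpam` wrapper, the `0.75` threshold, and the `"spam"` label are illustrative assumptions, since the actual labels depend on the custom classification model.

```ts AssemblyScript
// Minimal sketch of consuming classifyText's contract, assuming a model
// whose labels include "spam"; the threshold and label are illustrative.
export function isSpam(text: string): bool {
  const label = classifyText(text, 0.75)
  if (label == "") {
    return false // classifier was not confident enough to return a label
  }
  return label == "spam"
}
```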