diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..1debcb5 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/LLM AutoHotkey Assistant.exe +/.project diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..767654c --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,29 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "type": "ahk2", + "request": "launch", + "name": "TW_AHK_LLM_Assistant v2 Debugger", + "program": "C:/NoSave/warrott/J/GITHUB/LLM-AutoHotkey-Assistant/LLM AutoHotkey Assistant.ahk", + "stopOnEntry": true + }, + { + "type": "ahk2", + "request": "launch", + "name": "TW_AHK_Response_Window v2 Debugger", + "program": "C:/NoSave/warrott/J/GITHUB/LLM-AutoHotkey-Assistant/lib/Response Window.ahk", + "stopOnEntry": true + }, + { + "type": "ahk2", + "request": "launch", + "name": "AutoHotkey Debugger", + "program": "${file}", + "stopOnEntry": true + } + ] +} \ No newline at end of file diff --git a/LLM AutoHotkey Assistant.ahk b/LLM AutoHotkey Assistant.ahk index 246f264..868c082 100644 --- a/LLM AutoHotkey Assistant.ahk +++ b/LLM AutoHotkey Assistant.ahk @@ -1,10 +1,43 @@ -#Include +#Requires AutoHotkey v2.0 + +;****************************************************************************** +; +; Compilation directives +; +;****************************************************************************** + +; ;@Ahk2Exe-Base %A_AhkPath% + +; ;@Ahk2Exe-AddResource %A_ScriptName% +;@Ahk2Exe-AddResource lib\Response Window.ahk, lib\Response Window.ahk +;@Ahk2Exe-AddResource lib\simulate_select.ahk, lib\simulate_select.ahk + + +; WARNING : Do not forget to update lib\Config.ahk according to following numbers + +;@Ahk2Exe-AddResource icons\IconOn.ico, 10 +;@Ahk2Exe-AddResource icons\IconOff.ico, 11 +;@Ahk2Exe-AddResource icons\anthropic.ico, 12 +;@Ahk2Exe-AddResource icons\deepseek.ico, 13 +;@Ahk2Exe-AddResource icons\google.ico, 14 +;@Ahk2Exe-AddResource icons\openai.ico, 15 +;@Ahk2Exe-AddResource icons\openrouter.ico, 16 +;@Ahk2Exe-AddResource icons\perplexity.ico, 17 + +#Include lib\Config.ahk + +#Include config\Prompts.ahk + #SingleInstance ; ---------------------------------------------------- ; Hotkeys ; ---------------------------------------------------- +; +; Custom Hotkeys to call AI Assistant +; + `:: mainScriptHotkeyActions("showPromptMenu") ~^s:: mainScriptHotkeyActions("saveAndReloadScript") ~^w:: mainScriptHotkeyActions("closeWindows") @@ -12,70 +45,198 @@ #SuspendExempt CapsLock & `:: mainScriptHotkeyActions("suspendHotkey") +; ---------------------------------------------------- +; Auto-execute Section +; ---------------------------------------------------- + +; Dispatch hotkey actions mainScriptHotkeyActions(action) { + + MENU_LINE_SEPARATOR := "---" + + ; Get count of active models (= Open Response Windows) activeModelsCount := getActiveModels().Count switch action { + case "showPromptMenu": promptMenu := Menu() tagsMap := Map() - ; Process all active models once to build prompt maps if (activeModelsCount > 0) { + ; There are open Response Windows + + ; + ; Build "Send message to" menu + ; + ; Process all open Response Windows once to build prompt maps for uniqueID, modelData in getActiveModels() { getActiveModels().%modelData.promptName% := true } - ; Send message to menu + ; Add "Send message to" menu sendToMenu := Menu() 
promptMenu.Add("Send message to", sendToMenu) + ; Create a sub-item (with the prompt name) for each open Response Window + ; TODO TW m 2025_06_20 If two Response Windows have the same prompt name, only one will be shown in the menu. for uniqueID, modelData in getActiveModels() { sendToMenu.Add(modelData.promptName, sendToPromptGroupHandler.Bind(modelData.promptName)) } ; If there are more than one Response Windows, add "All" menu option if (activeModelsCount > 1) { - sendToMenu.Add("All", (*) => sendToAllModelsInputWindow.showInputWindow(, , "ahk_id " sendToAllModelsInputWindow - .guiObj.hWnd)) + sendToMenu.Add("All", (*) => sendToAllModelsInputWindow.showInputWindow(, , "ahk_id " sendToAllModelsInputWindow.guiObj.hWnd)) } ; Line separator after Activate and Send message to promptMenu.Add() } - ; Normal prompts - for index, prompt in managePromptState("prompts", "get") { + ; + ; Build Tags or Prompt menu (from Prompts.ahk) + ; - ; Check if prompt has tags - hasTags := prompt.HasProp("tags") && prompt.tags && prompt.tags.Length > 0 + Loop 2 ; for step = 1 to 2 + { + step := A_Index ; step 1 = menu items, step 2 = tags sub-menu - ; If no tags, add directly to menu and continue - if !hasTags { - promptMenu.Add(prompt.menuText, promptMenuHandler.Bind(index)) - continue - } - ; Process tags - for tag in prompt.tags { - normalizedTag := StrLower(Trim(tag)) + if (step = 1) { + ; 1st step + + } else { + ; 2nd step + + ; Line separator before Tags menu + promptMenu.Add() + } - ; Create tag menu if doesn't exist - if !tagsMap.Has(normalizedTag) { - tagsMap[normalizedTag] := { menu: Menu(), displayName: tag } - promptMenu.Add(tag, tagsMap[normalizedTag].menu) + for index, prompt in managePromptState("prompts", "get") { + + hasPromptName := prompt.HasProp("promptName") && prompt.promptName && prompt.promptName != "" + + if (hasPromptName and prompt.promptName == MENU_LINE_SEPARATOR) { + ; Line separator + + if (step = 1) { + ; 1st step + + ; Line separator before Tags menu + promptMenu.Add() + + } else { + ; 2nd step + + } + + } else { + ; Not a line separator + + ; Check if prompt has tags + hasTags := prompt.HasProp("tags") && prompt.tags && prompt.tags.Length > 0 + + if !hasTags { + ; No tags + + if (step = 1) { + ; 1st step + + ; Add directly to menu + ; promptMenu.Add(prompt.menuText, promptMenuHandler.Bind(index)) + addToPromptMenu(promptMenu, prompt, index) + + } else { + + } + + } else { + ; Prompt has tags + + ; Transform tags to menu item + ; and add Prompt as sub-menu item + for tag in prompt.tags { + + if (step = 1) { + ; 1st step + + if (tag == "") { + ; tag is "" which means to add as direct menu item + + ; Add directly to menu + ; promptMenu.Add(prompt.menuText, promptMenuHandler.Bind(index)) + addToPromptMenu(promptMenu, prompt, index) + + } else { + ; There is a tag not "" which means need to add a tag menu + ; but only on step 2 + + ; NTD + } + + } else { + ; 2nd step + + if (tag == "") { + ; tag is empty + + ; NTD, because already done on step 1 + + } else if (tag == MENU_LINE_SEPARATOR) { + ; Line separator + + ; Add a Tag line separator to menu + promptMenu.Add() + + } else { + ; Normal tag + + normalizedTag := StrLower(Trim(tag)) + + if !tagsMap.Has(normalizedTag) { + ; First time this Tag is seen + + ; + ; Create tag menu + ; + + ; Create a tag map containing sub-menu and display name + tagsMap[normalizedTag] := { menu: Menu(), displayName: tag } + + ; Create Menu for tag with its sub-menu + promptMenu.Add(tag, tagsMap[normalizedTag].menu) + } + + if 
(hasAllNeededForMenu(prompt)) { + ; There is a menuText + + ; Add prompt to tag menu + tagsMap[normalizedTag].menu.Add(prompt.menuText, promptMenuHandler.Bind(index)) + + } else { + ; There is NO menuText + + ; NTD + } + } + } + + } ; for + } } + } ; for + } ; loop - ; Add prompt to tag menu - tagsMap[normalizedTag].menu.Add(prompt.menuText, promptMenuHandler.Bind(index)) - } - } - ; Add menus ("Activate", "Minimize", "Close") that manages Response Windows - ; after normal prompts if there are active models if (activeModelsCount > 0) { - + ; There are open Response Windows + + ; + ; After Tag or Prompt menu items, + ; add menu items ("Activate", "Minimize", "Close") that manages Response Windows + ; + ; Line separator before managing Response Window menu promptMenu.Add() @@ -104,15 +265,41 @@ mainScriptHotkeyActions(action) { } } + ; + ; Build "Help" menu + ; + + ; Line separator before Options + promptMenu.Add() + + ; promptMenu.Add("&Help", helpMenu := Menu()) + ; buildHelpMenu(helpMenu) + promptMenu.Add("&Help", gHelpMenu) + + ; + ; Build Options menu + ; + ; Line separator before Options promptMenu.Add() - ; Options menu promptMenu.Add("&Options", optionsMenu := Menu()) - optionsMenu.Add("&1 - Edit prompts", (*) => Run("Notepad " A_ScriptDir "\Prompts.ahk")) - optionsMenu.Add("&2 - View available models", (*) => Run("https://openrouter.ai/models")) - optionsMenu.Add("&3 - View available credits", (*) => Run("https://openrouter.ai/credits")) - optionsMenu.Add("&4 - View usage activity", (*) => Run("https://openrouter.ai/activity")) + optionsMenu.Add("&1 - Edit prompts", (*) => Run("Notepad " A_ScriptDir "\config\Prompts.ahk")) + optionsMenu.Add("&2 - View available models", (*) => Run(gLLM_BASE_URL "/v1/models")) + optionsMenu.Add("&3 - View available credits", (*) => Run(gLLM_BASE_URL "/v1/credits")) + optionsMenu.Add("&4 - View usage activity", (*) => Run(gLLM_BASE_URL "/v1/activity")) + + ; Turn default menu item to Bold + ; promptMenu.Default := "1&" ; First menu item + ; promptMenu.Default := "Language" ; Menu item with name "Language" + + ; Launch a secondary script to simulate Down key press to select + ; the first menu item. 
+ ; This method is mandatory because .show() blocks execution of all threads in the script + ; Only another Process can interact with the GUI while the main script is paused + RunScript("Lib\simulate_select.ahk") + + ; Display the menu promptMenu.Show() case "suspendHotkey": @@ -126,7 +313,7 @@ mainScriptHotkeyActions(action) { } ; Small delay to ensure file operations are complete - Sleep 100 + Sleep 1000 if (activeModelsCount > 0) { MsgBox("Script will automatically reload once all Response Windows are closed.", @@ -145,26 +332,117 @@ mainScriptHotkeyActions(action) { } } +addToPromptMenu(promptMenu, prompt, index) { + + if (hasAllNeededForMenu(prompt)) { + ; There is all needed info + + ; Add directly to menu and continue + promptMenu.Add(prompt.menuText, promptMenuHandler.Bind(index)) + + } else { + ; There is NO menuText + + MsgBox("SYNTAX ERROR : In Prompt n° " index) + } +} + +hasAllNeededForMenu(prompt) { + + hasPromptName := prompt.HasProp("promptName") && prompt.promptName && prompt.promptName != "" + hasMenuText := prompt.HasProp("menuText") && prompt.menuText && prompt.menuText != "" + hasSystemPrompt := prompt.HasProp("systemPrompt") && prompt.systemPrompt && prompt.systemPrompt != "" + hasAPIModels := prompt.HasProp("APIModels") && prompt.APIModels && prompt.APIModels != "" + + hasAllNeeded := hasPromptName and hasMenuText and hasSystemPrompt and hasAPIModels + + return hasAllNeeded +} + +RunScript(scriptPath) { + + Run(getScriptRunCmd(scriptPath)) +} + +getScriptRunCmd(scriptPath) { + + Return (!A_IsCompiled) + ? + ; The script is not compiled, run the script directly with AHK path + A_AhkPath ' ' '"' getRessourcePath(scriptPath) '"' + : + ; The script is compiled, run the script with /script and "*" to tell that the script is embedded in the executable + A_AhkPath ' /script ' '"' getRessourcePath(scriptPath) '"' +} + + ; ---------------------------------------------------- ; Script tray menu ; ---------------------------------------------------- -trayMenuItems := [{ +; Create Help Menu +gHelpMenu := Menu() +buildHelpMenu(gHelpMenu) + +buildHelpMenu(helpMenu) { + helpMenu.Add("Open README.pdf", (*) => openHelpPdf()) + helpMenu.Add("Press CTRL+click on a menu item, to invert use of Selection or Chat window", (*) => {}) + helpMenu.Add("About : " gVersionStringPrefix " " gVersionStringSuffix, (*) => {}) +} + +openHelpPdf() { + Run(A_ScriptDir "\README.pdf") +} + + +trayMenuItems := [ + { + menuText: "&Help", + function: gHelpMenu + } + ,{ + menuText: "", + function: (*) => {} + } + ,{ menuText: "&Reload Script", function: (*) => Reload() -}, { + } + ,{ + menuText: "", + function: (*) => {} + } + ,{ + menuText: "&Suspend Assistant", + function: (*) => mainScriptHotkeyActions("suspendHotkey") + } + ,{ menuText: "E&xit", function: (*) => ExitApp() -}] + } +] ; ---------------------------------------------------- ; Generate tray menu dynamically ; ---------------------------------------------------- -TraySetIcon("icons\IconOn.ico") +TraySetIconEmbed(ICON_ON) + A_TrayMenu.Delete() + for index, item in trayMenuItems { - A_TrayMenu.Add(item.menuText, item.function) + if (item.menuText != "") { + ; There is a menu text + + ; Add it to the tray menu + A_TrayMenu.Add(item.menuText, item.function) + + } else { + ; There is NO menu text + + ; Add a separator to the tray menu + A_TrayMenu.Add() + } } A_IconTip := "LLM AutoHotkey Assistant" @@ -172,7 +450,7 @@ A_IconTip := "LLM AutoHotkey Assistant" ; Create new instance of OpenRouter class ; 
---------------------------------------------------- -router := OpenRouter(APIKey) +router := OpenRouter(gLLM_BASE_URL, APIKey) ; ---------------------------------------------------- ; Create Input Windows @@ -186,26 +464,29 @@ sendToPromptNameInputWindow := InputWindow("Send message to prompt") ; Register sendButtonActions ; ---------------------------------------------------- -customPromptInputWindow.sendButtonAction(customPromptSendButtonAction) -sendToAllModelsInputWindow.sendButtonAction(sendToAllModelsSendButtonAction) -sendToPromptNameInputWindow.sendButtonAction(sendToGroupSendButtonAction) +customPromptInputWindow.registerSendButtonAction(customPromptSendButtonAction) +sendToAllModelsInputWindow.registerSendButtonAction(sendToAllModelsSendButtonAction) +sendToPromptNameInputWindow.registerSendButtonAction(sendToGroupSendButtonAction) ; ---------------------------------------------------- ; Input Window actions ; ---------------------------------------------------- customPromptSendButtonAction(*) { + if !customPromptInputWindow.validateInputAndHide() { return } selectedPrompt := managePromptState("selectedPrompt", "get") - processInitialRequest(selectedPrompt.promptName, selectedPrompt.menuText, selectedPrompt.systemPrompt, - selectedPrompt.APIModels, - selectedPrompt.HasProp("copyAsMarkdown") && selectedPrompt.copyAsMarkdown, - selectedPrompt.HasProp("isAutoPaste") && selectedPrompt.isAutoPaste, - selectedPrompt.HasProp("skipConfirmation") && selectedPrompt.skipConfirmation, - customPromptInputWindow.EditControl.Value + processInitialRequest(selectedPrompt.promptName + , selectedPrompt.menuText + , selectedPrompt.systemPrompt + , selectedPrompt.APIModels + , selectedPrompt.HasProp("copyAsMarkdown") && selectedPrompt.copyAsMarkdown + , selectedPrompt.HasProp("isAutoPaste") && selectedPrompt.isAutoPaste + , selectedPrompt.HasProp("skipConfirmation") && selectedPrompt.skipConfirmation + , customPromptInputWindow.EditControl.Value ) customPromptInputWindow.EditControl.Value := "" } @@ -286,9 +567,8 @@ sendToPromptGroupHandler(promptName, *) { ; Check if the prompt has skipConfirmation property and set accordingly sendToPromptNameInputWindow.setSkipConfirmation(selectedPrompt.HasProp("skipConfirmation") ? 
selectedPrompt.skipConfirmation : false) - sendToPromptNameInputWindow.showInputWindow(, "Send message to " promptName, "ahk_id " sendToPromptNameInputWindow.guiObj - .hWnd - ) + + sendToPromptNameInputWindow.showInputWindow(, "Send message to " promptName, "ahk_id " sendToPromptNameInputWindow.guiObj.hWnd) } ; Generic function to perform an operation on prompt windows @@ -337,13 +617,13 @@ scriptSuspendStatus.GetPos(, , &scriptSuspendStatusWidth) toggleSuspend(*) { Suspend -1 if (A_IsSuspended) { - TraySetIcon("icons\IconOff.ico", , 1) - A_IconTip := "LLM AutoHotkey Assistant - Suspended)" + TraySetIconEmbed(ICON_OFF) + A_IconTip := "LLM AutoHotkey Assistant - (Suspended)" ; Show GUI at the bottom, centered scriptSuspendStatus.Show("AutoSize x" (A_ScreenWidth - scriptSuspendStatusWidth) / 2.3 " y990 NA") } else { - TraySetIcon("icons\IconOn.ico") + TraySetIconEmbed(ICON_ON) A_IconTip := "LLM AutoHotkey Assistant" scriptSuspendStatus.Hide() } @@ -351,12 +631,29 @@ toggleSuspend(*) { ; ---------------------------------------------------- ; Prompt menu handler function +; +; If needed, show a Custom Prompt Window Else send the request ; ---------------------------------------------------- promptMenuHandler(index, *) { + + ; Get CTRL key state + ctrlPressed := GetKeyState("Ctrl", "P") + + ; Get the list of prompts and the selected prompt promptsList := managePromptState("prompts", "get") selectedPrompt := promptsList[index] - if (selectedPrompt.HasProp("isCustomPrompt") && selectedPrompt.isCustomPrompt) { + + ; Determine if we should show the prompt input window based on the prompt properties + shallShowPromptInputWindow := ( selectedPrompt.HasProp("isCustomPrompt") + && selectedPrompt.isCustomPrompt + ) + + ; Invert if CTRL is pressed + shallShowPromptInputWindow := ctrlPressed ? !shallShowPromptInputWindow : shallShowPromptInputWindow + + if (shallShowPromptInputWindow) or (getSelectedText() == "") { + ; Selected prompt ask for a custom prompt or no text is selected ; Save the prompt for future reference in customPromptSendButtonAction(*) managePromptState("selectedPrompt", "set", selectedPrompt) @@ -364,14 +661,23 @@ promptMenuHandler(index, *) { ; Set skipConfirmation property based on the prompt customPromptInputWindow.setSkipConfirmation(selectedPrompt.HasProp("skipConfirmation") ? selectedPrompt.skipConfirmation : false) - customPromptInputWindow.showInputWindow(selectedPrompt.HasProp("customPromptInitialMessage") - ? selectedPrompt.customPromptInitialMessage : unset, selectedPrompt.promptName, "ahk_id " customPromptInputWindow - .guiObj.hWnd) + ; Show InputWindow for custom prompt + customPromptInputWindow.showInputWindow(selectedPrompt.HasProp("customPromptInitialMessage") ? selectedPrompt.customPromptInitialMessage : unset + , selectedPrompt.promptName + , "ahk_id " customPromptInputWindow.guiObj.hWnd + , selectedPrompt.HasProp("isCustomPromptCursorAtEnd") ? 
selectedPrompt.isCustomPromptCursorAtEnd : true) + } else { - processInitialRequest(selectedPrompt.promptName, selectedPrompt.menuText, selectedPrompt.systemPrompt, + ; Selected prompt does NOT ask for a custom prompt + + ; Process initial request with the selected prompt details + processInitialRequest(selectedPrompt.promptName, + selectedPrompt.menuText, + selectedPrompt.systemPrompt, selectedPrompt.APIModels, selectedPrompt.HasProp("copyAsMarkdown") && selectedPrompt.copyAsMarkdown, selectedPrompt.HasProp("isAutoPaste") && selectedPrompt.isAutoPaste, - selectedPrompt.HasProp("skipConfirmation") && selectedPrompt.skipConfirmation) + selectedPrompt.HasProp("skipConfirmation") && selectedPrompt.skipConfirmation + ) } } @@ -411,29 +717,48 @@ managePromptState(component, action, data := {}) { ; Connect to LLM API and process request ; ---------------------------------------------------- -processInitialRequest(promptName, menuText, systemPrompt, APIModels, copyAsMarkdown, isAutoPaste, skipConfirmation, - customPromptMessage := unset) { +processInitialRequest( promptName, + menuText, + systemPrompt, + APIModels, + copyAsMarkdown, + isAutoPaste, + skipConfirmation, + customPromptMessage := unset) { + + ; Récupérer le texte sélectionné + selectedText := getSelectedText() - ; Handle the copied text - clipboardBeforeCopy := A_Clipboard - A_Clipboard := "" - Send("^c") + if (selectedText == "") { + ; No selected text - if !ClipWait(1) { if IsSet(customPromptMessage) { + ; There is a custom prompt + + ; Use custom prompt message without copied text userPrompt := customPromptMessage + } else { + ; Copy failed and there is no custom prompt + + ; Reset Tooltip manageCursorAndToolTip("Reset") + MsgBox "The attempt to copy text onto the clipboard failed.", "No text copied", "IconX" return } } else if IsSet(customPromptMessage) { - userPrompt := customPromptMessage "`n`n" A_Clipboard + ; Copy succeeded and there is a custom prompt + + ; Use custom prompt message with copied text + userPrompt := customPromptMessage "`n`n" selectedText + } else { - userPrompt := A_Clipboard - } + ; Copy succeeded and there is no custom prompt - A_Clipboard := clipboardBeforeCopy + ; Use copied text as prompt without any additional message + userPrompt := selectedText + } ; Removes newlines, spaces, and splits by comma APIModels := StrSplit(RegExReplace(APIModels, "\s+", ""), ",") @@ -441,74 +766,133 @@ processInitialRequest(promptName, menuText, systemPrompt, APIModels, copyAsMarkd ; Automatically disables isAutoPaste if more than one model is present isAutoPaste := (APIModels.Length > 1) ? 
false : isAutoPaste + ; Loop to call all Models in APIModels list for i, fullAPIModelName in APIModels { - ; Get text before forward slash as providerName - providerName := SubStr(fullAPIModelName, 1, InStr(fullAPIModelName, "/") - 1) - - ; Get text after forward slash as singleAPIModelName - singleAPIModelName := SubStr(fullAPIModelName, InStr(fullAPIModelName, "/") + 1) - - uniqueID := A_TickCount - - ; Create the chatHistoryJSONRequest - chatHistoryJSONRequest := router.createJSONRequest(fullAPIModelName, systemPrompt, userPrompt) - - ; Generate sanitized filenames for chat history, cURL command, and cURL output files - chatHistoryJSONRequestFile := A_Temp "\" RegExReplace("chatHistoryJSONRequest_" promptName "_" singleAPIModelName "_" uniqueID ".json", - "[\/\\:*?`"<>|]", "") - cURLCommandFile := A_Temp "\" RegExReplace("cURLCommand_" promptName "_" singleAPIModelName "_" uniqueID ".txt", - "[\/\\:*?`"<>|]", "") - cURLOutputFile := A_Temp "\" RegExReplace("cURLOutput_" promptName "_" singleAPIModelName "_" uniqueID ".json", - "[\/\\:*?`"<>|]", "") - - ; Write the JSON request and cURL command to files - FileOpen(chatHistoryJSONRequestFile, "w", "UTF-8-RAW").Write(chatHistoryJSONRequest) - cURLCommand := router.buildcURLCommand(chatHistoryJSONRequestFile, cURLOutputFile) - FileOpen(cURLCommandFile, "w").Write(cURLCommand) - - ; Maintain a reference in the global map - getActiveModels()[uniqueID] := { - promptName: promptName, - name: singleAPIModelName, - provider: router, - JSONFile: chatHistoryJSONRequestFile, - cURLFile: cURLCommandFile, - outputFile: cURLOutputFile, - isLoading: false - } + if (fullAPIModelName != "") { + ; There is a valid API Model Name + + ; + ; Generate sanitized filenames for chat history, cURL command, and cURL output files + ; + + ; Use current timestamp as unique identifier for filenames + uniqueID := A_TickCount + + ; Get text before forward slash as providerName + providerName := SubStr(fullAPIModelName, 1, InStr(fullAPIModelName, "/") - 1) + + ; Get text after forward slash as singleAPIModelName + singleAPIModelName := SubStr(fullAPIModelName, InStr(fullAPIModelName, "/") + 1) + + ; Create a prefix for filenames + prefix := "LLM_Ahk_" uniqueID "_" promptName "_" singleAPIModelName + + ; Define forbidden characters for filenames that must be replaced with underscores + forbiddenChars := "[\/\\:*?`"<>| ]" + + ; Use prefix ans number in order to easily sort files + dataObjToJSONStrFile := A_Temp "\" RegExReplace(prefix "_" "1_responseWindowData.json", forbiddenChars, "_") + chatHistoryJSONRequestFile := A_Temp "\" RegExReplace(prefix "_" "2_chatHistoryJSONRequest.json", forbiddenChars, "_") + cURLCommandFile := A_Temp "\" RegExReplace(prefix "_" "3_cURLCommand.bat", forbiddenChars, "_") + cURLOutputFile := A_Temp "\" RegExReplace(prefix "_" "4_cURLOutput.json", forbiddenChars, "_") + + ; + ; Write Chat History JSON request to files + ; + + ; Create the chatHistoryJSONRequest + chatHistoryJSONRequest := router.createJSONRequest(fullAPIModelName, systemPrompt, userPrompt) + + FileOpen(chatHistoryJSONRequestFile, "w", "UTF-8-RAW").Write(chatHistoryJSONRequest) + + ; + ; Write cURL command to file + ; + + cURLCommand := router.buildcURLCommand(chatHistoryJSONRequestFile, cURLOutputFile) + + ; Write cURL command to file + ; Before running manually this bat file for debug in a cmd.exe window, + ; change the code page of the cmd.exe to UTF-8 with the following command : chcp 65001 + FileOpen(cURLCommandFile, "w", "UTF-8").Write(cURLCommand) + + ; + ; Maintain a 
reference in the global map + ; + + getActiveModels()[uniqueID] := { + promptName: promptName, + menuText: menuText, + name: singleAPIModelName, + userPrompt: userPrompt, + systemPrompt: systemPrompt, + provider: router, + JSONFile: chatHistoryJSONRequestFile, + cURLFile: cURLCommandFile, + outputFile: cURLOutputFile, + isLoading: false + } + + ; + ; Create an object containing all values for the Response Window + ; + + responseWindowDataObj := { + chatHistoryJSONRequestFile: chatHistoryJSONRequestFile, + cURLCommandFile: cURLCommandFile, + cURLOutputFile: cURLOutputFile, + providerName: providerName, + copyAsMarkdown: copyAsMarkdown, + isAutoPaste: isAutoPaste, + skipConfirmation: skipConfirmation, + mainScriptHiddenhWnd: A_ScriptHwnd, + callingWindowHwnd: WinActive("A"), + responseWindowTitle: gVersionStringPrefix " " promptName " [" singleAPIModelName "]" " : " gVersionStringSuffix, + singleAPIModelName: singleAPIModelName, + numberOfAPIModels: APIModels.Length, + APIModelsIndex: i, + uniqueID: uniqueID + } + + ; + ; Write the object to a file named responseWindowData and run + ; Response Window.ahk while passing the location of that file + ; through dataObjToJSONStrFile as the first argument + ; + + dataObjToJSONStr := jsongo.Stringify(responseWindowDataObj) + FileOpen(dataObjToJSONStrFile, "w", "UTF-8-RAW").Write(dataObjToJSONStr) + getActiveModels()[uniqueID].JSONFile := chatHistoryJSONRequestFile + + if (!A_IsCompiled) + { + ; On est en mode script + ; A_AhkPath pointe vers AutoHotkeyU64.exe + + ; Lancer Response Window.ahk dans un Process séparé + Run(A_AhkPath . " " . "`"lib\Response Window.ahk`"" . " " . "`"" . dataObjToJSONStrFile . "`"") + + } else { + ; On est en mode compilé + ; A_AhkPath pointe vers l'exe du script principal commpilé + + ; Lancer Response Window.ahk (Embedded, d'où la nécessité d'une étoile "*" devant le nom de la ressource) dans un Process séparé + Run(A_AhkPath . " /script " . "`"*lib\Response Window.ahk`"" . " " . "`"" . dataObjToJSONStrFile . "`"") + } - ; Create an object containing all values for the Response Window - responseWindowDataObj := { - chatHistoryJSONRequestFile: chatHistoryJSONRequestFile, - cURLCommandFile: cURLCommandFile, - cURLOutputFile: cURLOutputFile, - providerName: providerName, - copyAsMarkdown: copyAsMarkdown, - isAutoPaste: isAutoPaste, - skipConfirmation: skipConfirmation, - mainScriptHiddenhWnd: A_ScriptHwnd, - responseWindowTitle: promptName " [" singleAPIModelName "]", - singleAPIModelName: singleAPIModelName, - numberOfAPIModels: APIModels.Length, - APIModelsIndex: i, - uniqueID: uniqueID - } + } else { + ; There is NO API Model Name - ; Write the object to a file named responseWindowData and run - ; Response Window.ahk while passing the location of that file - ; through dataObjToJSONStrFile as the first argument - dataObjToJSONStr := jsongo.Stringify(responseWindowDataObj) - dataObjToJSONStrFile := A_Temp "\" RegExReplace("responseWindowData_" promptName "_" singleAPIModelName "_" A_TickCount ".json", - "[\/\\:*?`"<>|]", "") - FileOpen(dataObjToJSONStrFile, "w", "UTF-8-RAW").Write(dataObjToJSONStr) - getActiveModels()[uniqueID].JSONFile := chatHistoryJSONRequestFile - Run("lib\Response Window.ahk " "`"" dataObjToJSONStrFile) + ; Eventually, I think it's better to simply ignore it + ; ; Display error message + ; MsgBox("SYNTAX ERROR : In Prompt [" promptName "] on API Model n° " i) + } } } ; ---------------------------------------------------- -; Tracks active models +; Tracks active models (i.e. 
Opened Response Windows) ; ---------------------------------------------------- getActiveModels() { @@ -522,6 +906,7 @@ getActiveModels() { ; ---------------------------------------------------- CustomMessages.registerHandlers("mainScript", responseWindowState) + responseWindowState(uniqueID, responseWindowhWnd, state, mainScriptHiddenhWnd) { static responseWindowLoadingCount := 0 static reloadScript := false @@ -584,7 +969,7 @@ manageCursorAndToolTip(action) { toolTipMessage := "Retrieving response for the following prompt" - ; Singular and plural forms of the word "model" + ; Singular and plural forms of the word "prompt" if (activeCount > 1) { toolTipMessage .= "s" } @@ -592,7 +977,7 @@ manageCursorAndToolTip(action) { toolTipMessage .= " (Press ESC to cancel):" for key, data in getActiveModels() { if (data.isLoading) { - toolTipMessage .= "`n- " data.promptName " [" data.name "]" + toolTipMessage .= "`n- " StrReplace(data.menuText, "&", "") " `"" SubStr(data.userPrompt, 1 , 50) "...`"" " [" data.name "]" } } diff --git a/README.md b/README.md index 7246a61..dddb461 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,111 @@ + + + +
![bot](https://github.com/user-attachments/assets/fd5e1d8c-d19f-44f1-b590-2cc950ede6b9) # LLM AutoHotkey Assistant -An AutoHotkey v2 application that utilizes [OpenRouter.ai](https://openrouter.ai/) to seamlessly integrate Large Language Models into your daily workflow. Process texts with customizable prompts by pressing a hotkey and interact with multiple AI models simultaneously. +

+Do you already know what LLM AutoHotkey Assistant is and just want to get started right now?

+ +

Jump to the "Getting Started" section below!

+ +*** + +

+Otherwise +

+ +

+LLM AutoHotkey Assistant is an AutoHotkey v2 application that, by default, uses OpenRouter.ai to seamlessly integrate LLMs (Large Language Models) into your daily workflow. Process text with customizable prompts by pressing a hotkey and interact with multiple AI models simultaneously. +

[![Download](https://img.shields.io/github/v/release/kdalanon/LLM-AutoHotkey-Assistant?style=for-the-badge&color=blue&label=Download)](https://github.com/kdalanon/LLM-AutoHotkey-Assistant/releases/latest) @@ -18,8 +119,15 @@ An AutoHotkey v2 application that utilizes [OpenRouter.ai](https://openrouter.ai > Navigate through this page by clicking on the menu button at the upper-right corner. > ![image](https://github.com/user-attachments/assets/eddb0216-f0db-4ecf-9231-81592d4aa454) +
+ ## 🔑 Key Features +The following paragraphs describe the **key features** of the application and some **use cases**. + +However, due to the ability to **customize** your own **prompt menu** items, you can create a lot more use cases, depending on the capabilities of your **connected AI**. + + ### 1️⃣ Text Processing with Keyboard Hotkeys Simply highlight any text and press [a hotkey](#hotkeys) to access AI-powered text processing. @@ -88,6 +196,8 @@ https://github.com/user-attachments/assets/62a6959a-e7b7-4379-b1c3-e82f131686ed https://github.com/user-attachments/assets/f960a7ef-9a6c-4217-8f86-44acfcea9122 +
+ ## 🚀 Getting Started ### Prerequisites @@ -96,41 +206,87 @@ https://github.com/user-attachments/assets/f960a7ef-9a6c-4217-8f86-44acfcea9122 - Windows OS - [API key](https://openrouter.ai/settings/keys) from [OpenRouter.ai](https://openrouter.ai) -### Set up +### Install the Assistant + +1. Download `LLM AutoHotkey Assistant.zip` +[![Download](https://img.shields.io/github/v/release/kdalanon/LLM-AutoHotkey-Assistant?style=for-the-badge&color=blue&label=Download)](https://github.com/kdalanon/LLM-AutoHotkey-Assistant/releases/latest) +2. Unzip `LLM AutoHotkey Assistant.zip` + +3. OPTIONAL : Configure the `config/Preferences.ahk` file as needed if you want to use a **local LLM**. -1. Run the `LLM AutoHotkey Assistant.ahk` script and press the `backtick` hotkey. -2. Select `Options` ➡ `Edit prompts` +4. Run the `LLM AutoHotkey Assistant.ahk` script and press the `backtick` (or `Alt+Shift+o`) hotkey. +5. Select `Options` ➡ `Edit prompts` ![image](https://github.com/user-attachments/assets/93b4b345-6651-4693-82b4-0edd728ff076) -3. Enter your [OpenRouter.ai API key](https://openrouter.ai/settings/keys) within the quotation marks. Then, press `CTRL + S` to save the file automatically and reload the application. +6. Enter your [OpenRouter.ai API key](https://openrouter.ai/settings/keys) within the quotation marks. Then, press `CTRL + S` to save the file automatically and reload the application. > [!NOTE] -> To ensure the API key is automatically applied and the application reloads, use the keyboard shortcut `CTRL + S` to save. Saving via `File` ➡ `Save` will not trigger the automatic reload. +> To ensure the API key is automatically applied and the application reloads, use the keyboard shortcut `CTRL + S` to save. Saving via `File` ➡ `Save` will not trigger the **automatic reload**. ![image](https://github.com/user-attachments/assets/6622d386-d73b-40bd-9fb5-7a5a429133a3) -4. You can now use the app! If you want to further enhance your experience and customize your prompts, press the `backtick` hotkey and select `Options` ➡ `Edit prompts` again. See [Editing prompts](#editing-prompts) for more info. +7. You can now use the app! +If you want to further enhance your experience and customize your prompts, press the `backtick` (or `Alt+Shift+o`) hotkey and select `Options` ➡ `Edit prompts` again. See [Editing prompts](#editing-prompts) for more info. + > [!NOTE] -> The app icon will appear in your system tray and will indicate that the script is running in the background. -> To terminate the script, right-click the icon and select `Exit`. +> The `robot icon` will appear in your system tray and will indicate that the Assistant is running in the background, ready to be called. +> ![image](https://github.com/user-attachments/assets/93fa2fed-3222-494a-974c-5a037cf7e60d) + +### Shutdown the Assistant -![image](https://github.com/user-attachments/assets/93fa2fed-3222-494a-974c-5a037cf7e60d) +To shutdown the assistant, **right-click** the yellow `robot icon` in the `tray bar` and select `Exit`. + +### Uninstall permanently the Assistant + +Delete the folder containing `LLM AutoHotkey Assistant.exe`. + +
## 🖱️ Usage -1. Highlight any text. -2. Press the `backtick` hotkey to bring up the prompt menu. -3. Select a prompt to process the text. -4. View and interact with the AI response in the Response Window. -5. If you want to use the `backtick` character, you can press `CapsLock + Backtick` to suspend and unsuspend the script. A message will be displayed at the bottom indicating that the app is suspended. +### How to use + +1. **Highlight** any **text** (this is optional if your prompt does not require you to input text) +2. **Press** `backtick` (or the `Alt+Shift+o`) hotkey to bring up the prompt menu : +3. Select a menu item : + - to display a **Custom prompt** window or + - to process the **selected text** + + > [!NOTE] + > Use `Ctrl + Click` on menu item to **switch** between : + > - **Custom prompt** mode and + > - **Process text** mode. + +4. OPTIONAL : According to your Prompt configuration, write your question or custom prompt : +![image](https://github.com/user-attachments/assets/951a3133-bf21-44e6-8959-b98ab26bbbb1) + +5. View the AI response in the `Response Window` and continue to **Chat** or **Paste and Close** the window as needed +> [!NOTE] +> `Alt + underlined character` of a Response Window button, is the **shortcut** key to activate a button. + +7. OPTIONAL : Select `Options` ➡ `View available models` to get available **model ids** on server, to customize your Prompt `APIModels` field. + +8. OPTIONAL : Select `Options` ➡ `Edit prompts` to add/customize your **Prompt Menu** items (in the `config/Prompts.ahk` file). + +> [!NOTE] +> You may **call** the Assistant **multiple times**, even if the previous response window is not yet opened or still open. +> A **new** `response window` will be created when the LLM response is received. + +### Suspend the Assistant + +If you want to use the `backtick` character, you can press `CapsLock + Backtick` (or `CapsLock + o`) to suspend and unsuspend the script. -![image](https://github.com/user-attachments/assets/e8611390-5fb3-4916-ac8f-774210b5a14d) +It can also be done through the `tray menu > Suspend Assistant` -### Hotkeys +> A message will be displayed at the bottom indicating that the app is suspended. +> ![image](https://github.com/user-attachments/assets/e8611390-5fb3-4916-ac8f-774210b5a14d) -- `Backtick`: Show prompt menu + +### Hotkeys Summary + +- `Backtick` or `Alt+Shift+o` : Show prompt menu - `Ctrl + S`: Will automatically save and reload the script when editing in Notepad (or any other editing tool that matches `LLM AutoHotkey Assistant.ahk` title window) - `CapsLock + backtick`: Suspend/resume hotkeys - `ESC`: Cancel ongoing requests @@ -140,6 +296,15 @@ https://github.com/user-attachments/assets/f960a7ef-9a6c-4217-8f86-44acfcea9122 - Chat with specific prompt - Response Window +### Get Help + +- **Right-click** the yellow `robot icon` in the `tray bar` and +- Select `Help > Open README.pdf`. + +
+ +## 🖱️ Configuration + ### Running the script at startup You can automatically run the script at startup by following the steps below: @@ -164,18 +329,25 @@ Edit the `prompts` array in the script to add your own prompts. prompts := [{ promptName: "Your Prompt Name", menuText: "&1 - Menu Text", + tags: ["", "&tag1", "&tag2"], + isCustomPrompt: true, + customPromptInitialMessage: "Initial message that will show on Custom Prompt window", + isCustomPromptCursorAtEnd: false, systemPrompt: "Your system prompt", - APIModels: "model-name", + APIModels: " + ( + perplexity/r1-1776:online, + openai/o3-mini-high:online, + anthropic/claude-3.7-sonnet:thinking:online, + google/gemini-2.0-flash-thinking-exp:free:online + )", copyAsMarkdown: true, isAutoPaste: true, - isCustomPrompt: true, - customPromptInitialMessage: "Initial message that will show on Custom Prompt window", - tags: ["&tag1", "&tag2"], skipConfirmation: true }] ``` -#### `promptName` +#### promptName The name of the prompt. This will also be shown in the tooltip, `Send message to`, `Activate`, `Minimize`, and `Close` menus. In addition, this will also show in the Response Window title together with the chosen API model. @@ -187,7 +359,10 @@ The name of the prompt. This will also be shown in the tooltip, `Send message to ![image](https://github.com/user-attachments/assets/cea6810f-1408-4d49-8a53-18d8bd334c46) -### `menuText` + +
+ +#### menuText The name of the prompt that will appear when your press the hotkey to bring up the menu. The ampersand (`&`) is a shortcut key and indicates that by pressing the character next to it after bringing up the menu, the prompt will be selected. @@ -196,7 +371,98 @@ The name of the prompt that will appear when your press the hotkey to bring up t > [!NOTE] > You can have duplicate shortcut keys for the prompts. Pressing the shortcut key will highlight the first prompt, and pressing the shortcut key again will highlight the second prompt. Pressing `Enter` afterwards will select the prompt and initiate the request. -#### `systemPrompt` +#### tags + +Enabling this feature will sort and group the prompts by their tags. + +For instance, the following configuration + +```autohotkey +prompts := [ + { + tags: [ + "&Custom prompts" + , "Multi-models" + , "---" + , "Text manipulation" + , "&Articles" + , "---" + , "Language" + , "Learning" + , "&Auto paste" + ] +}, { + promptName: "Multi-model custom prompt", + menuText: "&1 - Gemini, GPT-4o, Claude", + systemPrompt: "System prompt", + APIModels: "google/gemini-2.0-flash-thinking-exp:free, openai/gpt-4o, anthropic/claude-3.7-sonnet", + isCustomPrompt: true, + customPromptInitialMessage: "How can I leverage the power of AI in my everyday tasks?", + tags: ["&Custom prompts", "&Multi-models"] +}, { + promptName: "Auto-paste custom prompt", + menuText: "&5 - Auto-paste custom prompt", + systemPrompt: "You are a helpful assistant. Follow the instructions that I will provide or answer any questions that I will ask.", + APIModels: "google/gemini-2.0-flash-thinking-exp:free", + isCustomPrompt: true, + isAutoPaste: true, + tags: ["", "&Custom prompts", "&Auto paste"] +}] +``` +will show : + - Tag menus `&Custom prompts`, `Multi-models`, `Text manipulation`... + - in **this order**, due to the first **special prompt** containing **only one** `tags` field. + - "---" will add **menu separators** between menu items + (not yet shown on the picture below) + - Menu `&1 - Gemini, GPT-4o, Claude` in both sub menus : + - `&Custom prompts` and + - `&Multi-models` + - Menu `Auto-paste custom prompt` in both sub-menus : + - `&Custom prompts` and + - `&Auto paste` and + - as a **top direct menu** due to tag `""` + (not yet shown on the picture below) + +![image](https://github.com/user-attachments/assets/f6629513-35c4-4469-886d-480363c89214) + +![image](https://github.com/user-attachments/assets/8c931782-0937-4a10-a26a-2fe7f22272aa) + +#### isCustomPrompt + +Setting `isCustomPrompt: true` will allow the prompt to show an input box to write custom prompts. Remove this if you don't need Custom Prompt functionality. + +By maintaining **Ctrl** key and **clicking** on a menu item, this inverts the behavior of the `isCustomPrompt` flag for that specific call. + + +![image](https://github.com/user-attachments/assets/951a3133-bf21-44e6-8959-b98ab26bbbb1) + +**Ctrl+Enter** key will send the prompt to the LLM. + +#### customPromptInitialMessage + +An optional message that you can set to be displayed when the Custom Prompt window is shown. Remove this if you don't want to show a message whenever you open the Custom Prompt. + +![image](https://github.com/user-attachments/assets/7aeb1a40-8bbd-4aae-bf44-fb417c5f366c) + +https://github.com/user-attachments/assets/d8f70927-2544-4c8e-a856-b4569d89263e + +> [!TIP] +> You can also split a long message into a series of multiple lines. 
+> See [Splitting a long prompt into a series of multiple lines](#Splitting-a-long-prompt-into-a-series-of-multiple-lines) for more info. + +> [!IMPORTANT] +> Make sure to add a comma at the end of the line before the Auto Paste, Custom Prompt, `copyAsMarkdown`, etc. functionality: + +![image](https://github.com/user-attachments/assets/04ad392f-0f6d-45c0-b00b-00d0e4414109) + +#### isCustomPromptCursorAtEnd + +When **isCustomPromptCursorAtEnd** is : +- **true** : the cursor will be placed at the **end** (Not defined assumes **true**) +- **false** : the **whole text** will be **selected** allowing to esealy **overwrite** the text + + +#### systemPrompt This will be the initial prompt and will set the tone and context of the conversation. @@ -229,7 +495,7 @@ prompts := [{ ![image](https://github.com/user-attachments/assets/ebd22b64-0c49-4ce2-a5b7-c10dd0b9f2e0) -#### `APIModels` +#### APIModels The API model that will be used to process the prompt. @@ -292,17 +558,20 @@ Since this app uses [OpenRouter.ai](https://openrouter.ai/) service, you get acc > [!TIP] > Feeling overwhelmed by the number of models to choose from? Take a look at [OpenRouter.ai's ranking page](https://openrouter.ai/rankings) to discover the best models for each task. You can also find benchmarks across various models at [LiveBench.ai](https://livebench.ai/#/). + +
+ ##### Auto Router Your prompt will be processed by a meta-model and [routed to one of dozens of models](https://openrouter.ai/openrouter/auto), optimizing for the best possible output. To use it, just enter `openrouter/auto` in the `APIModel` field. -#### `copyAsMarkdown: true` +#### copyAsMarkdown Setting `copyAsMarkdown: true` will enable the `Copy` button in the Response Window to copy content in Markdown format. This is especially useful for responses that need markdown content such as codes for programming. If you’d rather copy the response as plain text or HTML-formatted text (default behavior), simply remove this setting. -#### `isAutoPaste: true` +#### isAutoPaste Setting `isAutoPaste: true` will automatically paste the model's response in Markdown format. Remove this if you don't need auto-paste functionality. @@ -316,60 +585,7 @@ Default behavior of copied content between `isAutoPaste: true` and `Copy`: | `Copy` button from the Response Window | HTML | | `isAutoPaste: true` | Markdown | -#### `isCustomPrompt: true` - -Setting `isCustomPrompt: true` will allow the prompt to show an input box to write custom prompts. Remove this if you don't need Custom Prompt functionality. - -![image](https://github.com/user-attachments/assets/951a3133-bf21-44e6-8959-b98ab26bbbb1) - -##### `customPromptInitialMessage` - -An optional message that you can set to be displayed when the Custom Prompt window is shown. Remove this if you don't want to show a message whenever you open the Custom Prompt. - -![image](https://github.com/user-attachments/assets/7aeb1a40-8bbd-4aae-bf44-fb417c5f366c) - -https://github.com/user-attachments/assets/d8f70927-2544-4c8e-a856-b4569d89263e - -> [!TIP] -> You can also split a long message into a series of multiple lines. -> See [Splitting a long prompt into a series of multiple lines](#Splitting-a-long-prompt-into-a-series-of-multiple-lines) for more info. - -> [!IMPORTANT] -> Make sure to add a comma at the end of the line before the Auto Paste, Custom Prompt, `copyAsMarkdown`, etc. functionality: - -![image](https://github.com/user-attachments/assets/04ad392f-0f6d-45c0-b00b-00d0e4414109) - -#### `tags` - -Enabling this feature will sort and group the prompts by their tags. - -For example, this will show `&1 - Gemini, GPT-4o, Claude` to both `&Custom prompts` and `&Multi-models` sub menus: - -```autohotkey -prompts := [{ - promptName: "Multi-model custom prompt", - menuText: "&1 - Gemini, GPT-4o, Claude", - systemPrompt: "System prompt", - APIModels: "google/gemini-2.0-flash-thinking-exp:free, openai/gpt-4o, anthropic/claude-3.7-sonnet", - isCustomPrompt: true, - customPromptInitialMessage: "How can I leverage the power of AI in my everyday tasks?", - tags: ["&Custom prompts", "&Multi-models"] -}, { - promptName: "Auto-paste custom prompt", - menuText: "&5 - Auto-paste custom prompt", - systemPrompt: "You are a helpful assistant. 
Follow the instructions that I will provide or answer any questions that I will ask.", - APIModels: "google/gemini-2.0-flash-thinking-exp:free", - isCustomPrompt: true, - isAutoPaste: true, - tags: ["&Custom prompts", "&Auto paste"] -}] -``` - -![image](https://github.com/user-attachments/assets/f6629513-35c4-4469-886d-480363c89214) - -![image](https://github.com/user-attachments/assets/8c931782-0937-4a10-a26a-2fe7f22272aa) - -#### `skipConfirmation: true` +#### skipConfirmation Setting `skipConfirmation: true` will skip confirmation messages when closing the following windows: @@ -378,6 +594,8 @@ Setting `skipConfirmation: true` will skip confirmation messages when closing th - Chat with specific prompt - Response Window +
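Putting the fields above together, a complete entry in `config/Prompts.ahk` could look like the sketch below (the prompt name, menu text, tags, wording, and model choice are only illustrative):

```autohotkey
prompts := [{
    promptName: "Proofread",
    menuText: "&9 - Proofread selection",
    tags: ["", "&Text manipulation"],          ; "" also lists it as a top-level menu item
    isCustomPrompt: true,                      ; open an input box for extra instructions
    customPromptInitialMessage: "Focus on grammar and tone.",
    isCustomPromptCursorAtEnd: true,           ; keep the cursor after the initial message
    systemPrompt: "Proofread the following text and respond with the corrected text only:",
    APIModels: "
    (
        google/gemini-2.0-flash-thinking-exp:free
    )",
    copyAsMarkdown: true,                      ; Copy button returns Markdown
    isAutoPaste: true,                         ; paste the response automatically
    skipConfirmation: true                     ; close windows without a confirmation dialog
}]
```

Note that `isAutoPaste` is automatically disabled whenever `APIModels` lists more than one model.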
+ ## 📣 Share prompts and settings Do you have prompts and settings you'd like to share? [Check here](https://github.com/kdalanon/LLM-AutoHotkey-Assistant/discussions/7) to share your prompts! @@ -412,6 +630,8 @@ These files will be created after you select a prompt and will be deleted when a - Pressing the `ESC` key _after_ selecting a prompt but _before_ receiving the model's response (for example, if the Response Window has not yet opened) - Closing the Response Window +
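With the file-naming scheme used by the main script, the temporary files for a single request might look like the following (the timestamp, prompt name, and model shown are placeholders):

```
%TEMP%\LLM_Ahk_123456789_Rephrase_gemini-2.0-flash-thinking-exp_free_1_responseWindowData.json
%TEMP%\LLM_Ahk_123456789_Rephrase_gemini-2.0-flash-thinking-exp_free_2_chatHistoryJSONRequest.json
%TEMP%\LLM_Ahk_123456789_Rephrase_gemini-2.0-flash-thinking-exp_free_3_cURLCommand.bat
%TEMP%\LLM_Ahk_123456789_Rephrase_gemini-2.0-flash-thinking-exp_free_4_cURLOutput.json
```

The shared prefix keeps the files belonging to one request sorted next to each other in the temp folder.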
+ ## 💬 Frequently-asked questions ### Can I use my Anthropic/OpenAI/Google/Other provider's API? @@ -444,9 +664,32 @@ See [OpenRouter's documentation](https://openrouter.ai/docs/api-reference/limits ### Can I connect it with my local AI? -I'm uncertain if it will work, as I don't have a local AI setup on my machine to test it myself. However, it's highly likely to work if your local AI uses the same format as the `OpenAI SDK`. OpenRouter relies on the `OpenAI SDK` for request processing. I followed the [OpenRouter documentation](https://openrouter.ai/docs/quickstart) to configure the app to connect to their API. +Yes ! + +You just have to uncomment and configure the `gLLM_BASE_URL` variable in the `config/Preferences.ahk` file to point to your local AI server' + +You can also customize the icon of your local AI server by adding the following lines to the `config/Preferences.ahk` file : + +```autohotkey +ICON_XXX := 18 + +gMapIconNb2IconPath.Set(ICON_XXX, "icons\xxx.ico") +``` + +where `xxx` is the name of : +- your icon +- the prefix of your model in the APIModels section of your prompts : + +```autohotkey + APIModels: " + ( + xxx/mymodel + )" +``` + +You can add other icons by adding more lines like the one above. +Don't forget to increment the icon number (`ICON_XXX`) for each new icon you add. -To understand how the app sends and receives requests through the OpenRouter API, open the `Config.ahk` file in the `lib` folder. If you successfully set up the app to connect to your local LLM, please let me know, and I will update this information. ### Can I run the app using a portable installation of AutoHotkey? @@ -468,6 +711,8 @@ Thanks to [@WhazZzZzup25](https://github.com/kdalanon/LLM-AutoHotkey-Assistant/i Check out their [documentation](https://openrouter.ai/docs/quickstart) to learn more about their service. +
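As a concrete sketch of the local-AI answer above, `config/Preferences.ahk` could be edited as follows (the URL, the icon slot `18`, and the `local` prefix are placeholder assumptions, not defaults):

```autohotkey
; config/Preferences.ahk - hypothetical local LLM setup (all values are placeholders)

; Point the Assistant at a local, OpenAI-compatible endpoint instead of OpenRouter
gLLM_BASE_URL := "http://localhost:1234"

; Optional: register an icon whose name matches the model prefix used in your prompts ("local" here)
ICON_LOCAL := 18
gMapIconNb2IconPath.Set(ICON_LOCAL, "icons\local.ico")
```

A prompt would then reference the server with an entry such as `APIModels: "local/mymodel"`.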
+ ## ✅ Features planned on future releases - Timestamp messages in Chat History diff --git a/README.pdf b/README.pdf new file mode 100644 index 0000000..6b825d2 Binary files /dev/null and b/README.pdf differ diff --git a/Response Window resources/js/main.js b/Response Window resources/js/main.js deleted file mode 100644 index 1b814af..0000000 --- a/Response Window resources/js/main.js +++ /dev/null @@ -1,174 +0,0 @@ -window.chrome.webview.addEventListener('message', handleWebMessage); - -// Initialize markdown-it with options -var md = window.markdownit({ - html: true, // Enable HTML tags in source - linkify: true, // Autoconvert URL-like text to links - typographer: true, // Enable smartypants and other sweet transforms - highlight: function (str, lang) { - if (lang && hljs.getLanguage(lang)) { - try { - return '
<pre class="hljs"><code>' +
-          hljs.highlight(str, { language: lang, ignoreIllegals: true }).value +
-          '</code></pre>
'; - } catch (__) { } - } - return '<pre class="hljs"><code>
' + md.utils.escapeHtml(str) + '</code></pre>
'; - } -}) - .use(window.texmath, { // Use texmath plugin for mathematical expressions - engine: window.katex, - delimiters: 'dollars', - katexOptions: { macros: { "\\RR": "\\mathbb{R}" } } - }); - -function renderMarkdown(content, ChatHistoryText) { - // Define the content to render. Use the provided content or a default message - var contentToRender = content || 'There is no content available.'; - - // Save the pre-markdown text in localStorage for reloading later - localStorage.setItem('preMarkdownText', contentToRender); - - // Render the markdown content - var result = md.render(contentToRender); - - // Inject the rendered HTML into the target element - var contentElement = document.getElementById('content'); - contentElement.innerHTML = result; - - // Scroll to the top - contentElement.scrollTo(0, 0); - - // If ChatHistoryText, change button text to Chat History. Used by ShowResponseWindow in AutoHotkey - var button = document.getElementById("chatHistoryButton"); - if (ChatHistoryText) { - button.textContent = "Chat History"; - } -} - -function responseWindowCopyButtonAction(copyAsMarkdown) { - // Get the button element by its id - var button = document.getElementById('copyButton'); - - // Store the original button text - var originalText = button.innerHTML; - - if (copyAsMarkdown) { - // If copyAsMarkdown is true, just update the button without copying - button.innerHTML = 'Copied!'; - button.disabled = true; - - // After 2 seconds, restore the original text and enable the button - setTimeout(function () { - button.innerHTML = originalText; - button.disabled = false; - }, 2000); - } else { - // Get the 'content' element - var contentElement = document.getElementById('content'); - - // Create a temporary element to hold the formatted content - const tempElement = document.createElement('div'); - tempElement.innerHTML = contentElement.innerHTML; - - // Use the Clipboard API to write the HTML content to the clipboard - navigator.clipboard.write([ - new ClipboardItem({ - 'text/html': new Blob([tempElement.innerHTML], { type: 'text/html' }), - 'text/plain': new Blob([contentElement.innerText], { type: 'text/plain' }) - }) - ]).then(() => { - // Change button text to "Copied!" 
and disable the button - button.innerHTML = 'Copied!'; - button.disabled = true; - - // After 2 seconds, restore the original text and enable the button - setTimeout(function () { - button.innerHTML = originalText; - button.disabled = false; - }, 2000); - }); - } -} - -// Enables or disables the buttons and resets the cursor -function responseWindowButtonsEnabled(enable) { - // Array of button IDs - var buttonIds = ["chatButton", "copyButton", "retryButton", "chatHistoryButton"]; - - // Iterate over each ID in the array - buttonIds.forEach(function (id) { - // Get the button element by ID - var button = document.getElementById(id); - - // Check if the button exists to avoid errors - if (button) { - // Toggle the 'disabled' property using the ternary operator - button.disabled = !enable; - } - }); - // Resets cursor - document.body.style.cursor = 'auto'; -} - -function handleWebMessage(event) { - try { - // Name incoming data - const message = event.data; - - // Check if data is an array for multiple parameters - if (Array.isArray(message.data)) { - if (typeof window[message.target] === 'function') { - window[message.target](...message.data); - } else { - console.error(`Function "${message.target}" does not exist.`); - } - } else { - // Existing single parameter handling - if (typeof window[message.target] === 'function') { - window[message.target](message.data); - } else { - console.error(`Function "${message.target}" does not exist.`); - } - } - } catch (error) { - console.error("Error handling incoming message:", error); - } -} - -// Toggle text button between Chat History and Latest Response -function toggleButtonText(ChatHistoryText) { - var button = document.getElementById('chatHistoryButton'); - if (button.textContent === "Chat History") { - button.textContent = "Latest Response"; - } else if (button.textContent === "Latest Response") { - button.textContent = "Chat History"; - // Scroll to the top when displaying chat history - document.getElementById('content').scrollTo(0, 0); - } else if (ChatHistoryText) { - button.textContent = "Chat History"; - } - - // Store the button text in localStorage so it persists across refreshes - localStorage.setItem('chatHistoryButtonText', button.textContent); -} - -// Store button text before page refresh -window.addEventListener("beforeunload", function () { - var button = document.getElementById("chatHistoryButton"); - if (button) { - localStorage.setItem("chatHistoryButtonText", button.textContent); - } -}); - -// Call renderMarkdown when the DOM is ready -document.addEventListener("DOMContentLoaded", function () { - // Retrieve pre-markdown text from localStorage and re-render - var storedContent = localStorage.getItem('preMarkdownText'); - renderMarkdown(storedContent); - - // Retrieve the button text from localStorage - var storedButtonText = localStorage.getItem("chatHistoryButtonText"); - var button = document.getElementById("chatHistoryButton"); - button.textContent = storedButtonText; -}); \ No newline at end of file diff --git a/config/Preferences.ahk b/config/Preferences.ahk new file mode 100644 index 0000000..4156f47 --- /dev/null +++ b/config/Preferences.ahk @@ -0,0 +1,22 @@ +#Requires AutoHotkey v2.0 + +; +; Global Variables +; + +; ---------------------------------------------------- +; Base URL for the LLM API endpoint +; ---------------------------------------------------- + +; If not defined, OpenRouter API endpoint will be used +; gLLM_BASE_URL := "http://xxx.fr:yyyy" + +; ---------------------------------------------------- +; 
Add custom icons +; ---------------------------------------------------- + +; ICON_XXX := 18 + +; gMapIconNb2IconPath.Set(ICON_XXX, "icons\xxx.ico") + + diff --git a/Prompts.ahk b/config/Prompts.ahk similarity index 92% rename from Prompts.ahk rename to config/Prompts.ahk index c861532..fc9e283 100644 --- a/Prompts.ahk +++ b/config/Prompts.ahk @@ -1,134 +1,156 @@ -; ---------------------------------------------------- -; OpenRouter API Key -; ---------------------------------------------------- - -APIKey := "Your API Key here" - -; ---------------------------------------------------- -; Prompts -; ---------------------------------------------------- - -prompts := [{ - promptName: "Multi-model custom prompt", - menuText: "&1 - Gemini, GPT-4o, Claude", - systemPrompt: "You are a helpful assistant. Follow the instructions that I will provide or answer any questions that I will ask. My first query is the following:", - APIModels: " - ( - google/gemini-2.0-flash-thinking-exp:free, - openai/gpt-4o, - anthropic/claude-3.7-sonnet - )", - isCustomPrompt: true, - customPromptInitialMessage: "How can I leverage the power of AI in my everyday tasks?", - tags: ["&Custom prompts", "&Multi-models"] -}, { - promptName: "Rephrase", - menuText: "&1 - Rephrase", - systemPrompt: "Your task is to rephrase the following text or paragraph in English to ensure clarity, conciseness, and a natural flow. If there are abbreviations present, expand it when it's used for the first time, like so: OCR (Optical Character Recognition). The revision should preserve the tone, style, and formatting of the original text. If possible, split it into paragraphs to improve readability. Additionally, correct any grammar and spelling errors you come across. You should also answer follow-up questions if asked. Respond with the rephrased text only:", - APIModels: " - ( - google/gemini-2.0-flash-thinking-exp:free - )", - tags: ["&Text manipulation"] -}, { - promptName: "Summarize", - menuText: "&2 - Summarize", - systemPrompt: "Your task is to summarize the following article in English to ensure clarity, conciseness, and a natural flow. If there are abbreviations present, expand it when it's used for the first time, like so: OCR (Optical Character Recognition). The summary should preserve the tone, style, and formatting of the original text, and should be in its original language. If possible, split it into paragraphs to improve readability. Additionally, correct any grammar and spelling errors you come across. You should also answer follow-up questions if asked. Respond with the rephrased text only:", - APIModels: " - ( - google/gemini-2.0-flash-thinking-exp:free - )", - tags: ["&Text manipulation", "&Articles"] -}, { - promptName: "Translate to English", - menuText: "&3 - Translate to English", - systemPrompt: "Generate an English translation for the following text or paragraph, ensuring the translation accurately conveys the intended meaning or idea without excessive deviation. If there are abbreviations present, expand it when it's used for the first time, like so: OCR (Optical Character Recognition). The translation should preserve the tone, style, and formatting of the original text. If possible, split it into paragraphs to improve readability. Additionally, correct any grammar and spelling errors you come across. You should also answer follow-up questions if asked. 
Respond with the rephrased text only:", - APIModels: " - ( - google/gemini-2.0-flash-thinking-exp:free - )", - tags: ["&Text manipulation", "Language"] -}, { - promptName: "Define", - menuText: "&4 - Define", - systemPrompt: "Provide and explain the definition of the following, providing analogies if needed. In addition, answer follow-up questions if asked:", - APIModels: " - ( - google/gemini-2.0-flash-thinking-exp:free - )", - tags: ["&Text manipulation", "Learning"] -}, { - promptName: "Auto-paste custom prompt", - menuText: "&5 - Auto-paste custom prompt", - systemPrompt: "You are a helpful assistant. Follow the instructions that I will provide or answer any questions that I will ask.", - APIModels: " - ( - google/gemini-2.0-flash-thinking-exp:free - )", - isCustomPrompt: true, - isAutoPaste: true, - tags: ["&Custom prompts", "&Auto paste"] -}, { - promptName: "Web search", - menuText: "&6 - Web search", - systemPrompt: "Provide the latest information and answer follow-up questions that I will ask. My first query is the following:", - APIModels: " - ( - google/gemini-2.0-flash-thinking-exp:free:online - )", - tags: ["&Web search", "Learning"] -}, { - promptName: "Web search custom prompt", - menuText: "&7 - Web search custom prompt", - systemPrompt: "Provide the latest information and answer follow-up questions that I will ask. My first query is the following:", - APIModels: " - ( - google/gemini-2.0-flash-thinking-exp:free:online - )", - isCustomPrompt: true, - tags: ["&Web search", "&Custom prompts"] -}, { - promptName: "Deep thinking multi-model custom prompt", - menuText: "&1 - Deep thinking multi-model custom prompt", - systemPrompt: "You are a helpful assistant. Follow the instructions that I will provide or answer any questions that I will ask. My first query is the following:", - APIModels: " - ( - perplexity/r1-1776, - openai/o3-mini-high, - anthropic/claude-3.7-sonnet:thinking, - google/gemini-2.0-flash-thinking-exp:free - )", - isCustomPrompt: true, - customPromptInitialMessage: "This is a message template." -}, { - promptName: "Deep thinking multi-model web search custom prompt", - menuText: "&2 - Deep thinking multi-model custom prompt web search", - systemPrompt: "Provide information about the following. In addition, answer follow-up questions that I will ask or follow any instructions that I may provide:", - APIModels: " - ( - perplexity/r1-1776:online, - openai/o3-mini-high:online, - anthropic/claude-3.7-sonnet:thinking:online, - google/gemini-2.0-flash-thinking-exp:free:online - )", - isCustomPrompt: true -}, { - promptName: "Multi-line prompt example", - menuText: "Multi-line prompt example", - systemPrompt: " - ( - This prompt is broken down into multiple lines. - - Here is the second sentence. - - And the third one. - - As long as the prompt is inside the quotes and the opening and closing parenthesis, - - it will be valid. 
- )", - APIModels: " - ( - google/gemini-2.0-flash-thinking-exp:free - )" +; ---------------------------------------------------- +; Custom Shortcut +; ---------------------------------------------------- + +!+o:: mainScriptHotkeyActions("showPromptMenu") + +#SuspendExempt +CapsLock & o:: mainScriptHotkeyActions("suspendHotkey") + +; ---------------------------------------------------- +; OpenRouter API Key +; ---------------------------------------------------- + +APIKey := "Your API Key here" + +; ---------------------------------------------------- +; Prompts +; ---------------------------------------------------- + +prompts := [ +{ + tags: [ + "&Custom prompts" + , "Multi-models" + , "---" + , "Text manipulation" + , "&Articles" + , "---" + , "Language" + , "Learning" + , "&Auto paste" + ] +}, { + promptName: "Multi-model custom prompt", + menuText: "&1 - Gemini, GPT-4o, Claude", + systemPrompt: "You are a helpful assistant. Follow the instructions that I will provide or answer any questions that I will ask. My first query is the following:", + APIModels: " + ( + google/gemini-2.0-flash-thinking-exp:free, + openai/gpt-4o, + anthropic/claude-3.7-sonnet + )", + isCustomPrompt: true, + customPromptInitialMessage: "How can I leverage the power of AI in my everyday tasks?", + tags: ["&Custom prompts", "&Multi-models"] +}, { + promptName: "Rephrase", + menuText: "&1 - Rephrase", + systemPrompt: "Your task is to rephrase the following text or paragraph in English to ensure clarity, conciseness, and a natural flow. If there are abbreviations present, expand it when it's used for the first time, like so: OCR (Optical Character Recognition). The revision should preserve the tone, style, and formatting of the original text. If possible, split it into paragraphs to improve readability. Additionally, correct any grammar and spelling errors you come across. You should also answer follow-up questions if asked. Respond with the rephrased text only:", + APIModels: " + ( + google/gemini-2.0-flash-thinking-exp:free + )", + tags: ["&Text manipulation"] +}, { + promptName: "Summarize", + menuText: "&2 - Summarize", + systemPrompt: "Your task is to summarize the following article in English to ensure clarity, conciseness, and a natural flow. If there are abbreviations present, expand it when it's used for the first time, like so: OCR (Optical Character Recognition). The summary should preserve the tone, style, and formatting of the original text, and should be in its original language. If possible, split it into paragraphs to improve readability. Additionally, correct any grammar and spelling errors you come across. You should also answer follow-up questions if asked. Respond with the rephrased text only:", + APIModels: " + ( + google/gemini-2.0-flash-thinking-exp:free + )", + tags: ["&Text manipulation", "&Articles"] +}, { + promptName: "Translate to English", + menuText: "&3 - Translate to English", + systemPrompt: "Generate an English translation for the following text or paragraph, ensuring the translation accurately conveys the intended meaning or idea without excessive deviation. If there are abbreviations present, expand it when it's used for the first time, like so: OCR (Optical Character Recognition). The translation should preserve the tone, style, and formatting of the original text. If possible, split it into paragraphs to improve readability. Additionally, correct any grammar and spelling errors you come across. You should also answer follow-up questions if asked. 
Respond with the rephrased text only:", + APIModels: " + ( + google/gemini-2.0-flash-thinking-exp:free + )", + tags: ["&Text manipulation", "Language"] +}, { + promptName: "Define", + menuText: "&4 - Define", + systemPrompt: "Provide and explain the definition of the following, providing analogies if needed. In addition, answer follow-up questions if asked:", + APIModels: " + ( + google/gemini-2.0-flash-thinking-exp:free + )", + tags: ["&Text manipulation", "Learning"] +}, { + promptName: "Auto-paste custom prompt", + menuText: "&5 - Auto-paste custom prompt", + systemPrompt: "You are a helpful assistant. Follow the instructions that I will provide or answer any questions that I will ask.", + APIModels: " + ( + google/gemini-2.0-flash-thinking-exp:free + )", + isCustomPrompt: true, + isAutoPaste: true, + tags: ["&Custom prompts", "&Auto paste"] +}, { + promptName: "Web search", + menuText: "&6 - Web search", + systemPrompt: "Provide the latest information and answer follow-up questions that I will ask. My first query is the following:", + APIModels: " + ( + google/gemini-2.0-flash-thinking-exp:free:online + )", + tags: ["&Web search", "Learning"] +}, { + promptName: "Web search custom prompt", + menuText: "&7 - Web search custom prompt", + systemPrompt: "Provide the latest information and answer follow-up questions that I will ask. My first query is the following:", + APIModels: " + ( + google/gemini-2.0-flash-thinking-exp:free:online + )", + isCustomPrompt: true, + tags: ["&Web search", "&Custom prompts"] +}, { + promptName: "Deep thinking multi-model custom prompt", + menuText: "&1 - Deep thinking multi-model custom prompt", + systemPrompt: "You are a helpful assistant. Follow the instructions that I will provide or answer any questions that I will ask. My first query is the following:", + APIModels: " + ( + perplexity/r1-1776, + openai/o3-mini-high, + anthropic/claude-3.7-sonnet:thinking, + google/gemini-2.0-flash-thinking-exp:free + )", + isCustomPrompt: true, + customPromptInitialMessage: "This is a message template." +}, { + promptName: "Deep thinking multi-model web search custom prompt", + menuText: "&2 - Deep thinking multi-model custom prompt web search", + systemPrompt: "Provide information about the following. In addition, answer follow-up questions that I will ask or follow any instructions that I may provide:", + APIModels: " + ( + perplexity/r1-1776:online, + openai/o3-mini-high:online, + anthropic/claude-3.7-sonnet:thinking:online, + google/gemini-2.0-flash-thinking-exp:free:online + )", + isCustomPrompt: true +}, { + promptName: "Multi-line prompt example", + menuText: "Multi-line prompt example", + systemPrompt: " + ( + This prompt is broken down into multiple lines. + + Here is the second sentence. + + And the third one. + + As long as the prompt is inside the quotes and the opening and closing parenthesis, + + it will be valid. 
+ )", + APIModels: " + ( + google/gemini-2.0-flash-thinking-exp:free + )" }] \ No newline at end of file diff --git a/lib/Config.ahk b/lib/Config.ahk index 76933bc..934b229 100644 --- a/lib/Config.ahk +++ b/lib/Config.ahk @@ -1,5 +1,54 @@ #Requires AutoHotkey v2.0.18+ -#Include ..\Prompts.ahk + +;****************************************************************************** +; +; Global Variables and Configuration +; +;****************************************************************************** + +global gVersion := "2.1.0" +global gVersionStringPrefix := "" +global gVersionStringSuffix := ": (LLM AutoHotkey Assistant " gVersion ")" + +; By default use OpenRouter API endpoint +global gLLM_BASE_URL := "https://openrouter.ai/api" + +global APIKey := "none" + +; +; Icons mapping numbers to paths +; + +ICON_ON := 10 +ICON_OFF := 11 +ICON_ANTHROPIC := 12 +ICON_DEEPSEEK := 13 +ICON_GOOGLE := 14 +ICON_OPENAI := 15 +ICON_OPENROUTER := 16 +ICON_PERPLEXITY := 17 + +gMapIconNb2IconPath := Map( + ICON_ON, "icons\IconOn.ico" + ,ICON_OFF, "icons\IconOff.ico" + ,ICON_ANTHROPIC, "icons\anthropic.ico" + ,ICON_DEEPSEEK, "icons\deepseek.ico" + ,ICON_GOOGLE, "icons\google.ico" + ,ICON_OPENAI, "icons\openai.ico" + ,ICON_OPENROUTER, "icons\openrouter.ico" + ,ICON_PERPLEXITY, "icons\perplexity.ico" + ) + + +; Include the Preferences file only if it exists => *i +#Include "*i ..\config\Preferences.ahk" + +;****************************************************************************** +; +; Includes and Libraries +; +;****************************************************************************** + #Include Dark_MsgBox.ahk ; Enables dark mode MsgBox and InputBox. Remove this if you want light mode MsgBox and InputBox #Include Dark_Menu.ahk ; Enables dark mode Menu. Remove this if you want light mode Menu #Include SystemThemeAwareToolTip.ahk ; Enables dark mode tooltips. Remove this if you want light mode tooltips @@ -9,22 +58,112 @@ #Include ToolTipEx.ahk ; Enables the tooltip to track the mouse cursor smoothly and permit the tooltip to be moved by dragging DetectHiddenWindows true ; Enables detection of hidden windows for inter-process communication +; ---------------------------------------------------- +; Constants +; ---------------------------------------------------- + +; WARNING : Do not forget to update LLM AutoHotkey Assistant.ahk according to following numbers + +getIconNb(iconName) { + + iconNb := -1 + + For tmpIconNb, iconPath in gMapIconNb2IconPath { + + if (InStr(iconPath, iconName, false) > 0) { + ; Icon found + + iconNb := tmpIconNb + + break + } else { + ; Icon not found + + ; Continue to look for + } + } ; for + + return iconNb +} + +; ---------------------------------------------------- +; Function to get selected text +; ---------------------------------------------------- +getSelectedText() { + + ; Backup clipboard before copying + ClipSaved := ClipboardAll() ; Save the entire clipboard to a variable of your choice. + + ; Copy of the selected text in the clipboard + A_Clipboard := "" + Send("^c") + + if !ClipWait(1) { + ; Clipboard did not receive any text within 1 second. + + selectedText := "" + + } else { + + selectedText := A_Clipboard + } + + ; Restore clipboard + A_Clipboard := ClipSaved ; Restore the original clipboard. Note the use of A_Clipboard (not ClipboardAll). + ClipSaved := "" ; Free the memory in case the clipboard was very large. + + ; Return the selected text + return selectedText +} + +getRessourcePath(ressourcePath) { + + Return (!A_IsCompiled) + ? 
+ ; The script is not compiled, run the script directly with AHK path + A_WorkingDir '\' ressourcePath + : + ; The script is compiled, run the script with /script and "*" to tell that the script is embedded in the executable + "*" ressourcePath +} + +TraySetIconEmbed(iconNb) { + + if (!A_IsCompiled) { + ; Script mode + + ; Use icon's relative path + TraySetIcon((A_ScriptName = "Response Window.ahk" ? "..\" : "") gMapIconNb2IconPath[iconNb]) + + } else { + ; Compiled mode + + ; Use negative icon's Number + ; (see ahk v2 documentation : "If negative, the absolute value is assumed to be the resource ID of an icon within an executable file") + TraySetIcon(A_ScriptFullPath, -1 * iconNb) + } +} + ; ---------------------------------------------------- ; OpenRouter ; ---------------------------------------------------- class OpenRouter { - static cURLCommand := - 'cURL.exe -s -X POST https://openrouter.ai/api/v1/chat/completions ' - . '-H "Authorization: Bearer {1}" ' - . '-H "HTTP-Referer: https://github.com/kdalanon/LLM-AutoHotkey-Assistant" ' - . '-H "X-Title: LLM AutoHotkey Assistant" ' - . '-H "Content-Type: application/json" ' - . '-d @"{2}" ' - . '-o "{3}"' - - __New(APIKey) { + + __New(llmBaseUrl, APIKey) { + this.APIKey := APIKey + + this.cURLCommand := + 'cURL.exe ' llmBaseUrl '/v1/chat/completions' + . ' --silent' + . ' --request POST' + . ' -H "Authorization: Bearer {1}"' + . ' -H "HTTP-Referer: https://github.com/kdalanon/LLM-AutoHotkey-Assistant"' + . ' -H "X-Title: LLM AutoHotkey Assistant"' + . ' -H "Content-Type: application/json"' + . ' --data @"{2}"' + . ' --output "{3}"' } createJSONRequest(APIModel, systemPrompt, userPrompt) { @@ -68,7 +207,7 @@ class OpenRouter { FileOpen(chatHistoryJSONRequestFile, "w", "UTF-8-RAW").Write(chatHistoryJSONRequest) } - getMessages(obj) { + getChatHistoryMessages(obj) { messages := [] for i in obj["messages"] { messages.Push({ @@ -90,15 +229,20 @@ class OpenRouter { } buildcURLCommand(chatHistoryJSONRequestFile, cURLOutputFile) { - return Format(OpenRouter.cURLCommand, this.APIKey, chatHistoryJSONRequestFile, cURLOutputFile) + return Format(this.cURLCommand, this.APIKey, chatHistoryJSONRequestFile, cURLOutputFile) } } ; ---------------------------------------------------- -; Input Window +; Input Window (for User Prompt, Send Message to All, Send Message to Prompt) ; ---------------------------------------------------- class InputWindow { + + static cMSGBOX_BTN_YES_NO := 0x4 + static cMSGBOX_ICON_EXCLAMATION := 0x30 + static cMSGBOX_WARNING := InputWindow.cMSGBOX_BTN_YES_NO | InputWindow.cMSGBOX_ICON_EXCLAMATION + __New(windowTitle, skipConfirmation := false) { this.inputWindowSkipConfirmation := skipConfirmation @@ -108,11 +252,11 @@ class InputWindow { this.guiObj.OnEvent("Escape", this.closeButtonAction.Bind(this)) this.guiObj.OnEvent("Size", this.resizeAction.Bind(this)) this.guiObj.BackColor := "0x212529" - this.guiObj.SetFont("s14 cWhite", "Cambria") + this.guiObj.SetFont("s14 cWhite", "Arial") ; Add controls this.EditControl := this.guiObj.Add("Edit", "x20 y+5 w500 h250 Background0x212529") - this.SendButton := this.guiObj.Add("Button", "x240 y+10 w80", "Send") + this.SendButton := this.guiObj.Add("Button", "x220 y+10 w80", "Send (Ctrl+Enter)") ; Apply dark mode to title bar ; Reference: https://www.autohotkey.com/boards/viewtopic.php?p=422034#p422034 @@ -124,34 +268,87 @@ class InputWindow { } } - showInputWindow(message := "", title := unset, windowID := unset) { + showInputWindow(message := "", title := unset, windowID := unset, 
isCustomPromptCursorAtEnd := true) { + this.EditControl.Value := message + if IsSet(title) { this.guiObj.Title := title } this.EditControl.Focus() this.guiObj.Show("AutoSize") + if IsSet(windowID) { - ControlSend("^{End}", "Edit1", windowID) + + if (isCustomPromptCursorAtEnd) { + ; Move the cursor to the end of the text + + ; Select the whole text in the Edit control of the InputWindow + ControlSend("^{End}", "Edit1", windowID) + + + } else { + ; Select the whole text + + ; Select the whole text in the Edit control of the InputWindow + ControlSend("^+{End}", "Edit1", windowID) + } } } validateInputAndHide(*) { - if !this.EditControl.Value { - MsgBox "Please enter a message or close the window.", "No text entered", "IconX" - return false + + ; Get the selected text + selectedText := getSelectedText() + + if (selectedText == "") { + ; No selected text + + if !this.EditControl.Value { + ; Edit control is also empty + + ; Show error message and prevent closing the window + MsgBox "Please enter a message or close the window.", "No text entered", "IconX" + return false + + } else { + ; Edit control is not empty + + ; Put Edit value in Clipboard in order to be able to use it another time + ; if you use the Windows multiple clipboard + A_Clipboard := this.EditControl.Value } } + + ; Either clipboard or edit control has content + + ; Hide the input window this.guiObj.Hide return true } - sendButtonAction(functionToCall) { - this.SendButton.OnEvent("Click", functionToCall.Bind(this)) + registerSendButtonAction(functionToCall) { + + boundFunc := functionToCall.Bind(this) + + ; On click, call the function passed as a parameter + this.SendButton.OnEvent("Click", boundFunc) + + try { + ; Restrict the hotkey defined just below to this InputWindow only + HotIfWinActive "ahk_id " this.guiObj.Hwnd + + ; Bind the Ctrl+Enter hotkey to the same action as clicking the Send button + Hotkey "^Enter", boundFunc + + } catch as e { + MsgBox "Error setting hotkey: " e.Message + } } closeButtonAction(*) { - if this.inputWindowSkipConfirmation || (MsgBox("Close " this.guiObj.Title " window?", this.guiObj.Title, 308) = "Yes") { + if this.inputWindowSkipConfirmation || (MsgBox("Close " this.guiObj.Title " window?", this.guiObj.Title, InputWindow.cMSGBOX_WARNING) = "Yes") { this.EditControl.Value := "" this.guiObj.Hide return diff --git a/Response Window resources/Bootstrap/bootstrap.bundle.min.js b/lib/Response Window resources/Bootstrap/bootstrap.bundle.min.js similarity index 100% rename from Response Window resources/Bootstrap/bootstrap.bundle.min.js rename to lib/Response Window resources/Bootstrap/bootstrap.bundle.min.js diff --git a/Response Window resources/Bootstrap/bootstrap.min.css b/lib/Response Window resources/Bootstrap/bootstrap.min.css similarity index 100% rename from Response Window resources/Bootstrap/bootstrap.min.css rename to lib/Response Window resources/Bootstrap/bootstrap.min.css diff --git a/Response Window resources/Bootstrap/color-modes.js b/lib/Response Window resources/Bootstrap/color-modes.js similarity index 100% rename from Response Window resources/Bootstrap/color-modes.js rename to lib/Response Window resources/Bootstrap/color-modes.js diff --git a/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.eot b/lib/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.eot similarity index 100% rename from Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.eot rename to lib/Response Window 
resources/Bootstrap/fonts/glyphicons-halflings-regular.eot diff --git a/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.svg b/lib/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.svg similarity index 100% rename from Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.svg rename to lib/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.svg diff --git a/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.ttf b/lib/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.ttf similarity index 100% rename from Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.ttf rename to lib/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.ttf diff --git a/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.woff b/lib/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.woff similarity index 100% rename from Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.woff rename to lib/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.woff diff --git a/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.woff2 b/lib/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.woff2 similarity index 100% rename from Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.woff2 rename to lib/Response Window resources/Bootstrap/fonts/glyphicons-halflings-regular.woff2 diff --git a/Response Window resources/Bootstrap/sidebars.css b/lib/Response Window resources/Bootstrap/sidebars.css similarity index 100% rename from Response Window resources/Bootstrap/sidebars.css rename to lib/Response Window resources/Bootstrap/sidebars.css diff --git a/Response Window resources/Bootstrap/sidebars.js b/lib/Response Window resources/Bootstrap/sidebars.js similarity index 100% rename from Response Window resources/Bootstrap/sidebars.js rename to lib/Response Window resources/Bootstrap/sidebars.js diff --git a/Response Window resources/css/custom.css b/lib/Response Window resources/css/custom.css similarity index 85% rename from Response Window resources/css/custom.css rename to lib/Response Window resources/css/custom.css index 51a80f6..0d0bf9b 100644 --- a/Response Window resources/css/custom.css +++ b/lib/Response Window resources/css/custom.css @@ -1,5 +1,5 @@ body { - font-family: "TeX Gyre Pagella", "Latin Modern Roman 12", "Source Serif Pro", Cambria, Cochin, Georgia, Times, "Times New Roman", serif; + font-family: "Arial", "TeX Gyre Pagella", "Latin Modern Roman 12", "Source Serif Pro", Cambria, Cochin, Georgia, Times, "Times New Roman", serif; color: white; background-color: #212529; font-size: large; diff --git a/Response Window resources/css/highlight/atom-one-dark.min.css b/lib/Response Window resources/css/highlight/atom-one-dark.min.css similarity index 100% rename from Response Window resources/css/highlight/atom-one-dark.min.css rename to lib/Response Window resources/css/highlight/atom-one-dark.min.css diff --git a/Response Window resources/css/katex.min.css b/lib/Response Window resources/css/katex.min.css similarity index 100% rename from Response Window resources/css/katex.min.css rename to lib/Response Window resources/css/katex.min.css diff --git a/Response Window resources/css/texmath.min.css b/lib/Response Window resources/css/texmath.min.css similarity index 100% rename from Response Window resources/css/texmath.min.css rename to 
lib/Response Window resources/css/texmath.min.css diff --git a/Response Window resources/index.html b/lib/Response Window resources/index.html similarity index 92% rename from Response Window resources/index.html rename to lib/Response Window resources/index.html index d6ef75e..3931019 100644 --- a/Response Window resources/index.html +++ b/lib/Response Window resources/index.html @@ -230,10 +230,11 @@
- - - - + + + + +
@@ -255,19 +256,21 @@ } function ahkButtonClick(ele) { - if (ele.Id != null || "") { - var eleInfo = ele.Id; + + if (ele.id) { + var eleInfo = ele.id; } - else if (ele.Name != null || "") { - var eleInfo = ele.Name; + else if (ele.name) { + var eleInfo = ele.name; } - else if (ele.innerText != null || "") { + else if (ele.innerText) { var eleInfo = ele.innerText; } else { var eleInfo = ele.outerHTML; } + ahk.ButtonClick.Func(eleInfo); } - \ No newline at end of file + diff --git a/Response Window resources/js/highlight.min.js b/lib/Response Window resources/js/highlight.min.js similarity index 100% rename from Response Window resources/js/highlight.min.js rename to lib/Response Window resources/js/highlight.min.js diff --git a/Response Window resources/js/katex.min.js b/lib/Response Window resources/js/katex.min.js similarity index 100% rename from Response Window resources/js/katex.min.js rename to lib/Response Window resources/js/katex.min.js diff --git a/lib/Response Window resources/js/main.js b/lib/Response Window resources/js/main.js new file mode 100644 index 0000000..7060e61 --- /dev/null +++ b/lib/Response Window resources/js/main.js @@ -0,0 +1,240 @@ +window.chrome.webview.addEventListener('message', handleWebMessage); + +// Initialize markdown-it with options +var md = window.markdownit( + { + html: true, // Enable HTML tags in source + linkify: true, // Autoconvert URL-like text to links + typographer: true, // Enable smartypants and other sweet transforms + highlight: function (str, lang) + { + if (lang && hljs.getLanguage(lang)) { + try { + return '
<pre class="hljs"><code>' +
+            hljs.highlight(str, { language: lang, ignoreIllegals: true }).value +
+            '</code></pre>
'; + } catch (__) { } + } + return '<pre class="hljs"><code>
' + md.utils.escapeHtml(str) + '</code></pre>
'; + } + } +).use(window.texmath + ,{ // Use texmath plugin for mathematical expressions + engine: window.katex, + delimiters: 'dollars', + katexOptions: { macros: { "\\RR": "\\mathbb{R}" } } + } +); + +function renderMarkdown(mapName2Data) { + + // If shallSetChatHistoryText, change button text to Chat History. Used by ShowResponseWindow in AutoHotkey + var button = document.getElementById("chatHistoryButton"); + if (mapName2Data.shallSetChatHistoryText) { + button.innerHTML = "Chat History"; + } else { + button.innerHTML = "Latest Response"; + } + + // Store the button text in localStorage so it persists across refreshes + localStorage.setItem('chatHistoryButtonText', button.innerHTML); + + // Define the content to render. Use the provided content or a default message + var contentToRender = mapName2Data.content || '⚠️ There is no content available.'; + + // Save the pre-markdown text in localStorage for reloading later + localStorage.setItem('preMarkdownText', contentToRender); + + var result = "" + try { + // Render the markdown content + result = md.render(contentToRender); + + } catch (error) { + result = "*** ERROR ***
<br><br>" + error + "<br><br>
" + contentToRender + + console.error(error); + } + + // Inject the rendered HTML into the target element + var contentElement = document.getElementById('content'); + contentElement.innerHTML = result; + + // Scroll to the top + contentElement.scrollTo(0, 0); +} + +function responseWindowCopyButtonAction(copyAsMarkdown) { + + // Get the button element by its id + var button = document.getElementById('copyButton'); + + // Store the original button text + var btnOriginalText = button.innerHTML; + + if (copyAsMarkdown) { + // Shall Copy As Markdown + + // + // Just update the button without copying, + // because Markdown text has already been copied (by Response Window.ahk) + // + + button.innerHTML = 'Copied!'; + button.disabled = true; + + // After 2 seconds, restore the original text and enable the button + setTimeout(function () { + button.innerHTML = btnOriginalText; + button.disabled = false; + }, 2000); + + } else { + // Shall Copy As HTML and Plain Text + + // + // Put the content into Navigator Clipboard as HTML and Plain Text + // + // then Update the button + // + + // Get the 'content' element + var contentElement = document.getElementById('content'); + + // Create a temporary element to hold the formatted content + const tempElement = document.createElement('div'); + tempElement.innerHTML = contentElement.innerHTML; + + // Use the Clipboard API to write the HTML content to the clipboard + navigator.clipboard.write([ + new ClipboardItem({ + 'text/html': new Blob([tempElement.innerHTML], { type: 'text/html' }), + 'text/plain': new Blob([contentElement.innerText], { type: 'text/plain' }) + }) + ]).then(() => { + // Change button text to "Copied!" and disable the button + button.innerHTML = 'Copied!'; + button.disabled = true; + + // After 2 seconds, restore the original text and enable the button + setTimeout(function () { + button.innerHTML = btnOriginalText; + button.disabled = false; + }, 2000); + + // Set Focus + + + }).catch(failureCallback); + } +} + +function failureCallback(erreur) { + + // Get the button element by its id + var button = document.getElementById('copyButton'); + + // Store the original button text + var btnOriginalText = button.innerHTML; + + button.innerHTML = "** ERROR **"; + + // After 2 seconds, restore the original text and enable the button + setTimeout(function () { + button.innerHTML = btnOriginalText; + button.disabled = false; + }, 2000); +} + + +// Enables or disables the buttons and resets the cursor +function responseWindowButtonsEnabled(enable) { + // Array of button IDs + var buttonIds = ["chatButton", "copyButton", "pasteButton", "retryButton", "chatHistoryButton"]; + + // Iterate over each ID in the array + buttonIds.forEach(function (id) { + // Get the button element by ID + var button = document.getElementById(id); + + // Check if the button exists to avoid errors + if (button) { + // Toggle the 'disabled' property using the ternary operator + button.disabled = !enable; + } + }); + // Resets cursor + document.body.style.cursor = 'auto'; +} + +function handleWebMessage(event) { + try { + // Name incoming data + const message = event.data; + + // Check if data is an array for multiple parameters + if (Array.isArray(message.data)) { + if (typeof window[message.target] === 'function') { + window[message.target](...message.data); + } else { + console.error(`Function "${message.target}" does not exist.`); + } + } else { + // Existing single parameter handling + if (typeof window[message.target] === 'function') { + 
window[message.target](message.data); + } else { + console.error(`Function "${message.target}" does not exist.`); + } + } + } catch (error) { + console.error("Error handling incoming message:", error); + } +} + +// // Toggle chatHistoryButton's button text between Chat History and Latest Response +// function toggleButtonText(shallSetChatHistoryText) { + +// var button = document.getElementById('chatHistoryButton'); + +// alert("toggleButtonText"); + +// if (button.textContent === "Chat History") { +// button.textContent = "Latest Response"; + +// } else if (button.textContent === "Latest Response") { +// button.textContent = "Chat History"; + +// // Scroll to the top when displaying chat history +// document.getElementById('content').scrollTo(0, 0); + +// } else if (shallSetChatHistoryText) { +// button.textContent = "Chat History"; +// } + +// // Store the button text in localStorage so it persists across refreshes +// localStorage.setItem('chatHistoryButtonText', button.textContent); +// } + +// Store button text before page refresh +window.addEventListener("beforeunload", function () { + var button = document.getElementById("chatHistoryButton"); + if (button) { + localStorage.setItem("chatHistoryButtonText", button.innerHTML); + } +}); + +// Call renderMarkdown when the DOM is ready +document.addEventListener("DOMContentLoaded", function () { + + // alert("DOMContentLoaded"); + + // Retrieve the button text from localStorage + var storedButtonText = localStorage.getItem("chatHistoryButtonText"); + var button = document.getElementById("chatHistoryButton"); + button.innerHTML = storedButtonText; + + // Retrieve pre-markdown text from localStorage and re-render + var storedContent = localStorage.getItem('preMarkdownText'); + renderMarkdown({content : storedContent, shallSetChatHistoryText : true}); +}); \ No newline at end of file diff --git a/Response Window resources/js/markdown-it.min.js b/lib/Response Window resources/js/markdown-it.min.js similarity index 100% rename from Response Window resources/js/markdown-it.min.js rename to lib/Response Window resources/js/markdown-it.min.js diff --git a/Response Window resources/js/mhchem.min.js b/lib/Response Window resources/js/mhchem.min.js similarity index 100% rename from Response Window resources/js/mhchem.min.js rename to lib/Response Window resources/js/mhchem.min.js diff --git a/Response Window resources/js/texmath.min.js b/lib/Response Window resources/js/texmath.min.js similarity index 100% rename from Response Window resources/js/texmath.min.js rename to lib/Response Window resources/js/texmath.min.js diff --git a/lib/Response Window.ahk b/lib/Response Window.ahk index 8aeb314..625cbf2 100644 --- a/lib/Response Window.ahk +++ b/lib/Response Window.ahk @@ -1,4 +1,5 @@ #Include Config.ahk + #SingleInstance Off #NoTrayIcon @@ -6,12 +7,47 @@ ; Hotkeys ; ---------------------------------------------------- -~Esc:: subScriptHotkeyActions("Esc") -~^w:: subScriptHotkeyActions("closeWindows") +!t:: respWindowHotkeyActions("chatButton") +!r:: respWindowHotkeyActions("retryButton") +!h:: +!l:: respWindowHotkeyActions("chatHistoryButton") +!c:: respWindowHotkeyActions("copyButton") +!p:: respWindowHotkeyActions("pasteButton") + +~Esc:: respWindowHotkeyActions("Esc") +~^w:: respWindowHotkeyActions("closeWindows") -subScriptHotkeyActions(action) { +global gChatHistoryButtonState := "ChatHistory" + +respWindowHotkeyActions(action) { switch action { + case "chatButton": + switch WinActive("A") { + case responseWindow.hWnd: 
buttonClickAction("chatButton") + } + + case "retryButton": + switch WinActive("A") { + case responseWindow.hWnd: buttonClickAction("retryButton") + } + + case "chatHistoryButton": + switch WinActive("A") { + case responseWindow.hWnd: + buttonClickAction("chatHistoryButton") + } + + case "copyButton": + switch WinActive("A") { + case responseWindow.hWnd: buttonClickAction("copyButton") + } + + case "pasteButton": + switch WinActive("A") { + case responseWindow.hWnd: buttonClickAction("pasteButton") + } + ; Handles request cancellation based on Response Window state: ; ; Background window: Stop request, keep window open @@ -43,27 +79,45 @@ subScriptHotkeyActions(action) { case responseWindow.hWnd: buttonClickAction("Close") case chatInputWindow.guiObj.hWnd: chatInputWindow.closeButtonAction() } + + Default: + MsgBox("Unhandled key : (" action ")", "Error", 0x30) } } ; ---------------------------------------------------- -; Read data from main script and start loading cursor +; Read data created by main script ; ---------------------------------------------------- -requestParams := jsongo.Parse(FileOpen(A_Args[1], "r", "UTF-8").Read()) +if (A_Args.Length < 1) { + ; No args provided (case of debug) + + ; Ask user to select an 2_responseWindowData.json file + responseWindowDataFilePath := FileSelect(3, A_Temp , "Open a file", "Text Documents (LLM_Ahk_*_responseWindowData.json)") + +} else { + + ; Path to JSON file given as first argument + responseWindowDataFilePath := A_Args[1] +} + +requestParams := jsongo.Parse(FileOpen(responseWindowDataFilePath, "r", "UTF-8").Read()) startLoadingCursor(true) ; ---------------------------------------------------- ; Change icon based on providerName ; ---------------------------------------------------- -TraySetIcon(FileExist(icon := "..\icons\" requestParams["providerName"] ".ico") ? icon : "..\icons\IconOn.ico") +iconNb := getIconNb(requestParams["providerName"]) + +; TraySetIcon(FileExist(icon) ? icon : iconDefault) +TraySetIconEmbed((iconNb > 0) ? iconNb : ICON_ON) ; ---------------------------------------------------- ; Create new instance of OpenRouter class ; ---------------------------------------------------- -router := OpenRouter(APIKey) +router := OpenRouter(gLLM_BASE_URL, APIKey) ; ---------------------------------------------------- ; Create Response Window @@ -72,84 +126,163 @@ router := OpenRouter(APIKey) ; Create the Webview Window responseWindow := WebViewToo(, , ,) responseWindow.OnEvent("Close", (*) => buttonClickAction("Close")) -responseWindow.Load("..\Response Window resources\index.html") +responseWindow.Load("Response Window resources\index.html") ; Apply dark mode to title bar ; Reference: https://www.autohotkey.com/boards/viewtopic.php?p=422034#p422034 DllCall("Dwmapi\DwmSetWindowAttribute", "ptr", responseWindow.hWnd, "int", 20, "int*", true, "int", 4) +; ; Assign actions to click events +; + +; Define the function that will handle the action when a button is clicked responseWindow.AddHostObjectToScript("ButtonClick", { func: buttonClickAction }) + +; Function that perform action when button is clicked +; +; action : buttonId or button Name +; buttonClickAction(action) { - static chatHistoryButtonText := "Chat History" + + global gChatHistoryButtonState switch action { - case "Chat": chatInputWindow.showInputWindow() - case "Copy": - if requestParams["copyAsMarkdown"] { - chatState := manageState("chat", "get") - A_Clipboard := (chatHistoryButtonText = "Chat History") ? 
chatState.latestResponse : chatState.chatHistory - } + case "chatButton": + chatInputWindow.showInputWindow() + + case "copyButton": + doWebCopy() + + case "pasteButton": + ; Perform a Copy Button Action + doWebCopy() - postWebMessage("responseWindowCopyButtonAction", requestParams["copyAsMarkdown"]) + ; Activate the calling window + WinActivate("ahk_id " requestParams["callingWindowHwnd"]) - case "Retry": + ; Paste the Clipboard to the calling window + Send("^v") + + ; Close and stop the Response Window + closeAndStop() + + case "retryButton": manageState("model", "remove") + postWebMessage("responseWindowButtonsEnabled", false) + startLoadingCursor(true) + chatHistoryJSONRequest := manageChatHistoryJSON("get") router.removeLastAssistantMessage(&chatHistoryJSONRequest) FileOpen(requestParams["chatHistoryJSONRequestFile"], "w", "UTF-8-RAW").Write(chatHistoryJSONRequest) manageChatHistoryJSON("set", chatHistoryJSONRequest) + sendRequestToLLM(&chatHistoryJSONRequest) - case "Chat History", "Latest Response": + case "chatHistoryButton": + ; Render ChatHistory or LatestResponse as Markdown content := manageState("chat", "get") - data := [(action = "Chat History") ? content.chatHistory : content.latestResponse] - postWebMessage("renderMarkdown", data) - chatHistoryButtonText := (chatHistoryButtonText = "Chat History" ? "Latest Response" : "Chat History") + contentToDisplay := (gChatHistoryButtonState = "ChatHistory") ? content.chatHistory : content.latestResponse + + ; Toggle gChatHistoryButtonState + gChatHistoryButtonState := (gChatHistoryButtonState = "ChatHistory" ? "LatestResponse" : "ChatHistory") + + postWebMessage("renderMarkdown", Map("content", contentToDisplay, "shallSetChatHistoryText", gChatHistoryButtonState = "ChatHistory")) + + case "resetChatHistoryButtonState": gChatHistoryButtonState := "ChatHistory" - case "resetChatHistoryButtonText": chatHistoryButtonText := "Chat History" case "Close": - if (!requestParams["skipConfirmation"]) { - if (MsgBox("End your chat session with " requestParams["responseWindowTitle"] "?", - "Close " requestParams["responseWindowTitle"], - "308 Owner" responseWindow.hWnd) != "Yes") { - return true - } - } + ; Close and stop the response window + closeAndStop() + + Default: + MsgBox("Unhandled action: (" action ")", "Error", 0x30) + } +} - ; Proceed with closing (either no warning needed or user clicked "Yes") - if (ProcessExist(manageState("cURL", "get"))) { - manageState("cURL", "close") +; Call JavaScript function in the web page to toggle copy button for few seconds +; and copy the response to the clipboard as HTML and Plain Text (if not copyAsMarkdown) +doWebCopy() { - ; Sometimes the cURLOutputFile is still being accessed - ; Sleep here to make sure the file is not opened anymore - Sleep 100 - } + global gChatHistoryButtonState - deleteTempFiles() - startLoadingCursor(false) - postWebMessage("toggleButtonText", [true]) + if requestParams["copyAsMarkdown"] { + ; Shall copy as Markdown - ; Sends a PostMessage to main script saying the - ; Response Window has been closed, then terminates - ; the Response Window script afterwards - CustomMessages.notifyResponseWindowState(CustomMessages.WM_RESPONSE_WINDOW_CLOSED, - requestParams["uniqueID"], - responseWindow.hWnd, - requestParams["mainScriptHiddenhWnd"]) - ExitApp + chatState := manageState("chat", "get") + + ; Last response and Chathistory are encoded as Markdown + ; Put it to clipboard + A_Clipboard := (gChatHistoryButtonState = "ChatHistory") ? 
chatState.latestResponse : chatState.chatHistory + + } else { + ; Shall copy as HTML and Plain Text + + ; Nothing to do here. It must be done in the web page + } + + ; Set Focus + responseWindow.MoveFocus(0) + ; ControlFocus responseWindow.Gui["WebViewTooContainer"].Hwnd + + ; Call JavaScript function in the web page to toggle button for few seconds + ; and copy the response to the clipboard as HTML and Plain Text (if not copyAsMarkdown) + postWebMessage("responseWindowCopyButtonAction", requestParams["copyAsMarkdown"]) +} + +; Close and stop the response window +closeAndStop() { + if (!requestParams["skipConfirmation"]) { + if (MsgBox("End your chat session with " requestParams["responseWindowTitle"] "?", + "Close " requestParams["responseWindowTitle"], + InputWindow.cMSGBOX_WARNING . " Owner" responseWindow.hWnd) != "Yes") { + return true + } + } + + ; Proceed with closing (either no warning needed or user clicked "Yes") + if (ProcessExist(manageState("cURL", "get"))) { + manageState("cURL", "close") + + ; Sometimes the cURLOutputFile is still being accessed + ; Sleep here to make sure the file is not opened anymore + Sleep 400 } + + deleteTempFiles() + startLoadingCursor(false) + + postWebMessage("toggleButtonText", [true]) + + ; Sends a PostMessage to main script saying the + ; Response Window has been closed, then terminates + ; the Response Window script afterwards + CustomMessages.notifyResponseWindowState(CustomMessages.WM_RESPONSE_WINDOW_CLOSED, + requestParams["uniqueID"], + responseWindow.hWnd, + requestParams["mainScriptHiddenhWnd"]) + + ; Stop the Response Window script + ExitApp } +; +; Function to show the Response Window using WebView +; (This method does not block execution) +; showResponseWindow(responseWindowTextContent, initialRequest, noActivate := false) { - postWebMessage("renderMarkdown", [responseWindowTextContent, true]) - buttonClickAction("resetChatHistoryButtonText") + + buttonClickAction("resetChatHistoryButtonState") + + postWebMessage("renderMarkdown", Map("content", responseWindowTextContent, "shallSetChatHistoryText", gChatHistoryButtonState = "ChatHistory")) + + if initialRequest { ; Response Window's width and height - desiredW := 600 + desiredW := 750 desiredH := 600 ; Calculate screen center @@ -217,21 +350,32 @@ showResponseWindow(responseWindowTextContent, initialRequest, noActivate := fals ; Create Chat Input Window ; ---------------------------------------------------- -chatInputWindow := InputWindow("Send message to " requestParams["responseWindowTitle"], requestParams[ - "skipConfirmation"]) -chatInputWindow.sendButtonAction(chatSendButtonAction) +chatInputWindow := InputWindow("Send message to " requestParams["responseWindowTitle"], + requestParams["skipConfirmation"]) + +chatInputWindow.registerSendButtonAction(chatSendButtonAction) chatSendButtonAction(*) { if !chatInputWindow.validateInputAndHide() { return } + ; Activate Loading Mouse Cursor startLoadingCursor(true) + + ; Disable Response Window Buttons while waiting for LLM response postWebMessage("responseWindowButtonsEnabled", false) + + ; Append User Prompt message to chat history + ; and get the JSON request for sending to the LLM chatHistoryJSONRequest := manageChatHistoryJSON("get") - router.appendToChatHistory("user", chatInputWindow.EditControl.Value, & - chatHistoryJSONRequest, requestParams["chatHistoryJSONRequestFile"]) + router.appendToChatHistory("user", + chatInputWindow.EditControl.Value, + &chatHistoryJSONRequest, + requestParams["chatHistoryJSONRequestFile"]) 
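    ; NOTE: illustrative only. The chat history JSON maintained by createJSONRequest /
    ; appendToChatHistory is assumed to follow the standard OpenAI-style chat completions
    ; schema that the OpenRouter /v1/chat/completions endpoint expects, e.g.:
    ;
    ;   {
    ;       "model": "google/gemini-2.0-flash-thinking-exp:free",
    ;       "messages": [
    ;           { "role": "system",    "content": "You are a helpful assistant. ..." },
    ;           { "role": "user",      "content": "How can I leverage the power of AI in my everyday tasks?" },
    ;           { "role": "assistant", "content": "..." }
    ;       ]
    ;   }
    ;
    ; getChatHistoryMessages() later walks this "messages" array to rebuild the chat history text.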
manageChatHistoryJSON("set", chatHistoryJSONRequest) + + ; Send the request to the LLM sendRequestToLLM(&chatHistoryJSONRequest) } @@ -259,7 +403,7 @@ responseWindowSendToAllModels(uniqueID, lParam, msg, responseWindowhWnd) { } ; ---------------------------------------------------- -; Run cURL command and process response +; Run cURL command and process LLM response ; ---------------------------------------------------- chatHistoryJSONRequest := manageChatHistoryJSON("get") @@ -267,20 +411,29 @@ sendRequestToLLM(&chatHistoryJSONRequest, true) sendRequestToLLM(&chatHistoryJSONRequest, initialRequest := false) { + ; + ; Launch the cURL command and run it asynchronously + ; + ; Run the cURL command asynchronously and store the PID Run(FileOpen(requestParams["cURLCommandFile"], "r", "UTF-8").Read(), , "Hide", &cURLPID) manageState("cURL", "set", cURLPID) - ; Waits for the process to complete or be aborted + ; Waits for the cURL process to complete or be aborted ; while allowing the script to process events while (ProcessExist(cURLPID)) { Sleep 250 } - ; If user cancels the process, exit if !manageState("cURL", "get") { + ; User canceled the process + + ; Perform Exit + manageState("cURL", "close") + startLoadingCursor(false) + if initialRequest { deleteTempFiles() @@ -297,53 +450,117 @@ sendRequestToLLM(&chatHistoryJSONRequest, initialRequest := false) { cURLPID := 0 manageState("cURL", "set", cURLPID) - ; Read the output after the process has completed - JSONResponseFromLLM := FileOpen(requestParams["cURLOutputFile"], "r", "UTF-8").Read() + ; ; Process the JSON response from the LLM API - try { - JSONResponseVar := jsongo.Parse(JSONResponseFromLLM) - responseFromLLM := router.extractJSONResponse(JSONResponseVar) + ; + + errorMsg := "" - ; Get text after forward slash as responseFromLLM.model and replace colon (:) with dash (-) - responseFromLLM.model := StrReplace(SubStr(responseFromLLM.model, InStr(responseFromLLM.model, "/") + 1), ":", - "-") + try { + ; Read the LLM Response File + JSONResponseFromLLM := FileOpen(requestParams["cURLOutputFile"], "r", "UTF-8").Read() - manageState("model", "add", responseFromLLM.model) - router.appendToChatHistory("assistant", - responseFromLLM.response, &chatHistoryJSONRequest, requestParams["chatHistoryJSONRequestFile"]) } catch as e { - JSONResponseFromLLM := router.extractErrorResponse(JSONResponseVar) - responseFromLLM := - "**⛔ Error parsing response**`n`n" e.Message - . "`n`n---`n`n**⚠️ Response from the API**`n`n" - . JSONResponseFromLLM.error + + errorMsg := + "**⛔ ERROR**" + . "`n`n" + . "LLM Response File not found :" + . "`n`n" + . requestParams["cURLOutputFile"] . "`n`n---`n`n" - errorCodes := { - 400: "You may have specified an invalid API model. See [this guide](https://github.com/kdalanon/LLM-AutoHotkey-Assistant/blob/main/README.md#apimodels) on how to get the correct API models.", - 401: "Authentication failed. Your API key or session might be invalid or expired. Check your keys [here](https://openrouter.ai/settings/keys), re-add it to the app, and try again.", - 402: "Insufficient funds. Click [here](https://openrouter.ai/credits) to check your available credits.", - 403: "Content flagged as inappropriate. Your input triggered content moderation and was rejected. Please revise your request and try again with different content.", - 408: "Request timed out. The API request took too long to process. This might be due to network issues or server overload.", - 429: "You've hit the rate limit of **" requestParams["singleAPIModelName"] "**. 
Try again after some time.", - 502: "Service temporarily unavailable. The chosen model is either down or returned an invalid response. Please try again later or select a different model.", - 503: "No suitable model available. There are no providers currently meeting your request requirements. Please try again later or adjust your routing settings." - } + . "**POSSIBLE REASONS**" + . "`n`n" + . "- **LLM** did **not respond**." + . "`n`n" + . "- Your disk is **full**." - responseFromLLM .= errorCodes.%JSONResponseFromLLM.code% - showResponseWindow(responseFromLLM, initialRequest) - postWebMessage("responseWindowButtonsEnabled", true) - startLoadingCursor(false) - Exit + } else { + ; LLM Response File found + + try { + ; Parse JSON response from LLM + JSONResponseVar := jsongo.Parse(JSONResponseFromLLM) + responseFromLLM := router.extractJSONResponse(JSONResponseVar) + + ; Get text after forward slash as responseFromLLM.model and replace colon (:) with dash (-) + responseFromLLM.model := StrReplace(SubStr(responseFromLLM.model, InStr(responseFromLLM.model, "/") + 1), ":", "-") + + manageState("model", "add", responseFromLLM.model) + + ; Append assistant's (LLM's) response to chat history + router.appendToChatHistory("assistant", + responseFromLLM.response, + &chatHistoryJSONRequest, + requestParams["chatHistoryJSONRequestFile"]) + + } catch as e { + + try { + JSONResponseFromLLM := router.extractErrorResponse(JSONResponseVar) + + } catch as e2 { + ; Can not extract error message from JSON response + + errorMsg := + "**⛔ ERROR**" + . "`n`n" + . "Error **parsing** LLM Response" + + } else { + ; Can extract error message from JSON response + + errorMsg := + "**⛔ ERROR**" + . "`n`n" + . "Error **parsing** LLM Response" + . "`n`n" + . e.Message + . "`n`n---`n`n" + . "**REASON**" + . "`n`n" + . "**⚠️ Response from the API** : " + . "`n`n" + . JSONResponseFromLLM.error + + ; Map of error codes + errorCodes := { + 400: "You may have specified an invalid API model. See [this guide](https://github.com/kdalanon/LLM-AutoHotkey-Assistant/blob/main/README.md#apimodels) on how to get the correct API models.", + 401: "Authentication failed. Your API key or session might be invalid or expired. Check your keys [here](https://openrouter.ai/settings/keys), re-add it to the app, and try again.", + 402: "Insufficient funds. Click [here](https://openrouter.ai/credits) to check your available credits.", + 403: "Content flagged as inappropriate. Your input triggered content moderation and was rejected. Please revise your request and try again with different content.", + 408: "Request timed out. The API request took too long to process. This might be due to network issues or server overload.", + 429: "You've hit the rate limit of **" requestParams["singleAPIModelName"] "**. Try again after some time.", + 502: "Service temporarily unavailable. The chosen model is either down or returned an invalid response. Please try again later or select a different model.", + 503: "No suitable model available. There are no providers currently meeting your request requirements. Please try again later or adjust your routing settings." 
+ } + + ; Concat Error message according to error code + errorMsg .= errorCodes.%JSONResponseFromLLM.code% + + } finally { + + ; NTD + } + } ; catch + + } finally { + + ; NTD } + ; ; Save Chat History and Latest Response so it can be viewed later - ; Begin by parsing the JSON string into an object + ; + manageChatHistoryJSON("set", chatHistoryJSONRequest) + + ; Begin by parsing the Chat History JSON string into an object obj := jsongo.Parse(chatHistoryJSONRequest) ; Get the messages array - messages := router.getMessages(obj) + messages := router.getChatHistoryMessages(obj) totalMessages := messages.Length ; Chat History - Iterate over each message in the 'messages' array @@ -357,10 +574,11 @@ sendRequestToLLM(&chatHistoryJSONRequest, initialRequest := false) { case "user": chatHistory .= "`n`n---`n`n**🔵 You**`n`n" content case "assistant": chatHistory .= "`n`n---`n`n**🟡 " manageState("model", "get")[modelIndex++] "**`n`n" content } - } + } ; for ; Latest Response - Iterate backwards over each message in the 'messages' array to find the last assistant message ; and calculate the current index starting from the end + latestResponse := "⚠️ No LLM Response yet." loop totalMessages { currentIndex := totalMessages - A_Index + 1 ; msg := messages[currentIndex] @@ -372,18 +590,55 @@ sendRequestToLLM(&chatHistoryJSONRequest, initialRequest := false) { manageState("chat", "add", { chatHistory: chatHistory, latestResponse: latestResponse }) - if requestParams["isAutoPaste"] { - A_Clipboard := responseFromLLM.response - Send("^v") - startLoadingCursor(false) - CustomMessages.notifyResponseWindowState(CustomMessages.WM_RESPONSE_WINDOW_CLOSED, requestParams["uniqueID"], - responseWindow.hWnd, requestParams["mainScriptHiddenhWnd"]) - deleteTempFiles() - ExitApp + + ; + ; Display or Paste + ; + + if (errorMsg = "") { + ; No error + + if requestParams["isAutoPaste"] { + ; Auto-paste the response into the active window + + ; TODO M 2025_06_26 : Should Activate the calling Window because another window could have been activated since the call + + A_Clipboard := responseFromLLM.response + Send("^v") + + ; TODO m 2025_06_26 : Should factorize (See closeAndStop() ) + + startLoadingCursor(false) + + CustomMessages.notifyResponseWindowState(CustomMessages.WM_RESPONSE_WINDOW_CLOSED, requestParams["uniqueID"], + responseWindow.hWnd, requestParams["mainScriptHiddenhWnd"]) + deleteTempFiles() + + ; Stop Response Window.ahk script + ExitApp + + } else { + ; Not Auto-paste mode + + ; Display the response in the Response Window + showResponseWindow(responseFromLLM.response, initialRequest, !initialRequest && !(WinActive(responseWindow.hWnd))) + + ; Enable buttons in the response window + postWebMessage("responseWindowButtonsEnabled", true) + + ; Stop loading mouse cursor + startLoadingCursor(false) + } } else { - showResponseWindow(responseFromLLM.response, initialRequest, !initialRequest && !(WinActive(responseWindow.hWnd - ))) + ; There is an error message to display + + ; Show Response Window + showResponseWindow(errorMsg, initialRequest) + + ; Enable Response Window buttons postWebMessage("responseWindowButtonsEnabled", true) + + ; Stop loading Cursor startLoadingCursor(false) } } @@ -407,6 +662,7 @@ manageChatHistoryJSON(action, data := unset) { ;-------------------------------------------------- manageState(component, action, data := {}) { + static state := { modelHistory: [], chatHistory: { chatHistory: "", latestResponse: "" }, @@ -457,16 +713,17 @@ postWebMessage(target, data := unset) { ; 
---------------------------------------------------- deleteTempFiles() { - FileDelete(requestParams["chatHistoryJSONRequestFile"]) - FileDelete(requestParams["cURLCommandFile"]) - FileExist(requestParams["cURLOutputFile"]) ? FileDelete(requestParams["cURLOutputFile"]) : "" - FileDelete(A_Args[1]) + try FileDelete(requestParams["chatHistoryJSONRequestFile"]) + try FileDelete(requestParams["cURLCommandFile"]) + try FileExist(requestParams["cURLOutputFile"]) ? FileDelete(requestParams["cURLOutputFile"]) : "" + try FileDelete(A_Args[1]) } ; ---------------------------------------------------- ; Start or stop loading cursor ; ---------------------------------------------------- +; TODO m 2025_06_26 : Rename startLoadingCursor to setLoadingCursor startLoadingCursor(status) { status ? CustomMessages.notifyResponseWindowState(CustomMessages.WM_RESPONSE_WINDOW_LOADING_START, requestParams["uniqueID"], , requestParams["mainScriptHiddenhWnd"]) diff --git a/lib/WebViewToo.ahk b/lib/WebViewToo.ahk index af01eb7..e19549d 100644 --- a/lib/WebViewToo.ahk +++ b/lib/WebViewToo.ahk @@ -59,7 +59,10 @@ class WebViewToo { return RTrim(StrLower(Id), "-") } static TempDir := A_Temp "\" WebViewToo.UniqueId - static DllPath := WebViewToo.TempDir "\" (A_PtrSize * 8) "bit\WebView2Loader.dll" + + ; Dll is in folder 32bit\WebView2Loader.dll or 64bit\WebView2Loader.dll + ; static DllPath := WebViewToo.TempDir "\" (A_PtrSize * 8) "bit\WebView2Loader.dll" + static DllPath := (A_PtrSize * 8) "bit\WebView2Loader.dll" __New(Html := WebViewToo.Template.Html, Css := WebViewToo.Template.Css, JavaScript := WebViewToo.Template.JavaScript, CustomCaption := False) { this.Gui := Gui("+Resize") @@ -363,8 +366,10 @@ class WebViewToo { } } } - - Load(Filename) => this.Navigate(Filename ~= "^https?:\/\/" ? Filename : A_IsCompiled ? "https://ahk.localhost/" Filename : A_WorkingDir "\" Filename) + + ; In compiled mode, use A_WorkingDir, not "https://ahk.localhost/" + ; Load(Filename) => this.Navigate(Filename ~= "^https?:\/\/" ? Filename : A_IsCompiled ? "https://ahk.localhost/" Filename : A_WorkingDir "\" Filename) + Load(Filename) => this.Navigate(Filename ~= "^https?:\/\/" ? Filename : A_WorkingDir "\" Filename) SimplePrintToPdf(FileName := "", Orientation := "Portrait", Timeout := 5000) { Loop { diff --git a/lib/simulate_select.ahk b/lib/simulate_select.ahk new file mode 100644 index 0000000..8354c89 --- /dev/null +++ b/lib/simulate_select.ahk @@ -0,0 +1,10 @@ +#Requires AutoHotkey v2.0 + +; Wait until the menu appears +Sleep 200 + +; Press Down key to select the first element from the menu +Send("{Down}") + +; Close the script after the action is performed +ExitApp
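The new lib\simulate_select.ahk helper simply waits 200 ms, sends {Down}, and exits, so that the first entry of a menu opened right afterwards is pre-selected. Its call site is not part of this diff; the sketch below is only an assumption about how the main script could launch it just before showing the prompt menu (the function name and path are illustrative):

#Requires AutoHotkey v2.0

; Sketch only: start the helper asynchronously, then show the menu.
; Menu.Show() blocks until the menu is dismissed, so the helper must already be
; running; after its 200 ms Sleep it sends {Down}, which highlights the first item.
showPromptMenuWithFirstItemSelected(promptMenu) {
    Run(Format('"{1}" "{2}"', A_AhkPath, A_ScriptDir "\lib\simulate_select.ahk"))
    promptMenu.Show()
}

In a compiled build the helper is embedded as a resource, so the path would presumably be resolved through getRessourcePath() rather than A_ScriptDir.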