Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
8f0209d
Add copy-paste mode with cp: prefix for web UI interaction, based on …
chrisnestrud Dec 18, 2025
e14b1b7
feat: add CopyPasteCoder and remove legacy copy-paste path
chrisnestrud Dec 19, 2025
658b5a5
refactor: switch clipboard I/O to copypaste module
chrisnestrud Dec 19, 2025
97ed880
feat: add local token counting to CopyPasteCoder and fill usage
chrisnestrud Dec 19, 2025
4e7fffe
fix: map LiteLLM token count exceptions to user-friendly warnings
chrisnestrud Dec 19, 2025
196d7f3
refactor: remove unused streaming-related fields in CopyPasteCoder
chrisnestrud Dec 19, 2025
05a906b
fix: initialize CopyPasteCoder.gpt_prompts from selected edit_format
chrisnestrud Dec 19, 2025
ec4fb44
fix: initialize streaming attributes in CopyPasteCoder
chrisnestrud Dec 19, 2025
6681055
refactor: add docstrings and nosec hint in copypaste_coder.py
chrisnestrud Dec 19, 2025
83f28c2
fix: initialize partial_response_tool_calls to [] and update comments
chrisnestrud Dec 19, 2025
803e4aa
fix: remove stray code fence in copypaste_coder.py
chrisnestrud Dec 19, 2025
e702295
fix: remove stray bash fence and flake8 command from copypaste_coder.py
chrisnestrud Dec 19, 2025
22789c4
Tests for copypaste coder
chrisnestrud Dec 19, 2025
ff7aa32
Merge branch 'main' into copy-paste-no-api
chrisnestrud Dec 19, 2025
45aef2d
Bump Version
dwash96 Dec 19, 2025
85b854b
#280: Commit files on finished tool call in agent mode
dwash96 Dec 19, 2025
e17a707
refactor: swap copy_paste_instead_of_api for copy_paste_mode
chrisnestrud Dec 19, 2025
c482ce5
refactor: align copy_paste_transport checks to clipboard value
chrisnestrud Dec 19, 2025
350ede1
Update sessions.md to have session name mirror model name
chrisnestrud Dec 19, 2025
ca5ad78
Auto-commit when finished.
jamwil Dec 19, 2025
388b005
Don't display line format in tui mode, the input area already does
dwash96 Dec 19, 2025
ef07f11
#281: They run AI models but can't support the full json schema spec…
dwash96 Dec 19, 2025
3303a17
Merge pull request #283 from chrisnestrud/patch-1
dwash96 Dec 19, 2025
3ddb829
Fix git-diff tool.
jamwil Dec 19, 2025
175db1a
Merge pull request #276 from chrisnestrud/copy-paste-no-api
dwash96 Dec 19, 2025
40917cc
Avoid exception when detached.
jamwil Dec 19, 2025
93736d6
Fix Formatting
dwash96 Dec 19, 2025
1dfd940
Fix conflicts with model overrides and copy paste coder
dwash96 Dec 20, 2025
152e5d3
Set AgentPrompts as a class level artifact
dwash96 Dec 20, 2025
f3127a1
#166: Format JSON and python literals in error text
dwash96 Dec 20, 2025
85bae17
#286: Early return instead of uncaught exception
dwash96 Dec 20, 2025
d37382c
Merge pull request #285 from jamwil/git-ops
dwash96 Dec 20, 2025
de465de
Merge pull request #284 from jamwil/commit-when-finished
dwash96 Dec 20, 2025
ba42944
Remove duplicate auto commits
dwash96 Dec 20, 2025
8179e83
Merge branch 'main' into v0.90.4
dwash96 Dec 20, 2025
8803f20
Make CONTRIBUTING.md a bit more strongly worded
dwash96 Dec 20, 2025
644d591
More CONTRIBUTING.md verbiage
dwash96 Dec 20, 2025
8bd4550
Replace networkx with rustworkx for performance improvement
dwash96 Dec 20, 2025
de79fc1
Update agent prompt to encourage the model to act more proactively an…
dwash96 Dec 20, 2025
ce25a03
Options Updates:
dwash96 Dec 20, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 8 additions & 5 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -64,11 +64,14 @@ docker build -t cecli -f docker/Dockerfile .

## Coding Standards

It really helps the merge process if your PR:

1. complies with project coding standards
2. includes test coverage
3. updates the relevant user-facing documentation, including the output of `/help` and `--help` as well as notes in config files and the web-site.
In order for your PR to be accepted it must:

1. Be up to date with the main branch
2. Comply with project coding standards (including running the pre-commit formatting hooks)
3. Include test coverage
4. Update relevant user-facing documentation:
- Primary documentation will live in `aider/website/docs/config/`
- Check new cli arguments with the output of `/help` and `--help`

### Python Compatibility

Expand Down
2 changes: 1 addition & 1 deletion aider/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from packaging import version

__version__ = "0.90.0.dev"
__version__ = "0.90.4.dev"
safe_version = __version__

try:
Expand Down
12 changes: 5 additions & 7 deletions aider/args.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,8 +246,8 @@ def get_parser(default_config_files, git_root):
group = parser.add_argument_group("TUI Settings")
group.add_argument(
"--tui",
action="store_true",
default=False,
action=argparse.BooleanOptionalAction,
default=None,
help="Launch Textual TUI interface (experimental)",
)
group.add_argument(
Expand Down Expand Up @@ -718,7 +718,7 @@ def get_parser(default_config_files, git_root):
"--check-update",
action=argparse.BooleanOptionalAction,
help="Check for new aider versions on launch",
default=False,
default=True,
)
group.add_argument(
"--show-release-notes",
Expand Down Expand Up @@ -803,10 +803,8 @@ def get_parser(default_config_files, git_root):
)
group.add_argument(
"--linear-output",
action="store_true",
help=(
"Run input and output sequentially instead of us simultaneous streams (default: False)"
),
action=argparse.BooleanOptionalAction,
help="Run input and output sequentially instead of us simultaneous streams (default: True)",
default=True,
)
group.add_argument(
Expand Down
2 changes: 2 additions & 0 deletions aider/coders/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from .ask_coder import AskCoder
from .base_coder import Coder
from .context_coder import ContextCoder
from .copypaste_coder import CopyPasteCoder
from .editblock_coder import EditBlockCoder
from .editblock_fenced_coder import EditBlockFencedCoder
from .editor_diff_fenced_coder import EditorDiffFencedCoder
Expand Down Expand Up @@ -33,4 +34,5 @@
EditorDiffFencedCoder,
ContextCoder,
AgentCoder,
CopyPasteCoder,
]
7 changes: 3 additions & 4 deletions aider/coders/agent_coder.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,12 +81,9 @@ class AgentCoder(Coder):
"""Mode where the LLM autonomously manages which files are in context."""

edit_format = "agent"
gpt_prompts = AgentPrompts()

def __init__(self, *args, **kwargs):
# Initialize appropriate prompt set before calling parent constructor
# This needs to happen before super().__init__ so the parent class has access to gpt_prompts
self.gpt_prompts = AgentPrompts()

# Dictionary to track recently removed files
self.recently_removed = {}

Expand Down Expand Up @@ -1203,6 +1200,8 @@ async def reply_completed(self):

if self.agent_finished:
self.tool_usage_history = []
if self.files_edited_by_tools:
_ = await self.auto_commit(self.files_edited_by_tools)
return True

# Since we are no longer suppressing, the partial_response_content IS the final content.
Expand Down
5 changes: 2 additions & 3 deletions aider/coders/agent_prompts.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,8 @@ class AgentPrompts(CoderPrompts):
## Core Directives
- **Role**: Act as an expert software engineer.
- **Act Proactively**: Autonomously use file discovery and context management tools (`ViewFilesAtGlob`, `ViewFilesMatching`, `Ls`, `View`, `Remove`) to gather information and fulfill the user's request. Chain tool calls across multiple turns to continue exploration.
- **Be Decisive**: Do not ask the same question or search for the same term in multiple ways. Trust that your initial findings are valid.
- **Be Concise**: Keep all responses brief and direct (1-3 sentences). Avoid preamble, postamble, and unnecessary explanations.
- **Confirm Ambiguity**: Before applying complex or ambiguous edits, briefly state your plan. For simple, direct edits, proceed without confirmation.
- **Be Decisive**: Trust that your initial findings are valid. Refrain from asking the same question or searching for the same term in multiple similar ways.
- **Be Concise**: Keep all responses brief and direct (1-3 sentences). Avoid preamble, postamble, and unnecessary explanations. Do not repeat yourself.
</context>

<context name="workflow_and_tool_usage">
Expand Down
24 changes: 19 additions & 5 deletions aider/coders/base_coder.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,7 @@ async def create(
if from_coder:
main_model = from_coder.main_model
else:
main_model = models.Model(models.DEFAULT_MODEL_NAME)
main_model = models.Model(models.DEFAULT_MODEL_NAME, io=io)

if edit_format == "code":
edit_format = None
Expand Down Expand Up @@ -229,6 +229,15 @@ async def create(
kwargs = use_kwargs
from_coder.ok_to_warm_cache = False

if (
getattr(main_model, "copy_paste_mode", False)
and getattr(main_model, "copy_paste_transport", "api") == "clipboard"
):
res = coders.CopyPasteCoder(main_model, io, args=args, **kwargs)
await res.initialize_mcp_tools()
res.original_kwargs = dict(kwargs)
return res

for coder in coders.__all__:
if hasattr(coder, "edit_format") and coder.edit_format == edit_format:
res = coder(main_model, io, args=args, **kwargs)
Expand Down Expand Up @@ -379,6 +388,9 @@ def __init__(
self.io = io
self.io.coder = weakref.ref(self)

self.manual_copy_paste = getattr(main_model, "copy_paste_transport", "api") == "clipboard"
self.copy_paste_mode = getattr(main_model, "copy_paste_mode", False) or auto_copy_context

self.shell_commands = []
self.partial_response_tool_calls = []

Expand All @@ -399,7 +411,7 @@ def __init__(
self.main_model.reasoning_tag if self.main_model.reasoning_tag else REASONING_TAG
)

self.stream = stream and main_model.streaming
self.stream = stream and main_model.streaming and not self.manual_copy_paste

if cache_prompts and self.main_model.cache_control:
self.add_cache_headers = True
Expand Down Expand Up @@ -581,6 +593,8 @@ def get_announcements(self):
output += ", prompt cache"
if main_model.info.get("supports_assistant_prefill"):
output += ", infinite output"
if self.copy_paste_mode:
output += ", copy/paste mode"

lines.append(output)

Expand Down Expand Up @@ -639,7 +653,7 @@ def get_announcements(self):
if self.done_messages:
lines.append("Restored previous conversation history.")

if self.io.multiline_mode:
if self.io.multiline_mode and not self.args.tui:
lines.append("Multiline mode: Enabled. Enter inserts newline, Alt-Enter submits text")

return lines
Expand Down Expand Up @@ -2823,8 +2837,8 @@ def add_assistant_reply_to_cur_messages(self):
# but response.dict() is the Pydantic V1 method name.
response_dict = dict(response)
except TypeError:
print("Neither model_dump() nor dict() worked as expected.")
raise
print("Response parsing error.")
return

msg = response_dict["choices"][0]["message"]

Expand Down
Loading
Loading