Skip to content

Commit 681ff50

Browse files
Fix(stream): Correct newline handling in streaming functions
Refactored `ollama_generate_stream` to use a more robust method for handling streaming responses, similar to `ollama_chat_stream`. This change fixes a bug where newlines were not being handled correctly in the streaming output. The new implementation uses `jq -r` to extract and unescape the response content, and `printf '%s'` to print the raw string without interpreting escape sequences. This ensures that newlines and other special characters in the model's response are rendered correctly. The `ollama_chat_stream` function was also reviewed and found to be already using the correct approach.
1 parent a1cb014 commit 681ff50

File tree

1 file changed

+17
-44
lines changed

1 file changed

+17
-44
lines changed

ollama_bash_lib.sh

Lines changed: 17 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -756,52 +756,25 @@ ollama_generate_stream() {
756756
_debug "ollama_generate_stream: model='$model' prompt='${prompt:0:40}'"
757757

758758
OBL_STREAM=1
759-
760-
local is_thinking=false
761-
local is_responding=false
762-
763-
ollama_generate_json -m "$model" -p "$prompt" |
764-
while IFS= read -r line; do
765-
766-
#_debug "ollama_generate_stream: line: [${line:0:1000}]"
767-
768-
thinking="$(jq '.thinking // empty' <<<"$line")"
769-
thinking=${thinking#\"} # strip first "
770-
thinking=${thinking%\"} # strip last "
771-
if [[ -n "$thinking" ]]; then
772-
if [[ "$is_thinking" == 'false' ]]; then
773-
# first thinking input received
774-
is_thinking=true
775-
printf '\n#### %b' "$thinking"
776-
else
777-
# subsequent thinking input received
778-
printf '%b' "$thinking"
779-
fi
780-
fi
781-
782-
response="$(jq '.response // empty' <<<"$line")"
783-
response=${response#\"} # strip first "
784-
response=${response%\"} # strip last "
785-
if [[ -n "$response" ]]; then
786-
if [[ "$is_responding" == 'false' ]]; then
787-
# first response input received
788-
is_responding=true
789-
printf '\n\n%b' "$response"
790-
else
791-
# subsequent response input received
792-
printf '%b' "$response"
759+
(
760+
ollama_generate_json -m "$model" -p "$prompt" | while IFS= read -r line; do
761+
if ! _is_valid_json "$line"; then continue; fi
762+
if [[ "$OBL_THINKING" == 'on' ]]; then
763+
printf '%s' "$(jq -r '.thinking // empty' <<<"$line")" >&2
793764
fi
794-
fi
795-
done
796-
rc=$? # exit status of the whole pipeline
797-
765+
read -r -d '' response < <(jq -r '.response // empty' <<<"$line")
766+
printf '%s' "$response"
767+
done
768+
exit "${PIPESTATUS[0]}"
769+
) 2> >( _ollama_thinking_stream )
770+
local error_code=$?
798771
OBL_STREAM=0
799-
800-
# Final newline (only on success)
801-
(( rc == 0 )) && printf '\n'
802-
803-
_debug "ollama_generate_stream: exit=$rc"
804-
return $rc
772+
if [[ $error_code -ne 0 ]]; then
773+
_error "ollama_generate_stream: ollama_generate_json failed with code $error_code"
774+
return 1
775+
fi
776+
printf '\n'
777+
return 0
805778
}
806779

807780
# Messages Functions

0 commit comments

Comments (0)