@@ -161,8 +161,8 @@ if [ -n "$RC" ]; then
161161fi
162162export PATH="$INSTALL_DIR:$PATH"
163163
164- # ─── Ollama ──────── ───────────────────────────────────────
165- echo -e " ${GREEN} [4/4]${NC} Checking Ollama..."
164+ # ─── Ollama + Model ───────────────────────────────────────
165+ echo -e "${GREEN}[4/4]${NC} Setting up Ollama + AI model..."
166166
167167OLLAMA_BIN=""
168168if command -v ollama &>/dev/null; then
if [ -n "$OLLAMA_BIN" ]; then
  echo -e "${GREEN}Ollama found ✓${NC}"

  # Skip all setup when the hackcode-uncensored alias already exists.
  if $OLLAMA_BIN list 2>/dev/null | grep -q "hackcode-uncensored"; then
    echo -e "${GREEN}hackcode-uncensored model ready ✓${NC}"
  else
    # Pick the best base model for the available RAM.
    # Start from an 8 GB default for unknown platforms.
    RAM_GB=8
    case "$PLATFORM" in
      macos) RAM_GB=$(sysctl -n hw.memsize 2>/dev/null | awk '{printf "%d", $1/1073741824}') ;;
      linux) RAM_GB=$(awk '/MemTotal/{printf "%d", $2/1048576}' /proc/meminfo 2>/dev/null) ;;
    esac
    # Guard: a failed sysctl/awk pipeline leaves RAM_GB empty, which would
    # make the numeric comparisons below abort with a syntax error.
    case "$RAM_GB" in ''|*[!0-9]*) RAM_GB=8 ;; esac

    # Largest model that fits in RAM, smallest as last resort.
    if [ "$RAM_GB" -ge 24 ]; then
      BASE_MODEL="tripolskypetr/qwen3.5-uncensored-aggressive:35b"
      MODEL_DESC="Qwen3.5-35B-A3B MoE Uncensored (~21GB)"
    elif [ "$RAM_GB" -ge 8 ]; then
      BASE_MODEL="qwen3:8b"
      MODEL_DESC="Qwen3-8B (~5GB)"
    else
      BASE_MODEL="tripolskypetr/qwen3.5-uncensored-aggressive:4b"
      MODEL_DESC="Qwen3.5-4B Uncensored (~3GB)"
    fi

    echo ""
    echo -e "${DIM}RAM: ${RAM_GB}GB — pulling ${BOLD}${MODEL_DESC}${NC}"
    echo ""

    # Try the primary pick, then the smaller fallbacks.  Skip duplicate
    # tags (the primary pick may equal a fallback), and keep stderr
    # visible so the user sees download progress for multi-GB pulls.
    PULLED=false
    TRIED=""
    for TRY_MODEL in "$BASE_MODEL" "qwen3:8b" "tripolskypetr/qwen3.5-uncensored-aggressive:4b" "qwen3:4b"; do
      case " $TRIED " in *" $TRY_MODEL "*) continue ;; esac
      TRIED="$TRIED $TRY_MODEL"
      if $OLLAMA_BIN pull "$TRY_MODEL"; then
        BASE_MODEL="$TRY_MODEL"
        PULLED=true
        break
      fi
      echo -e "${DIM}$TRY_MODEL not available, trying next...${NC}"
    done

    if [ "$PULLED" = true ]; then
      # Register whichever model was pulled under the hackcode-uncensored
      # alias via a generated Modelfile (printf avoids heredoc
      # terminator-indentation pitfalls inside this nested block).
      HACKCODE_CFG="${HOME}/.config/hackcode"
      mkdir -p "$HACKCODE_CFG"
      printf 'FROM %s\nPARAMETER temperature 0.7\nPARAMETER num_ctx 32768\n' \
        "$BASE_MODEL" > "${HACKCODE_CFG}/Modelfile"
      # Only report success when create actually succeeded.
      if $OLLAMA_BIN create hackcode-uncensored -f "${HACKCODE_CFG}/Modelfile" 2>/dev/null; then
        echo -e "${GREEN}Model ready as ${BOLD}hackcode-uncensored${NC}${GREEN} ✓${NC}"
      else
        echo -e "${RED}Could not create hackcode-uncensored alias${NC}"
        echo -e "${DIM}Run: ollama create hackcode-uncensored -f ${HACKCODE_CFG}/Modelfile${NC}"
      fi
    else
      echo -e "${RED}Could not pull any model${NC}"
      echo -e "${DIM}Run: ollama pull qwen3:8b && hackcode --setup${NC}"
    fi
  fi
else
  echo -e "${RED}Ollama not found${NC} — required for local AI"
  if [ "$PLATFORM" = "macos" ]; then
    echo -e "${DIM}Install: brew install ollama${NC} or ${DIM}https://ollama.ai/download${NC}"
  else
    echo -e "${DIM}Install: curl -fsSL https://ollama.ai/install.sh | sh${NC}"
  fi
fi
195241
0 commit comments