       gpt - annna - Annna the nice friendly bot.
       git clone git://bitreich.org/annna/ git://enlrupgkhuxnvlhsf6lc3fziv5h2hhfrinws65d7roiv6bfj7d652fid.onion/annna/
       ---
       gpt (2165B)
       ---
#!/bin/sh
# gpt - answer a prompt with an LLM: try a remote Ollama model first,
# fall back to a local llama.cpp run if that yields nothing.

export PATH="$HOME/bin:$PATH"

# Generate a reply with a local llama.cpp build.
local_llama() {
        #ggmlbase="/br/ai/ggml"
        ggmlbase="/br/ai/llama.cpp"
        #ggmlbin="./build/bin/gpt-2"
        ggmlbin="./build/bin/llama-cli"
        #ggmlmodel="models/gpt-2-1558M/ggml-model.bin"
        ggmlmodel="models/Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf"
        ggmlntokens="$((69 * 2))"	# max. number of tokens to generate

        cd "$ggmlbase" || return 1

        # If /br/ai/tmp is mounted (e.g. a tmpfs), copy the model there once
        # and load it from that copy.
        if mountpoint -q /br/ai/tmp;
        then
                ggmlbasename="$(basename "${ggmlmodel}")"
                ggmltmppath="/br/ai/tmp/${ggmlbasename}"
                [ ! -r "${ggmltmppath}" ] && cp "$ggmlmodel" /br/ai/tmp
                [ -r "${ggmltmppath}" ] && ggmlmodel="${ggmltmppath}"
        fi

        # Prompt comes from the first argument, or from stdin when no
        # argument is given. The grammar forbids control characters; the
        # first sed keeps only the first non-empty reply line and drops the
        # conversation "> " marker, the second strips surrounding quotes.
        prompt="$1"
        if [ -z "$prompt" ];
        then
                cat \
                | "$ggmlbin" -m "$ggmlmodel" -n "$ggmlntokens" -t 3 \
                        --no-warmup --simple-io --no-display-prompt --grammar 'root ::= ([^\x00-\x1F])*' \
                        -cnv 2>/dev/null \
                        | sed -E '/^$/d;s/^>[[:blank:]]+//;q' \
                        | sed -e 's/^"//;s/"$//;'
        else
                printf "%s\n" "${prompt}" \
                | "$ggmlbin" -m "$ggmlmodel" -n "$ggmlntokens" -t 3 \
                        --no-warmup --simple-io --no-display-prompt --grammar 'root ::= ([^\x00-\x1F])*' \
                        -cnv 2>/dev/null \
                        | sed -E '/^$/d;s/^>[[:blank:]]+//;q' \
                        | sed -e 's/^"//;s/"$//;'
        fi
        #$ggmlbin -m $ggmlmodel -n $ggmlntokens \
        #        --simple-io --no-display-prompt --grammar 'root ::= ([^\x00-\x1F])*' \
        #        -p "$1" 2>/dev/null \
        #        | head -n1 \
        #        | sed -E 's/^[[:blank:]]+//;s/[[:blank:]]*\[end of text\]$//' \
        #        | tr -d '"'
}

# Generate a reply by running ollama through the ollama-gpu wrapper.
remote_llama() {
        prompt="$1"
        ggmlmodel="mannix/llama3.1-8b-abliterated"
        #ggmlmodel="huihui_ai/gpt-oss-abliterated:20b"
        #ggmlmodel="huihui_ai/qwen3-abliterated:16b"
        #ggmlmodel="huihui_ai/gemma3-abliterated:12b"
        #ggmlmodel="huihui_ai/deepseek-r1-abliterated:14b"

        # Prompt from the first argument, or from stdin when no argument is
        # given. head keeps only the first reply line, sed strips quotes.
        if [ -z "$prompt" ];
        then
                cat \
                | ollama-gpu \
                        ollama run \
                                --hidethinking \
                                --nowordwrap \
                                "${ggmlmodel}" \
                | head -n 1 \
                | sed -e 's/^"//;s/"$//;'
        else
                printf "%s\n" "${prompt}" \
                | ollama-gpu \
                        ollama run \
                                --hidethinking \
                                --nowordwrap \
                                "${ggmlmodel}" \
                | head -n 1 \
                | sed -e 's/^"//;s/"$//;'
        fi
}

# Try the remote model first; fall back to the local one if it returns
# nothing. Print a response only if one of the two produced output.
prompt="$1"
response="$(remote_llama "${prompt}")"
[ -z "${response}" ] && response="$(local_llama "${prompt}")"
[ -n "${response}" ] && printf "%s\n" "${response}"
           81