#!/usr/pkg/bin/python
"""Queries AI model openrouter/elephant-alpha for displaying the answer in gopher.

Answer can be wrapped in cowsay by passing 'cowsay' or 'cowthink'
without quotes as the first word in the prompt.
"""
# Copyright (c) 2026 snake_case_nemo
# SPDX-License-Identifier: BSD-2-Clause
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import json
import os
import shlex
import subprocess
import sys

import requests

# Paths to the cow wrappers, keyed by the command word the user may
# put at the start of the prompt.
COW_SAY = {
    "cowsay": "/usr/pkg/bin/cowsay -f bunny",
    "cowthink": "/usr/pkg/bin/cowthink -f bunny"
}
# CGI-style environment variable the gopher server fills with the search string.
ENV_PARAM = "QUERY_STRING"
LLM_URL = "https://openrouter.ai/api/v1/chat/completions"
TOKEN = "xxxxxxxxxxxx"
MODEL = "inclusionai/ling-2.6-flash:free"
# Seconds to wait for the LLM endpoint before giving up, so the gopher
# handler cannot hang forever on a stuck request.
REQUEST_TIMEOUT = 60


def main():
    """Query LLM and print answer. Main function.

    Returns:
        None
    """
    prompt_obj = build_prompt_obj(get_parameter())
    prompt_obj['answer'] = llm_req(prompt_obj['prompt'])
    print_header(prompt_obj)
    if prompt_obj['cow']:
        cow_print(prompt_obj)
    else:
        for line in prompt_obj['answer'].split("\n"):
            g_print(line)


def print_header(prompt_obj: dict) -> None:
    """Print a small header.

    Repeats the prompt and announces the answer.

    Parameters:
        prompt_obj (dict): Contains the prompt.

    Returns:
        None
    """
    g_print("Your prompt: {}\n".format(prompt_obj['prompt']))
    g_print("The answer was:")


def llm_req(prompt: str) -> str:
    """Make ai request.

    Parameters:
        prompt (str): The prompt.

    Returns:
        str: The result from the prompt.
    """
    resp = requests.post(
        url=LLM_URL,
        headers={"Authorization": f"Bearer {TOKEN}"},
        # json= serializes the payload and sets the Content-Type header,
        # replacing the manual data=json.dumps(...) round trip.
        json={
            "model": MODEL,
            "messages": [
                {"role": "user", "content": prompt}
            ]
        },
        timeout=REQUEST_TIMEOUT
    )
    resp_obj = resp.json()
    if "choices" not in resp_obj:
        g_print("Failed to query ai")
        if "error" in resp_obj:
            g_print(resp_obj['error']['message'])
            # 'metadata'/'raw' are not always present in error payloads;
            # use .get() so the error path cannot itself crash.
            g_print(resp_obj['error'].get('metadata', {}).get('raw', ''))
        # Exit 0 so the gopher server still renders the error lines above.
        sys.exit(0)
    return resp_obj['choices'][0]['message']['content']


def build_prompt_obj(prompt: str) -> dict:
    """Build dict for the prompt for later use.

    Contains the prompt and the key "answer" that will be filled later
    with the LLM's answer. Also contains the boolean "cow" which
    indicates that cowsay or cowthink is to be used in the answer. If the
    first word in the prompt is either 'cowsay' or 'cowthink' the key
    "cow_method" is set with the path to the specified cow program.

    Parameters:
        prompt (str): The prompt.

    Returns:
        dict: object with prompt, answer, cow, cow_method
    """
    words_arr = prompt.split()
    # Guard against an empty prompt: words_arr[0] would raise IndexError.
    cow = bool(words_arr) and words_arr[0] in ("cowsay", "cowthink")
    return {
        "cow": cow,
        "cow_method": COW_SAY[words_arr.pop(0)] if cow else "",
        "prompt": " ".join(words_arr),
        "answer": ""
    }


def cow_print(prompt_obj: dict) -> None:
    """Wrap LLM answer in cowsay or cowthink.

    The answer is passed to the cow program on stdin rather than
    interpolated into a shell command line: the old echo/shell approach
    both failed to escape quotes ('\"' is the same string as '"') and let
    LLM output inject arbitrary shell commands.

    Parameters:
        prompt_obj (dict): Contains the answer and the cow program to use.

    Returns:
        None
    """
    cow_out = subprocess.run(
        shlex.split(prompt_obj['cow_method']),
        input=prompt_obj['answer'],
        text=True,
        capture_output=True,
        check=False
    )
    for line in cow_out.stdout.split("\n"):
        g_print(line)


def get_parameter() -> str:
    """Get prompt from environment.

    Returns:
        str: The prompt, or "" when the variable is unset so callers
        never receive None.
    """
    return os.getenv(ENV_PARAM, "")


def g_print(mystr: str, **kwargs) -> None:
    """Print passed string with leading 'i'.

    So gopher displays string correctly.

    Parameters:
        mystr (str): The string to print.

    Returns:
        None
    """
    print(f"i{mystr}", **kwargs)


if __name__ == "__main__":
    main()