init: switch from the Gemini API to a local Ollama model
This commit is contained in:
parent
9923ddb5a8
commit
bbb6581c74
2 changed files with 49 additions and 13 deletions
3
.gitignore
vendored
3
.gitignore
vendored
|
|
@ -1,2 +1 @@
|
||||||
.env
|
.venv/
|
||||||
.venv/
|
|
||||||
|
|
|
||||||
59
main.py
59
main.py
|
|
@ -1,14 +1,51 @@
|
||||||
from google import genai
|
import ollama
|
||||||
from dotenv import load_dotenv
|
import subprocess
|
||||||
import os
|
|
||||||
|
|
||||||
load_dotenv()
|
|
||||||
|
|
||||||
prompt = input("Prompt: ")
|
def shell_command(command: str) -> str:
    """
    Run a shell command.

    Args:
        command: the command to run

    Returns:
        stdout: the stdout
        stderr: the stderr
    """
    # SECURITY: shell=True executes model-supplied text in a real shell.
    # That is the whole point of this tool, but it should only ever be run
    # inside a sandbox/container -- the model can run anything.
    # (renamed `exec` -> `proc`: `exec` shadows the Python builtin)
    proc = subprocess.run(command, encoding="utf-8", shell=True, capture_output=True)
    return f"stdout:\n{proc.stdout}\n\nstderr:\n{proc.stderr}"
|
||||||
|
|
||||||
|
|
||||||
|
tools = [shell_command]  # ollama-python turns this into JSON

messages = [{"role": "user", "content": input("prompt > ")}]

# Agent loop: stream the model's reply; whenever it asks for a tool call,
# run it, append the result as a "tool" message, and loop so the model can
# continue with the output. Exit once a turn completes with no tool call.
while True:
    stream = ollama.chat(model="gpt-oss", messages=messages,
                         tools=tools, stream=True)

    ran_tool = False
    for chunk in stream:
        msg = chunk.message

        # 1️⃣ the model is asking us to run something
        if msg.tool_calls:
            for call in msg.tool_calls:
                # BUG FIX: ollama-python tool calls are pydantic objects,
                # not dicts -- `call["function"]["name"]` raises TypeError.
                # Use attribute access; `arguments` itself is a mapping.
                if call.function.name == "shell_command":
                    cmd = call.function.arguments["command"]
                    output = shell_command(cmd)
                    messages.append({
                        "role": "tool",
                        "name": "shell_command",
                        "content": output,
                    })
                    ran_tool = True

        # 2️⃣ normal user-visible text
        elif msg.content:
            print(msg.content, end="", flush=True)

    # NOTE(review): the assistant's own turns (content + tool_calls) are
    # never appended to `messages`, so the model only ever sees the user
    # prompt and raw tool outputs -- consider accumulating `msg` into the
    # history as well; verify against the ollama tool-calling examples.

    # loop again if we just satisfied a tool call
    if not ran_tool:
        break
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue