Last active
January 13, 2026 15:45
-
-
Save pythonhacker/b72060c4504bfd838f64110acbf9cb5f to your computer and use it in GitHub Desktop.
A Python cmd.Cmd shell for talking with Ollama model with context
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import ollama | |
| import cmd | |
class OllamaShell(cmd.Cmd):
    """Interactive console for chatting with an LLM served by a local Ollama.

    Any input line that is not a recognized command is sent to the model as a
    user prompt.  The running conversation is kept in ``self.messages`` so the
    model retains context between prompts.  The Ollama service needs to be
    running locally.
    """

    prompt = "ollama> "

    def __init__(self, model='llama3.2:1b'):
        # Model tag used for chat requests; must be available in local Ollama.
        self.model = model
        # Last successfully asked question (None until the first prompt).
        self.last = None
        # Conversation history; index 0 is the persistent system message.
        self.messages = [
            {
                "role": "system",
                "content": "You are a helpful, precise assistant."
            }
        ]
        print(f"Configured model is {model}")
        super().__init__()

    def do_last(self, arg):
        """ Print last asked question """
        if self.last:
            print(self.last)

    def do_reset(self, arg):
        """ Reset conversation context """
        # Keep only the system message so the assistant persona survives.
        self.messages = self.messages[:1]
        print("Reset conversation context.")

    def do_model(self, model):
        """ Change the model """
        model = model.strip()
        if not model:
            # Bare "model" command: report the active model instead of
            # emitting a confusing "model  not available" error.
            print(f"Current model is {self.model}.")
            return
        models = ollama.list()
        present = any(m.model == model for m in models["models"])
        if present:
            self.model = model
            print(f"Model set to {model}.")
        else:
            print(f"error - model {model} not available in ollama.")

    def emptyline(self):
        """ Override this otherwise previous lines are repeated in the prompt """
        return

    def default(self, line: str):
        """
        Called when the input line does not match a known command.
        We interpret the entire line as a prompt for the LLM.
        """
        line = line.strip()
        if not line:
            return
        self.messages.append({"role": "user", "content": line})
        # Record the question even if the request below fails, so do_last
        # still reports what was asked.
        self.last = line
        try:
            response = ollama.chat(
                model=self.model,
                messages=self.messages
            )
            model_response = response["message"]["content"]
        except Exception as exc:
            # Roll back the user turn so a failed request does not poison
            # the context sent along with the next prompt.
            self.messages.pop()
            print(f"[error] {exc}")
            return
        self.messages.append(
            {"role": "assistant", "content": model_response}
        )
        print(model_response)

    def do_EOF(self, arg):
        """ Exit the shell on Ctrl-D. """
        print("Goodbye.")
        return True
Author
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Related CPython bug: python/cpython#143804