PIMORONI BADGER 2040 W
# ==========================================================
# Project: Practice project with the OpenAI ChatGPT API via MicroPython
# Device: PIMORONI BADGER 2040 W
# Language: MicroPython + Python (Flask)
# Author: Juan Carmona Valdez
# Date: November 2025
# Description:
#   Connects the PIMORONI BADGER 2040 W to a Flask server
#   that uses the OpenAI ChatGPT API to generate a story.
#   Data flows via HTTP POST requests, enabling bidirectional
#   communication: the server creates a story and offers
#   3 options to continue it.
# ==========================================================
## Requirements
- Python 3.10+
- Flask, OpenAI, Requests (install command sketched below)
- Badger 2040 W firmware v0.0.5
  Firmware download: pimoroni-badger2040w-v0.0.5-micropython-with-badger-os.uf2
  https://github.com/pimoroni/badger2040/releases/tag/v0.0.5
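Assuming a standard Python 3.10+ environment, the server-side dependencies above can be installed with pip (package names as published on PyPI):
```
pip install flask openai requests
```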
## Usage
Run the server:
The file server_in_lap.py runs on a computer with:
```
python server_in_lap.py
```
# Note: this prints Running on http://ip:port
That address must match SERVER_URL in stories.py.
Copy stories.py to the Badger at:
/examples/stories.py
## Badger controls
- A = Option 1
- B = Option 2
- C = Option 3
- UP/DOWN = Scroll
## The Badger sends:
{
  "historia": "<historia_nueva>" or "<historia_actual>",
  "eleccion": "0" | "1" | "2" | "3"
}
## The server responds:
```
### HISTORIA:
...
### OPCIONES:
1) ...
2) ...
3) ...
```
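For a quick manual test from any machine (the host and port below are placeholders; use the address Flask prints at startup), the same exchange the Badger performs can be reproduced with the requests library:
```
import requests

# Placeholder address; replace with the "Running on http://ip:port" value printed by Flask
SERVER_URL = "http://192.168.1.50:5001/story"

# Start a new story ("eleccion": "0"), using the same payload shape the Badger sends
payload = {"historia": "<historia_nueva>", "eleccion": "0"}
r = requests.post(SERVER_URL, json=payload)
print(r.json()["respuesta"])  # prints the "### HISTORIA:" / "### OPCIONES:" block
```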
## Technical notes
The server keeps an independent story per client IP.
Every response always contains:
- one story
- three options
Texts are kept short (<20 words) so they are easy to read on the Badger,
but they can grow longer and be scrolled with the UP/DOWN buttons.
## server_in_lap.py
```
from flask import Flask, request, jsonify
from openai import OpenAI
import logging

app = Flask(__name__)
client = OpenAI(api_key="OPENAI_API_KEY")  # placeholder: replace with your real API key

logging.basicConfig(level=logging.INFO,
                    format="%(asctime)s [%(levelname)s] %(message)s")
logger = logging.getLogger(__name__)

# Dictionary that stores each client's story, keyed by client IP
historia_estado = {}

PROMPT_BASE = """
Eres un generador de historias interactivas.
Crea historias cortas de 20 palabras o menos,
usando lenguaje seguro y adecuado para todo público.
Ofrece EXACTAMENTE 3 opciones numeradas.
Formato de salida obligatorio:
### HISTORIA:
(texto)
### OPCIONES:
1) ...
2) ...
3) ...
"""


@app.route("/story", methods=["POST"])
def story():
    data = request.get_json()
    logger.info(f"JSON recibido: {data}")

    if not data or "eleccion" not in data or "historia" not in data:
        return jsonify({"error": "Faltan datos"}), 400

    ip_cliente = request.remote_addr
    eleccion = data["eleccion"]
    historia_input = data["historia"]

    # Start of a new story
    if eleccion == "0" or ip_cliente not in historia_estado:
        prompt = f"""
{PROMPT_BASE}
Inicia una historia nueva:
{historia_input}
Continúa la historia de manera segura.
"""
        historia_estado[ip_cliente] = ""
    else:
        # Continuation of the previous story
        prompt = f"""
{PROMPT_BASE}
Historia actual:
{historia_estado[ip_cliente]}
Última elección del jugador:
{eleccion}
Continúa la historia de manera segura.
"""

    logger.info("Prompt enviado a OpenAI:\n" + prompt)

    try:
        respuesta = client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[{"role": "user", "content": prompt}]
        ).choices[0].message.content
        logger.info("Respuesta enviada al cliente:\n" + respuesta)
        # Keep the full story for this client
        historia_estado[ip_cliente] += "\n" + respuesta
    except Exception as e:
        logger.error("Error al contactar con OpenAI: " + str(e))
        return jsonify({"error": "OpenAI connection failed"}), 500

    return jsonify({"respuesta": respuesta})


if __name__ == "__main__":
    logger.info("===== SERVIDOR DE HISTORIAS INICIADO =====")
    app.run(host="0.0.0.0", port=5001)
```
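Note that api_key="OPENAI_API_KEY" above is a literal placeholder. A common alternative (an assumption, not part of the original code) is to read the key from an environment variable instead of hard-coding it:
```
import os
from openai import OpenAI

# Assumes the OPENAI_API_KEY environment variable has been exported beforehand
client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
```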
## stories.py
```
import badger2040
from badger2040 import WIDTH, HEIGHT
import urequests
import ujson
import time
import network
from WIFI_CONFIG import SSID, PSK, COUNTRY

# ====== Badger setup ======
display = badger2040.Badger2040()
display.led(128)
display.set_update_speed(badger2040.UPDATE_FAST)

LINE_HEIGHT = 16
TEXT_SIZE = 1

# ====== WiFi connection ======
display.set_pen(15)
display.clear()
display.set_pen(0)
display.text("Conectando WiFi...", 10, 40, WIDTH)
display.update()

wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(SSID, PSK)
while not wlan.isconnected():
    time.sleep(0.5)

net = wlan.ifconfig()
display.clear()
display.set_pen(0)
display.rectangle(0, 0, WIDTH, 20)
display.set_pen(15)
display.text("Stories", 3, 4)
display.set_pen(0)
display.text("> LOCAL IP: {}".format(net[0]), 0, 35 + int(LINE_HEIGHT / 2), WIDTH)
display.update()
time.sleep(1)

# ====== Server ======
SERVER_URL = "http://172.16.4.29:5001/story"

# ====== Scroll state ======
scroll_offset = 0
page_lines = []


def parsear_historia(texto):
    if not texto:
        return ["Error: historia vacía"]
    lines = []

    # Extract the HISTORIA section
    inicio = texto.find("### HISTORIA:")
    fin = texto.find("### OPCIONES:")
    if inicio == -1:
        inicio = 0
    else:
        inicio += len("### HISTORIA:")
    if fin == -1:
        fin = len(texto)
    historia = texto[inicio:fin].strip().replace("\r", "").split("\n")

    # Wrap each line to a maximum width
    max_chars = 50
    for linea in historia:
        while len(linea) > max_chars:
            lines.append(linea[:max_chars])
            linea = linea[max_chars:]
        lines.append(linea)

    # Extract the OPCIONES section
    inicio_op = texto.find("### OPCIONES:")
    if inicio_op != -1:
        opciones = texto[inicio_op + len("### OPCIONES:"):].strip().replace("\r", "").split("\n")
        lines.append("")  # visual separation
        for opcion in opciones:
            while len(opcion) > max_chars:
                lines.append(opcion[:max_chars])
                opcion = opcion[max_chars:]
            lines.append(opcion)
    return lines


def mostrar_texto_scroll(lines, offset):
    display.set_pen(15)
    display.rectangle(0, 20, WIDTH, HEIGHT - 20)  # clear the text area
    display.set_pen(0)
    y = 25
    for i in range(offset, min(offset + (HEIGHT // LINE_HEIGHT), len(lines))):
        display.text(lines[i], 5, y, WIDTH, TEXT_SIZE)
        y += LINE_HEIGHT
    display.update()


def pedir_historia(historia, eleccion):
    data = {
        "historia": historia if historia else "",
        "eleccion": eleccion if eleccion else ""
    }
    try:
        r = urequests.post(SERVER_URL, json=data)
        r_json = r.json()
        r.close()
        return r_json.get("respuesta", "")
    except Exception as e:
        print("ERROR EN POST:", e)
        return ""


# ====== First story ======
historia_actual = pedir_historia("<historia_nueva>", "0")
page_lines = parsear_historia(historia_actual)
scroll_offset = 0
mostrar_texto_scroll(page_lines, scroll_offset)

# ====== Main loop ======
prev_a = prev_b = prev_c = False
while True:
    # Read buttons
    a = display.pressed(badger2040.BUTTON_A)
    b = display.pressed(badger2040.BUTTON_B)
    c = display.pressed(badger2040.BUTTON_C)
    up = display.pressed(badger2040.BUTTON_UP)
    down = display.pressed(badger2040.BUTTON_DOWN)

    # ====== Scroll ======
    if up:
        scroll_offset = max(0, scroll_offset - 1)
        mostrar_texto_scroll(page_lines, scroll_offset)
    if down:
        scroll_offset = min(max(0, len(page_lines) - (HEIGHT // LINE_HEIGHT)), scroll_offset + 1)
        mostrar_texto_scroll(page_lines, scroll_offset)

    # ====== Choices ======
    if a and not prev_a:
        historia_actual = pedir_historia("<historia_actual>", "1")
        page_lines = parsear_historia(historia_actual)
        scroll_offset = 0
        mostrar_texto_scroll(page_lines, scroll_offset)
    if b and not prev_b:
        historia_actual = pedir_historia("<historia_actual>", "2")
        page_lines = parsear_historia(historia_actual)
        scroll_offset = 0
        mostrar_texto_scroll(page_lines, scroll_offset)
    if c and not prev_c:
        historia_actual = pedir_historia("<historia_actual>", "3")
        page_lines = parsear_historia(historia_actual)
        scroll_offset = 0
        mostrar_texto_scroll(page_lines, scroll_offset)

    prev_a, prev_b, prev_c = a, b, c
    time.sleep(0.1)
```
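stories.py imports SSID, PSK and COUNTRY from a WIFI_CONFIG.py file stored on the Badger, following the usual Badger OS convention. A minimal sketch of that file, with placeholder values to replace with your own network details:
```
# WIFI_CONFIG.py -- placeholder credentials, replace with your own
SSID = "your-network-name"
PSK = "your-network-password"
COUNTRY = "GB"  # two-letter country code used for WiFi regulations
```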