Created
July 13, 2025 06:59
-
-
Save AllenFang/ab1299f2fcd7ac7a7c22462bd5b0f187 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import openai

# NOTE: replace with a real key (or load from an environment variable).
openai.api_key = "YOUR_API_KEY"


def agent_response(message):
    """Send one user message to the chat model and return the reply text."""
    chat = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": message}],
        max_tokens=50,
    )
    # The legacy SDK returns a dict-like object; drill down to the text.
    return chat["choices"][0]["message"]["content"]


# Usage
print(agent_response("Hello!"))
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def get_weather_by_city(city: str) -> str:
    """Return a short weather summary string for *city*.

    Uses a hard-coded lookup table as a stand-in for a live weather API.
    Unknown cities get a friendly fallback message instead of raising.
    """
    # In real use, connect to a live weather API here.
    fake_weather = {
        "London": "Cloudy, 18°C",
        "New York": "Sunny, 24°C",
        "Beijing": "Rainy, 21°C",
        "Tokyo": "Clear, 27°C"
    }
    # BUG FIX: the original line was missing the closing parenthesis on
    # .get(...), which made this function a SyntaxError.
    return fake_weather.get(city, "Sorry, I don't have the weather info for that city.")
# Tool schema in the OpenAI function-calling format: passed via the `tools`
# parameter so the model can choose to call get_weather_by_city with a
# structured arguments object.
weather_tool = {
    "type": "function",
    "function": {
        "name": "get_weather_by_city",
        "description": "Get today's weather information for a given city.",
        # JSON Schema describing the arguments the model must supply.
        "parameters": {
            "type": "object",
            "properties": {
                "city": {
                    "type": "string",
                    "description": "The name of the city (e.g., London, New York, Beijing, Tokyo)"
                }
            },
            "required": ["city"]
        }
    }
}
import json

import openai


def agent_response(user_input):
    """Answer *user_input*, letting the model call the weather tool if needed.

    Step 1 sends the user message together with the tool schema. If the model
    requests a tool call, step 2 runs the tool locally and sends its result
    back to the model for a final natural-language answer.

    Returns the assistant's reply text (str).
    """
    # Step 1: Send the user message, with tool descriptions, to OpenAI
    chat_completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-0613",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": user_input}
        ],
        tools=[weather_tool],
        tool_choice="auto",  # Let the model decide when to call the tool
    )
    response = chat_completion.choices[0].message
    # Step 2: If tool_call is requested, execute the tool in Python
    if response.tool_calls:
        for tool_call in response.tool_calls:
            fn_name = tool_call.function.name
            # SECURITY FIX: the arguments string comes from the model —
            # untrusted input. It is documented to be JSON, so parse it with
            # json.loads; never eval() model output.
            arguments = json.loads(tool_call.function.arguments)
            if fn_name == "get_weather_by_city":
                city = arguments["city"]
                weather_info = get_weather_by_city(city)
                # BUG FIX: run the follow-up only inside the matched branch;
                # previously an unrecognized tool name left weather_info
                # unbound and the follow-up raised NameError.
                # Send the tool result back to the model for the final answer.
                followup = openai.ChatCompletion.create(
                    model="gpt-3.5-turbo-0613",
                    messages=[
                        {"role": "system", "content": "You are a helpful assistant."},
                        {"role": "user", "content": user_input},
                        response,  # the original assistant response with tool call
                        {
                            "role": "tool",
                            "tool_call_id": tool_call.id,
                            "name": fn_name,
                            "content": weather_info
                        }
                    ]
                )
                return followup.choices[0].message.content
    # Otherwise, just return the model's normal message
    return response.content
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment