
@pwais
Created December 7, 2024 17:23
if __name__ == "__main__":
    import os
    import logging

    # Logger setup added so the snippet runs standalone; the original used a
    # pre-configured `log` object.
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)

    import autogen
    log.info(f"Using autogen {autogen.__version__}")

    # https://microsoft.github.io/autogen/0.2/docs/tutorial/tool-use
    from typing import Annotated, Literal

    Operator = Literal["+", "-", "*", "/"]

    def calculator(a: int, b: int, operator: Annotated[Operator, "operator"]) -> int:
        if operator == "+":
            return a + b
        elif operator == "-":
            return a - b
        elif operator == "*":
            return a * b
        elif operator == "/":
            return int(a / b)
        else:
            raise ValueError("Invalid operator")

    llm_config = {"config_list": [{
        # "model": "gpt-4o-mini",
        "model": "llama3.1-70b-instruct-berkeley",
        "base_url": "https://api.lambdalabs.com/v1/",  # ensure trailing slash!!!
        # "api_rate_limit": 60.0,  # Set to allow up to 60 API requests per second.
        "api_key": os.environ.get("OPENAI_API_KEY"),
        "temperature": 0.9,
    }]}

    from autogen import ConversableAgent

    # Let's first define the assistant agent that suggests tool calls.
    assistant = ConversableAgent(
        name="Assistant",
        system_message="You are a helpful AI assistant. "
        "You can help with simple calculations. "
        "Return 'TERMINATE' when the task is done.",
        llm_config=llm_config,
    )

    # The user proxy agent is used for interacting with the assistant agent
    # and executes tool calls.
    user_proxy = ConversableAgent(
        name="User",
        llm_config=False,
        is_termination_msg=lambda msg: msg.get("content") is not None and "TERMINATE" in msg["content"],
        human_input_mode="NEVER",
    )

    # Register the tool signature with the assistant agent.
    assistant.register_for_llm(name="calculator", description="A simple calculator")(calculator)

    # Register the tool function with the user proxy agent.
    user_proxy.register_for_execution(name="calculator")(calculator)

    chat_result = user_proxy.initiate_chat(assistant, message="What is (44232 + 13312 / (232 - 32)) * 5?")
    print(chat_result)

    assert False  # halt here while debugging
@pwais (Author) commented Dec 7, 2024

Here's what I get:

2024-12-07 17:20:04,038 op   845440 : Using autogen 0.3.0
User (to Assistant):

What is (44232 + 13312 / (232 - 32)) * 5?

--------------------------------------------------------------------------------

>>>>>>>> USING AUTO REPLY...
Traceback (most recent call last):
  File "/opt/labs/lab01_release/main.py", line 256, in <module>
    chat_result = user_proxy.initiate_chat(assistant, message="What is (44232 + 13312 / (232 - 32)) * 5?")
                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                  ....
                  
                  File "/opt/conda/lib/python3.11/site-packages/openai/_base_client.py", line 1041, in _request
    raise self._make_status_error_from_response(err.response) from None
openai.BadRequestError: Error code: 400 - {'id': '', 'object': '', 'created': 0, 'model': '', 'choices': None, 'usage': {'prompt_tokens': 0, 'completion_tokens': 0, 'total_tokens': 0, 'prompt_tokens_details': None, 'completion_tokens_details': None}, 'system_fingerprint': ''}
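
Since the 400 comes back with an empty response body, one guess is that the backend is rejecting the function-calling fields that autogen adds to the request, rather than anything autogen itself does. A minimal sketch (my assumption, not verified) that hits the same endpoint with the raw openai client and a hand-written tools payload, to check whether the Lambda API accepts tool calls at all:

    import os
    from openai import OpenAI

    # Same endpoint and model as the autogen config above.
    client = OpenAI(
        base_url="https://api.lambdalabs.com/v1/",
        api_key=os.environ.get("OPENAI_API_KEY"),
    )

    # Send a request that includes a `tools` schema equivalent to what autogen
    # generates for calculator(). If this 400s too, the problem is the backend's
    # tool-call support, not autogen.
    resp = client.chat.completions.create(
        model="llama3.1-70b-instruct-berkeley",
        messages=[{"role": "user", "content": "What is 2 + 2?"}],
        tools=[{
            "type": "function",
            "function": {
                "name": "calculator",
                "description": "A simple calculator",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "a": {"type": "integer"},
                        "b": {"type": "integer"},
                        "operator": {"type": "string", "enum": ["+", "-", "*", "/"]},
                    },
                    "required": ["a", "b", "operator"],
                },
            },
        }],
    )
    print(resp)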
