# DM-aioaiagent
## Urls
* [PyPI](https://pypi.org/project/dm-aioaiagent)
* [GitHub](https://github.com/MykhLibs/dm-aioaiagent)
### The package contains both `asynchronous` and `synchronous` clients
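## Installation

Install the package from PyPI (standard pip usage):

```bash
pip install dm-aioaiagent
```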
## Usage
The synchronous analogue of `DMAioAIAgent` is the `DMAIAgent` client.
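A minimal sketch of the synchronous client, assuming its constructor and `run` method mirror the async API shown below:

```python
from dm_aioaiagent import DMAIAgent

# assumption: the sync client accepts the same arguments as DMAioAIAgent
ai_agent = DMAIAgent("Your custom system message with role, backstory and goal")
answer = ai_agent.run("Hello!")  # no await with the synchronous client
```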
### Windows Setup
On Windows, some async libraries do not work correctly with the default Proactor event loop, so switch to the selector event loop policy before running the agent:
```python
import asyncio
import sys
if sys.platform == "win32":
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
```
### API Key Setup
You can set your OpenAI API key in the `OPENAI_API_KEY` environment variable or pass it as an argument to the agent.
**Use `load_dotenv` to load variables from a `.env` file.**
```python
from dotenv import load_dotenv
load_dotenv()
```
### Use agent *with* inner memory and run *single* message
By default, the agent uses inner memory to store the conversation history.
(You can set the *maximum number of messages kept in memory* with the `max_memory_messages` init argument.)
```python
import asyncio
from dm_aioaiagent import DMAioAIAgent


async def main():
    # define a system message
    system_message = "Your custom system message with role, backstory and goal"

    # (optional) define a list of tools, if you want to use them
    tools = [...]

    # define an OpenAI model, default is "gpt-4o-mini"
    model_name = "gpt-4o"

    # create an agent
    ai_agent = DMAioAIAgent(system_message, tools, model=model_name)
    # if you don't want to see the input and output messages from the agent,
    # you can set the `input_output_logging=False` init argument

    # call the agent
    answer = await ai_agent.run("Hello!")

    # call the agent again; the previous exchange is kept in inner memory
    answer = await ai_agent.run("I want to know the weather in Kyiv")

    # get the full conversation history
    conversation_history = ai_agent.memory_messages

    # clear the conversation history
    ai_agent.clear_memory_messages()


if __name__ == "__main__":
    asyncio.run(main())
```
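The `max_memory_messages` limit mentioned above is passed at construction time; a minimal sketch (the exact trimming behavior is defined by the library):

```python
from dm_aioaiagent import DMAioAIAgent

# keep at most the 10 most recent messages in inner memory
ai_agent = DMAioAIAgent("Your custom system message", max_memory_messages=10)
```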
### Use agent *without* inner memory and run *multiple* messages
If you want to manage the conversation history yourself, disable the inner memory by setting `is_memory_enabled=False`.
```python
import asyncio
from dm_aioaiagent import DMAioAIAgent


async def main():
    # define a system message
    system_message = "Your custom system message with role, backstory and goal"

    # (optional) define a list of tools, if you want to use them
    tools = [...]

    # define an OpenAI model, default is "gpt-4o-mini"
    model_name = "gpt-4o"

    # create an agent without inner memory
    ai_agent = DMAioAIAgent(system_message, tools, model=model_name,
                            is_memory_enabled=False)
    # if you don't want to see the input and output messages from the agent,
    # you can set input_output_logging=False

    # define the conversation message(s)
    messages = [
        {"role": "user", "content": "Hello!"}
    ]

    # call the agent
    new_messages = await ai_agent.run_messages(messages)

    # add new_messages to messages
    messages.extend(new_messages)

    # define the next conversation message
    messages.append(
        {"role": "user", "content": "I want to know the weather in Kyiv"}
    )

    # call the agent again with the full history
    new_messages = await ai_agent.run_messages(messages)


if __name__ == "__main__":
    asyncio.run(main())
```
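To read the model's reply from the returned messages, take the content of the last one (mirroring the image vision example below; the exact message objects are defined by the library):

```python
answer = new_messages[-1].content
```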
### Image vision
```python
from dm_aioaiagent import DMAIAgent, OpenAIImageMessageContent


def main():
    # create an agent (synchronous client)
    ai_agent = DMAIAgent(agent_name="image_vision", model="gpt-4o")

    # create an image message content
    # NOTE: the text argument is optional
    img_content = OpenAIImageMessageContent(image_url="https://your.domain/image",
                                            text="Hello, what is shown in the photo?")

    # define the conversation messages
    messages = [
        {"role": "user", "content": "Hello!"},
        {"role": "user", "content": img_content},
    ]

    # call the agent
    new_messages = ai_agent.run_messages(messages)
    answer = new_messages[-1].content


if __name__ == "__main__":
    main()
```