# VM-X SDK for Python LangChain
## Description
VM-X AI SDK client for Python LangChain
## Installation
```bash
pip install langchain-vm-x-ai
```
```bash
poetry add langchain-vm-x-ai
```
## Usage
### Non-Streaming
```python
from langchain_vmxai import ChatVMX

llm = ChatVMX(
    resource="default",
)

messages = [
    (
        "system",
        "You are a helpful translator. Translate the user sentence to French.",
    ),
    ("human", "I love programming."),
]
result = llm.invoke(messages)
```
### Streaming
```python
from langchain_vmxai import ChatVMX

llm = ChatVMX(
    resource="default",
)

messages = [
    (
        "system",
        "You are a helpful translator. Translate the user sentence to French.",
    ),
    ("human", "I love programming."),
]

for chunk in llm.stream(messages):
    print(chunk.content, end="", flush=True)
```
### Function Calling
#### Decorator
```python
from langchain_core.messages import HumanMessage, ToolMessage
from langchain_core.tools import tool
from langchain_vmxai import ChatVMX


@tool
def add(a: int, b: int) -> int:
    """Adds a and b.

    Args:
        a: first int
        b: second int
    """
    return a + b


@tool
def multiply(a: int, b: int) -> int:
    """Multiplies a and b.

    Args:
        a: first int
        b: second int
    """
    return a * b


tools = [add, multiply]
llm = ChatVMX(
    resource="default",
)

llm_with_tools = llm.bind_tools(tools)
query = "What is 3 * 12? Also, what is 11 + 49?"

messages = [HumanMessage(query)]
ai_msg = llm_with_tools.invoke(messages)
messages.append(ai_msg)

for tool_call in ai_msg.tool_calls:
    selected_tool = {"add": add, "multiply": multiply}[tool_call["name"].lower()]
    tool_output = selected_tool.invoke(tool_call["args"])
    messages.append(ToolMessage(tool_output, tool_call_id=tool_call["id"]))

print(llm_with_tools.invoke(messages))
```
#### Pydantic
```python
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_vmxai import ChatVMX
from langchain_vmxai.output_parsers.tools import PydanticToolsParser


# Note that the docstrings here are crucial, as they will be passed along
# to the model along with the class name.
class add(BaseModel):
    """Add two integers together."""

    a: int = Field(..., description="First integer")
    b: int = Field(..., description="Second integer")


class multiply(BaseModel):
    """Multiply two integers together."""

    a: int = Field(..., description="First integer")
    b: int = Field(..., description="Second integer")


tools = [add, multiply]

llm = ChatVMX(
    resource="default",
)

llm_with_tools = llm.bind_tools(tools) | PydanticToolsParser(tools=[multiply, add])

query = "What is 3 * 12? Also, what is 11 + 49?"

print(llm_with_tools.invoke(query))
```
#### Function Calling Streaming
```python
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_vmxai import ChatVMX
from langchain_vmxai.output_parsers.tools import PydanticToolsParser


# Note that the docstrings here are crucial, as they will be passed along
# to the model along with the class name.
class add(BaseModel):
    """Add two integers together."""

    a: int = Field(..., description="First integer")
    b: int = Field(..., description="Second integer")


class multiply(BaseModel):
    """Multiply two integers together."""

    a: int = Field(..., description="First integer")
    b: int = Field(..., description="Second integer")


tools = [add, multiply]

llm = ChatVMX(
    resource="default",
)

llm_with_tools = llm.bind_tools(tools) | PydanticToolsParser(tools=[multiply, add])

query = "What is 3 * 12? Also, what is 11 + 49?"

for chunk in llm_with_tools.stream(query):
    print(chunk)
```
### Structured Output
```python
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_vmxai import ChatVMX


class Joke(BaseModel):
    setup: str = Field(description="The setup of the joke")
    punchline: str = Field(description="The punchline to the joke")


llm = ChatVMX(resource="default")
structured_llm = llm.with_structured_output(Joke, strict=True)

print(structured_llm.invoke("Tell me a joke about cats"))
```
## Limitations
1. Async client is not supported.
2. The `json_mode` and `json_schema` structured-output modes are not supported.
## [Change Log](./CHANGELOG.md)
Raw data
{
"_id": null,
"home_page": "https://github.com/vm-x-ai/vm-x-ai-sdk",
"name": "langchain-vm-x-ai",
"maintainer": "VM-X Engineering",
"docs_url": null,
"requires_python": "<4,>=3.8.1",
"maintainer_email": "eng@vm-x.ai",
"keywords": "VM-X, AI, SDK, Python, LangChain",
"author": "VM-X Engineering",
"author_email": "eng@vm-x.ai",
"download_url": "https://files.pythonhosted.org/packages/c5/39/47452a3cdd84c494e3125033445e674f340de10db5e1d8668f20b680ba4c/langchain_vm_x_ai-1.1.5.tar.gz",
"platform": null,
"description": "# VM-X SDK for Python Langchain\n\n## Description\n\nVM-X AI SDK client for Python Langchain\n\n## Installation\n\n```bash\npip install langchain-vm-x-ai\n```\n\n```bash\npoetry add langchain-vm-x-ai\n```\n\n## Usage\n\n### Non-Streaming\n\n```python\nfrom langchain_vmxai import ChatVMX\n\nllm = ChatVMX(\n resource=\"default\",\n)\n\nmessages = [\n (\n \"system\",\n \"You are a helpful translator. Translate the user sentence to French.\",\n ),\n (\"human\", \"I love programming.\"),\n]\nresult = llm.invoke(messages)\n```\n\n### Streaming\n\n```python\nfrom langchain_vmxai import ChatVMX\n\nllm = ChatVMX(\n resource=\"default\",\n)\n\nmessages = [\n (\n \"system\",\n \"You are a helpful translator. Translate the user sentence to French.\",\n ),\n (\"human\", \"I love programming.\"),\n]\n\nfor chunk in llm.stream(messages):\n print(chunk.content, end=\"\", flush=True)\n```\n\n### Function Calling\n\n#### Decorator\n\n```python\nfrom langchain_core.messages import HumanMessage, ToolMessage\nfrom langchain_core.tools import tool\nfrom langchain_vmxai import ChatVMX\n\n\n@tool\ndef add(a: int, b: int) -> int:\n \"\"\"Adds a and b.\n\n Args:\n a: first int\n b: second int\n \"\"\"\n return a + b\n\n\n@tool\ndef multiply(a: int, b: int) -> int:\n \"\"\"Multiplies a and b.\n\n Args:\n a: first int\n b: second int\n \"\"\"\n return a * b\n\n\ntools = [add, multiply]\nllm = ChatVMX(\n resource=\"default\",\n)\n\nllm_with_tools = llm.bind_tools(tools)\nquery = \"What is 3 * 12? 
Also, what is 11 + 49?\"\n\nmessages = [HumanMessage(query)]\nai_msg = llm_with_tools.invoke(messages)\nmessages.append(ai_msg)\n\nfor tool_call in ai_msg.tool_calls:\n selected_tool = {\"add\": add, \"multiply\": multiply}[tool_call[\"name\"].lower()]\n tool_output = selected_tool.invoke(tool_call[\"args\"])\n messages.append(ToolMessage(tool_output, tool_call_id=tool_call[\"id\"]))\n\nprint(llm_with_tools.invoke(messages))\n```\n\n#### Pydantic\n\n```python\nfrom langchain_core.pydantic_v1 import BaseModel, Field\nfrom langchain_vmxai import ChatVMX\nfrom langchain_vmxai.output_parsers.tools import PydanticToolsParser\n\n\n# Note that the docstrings here are crucial, as they will be passed along\n# to the model along with the class name.\nclass add(BaseModel):\n \"\"\"Add two integers together.\"\"\"\n\n a: int = Field(..., description=\"First integer\")\n b: int = Field(..., description=\"Second integer\")\n\n\nclass multiply(BaseModel):\n \"\"\"Multiply two integers together.\"\"\"\n\n a: int = Field(..., description=\"First integer\")\n b: int = Field(..., description=\"Second integer\")\n\n\ntools = [add, multiply]\n\nllm = ChatVMX(\n resource=\"default\",\n)\n\nllm_with_tools = llm.bind_tools(tools) | PydanticToolsParser(tools=[multiply, add])\n\nquery = \"What is 3 * 12? 
Also, what is 11 + 49?\"\n\nprint(llm_with_tools.invoke(query))\n\n```\n\n#### Function Calling Streaming\n\n```python\nfrom langchain_core.pydantic_v1 import BaseModel, Field\nfrom langchain_vmxai import ChatVMX\nfrom langchain_vmxai.output_parsers.tools import PydanticToolsParser\n\n\n# Note that the docstrings here are crucial, as they will be passed along\n# to the model along with the class name.\nclass add(BaseModel):\n \"\"\"Add two integers together.\"\"\"\n\n a: int = Field(..., description=\"First integer\")\n b: int = Field(..., description=\"Second integer\")\n\n\nclass multiply(BaseModel):\n \"\"\"Multiply two integers together.\"\"\"\n\n a: int = Field(..., description=\"First integer\")\n b: int = Field(..., description=\"Second integer\")\n\n\ntools = [add, multiply]\n\nllm = ChatVMX(\n resource=\"default\",\n)\n\nllm_with_tools = llm.bind_tools(tools) | PydanticToolsParser(tools=[multiply, add])\n\nquery = \"What is 3 * 12? Also, what is 11 + 49?\"\n\nfor chunk in llm_with_tools.stream(query):\n print(chunk)\n```\n\n### Structured Output\n\n```python\nfrom langchain_core.pydantic_v1 import BaseModel, Field\nfrom langchain_vmxai import ChatVMX\n\n\nclass Joke(BaseModel):\n setup: str = Field(description=\"The setup of the joke\")\n punchline: str = Field(description=\"The punchline to the joke\")\n\n\nllm = ChatVMX(resource=\"default\")\nstructured_llm = llm.with_structured_output(Joke, strict=True)\n\nprint(structured_llm.invoke(\"Tell me a joke about cats\"))\n\n```\n\n## Limitations\n\n1. Async client is not supported.\n2. `json_mode` and `json_schema` Structured output are not supported.\n\n## [Change Log](./CHANGELOG.md)\n",
"bugtrack_url": null,
"license": "MIT",
"summary": "VM-X AI Langchain Python SDK",
"version": "1.1.5",
"project_urls": {
"Homepage": "https://github.com/vm-x-ai/vm-x-ai-sdk",
"Repository": "https://github.com/vm-x-ai/vm-x-ai-sdk"
},
"split_keywords": [
"vm-x",
" ai",
" sdk",
" python",
" langchain"
],
"urls": [
{
"comment_text": "",
"digests": {
"blake2b_256": "5b72cb1a4f99473bde702788e6a2e544957a25086dae8273e201bf163dd571ac",
"md5": "182201917031bfd3ae31bfe9d85cbbf6",
"sha256": "67552439693b646ff8be6599d566065ee55359f4936c318afdd38a073e25948e"
},
"downloads": -1,
"filename": "langchain_vm_x_ai-1.1.5-py3-none-any.whl",
"has_sig": false,
"md5_digest": "182201917031bfd3ae31bfe9d85cbbf6",
"packagetype": "bdist_wheel",
"python_version": "py3",
"requires_python": "<4,>=3.8.1",
"size": 18481,
"upload_time": "2024-12-09T15:36:22",
"upload_time_iso_8601": "2024-12-09T15:36:22.079963Z",
"url": "https://files.pythonhosted.org/packages/5b/72/cb1a4f99473bde702788e6a2e544957a25086dae8273e201bf163dd571ac/langchain_vm_x_ai-1.1.5-py3-none-any.whl",
"yanked": false,
"yanked_reason": null
},
{
"comment_text": "",
"digests": {
"blake2b_256": "c53947452a3cdd84c494e3125033445e674f340de10db5e1d8668f20b680ba4c",
"md5": "51a6b92c9f979faab340acd963832b7f",
"sha256": "98e64880612c9322c6e0b7860ad6d38287a844c21f73db836b90467d78b8a8a4"
},
"downloads": -1,
"filename": "langchain_vm_x_ai-1.1.5.tar.gz",
"has_sig": false,
"md5_digest": "51a6b92c9f979faab340acd963832b7f",
"packagetype": "sdist",
"python_version": "source",
"requires_python": "<4,>=3.8.1",
"size": 17592,
"upload_time": "2024-12-09T15:36:23",
"upload_time_iso_8601": "2024-12-09T15:36:23.684776Z",
"url": "https://files.pythonhosted.org/packages/c5/39/47452a3cdd84c494e3125033445e674f340de10db5e1d8668f20b680ba4c/langchain_vm_x_ai-1.1.5.tar.gz",
"yanked": false,
"yanked_reason": null
}
],
"upload_time": "2024-12-09 15:36:23",
"github": true,
"gitlab": false,
"bitbucket": false,
"codeberg": false,
"github_user": "vm-x-ai",
"github_project": "vm-x-ai-sdk",
"travis_ci": false,
"coveralls": false,
"github_actions": true,
"lcname": "langchain-vm-x-ai"
}