# Cognitora Python SDK
The official Python SDK for Cognitora - Operating System for Autonomous AI Agents.
## Features
- **Code Interpreter**: Execute Python, JavaScript, and Bash code in secure sandboxed environments
- **Containers Platform**: Run containerized workloads with flexible resource allocation
- **Session Management**: Persistent sessions with state management and automatic cleanup
- **Execution Control**: Start, monitor, and cancel long-running compute tasks
- **File Operations**: Upload and manipulate files in execution environments
- **Networking Control**: Optional internet access with security-focused defaults
- **Async Support**: Full async/await support for high-performance applications
- **Type Safety**: Comprehensive type hints and data validation
## Installation
```bash
pip install cognitora
```
## Quick Start
```python
from cognitora import Cognitora
# Initialize the client
client = Cognitora(api_key="your_api_key_here")

# Execute Python code with networking
result = client.code_interpreter.execute(
    code="print('Hello from Cognitora!')",
    language="python",
    networking=True  # Enable internet access (default for code interpreter)
)

print(f"Status: {result.data.status}")
for output in result.data.outputs:
    print(f"{output.type}: {output.data}")
```
## Authentication
Get your API key from the [Cognitora Dashboard](https://www.cognitora.dev/home/api-keys) and set it:
```python
# Method 1: Pass directly
client = Cognitora(api_key="cgk_1234567890abcdef")

# Method 2: Environment variable
import os
os.environ['COGNITORA_API_KEY'] = 'cgk_1234567890abcdef'
client = Cognitora()  # Will use the environment variable

# Method 3: With custom configuration
client = Cognitora(
    api_key="your_api_key",
    base_url="https://api.cognitora.dev",  # Production default
    timeout=30
)
```
## Code Interpreter
### Basic Execution with Networking Control
```python
# Execute Python code with internet access (default)
result = client.code_interpreter.execute(
    code="""
import requests
import numpy as np
import matplotlib.pyplot as plt

# Fetch data from an API (requires networking)
response = requests.get('https://api.github.com/repos/microsoft/typescript')
repo_data = response.json()

print(f"Repository: {repo_data['name']}")
print(f"Stars: {repo_data['stargazers_count']}")

# Create a visualization
x = np.linspace(0, 10, 100)
y = np.sin(x)

plt.figure(figsize=(10, 6))
plt.plot(x, y)
plt.title('Sine Wave')
plt.show()
""",
    language="python",
    networking=True  # Explicitly enable networking (default for code interpreter)
)

# Execute code without internet access for security
secure_result = client.code_interpreter.execute(
    code="""
import numpy as np

# No external requests - isolated execution
data = np.random.randn(1000)
print(f"Mean: {np.mean(data)}")
""",
    language="python",
    networking=False  # Disable networking for secure execution
)
```
### Session Persistence
**Sessions maintain state between executions**, making them perfect for:
- Interactive data analysis workflows
- Long-running machine learning experiments
- Multi-step data processing pipelines
- Collaborative coding environments
```python
# Create a persistent session
session = client.code_interpreter.create_session(
    language="python",
    timeout_minutes=60,
    resources={
        "cpu_cores": 2,
        "memory_mb": 2048,
        "storage_gb": 10
    }
)

print(f"Session created: {session.session_id}")

# Execute code in the session (variables persist)
result1 = client.code_interpreter.execute(
    code="x = 42; y = 'Hello World'; import pandas as pd",
    session_id=session.session_id,
    networking=True  # Enable networking for package installs
)

result2 = client.code_interpreter.execute(
    code="print(f'x = {x}, y = {y}'); print(f'Pandas version: {pd.__version__}')",
    session_id=session.session_id
)

# Variables and imports are maintained across executions
print(result2.data.outputs[0].data)  # Output: x = 42, y = Hello World

# Get session execution history
session_executions = client.code_interpreter.get_session_executions(session.session_id)
print(f"Session has {len(session_executions)} executions")

# Always clean up sessions when done
client.code_interpreter.delete_session(session.session_id)
```
### New Execution Management Features
```python
# List all interpreter executions across all sessions
all_executions = client.code_interpreter.list_all_executions(
    limit=20,
    status='completed'
)

print(f"Found {len(all_executions)} completed executions")

# Get specific execution details
execution_details = client.code_interpreter.get_execution('exec_123456')
print(f"Execution status: {execution_details['status']}")

# Get executions for a specific session
session_executions = client.code_interpreter.get_session_executions(
    'session_123456',
    limit=10
)
```
### File Operations
```python
from cognitora import FileUpload

# Prepare files
files = [
    FileUpload(
        name="data.csv",
        content="name,age,city\nJohn,30,NYC\nJane,25,LA",
        encoding="string"
    ),
    FileUpload(
        name="script.py",
        content="import pandas as pd\ndf = pd.read_csv('data.csv')\nprint(df.head())",
        encoding="string"
    )
]

# Execute with files
result = client.code_interpreter.run_with_files(
    code="exec(open('script.py').read())",
    files=files,
    language="python"
)
```
## Containers Platform
The Containers Platform allows you to run containerized workloads with **full execution control** and **networking security**.
### Basic Container Execution
```python
# Run a secure container (isolated by default)
execution = client.containers.create_container(
    image="docker.io/library/python:3.11-slim",
    command=["python", "-c", "print('Hello from secure container!')"],
    cpu_cores=1.0,
    memory_mb=512,
    max_cost_credits=5,
    networking=False  # Default: isolated for security
)

print(f"Container ID: {execution.id}")
print(f"Status: {execution.status}")

# Run a container with internet access when needed
networking_execution = client.containers.create_container(
    image="docker.io/library/python:3.11",
    command=["python", "-c", """
import requests

response = requests.get('https://api.github.com/users/octocat')
user_data = response.json()
print(f"GitHub user: {user_data['name']}")
"""],
    cpu_cores=1.0,
    memory_mb=512,
    max_cost_credits=10,
    networking=True  # Enable networking for API calls
)
```
### Advanced Container Management
```python
# List all container executions
container_executions = client.containers.list_all_container_executions(
    limit=50,
    status='running'
)

print(f"Active containers: {len(container_executions)}")

# Get specific container execution details
container_execution = client.containers.get_container_execution('exec_123456')
print(f"Container execution: {container_execution['status']}")

# Get executions for a specific container
container_history = client.containers.get_container_executions('container_123456')
print(f"Container has {len(container_history)} executions")
```
### Execution Control & Cancellation
```python
# Run a long-running task with networking control
execution = client.containers.create_container(
    image="docker.io/library/python:3.11-slim",
    command=["python", "-c", """
import time
import requests

for i in range(100):
    print(f'Processing step {i+1}/100')

    # Make an API call every 10 steps (requires networking)
    if i % 10 == 0:
        try:
            response = requests.get('https://httpbin.org/delay/1')
            print(f'API call {i//10 + 1} completed')
        except Exception as e:
            print(f'Network error: {e}')

    time.sleep(2)

print('Processing complete!')
"""],
    cpu_cores=2.0,
    memory_mb=1024,
    max_cost_credits=50,
    timeout_seconds=3600,
    networking=True  # Enable networking for API calls
)

print(f"Started container: {execution.id}")

# Monitor execution status
try:
    # Wait for completion with a timeout
    completed = client.containers.wait_for_completion(
        execution.id,
        timeout_seconds=30,  # 30-second timeout for the demo
        poll_interval=2
    )
    print(f"Container completed: {completed.status}")

except Exception:
    print("Container taking too long, cancelling...")

    # Cancel the container
    result = client.containers.cancel_container(execution.id)
    print(f"Cancellation result: {result}")

    # Verify cancellation
    cancelled_container = client.containers.get_container(execution.id)
    print(f"Final status: {cancelled_container.status}")
```
### Resource Management Best Practices
```python
# Always estimate costs before running expensive operations
estimate = client.containers.estimate_cost(
    cpu_cores=4.0,
    memory_mb=8192,
    storage_gb=20,
    gpu_count=1,
    timeout_seconds=3600
)

print(f"Estimated cost: {estimate['estimated_credits']} credits")

if estimate['estimated_credits'] <= 100:
    # Proceed with execution
    execution = client.containers.create_container(
        image="docker.io/tensorflow/tensorflow:latest-gpu",
        command=["python", "train.py"],
        cpu_cores=4.0,
        memory_mb=8192,
        storage_gb=20,
        gpu_count=1,
        max_cost_credits=int(estimate['estimated_credits'] * 1.2),  # 20% buffer
        networking=False  # Secure by default
    )

    try:
        # Monitor execution
        result = client.containers.wait_for_completion(execution.id)
        logs = client.containers.get_container_logs(execution.id)
        print(f"Training completed: {result.status}")

    except KeyboardInterrupt:
        # Handle user interruption gracefully
        print("Interruption detected, cancelling container...")
        client.containers.cancel_container(execution.id)

    except Exception as e:
        # Handle errors and clean up
        print(f"Error occurred: {e}")
        client.containers.cancel_container(execution.id)

else:
    print(f"Execution too expensive ({estimate['estimated_credits']} credits), skipping...")
```
## Async Support
```python
import asyncio
from cognitora import CognitoraAsync

async def main():
    async with CognitoraAsync(api_key="your_api_key") as client:
        # Parallel execution
        tasks = [
            client.code_interpreter.execute(
                code=f"import time; time.sleep(1); print('Task {i} completed')",
                language="python"
            )
            for i in range(5)
        ]

        results = await asyncio.gather(*tasks)

        for i, result in enumerate(results):
            print(f"Task {i}: {result.data.outputs[0].data}")

# Run the async code
asyncio.run(main())
```
## Error Handling
```python
from cognitora import CognitoraError, AuthenticationError, RateLimitError

try:
    result = client.code_interpreter.execute(
        code="raise ValueError('Test error')",
        language="python"
    )
except AuthenticationError:
    print("Invalid API key")
except RateLimitError:
    print("Rate limit exceeded, please wait")
except CognitoraError as e:
    print(f"API error: {e}")
    print(f"Status code: {e.status_code}")
    print(f"Response data: {e.response_data}")
```
## API Reference
### CodeInterpreter Class
#### Methods
- `execute(code, language='python', session_id=None, files=None, timeout_seconds=60, environment=None, networking=None)` - Execute code with networking control
- `create_session(language='python', timeout_minutes=60, environment=None, resources=None)` - Create persistent session
- `list_sessions()` - List active sessions
- `get_session(session_id)` - Get session details
- `delete_session(session_id)` - Delete session
- `get_session_logs(session_id, limit=50, offset=0)` - Get session logs
- `list_all_executions(limit=50, offset=0, status=None)` - **NEW**: List all interpreter executions
- `get_execution(execution_id)` - **NEW**: Get specific execution details
- `get_session_executions(session_id, limit=50, offset=0)` - **NEW**: List executions for specific session
- `run_python(code, session_id=None)` - Execute Python code
- `run_javascript(code, session_id=None)` - Execute JavaScript code
- `run_bash(command, session_id=None)` - Execute bash command
- `run_with_files(code, files, language='python', session_id=None)` - Execute with files
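
For quick one-off calls, the language-specific helpers above wrap `execute()`. A minimal sketch, assuming they return the same `ExecuteCodeResponse` shape that `execute()` does:

```python
from cognitora import Cognitora

client = Cognitora(api_key="your_api_key")

# Each helper runs a snippet in its own language
py_result = client.code_interpreter.run_python("print(2 + 2)")
js_result = client.code_interpreter.run_javascript("console.log('hi from JS')")
sh_result = client.code_interpreter.run_bash("echo $HOME && ls -la")

# Assumption: helpers return the same response shape as execute()
for result in (py_result, js_result, sh_result):
    for output in result.data.outputs:
        print(f"{output.type}: {output.data}")
```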
### Containers Class
#### Methods
- `create_container(image, command, cpu_cores, memory_mb, max_cost_credits, networking=None, **kwargs)` - Create container with networking control
- `list_containers(limit=50, offset=0, status=None)` - List containers
- `get_container(container_id)` - Get container details
- `cancel_container(container_id)` - Cancel container
- `get_container_logs(container_id)` - Get container logs
- `get_container_executions(container_id)` - Get container executions
- `list_all_container_executions(limit=50, offset=0, status=None)` - **NEW**: List all container executions
- `get_container_execution(execution_id)` - **NEW**: Get specific container execution details
- `estimate_cost(cpu_cores, memory_mb, storage_gb=5, gpu_count=0, timeout_seconds=300)` - Estimate cost
- `wait_for_completion(container_id, timeout_seconds=300, poll_interval=5)` - Wait for completion
- `run_and_wait(image, command, cpu_cores, memory_mb, max_cost_credits, **kwargs)` - Create and wait
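
For fire-and-forget jobs, `run_and_wait()` bundles `create_container()` and `wait_for_completion()` into one call. A minimal sketch, assuming it returns the same completed-container object that `wait_for_completion()` yields (with `status` and `id` fields):

```python
# One call: create the container and block until it finishes
completed = client.containers.run_and_wait(
    image="docker.io/library/python:3.11-slim",
    command=["python", "-c", "print('one-shot job')"],
    cpu_cores=1.0,
    memory_mb=512,
    max_cost_credits=5
)

# Assumption: the return value mirrors what wait_for_completion() yields
print(f"Final status: {completed.status}")
print(client.containers.get_container_logs(completed.id))
```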
## Security & Networking
### Default Networking Behavior
| Service | Default Networking | Security Rationale |
|---------|-------------------|-------------------|
| **Code Interpreter** | `True` (enabled) | Needs package installs, data fetching |
| **Containers** | `False` (disabled) | Security-first: isolated by default |
### Networking Best Practices
```python
# For data analysis that needs external data
data_analysis = client.code_interpreter.execute(
    code="""
import pandas as pd
import requests

# Fetch external data
response = requests.get('https://api.coindesk.com/v1/bpi/currentprice.json')
data = response.json()
print(f"Bitcoin price: {data['bpi']['USD']['rate']}")
""",
    networking=True  # Required for external API calls
)

# For secure computation without external access
secure_computation = client.containers.create_container(
    image="docker.io/library/python:3.11",
    command=["python", "-c", "print('Secure isolated computation')"],
    cpu_cores=1.0,
    memory_mb=512,
    max_cost_credits=5,
    networking=False  # Isolated execution (default)
)

# For containers that need external resources
# (run through a shell so the '&&' chaining actually works)
data_processing = client.containers.create_container(
    image="docker.io/library/python:3.11",
    command=["sh", "-c", "pip install requests && python process.py"],
    cpu_cores=2.0,
    memory_mb=1024,
    max_cost_credits=20,
    networking=True  # Enable for pip install and external APIs
)
```
## Configuration
### Environment Variables
```bash
export COGNITORA_API_KEY="your_api_key_here"
export COGNITORA_BASE_URL="https://api.cognitora.dev" # Optional
export COGNITORA_TIMEOUT="30" # Optional, seconds
```
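
The API key is picked up automatically from `COGNITORA_API_KEY` (see Authentication). Whether the optional variables are read automatically may depend on the client version, so mapping them explicitly is a safe pattern. A sketch, assuming only the constructor arguments shown above:

```python
import os

from cognitora import Cognitora

# Explicitly map the optional variables to constructor arguments
client = Cognitora(
    api_key=os.environ["COGNITORA_API_KEY"],
    base_url=os.environ.get("COGNITORA_BASE_URL", "https://api.cognitora.dev"),
    timeout=int(os.environ.get("COGNITORA_TIMEOUT", "30")),
)
```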
## Best Practices
### 1. Resource Management
```python
# Always specify appropriate resources
session = client.code_interpreter.create_session(
    language="python",
    timeout_minutes=30,  # Don't set this too high
    resources={
        "cpu_cores": 1.0,   # Start small
        "memory_mb": 1024,  # Adjust based on needs
        "storage_gb": 5     # Minimum required
    }
)
```
### 2. Session Lifecycle
```python
# Create a session
session = client.code_interpreter.create_session()

try:
    # Use the session for multiple operations
    for code_snippet in code_snippets:
        result = client.code_interpreter.execute(
            code=code_snippet,
            session_id=session.session_id
        )
        process_result(result)
finally:
    # Clean up
    client.code_interpreter.delete_session(session.session_id)
```
### 3. Error Recovery
```python
import time

def execute_with_retry(client, code, max_retries=3):
    for attempt in range(max_retries):
        try:
            return client.code_interpreter.execute(code=code)
        except RateLimitError:
            if attempt < max_retries - 1:
                time.sleep(2 ** attempt)  # Exponential backoff
                continue
            raise
        except CognitoraError as e:
            if e.status_code >= 500 and attempt < max_retries - 1:
                time.sleep(1)
                continue
            raise
```
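
A quick usage sketch of the helper above (the imports mirror the Error Handling section):

```python
from cognitora import Cognitora, CognitoraError, RateLimitError

client = Cognitora(api_key="your_api_key")

# Retries on rate limits and transient 5xx errors, re-raises everything else
result = execute_with_retry(client, "print('retried if rate limited')")
print(result.data.status)
```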
## Recent Updates
### API Refactor Alignment
- ✅ **Updated endpoint paths**: Code interpreter now uses `/api/v1/interpreter/*`
- ✅ **Container-focused architecture**: All compute operations use `/api/v1/compute/containers/*`
- ✅ **Networking parameter**: Security-focused networking control for all operations
- ✅ **New execution endpoints**: Comprehensive execution management and history
- ✅ **Production-ready defaults**: All clients default to `https://api.cognitora.dev`
### New Features
- ✅ **Networking Control**: Optional `networking` parameter with security-focused defaults
- ✅ **Execution Management**: List, filter, and retrieve execution details across all services
- ✅ **Session History**: Track and manage executions within persistent sessions
- ✅ **Container Execution History**: Detailed tracking of container execution lifecycle
### Breaking Changes from Previous Versions
- **Method Names**: `compute.*` methods renamed to `containers.*`
- **Endpoint Paths**: Code interpreter paths changed from `/code-interpreter/` to `/interpreter/`
- **Default Networking**: Containers now default to `networking=False` for security
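
When upgrading, the main code change is the namespace swap plus an explicit `networking=True` for containers that previously relied on network access. A hedged before/after sketch (the commented-out old call is illustrative, not the exact pre-refactor signature):

```python
# Before (older SDKs) - illustrative only; container operations lived under `compute`
# execution = client.compute.create_container(...)

# After (current SDK): the same operations live under `containers`,
# and containers are network-isolated unless you opt in
execution = client.containers.create_container(
    image="docker.io/library/python:3.11-slim",
    command=["python", "-c", "print('migrated')"],
    cpu_cores=1.0,
    memory_mb=512,
    max_cost_credits=5,
    networking=True,  # previously-networked workloads must now opt in explicitly
)
```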
## Data Classes and Types
```python
from cognitora import (
    ExecuteCodeRequest,
    ExecuteCodeResponse,
    ComputeExecutionRequest,
    FileUpload,
    Session,
    Execution
)

# All request and response types are provided as dataclasses
request = ExecuteCodeRequest(
    code="print('Hello')",
    language="python",
    networking=True  # NEW: networking control
)

container_request = ComputeExecutionRequest(
    image="python:3.11",
    command=["python", "-c", "print('test')"],
    cpu_cores=1.0,
    memory_mb=512,
    max_cost_credits=5,
    networking=False  # NEW: networking control
)
```
## Support
- **Documentation**: [docs.cognitora.dev](https://www.cognitora.dev/docs/)
- **Support or early access**: [hello@cognitora.dev](mailto:hello@cognitora.dev)
## License
MIT License - see [LICENSE](LICENSE) file for details.