[![Multi-Modality](agorabanner.png)](https://discord.gg/qUtxnK2NMf)
# Multi-Modal Causal Multi-Grouped Query Attention
Experiments combining Multi-Modal Causal Attention (MMCA) with Multi-Grouped Query Attention (MGQA)
# Appreciation
* Lucidrains
* Agorians
# Install
`pip install mmmgqa`
# Usage
```python
import torch
from mmca_mgqa.attention import SimpleMMCA
# Define the dimensions
dim = 512
head = 8
seq_len = 10
batch_size = 32
# Instantiate the attention module
attn = SimpleMMCA(dim=dim, heads=head)

# Random visual and text tokens
v = torch.randn(batch_size, seq_len, dim)
t = torch.randn(batch_size, seq_len, dim)

# Pass both token streams through the attention layer
tokens = attn(v, t)
print(tokens)
```
# Architecture
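The README does not document the internals, so here is a minimal illustrative sketch of how multi-modal causal attention might be combined with grouped-query attention. The class `GroupedQueryMMCA`, its `kv_heads` parameter, and the specific masking rule used below (visual tokens attend to one another bidirectionally; text tokens attend to every visual token and causally to earlier text tokens) are assumptions for illustration, not the library's actual implementation.

```python
import torch
from torch import nn

class GroupedQueryMMCA(nn.Module):
    """Illustrative sketch (not the mmca_mgqa implementation):
    grouped-query attention over concatenated visual + text tokens
    with a multi-modal causal mask."""

    def __init__(self, dim, heads=8, kv_heads=2):
        super().__init__()
        assert dim % heads == 0 and heads % kv_heads == 0
        self.heads = heads
        self.kv_heads = kv_heads
        self.head_dim = dim // heads
        self.q_proj = nn.Linear(dim, dim, bias=False)
        self.k_proj = nn.Linear(dim, kv_heads * self.head_dim, bias=False)
        self.v_proj = nn.Linear(dim, kv_heads * self.head_dim, bias=False)
        self.out_proj = nn.Linear(dim, dim, bias=False)

    def forward(self, visual, text):
        b, n_v, _ = visual.shape
        n_t = text.shape[1]
        x = torch.cat([visual, text], dim=1)  # [b, n_v + n_t, dim]
        n = n_v + n_t

        # Many query heads, few key/value heads (the grouped-query idea).
        q = self.q_proj(x).view(b, n, self.heads, self.head_dim).transpose(1, 2)
        k = self.k_proj(x).view(b, n, self.kv_heads, self.head_dim).transpose(1, 2)
        v = self.v_proj(x).view(b, n, self.kv_heads, self.head_dim).transpose(1, 2)

        # Repeat each key/value head so a group of query heads shares it.
        group = self.heads // self.kv_heads
        k = k.repeat_interleave(group, dim=1)
        v = v.repeat_interleave(group, dim=1)

        # Multi-modal causal mask (assumed rule): True = may attend.
        mask = torch.zeros(n, n, dtype=torch.bool, device=x.device)
        mask[:n_v, :n_v] = True   # visual tokens attend among themselves
        mask[n_v:, :n_v] = True   # text tokens attend to all visual tokens
        mask[n_v:, n_v:] = torch.tril(
            torch.ones(n_t, n_t, dtype=torch.bool, device=x.device)
        )                         # text tokens attend causally to earlier text

        scores = (q @ k.transpose(-2, -1)) / self.head_dim ** 0.5
        scores = scores.masked_fill(~mask, float("-inf"))
        attn = scores.softmax(dim=-1)
        out = (attn @ v).transpose(1, 2).reshape(b, n, -1)
        return self.out_proj(out)  # [b, n_v + n_t, dim]
```

With `heads=8` and `kv_heads=2`, each key/value head is shared by a group of four query heads, which is the memory/compute trade-off grouped-query attention makes relative to full multi-head attention.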
# Todo
# License
MIT