Raw data
{
"_id": null,
"home_page": "https://github.com/Nemesis-12/multihead-latent-attention",
"name": "multihead-latent-attention",
"maintainer": null,
"docs_url": null,
"requires_python": null,
"maintainer_email": null,
"keywords": "artificial intelligence, attention, attention mechanism, deep learning, natural language processing, pytorch, transformer",
"author": "Farhan Mohammed",
"author_email": "mfa200312@gmail.com",
"download_url": "https://files.pythonhosted.org/packages/6d/ff/3881dbb39fffb209e4a0ce1a268ed14f0997f93b662dfbcadab582988bb9/multihead_latent_attention-0.1.0.tar.gz",
"platform": null,
"description": "",
"bugtrack_url": null,
"license": "MIT",
"summary": "Multi-head Latent Attention (MLA) - PyTorch",
"version": "0.1.0",
"project_urls": {
"Homepage": "https://github.com/Nemesis-12/multihead-latent-attention"
},
"split_keywords": [
"artificial intelligence",
" attention",
" attention mechanism",
" deep learning",
" natural language processing",
" pytorch",
" transformer"
],
"urls": [
{
"comment_text": null,
"digests": {
"blake2b_256": "9cefbb6a3f96b37ab3c6da9a6c4d07d5e8a687be89bb79fd9bb018b4f2940424",
"md5": "6d886aa1ecd972f30a2fda36523432b4",
"sha256": "68f306407276ffecb3b23663bddc0964ddfa47ad69ed0bd8e29bcf2fca67623f"
},
"downloads": -1,
"filename": "multihead_latent_attention-0.1.0-py3-none-any.whl",
"has_sig": false,
"md5_digest": "6d886aa1ecd972f30a2fda36523432b4",
"packagetype": "bdist_wheel",
"python_version": "py3",
"requires_python": null,
"size": 4047,
"upload_time": "2025-08-22T00:59:25",
"upload_time_iso_8601": "2025-08-22T00:59:25.232227Z",
"url": "https://files.pythonhosted.org/packages/9c/ef/bb6a3f96b37ab3c6da9a6c4d07d5e8a687be89bb79fd9bb018b4f2940424/multihead_latent_attention-0.1.0-py3-none-any.whl",
"yanked": false,
"yanked_reason": null
},
{
"comment_text": null,
"digests": {
"blake2b_256": "6dff3881dbb39fffb209e4a0ce1a268ed14f0997f93b662dfbcadab582988bb9",
"md5": "d831cbd522838dffd62b24da484709d4",
"sha256": "a9c87c795dd5fb78bbb34cba3e573682125a18c7caabf125526ca7b245a0c13c"
},
"downloads": -1,
"filename": "multihead_latent_attention-0.1.0.tar.gz",
"has_sig": false,
"md5_digest": "d831cbd522838dffd62b24da484709d4",
"packagetype": "sdist",
"python_version": "source",
"requires_python": null,
"size": 3965,
"upload_time": "2025-08-22T00:59:26",
"upload_time_iso_8601": "2025-08-22T00:59:26.529345Z",
"url": "https://files.pythonhosted.org/packages/6d/ff/3881dbb39fffb209e4a0ce1a268ed14f0997f93b662dfbcadab582988bb9/multihead_latent_attention-0.1.0.tar.gz",
"yanked": false,
"yanked_reason": null
}
],
"upload_time": "2025-08-22 00:59:26",
"github": true,
"gitlab": false,
"bitbucket": false,
"codeberg": false,
"github_user": "Nemesis-12",
"github_project": "multihead-latent-attention",
"travis_ci": false,
"coveralls": false,
"github_actions": false,
"lcname": "multihead-latent-attention"
}
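
The description field in the metadata above is empty, so the package's actual API is not documented here. For orientation only, the sketch below illustrates the general multi-head latent attention (MLA) idea the name and summary refer to: keys and values are reconstructed from a shared low-rank latent projection rather than projected at full width per head. Every class, method, and parameter name in this sketch is an assumption for illustration and is not the package's interface; the package itself can be installed with pip install multihead-latent-attention.

# Illustrative sketch only: a simplified multi-head latent attention block in plain
# PyTorch. This is NOT the API of the multihead-latent-attention package; its
# description is empty in the metadata above, so all names and shapes are assumptions.
import torch
import torch.nn as nn
import torch.nn.functional as F

class SimplifiedMLA(nn.Module):
    def __init__(self, dim: int, num_heads: int, latent_dim: int):
        super().__init__()
        assert dim % num_heads == 0
        self.num_heads = num_heads
        self.head_dim = dim // num_heads
        # Queries are projected per head, as in standard multi-head attention.
        self.q_proj = nn.Linear(dim, dim, bias=False)
        # Keys/values are first compressed into a shared low-dimensional latent...
        self.kv_down = nn.Linear(dim, latent_dim, bias=False)
        # ...then reconstructed per head from that latent (the low-rank bottleneck).
        self.k_up = nn.Linear(latent_dim, dim, bias=False)
        self.v_up = nn.Linear(latent_dim, dim, bias=False)
        self.out_proj = nn.Linear(dim, dim, bias=False)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        b, t, d = x.shape
        q = self.q_proj(x)
        # The latent is what a KV cache would store: (b, t, latent_dim) instead of
        # two full (b, t, dim) tensors, which is the usual motivation for MLA.
        latent = self.kv_down(x)
        k = self.k_up(latent)
        v = self.v_up(latent)
        # Reshape to (b, heads, t, head_dim) and run scaled dot-product attention.
        q, k, v = (z.view(b, t, self.num_heads, self.head_dim).transpose(1, 2)
                   for z in (q, k, v))
        out = F.scaled_dot_product_attention(q, k, v, is_causal=True)
        out = out.transpose(1, 2).reshape(b, t, d)
        return self.out_proj(out)

# Example usage (shapes are illustrative):
x = torch.randn(2, 16, 256)
mla = SimplifiedMLA(dim=256, num_heads=8, latent_dim=64)
print(mla(x).shape)  # torch.Size([2, 16, 256])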