mixture-of-attention


Name: mixture-of-attention
Version: 0.0.25
home_page: https://github.com/lucidrains/mixture-of-attention
Summary: Mixture of Attention
upload_time: 2024-10-17 12:04:49
maintainer: None
docs_url: None
author: Phil Wang
requires_python: None
license: MIT
keywords: artificial intelligence, deep learning, transformers, attention mechanism, mixture-of-experts, routed attention
VCS: GitHub
bugtrack_url: None
requirements: No requirements were recorded.
Travis-CI: No Travis.
coveralls test coverage: No coveralls.
            
            

Raw data

            {
    "_id": null,
    "home_page": "https://github.com/lucidrains/mixture-of-attention",
    "name": "mixture-of-attention",
    "maintainer": null,
    "docs_url": null,
    "requires_python": null,
    "maintainer_email": null,
    "keywords": "artificial intelligence, deep learning, transformers, attention mechanism, mixture-of-experts, routed attention",
    "author": "Phil Wang",
    "author_email": "lucidrains@gmail.com",
    "download_url": "https://files.pythonhosted.org/packages/05/c8/2e4567637872d86b6407cc720157ef30c6707b718f3418c9033a80a7308b/mixture_of_attention-0.0.25.tar.gz",
    "platform": null,
    "description": "",
    "bugtrack_url": null,
    "license": "MIT",
    "summary": "Mixture of Attention",
    "version": "0.0.25",
    "project_urls": {
        "Homepage": "https://github.com/lucidrains/mixture-of-attention"
    },
    "split_keywords": [
        "artificial intelligence",
        " deep learning",
        " transformers",
        " attention mechanism",
        " mixture-of-experts",
        " routed attention"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "f92c0a018dd49bbd16b92ae423a8b63bb88c7ac3fbf5cf239f1fff0d160624d6",
                "md5": "4f89db9f3040343adfb3c770920da292",
                "sha256": "93d10286ced5efc5da1adce98433af098691de8ac2378a62cf383b9d41193090"
            },
            "downloads": -1,
            "filename": "mixture_of_attention-0.0.25-py3-none-any.whl",
            "has_sig": false,
            "md5_digest": "4f89db9f3040343adfb3c770920da292",
            "packagetype": "bdist_wheel",
            "python_version": "py3",
            "requires_python": null,
            "size": 11135,
            "upload_time": "2024-10-17T12:04:48",
            "upload_time_iso_8601": "2024-10-17T12:04:48.008438Z",
            "url": "https://files.pythonhosted.org/packages/f9/2c/0a018dd49bbd16b92ae423a8b63bb88c7ac3fbf5cf239f1fff0d160624d6/mixture_of_attention-0.0.25-py3-none-any.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "05c82e4567637872d86b6407cc720157ef30c6707b718f3418c9033a80a7308b",
                "md5": "26f991ea77c86e69462111809753cf39",
                "sha256": "4301b3c542345e4077155dbd396ffdd5611f961e98a14c22ff3fc074990c7625"
            },
            "downloads": -1,
            "filename": "mixture_of_attention-0.0.25.tar.gz",
            "has_sig": false,
            "md5_digest": "26f991ea77c86e69462111809753cf39",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": null,
            "size": 11113,
            "upload_time": "2024-10-17T12:04:49",
            "upload_time_iso_8601": "2024-10-17T12:04:49.482781Z",
            "url": "https://files.pythonhosted.org/packages/05/c8/2e4567637872d86b6407cc720157ef30c6707b718f3418c9033a80a7308b/mixture_of_attention-0.0.25.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2024-10-17 12:04:49",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "lucidrains",
    "github_project": "mixture-of-attention",
    "travis_ci": false,
    "coveralls": false,
    "github_actions": true,
    "lcname": "mixture-of-attention"
}
        
Elapsed time: 0.45351s