flash-attention-jax

Name: flash-attention-jax
Version: 0.3.1
Home page: https://github.com/lucidrains/flash-attention-jax
Summary: Flash Attention - in Jax
Upload time: 2023-07-18 02:45:18
Maintainer: (not recorded)
Docs URL: None
Author: Phil Wang
Requires Python: (not recorded)
License: MIT
Keywords: artificial intelligence, deep learning, transformers, attention mechanism, jax
VCS: (not recorded)
Bugtrack URL: (not recorded)
Requirements: No requirements were recorded.
Travis-CI: No Travis.
Coveralls test coverage: No coveralls.
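
The description field for this release is empty on PyPI, so no usage documentation is recorded here. As an illustration only, the sketch below assumes the package exposes a flash_attention(q, k, v, key_mask) entry point as shown in the linked GitHub repository; the import path and signature are assumptions to verify against the repo.

    # Illustrative only: assumes flash-attention-jax exposes
    # `flash_attention(q, k, v, key_mask)` as in the project README.
    from jax import random
    from flash_attention_jax import flash_attention  # assumed import path

    rng = random.PRNGKey(42)

    # Shapes: (batch, heads, sequence length, head dimension)
    q = random.normal(rng, (1, 2, 1024, 64))
    k = random.normal(rng, (1, 2, 1024, 64))
    v = random.normal(rng, (1, 2, 1024, 64))

    # Boolean mask over key positions: (batch, sequence length)
    key_mask = random.randint(rng, (1, 1024), 0, 2).astype(bool)

    out = flash_attention(q, k, v, key_mask)
    print(out.shape)  # expected: (1, 2, 1024, 64)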
            
            

Raw data

{
    "_id": null,
    "home_page": "https://github.com/lucidrains/flash-attention-jax",
    "name": "flash-attention-jax",
    "maintainer": "",
    "docs_url": null,
    "requires_python": "",
    "maintainer_email": "",
    "keywords": "artificial intelligence,deep learning,transformers,attention mechanism,jax",
    "author": "Phil Wang",
    "author_email": "lucidrains@gmail.com",
    "download_url": "https://files.pythonhosted.org/packages/e2/8a/c7c8f926d613392d2009a646b45d5dcab072c1326b42aafad93de45399c9/flash-attention-jax-0.3.1.tar.gz",
    "platform": null,
    "description": "",
    "bugtrack_url": null,
    "license": "MIT",
    "summary": "Flash Attention - in Jax",
    "version": "0.3.1",
    "project_urls": {
        "Homepage": "https://github.com/lucidrains/flash-attention-jax"
    },
    "split_keywords": [
        "artificial intelligence",
        "deep learning",
        "transformers",
        "attention mechanism",
        "jax"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "84f3d642b12992b4b15ffd1496775af0100278212c1b3254afffc9964356dc93",
                "md5": "cc637cc4799f0d7986f103a815562217",
                "sha256": "20dc2f01c671517afaac83483329bf0ccdb89b20f817bb6ea5be09bd8e5c950c"
            },
            "downloads": -1,
            "filename": "flash_attention_jax-0.3.1-py3-none-any.whl",
            "has_sig": false,
            "md5_digest": "cc637cc4799f0d7986f103a815562217",
            "packagetype": "bdist_wheel",
            "python_version": "py3",
            "requires_python": null,
            "size": 10066,
            "upload_time": "2023-07-18T02:45:16",
            "upload_time_iso_8601": "2023-07-18T02:45:16.901811Z",
            "url": "https://files.pythonhosted.org/packages/84/f3/d642b12992b4b15ffd1496775af0100278212c1b3254afffc9964356dc93/flash_attention_jax-0.3.1-py3-none-any.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "e28ac7c8f926d613392d2009a646b45d5dcab072c1326b42aafad93de45399c9",
                "md5": "0959bd7aaf45c7ac86e9bed2f0f17424",
                "sha256": "ce9a8e11905157c827e806ffc55183b4faed586083d08ebba0463cc73a6006d5"
            },
            "downloads": -1,
            "filename": "flash-attention-jax-0.3.1.tar.gz",
            "has_sig": false,
            "md5_digest": "0959bd7aaf45c7ac86e9bed2f0f17424",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": null,
            "size": 7805,
            "upload_time": "2023-07-18T02:45:18",
            "upload_time_iso_8601": "2023-07-18T02:45:18.140711Z",
            "url": "https://files.pythonhosted.org/packages/e2/8a/c7c8f926d613392d2009a646b45d5dcab072c1326b42aafad93de45399c9/flash-attention-jax-0.3.1.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2023-07-18 02:45:18",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "lucidrains",
    "github_project": "flash-attention-jax",
    "travis_ci": false,
    "coveralls": false,
    "github_actions": true,
    "lcname": "flash-attention-jax"
}
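
Fields such as "github", "lcname", and the flat "upload_time" at the end appear to be added by this listing page; the canonical record lives behind PyPI's JSON API. Below is a minimal sketch, assuming network access, that fetches the same release from https://pypi.org/pypi/flash-attention-jax/0.3.1/json and verifies the sdist against the sha256 recorded above.

    # Fetch release metadata from PyPI's JSON API and check the sdist digest.
    import hashlib
    import json
    from urllib.request import urlopen

    API = "https://pypi.org/pypi/flash-attention-jax/0.3.1/json"

    with urlopen(API) as resp:
        meta = json.load(resp)

    # Pick the source distribution entry and its recorded sha256.
    sdist = next(u for u in meta["urls"] if u["packagetype"] == "sdist")
    expected = sdist["digests"]["sha256"]

    # Download the archive and hash it.
    with urlopen(sdist["url"]) as resp:
        actual = hashlib.sha256(resp.read()).hexdigest()

    assert actual == expected, "sha256 mismatch"
    print(f"{sdist['filename']}: sha256 OK ({actual})")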
        