flash-attn-wheels-test

Name: flash-attn-wheels-test
Version: 2.0.8.post17
Home page: https://github.com/Dao-AILab/flash-attention
Summary: Flash Attention: Fast and Memory-Efficient Exact Attention
Upload time: 2023-08-13 21:27:09
Maintainer: (not recorded)
Docs URL: None
Author: Tri Dao
Requires Python: >=3.7
License: (not recorded)
Keywords: (not recorded)
VCS: GitHub (Dao-AILab/flash-attention)
Bugtrack URL: None
Requirements: No requirements were recorded.
Travis CI: No Travis.
Coveralls test coverage: No coveralls.
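The "Requires Python" field above is a standard PEP 440 specifier that installers such as pip evaluate against the running interpreter before selecting a release. A minimal sketch of that check, assuming the third-party "packaging" library is available (pip install packaging):

# Sketch: evaluate the requires_python specifier (">=3.7") the way an
# installer would. Assumes the third-party "packaging" library is installed.
import platform
from packaging.specifiers import SpecifierSet

requires_python = SpecifierSet(">=3.7")  # value from the metadata above
current = platform.python_version()      # e.g. "3.11.4"

if requires_python.contains(current):
    print(f"Python {current} satisfies {requires_python}")
else:
    print(f"Python {current} does not satisfy {requires_python}")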
            
            

Raw data
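The record below is what PyPI's per-version JSON API serves (https://pypi.org/pypi/flash-attn-wheels-test/2.0.8.post17/json). A stdlib-only sketch of retrieving it, hedged because this was a throwaway wheel-building test package and the endpoint may no longer resolve:

# Sketch: retrieve this release's metadata from PyPI's JSON API.
# The package is a wheel-building test upload, so the URL may have been removed.
import json
import urllib.request

URL = "https://pypi.org/pypi/flash-attn-wheels-test/2.0.8.post17/json"
with urllib.request.urlopen(URL) as resp:
    meta = json.load(resp)

info = meta["info"]
print(info["name"], info["version"])  # flash-attn-wheels-test 2.0.8.post17
print(info["requires_python"])        # >=3.7
for f in meta["urls"]:                # one sdist entry for this release
    print(f["filename"], f["digests"]["sha256"])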

{
    "_id": null,
    "home_page": "https://github.com/Dao-AILab/flash-attention",
    "name": "flash-attn-wheels-test",
    "maintainer": "",
    "docs_url": null,
    "requires_python": ">=3.7",
    "maintainer_email": "",
    "keywords": "",
    "author": "Tri Dao",
    "author_email": "trid@cs.stanford.edu",
    "download_url": "https://files.pythonhosted.org/packages/44/fd/e920608e4f115c21a340d66015dc0258b2e99e699c7cfc4fe8b433dda411/flash_attn_wheels_test-2.0.8.post17.tar.gz",
    "platform": null,
    "description": "",
    "bugtrack_url": null,
    "license": "",
    "summary": "Flash Attention: Fast and Memory-Efficient Exact Attention",
    "version": "2.0.8.post17",
    "project_urls": {
        "Homepage": "https://github.com/Dao-AILab/flash-attention"
    },
    "split_keywords": [],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "44fde920608e4f115c21a340d66015dc0258b2e99e699c7cfc4fe8b433dda411",
                "md5": "045a7d3474e5ca148d4f4aeb50d19621",
                "sha256": "632018117f1acba417d574ca4d66a0a6ae51e107ed3ed2b897381af0d17ef3cc"
            },
            "downloads": -1,
            "filename": "flash_attn_wheels_test-2.0.8.post17.tar.gz",
            "has_sig": false,
            "md5_digest": "045a7d3474e5ca148d4f4aeb50d19621",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": ">=3.7",
            "size": 1898451,
            "upload_time": "2023-08-13T21:27:09",
            "upload_time_iso_8601": "2023-08-13T21:27:09.714676Z",
            "url": "https://files.pythonhosted.org/packages/44/fd/e920608e4f115c21a340d66015dc0258b2e99e699c7cfc4fe8b433dda411/flash_attn_wheels_test-2.0.8.post17.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2023-08-13 21:27:09",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "Dao-AILab",
    "github_project": "flash-attention",
    "travis_ci": false,
    "coveralls": false,
    "github_actions": true,
    "lcname": "flash-attn-wheels-test"
}
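The digests block records three hashes of the sdist. The blake2b_256 value is BLAKE2b truncated to a 32-byte digest, and it also supplies the 44/fd/e920... path segments of the download URL. A sketch of re-verifying a downloaded copy against those recorded values, again assuming the file is still hosted:

# Sketch: download the sdist recorded above and confirm it matches the
# digests in the metadata. Assumes the file is still on files.pythonhosted.org.
import hashlib
import urllib.request

SDIST_URL = ("https://files.pythonhosted.org/packages/44/fd/"
             "e920608e4f115c21a340d66015dc0258b2e99e699c7cfc4fe8b433dda411/"
             "flash_attn_wheels_test-2.0.8.post17.tar.gz")
EXPECTED = {
    "md5": "045a7d3474e5ca148d4f4aeb50d19621",
    "sha256": "632018117f1acba417d574ca4d66a0a6ae51e107ed3ed2b897381af0d17ef3cc",
    "blake2b_256": "44fde920608e4f115c21a340d66015dc0258b2e99e699c7cfc4fe8b433dda411",
}

with urllib.request.urlopen(SDIST_URL) as resp:
    data = resp.read()
assert len(data) == 1898451  # "size" field from the metadata

actual = {
    "md5": hashlib.md5(data).hexdigest(),
    "sha256": hashlib.sha256(data).hexdigest(),
    # PyPI's blake2b_256 is BLAKE2b with a 32-byte digest
    "blake2b_256": hashlib.blake2b(data, digest_size=32).hexdigest(),
}
for name, expected in EXPECTED.items():
    assert actual[name] == expected, f"{name} mismatch"
print("All digests match.")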
        