vllm-flash-attn


Name: vllm-flash-attn
Version: 2.6.2
Home page: https://github.com/vllm-project/flash-attention.git
Summary: Forward-only flash-attn
Upload time: 2024-09-05 20:36:33
Maintainer: None
Docs URL: None
Author: vLLM Team
Requires Python: >=3.8
License: None
Keywords: None
Bugtrack URL: None
Requirements: No requirements were recorded.
Travis-CI: No Travis.
Coveralls test coverage: No coveralls.
Description: Forward-only flash-attn package built for PyTorch 2.4.0 and CUDA 12.1
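
The wheels listed below are built against PyTorch 2.4.0 and CUDA 12.1, and only the forward kernels are compiled, so the package targets inference workloads (pip install vllm-flash-attn). A minimal usage sketch follows, assuming this build re-exports the upstream flash-attn entry point flash_attn_func with the usual (batch, seqlen, nheads, headdim) fp16/bf16 CUDA tensors; confirm the exported symbols against the vllm-project/flash-attention sources.

import torch
from vllm_flash_attn import flash_attn_func  # assumed import path; mirrors upstream flash-attn

# Query/key/value laid out as (batch, seqlen, nheads, headdim), half precision on GPU.
batch, seqlen, nheads, headdim = 2, 1024, 16, 128
q = torch.randn(batch, seqlen, nheads, headdim, dtype=torch.float16, device="cuda")
k = torch.randn_like(q)
v = torch.randn_like(q)

# Forward-only build: no backward kernels ship in this wheel, so run under
# inference_mode rather than inside a training step.
with torch.inference_mode():
    out = flash_attn_func(q, k, v, causal=True)

print(out.shape)  # (2, 1024, 16, 128)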

            

Raw data

{
    "_id": null,
    "home_page": "https://github.com/vllm-project/flash-attention.git",
    "name": "vllm-flash-attn",
    "maintainer": null,
    "docs_url": null,
    "requires_python": ">=3.8",
    "maintainer_email": null,
    "keywords": null,
    "author": "vLLM Team",
    "author_email": null,
    "download_url": null,
    "platform": null,
    "description": "Forward-only flash-attn package built for PyTorch 2.4.0 and CUDA 12.1\n",
    "bugtrack_url": null,
    "license": null,
    "summary": "Forward-only flash-attn",
    "version": "2.6.2",
    "project_urls": {
        "Homepage": "https://github.com/vllm-project/flash-attention.git"
    },
    "split_keywords": [],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "528c37d8f8e830d75439a5521b31cc0cdac9d17c9d9a86ce1ed762c97965b5e8",
                "md5": "3798c70a2dbb7a0a83b167283d62f593",
                "sha256": "a0c52274036c10c1025bdedf233a52312bb21f8443b6bb1c89b7824e18c2fbbb"
            },
            "downloads": -1,
            "filename": "vllm_flash_attn-2.6.2-cp310-cp310-manylinux1_x86_64.whl",
            "has_sig": false,
            "md5_digest": "3798c70a2dbb7a0a83b167283d62f593",
            "packagetype": "bdist_wheel",
            "python_version": "cp310",
            "requires_python": ">=3.8",
            "size": 75813172,
            "upload_time": "2024-09-05T20:36:33",
            "upload_time_iso_8601": "2024-09-05T20:36:33.680502Z",
            "url": "https://files.pythonhosted.org/packages/52/8c/37d8f8e830d75439a5521b31cc0cdac9d17c9d9a86ce1ed762c97965b5e8/vllm_flash_attn-2.6.2-cp310-cp310-manylinux1_x86_64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "7d89d2bee0718dc70830903977f006c80b1f0f60820d96295fc956db6236a3ef",
                "md5": "af3e3abed30501fb78c9c77eda0374a8",
                "sha256": "6e73a16c5eac1a5a15a689af0e320053f7eac753dd3f3201d7ad998e4634fb34"
            },
            "downloads": -1,
            "filename": "vllm_flash_attn-2.6.2-cp311-cp311-manylinux1_x86_64.whl",
            "has_sig": false,
            "md5_digest": "af3e3abed30501fb78c9c77eda0374a8",
            "packagetype": "bdist_wheel",
            "python_version": "cp311",
            "requires_python": ">=3.8",
            "size": 75829458,
            "upload_time": "2024-09-05T20:36:38",
            "upload_time_iso_8601": "2024-09-05T20:36:38.253873Z",
            "url": "https://files.pythonhosted.org/packages/7d/89/d2bee0718dc70830903977f006c80b1f0f60820d96295fc956db6236a3ef/vllm_flash_attn-2.6.2-cp311-cp311-manylinux1_x86_64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "8f84cf51bbf22b12d25f578b11a5f9a17918b34a9c82f72219f96447b518fe3d",
                "md5": "3f024d3d56a23d8c323f36aaf2e0e824",
                "sha256": "6d4ce39ff18c8aa21003e34fe3ac4a4580a0cbec671882ee1e8bf900b4538ced"
            },
            "downloads": -1,
            "filename": "vllm_flash_attn-2.6.2-cp312-cp312-manylinux1_x86_64.whl",
            "has_sig": false,
            "md5_digest": "3f024d3d56a23d8c323f36aaf2e0e824",
            "packagetype": "bdist_wheel",
            "python_version": "cp312",
            "requires_python": ">=3.8",
            "size": 75830192,
            "upload_time": "2024-09-05T20:36:42",
            "upload_time_iso_8601": "2024-09-05T20:36:42.990055Z",
            "url": "https://files.pythonhosted.org/packages/8f/84/cf51bbf22b12d25f578b11a5f9a17918b34a9c82f72219f96447b518fe3d/vllm_flash_attn-2.6.2-cp312-cp312-manylinux1_x86_64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "988efe850be5b7371d9d1dbf042558431030abeb9b348413ba46ad38e8ca3d7a",
                "md5": "ab6afe5aed60ba0cd6d57fb1ec2a46f4",
                "sha256": "c9b60617513ec3170d1b3c1378ae1330cce3b1cf699b8b826529f32bd319172e"
            },
            "downloads": -1,
            "filename": "vllm_flash_attn-2.6.2-cp38-cp38-manylinux1_x86_64.whl",
            "has_sig": false,
            "md5_digest": "ab6afe5aed60ba0cd6d57fb1ec2a46f4",
            "packagetype": "bdist_wheel",
            "python_version": "cp38",
            "requires_python": ">=3.8",
            "size": 75815807,
            "upload_time": "2024-09-05T20:36:47",
            "upload_time_iso_8601": "2024-09-05T20:36:47.840560Z",
            "url": "https://files.pythonhosted.org/packages/98/8e/fe850be5b7371d9d1dbf042558431030abeb9b348413ba46ad38e8ca3d7a/vllm_flash_attn-2.6.2-cp38-cp38-manylinux1_x86_64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "37bbde4c8a771b1183cd6d7f4b8616b9c579cff28eb165333ed521ac999405d6",
                "md5": "8938359525db471e3836d25e63d03f75",
                "sha256": "6637cfc2b815296041217c5ec7dda67e3c6f9bea724c7c3f8ade97773332b02d"
            },
            "downloads": -1,
            "filename": "vllm_flash_attn-2.6.2-cp39-cp39-manylinux1_x86_64.whl",
            "has_sig": false,
            "md5_digest": "8938359525db471e3836d25e63d03f75",
            "packagetype": "bdist_wheel",
            "python_version": "cp39",
            "requires_python": ">=3.8",
            "size": 75812161,
            "upload_time": "2024-09-05T20:36:52",
            "upload_time_iso_8601": "2024-09-05T20:36:52.863071Z",
            "url": "https://files.pythonhosted.org/packages/37/bb/de4c8a771b1183cd6d7f4b8616b9c579cff28eb165333ed521ac999405d6/vllm_flash_attn-2.6.2-cp39-cp39-manylinux1_x86_64.whl",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2024-09-05 20:36:33",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "vllm-project",
    "github_project": "flash-attention",
    "travis_ci": false,
    "coveralls": false,
    "github_actions": true,
    "lcname": "vllm-flash-attn"
}
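
The raw data above is the release payload recorded for this version. A similar document can be fetched live from PyPI's JSON API; a short sketch follows, noting that the live endpoint nests most of these fields under an "info" key while the per-file entries appear under "urls".

import json
import urllib.request

# Standard PyPI JSON API endpoint: /pypi/<name>/<version>/json
url = "https://pypi.org/pypi/vllm-flash-attn/2.6.2/json"
with urllib.request.urlopen(url) as resp:
    data = json.load(resp)

print(data["info"]["summary"])  # Forward-only flash-attn
for wheel in data["urls"]:
    # Each entry carries the filename, size, and digests shown above.
    print(wheel["filename"], wheel["size"], wheel["digests"]["sha256"])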
        