Name | vllm | JSON |
Version | 0.6.2 | JSON | download |
home_page | https://github.com/vllm-project/vllm |
Summary | A high-throughput and memory-efficient inference and serving engine for LLMs |
upload_time | 2024-09-25 22:36:56 |
maintainer | None |
docs_url | None |
author | vLLM Team |
requires_python | >=3.8 |
license | Apache 2.0 |
keywords | |
VCS | |
bugtrack_url | |
requirements | No requirements were recorded. |
Travis-CI | No Travis. |
coveralls test coverage | No coveralls. |
Raw data
{
"_id": null,
"home_page": "https://github.com/vllm-project/vllm",
"name": "vllm",
"maintainer": null,
"docs_url": null,
"requires_python": ">=3.8",
"maintainer_email": null,
"keywords": null,
"author": "vLLM Team",
"author_email": null,
"download_url": "https://files.pythonhosted.org/packages/0c/26/3f0ad89fc748475a02c5b3d1a6cb2e64153c5eb7595afadf468905929a03/vllm-0.6.2.tar.gz",
"platform": null,
"description": "",
"bugtrack_url": null,
"license": "Apache 2.0",
"summary": "A high-throughput and memory-efficient inference and serving engine for LLMs",
"version": "0.6.2",
"project_urls": {
"Documentation": "https://vllm.readthedocs.io/en/latest/",
"Homepage": "https://github.com/vllm-project/vllm"
},
"split_keywords": [],
"urls": [
{
"comment_text": "",
"digests": {
"blake2b_256": "7cb1a521b04c77f50b3b6b7a34cf25affbdbac195e4fc7c87e364c5084d60d47",
"md5": "35ce4dba45167c4e860375b97d0c3fcc",
"sha256": "414e2244a6c3a97175e7659f9a6e10c2e295376d1d1e4bec704da18caa237f0b"
},
"downloads": -1,
"filename": "vllm-0.6.2-cp38-abi3-manylinux1_x86_64.whl",
"has_sig": false,
"md5_digest": "35ce4dba45167c4e860375b97d0c3fcc",
"packagetype": "bdist_wheel",
"python_version": "cp38",
"requires_python": ">=3.8",
"size": 228341272,
"upload_time": "2024-09-25T22:36:49",
"upload_time_iso_8601": "2024-09-25T22:36:49.460921Z",
"url": "https://files.pythonhosted.org/packages/7c/b1/a521b04c77f50b3b6b7a34cf25affbdbac195e4fc7c87e364c5084d60d47/vllm-0.6.2-cp38-abi3-manylinux1_x86_64.whl",
"yanked": false,
"yanked_reason": null
},
{
"comment_text": "",
"digests": {
"blake2b_256": "0c263f0ad89fc748475a02c5b3d1a6cb2e64153c5eb7595afadf468905929a03",
"md5": "d15914905f459b08ad664c048805164f",
"sha256": "2fffd856a25d3defa38a539150fccf9126959ce4c6781c1c5a76d5da7216af59"
},
"downloads": -1,
"filename": "vllm-0.6.2.tar.gz",
"has_sig": false,
"md5_digest": "d15914905f459b08ad664c048805164f",
"packagetype": "sdist",
"python_version": "source",
"requires_python": ">=3.8",
"size": 2641029,
"upload_time": "2024-09-25T22:36:56",
"upload_time_iso_8601": "2024-09-25T22:36:56.019835Z",
"url": "https://files.pythonhosted.org/packages/0c/26/3f0ad89fc748475a02c5b3d1a6cb2e64153c5eb7595afadf468905929a03/vllm-0.6.2.tar.gz",
"yanked": false,
"yanked_reason": null
}
],
"upload_time": "2024-09-25 22:36:56",
"github": true,
"gitlab": false,
"bitbucket": false,
"codeberg": false,
"github_user": "vllm-project",
"github_project": "vllm",
"travis_ci": false,
"coveralls": false,
"github_actions": true,
"lcname": "vllm"
}