Name | llama-index-embeddings-ipex-llm (JSON) |
Version | 0.3.0 (JSON) |
download |
home_page | None |
Summary | llama-index embeddings ipex-llm integration |
upload_time | 2024-11-18 02:29:22 |
maintainer | None |
docs_url | None |
author | Your Name |
requires_python | <4.0,>=3.9 |
license | MIT |
keywords |
|
VCS |
|
bugtrack_url |
|
requirements |
No requirements were recorded.
|
Travis-CI |
No Travis.
|
coveralls test coverage |
No coveralls.
|
# LlamaIndex Embeddings Integration: Ipex_Llm
[IPEX-LLM](https://github.com/intel-analytics/ipex-llm) is a PyTorch library for running LLM on Intel CPU and GPU (e.g., local PC with iGPU, discrete GPU such as Arc, Flex and Max) with very low latency. This module allows loading Embedding models with ipex-llm optimizations.
Raw data
{
"_id": null,
"home_page": null,
"name": "llama-index-embeddings-ipex-llm",
"maintainer": null,
"docs_url": null,
"requires_python": "<4.0,>=3.9",
"maintainer_email": null,
"keywords": null,
"author": "Your Name",
"author_email": "you@example.com",
"download_url": "https://files.pythonhosted.org/packages/5a/b1/ec6e213e5534579375e859d26767128d87f89a722f5429a0eb6f1d8fc806/llama_index_embeddings_ipex_llm-0.3.0.tar.gz",
"platform": null,
"description": "# LlamaIndex Embeddings Integration: Ipex_Llm\n\n[IPEX-LLM](https://github.com/intel-analytics/ipex-llm) is a PyTorch library for running LLM on Intel CPU and GPU (e.g., local PC with iGPU, discrete GPU such as Arc, Flex and Max) with very low latency. This module allows loading Embedding models with ipex-llm optimizations.\n",
"bugtrack_url": null,
"license": "MIT",
"summary": "llama-index embeddings ipex-llm integration",
"version": "0.3.0",
"project_urls": null,
"split_keywords": [],
"urls": [
{
"comment_text": "",
"digests": {
"blake2b_256": "c2251dd2a819fb929486c222d05b961e44f4f4ab47667eea7c241ce6414e4e61",
"md5": "f20cff446e45d9306b989d1a522ae915",
"sha256": "6de54bf4bc0a750c9fa40077ea317316c359ee3c8bd461dd8714b1905ade36bd"
},
"downloads": -1,
"filename": "llama_index_embeddings_ipex_llm-0.3.0-py3-none-any.whl",
"has_sig": false,
"md5_digest": "f20cff446e45d9306b989d1a522ae915",
"packagetype": "bdist_wheel",
"python_version": "py3",
"requires_python": "<4.0,>=3.9",
"size": 4811,
"upload_time": "2024-11-18T02:29:21",
"upload_time_iso_8601": "2024-11-18T02:29:21.252521Z",
"url": "https://files.pythonhosted.org/packages/c2/25/1dd2a819fb929486c222d05b961e44f4f4ab47667eea7c241ce6414e4e61/llama_index_embeddings_ipex_llm-0.3.0-py3-none-any.whl",
"yanked": false,
"yanked_reason": null
},
{
"comment_text": "",
"digests": {
"blake2b_256": "5ab1ec6e213e5534579375e859d26767128d87f89a722f5429a0eb6f1d8fc806",
"md5": "7ca2aff1aea14954fc0ef30f5817531c",
"sha256": "3df6750fdd8a042ca1279676061178935c589da3aa04f6cbd9ede6f8cff5567c"
},
"downloads": -1,
"filename": "llama_index_embeddings_ipex_llm-0.3.0.tar.gz",
"has_sig": false,
"md5_digest": "7ca2aff1aea14954fc0ef30f5817531c",
"packagetype": "sdist",
"python_version": "source",
"requires_python": "<4.0,>=3.9",
"size": 4389,
"upload_time": "2024-11-18T02:29:22",
"upload_time_iso_8601": "2024-11-18T02:29:22.838822Z",
"url": "https://files.pythonhosted.org/packages/5a/b1/ec6e213e5534579375e859d26767128d87f89a722f5429a0eb6f1d8fc806/llama_index_embeddings_ipex_llm-0.3.0.tar.gz",
"yanked": false,
"yanked_reason": null
}
],
"upload_time": "2024-11-18 02:29:22",
"github": false,
"gitlab": false,
"bitbucket": false,
"codeberg": false,
"lcname": "llama-index-embeddings-ipex-llm"
}