llama-index-llms-gaudi

Name: llama-index-llms-gaudi
Version: 0.1.0
Summary: llama-index llms gaudi integration
Upload time: 2024-10-09 00:03:07
Author: Your Name
Requires Python: <4.0,>=3.9
License: MIT

# LlamaIndex LLMs Integration with Intel Gaudi

## Installation

```bash
pip install --upgrade-strategy eager optimum[habana]
pip install llama-index-llms-gaudi
pip install llama-index-llms-huggingface
```

## Usage

```python
import argparse
import logging

from llama_index.core import PromptTemplate
from llama_index.llms.gaudi import GaudiLLM

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def setup_parser(parser):
    # Register any command-line arguments your setup needs here, e.g.
    # parser.add_argument("--device", type=str, default="hpu")
    args = parser.parse_args()
    return args


def messages_to_prompt(messages):
    # Render LlamaIndex chat messages into the Zephyr prompt format
    prompt = ""
    for message in messages:
        if message.role == "system":
            prompt += f"<|system|>\n{message.content}</s>\n"
        elif message.role == "user":
            prompt += f"<|user|>\n{message.content}</s>\n"
        elif message.role == "assistant":
            prompt += f"<|assistant|>\n{message.content}</s>\n"
    # Zephyr expects a leading system tag and a trailing assistant tag
    if not prompt.startswith("<|system|>\n"):
        prompt = "<|system|>\n</s>\n" + prompt
    return prompt + "<|assistant|>\n"


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="GaudiLLM Basic Usage Example"
    )
    args = setup_parser(parser)
    args.model_name_or_path = "HuggingFaceH4/zephyr-7b-alpha"

    llm = GaudiLLM(
        args=args,
        logger=logger,
        model_name="HuggingFaceH4/zephyr-7b-alpha",
        tokenizer_name="HuggingFaceH4/zephyr-7b-alpha",
        query_wrapper_prompt=PromptTemplate(
            "<|system|>\n</s>\n<|user|>\n{query_str}</s>\n<|assistant|>\n"
        ),
        context_window=3900,
        max_new_tokens=256,
        generate_kwargs={"temperature": 0.7, "top_k": 50, "top_p": 0.95},
        messages_to_prompt=messages_to_prompt,
        device_map="auto",
    )

    query = "Is the ocean blue?"
    print("\n----------------- Complete ------------------")
    completion_response = llm.complete(query)
    print(completion_response.text)
```
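
Once the model is loaded, the same `GaudiLLM` instance also exposes the rest of the standard LlamaIndex LLM interface. The snippet below is a minimal sketch, assuming it runs directly after the example above (reusing `llm` and `query`); output formatting will vary with your model and generation settings.

```python
from llama_index.core.llms import ChatMessage

# Streaming completion: partial tokens arrive as they are generated
print("\n----------------- Stream Complete ------------------")
for chunk in llm.stream_complete(query):
    print(chunk.delta, end="", flush=True)

# Chat interface: pass role-tagged messages instead of a raw string
print("\n----------------- Chat ------------------")
messages = [
    ChatMessage(role="system", content="You are a helpful assistant."),
    ChatMessage(role="user", content=query),
]
chat_response = llm.chat(messages)
print(chat_response.message.content)
```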

## Examples

- [More Examples](https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/llms/llama-index-llms-gaudi/examples) (a minimal end-to-end sketch follows below)
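
Beyond standalone completion, the Gaudi-backed model can serve as the LLM for a full LlamaIndex pipeline. The following is an illustrative sketch rather than one of this package's documented examples: it assumes `llm` was constructed as in the usage example, that `llama-index-embeddings-huggingface` is additionally installed to provide local embeddings, and that `./data` is a placeholder directory containing your documents.

```python
from llama_index.core import Settings, SimpleDirectoryReader, VectorStoreIndex
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

# Route all LLM calls in LlamaIndex through the Gaudi-backed model
Settings.llm = llm
# Use a local embedding model so no external API key is needed
Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")

# "./data" is a placeholder; point it at your own documents
documents = SimpleDirectoryReader("./data").load_data()
index = VectorStoreIndex.from_documents(documents)

query_engine = index.as_query_engine()
print(query_engine.query("Is the ocean blue?"))
```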

            
