# llama-cpp-haystack

| Field | Value |
| --- | --- |
| Name | llama-cpp-haystack |
| Version | 1.4.0 |
| Summary | An integration between the llama.cpp LLM framework and Haystack |
| Author | Ashwin Mathur |
| Maintainer | None |
| Home page | None |
| Docs URL | None |
| Requires Python | >=3.9 |
| License | None |
| Keywords | None |
| Upload time | 2025-10-23 10:20:54 |
| VCS | GitHub (deepset-ai/haystack-core-integrations) |
| Bugtrack URL | None |
| Requirements | hatch |
| Travis CI | No Travis builds |
| Coveralls test coverage | No coveralls data |
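The summary above describes the package as a bridge between the llama.cpp inference library and Haystack pipelines. As a hedged illustration only, the sketch below shows how the integration's generator component is typically wired up; the import path, component name, and parameters follow the Haystack integration documentation and should be checked against the integration page linked in the README below, and the model path is a placeholder for any locally downloaded GGUF file.

```python
# Hedged sketch, not taken from this page: install with `pip install llama-cpp-haystack`.
# Import path and parameter names follow the Haystack integration docs; verify before use.
from haystack_integrations.components.generators.llama_cpp import LlamaCppGenerator

generator = LlamaCppGenerator(
    model="models/openchat-3.5.gguf",  # placeholder path to a llama.cpp-compatible GGUF model
    n_ctx=2048,                        # context window forwarded to llama.cpp
    generation_kwargs={"max_tokens": 128, "temperature": 0.1},
)
generator.warm_up()  # loads the GGUF model into memory before the first call

result = generator.run("Briefly explain what llama.cpp is.")
print(result["replies"][0])
```

Because llama.cpp runs the model locally, no API key is involved, and the component can be dropped into a Haystack `Pipeline` like any other generator.
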
# llama-cpp-haystack

[![PyPI - Version](https://img.shields.io/pypi/v/llama-cpp-haystack.svg)](https://pypi.org/project/llama-cpp-haystack)
[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/llama-cpp-haystack.svg)](https://pypi.org/project/llama-cpp-haystack)

- [Integration page](https://haystack.deepset.ai/integrations/llama_-_cpp)
- [Changelog](https://github.com/deepset-ai/haystack-core-integrations/blob/main/integrations/llama_cpp/CHANGELOG.md)

---

## Contributing

Refer to the general [Contribution Guidelines](https://github.com/deepset-ai/haystack-core-integrations/blob/main/CONTRIBUTING.md).




            

## Raw data
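The record below largely mirrors what the public PyPI JSON API returns for this project; fields such as `github`, `github_actions`, and `requirements` are added by the indexing service rather than by PyPI. A minimal sketch for pulling the upstream metadata directly, assuming the `requests` package is installed:

```python
# Hedged sketch: fetch this package's metadata from PyPI's public JSON API.
import requests

resp = requests.get("https://pypi.org/pypi/llama-cpp-haystack/json", timeout=30)
resp.raise_for_status()
data = resp.json()

print(data["info"]["version"])           # e.g. "1.4.0"
print(data["info"]["requires_python"])   # ">=3.9"
for artifact in data["urls"]:            # files published for the latest release
    print(artifact["filename"], artifact["digests"]["sha256"])
```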

{
    "_id": null,
    "home_page": null,
    "name": "llama-cpp-haystack",
    "maintainer": null,
    "docs_url": null,
    "requires_python": ">=3.9",
    "maintainer_email": null,
    "keywords": null,
    "author": "Ashwin Mathur",
    "author_email": "deepset GmbH <info@deepset.ai>",
    "download_url": "https://files.pythonhosted.org/packages/ac/79/0367d1a8fa30df5ab1cc3223b968fa76d5f05f38630a447814457e814e49/llama_cpp_haystack-1.4.0.tar.gz",
    "platform": null,
    "description": "# llama-cpp-haystack\n\n[![PyPI - Version](https://img.shields.io/pypi/v/llama-cpp-haystack.svg)](https://pypi.org/project/llama-cpp-haystack)\n[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/llama-cpp-haystack.svg)](https://pypi.org/project/llama-cpp-haystack)\n\n- [Integration page](https://haystack.deepset.ai/integrations/llama_-_cpp)\n- [Changelog](https://github.com/deepset-ai/haystack-core-integrations/blob/main/integrations/llama_cpp/CHANGELOG.md)\n\n---\n\n## Contributing\n\nRefer to the general [Contribution Guidelines](https://github.com/deepset-ai/haystack-core-integrations/blob/main/CONTRIBUTING.md).\n\n\n\n",
    "bugtrack_url": null,
    "license": null,
    "summary": "An integration between the llama.cpp LLM framework and Haystack",
    "version": "1.4.0",
    "project_urls": {
        "Documentation": "https://github.com/deepset-ai/haystack-core-integrations/tree/main/integrations/llama_cpp#readme",
        "Issues": "https://github.com/deepset-ai/haystack-core-integrations/issues",
        "Source": "https://github.com/deepset-ai/haystack-core-integrations/tree/main/integrations/llama_cpp"
    },
    "split_keywords": [],
    "urls": [
        {
            "comment_text": null,
            "digests": {
                "blake2b_256": "7591b10e6d30aeeb0e3954a37a82f00121b5a4dd62c1cf08155d74450c7f8704",
                "md5": "02dc9bfec3b916f1a59961accdd4af8c",
                "sha256": "b84f84498a661a38bbf0a245066ee19e9b2b893f69563322cc505ad1aa068df3"
            },
            "downloads": -1,
            "filename": "llama_cpp_haystack-1.4.0-py3-none-any.whl",
            "has_sig": false,
            "md5_digest": "02dc9bfec3b916f1a59961accdd4af8c",
            "packagetype": "bdist_wheel",
            "python_version": "py3",
            "requires_python": ">=3.9",
            "size": 14421,
            "upload_time": "2025-10-23T10:20:55",
            "upload_time_iso_8601": "2025-10-23T10:20:55.545570Z",
            "url": "https://files.pythonhosted.org/packages/75/91/b10e6d30aeeb0e3954a37a82f00121b5a4dd62c1cf08155d74450c7f8704/llama_cpp_haystack-1.4.0-py3-none-any.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": null,
            "digests": {
                "blake2b_256": "ac790367d1a8fa30df5ab1cc3223b968fa76d5f05f38630a447814457e814e49",
                "md5": "17afc40dd7cd8f94c275dcae6cbe7200",
                "sha256": "3b484401bb6401afc9589d9b8c9e2c87fc7e7f5af5327db1d5ba7c7bb99159b2"
            },
            "downloads": -1,
            "filename": "llama_cpp_haystack-1.4.0.tar.gz",
            "has_sig": false,
            "md5_digest": "17afc40dd7cd8f94c275dcae6cbe7200",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": ">=3.9",
            "size": 96696,
            "upload_time": "2025-10-23T10:20:54",
            "upload_time_iso_8601": "2025-10-23T10:20:54.760691Z",
            "url": "https://files.pythonhosted.org/packages/ac/79/0367d1a8fa30df5ab1cc3223b968fa76d5f05f38630a447814457e814e49/llama_cpp_haystack-1.4.0.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2025-10-23 10:20:54",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "deepset-ai",
    "github_project": "haystack-core-integrations",
    "travis_ci": false,
    "coveralls": false,
    "github_actions": true,
    "requirements": [
        {
            "name": "hatch",
            "specs": []
        }
    ],
    "lcname": "llama-cpp-haystack"
}
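The `digests` listed for each file above can be used to verify an artifact after download. A minimal sketch, again assuming `requests`, using the sdist URL and SHA-256 value from the record:

```python
# Hedged sketch: download the sdist listed above and check its SHA-256 digest.
import hashlib
import requests

URL = (
    "https://files.pythonhosted.org/packages/ac/79/"
    "0367d1a8fa30df5ab1cc3223b968fa76d5f05f38630a447814457e814e49/"
    "llama_cpp_haystack-1.4.0.tar.gz"
)
EXPECTED_SHA256 = "3b484401bb6401afc9589d9b8c9e2c87fc7e7f5af5327db1d5ba7c7bb99159b2"

payload = requests.get(URL, timeout=60).content
digest = hashlib.sha256(payload).hexdigest()
print("OK" if digest == EXPECTED_SHA256 else f"MISMATCH: {digest}")
```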
        