taylor-series-linear-attention


Name: taylor-series-linear-attention JSON
Version: 0.1.12 PyPI version JSON
download
home_page: https://github.com/lucidrains/taylor-series-linear-attention
Summary: Taylor Series Linear Attention
upload_time: 2024-08-18 16:59:01
maintainer: None
docs_url: None
author: Phil Wang
requires_python: None
license: MIT
keywords: artificial intelligence, deep learning, attention mechanism
VCS
bugtrack_url
requirements No requirements were recorded.
Travis-CI No Travis.
coveralls test coverage No coveralls.
            
            

Raw data

            {
    "_id": null,
    "home_page": "https://github.com/lucidrains/taylor-series-linear-attention",
    "name": "taylor-series-linear-attention",
    "maintainer": null,
    "docs_url": null,
    "requires_python": null,
    "maintainer_email": null,
    "keywords": "artificial intelligence, deep learning, attention mechanism",
    "author": "Phil Wang",
    "author_email": "lucidrains@gmail.com",
    "download_url": "https://files.pythonhosted.org/packages/8f/63/2a2bd42c72e8f0fa054d3da59e86ba5d19f9baf8d3ba6099159a0e964c7f/taylor_series_linear_attention-0.1.12.tar.gz",
    "platform": null,
    "description": "",
    "bugtrack_url": null,
    "license": "MIT",
    "summary": "Taylor Series Linear Attention",
    "version": "0.1.12",
    "project_urls": {
        "Homepage": "https://github.com/lucidrains/taylor-series-linear-attention"
    },
    "split_keywords": [
        "artificial intelligence",
        " deep learning",
        " attention mechanism"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "f045027cd68ba4320478516c237cea7bbeae0848f74ecf80f012e5de459105ee",
                "md5": "1048f0e5dc3912f848b3ab1d893ac902",
                "sha256": "addd78a987866cba60ad003679a7f400cfcd71b2b3334b50ef44c43a2f75e8a3"
            },
            "downloads": -1,
            "filename": "taylor_series_linear_attention-0.1.12-py3-none-any.whl",
            "has_sig": false,
            "md5_digest": "1048f0e5dc3912f848b3ab1d893ac902",
            "packagetype": "bdist_wheel",
            "python_version": "py3",
            "requires_python": null,
            "size": 7761,
            "upload_time": "2024-08-18T16:59:00",
            "upload_time_iso_8601": "2024-08-18T16:59:00.052885Z",
            "url": "https://files.pythonhosted.org/packages/f0/45/027cd68ba4320478516c237cea7bbeae0848f74ecf80f012e5de459105ee/taylor_series_linear_attention-0.1.12-py3-none-any.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "8f632a2bd42c72e8f0fa054d3da59e86ba5d19f9baf8d3ba6099159a0e964c7f",
                "md5": "d61f5522e8415d1664b5e52436cc8b7d",
                "sha256": "34873ee6177ef40ec45adf362d9130db62b9f1ac18807d694ae7f619a0b9e991"
            },
            "downloads": -1,
            "filename": "taylor_series_linear_attention-0.1.12.tar.gz",
            "has_sig": false,
            "md5_digest": "d61f5522e8415d1664b5e52436cc8b7d",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": null,
            "size": 8530,
            "upload_time": "2024-08-18T16:59:01",
            "upload_time_iso_8601": "2024-08-18T16:59:01.392261Z",
            "url": "https://files.pythonhosted.org/packages/8f/63/2a2bd42c72e8f0fa054d3da59e86ba5d19f9baf8d3ba6099159a0e964c7f/taylor_series_linear_attention-0.1.12.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2024-08-18 16:59:01",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "lucidrains",
    "github_project": "taylor-series-linear-attention",
    "travis_ci": false,
    "coveralls": false,
    "github_actions": true,
    "requirements": [],
    "lcname": "taylor-series-linear-attention"
}
        
Elapsed time: 0.42043s