memory-compressed-attention


Name: memory-compressed-attention
Version: 0.0.7
download
home_page: https://github.com/lucidrains/memory-compressed-attention
Summary: Memory-Compressed Self Attention
upload_time: 2023-04-10 03:40:06
maintainer
docs_url: None
author: Phil Wang
requires_python
license: MIT
keywords: transformers, artificial intelligence, attention mechanism
VCS
bugtrack_url
requirements: No requirements were recorded.
Travis-CI: No Travis.
coveralls test coverage: No coveralls.
            
            

Raw data

            {
    "_id": null,
    "home_page": "https://github.com/lucidrains/memory-compressed-attention",
    "name": "memory-compressed-attention",
    "maintainer": "",
    "docs_url": null,
    "requires_python": "",
    "maintainer_email": "",
    "keywords": "transformers,artificial intelligence,attention mechanism",
    "author": "Phil Wang",
    "author_email": "lucidrains@gmail.com",
    "download_url": "https://files.pythonhosted.org/packages/c8/89/a787e9c44144c0c73c295ca634715df1e267408bd88844cf902733dc8d19/memory_compressed_attention-0.0.7.tar.gz",
    "platform": null,
    "description": "",
    "bugtrack_url": null,
    "license": "MIT",
    "summary": "Memory-Compressed Self Attention",
    "version": "0.0.7",
    "split_keywords": [
        "transformers",
        "artificial intelligence",
        "attention mechanism"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "286fcdb1c05f8239abc686bc3dcf132397cc5c2891b6888ce5666fb684f0cd2c",
                "md5": "84d491556d037b51af695aff9ccf5519",
                "sha256": "e2b72b3be29f29ff860aeef8d5f791b7661f8ac34b7d4c2c1818875aa7c8044c"
            },
            "downloads": -1,
            "filename": "memory_compressed_attention-0.0.7-py3-none-any.whl",
            "has_sig": false,
            "md5_digest": "84d491556d037b51af695aff9ccf5519",
            "packagetype": "bdist_wheel",
            "python_version": "py3",
            "requires_python": null,
            "size": 3954,
            "upload_time": "2023-04-10T03:40:04",
            "upload_time_iso_8601": "2023-04-10T03:40:04.789490Z",
            "url": "https://files.pythonhosted.org/packages/28/6f/cdb1c05f8239abc686bc3dcf132397cc5c2891b6888ce5666fb684f0cd2c/memory_compressed_attention-0.0.7-py3-none-any.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "c889a787e9c44144c0c73c295ca634715df1e267408bd88844cf902733dc8d19",
                "md5": "f8906ed1d9c920c3720ad92d6bad853a",
                "sha256": "d3498dd0987ed707dbaadf004f3a8274967513b926c44766981a89980e8a0635"
            },
            "downloads": -1,
            "filename": "memory_compressed_attention-0.0.7.tar.gz",
            "has_sig": false,
            "md5_digest": "f8906ed1d9c920c3720ad92d6bad853a",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": null,
            "size": 3957,
            "upload_time": "2023-04-10T03:40:06",
            "upload_time_iso_8601": "2023-04-10T03:40:06.401466Z",
            "url": "https://files.pythonhosted.org/packages/c8/89/a787e9c44144c0c73c295ca634715df1e267408bd88844cf902733dc8d19/memory_compressed_attention-0.0.7.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2023-04-10 03:40:06",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "github_user": "lucidrains",
    "github_project": "memory-compressed-attention",
    "travis_ci": false,
    "coveralls": false,
    "github_actions": true,
    "lcname": "memory-compressed-attention"
}
        
Elapsed time: 0.07839s