adjacent-attention-pytorch


Name: adjacent-attention-pytorch
Version: 0.0.12
download
home_page: https://github.com/lucidrains/adjacent-attention-pytorch
Summary: Adjacent Attention Network - Pytorch
upload_time: 2022-12-24 16:52:18
maintainer
docs_url: None
author: Phil Wang
requires_python
license: MIT
keywords: artificial intelligence, attention mechanism, graph neural network, transformers
VCS
bugtrack_url
requirements No requirements were recorded.
Travis-CI No Travis.
coveralls test coverage No coveralls.
            
            

Raw data

            {
    "_id": null,
    "home_page": "https://github.com/lucidrains/adjacent-attention-pytorch",
    "name": "adjacent-attention-pytorch",
    "maintainer": "",
    "docs_url": null,
    "requires_python": "",
    "maintainer_email": "",
    "keywords": "artificial intelligence,attention mechanism,graph neural network,transformers",
    "author": "Phil Wang",
    "author_email": "lucidrains@gmail.com",
    "download_url": "https://files.pythonhosted.org/packages/37/ca/409f91f2072996d499a73b658d27eb80e310b207b563deab2286623c1aa7/adjacent-attention-pytorch-0.0.12.tar.gz",
    "platform": null,
    "description": "",
    "bugtrack_url": null,
    "license": "MIT",
    "summary": "Adjacent Attention Network - Pytorch",
    "version": "0.0.12",
    "split_keywords": [
        "artificial intelligence",
        "attention mechanism",
        "graph neural network",
        "transformers"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "md5": "391b0332cacaf01e1528d2bc64bebfda",
                "sha256": "69c9d0702343465662570ecb22938d771cf04e2b3c1595f1ef09e8ea63f40bf6"
            },
            "downloads": -1,
            "filename": "adjacent_attention_pytorch-0.0.12-py3-none-any.whl",
            "has_sig": false,
            "md5_digest": "391b0332cacaf01e1528d2bc64bebfda",
            "packagetype": "bdist_wheel",
            "python_version": "py3",
            "requires_python": null,
            "size": 5040,
            "upload_time": "2022-12-24T16:52:16",
            "upload_time_iso_8601": "2022-12-24T16:52:16.661320Z",
            "url": "https://files.pythonhosted.org/packages/d3/de/441f2e623c22701ab0d3c05c8a9943ca86a82b96982353c227a6ff486589/adjacent_attention_pytorch-0.0.12-py3-none-any.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "md5": "6ce43da69b80bab2db01041bc6655ab1",
                "sha256": "77a9b2409cabe406ed35727cb8ce2493c3b5a8a30034aca889c85dbb725bf066"
            },
            "downloads": -1,
            "filename": "adjacent-attention-pytorch-0.0.12.tar.gz",
            "has_sig": false,
            "md5_digest": "6ce43da69b80bab2db01041bc6655ab1",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": null,
            "size": 5256,
            "upload_time": "2022-12-24T16:52:18",
            "upload_time_iso_8601": "2022-12-24T16:52:18.160880Z",
            "url": "https://files.pythonhosted.org/packages/37/ca/409f91f2072996d499a73b658d27eb80e310b207b563deab2286623c1aa7/adjacent-attention-pytorch-0.0.12.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2022-12-24 16:52:18",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "github_user": "lucidrains",
    "github_project": "adjacent-attention-pytorch",
    "lcname": "adjacent-attention-pytorch"
}
        
Elapsed time: 0.02794s