tokenize-transformer


Name: tokenize-transformer
Version: 0.2.14
Summary: Tokenizing and processing text inputs with transformer models
Upload time: 2024-08-01 18:39:29
Author: Urdu Ibdaa
Maintainer: None
Home page: None
Docs URL: None
Requires Python: >=3.6
License: None
Keywords: tokenization, text-processing, nlp, transformers
Requirements: No requirements were recorded.
Travis-CI: No Travis.
Coveralls test coverage: No coveralls.
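
Installation presumably follows the standard PyPI flow (pip install tokenize-transformer). A minimal environment check is sketched below; it assumes Python 3.8+ for importlib.metadata, though the listing itself only requires >=3.6:

    # Sketch: confirm the installed distribution and version match the listing.
    from importlib.metadata import version, PackageNotFoundError

    try:
        print(version("tokenize-transformer"))  # expected: 0.2.14
    except PackageNotFoundError:
        print("tokenize-transformer is not installed")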
A package that provides functionality for tokenizing and processing text inputs using transformer models and other NLP tools.

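The listing documents no public API for tokenize-transformer itself, so the sketch below illustrates the technique the summary names, tokenizing text with a transformer model's tokenizer, using the Hugging Face transformers library as a stand-in; the library, checkpoint, and calls are assumptions, not this package's interface:

    # Sketch of transformer-based tokenization via Hugging Face transformers
    # (an assumed stand-in; tokenize-transformer's own API is undocumented here).
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")

    text = "Tokenizing and processing text inputs with transformer models."
    encoding = tokenizer(text)

    print(encoding["input_ids"])                                   # integer token IDs
    print(tokenizer.convert_ids_to_tokens(encoding["input_ids"]))  # subword tokens
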
Raw data

{
    "_id": null,
    "home_page": null,
    "name": "tokenize-transformer",
    "maintainer": null,
    "docs_url": null,
    "requires_python": ">=3.6",
    "maintainer_email": null,
    "keywords": "tokenization, text-processing, nlp, transformers",
    "author": "Urdu Ibdaa",
    "author_email": "urduibdaa@uts.rf.gd",
    "download_url": "https://files.pythonhosted.org/packages/f0/1d/34115ef1169307d1bc6b91caea2f0c851b195aef5e03bdcd6a7269d2e2fd/tokenize_transformer-0.2.14.tar.gz",
    "platform": null,
    "description": "A package that provides functionalities for tokenizing and processing text inputs using transformer models and other NLP tools.\n",
    "bugtrack_url": null,
    "license": null,
    "summary": "Tokenizing and processing text inputs with transformer models",
    "version": "0.2.14",
    "project_urls": null,
    "split_keywords": [
        "tokenization",
        " text-processing",
        " nlp",
        " transformers"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "13a8535d7122e54b056a2915bb4ac32a0c58e1c5eaea7bf309b0af2683cb4b7f",
                "md5": "73dc4a91ffabb8383f71ab8532af7774",
                "sha256": "4edd262d23b371a2a0b335658009675a88e1b08086c0f2b1681dae9b91c3f72a"
            },
            "downloads": -1,
            "filename": "tokenize_transformer-0.2.14-py3-none-any.whl",
            "has_sig": false,
            "md5_digest": "73dc4a91ffabb8383f71ab8532af7774",
            "packagetype": "bdist_wheel",
            "python_version": "py3",
            "requires_python": ">=3.6",
            "size": 3802,
            "upload_time": "2024-08-01T18:39:28",
            "upload_time_iso_8601": "2024-08-01T18:39:28.273985Z",
            "url": "https://files.pythonhosted.org/packages/13/a8/535d7122e54b056a2915bb4ac32a0c58e1c5eaea7bf309b0af2683cb4b7f/tokenize_transformer-0.2.14-py3-none-any.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "f01d34115ef1169307d1bc6b91caea2f0c851b195aef5e03bdcd6a7269d2e2fd",
                "md5": "c14943f3af2dbadeff0e6e7d099b1199",
                "sha256": "fb35e116a2a8103f1371c065312fb39787eb102c9839e2ee1ee58658fe5148ef"
            },
            "downloads": -1,
            "filename": "tokenize_transformer-0.2.14.tar.gz",
            "has_sig": false,
            "md5_digest": "c14943f3af2dbadeff0e6e7d099b1199",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": ">=3.6",
            "size": 4408,
            "upload_time": "2024-08-01T18:39:29",
            "upload_time_iso_8601": "2024-08-01T18:39:29.555677Z",
            "url": "https://files.pythonhosted.org/packages/f0/1d/34115ef1169307d1bc6b91caea2f0c851b195aef5e03bdcd6a7269d2e2fd/tokenize_transformer-0.2.14.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2024-08-01 18:39:29",
    "github": false,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "lcname": "tokenize-transformer"
}
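
The digests above allow a downloaded artifact to be verified before installation. A minimal sketch using only the standard library, with the sdist URL and sha256 copied from the raw data:

    # Sketch: verify the sdist's sha256 against the value published in the listing.
    import hashlib
    import urllib.request

    URL = "https://files.pythonhosted.org/packages/f0/1d/34115ef1169307d1bc6b91caea2f0c851b195aef5e03bdcd6a7269d2e2fd/tokenize_transformer-0.2.14.tar.gz"
    EXPECTED_SHA256 = "fb35e116a2a8103f1371c065312fb39787eb102c9839e2ee1ee58658fe5148ef"

    data = urllib.request.urlopen(URL).read()
    digest = hashlib.sha256(data).hexdigest()
    print("sha256 matches:", digest == EXPECTED_SHA256)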
        