tokenize-text


Name: tokenize-text
Version: 0.2.32
home_page: None
Summary: Tokenizing and processing text inputs with transformer models
upload_time: 2024-08-11 21:53:24
maintainer: None
docs_url: None
author: Urdu NLTK
requires_python: >=3.6
license: None
keywords: tokenization, text-processing, nlp, transformers
VCS: None
bugtrack_url: None
requirements: No requirements were recorded.
Travis-CI: No Travis.
coveralls test coverage: No coveralls.
A package that provides functionality for tokenizing and processing text inputs using transformer models and other NLP tools.

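This page does not document the package's own API, so the snippet below is only a hedged sketch of the kind of transformer-based tokenization the summary describes. It uses the Hugging Face transformers library as a stand-in, not tokenize-text itself; the checkpoint name and all identifiers are illustrative assumptions.

    # Illustrative sketch only: uses Hugging Face `transformers` as a
    # stand-in for the undocumented tokenize-text API. The checkpoint
    # name is an arbitrary assumption.
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")

    text = "Tokenization splits raw text into model-ready units."
    encoded = tokenizer(text)

    print(tokenizer.tokenize(text))   # subword token strings
    print(encoded["input_ids"])       # corresponding integer token IDs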
Raw data

            {
    "_id": null,
    "home_page": null,
    "name": "tokenize-text",
    "maintainer": null,
    "docs_url": null,
    "requires_python": ">=3.6",
    "maintainer_email": null,
    "keywords": "tokenization, text-processing, nlp, transformers",
    "author": "Urdu NLTK",
    "author_email": "urdu-nltk@uts.rf.gd",
    "download_url": "https://files.pythonhosted.org/packages/e5/fb/d5027fe22c54580709eb25f3b80df1978a14477b90b5f5c5b4595c841dba/tokenize_text-0.2.32.tar.gz",
    "platform": null,
    "description": "A package that provides functionalities for tokenizing and processing text inputs using transformer models and other NLP tools.\n",
    "bugtrack_url": null,
    "license": null,
    "summary": "Tokenizing and processing text inputs with transformer models",
    "version": "0.2.32",
    "project_urls": null,
    "split_keywords": [
        "tokenization",
        " text-processing",
        " nlp",
        " transformers"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "80d93adae72518280a3420ac191af5aa04d118c0f0cd3faf66237cc479eba8f3",
                "md5": "008c8101944f1d8ab6baf2440ca3a180",
                "sha256": "a17a8e3a50d82830053026215c054ce1a1706796ebfb91a8d88644300ede0d9f"
            },
            "downloads": -1,
            "filename": "tokenize_text-0.2.32-py3-none-any.whl",
            "has_sig": false,
            "md5_digest": "008c8101944f1d8ab6baf2440ca3a180",
            "packagetype": "bdist_wheel",
            "python_version": "py3",
            "requires_python": ">=3.6",
            "size": 59431,
            "upload_time": "2024-08-11T21:53:23",
            "upload_time_iso_8601": "2024-08-11T21:53:23.345006Z",
            "url": "https://files.pythonhosted.org/packages/80/d9/3adae72518280a3420ac191af5aa04d118c0f0cd3faf66237cc479eba8f3/tokenize_text-0.2.32-py3-none-any.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "e5fbd5027fe22c54580709eb25f3b80df1978a14477b90b5f5c5b4595c841dba",
                "md5": "fce8b788882829dc2b8978ff1aa7c6e1",
                "sha256": "82f9e433ad76246cbcb915d9eb33c32ab201b8fe3f616b857a1aea5e71a31604"
            },
            "downloads": -1,
            "filename": "tokenize_text-0.2.32.tar.gz",
            "has_sig": false,
            "md5_digest": "fce8b788882829dc2b8978ff1aa7c6e1",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": ">=3.6",
            "size": 60445,
            "upload_time": "2024-08-11T21:53:24",
            "upload_time_iso_8601": "2024-08-11T21:53:24.998551Z",
            "url": "https://files.pythonhosted.org/packages/e5/fb/d5027fe22c54580709eb25f3b80df1978a14477b90b5f5c5b4595c841dba/tokenize_text-0.2.32.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2024-08-11 21:53:24",
    "github": false,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "lcname": "tokenize-text"
}
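The block above is a static snapshot of PyPI's metadata for this release. The same document can be fetched live from PyPI's public JSON API, and the sha256 digests it carries can be used to verify a downloaded artifact. A minimal sketch, assuming the requests library is available:

    import hashlib
    import requests

    # PyPI's public JSON API for a specific release.
    resp = requests.get("https://pypi.org/pypi/tokenize-text/0.2.32/json", timeout=30)
    resp.raise_for_status()
    meta = resp.json()

    # Locate the sdist entry and verify its sha256 digest.
    sdist = next(u for u in meta["urls"] if u["packagetype"] == "sdist")
    data = requests.get(sdist["url"], timeout=30).content
    digest = hashlib.sha256(data).hexdigest()
    assert digest == sdist["digests"]["sha256"], "checksum mismatch"
    print(f"{sdist['filename']}: sha256 verified")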
        