basictokenizer


Name: basictokenizer
Version: 0.0.4
download
home_page
Summary: A basic and useful tokenizer.
upload_time: 2023-02-06 23:07:12
maintainer
docs_url: None
author: UesleiDev
requires_python
license: MIT
keywords tokenizer token basic-tokenizer basic easy-and-useful
VCS
bugtrack_url
requirements No requirements were recorded.
Travis-CI No Travis.
coveralls test coverage No coveralls.
            The Tokenizer package provides an easy-to-use and efficient way to tokenize text data. The Tokenizer package is built with performance in mind, making it a fast and reliable choice for tokenizing text data at scale.
            

Raw data

            {
    "_id": null,
    "home_page": "",
    "name": "basictokenizer",
    "maintainer": "",
    "docs_url": null,
    "requires_python": "",
    "maintainer_email": "",
    "keywords": "tokenizer, token,basic-tokenizer,basic,easy-and-useful",
    "author": "UesleiDev",
    "author_email": "uesleibros@gmail.com",
    "download_url": "https://files.pythonhosted.org/packages/3a/ed/76ca2bb72c59cd1b924b0446a5337a8c976d82e8cc050ad9037cd9530ad7/basictokenizer-0.0.4.tar.gz",
    "platform": null,
    "description": "The Tokenizer package provides an easy-to-use and efficient way to tokenize text data. The Tokenizer package is built with performance in mind, making it a fast and reliable choice for tokenizing text data at scale.",
    "bugtrack_url": null,
    "license": "MIT",
    "summary": "A basic and useful tokenizer.",
    "version": "0.0.4",
    "split_keywords": [
        "tokenizer",
        " token",
        "basic-tokenizer",
        "basic",
        "easy-and-useful"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "3aed76ca2bb72c59cd1b924b0446a5337a8c976d82e8cc050ad9037cd9530ad7",
                "md5": "44d30e6fcb7a555289b58ff34a8f7d3a",
                "sha256": "bc86d245127ecff3fcb065a2e1bcf7dbaf9c1c1754f3b671cf2804c391726089"
            },
            "downloads": -1,
            "filename": "basictokenizer-0.0.4.tar.gz",
            "has_sig": false,
            "md5_digest": "44d30e6fcb7a555289b58ff34a8f7d3a",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": null,
            "size": 2106,
            "upload_time": "2023-02-06T23:07:12",
            "upload_time_iso_8601": "2023-02-06T23:07:12.010483Z",
            "url": "https://files.pythonhosted.org/packages/3a/ed/76ca2bb72c59cd1b924b0446a5337a8c976d82e8cc050ad9037cd9530ad7/basictokenizer-0.0.4.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2023-02-06 23:07:12",
    "github": false,
    "gitlab": false,
    "bitbucket": false,
    "lcname": "basictokenizer"
}
        
Elapsed time: 0.06087s