kotokenizer


Name: kotokenizer
Version: 0.1.1
download
home_page: https://github.com/dsdanielpark/ko-tokenizer
Summary: Korean tokenizer, sentence classification, and spacing model.
upload_time: 2024-03-16 07:07:35
maintainer
docs_urlNone
author: daniel park
requires_python: >=3.6
license
keywords: python, tokenizer, korean, korean tokenizer, nlp, natural language process, llm, large language model
VCS
bugtrack_url
requirements No requirements were recorded.
Travis-CI No Travis.
coveralls test coverage No coveralls.
            # Korean Tokenizer
Wait for release

            

Raw data

            {
    "_id": null,
    "home_page": "https://github.com/dsdanielpark/ko-tokenizer",
    "name": "kotokenizer",
    "maintainer": "",
    "docs_url": null,
    "requires_python": ">=3.6",
    "maintainer_email": "",
    "keywords": "Python,Tokenizer,Korean,Korean Tokenizer,NLP,Natural Language Process,LLM,Large Language Model",
    "author": "daniel park",
    "author_email": "parkminwoo1991@gmail.com",
    "download_url": "https://files.pythonhosted.org/packages/ed/54/ef4449c1d564ece112ec20aca48c0534e50e59db02849dd61704bcfcc9fc/kotokenizer-0.1.1.tar.gz",
    "platform": null,
    "description": "# Korean Tokenizer\r\nWait for release\r\n",
    "bugtrack_url": null,
    "license": "",
    "summary": "Korean tokenizer, sentence classification, and spacing model.",
    "version": "0.1.1",
    "project_urls": {
        "Homepage": "https://github.com/dsdanielpark/ko-tokenizer"
    },
    "split_keywords": [
        "python",
        "tokenizer",
        "korean",
        "korean tokenizer",
        "nlp",
        "natural language process",
        "llm",
        "large language model"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "a33c6f6b4d036d8814767e23a4f923fb4d244140ac7f886cf81077029a7b49bc",
                "md5": "692370cfaa9778d5f057965353ba124d",
                "sha256": "9976c3f5c5e8ddc828139c12a948ee107a72197ac5d1c68c3b049e7faad863d2"
            },
            "downloads": -1,
            "filename": "kotokenizer-0.1.1-py3-none-any.whl",
            "has_sig": false,
            "md5_digest": "692370cfaa9778d5f057965353ba124d",
            "packagetype": "bdist_wheel",
            "python_version": "py3",
            "requires_python": ">=3.6",
            "size": 5785,
            "upload_time": "2024-03-16T07:07:34",
            "upload_time_iso_8601": "2024-03-16T07:07:34.415382Z",
            "url": "https://files.pythonhosted.org/packages/a3/3c/6f6b4d036d8814767e23a4f923fb4d244140ac7f886cf81077029a7b49bc/kotokenizer-0.1.1-py3-none-any.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "ed54ef4449c1d564ece112ec20aca48c0534e50e59db02849dd61704bcfcc9fc",
                "md5": "2745415eff1647591e6a359fe1cdab70",
                "sha256": "14bc8346a6affc334b4ad319cf11488b3966d73ed0843af1196c021ab9747bf8"
            },
            "downloads": -1,
            "filename": "kotokenizer-0.1.1.tar.gz",
            "has_sig": false,
            "md5_digest": "2745415eff1647591e6a359fe1cdab70",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": ">=3.6",
            "size": 5778,
            "upload_time": "2024-03-16T07:07:35",
            "upload_time_iso_8601": "2024-03-16T07:07:35.802314Z",
            "url": "https://files.pythonhosted.org/packages/ed/54/ef4449c1d564ece112ec20aca48c0534e50e59db02849dd61704bcfcc9fc/kotokenizer-0.1.1.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2024-03-16 07:07:35",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "dsdanielpark",
    "github_project": "ko-tokenizer",
    "github_not_found": true,
    "lcname": "kotokenizer"
}
        
Elapsed time: 0.24570s