nltokeniz


Name: nltokeniz
Version: 0.0.4
Home page: https://github.com/raviqqe/nltokeniz.py/
Summary: Natural language tokenizer for documents in Python
Upload time: 2017-01-17 07:36:06
Maintainer: (none recorded)
Docs URL: None
Author: Yota Toyama
Requires Python: (not specified)
License: Public Domain
Keywords: (none)
VCS: (none recorded)
Bugtrack URL: (none recorded)
Requirements: No requirements were recorded.
Travis CI: (see build badge below)
Coveralls test coverage: No Coveralls data recorded.
            # nltokeniz.py

[![PyPI version](https://badge.fury.io/py/nltokeniz.svg)](https://badge.fury.io/py/nltokeniz)
[![Python versions](https://img.shields.io/pypi/pyversions/nltokeniz.svg)](setup.py)
[![Build Status](https://travis-ci.org/raviqqe/nltokeniz.py.svg?branch=master)](https://travis-ci.org/raviqqe/nltokeniz.py)
[![License](https://img.shields.io/badge/license-unlicense-lightgray.svg)](https://unlicense.org)

Natural language tokenizer for English and Japanese documents in Python


## License

[The Unlicense](https://unlicense.org)



            

Raw data

            {
    "maintainer": "", 
    "docs_url": null, 
    "requires_python": "", 
    "maintainer_email": "", 
    "cheesecake_code_kwalitee_id": null, 
    "coveralis": false, 
    "keywords": "", 
    "upload_time": "2017-01-17 07:36:06", 
    "author": "Yota Toyama", 
    "home_page": "https://github.com/raviqqe/nltokeniz.py/", 
    "github_user": "raviqqe", 
    "download_url": "https://pypi.python.org/packages/e5/ce/b426b9d47ea7dd9fc524e9a2d9e6abcc6e2cbefc8d106f2fe65127643721/nltokeniz-0.0.4.tar.gz", 
    "platform": "", 
    "version": "0.0.4", 
    "cheesecake_documentation_id": null, 
    "description": "# nltokeniz.py\n\n[![PyPI version](https://badge.fury.io/py/nltokeniz.svg)](https://badge.fury.io/py/nltokeniz)\n[![Python versions](https://img.shields.io/pypi/pyversions/nltokeniz.svg)](setup.py)\n[![Build Status](https://travis-ci.org/raviqqe/nltokeniz.py.svg?branch=master)](https://travis-ci.org/raviqqe/nltokeniz.py)\n[![License](https://img.shields.io/badge/license-unlicense-lightgray.svg)](https://unlicense.org)\n\nNatural language tokenizer for English and Japanese documents in Python\n\n\n## License\n\n[The Unlicense](https://unlicense.org)\n\n\n", 
    "lcname": "nltokeniz", 
    "bugtrack_url": null, 
    "github": true, 
    "name": "nltokeniz", 
    "license": "Public Domain", 
    "travis_ci": true, 
    "github_project": "nltokeniz.py", 
    "summary": "Natural language tokenizer for documents in Python", 
    "split_keywords": [], 
    "author_email": "raviqqe@gmail.com", 
    "urls": [
        {
            "has_sig": false, 
            "upload_time": "2017-01-17T07:36:03", 
            "comment_text": "", 
            "python_version": "py3", 
            "url": "https://pypi.python.org/packages/b1/71/a0578c3141c0e47696f3d334387810fdfbc06875567c8ca88acea7654259/nltokeniz-0.0.4-py3-none-any.whl", 
            "md5_digest": "c6e6567a744151c035c4f9a30294cc8a", 
            "downloads": 0, 
            "filename": "nltokeniz-0.0.4-py3-none-any.whl", 
            "packagetype": "bdist_wheel", 
            "path": "b1/71/a0578c3141c0e47696f3d334387810fdfbc06875567c8ca88acea7654259/nltokeniz-0.0.4-py3-none-any.whl", 
            "size": 4287
        }, 
        {
            "has_sig": false, 
            "upload_time": "2017-01-17T07:36:05", 
            "comment_text": "", 
            "python_version": "3.6", 
            "url": "https://pypi.python.org/packages/b2/51/1aa7649e8bbe745a7f656fbcbef12ef99cf25cabc082b797531859633614/nltokeniz-0.0.4-py3.6.egg", 
            "md5_digest": "245c7d58baaed208bbf10f910c0753b7", 
            "downloads": 0, 
            "filename": "nltokeniz-0.0.4-py3.6.egg", 
            "packagetype": "bdist_egg", 
            "path": "b2/51/1aa7649e8bbe745a7f656fbcbef12ef99cf25cabc082b797531859633614/nltokeniz-0.0.4-py3.6.egg", 
            "size": 4988
        }, 
        {
            "has_sig": false, 
            "upload_time": "2017-01-17T07:36:06", 
            "comment_text": "", 
            "python_version": "source", 
            "url": "https://pypi.python.org/packages/e5/ce/b426b9d47ea7dd9fc524e9a2d9e6abcc6e2cbefc8d106f2fe65127643721/nltokeniz-0.0.4.tar.gz", 
            "md5_digest": "0c57585bcbbd540bd6005e1527d5553b", 
            "downloads": 0, 
            "filename": "nltokeniz-0.0.4.tar.gz", 
            "packagetype": "sdist", 
            "path": "e5/ce/b426b9d47ea7dd9fc524e9a2d9e6abcc6e2cbefc8d106f2fe65127643721/nltokeniz-0.0.4.tar.gz", 
            "size": 3108
        }
    ], 
    "_id": null, 
    "cheesecake_installability_id": null
}