nltokeniz

Name: nltokeniz
Version: 0.0.5
Home page: https://github.com/raviqqe/nltokeniz.py/
Summary: Natural language tokenizer for documents in Python
Upload time: 2017-07-16 13:23:14
Maintainer: (not specified)
Docs URL: None
Author: Yota Toyama
Requires Python: (not specified)
License: Public Domain
Keywords: (none)
VCS: GitHub (raviqqe/nltokeniz.py)
Bugtrack URL: None
Requirements: No requirements were recorded.
Travis-CI: yes
Coveralls test coverage: No coveralls.
# nltokeniz.py

[![PyPI version](https://badge.fury.io/py/nltokeniz.svg)](https://badge.fury.io/py/nltokeniz)
[![Python versions](https://img.shields.io/pypi/pyversions/nltokeniz.svg)](setup.py)
[![Build Status](https://travis-ci.org/raviqqe/nltokeniz.py.svg?branch=master)](https://travis-ci.org/raviqqe/nltokeniz.py)
[![License](https://img.shields.io/badge/license-unlicense-lightgray.svg)](https://unlicense.org)

Natural language tokenizer for English and Japanese documents in Python
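
The listing carries no usage documentation, so the following is only a rough, non-authoritative sketch: the top-level module name `nltokeniz` is taken from the package name, but the `tokenize` function and its return value are assumptions not confirmed by this page.

```python
# Hypothetical usage sketch. The `tokenize` function name and its
# behavior are assumptions; see the project on GitHub for the real API.
import nltokeniz

# English text: assumed to return a list of word tokens.
print(nltokeniz.tokenize("Natural language tokenizers split text into words."))

# Japanese text: assumed to be segmented with a morphological analyzer.
print(nltokeniz.tokenize("自然言語処理は楽しい。"))
```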


## License

[The Unlicense](https://unlicense.org)


Raw data

            {
    "maintainer": "", 
    "docs_url": null, 
    "requires_python": "", 
    "maintainer_email": "", 
    "cheesecake_code_kwalitee_id": null, 
    "keywords": "", 
    "upload_time": "2017-07-16 13:23:14", 
    "author": "Yota Toyama", 
    "home_page": "https://github.com/raviqqe/nltokeniz.py/", 
    "github_user": "raviqqe", 
    "download_url": "https://pypi.python.org/packages/80/b6/1715d4d55a8174ccc0504bae02171717c4180e7e9cf2c21dccc50fda25a1/nltokeniz-0.0.5.tar.gz", 
    "platform": "", 
    "version": "0.0.5", 
    "cheesecake_documentation_id": null, 
    "description": "# nltokeniz.py\n\n[![PyPI version](https://badge.fury.io/py/nltokeniz.svg)](https://badge.fury.io/py/nltokeniz)\n[![Python versions](https://img.shields.io/pypi/pyversions/nltokeniz.svg)](setup.py)\n[![Build Status](https://travis-ci.org/raviqqe/nltokeniz.py.svg?branch=master)](https://travis-ci.org/raviqqe/nltokeniz.py)\n[![License](https://img.shields.io/badge/license-unlicense-lightgray.svg)](https://unlicense.org)\n\nNatural language tokenizer for English and Japanese documents in Python\n\n\n## License\n\n[The Unlicense](https://unlicense.org)\n\n\n", 
    "lcname": "nltokeniz", 
    "bugtrack_url": null, 
    "github": true, 
    "coveralls": false, 
    "name": "nltokeniz", 
    "license": "Public Domain", 
    "travis_ci": true, 
    "github_project": "nltokeniz.py", 
    "summary": "Natural language tokenizer for documents in Python", 
    "split_keywords": [], 
    "author_email": "raviqqe@gmail.com", 
    "urls": [
        {
            "has_sig": false, 
            "upload_time": "2017-07-16T13:23:11", 
            "comment_text": "", 
            "python_version": "py3", 
            "url": "https://pypi.python.org/packages/8f/f5/2e0d5c1c629adfc92af6472bfc7e49281c3ee98038465a1261d2b6e80fe8/nltokeniz-0.0.5-py3-none-any.whl", 
            "md5_digest": "9922294ac10102ef073264fa5a902270", 
            "downloads": 0, 
            "filename": "nltokeniz-0.0.5-py3-none-any.whl", 
            "packagetype": "bdist_wheel", 
            "path": "8f/f5/2e0d5c1c629adfc92af6472bfc7e49281c3ee98038465a1261d2b6e80fe8/nltokeniz-0.0.5-py3-none-any.whl", 
            "size": 4329
        }, 
        {
            "has_sig": false, 
            "upload_time": "2017-07-16T13:23:13", 
            "comment_text": "", 
            "python_version": "3.6", 
            "url": "https://pypi.python.org/packages/f6/3b/abd71d6b8544317b93a1557323a92becc9afc2d43124dd62a1e329192828/nltokeniz-0.0.5-py3.6.egg", 
            "md5_digest": "083446d6fc0ca6efefd6851ab66d3c78", 
            "downloads": 0, 
            "filename": "nltokeniz-0.0.5-py3.6.egg", 
            "packagetype": "bdist_egg", 
            "path": "f6/3b/abd71d6b8544317b93a1557323a92becc9afc2d43124dd62a1e329192828/nltokeniz-0.0.5-py3.6.egg", 
            "size": 5107
        }, 
        {
            "has_sig": false, 
            "upload_time": "2017-07-16T13:23:14", 
            "comment_text": "", 
            "python_version": "source", 
            "url": "https://pypi.python.org/packages/80/b6/1715d4d55a8174ccc0504bae02171717c4180e7e9cf2c21dccc50fda25a1/nltokeniz-0.0.5.tar.gz", 
            "md5_digest": "0ce117172cdb970df8482e9595b8733c", 
            "downloads": 0, 
            "filename": "nltokeniz-0.0.5.tar.gz", 
            "packagetype": "sdist", 
            "path": "80/b6/1715d4d55a8174ccc0504bae02171717c4180e7e9cf2c21dccc50fda25a1/nltokeniz-0.0.5.tar.gz", 
            "size": 3144
        }
    ], 
    "_id": null, 
    "cheesecake_installability_id": null
}
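
Metadata like the blob above can also be retrieved programmatically from the PyPI JSON API at `https://pypi.org/pypi/<name>/json`; note that the live API groups fields differently (under `info`, `urls`, and `releases`) than this older dump. A minimal sketch using only the standard library:

```python
# Fetch current metadata for nltokeniz from the PyPI JSON API.
import json
from urllib.request import urlopen

with urlopen("https://pypi.org/pypi/nltokeniz/json") as response:
    data = json.load(response)

info = data["info"]
print(info["name"], info["version"], info["summary"])

# List the distribution files for the latest release.
for artifact in data["urls"]:
    print(artifact["filename"], artifact["packagetype"], artifact["size"])
```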