sapiens-tokenizer

Name: sapiens-tokenizer
Version: 1.1.3
Home page: https://github.com/sapiens-technology/SapiensTokenizer
Summary: None
Upload time: 2025-07-12 18:33:49
Maintainer: None
Docs URL: None
Author: SAPIENS TECHNOLOGY
Requires Python: None
License: Proprietary Software
Keywords: None
VCS: None
Bugtrack URL: None
Requirements: No requirements were recorded.
Travis-CI: No Travis.
Coveralls test coverage: No coveralls.
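
The fields above describe release 1.1.3 of the sapiens-tokenizer distribution on PyPI. As a quick sanity check, the snippet below is a minimal sketch, assuming the package has already been installed into the current environment (for example via pip install sapiens-tokenizer==1.1.3); it reads the installed version with the standard-library importlib.metadata and compares it against the version listed here.

from importlib.metadata import PackageNotFoundError, version

EXPECTED_VERSION = "1.1.3"  # version listed in the metadata above

try:
    # "sapiens-tokenizer" is the distribution name shown on this page.
    installed = version("sapiens-tokenizer")
except PackageNotFoundError:
    print("sapiens-tokenizer is not installed in this environment")
else:
    status = "matches" if installed == EXPECTED_VERSION else "differs from"
    print(f"Installed version {installed} {status} the listed release {EXPECTED_VERSION}")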
            
            

Raw data

            {
    "_id": null,
    "home_page": "https://github.com/sapiens-technology/SapiensTokenizer",
    "name": "sapiens-tokenizer",
    "maintainer": null,
    "docs_url": null,
    "requires_python": null,
    "maintainer_email": null,
    "keywords": null,
    "author": "SAPIENS TECHNOLOGY",
    "author_email": null,
    "download_url": "https://files.pythonhosted.org/packages/b9/3f/13b349a2951d6a716217a816ac1ae87d3a55047b7b6e04a809165e075d44/sapiens_tokenizer-1.1.3.tar.gz",
    "platform": null,
    "description": "",
    "bugtrack_url": null,
    "license": "Proprietary Software",
    "summary": null,
    "version": "1.1.3",
    "project_urls": {
        "Homepage": "https://github.com/sapiens-technology/SapiensTokenizer"
    },
    "split_keywords": [],
    "urls": [
        {
            "comment_text": null,
            "digests": {
                "blake2b_256": "fa6e119a2409ef420440b7d7d4a1645db20c9eaea6d92a0b46b8c5fa7e7fbae1",
                "md5": "43ba1a06f5ab390e6358402ca3105a14",
                "sha256": "6b7653c308842cc6194e78fef95d447a99703bb1c4cc1679d664a5b455ef5250"
            },
            "downloads": -1,
            "filename": "sapiens_tokenizer-1.1.3-py3-none-any.whl",
            "has_sig": false,
            "md5_digest": "43ba1a06f5ab390e6358402ca3105a14",
            "packagetype": "bdist_wheel",
            "python_version": "py3",
            "requires_python": null,
            "size": 8148333,
            "upload_time": "2025-07-12T18:33:37",
            "upload_time_iso_8601": "2025-07-12T18:33:37.640609Z",
            "url": "https://files.pythonhosted.org/packages/fa/6e/119a2409ef420440b7d7d4a1645db20c9eaea6d92a0b46b8c5fa7e7fbae1/sapiens_tokenizer-1.1.3-py3-none-any.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": null,
            "digests": {
                "blake2b_256": "b93f13b349a2951d6a716217a816ac1ae87d3a55047b7b6e04a809165e075d44",
                "md5": "a518a5ddcc46cbad931ec31363e9e756",
                "sha256": "f77628f19ecafc9d39669acb36858245ade601723d16e3d5736b17682a30f461"
            },
            "downloads": -1,
            "filename": "sapiens_tokenizer-1.1.3.tar.gz",
            "has_sig": false,
            "md5_digest": "a518a5ddcc46cbad931ec31363e9e756",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": null,
            "size": 8149137,
            "upload_time": "2025-07-12T18:33:49",
            "upload_time_iso_8601": "2025-07-12T18:33:49.458584Z",
            "url": "https://files.pythonhosted.org/packages/b9/3f/13b349a2951d6a716217a816ac1ae87d3a55047b7b6e04a809165e075d44/sapiens_tokenizer-1.1.3.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2025-07-12 18:33:49",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "sapiens-technology",
    "github_project": "SapiensTokenizer",
    "github_not_found": true,
    "lcname": "sapiens-tokenizer"
}
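
The raw data above is the record served by PyPI's public JSON API for this release (https://pypi.org/pypi/sapiens-tokenizer/1.1.3/json). The sketch below, using only the Python standard library, fetches that record and re-computes the sha256 digest of each published file so it can be compared against the values in the "urls" array shown above; the endpoint and field names follow that structure, and the rest is illustrative.

import hashlib
import json
import urllib.request

# PyPI's JSON API endpoint for this exact release.
API_URL = "https://pypi.org/pypi/sapiens-tokenizer/1.1.3/json"

with urllib.request.urlopen(API_URL) as response:
    metadata = json.load(response)

# For every published file (wheel and sdist), download it and check that a
# freshly computed sha256 digest matches the one recorded in the metadata.
for entry in metadata["urls"]:
    with urllib.request.urlopen(entry["url"]) as archive:
        digest = hashlib.sha256(archive.read()).hexdigest()
    ok = digest == entry["digests"]["sha256"]
    print(f"{entry['filename']}: sha256 {'verified' if ok else 'MISMATCH'}")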
        