Raw data
{
    "_id": null,
    "home_page": "https://github.com/lgomezt/Tidytweets",
    "name": "tidytweets",
    "maintainer": "",
    "docs_url": null,
    "requires_python": ">=3.6",
    "maintainer_email": "",
    "keywords": "NLP Text processing Twitter API processing data cleaning",
    "author": "Lucas G\u00f3mez Tob\u00f3n, Jose Fernando Barrera",
    "author_email": "lucasgomeztobon@hotmail.com, jf.barrera10@uniandes.edu.co",
    "download_url": "https://files.pythonhosted.org/packages/03/26/ca725e9095e7c24f7e571c8625ce2c59f1f4b3430a505afae0e82ad91416/tidytweets-0.1.tar.gz",
    "platform": null,
    "description": "",
    "bugtrack_url": null,
    "license": "MIT",
    "summary": "Clean tweets to perform various NLP tasks such as topic analysis, word embeddings, sentiment analysis, etc.",
    "version": "0.1",
    "project_urls": {
        "Homepage": "https://github.com/lgomezt/Tidytweets"
    },
    "split_keywords": [
        "nlp",
        "text",
        "processing",
        "twitter",
        "api",
        "processing",
        "data",
        "cleaning"
    ],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "0326ca725e9095e7c24f7e571c8625ce2c59f1f4b3430a505afae0e82ad91416",
                "md5": "e1f1baaae13d9c0c40b6feeb67614e3e",
                "sha256": "0937c160c538d70c9779480a44ca62e2ffe94051e628e94e166a4c2626a93752"
            },
            "downloads": -1,
            "filename": "tidytweets-0.1.tar.gz",
            "has_sig": false,
            "md5_digest": "e1f1baaae13d9c0c40b6feeb67614e3e",
            "packagetype": "sdist",
            "python_version": "source",
            "requires_python": ">=3.6",
            "size": 2601,
            "upload_time": "2023-06-29T14:04:04",
            "upload_time_iso_8601": "2023-06-29T14:04:04.608467Z",
            "url": "https://files.pythonhosted.org/packages/03/26/ca725e9095e7c24f7e571c8625ce2c59f1f4b3430a505afae0e82ad91416/tidytweets-0.1.tar.gz",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2023-06-29 14:04:04",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "lgomezt",
    "github_project": "Tidytweets",
    "travis_ci": false,
    "coveralls": false,
    "github_actions": false,
    "lcname": "tidytweets"
}
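The dump above contains more than PyPI itself serves: the "github*", CI, and "lcname" flags appear to be aggregator-derived fields, which is an assumption on our part. The core "info" and "urls" portions, however, match what the public endpoint https://pypi.org/pypi/<name>/json returns. A minimal sketch of retrieving and inspecting those standard fields for this project, using only the standard library:

# Minimal sketch: query PyPI's public JSON API for tidytweets and print
# the fields that correspond to the raw data above. The aggregator-specific
# flags (github, lcname, ...) are not part of this endpoint.
import json
from urllib.request import urlopen

def fetch_pypi_metadata(name: str) -> dict:
    """Return the JSON document served by https://pypi.org/pypi/<name>/json."""
    with urlopen(f"https://pypi.org/pypi/{name}/json") as resp:
        return json.load(resp)

if __name__ == "__main__":
    meta = fetch_pypi_metadata("tidytweets")
    info = meta["info"]
    print(info["name"], info["version"])      # tidytweets 0.1
    print(info["summary"])                    # Clean tweets to perform various NLP tasks ...
    for release_file in meta["urls"]:         # one sdist: tidytweets-0.1.tar.gz, 2601 bytes
        print(release_file["filename"], release_file["size"], "bytes")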