Name | BAScraper |
Version | 0.1.2 |
JSON | download |
home_page | None |
Summary | API wrapper for PullPush.io - the 3rd party replacement API for Reddit. |
upload_time | 2024-09-02 17:30:00 |
maintainer | None |
docs_url | None |
author | maxjo |
requires_python | >=3.11 |
license | None |
keywords | reddit, scraper, pullpush, wrapper |
VCS | |
bugtrack_url | |
requirements | No requirements were recorded. |
Travis-CI | No Travis. |
coveralls test coverage | No coveralls. |
Currently it can:
- get submissions/comments from a certain subreddit in supported order/sorting methods specified in the PullPush.io API docs
- get comments under the retrieved submissions
- can get all the submissions based on the number of submissions or in a certain timeframe you specify
- can recover (if possible) deleted/removed submissions/comments from the returned result
Check the [documentation on GitHub](https://github.com/maxjo020418/BAScraper) for detailed info.
Raw data
{
"_id": null,
"home_page": null,
"name": "BAScraper",
"maintainer": null,
"docs_url": null,
"requires_python": ">=3.11",
"maintainer_email": null,
"keywords": "reddit, scraper, PullPush, wrapper",
"author": "maxjo",
"author_email": "jo@yeongmin.net",
"download_url": "https://files.pythonhosted.org/packages/36/87/9ee77a5dd5b6d820fa0b9602b4c514b20902ab4ed363a2d79ebc8c754480/bascraper-0.1.2.tar.gz",
"platform": null,
"description": "currently it can:\n- get submissions/comments from a certain subreddit in supported order/sorting methods specified in the PullPush.io API docs\n- get comments under the retrieved submissions\n- can get all the submissions based on the number of submissions or in a certain timeframe you specify\n- can recover(if possible) deleted/removed submission/comments from the returned result\ncheck the [documentation on the github](https://github.com/maxjo020418/BAScraper) for detailed info\n",
"bugtrack_url": null,
"license": null,
"summary": "API wrapper for PullPush.io - the 3rd party replacement API for Reddit.",
"version": "0.1.2",
"project_urls": {
"Github": "https://github.com/maxjo020418/BAScraper"
},
"split_keywords": [
"reddit",
" scraper",
" pullpush",
" wrapper"
],
"urls": [
{
"comment_text": "",
"digests": {
"blake2b_256": "b7cc4bd488f3125589dc97267bdc5f584d433090299ca051ff9e5de34871ba09",
"md5": "b58cd77f4e11c1eefa9bbb82f49911c3",
"sha256": "b3e276c1d4def0553243eec1c6c677868d4b081fe4978612898cbe6551ba21fa"
},
"downloads": -1,
"filename": "BAScraper-0.1.2-py3-none-any.whl",
"has_sig": false,
"md5_digest": "b58cd77f4e11c1eefa9bbb82f49911c3",
"packagetype": "bdist_wheel",
"python_version": "py3",
"requires_python": ">=3.11",
"size": 19535,
"upload_time": "2024-09-02T17:29:59",
"upload_time_iso_8601": "2024-09-02T17:29:59.476716Z",
"url": "https://files.pythonhosted.org/packages/b7/cc/4bd488f3125589dc97267bdc5f584d433090299ca051ff9e5de34871ba09/BAScraper-0.1.2-py3-none-any.whl",
"yanked": false,
"yanked_reason": null
},
{
"comment_text": "",
"digests": {
"blake2b_256": "36879ee77a5dd5b6d820fa0b9602b4c514b20902ab4ed363a2d79ebc8c754480",
"md5": "dc4a329179fd704a1135b38dca6c5f83",
"sha256": "41bf187ef2ce356d6de8b1d384e470357bf8b0c624a5d8a1aabb3868308d02ae"
},
"downloads": -1,
"filename": "bascraper-0.1.2.tar.gz",
"has_sig": false,
"md5_digest": "dc4a329179fd704a1135b38dca6c5f83",
"packagetype": "sdist",
"python_version": "source",
"requires_python": ">=3.11",
"size": 21050,
"upload_time": "2024-09-02T17:30:00",
"upload_time_iso_8601": "2024-09-02T17:30:00.931701Z",
"url": "https://files.pythonhosted.org/packages/36/87/9ee77a5dd5b6d820fa0b9602b4c514b20902ab4ed363a2d79ebc8c754480/bascraper-0.1.2.tar.gz",
"yanked": false,
"yanked_reason": null
}
],
"upload_time": "2024-09-02 17:30:00",
"github": true,
"gitlab": false,
"bitbucket": false,
"codeberg": false,
"github_user": "maxjo020418",
"github_project": "BAScraper",
"travis_ci": false,
"coveralls": false,
"github_actions": false,
"lcname": "bascraper"
}