| Name | scraper2-hj3415 |
| Version | 1.0.1 |
| download |
| home_page | None |
| Summary | Naver WiseReport C103/C104 scraper + ingestion orchestrator |
| upload_time | 2025-11-05 08:11:09 |
| maintainer | None |
| docs_url | None |
| author | None |
| requires_python | >=3.11 |
| license | None |
| keywords | example, demo |
| VCS |
|
| bugtrack_url |
|
| requirements |
No requirements were recorded.
|
| Travis-CI |
No Travis.
|
| coveralls test coverage |
No coveralls.
|
## 단일 종목 수집 후 저장
### 환경변수로 기본값 제어 가능
export SCRAPER_HEADLESS=true
export SCRAPER_SINK_CHUNK=1000
### Mongo 연결은 db2-hj3415 쪽 init 로직이 CLI 내부에 있다면, 옵션 또는 env로 지정
export MONGO_URI="mongodb://localhost:27017"
export MONGO_DB="nfs_db"
### 삼성전자: c103 + c104 모두 저장
scraper2 ingest one 005930 --pages c103 c104
### 예: 저장하지 않고 번들만 수집
scraper2 ingest one 005930 --pages c103 c104 --no-save --collect-only
#### 가능 옵션:
• --pages c103 c104 : 처리할 페이지 선택
• --save/--no-save : DB 저장 여부(default --save)
• --collect-only : 수집만 하고 저장하지 않음(default False)
---
## 여러 종목 동시 수집
### 쉼표 구분
scraper2 ingest many 005930,000660 --pages c103 c104 --concurrency 2
### 파일 입력 (한 줄에 하나)
scraper2 ingest many --file ./codes.txt --pages c103 c104 --concurrency 3
---
## 헬스체크/버전
scraper2 health
scraper2 version
Raw data
{
"_id": null,
"home_page": null,
"name": "scraper2-hj3415",
"maintainer": null,
"docs_url": null,
"requires_python": ">=3.11",
"maintainer_email": null,
"keywords": "example, demo",
"author": null,
"author_email": "Hyungjin Kim <hj3415@gmail.com>",
"download_url": "https://files.pythonhosted.org/packages/c5/4e/bdc429232dc5c575b8c28c6cbed86c81921c4bd13b957f39e39118cdf268/scraper2_hj3415-1.0.1.tar.gz",
"platform": null,
"description": "## \ub2e8\uc77c \uc885\ubaa9 \uc218\uc9d1 \ud6c4 \uc800\uc7a5 \n \n### \ud658\uacbd\ubcc0\uc218\ub85c \uae30\ubcf8\uac12 \uc81c\uc5b4 \uac00\ub2a5 \nexport SCRAPER_HEADLESS=true \nexport SCRAPER_SINK_CHUNK=1000 \n \n### Mongo \uc5f0\uacb0\uc740 db2-hj3415 \ucabd init \ub85c\uc9c1\uc774 CLI \ub0b4\ubd80\uc5d0 \uc788\ub2e4\uba74, \uc635\uc158 \ub610\ub294 env\ub85c \uc9c0\uc815 \nexport MONGO_URI=\"mongodb://localhost:27017\" \nexport MONGO_DB=\"nfs_db\" \n \n### \uc0bc\uc131\uc804\uc790: c103 + c104 \ubaa8\ub450 \uc800\uc7a5 \nscraper2 ingest one 005930 --pages c103 c104\n\n### \uc608: \uc800\uc7a5\ud558\uc9c0 \uc54a\uace0 \ubc88\ub4e4\ub9cc \uc218\uc9d1 \n\nscraper2 ingest one 005930 --pages c103 c104 --no-save --collect-only\n\n#### \uac00\ub2a5 \uc635\uc158: \n \u2022 --pages c103 c104 : \ucc98\ub9ac\ud560 \ud398\uc774\uc9c0 \uc120\ud0dd \n \u2022 --save/--no-save : DB \uc800\uc7a5 \uc5ec\ubd80(defalut --save)\n \u2022 --collect-only : \uc218\uc9d1\ub9cc \ud558\uace0 \uc800\uc7a5\ud558\uc9c0 \uc54a\uc74c(defalut False)\n\n---\n\n## \uc5ec\ub7ec \uc885\ubaa9 \ub3d9\uc2dc \uc218\uc9d1 \n \n### \uc27c\ud45c \uad6c\ubd84 \nscraper2 ingest many 005930,000660 --pages c103 c104 --concurrency 2 \n \n### \ud30c\uc77c \uc785\ub825 (\ud55c \uc904\uc5d0 \ud558\ub098) \nscraper2 ingest many --file ./codes.txt --pages c103 c104 --concurrency 3 \n\n---\n\n## \ud5ec\uc2a4\uccb4\ud06c/\ubc84\uc804 \n \nscraper2 health \nscraper2 version \n",
"bugtrack_url": null,
"license": null,
"summary": "Naver WiseReport C103/C104 scraper + ingestion orchestrator",
"version": "1.0.1",
"project_urls": null,
"split_keywords": [
"example",
" demo"
],
"urls": [
{
"comment_text": null,
"digests": {
"blake2b_256": "9de5d7e6364312c4c77cac3912137dbbc7fc6262e7ddc9dedb874b7475a8afca",
"md5": "08db639e3d322e2a814a29138d45f6b4",
"sha256": "918bb74a4bb33bc7ec5e07203878ba30d801ea5c7d9fe25388f96652f0656e4a"
},
"downloads": -1,
"filename": "scraper2_hj3415-1.0.1-py3-none-any.whl",
"has_sig": false,
"md5_digest": "08db639e3d322e2a814a29138d45f6b4",
"packagetype": "bdist_wheel",
"python_version": "py3",
"requires_python": ">=3.11",
"size": 26608,
"upload_time": "2025-11-05T08:11:07",
"upload_time_iso_8601": "2025-11-05T08:11:07.807595Z",
"url": "https://files.pythonhosted.org/packages/9d/e5/d7e6364312c4c77cac3912137dbbc7fc6262e7ddc9dedb874b7475a8afca/scraper2_hj3415-1.0.1-py3-none-any.whl",
"yanked": false,
"yanked_reason": null
},
{
"comment_text": null,
"digests": {
"blake2b_256": "c54ebdc429232dc5c575b8c28c6cbed86c81921c4bd13b957f39e39118cdf268",
"md5": "c994efb6408048e4064834e70516f643",
"sha256": "0f7da9ae5c78e4785b27eeacab7ac1203a09019a7789a67a5b68afc7449450df"
},
"downloads": -1,
"filename": "scraper2_hj3415-1.0.1.tar.gz",
"has_sig": false,
"md5_digest": "c994efb6408048e4064834e70516f643",
"packagetype": "sdist",
"python_version": "source",
"requires_python": ">=3.11",
"size": 38603,
"upload_time": "2025-11-05T08:11:09",
"upload_time_iso_8601": "2025-11-05T08:11:09.395768Z",
"url": "https://files.pythonhosted.org/packages/c5/4e/bdc429232dc5c575b8c28c6cbed86c81921c4bd13b957f39e39118cdf268/scraper2_hj3415-1.0.1.tar.gz",
"yanked": false,
"yanked_reason": null
}
],
"upload_time": "2025-11-05 08:11:09",
"github": false,
"gitlab": false,
"bitbucket": false,
"codeberg": false,
"lcname": "scraper2-hj3415"
}