Skip to content

Commit 0e60e9f

Browse files
committed Sep 13, 2024
update: Add CLI flags to skip fetching or parsing data
Closes #7
1 parent 89d601a commit 0e60e9f

File tree

1 file changed

+13
-9
lines changed

1 file changed

+13
-9
lines changed
 

scripts/update.py

+13-9
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,8 @@
2727
choices=ALL_SCRAPERS.keys(),
2828
help="Specify one or more scrapers to use",
2929
)
30+
parser.add_argument("--no-fetch", action="store_true", help="Skip fetching data")
31+
parser.add_argument("--no-parse", action="store_true", help="Skip parsing data")
3032
args = parser.parse_args()
3133

3234
# Ensure DB has the latest structure
@@ -40,20 +42,22 @@
4042
else:
4143
scrapers_to_use = sorted(set(args.scrapers))
4244

43-
# Fetch data
44-
for scraper_name in scrapers_to_use:
45-
scraper = ALL_SCRAPERS[scraper_name]
46-
scraper.fetch_data()
45+
# Fetch data if not skipped
46+
if not args.no_fetch:
47+
for scraper_name in scrapers_to_use:
48+
scraper = ALL_SCRAPERS[scraper_name]
49+
scraper.fetch_data()
4750

4851
# Ensure tags and categories are created
4952
ensure_categories_created()
5053
ensure_tags_created()
5154

52-
# Parse the data
53-
for scraper_name in scrapers_to_use:
54-
scraper = ALL_SCRAPERS[scraper_name]
55-
for path in scraper.find_files():
56-
parse_data(path, scraper)
55+
# Parse data if not skipped
56+
if not args.no_parse:
57+
for scraper_name in scrapers_to_use:
58+
scraper = ALL_SCRAPERS[scraper_name]
59+
for path in scraper.find_files():
60+
parse_data(path, scraper)
5761

5862
if args.serve:
5963
from streamlit.web.cli import main_run

0 commit comments

Comments (0)
Please sign in to comment.