helpers.py
import os
from datetime import datetime
from typing import Generator

import loggi
import loggi.models
from pathier import Pathier

import board_detector
import models

root = Pathier(__file__).parent

def create_scraper_from_template(url: str, company: str, board_type: str | None = None):
    """Create a scraper module for `company` in the `scrapers` directory.

    If a board type is given or can be detected from `url`, the scraper is
    generated from the board-specific subclass template; otherwise the generic
    template is used and the new file is opened for manual editing.
    """
    templates_path = root / "templates"
    if not board_type:
        board_type = board_detector.get_board_type_from_text(url)
    if not board_type:
        template = (templates_path / "template.py").read_text()
    else:
        # Embedded Greenhouse boards use the same scraper as standard ones.
        if board_type == "greenhouse_embed":
            board_type = "greenhouse"
        template = (
            (templates_path / "subgruel_template.py")
            .read_text()
            .replace("JobGruel", f"{board_type.capitalize()}Gruel")
        )
    stem = company.lower().replace(" ", "_")
    py_path = root / "scrapers" / f"{stem}.py"
    py_path.write_text(template)
    # No board type detected: open the stub in VS Code for hand editing.
    if not board_type:
        os.system(f"code -r {py_path}")
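
# A minimal usage sketch (the url and company below are hypothetical, and it
# assumes `board_detector.get_board_type_from_text` recognizes the url as a
# "greenhouse" board):
#
#   create_scraper_from_template(
#       "https://boards.greenhouse.io/somecompany", "Some Company"
#   )
#
# This would write `scrapers/some_company.py` from the subclass template with
# `JobGruel` renamed to `GreenhouseGruel`.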

def load_log(company: str) -> loggi.models.Log:
    """Load the log file for `company` from the `gruel_logs` directory."""
    stem = company.lower().replace(" ", "_")
    return loggi.load_log(root / "gruel_logs" / f"{stem}.log")

def get_all_logs() -> Generator[loggi.models.Log, None, None]:
    """Yield a `loggi.models.Log` for every file in the `gruel_logs` directory."""
    for file in (root / "gruel_logs").glob("*.log"):
        yield loggi.load_log(file)
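
# Usage sketch (hypothetical company name): load one scraper's log by company,
# or iterate over every scraper's log:
#
#   log = load_log("Some Company")  # reads gruel_logs/some_company.log
#   for log in get_all_logs():
#       ...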

def get_failed_scrapers(start_time: datetime) -> list[str]:
    """Return the scrapers that logged an `ERROR` or `EXCEPTION` event after `start_time`."""
    fails = []
    for log in get_all_logs():
        if log.filter_dates(start_time).filter_levels(["ERROR", "EXCEPTION"]).events:
            assert log.path
            fails.append(log.path.stem)
    return fails
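
# Usage sketch (the timestamp is illustrative): collect the scrapers that
# have errored since a given run started:
#
#   failed = get_failed_scrapers(datetime(2023, 10, 24))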

def get_scrapers_with_errors(start_time: datetime) -> dict[str, list[str]]:
    """Sort scrapers that logged problems after `start_time` into buckets.

    Keys:
    * `redirects`: the board url resolved to a different url
    * `404s`: the board url returned a 404 status code
    * `no_listings`: `get_parsable_items()` returned 0 items
    * `parse_fails`: one or more items failed to parse
    * `misc_fails`: any other `ERROR` or `EXCEPTION` events
    """
    scrapers: dict[str, list[str]] = {
        "redirects": [],
        "404s": [],
        "no_listings": [],
        "parse_fails": [],
        "misc_fails": [],
    }
    for log in get_all_logs():
        log = log.filter_dates(start_time)
        assert log.path
        error_exceptions = log.filter_levels(["ERROR", "EXCEPTION"])
        if (
            log.filter_levels(["WARNING"])
            .filter_messages(["Board url * resolved to *"])
            .events
        ):
            scrapers["redirects"].append(log.path.stem)
        elif error_exceptions.filter_messages(["*returned status code 404*"]).events:
            scrapers["404s"].append(log.path.stem)
        elif log.filter_messages(["*get_parsable_items() returned 0 items*"]).events:
            scrapers["no_listings"].append(log.path.stem)
        elif error_exceptions.filter_messages(["*Failure to parse item*"]).events:
            scrapers["parse_fails"].append(log.path.stem)
        elif error_exceptions.events:
            scrapers["misc_fails"].append(log.path.stem)
    return scrapers

def main():
    """Print the scrapers that had errors after 2023-10-24, grouped by error type."""
    for k, v in get_scrapers_with_errors(datetime(2023, 10, 24)).items():
        print(f"{k}:")
        for s in v:
            print(f"  {s}")


if __name__ == "__main__":
    main()