|
1 | | -import datetime |
2 | | -import pathlib |
3 | | -from urllib.parse import urlparse |
4 | | -from typing import Iterator |
5 | | - |
6 | | -import json |
7 | | -import gh_issues |
8 | | - |
9 | | - |
10 | | -QUERY = "repo:blackpythondevs/blackpythondevs.github.io type:issue label:conference" |
11 | | - |
12 | | - |
13 | | -def get_conference_issues( |
14 | | - query: str = QUERY, |
15 | | -) -> Iterator[gh_issues.Issue]: # pragma no cover |
16 | | - issues = gh_issues.issues_by_query(query) |
17 | | - return issues |
18 | | - |
19 | | - |
20 | | -def normalize_url(url_match: str | None) -> str | None: |
21 | | - """ |
22 | | - Parse the url and see if a scheme (`https`) is included in it. |
23 | | - If not, then prepend `https` to the url from the issue body |
24 | | -
|
25 | | - This guards against the website thinking the passed in url is another page on https://blackpythondevs.com/ |
26 | | - """ |
27 | | - if url_match: |
28 | | - parsed_url = urlparse(url_match) |
29 | | - |
30 | | - if "http" not in parsed_url.scheme.casefold(): |
31 | | - return f"https://{url_match}" |
32 | | - else: |
33 | | - return url_match |
34 | | - |
35 | | - |
36 | | -def write_conferences_to_file(confs: list[dict]): |
37 | | - # Write the conferences to the _data/conferences.yml file |
38 | | - conferences_path.write_text(json.dumps(confs)) |
39 | | - |
40 | | - |
41 | | -def __to_conference_date(conference_date: str) -> datetime.date: |
42 | | - return datetime.date.fromisoformat(conference_date) |
43 | | - |
44 | | - |
45 | | -def parse_conference(issue: gh_issues.Issue) -> dict[str, str | None]: |
46 | | - """convert an issue to a dictionary of parsed data""" |
47 | | - |
48 | | - KEYS = [ |
49 | | - "conference_name", |
50 | | - "url", |
51 | | - "conference_start_date", |
52 | | - "conference_end_date", |
53 | | - "conference_type", |
54 | | - "conference_location", |
55 | | - "summary", |
56 | | - "speaking", |
57 | | - ] |
58 | | - |
59 | | - _issue = {k: getattr(issue, k, None) for k in KEYS} |
60 | | - _issue["url"] = normalize_url(_issue.get("url", None)) |
61 | | - return _issue |
62 | | - |
63 | | - |
64 | | -def _validate_issue(issue: gh_issues.Issue, date_to_check: str) -> bool: |
65 | | - """Validate an issue based on its `date_to_check`""" |
66 | | - if not (valid_date := getattr(issue, date_to_check, False)): |
67 | | - return False |
68 | | - else: |
69 | | - return __to_conference_date(valid_date) >= datetime.date.today() |
70 | | - |
71 | | - |
72 | | -def build_conferences() -> list[dict[str, str | None]]: # pragma: no cover |
73 | | - return [ |
74 | | - parse_conference(issue) |
75 | | - for issue in get_conference_issues() |
76 | | - if _validate_issue(issue, "conference_end_date") |
77 | | - ] |
78 | | - |
79 | | - |
80 | | -if __name__ == "__main__": # pragma: no cover |
81 | | - ROOT = pathlib.Path(__file__).parent.parent |
82 | | - conferences_path = ROOT.joinpath("_data/conferences.json") |
83 | | - conferences = build_conferences() |
84 | | - j_conferences = json.dumps( |
85 | | - list( |
86 | | - sorted( |
87 | | - conferences, |
88 | | - key=lambda x: __to_conference_date(x["conference_start_date"]), |
89 | | - ) |
90 | | - ), |
91 | | - indent=2, |
92 | | - ) |
93 | | - conferences_path.write_text(f"{j_conferences}\n") |
| 1 | +import datetime |
| 2 | +import json |
| 3 | +import pathlib |
| 4 | +from typing import Iterator |
| 5 | +from urllib.parse import urlparse |
| 6 | + |
| 7 | +import gh_issues |
| 8 | + |
# GitHub search qualifier: issues labelled "conference" in the website repo.
QUERY = "repo:blackpythondevs/blackpythondevs.github.io type:issue label:conference"
| 10 | + |
| 11 | + |
def get_conference_issues(
    query: str = QUERY,
) -> Iterator[gh_issues.Issue]:  # pragma: no cover
    """Fetch conference issues from GitHub using the `gh_issues` search API.

    :param query: GitHub issue-search qualifier string (defaults to ``QUERY``).
    :return: iterator of matching issues.
    """
    issues = gh_issues.issues_by_query(query)
    return issues
| 17 | + |
| 18 | + |
| 19 | +def normalize_url(url_match: str | None) -> str | None: |
| 20 | + """ |
| 21 | + Parse the url and see if a scheme (`https`) is included in it. |
| 22 | + If not, then prepend `https` to the url from the issue body |
| 23 | +
|
| 24 | + This guards against the website thinking the passed in url is another page on https://blackpythondevs.com/ |
| 25 | + """ |
| 26 | + if url_match: |
| 27 | + parsed_url = urlparse(url_match) |
| 28 | + url_scheme = parsed_url.scheme |
| 29 | + |
| 30 | + # If "https" is already the scheme, then we're good and don't need to do anything else |
| 31 | + if url_scheme == "https": |
| 32 | + return url_match |
| 33 | + |
| 34 | + # If the scheme is not "https", then we need to prepend "https" to the url |
| 35 | + if url_scheme.strip() == "": |
| 36 | + return f"https://{url_match}" |
| 37 | + else: |
| 38 | + # If the scheme is a valid protocol (ftp, http, etc.), |
| 39 | + # but not "https", then we need to replace it with "https" |
| 40 | + return url_match.replace(parsed_url.scheme, "https") |
| 41 | + |
| 42 | + |
def write_conferences_to_file(confs: list[dict]):
    # Write the conferences to the _data/conferences.json file.
    # NOTE(review): `conferences_path` is a module-level global that is only
    # assigned inside the __main__ guard — calling this function from an
    # importing module raises NameError; confirm the intended usage.
    conferences_path.write_text(json.dumps(confs))
| 46 | + |
| 47 | + |
def __to_conference_date(conference_date: str) -> datetime.date:
    """Parse an ISO-8601 date string (``YYYY-MM-DD``) into a ``date``."""
    parsed = datetime.date.fromisoformat(conference_date)
    return parsed
| 50 | + |
| 51 | + |
def parse_conference(issue: gh_issues.Issue) -> dict[str, str | None]:
    """Extract the conference fields of *issue* into a plain dict.

    Attributes missing on the issue are recorded as ``None``; the ``url``
    value is normalized to carry an ``https`` scheme.
    """
    fields = (
        "conference_name",
        "url",
        "conference_start_date",
        "conference_end_date",
        "conference_type",
        "conference_location",
        "summary",
        "speaking",
    )

    parsed = {field: getattr(issue, field, None) for field in fields}
    parsed["url"] = normalize_url(parsed.get("url"))
    return parsed
| 69 | + |
| 70 | + |
def _validate_issue(issue: gh_issues.Issue, date_to_check: str) -> bool:
    """Return True when *issue* has `date_to_check` set to today or later."""
    date_value = getattr(issue, date_to_check, False)
    if not date_value:
        # Missing or empty date attribute: the issue cannot be validated.
        return False
    return __to_conference_date(date_value) >= datetime.date.today()
| 77 | + |
| 78 | + |
def build_conferences() -> list[dict[str, str | None]]:  # pragma: no cover
    """Collect parsed conference data for every issue that has not yet ended."""
    conferences = []
    for issue in get_conference_issues():
        # Keep only conferences whose end date is today or in the future.
        if _validate_issue(issue, "conference_end_date"):
            conferences.append(parse_conference(issue))
    return conferences
| 85 | + |
| 86 | + |
if __name__ == "__main__":  # pragma: no cover
    # Repo root is one level above the directory containing this script.
    ROOT = pathlib.Path(__file__).parent.parent
    conferences_path = ROOT.joinpath("_data/conferences.json")
    conferences = build_conferences()

    def _start_date_key(conference: dict[str, str | None]) -> datetime.date:
        # Sort key: conferences ordered by start date. Validation only checks
        # the END date, so a missing start date is possible — sink those to
        # the end instead of crashing `date.fromisoformat(None)`.
        start = conference.get("conference_start_date")
        return __to_conference_date(start) if start else datetime.date.max

    # `sorted` already returns a list, so no extra `list(...)` wrapper needed.
    j_conferences = json.dumps(
        sorted(conferences, key=_start_date_key),
        indent=2,
    )
    # Trailing newline keeps the generated file POSIX-friendly and diff-clean.
    conferences_path.write_text(f"{j_conferences}\n")
0 commit comments