Grey the earthling / gkn.me.uk
Files: 3fde758dc2bd375ea2ab1e2a7188a1a3535d3c19 / generator / common.py

6394 bytes
import datetime
import bs4
import html as htmllib
import marko
import re
import urllib.parse

import parse_content

days = parse_content.days
entries = parse_content.entries
months = parse_content.months
series = parse_content.series
slugify = parse_content.slugify
tags = parse_content.tags
updated = parse_content.updated
years = parse_content.years


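# Serialise the entry with the given id as an Atom <entry> element: title, id,
# alternate link, published/updated timestamps, an optional plain-text summary
# taken from the description, and the entry content rendered to HTML and
# escaped for embedding in XML.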
def atom_entry(id):
    return (
        "<entry><title>"
        + entries[id]["title"]
        + "</title><id>"
        + base_url
        + url_of_entry(id)
        + "</id>"
        f"""<link href="{base_url}{url_of_entry(id)}" rel="alternate" type="text/html"/>"""
        "<published>" + entries[id]["date"].isoformat() + "</published>"
        "<updated>"
        + (
            entries[id]["modified"].isoformat()
            if "modified" in entries[id]
            else entries[id]["date"].isoformat()
        )
        + "</updated>"
        + (
            (
                (
                    "<summary>"
                    + print_html_as_text(entries[id]["description"])
                    + "</summary>"
                )
                if "description" in entries[id]
                else ""
            )
        )
        + '<content type="html">'
        + "\n"
        + print_html_escaped(convert_to_html(entries[id].content))
        + "</content>"
        + "</entry>"
    )


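# Canonical origin for absolute URLs, and a CSS class named after the last
# month in `months` (e.g. "month-04"); the class presumably selects a
# month-specific accent style in the stylesheet.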
base_url = "https://gkn.me.uk"

colour_class = f"month-{list(reversed(months))[0].month:02d}"


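# Convert hypertext source to HTML: gemtext-style "=>" link lines are rewritten
# as <p><a>...</a></p> (using the label when one is given, otherwise the URL
# itself), then the whole text is run through marko's Markdown converter.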
def convert_to_html(hypertext):
    hypertext = "\n".join(
        [
            re.sub(r"^=>\s*(\S+)\s+(.*)$", r'<p><a href="\1">\2</a></p>', line)
            for line in hypertext.split("\n")
        ]
    )
    hypertext = "\n".join(
        [
            re.sub(r"^=>\s*(\S+)$", r'<p><a href="\1">\1</a></p>', line)
            for line in hypertext.split("\n")
        ]
    )
    return marko.convert(hypertext)


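# All entries whose "series" metadata slugifies to the same value as `series`,
# keyed by entry id (insertion order is preserved).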
def entries_in_series(series):
    return dict(
        [
            (id, entries[id])
            for id in entries
            if "series" in entries[id]
            and slugify(series) == slugify(entries[id]["series"])
        ]
    )


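# All entries carrying `tag`, compared by slug (via `slugify`) rather than by
# exact string, keyed by entry id.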
def entries_with_tag(tag):
    return dict(
        [
            (id, entries[id])
            for id in entries
            if "tags" in entries[id]
            and slugify(tag) in map(slugify, entries[id]["tags"])
        ]
    )


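# Markup shared between pages: the common <head> contents and, below, the
# navigation links and the footer.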
head = """
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <link href="/style/tarazed.css" rel="stylesheet">
    <link href="/style/icon.svg" rel="icon">
    <link href="/feed" rel="alternate" type="application/atom+xml" title="Grey Nicholson">
"""

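# Twelve hue values, presumably one per calendar month (cf. colour_class above).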
hues = [15, 30, 52, 82, 149, 187, 210, 246, 269, 291, 321, 352]

link_home = '<a href="/"><strong>Home</strong></a>'

link_to_all_entries = '<a href="/entries"><strong>All entries</strong></a>'

link_to_feed = (
    '<a href="/feed" rel="alternate" type="application/atom+xml">'
    "<strong>Subscribe</strong>"
    "</a>"
)

footer = (
    "<footer>"
    + "<hr/>"
    + "<nav><ul>"
    + "<li>"
    + link_to_feed
    + "</li>"
    + "<li>"
    + link_home
    + "</li>"
    + "</ul></nav>"
    + "</footer>"
)


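# Render a link to an entry: its title, the series it belongs to (unless
# omit_series is passed), its published date (plus modified date if set), any
# extra labels passed via kwargs, and its description if present.
# Supported kwargs: rel, omit_series, labels.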
def link_to_entry(id, **kwargs):
    return (
        f'<a href="{url_of_entry(id)}"'
        + (f''' rel="{kwargs["rel"]}"''' if "rel" in kwargs else "")
        + ">"
        + "<span>"
        + ("<strong>" + entries[id]["title"] + "</strong>")
        + "<span>"
        + ('<span class="hidden"> ·</span>' + " ")
        + (
            "from " + "<cite>" + entries[id]["series"] + "</cite>" + " · "
            if "series" in entries[id] and "omit_series" not in kwargs
            else ""
        )
        + (
            f'<time datetime="{entries[id]["date"]:%Y-%m-%dT%H:%MZ}">'
            + entries[id]["date"].date().isoformat()
            + "</time>"
        )
        + (
            " / "
            + f'<time datetime="{entries[id]["modified"]:%Y-%m-%dT%H:%MZ}">'
            + entries[id]["modified"].date().isoformat()
            + "</time>"
            if "modified" in entries[id]
            else ""
        )
        + (
            " · " + ", ".join("<b>" + label + "</b>" for label in kwargs["labels"])
            if "labels" in kwargs
            else ""
        )
        + "</span>"
        + (
            (
                '<span class="hidden"> ·</span>'
                + " "
                + '<i class="description">'
                + print_html_as_text(entries[id]["description"])
                + "</i>"
            )
            if "description" in entries[id]
            else ""
        )
        + "</span>"
        + "</a>"
    )


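# Render a generic link in the same style as link_to_entry: a bold title with
# an optional description and an optional rel attribute. (`type` is accepted
# but currently unused.)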
def link_with_details(url, title, description=None, rel=None, type=None):
    return (
        f'<a href="{url}"'
        + (f' rel="{rel}"' if rel else "")
        + ">"
        + "<span>"
        + ("<strong>" + title + "</strong>")
        + (
            (
                '<span class="hidden"> ·</span>'
                + " "
                + '<i class="description">'
                + description
                + "</i>"
            )
            if description
            else ""
        )
        + "</span>"
        + "</a>"
    )


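# Return the id `offset` steps away from `id` in `seq` (e.g. offset=1 for the
# next id, offset=-1 for the previous one), or None when there is no such id.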
def offset_id(seq, id, offset):
    part_seq = list(seq)[list(seq).index(id) :: offset]
    if len(part_seq) > 1:
        return part_seq[1]


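# HTML output helpers: print_html normalises markup via BeautifulSoup,
# print_html_as_text strips the tags entirely, and print_html_escaped escapes
# the normalised markup so it can be embedded inside XML (e.g. Atom content).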
def print_html(html):
    return str(bs4.BeautifulSoup(html, "html.parser"))


def print_html_as_text(html):
    return str(bs4.BeautifulSoup(html, "html.parser").get_text())


def print_html_escaped(html):
    return htmllib.escape(str(bs4.BeautifulSoup(html, "html.parser")))


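# Tags that appear on at least four entries; presumably used to keep one-off
# tags out of the tag listings.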
repeated_tags = [tag for tag in tags if len(entries_with_tag(tag)) >= 4]


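# A "stardate": seconds since the Unix epoch divided by 100000, formatted to
# one decimal place. Note that strftime("%s") is a platform-specific extension
# and is not available everywhere.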
def stardate(date):
    return format(int(date.strftime("%s")) / 100000, ".1f")


def title(string):
    return string + " · Grey Nicholson" if string else "Grey Nicholson"


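# URL helpers for the site's routes, e.g.:
#   url_of_day(datetime.date(2024, 1, 5)) -> "/2024/01/05"
#   url_of_tag("Some Tag")                -> "/entries/some-tag"
#     (assuming slugify lowercases and hyphenates)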
def url_of_day(date):
    return f"/{date.year}/{date.month:02d}/{date.day:02d}"


def url_of_entry(id):
    return f"/{id}"


def url_of_month(date):
    return f"/{date.year}/{date.month:02d}"


def url_of_tag(tag):
    return f"/entries/{slugify(tag)}"


def url_of_tag_feed(tag):
    return f"/entries/{slugify(tag)}/feed"


def url_of_series(series):
    return f"/{slugify(series)}"


def url_of_series_feed(series):
    return f"/{slugify(series)}/feed"


def url_of_year(date):
    return f"/{date.year}"
