html.py 21 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670
  1. from pyhtml import *
  2. from html import unescape, escape
  3. from bs4 import BeautifulSoup
  4. from glom import glom as g
  5. from glom import Coalesce
  6. from kddit.settings import *
  7. from urllib.parse import urlparse, parse_qs, urlencode
  8. from kddit.utils import get_time, human_format, preview_re, external_preview_re, builder, processing_re, video_re
  9. from kddit.utils import tuplefy, get_metadata, replace_tag
# Placeholder body rendered when a listing has no children.
nothing = (p("there doesn't seem to be anything here"),)
# Static assets shared by every rendered page.
style_css = link(rel="stylesheet", type="text/css", href="/static/style.css")
slider_css = link(rel="stylesheet", type="text/css", href="/static/slider.css")
favicon = link(rel="icon", href="/static/favicon.svg")
viewport = meta(name="viewport", content_="width=device-width, initial-scale=1.0")
# Default <head> fragment consumed by page().
default_head = (style_css, slider_css, favicon, viewport)
# Tags not provided by pyhtml; declared with self_closing = False so they
# render with explicit closing tags (e.g. <progress></progress>).
class progress(Tag):
    self_closing = False

class svg(Tag):
    self_closing = False

class path(Tag):
    self_closing = False
  22. def subreddit_link(sub):
  23. return a(Class="sub-link", href=f"/r/{sub}")(f"r/{sub}")
# Thin wrappers producing <div> elements with the site's CSS classes.
def header_div(*args):
    return div(Class="header")(*args)

def container_div(*args):
    return div(Class="container")(*args)

def content_div(*args):
    return div(Class="content")(*args)

def post_div(*args):
    return div(Class="post")(*args)

def inner_post_div(*args):
    return div(Class="inner-post")(*args)

def media_div(*args):
    return div(Class="media")(*args)

def menu_div(*args):
    return div(Class="menu")(*args)

def post_info_div(*args):
    return div(Class="post-info")(*args)

def post_content_div(*args):
    return div(Class="post-content")(*args)

def comment_content_div(*args):
    return div(Class="comment-content")(*args)
  44. def slider(arg):
  45. mask = div(Class="css-slider-mask")
  46. ul_ = ul(Class="css-slider with-responsive-images")
  47. return builder(mask, ul_, arg)
  48. def slider_media(arg):
  49. slider = li(Class="slide", tabindex=1)
  50. outer = span(Class="slide-outer")
  51. inner = span(Class="slide-inner")
  52. gfx = span(Class="slide-gfx")(arg)
  53. return builder(slider, outer, inner, gfx)
  54. def nsfw_label(arg):
  55. return label(input_(Class="nsfw", type="checkbox"),arg)
  56. def get_thumbnail(data):
  57. thumbnail = g(data, Coalesce("preview.images.-1.source.url",
  58. "secure_media.oembed.thumbnail_url",
  59. ), default="")
  60. return f"/proxy/{unescape(thumbnail)}" if thumbnail else None
  61. def get_video(data):
  62. is_gif = g(data, Coalesce("media.reddit_video.is_gif", "preview.reddit_video_preview.is_gif") , default=False)
  63. url = g(data, Coalesce("media.reddit_video.fallback_url", "preview.reddit_video_preview.fallback_url", "url"))
  64. return f"/video/{url}" if not is_gif else f"/proxy/{url}"
  65. @tuplefy
  66. def alternate_video(data, url, safe=False):
  67. return None # disabling for now
  68. opts = {}
  69. opts["src"] = f"/video/{url}"
  70. opts["controls"] = ""
  71. if nsfw(data) and safe:
  72. opts["preload"] = "none"
  73. elif thumbnail := get_thumbnail(data):
  74. opts["preload"] = "none"
  75. opts["poster"] = thumbnail
  76. else:
  77. opts["preload"] = "metadata"
  78. video_ = media_div(video(**opts))
  79. return video_
  80. def nsfw(data):
  81. return data.get("over_18")
  82. @tuplefy
  83. def reddit_video(data, safe=False):
  84. opts = {"controls":""}
  85. opts["preload"] = "none"
  86. opts["src"] = get_video(data)
  87. if not (nsfw(data) and safe):
  88. opts["poster"] = get_thumbnail(data)
  89. video_ = video(**opts)
  90. output = media_div(video_)
  91. return output
  92. @tuplefy
  93. def reddit_embed_video(url, safe=False):
  94. opts = {"controls":""}
  95. opts["preload"] = "none" if safe else "auto"
  96. opts["src"] = f'/video/{url}'
  97. video_ = video(**opts)
  98. output = media_div(video_)
  99. return output
  100. @tuplefy
  101. def reddit_image(data, url=None, safe=False, text=None):
  102. url = url or unescape(g(data, Coalesce("preview.images.-1.source.url", "url")))
  103. image_ = media_div(img(src=f'/proxy/{url}'), em(text))
  104. if nsfw(data) and safe:
  105. output = nsfw_label(image_)
  106. else:
  107. output = image_
  108. return output
  109. def gallery(data, safe=False):
  110. output = ()
  111. images = ()
  112. for item in reversed(g(data,"gallery_data.items", default=[])):
  113. media_id = item["media_id"]
  114. url = get_metadata(data, media_id)
  115. if url:
  116. images += reddit_image(data, url, safe)
  117. if images:
  118. output += slider((slider_media(media) for media in images))
  119. return output
  120. def page(title_, header_, content_):
  121. head_ = head(title(unescape(title_)), default_head)
  122. body_ = (header_div(header_), container_div(content_div(content_)))
  123. output = html(head_, body_)
  124. return output
def post_content(data, safe):
    """Render a post's selftext HTML, rewriting embedded reddit media.

    Three in-place rewrites on the parsed soup, in order:
    1. <a> links to v.redd.it videos -> proxied <video> embeds (whole
       parent element replaced);
    2. <a> links matching preview_re -> proxied <img> blocks, keeping the
       link text as a caption when it differs from the URL;
    3. <em> "processing" placeholders -> images resolved via media metadata.
    Returns a one-element tuple holding the post-content div.
    """
    output = ()
    text = unescape(data["selftext_html"])
    soup = BeautifulSoup(text, "html.parser")
    for video_link in soup.find_all("a", href=video_re):
        url = video_link.attrs["href"]
        name = video_re.match(url).group(1)
        r_video = reddit_embed_video(f"https://v.redd.it/{name}", safe=safe)
        # Replace the enclosing element, not just the anchor.
        replace_tag(video_link.parent, r_video)
    for preview_link in soup.find_all("a", href=preview_re):
        url = preview_link.attrs["href"]
        preview_text = preview_link.text
        # Only caption when the link text is not simply the URL itself.
        caption = preview_text if preview_text != url else None
        r_image = reddit_image(data, url, safe, text=caption)
        replace_tag(preview_link.parent, r_image)
    for preview_em in soup.find_all("em", string=processing_re):
        name = processing_re.match(preview_em.text).group(1)
        if url := get_metadata(data, name):
            r_image = reddit_image(data, url, safe)
            replace_tag(preview_em, r_image)
    output += (post_content_div(Safe(str(soup))),)
    return output
def comment_content(data, safe):
    """Render a comment's body HTML, rewriting embedded media.

    Like post_content but for `body_html`: preview links become proxied
    images (the anchor itself is replaced, not its parent), external
    preview <img> tags get their src routed through /proxy/, and
    "processing" <em> placeholders are resolved via media metadata.
    """
    text = unescape(data["body_html"])
    soup = BeautifulSoup(text, "html.parser")
    for preview_link in soup.find_all("a", href=preview_re):
        url = preview_link.attrs["href"]
        preview_text = preview_link.text
        # Only caption when the link text is not simply the URL itself.
        caption = preview_text if preview_text != url else None
        r_image = reddit_image(data, url, safe, text=caption)
        replace_tag(preview_link, r_image)
    for preview_img in soup.find_all("img", src=external_preview_re):
        url = preview_img.attrs["src"]
        preview_img.attrs["src"] = f'/proxy/{url}'
    for preview_em in soup.find_all("em", string=processing_re):
        name = processing_re.match(preview_em.text).group(1)
        if url := get_metadata(data, name):
            r_image = reddit_image(data, url, safe)
            replace_tag(preview_em, r_image)
    return builder(comment_content_div, Safe, str, soup)
  165. @tuplefy
  166. def subreddit_menu(option, subreddit):
  167. output = []
  168. focused = option or DEFAULT_OPTION
  169. for o in SUBREDDIT_OPTIONS:
  170. focus = o == focused
  171. sub = f"/r/{subreddit}" if subreddit else ""
  172. url = f"{sub}/{o}"
  173. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  174. output.append(a_)
  175. return menu_div(output)
  176. @tuplefy
  177. def search_sort_menu(subreddit, params):
  178. output = []
  179. focused = params.get("sort", "relevance")
  180. for o in SEARCH_SORT:
  181. query = params.copy()
  182. query["sort"] = o
  183. focus = o == focused
  184. sub = f"/r/{subreddit}" if subreddit else ""
  185. url = f"{sub}/search?{urlencode(query)}"
  186. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  187. output.append(a_)
  188. return menu_div(output)
  189. @tuplefy
  190. def search_time_menu(subreddit, params):
  191. output = []
  192. focused = params.get("t", "hour")
  193. for i, v in TIME_OPTIONS.items():
  194. query = params.copy()
  195. query["t"] = i
  196. focus = i == focused
  197. sub = f"/r/{subreddit}" if subreddit else ""
  198. url = f"{sub}/search?{urlencode(query)}"
  199. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  200. output.append(a_)
  201. return menu_div(output)
  202. @tuplefy
  203. def domain_menu(option, domain):
  204. output = []
  205. focused = option or DEFAULT_OPTION
  206. for o in SUBREDDIT_OPTIONS:
  207. focus = o == focused
  208. url = f"/domain/{domain}/{o}"
  209. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  210. output.append(a_)
  211. return menu_div(output)
  212. @tuplefy
  213. def subreddit_sort_menu(subreddit, option, time=None):
  214. p = f"/r/{subreddit}" if subreddit else ""
  215. focused = time or "hour"
  216. output = []
  217. for i, v in TIME_OPTIONS.items():
  218. focus = i == focused
  219. url = f'{p}/{option}?t={i}'
  220. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  221. output.append(a_)
  222. return menu_div(output)
  223. @tuplefy
  224. def domain_sort_menu(domain, option, time=None):
  225. output = []
  226. focused = time or "hour"
  227. for i, v in TIME_OPTIONS.items():
  228. focus = i == focused
  229. url = f"/domain/{domain}/{option}?t={i}"
  230. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  231. output.append(a_)
  232. return menu_div(output)
  233. @tuplefy
  234. def multi_sort_menu(user, multi, option, time=None):
  235. p = f"/u/{user}/m/{multi}"
  236. focused = time or "hour"
  237. output = []
  238. for i, v in TIME_OPTIONS.items():
  239. focus = i == focused
  240. url = f'{p}/{option}?t={i}'
  241. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  242. output.append(a_)
  243. return menu_div(output)
  244. @tuplefy
  245. def user_menu(option, user):
  246. output = []
  247. for o in USER_OPTIONS:
  248. focus = option == o or (not option and o == DEFAULT_OPTION)
  249. link_ = f"/u/{user}/{o}"
  250. if focus:
  251. a_ = a(href=link_, Class="focus")(o)
  252. else:
  253. a_ = a(href=link_)(o)
  254. output.append(a_)
  255. return menu_div(output)
  256. @tuplefy
  257. def user_sort_menu(option, sort, user):
  258. output = []
  259. focused = sort or DEFAULT_OPTION
  260. for o in USER_SORT:
  261. focus = o == focused
  262. link_ = f"/u/{user}/{option}/?sort={o}"
  263. a_ = a(href=link_, Class="focus")(o) if focus else a(href=link_)(o)
  264. output.append(a_)
  265. return menu_div(output)
  266. @tuplefy
  267. def multi_menu(option, user, multi):
  268. output = []
  269. for o in SUBREDDIT_OPTIONS:
  270. focus = option == o or (not option and o == DEFAULT_OPTION)
  271. link_ = f"/u/{user}/m/{multi}/{o}"
  272. if focus:
  273. a_ = a(href=link_, Class="focus")(o)
  274. else:
  275. a_ = a(href=link_)(o)
  276. output.append(a_)
  277. return menu_div(output)
  278. @tuplefy
  279. def before_link(data, target, option, t=None):
  280. option = option or ""
  281. sub = f"/{target}" if target else ""
  282. time = f"t={t}&" if t else ""
  283. url = f'{sub}/{option}?{time}count=25&before={data["data"]["before"]}'
  284. a_ = a(Class="button", href=url)("<prev")
  285. return a_
  286. @tuplefy
  287. def after_link(data, target, option, t=None):
  288. option = option or ""
  289. sub = f"/{target}" if target else ""
  290. time = f"t={t}&" if t else ""
  291. url = f'{sub}/{option}?{time}count=25&after={data["data"]["after"]}'
  292. a_ = a(Class="button", href=url)("next>")
  293. return a_
  294. @tuplefy
  295. def search_before_link(data, target, params):
  296. query = params.copy()
  297. query.pop("after", None)
  298. query["before"] = g(data,"data.before")
  299. url = f'{target}/?{urlencode(query)}'
  300. a_ = a(Class="button", href=url)("<prev")
  301. return a_
  302. @tuplefy
  303. def search_after_link(data, target, params):
  304. query = params.copy()
  305. query.pop("before", None)
  306. query["after"] = g(data, "data.after")
  307. url = f'{target}/?{urlencode(query)}'
  308. a_ = a(Class="button", href=url)("next>")
  309. return a_
  310. @tuplefy
  311. def user_before_link(data, target, option, sort=None):
  312. option = option or ""
  313. sub = f"/{target}" if target else ""
  314. time = f"sort={sort}&" if sort else ""
  315. url = f'{sub}/{option}?{time}count=25&before={data["data"]["before"]}'
  316. a_ = a(Class="button", href=url)("<prev")
  317. return a_
  318. @tuplefy
  319. def user_after_link(data, target, option, sort=None):
  320. option = option or ""
  321. sub = f"/{target}" if target else ""
  322. time = f"sort={sort}&" if sort else ""
  323. url = f'{sub}/{option}?{time}count=25&after={data["data"]["after"]}'
  324. a_ = a(Class="button", href=url)("next>")
  325. return a_
def reddit_media(data, safe):
    """Render a post's primary media (video or image) in a post-content div.

    Returns an empty tuple when the post has no renderable reddit media.
    """
    output = ()
    if data["is_video"] or g(data, "preview.reddit_video_preview", default=None):
        output += reddit_video(data, safe=safe)
    # NOTE(review): precedence here means "bail out for any non-image hint,
    # OR whenever the domain is not a reddit media domain" — confirm that a
    # hinted image on a non-reddit domain is meant to be skipped too.
    elif (data.get("post_hint") and data.get("post_hint") != "image") or not data.get('is_reddit_media_domain'):
        return output
    else:
        output += reddit_image(data, safe=safe)
    return post_content_div(output)
def reddit_content(data, safe=False):
    """Dispatch a post to its media renderer (gallery, media, or None)."""
    if data.get("is_gallery"):
        output = gallery(data, safe=safe)
    # NOTE(review): `and` binds tighter than `or`, so this reads as
    # "(not self AND real thumbnail) OR reddit media domain" — confirm the
    # `not is_self` guard was not meant to cover the whole expression.
    elif not data.get("is_self") and (data.get("thumbnail") and data.get("thumbnail") not in ("self", "spoiler")) or data.get("is_reddit_media_domain"):
        output = reddit_media(data, safe)
    else:
        output = None
    return output
  343. def rich_text(richtext, text):
  344. for item in richtext:
  345. a_ = item.get("a")
  346. u = item.get("u")
  347. if not (a_ or u):
  348. continue
  349. text = text.replace(a_, f'<span class="flair-emoji" style="background-image:url(/proxy/{u});"></span>')
  350. return text
  351. def domain_link(data):
  352. if data.get("is_self"):
  353. return None
  354. elif data.get("author") == "[deleted]":
  355. return None
  356. elif data.get("crosspost_parent_list"):
  357. return None
  358. domain = data.get("domain")
  359. domain_url = f"/domain/{domain}"
  360. return ("(", a(href=domain_url)(f"{domain}"), ")")
  361. @tuplefy
  362. def post(data, safe=False):
  363. content = ()
  364. if not data.get("is_self") and not data.get("crosspost_parent_list"):
  365. content += (a(Class="post-link",href=data["url"])(data["url"]),)
  366. if data.get("selftext_html"):
  367. content += post_content(data, safe)
  368. if data.get("crosspost_parent_list"):
  369. content += post(data['crosspost_parent_list'][0], True)
  370. elif data.get("poll_data"):
  371. content += poll(data)
  372. elif data.get("self_text") == "[deleted]":
  373. pass
  374. elif result := reddit_content(data, safe):
  375. content += (result,)
  376. author = data.get("author")
  377. permalink = data.get("permalink")
  378. title_ = unescape(data.get("title"))
  379. domain = domain_link(data)
  380. votes = human_format(int(data.get("ups") or data.get("downs")))
  381. author = ("Posted by", a(href=f'/u/{author}')(f'u/{author}'))
  382. title_link = builder(a(href=permalink),Safe,b,title_)
  383. post_info = post_info_div(subreddit_link(data["subreddit"]),"•", author, get_time(data["created"]), domain)
  384. flair = post_flair(data)
  385. inner = (title_link, flair, content)
  386. votes = div(Class="votes")(
  387. span(Class="icon icon-upvote"),
  388. votes,
  389. )
  390. return post_div(votes, inner_post_div(post_info, inner))
  391. @tuplefy
  392. def poll(data):
  393. poll_options = ()
  394. tvotes = g(data,"poll_data.total_vote_count")
  395. for opt in data["poll_data"]["options"]:
  396. if "vote_count" in opt:
  397. votes = opt["vote_count"]
  398. cin = (
  399. p(f'{opt["text"]} : {votes}'),
  400. progress(
  401. value=votes,
  402. max=tvotes))
  403. poll_options += cin
  404. else:
  405. cin = (p(input_(disabled="", type="radio"), opt["text"]))
  406. poll_options += (cin,)
  407. div_ = div(Class="poll")(poll_options)
  408. return div_
  409. def posts(data, safe=False):
  410. posts_ = ()
  411. for children in g(data, "data.children"):
  412. data = children["data"]
  413. posts_ += post(data, safe)
  414. return posts_
  415. @tuplefy
  416. def mixed_content(data, safe):
  417. output = ()
  418. for children in g(data, "data.children"):
  419. if children["kind"] == "t1":
  420. output += (comment(children, safe),)
  421. elif children["kind"] == "t3":
  422. output += (post(children["data"], safe),)
  423. return output
def comment_flair(data):
    """Build the author-flair <span> for a comment, or None when absent."""
    flair_text = g(data, "author_flair_text", default=None)
    if flair_richtext := data.get("author_flair_richtext"):
        # NOTE(review): rich_text assumes flair_text is a str; if richtext
        # exists while author_flair_text is None this would fail — confirm
        # the API always pairs them.
        flair_text = rich_text(flair_richtext, flair_text )
    return builder(span(Class="flair"), Safe, unescape, flair_text) if flair_text else None
def post_flair(data):
    """Build the link-flair <span> for a post, or None when absent."""
    flair_text = g(data, "link_flair_text", default=None)
    if flair_richtext := data.get("link_flair_richtext"):
        # NOTE(review): same assumption as comment_flair — rich_text needs a
        # str; confirm link_flair_text accompanies link_flair_richtext.
        flair_text = rich_text(flair_richtext, flair_text )
    return builder(span(Class="flair"), Safe, unescape, flair_text) if flair_text else None
def comment(data, full=False):
    """Render a comment `t1` thing.

    data: listing child of shape {"kind": ..., "data": {...}}.
    full=True: standalone form (link title + subreddit line, no replies),
    used where comments appear outside their thread.
    full=False: thread form with points, permalink and nested replies.
    """
    comment_ = data["data"]
    # NOTE(review): `text` is unused here — comment_content re-derives the
    # body itself; candidate for removal.
    text = unescape(comment_["body_html"])
    flair = comment_flair(comment_)
    if full:
        title_ = comment_["link_title"]
        header_ = ()
        header_ += ("by", a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}'), flair)
        header_ += ("in", subreddit_link(comment_["subreddit"]))
        header_ += (get_time(comment_["created"]),)
        inner = (
            a(href=comment_["permalink"])(b(title_)),
            div(Class="comment-info")(header_),
            comment_content(comment_, True)
        )
        return div(Class="comment")(inner)
    else:
        replies_ = replies(data)
        a_ = a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}')
        link_ = a(href=comment_["permalink"])("🔗")
        points = (span(human_format(int(comment_["ups"] or comment_["downs"]))), "points", "·")
        inner = (div(Class="comment-info")(
            a_, flair, points,
            get_time(comment_["created"]), link_),
            comment_content(comment_, True),
            replies_)
        return div(Class="comment")(inner)
  461. @tuplefy
  462. def reply(data):
  463. comment_ = data["data"]
  464. text = unescape(comment_["body_html"])
  465. flair = comment_flair(comment_)
  466. replies_ = replies(data)
  467. a_ = a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}')
  468. link_ = a(href=comment_["permalink"])("🔗")
  469. points = (span(human_format(int(comment_["ups"] or comment_["downs"]))), "points", "·" )
  470. inner = (div(Class="comment-info")(
  471. a_,flair, points,
  472. get_time(comment_["created"]), link_),
  473. comment_content(comment_, True),
  474. replies_)
  475. return div(Class="reply")(inner)
  476. def comments(data_list):
  477. comments = ()
  478. for data in data_list:
  479. if data['kind'] == "more":
  480. comments += (p("..."),)
  481. else:
  482. comments += (comment(data),)
  483. return div(Class="comments")(comments)
  484. def replies(data):
  485. replies_ = ()
  486. if data['kind'] == "more":
  487. replies_ += (p("..."),)
  488. for children in g(data, "data.replies.data.children", default=[]):
  489. if children['kind'] == "more":
  490. replies_ += (p("..."),)
  491. else:
  492. replies_ += reply(children)
  493. return ul(replies_) if replies else None
  494. @tuplefy
  495. def subreddit_nav(data, subreddit, option=None, time=None):
  496. buttons = ()
  497. target = f"r/{subreddit}" if subreddit else ""
  498. if data["data"]["before"]:
  499. buttons += before_link(data, target, option, time)
  500. if data["data"]["after"]:
  501. buttons += after_link(data, target, option, time)
  502. return div(Class="nav")(buttons) if buttons else ()
  503. @tuplefy
  504. def search_nav(data, subreddit, params):
  505. buttons = ()
  506. target = f"/r/{subreddit}/search" if subreddit else "/search"
  507. if g(data, "data.before"):
  508. buttons += search_before_link(data, target, params)
  509. if g(data, "data.after"):
  510. buttons += search_after_link(data, target, params)
  511. return div(Class="nav")(buttons) if buttons else None
  512. @tuplefy
  513. def domain_nav(data, domain, option=None, time=None):
  514. buttons = ()
  515. target = f"domain/{domain}"
  516. if data["data"]["before"]:
  517. buttons += before_link(data, target, option, time)
  518. if data["data"]["after"]:
  519. buttons += after_link(data, target, option, time)
  520. return div(Class="nav")(buttons) if buttons else ()
  521. @tuplefy
  522. def user_nav(data, user, option=None, time=None):
  523. buttons = ()
  524. target = f"u/{user}"
  525. if data["data"]["before"]:
  526. buttons += user_before_link(data, target, option, time)
  527. if data["data"]["after"]:
  528. buttons += user_after_link(data, target, option, time)
  529. return div(Class="nav")(buttons) if buttons else ()
  530. @tuplefy
  531. def multi_nav(data, user, multi, option=None, time=None):
  532. buttons = ()
  533. target = f"u/{user}/m/{multi}"
  534. if data["data"]["before"]:
  535. buttons += user_before_link(data, target, option, time)
  536. if data["data"]["after"]:
  537. buttons += user_after_link(data, target, option, time)
  538. return div(Class="nav")(buttons) if buttons else ()
  539. def page_header(subreddit=None, user=None, multi=None, domain=None):
  540. header_ = (a(Class="main-link",href="/")("kddit"),)
  541. if subreddit:
  542. header_ += (a(Class="subreddit-link", href=f"/r/{subreddit}")(f"r/{subreddit}"),)
  543. elif multi and user:
  544. header_ += (a(Class="subreddit-link", href=f"/u/{user}/m/{multi}")(f"u/{user}/m/{multi}"),)
  545. elif user:
  546. header_ += (a(Class="subreddit-link", href=f"/u/{user}")(f"u/{user}"),)
  547. elif domain:
  548. header_ += (a(Class="subreddit-link", href=f"/domain/{domain}")(f"domain/{domain}"),)
  549. return header_
  550. def error_page(error):
  551. title_ = f"{error.status}!"
  552. output = h1(title_)
  553. header_ = page_header()
  554. return page(title_, header_, output)