html.py 21 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687
  1. from pyhtml import *
  2. from html import unescape, escape
  3. from bs4 import BeautifulSoup
  4. from glom import glom as g
  5. from glom import Coalesce
  6. from kddit.settings import *
  7. from urllib.parse import urlparse, parse_qs, urlencode
  8. from kddit.utils import get_time, human_format, preview_re, external_preview_re, builder, processing_re, video_re
  9. from kddit.utils import tuplefy, get_metadata, replace_tag
# Fallback body rendered when a listing has no children.
nothing = (p("there doesn't seem to be anything here"),)
# Static <head> assets shared by every rendered page.
style_css = link(rel="stylesheet", type="text/css", href="/static/style.css")
slider_css = link(rel="stylesheet", type="text/css", href="/static/slider.css")
favicon = link(rel="icon", href="/static/favicon.svg")
viewport = meta(name="viewport", content_="width=device-width, initial-scale=1.0")
# Spliced into head() by page().
default_head = (style_css, slider_css, favicon, viewport)
class progress(Tag):
    # pyhtml does not ship a <progress> element; declare it with a closing tag.
    self_closing = False

class svg(Tag):
    # Inline <svg> element for icons.
    self_closing = False

class path(Tag):
    # <path> child element used inside <svg>.
    self_closing = False
  22. def subreddit_link(sub):
  23. return a(Class="sub-link", href=f"/r/{sub}")(f"r/{sub}")
# --- Layout helpers: one <div class="..."> wrapper per page region. ---

def header_div(*args):
    # Top-of-page header bar.
    return div(Class="header")(*args)

def container_div(*args):
    # Outer page container.
    return div(Class="container")(*args)

def content_div(*args):
    # Main content column inside the container.
    return div(Class="content")(*args)

def post_div(*args):
    # One post card (votes column + body).
    return div(Class="post")(*args)

def inner_post_div(*args):
    # Right-hand side of a post card (info + content).
    return div(Class="inner-post")(*args)

def media_div(*args):
    # Wrapper around an <img>/<video> element.
    return div(Class="media")(*args)

def menu_div(*args):
    # Row of sort/filter links.
    return div(Class="menu")(*args)

def post_info_div(*args):
    # "Posted by … in …" byline row.
    return div(Class="post-info")(*args)

def post_content_div(*args):
    # Body of a post (selftext or media).
    return div(Class="post-content")(*args)

def comment_content_div(*args):
    # Body of a comment.
    return div(Class="comment-content")(*args)
  44. def slider(arg):
  45. mask = div(Class="css-slider-mask")
  46. ul_ = ul(Class="css-slider with-responsive-images")
  47. return builder(mask, ul_, arg)
  48. def slider_media(arg):
  49. slider = li(Class="slide", tabindex=1)
  50. outer = span(Class="slide-outer")
  51. inner = span(Class="slide-inner")
  52. gfx = span(Class="slide-gfx")(arg)
  53. return builder(slider, outer, inner, gfx)
  54. def nsfw_label(arg):
  55. return label(input_(Class="nsfw", type="checkbox"),arg)
def get_thumbnail(data):
    """Best-effort poster image for a post, routed through the local proxy.

    Prefers the oembed thumbnail, then falls back to a preview source;
    returns None when neither resolves.
    """
    # NOTE(review): the "-1" segment assumes glom resolves negative list
    # indices in dotted string paths — confirm; Coalesce's default="" would
    # otherwise absorb the failure and this always returns None.
    thumbnail = g(data, Coalesce("secure_media.oembed.thumbnail_url", "preview.images.-1.source.url"), default="")
    return f"/proxy/{unescape(thumbnail)}" if thumbnail else None
  59. def get_video(data):
  60. is_gif = g(data, "media.reddit_video.is_gif", default=False)
  61. url = g(data, "url") if not is_gif else g(data, "media.reddit_video.fallback_url")
  62. return f"/video/{url}" if not is_gif else f"/proxy/{url}"
@tuplefy
def alternate_video(data, url, safe=False):
    """Render an external video via the /video proxy.

    Currently disabled: always returns None (presumably normalized by
    @tuplefy so that callers concatenating the result get nothing —
    confirm tuplefy's None handling).
    """
    return None # disabling for now
    # Unreachable legacy implementation kept for re-enablement.
    opts = {}
    opts["src"] = f"/video/{url}"
    opts["controls"] = ""
    if nsfw(data) and safe:
        # Safe mode: no poster, no preloading for NSFW posts.
        opts["preload"] = "none"
    elif thumbnail := get_thumbnail(data):
        opts["preload"] = "none"
        opts["poster"] = thumbnail
    else:
        opts["preload"] = "metadata"
    video_ = media_div(video(**opts))
    return video_
  78. def nsfw(data):
  79. return data.get("over_18")
  80. @tuplefy
  81. def reddit_video(data, safe=False):
  82. is_gif = g(data, "media.reddit_video.is_gif", default=False)
  83. opts = {"controls":""}
  84. opts["preload"] = "none"
  85. opts["src"] = get_video(data)
  86. if not (nsfw(data) and safe):
  87. opts["poster"] = get_thumbnail(data)
  88. video_ = video(**opts)
  89. output = media_div(video_)
  90. return output
  91. @tuplefy
  92. def reddit_embed_video(url, safe=False):
  93. opts = {"controls":""}
  94. opts["preload"] = "none" if safe else "auto"
  95. opts["src"] = f'/video/{url}'
  96. video_ = video(**opts)
  97. output = media_div(video_)
  98. return output
  99. @tuplefy
  100. def reddit_image(data, url=None, safe=False, text=None):
  101. url = url or unescape(g(data, Coalesce("preview.images.-1.source.url", "url")))
  102. image_ = media_div(img(src=f'/proxy/{url}'), em(text))
  103. if nsfw(data) and safe:
  104. output = nsfw_label(image_)
  105. else:
  106. output = image_
  107. return output
  108. def gallery(data, safe=False):
  109. output = (a(Class="post-link",href=data["url"])(data["url"]),)
  110. images = ()
  111. for item in reversed(g(data,"gallery_data.items", default=[])):
  112. media_id = item["media_id"]
  113. url = get_metadata(data, media_id)
  114. if url:
  115. images += reddit_image(data, url, safe)
  116. if images:
  117. output += slider((slider_media(media) for media in images))
  118. return output
  119. def page(title_, header_, content_):
  120. head_ = head(title(unescape(title_)), default_head)
  121. body_ = (header_div(header_), container_div(content_div(content_)))
  122. output = html(head_, body_)
  123. return output
def post_content(data, safe):
    """Render a post's selftext, rewriting reddit media references in place.

    Parses the escaped selftext HTML and replaces three patterns:
    v.redd.it links, preview-image links, and "processing" <em>
    placeholders that can be resolved from the post's media metadata.
    """
    text = unescape(data["selftext_html"])
    soup = BeautifulSoup(text, "html.parser")
    # Inline v.redd.it links become embedded players (the whole parent tag
    # is replaced, not just the anchor).
    for video_link in soup.find_all("a", href=video_re):
        url = video_link.attrs["href"]
        name = video_re.match(url).group(1)
        r_video = reddit_embed_video(f"https://v.redd.it/{name}", safe=safe)
        replace_tag(video_link.parent, r_video)
    # Preview links become proxied images; the link text is kept as a
    # caption only when it differs from the raw URL.
    for preview_link in soup.find_all("a", href=preview_re):
        url = preview_link.attrs["href"]
        preview_text = preview_link.text
        caption = preview_text if preview_text != url else None
        r_image = reddit_image(data, url, safe, text=caption)
        replace_tag(preview_link.parent, r_image)
    # "Processing..." placeholders are swapped for the finished media when
    # the id resolves via the post's media metadata.
    for preview_em in soup.find_all("em", string=processing_re):
        name = processing_re.match(preview_em.text).group(1)
        if url := get_metadata(data, name):
            r_image = reddit_image(data, url, safe)
            replace_tag(preview_em , r_image)
    return builder(post_content_div, Safe,str,soup)
def comment_content(data, safe):
    """Render a comment's body HTML, rewriting media references in place.

    Same approach as post_content(), plus rerouting external preview
    <img> tags through the local proxy.
    """
    text = unescape(data["body_html"])
    soup = BeautifulSoup(text, "html.parser")
    # Preview links become proxied images; link text kept as caption only
    # when it differs from the raw URL.
    for preview_link in soup.find_all("a", href=preview_re):
        url = preview_link.attrs["href"]
        preview_text = preview_link.text
        caption = preview_text if preview_text != url else None
        r_image = reddit_image(data, url, safe, text=caption)
        replace_tag(preview_link.parent, r_image)
    # External preview images are rewritten in place to go through /proxy/.
    for preview_img in soup.find_all("img", src=external_preview_re):
        url = preview_img.attrs["src"]
        preview_img.attrs["src"] = f'/proxy/{url}'
    # "Processing..." placeholders resolved via the post's media metadata.
    for preview_em in soup.find_all("em", string=processing_re):
        name = processing_re.match(preview_em.text).group(1)
        if url := get_metadata(data, name):
            r_image = reddit_image(data, url, safe)
            replace_tag(preview_em , r_image)
    return builder(comment_content_div, Safe,str,soup)
  162. @tuplefy
  163. def subreddit_menu(option, subreddit):
  164. output = []
  165. focused = option or DEFAULT_OPTION
  166. for o in SUBREDDIT_OPTIONS:
  167. focus = o == focused
  168. sub = f"/r/{subreddit}" if subreddit else ""
  169. url = f"{sub}/{o}"
  170. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  171. output.append(a_)
  172. return menu_div(output)
  173. @tuplefy
  174. def search_sort_menu(subreddit, params):
  175. output = []
  176. focused = params.get("sort", "relevance")
  177. for o in SEARCH_SORT:
  178. query = params.copy()
  179. query["sort"] = o
  180. focus = o == focused
  181. sub = f"/r/{subreddit}" if subreddit else ""
  182. url = f"{sub}/search?{urlencode(query)}"
  183. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  184. output.append(a_)
  185. return menu_div(output)
  186. @tuplefy
  187. def search_time_menu(subreddit, params):
  188. output = []
  189. focused = params.get("t", "hour")
  190. for i, v in TIME_OPTIONS.items():
  191. query = params.copy()
  192. query["t"] = i
  193. focus = i == focused
  194. sub = f"/r/{subreddit}" if subreddit else ""
  195. url = f"{sub}/search?{urlencode(query)}"
  196. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  197. output.append(a_)
  198. return menu_div(output)
  199. @tuplefy
  200. def domain_menu(option, domain):
  201. output = []
  202. focused = option or DEFAULT_OPTION
  203. for o in SUBREDDIT_OPTIONS:
  204. focus = o == focused
  205. url = f"/domain/{domain}/{o}"
  206. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  207. output.append(a_)
  208. return menu_div(output)
  209. @tuplefy
  210. def subreddit_sort_menu(subreddit, option, time=None):
  211. p = f"/r/{subreddit}" if subreddit else ""
  212. focused = time or "hour"
  213. output = []
  214. for i, v in TIME_OPTIONS.items():
  215. focus = i == focused
  216. url = f'{p}/{option}?t={i}'
  217. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  218. output.append(a_)
  219. return menu_div(output)
  220. @tuplefy
  221. def domain_sort_menu(domain, option, time=None):
  222. output = []
  223. focused = time or "hour"
  224. for i, v in TIME_OPTIONS.items():
  225. focus = i == focused
  226. url = f"/domain/{domain}/{option}?t={i}"
  227. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  228. output.append(a_)
  229. return menu_div(output)
  230. @tuplefy
  231. def multi_sort_menu(user, multi, option, time=None):
  232. p = f"/u/{user}/m/{multi}"
  233. focused = time or "hour"
  234. output = []
  235. for i, v in TIME_OPTIONS.items():
  236. focus = i == focused
  237. url = f'{p}/{option}?t={i}'
  238. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  239. output.append(a_)
  240. return menu_div(output)
  241. @tuplefy
  242. def user_menu(option, user):
  243. output = []
  244. for o in USER_OPTIONS:
  245. focus = option == o or (not option and o == DEFAULT_OPTION)
  246. link_ = f"/u/{user}/{o}"
  247. if focus:
  248. a_ = a(href=link_, Class="focus")(o)
  249. else:
  250. a_ = a(href=link_)(o)
  251. output.append(a_)
  252. return menu_div(output)
  253. @tuplefy
  254. def user_sort_menu(option, sort, user):
  255. output = []
  256. focused = sort or DEFAULT_OPTION
  257. for o in USER_SORT:
  258. focus = o == focused
  259. link_ = f"/u/{user}/{option}/?sort={o}"
  260. a_ = a(href=link_, Class="focus")(o) if focus else a(href=link_)(o)
  261. output.append(a_)
  262. return menu_div(output)
  263. @tuplefy
  264. def multi_menu(option, user, multi):
  265. output = []
  266. for o in SUBREDDIT_OPTIONS:
  267. focus = option == o or (not option and o == DEFAULT_OPTION)
  268. link_ = f"/u/{user}/m/{multi}/{o}"
  269. if focus:
  270. a_ = a(href=link_, Class="focus")(o)
  271. else:
  272. a_ = a(href=link_)(o)
  273. output.append(a_)
  274. return menu_div(output)
  275. @tuplefy
  276. def before_link(data, target, option, t=None):
  277. option = option or ""
  278. sub = f"/{target}" if target else ""
  279. time = f"t={t}&" if t else ""
  280. url = f'{sub}/{option}?{time}count=25&before={data["data"]["before"]}'
  281. a_ = a(Class="button", href=url)("<prev")
  282. return a_
  283. @tuplefy
  284. def after_link(data, target, option, t=None):
  285. option = option or ""
  286. sub = f"/{target}" if target else ""
  287. time = f"t={t}&" if t else ""
  288. url = f'{sub}/{option}?{time}count=25&after={data["data"]["after"]}'
  289. a_ = a(Class="button", href=url)("next>")
  290. return a_
  291. @tuplefy
  292. def search_before_link(data, target, params):
  293. query = params.copy()
  294. query.pop("after", None)
  295. query["before"] = g(data,"data.before")
  296. url = f'{target}/?{urlencode(query)}'
  297. a_ = a(Class="button", href=url)("<prev")
  298. return a_
  299. @tuplefy
  300. def search_after_link(data, target, params):
  301. query = params.copy()
  302. query.pop("before", None)
  303. query["after"] = g(data, "data.after")
  304. url = f'{target}/?{urlencode(query)}'
  305. a_ = a(Class="button", href=url)("next>")
  306. return a_
  307. @tuplefy
  308. def user_before_link(data, target, option, sort=None):
  309. option = option or ""
  310. sub = f"/{target}" if target else ""
  311. time = f"sort={sort}&" if sort else ""
  312. url = f'{sub}/{option}?{time}count=25&before={data["data"]["before"]}'
  313. a_ = a(Class="button", href=url)("<prev")
  314. return a_
  315. @tuplefy
  316. def user_after_link(data, target, option, sort=None):
  317. option = option or ""
  318. sub = f"/{target}" if target else ""
  319. time = f"sort={sort}&" if sort else ""
  320. url = f'{sub}/{option}?{time}count=25&after={data["data"]["after"]}'
  321. a_ = a(Class="button", href=url)("next>")
  322. return a_
def alternate_media(data, safe=False):
    # Placeholder: non-reddit media is actually handled by
    # alternate_content(); kept (unused here) for interface symmetry.
    pass
  325. def youtube_media(data, url, uri, safe):
  326. output = ()
  327. if uri.netloc == "youtu.be":
  328. output += alternate_video(data, url, safe)
  329. elif v := parse_qs(uri.query).get("v"):
  330. u = f"https://youtu.be/{v[0]}"
  331. output += alternate_video(data, u, safe)
  332. return output
  333. def imgur_media(data, url, safe):
  334. if url.endswith(".gifv"):
  335. output = alternate_video(data, url, safe=safe)
  336. else:
  337. output = reddit_image(data, safe=safe)
  338. return output
  339. def alternate_content(data, safe=False):
  340. url = data["url"]
  341. output = (a(Class="post-link",href=url)(url),)
  342. uri = urlparse(url)
  343. netloc = uri.netloc
  344. if netloc in PROXY_ALLOW["youtube"]:
  345. output += youtube_media(data, url, uri, safe)
  346. elif netloc in PROXY_ALLOW["video"]:
  347. output += alternate_video(data, url, safe=safe)
  348. elif netloc in PROXY_ALLOW["imgur"]:
  349. output += imgur_media(data, url, safe)
  350. elif netloc in PROXY_ALLOW["image"]:
  351. output += reddit_image(data, safe=safe)
  352. return post_content_div(output)
  353. def reddit_media(data, safe):
  354. output = (a(Class="post-link", href=data["url"])(data["url"]),)
  355. if data["is_video"]:
  356. output += reddit_video(data, safe=safe)
  357. else:
  358. output += reddit_image(data, safe=safe)
  359. return post_content_div(output)
  360. def reddit_content(data, safe=False):
  361. if data.get("selftext_html"):
  362. output = post_content(data, safe)
  363. elif data.get("is_reddit_media_domain") and data.get("thumbnail"):
  364. output = reddit_media(data, safe)
  365. elif data.get("is_gallery"):
  366. output = gallery(data, safe=safe)
  367. else:
  368. output = None
  369. return output
  370. def rich_text(richtext, text):
  371. for item in richtext:
  372. a_ = item.get("a")
  373. u = item.get("u")
  374. if not (a_ or u):
  375. continue
  376. text = text.replace(a_, f'<span class="flair-emoji" style="background-image:url(/proxy/{u});"></span>')
  377. return text
  378. def domain_link(data):
  379. if data.get("is_self"):
  380. return None
  381. domain = data.get("domain")
  382. domain_url = f"/domain/{domain}"
  383. return ("(", a(href=domain_url)(f"{domain}"), ")")
@tuplefy
def post(data, safe=False):
    """Render one post card: votes column plus title/flair/content.

    Crossposts render their first parent (forced safe); polls get the
    poll renderer; otherwise reddit-hosted content is tried before the
    external-domain fallback.
    """
    if data.get("crosspost_parent_list"):
        content = post(data['crosspost_parent_list'][0], True)
    elif data.get("poll_data"):
        content = poll(data)
    else:
        content = reddit_content(data, safe) or alternate_content(data, safe)
    author = data.get("author")
    permalink = data.get("permalink")
    title_ = unescape(data.get("title"))
    domain = domain_link(data)
    # NOTE(review): falls back to downs whenever ups is falsy (0 or None);
    # int(None) would raise if both were missing — confirm upstream always
    # provides at least one.
    votes = human_format(int(data.get("ups") or data.get("downs")))
    author = ("Posted by", a(href=f'/u/{author}')(f'u/{author}'))
    title_link = builder(a(href=permalink),Safe,b,title_)
    post_info = post_info_div(subreddit_link(data["subreddit"]),"•", author, get_time(data["created"]), domain)
    flair = post_flair(data)
    inner = (title_link, flair, content)
    # Rebind: votes becomes the rendered votes column.
    votes = div(Class="votes")(span(Class="icon icon-upvote"), votes , span(Class="icon icon-downvote"))
    return post_div(votes, inner_post_div(post_info, inner))
def poll(data):
    """Render poll options: tallies as <progress> bars, or radio stubs.

    Options with a visible "vote_count" get a label plus a progress bar
    against the total; options whose counts are hidden render as a
    disabled radio placeholder.
    """
    options = ()
    tvotes = g(data,"poll_data.total_vote_count")
    for opt in data["poll_data"]["options"]:
        if "vote_count" in opt:
            votes = opt["vote_count"]
            # cin is a 2-tuple, appended element-wise (label + bar).
            cin = (
                p(f'{opt["text"]} : {votes}'),
                progress(
                    value=votes,
                    max=tvotes))
            options += cin
        else:
            # Single element (the parens are grouping, not a tuple).
            cin = (p(input_(disabled="", type="radio"), opt["text"]))
            options += (cin,)
    div_ = div(Class="poll")(options)
    return div_
  421. def posts(data, safe=False):
  422. posts_ = ()
  423. for children in g(data, "data.children"):
  424. data = children["data"]
  425. posts_ += post(data, safe)
  426. return posts_
  427. @tuplefy
  428. def mixed_content(data, safe):
  429. output = ()
  430. for children in g(data, "data.children"):
  431. if children["kind"] == "t1":
  432. output += (comment(children, safe),)
  433. elif children["kind"] == "t3":
  434. output += (post(children["data"], safe),)
  435. return output
def comment_flair(data):
    """Author flair as a <span> with emoji expanded; None when absent."""
    flair_text = g(data, "author_flair_text", default=None)
    if flair_richtext := data.get("author_flair_richtext"):
        # NOTE(review): flair_text may still be None here, which rich_text
        # would pass to str.replace — confirm the API always pairs fields.
        flair_text = rich_text(flair_richtext, flair_text )
    return builder(span(Class="flair"),Safe,unescape,flair_text) if flair_text else None
def post_flair(data):
    """Link (post) flair as a <span> with emoji expanded; None when absent."""
    flair_text = g(data, "link_flair_text", default=None)
    if flair_richtext := data.get("link_flair_richtext"):
        # NOTE(review): flair_text may still be None here — see comment_flair.
        flair_text = rich_text(flair_richtext, flair_text )
    return builder(span(Class="flair"),Safe,unescape,flair_text) if flair_text else None
  446. def comment(data, full=False):
  447. comment_ = data["data"]
  448. text = unescape(comment_["body_html"])
  449. flair = comment_flair(comment_)
  450. if full:
  451. title_ = comment_["link_title"]
  452. header_ = ()
  453. header_ += ("by", a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}'),flair)
  454. header_ += ("in", subreddit_link(comment_["subreddit"]))
  455. header_ += (get_time(comment_["created"]),)
  456. inner = (
  457. a(href=comment_["permalink"])(b(title_)),
  458. div(Class="comment-info")(header_),
  459. comment_content(comment_, True)
  460. )
  461. return div(Class="comment")(inner)
  462. else:
  463. replies_ = replies(data)
  464. a_ = a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}')
  465. link_ = a(href=comment_["permalink"])("🔗")
  466. points = (span(human_format(int(comment_["ups"] or comment_["downs"]))), "points", "·" )
  467. inner = (div(Class="comment-info")(
  468. a_,flair, points,
  469. get_time(comment_["created"]), link_),
  470. comment_content(comment_, True),
  471. replies_)
  472. return div(Class="comment")(inner)
  473. @tuplefy
  474. def reply(data):
  475. comment_ = data["data"]
  476. text = unescape(comment_["body_html"])
  477. flair = comment_flair(comment_)
  478. replies_ = replies(data)
  479. a_ = a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}')
  480. link_ = a(href=comment_["permalink"])("🔗")
  481. points = (span(human_format(int(comment_["ups"] or comment_["downs"]))), "points", "·" )
  482. inner = (div(Class="comment-info")(
  483. a_,flair, points,
  484. get_time(comment_["created"]), link_),
  485. comment_content(comment_, True),
  486. replies_)
  487. return div(Class="reply")(inner)
  488. def comments(data_list):
  489. comments = ()
  490. for data in data_list:
  491. if data['kind'] == "more":
  492. comments += (p("..."),)
  493. else:
  494. comments += (comment(data),)
  495. return div(Class="comments")(comments)
  496. def replies(data):
  497. replies_ = ()
  498. if data['kind'] == "more":
  499. replies_ += (p("..."),)
  500. for children in g(data, "data.replies.data.children", default=[]):
  501. if children['kind'] == "more":
  502. replies_ += (p("..."),)
  503. else:
  504. replies_ += reply(children)
  505. return ul(replies_) if replies else None
  506. @tuplefy
  507. def subreddit_nav(data, subreddit, option=None, time=None):
  508. buttons = ()
  509. target = f"r/{subreddit}" if subreddit else ""
  510. if data["data"]["before"]:
  511. buttons += before_link(data, target, option, time)
  512. if data["data"]["after"]:
  513. buttons += after_link(data, target, option, time)
  514. return div(Class="nav")(buttons) if buttons else ()
  515. @tuplefy
  516. def search_nav(data, subreddit, params):
  517. buttons = ()
  518. target = f"/r/{subreddit}/search" if subreddit else "/search"
  519. if g(data, "data.before"):
  520. buttons += search_before_link(data, target, params)
  521. if g(data, "data.after"):
  522. buttons += search_after_link(data, target, params)
  523. return div(Class="nav")(buttons) if buttons else None
  524. @tuplefy
  525. def domain_nav(data, domain, option=None, time=None):
  526. buttons = ()
  527. target = f"domain/{domain}"
  528. if data["data"]["before"]:
  529. buttons += before_link(data, target, option, time)
  530. if data["data"]["after"]:
  531. buttons += after_link(data, target, option, time)
  532. return div(Class="nav")(buttons) if buttons else ()
  533. @tuplefy
  534. def user_nav(data, user, option=None, time=None):
  535. buttons = ()
  536. target = f"u/{user}"
  537. if data["data"]["before"]:
  538. buttons += user_before_link(data, target, option, time)
  539. if data["data"]["after"]:
  540. buttons += user_after_link(data, target, option, time)
  541. return div(Class="nav")(buttons) if buttons else ()
  542. @tuplefy
  543. def multi_nav(data, user, multi, option=None, time=None):
  544. buttons = ()
  545. target = f"u/{user}/m/{multi}"
  546. if data["data"]["before"]:
  547. buttons += user_before_link(data, target, option, time)
  548. if data["data"]["after"]:
  549. buttons += user_after_link(data, target, option, time)
  550. return div(Class="nav")(buttons) if buttons else ()
  551. def page_header(subreddit=None, user=None, multi=None, domain=None):
  552. header_ = (a(Class="main-link",href="/")("kddit"),)
  553. if subreddit:
  554. header_ += (a(Class="subreddit-link", href=f"/r/{subreddit}")(f"r/{subreddit}"),)
  555. elif multi and user:
  556. header_ += (a(Class="subreddit-link", href=f"/u/{user}/m/{multi}")(f"u/{user}/m/{multi}"),)
  557. elif user:
  558. header_ += (a(Class="subreddit-link", href=f"/u/{user}")(f"u/{user}"),)
  559. elif domain:
  560. header_ += (a(Class="subreddit-link", href=f"/domain/{domain}")(f"domain/{domain}"),)
  561. return header_
  562. def error_page(error):
  563. title_ = f"{error.status}!"
  564. output = h1(title_)
  565. header_ = page_header()
  566. return page(title_, header_, output)