# html.py — pyhtml-based HTML rendering for kddit
  1. from pyhtml import *
  2. from html import unescape
  3. from bs4 import BeautifulSoup
  4. from glom import glom as g
  5. from glom import Coalesce
  6. from kddit.settings import *
  7. from urllib.parse import urlencode
  8. from kddit.utils import get_time, human_format, preview_re, external_preview_re, builder, processing_re, video_re, image_re
  9. from kddit.utils import tuplefy, get_metadata, replace_tag
# Shared page fragments: the empty-listing placeholder, static asset <link>s,
# the viewport meta tag, and the default <head> contents used on every page.
nothing = (p("there doesn't seem to be anything here"),)
style_css = link(rel="stylesheet", type="text/css", href="/static/style.css")
slider_css = link(rel="stylesheet", type="text/css", href="/static/slider.css")
favicon = link(rel="icon", href="/static/favicon.svg")
viewport = meta(name="viewport", content_="width=device-width, initial-scale=1.0")
default_head = (style_css, slider_css, favicon, viewport)
class progress(Tag):
    # HTML <progress> needs an explicit closing tag; pyhtml has no builtin for it.
    self_closing = False
class svg(Tag):
    # <svg> must wrap child elements, so it is not self-closing.
    self_closing = False
class path(Tag):
    # <path> rendered with an explicit closing tag for parser compatibility.
    self_closing = False
  22. def subreddit_link(sub):
  23. return a(Class="sub-link", href=f"/r/{sub}")(f"r/{sub}")
  24. def header_div(*args):
  25. return div(Class="header")(*args)
  26. def container_div(*args):
  27. return div(Class="container")(*args)
  28. def content_div(*args):
  29. return div(Class="content")(*args)
  30. def post_div(*args):
  31. return div(Class="post")(*args)
  32. def inner_post_div(*args):
  33. return div(Class="inner-post")(*args)
  34. def media_div(*args):
  35. return div(Class="media")(*args)
  36. def menu_div(*args):
  37. return div(Class="menu")(*args)
  38. @tuplefy
  39. def post_info_div(*args):
  40. return div(Class="post-info")(*args)
  41. def post_content_div(*args):
  42. return div(Class="post-content")(*args)
  43. def comment_content_div(*args):
  44. return div(Class="comment-content")(*args)
  45. def slider(arg):
  46. mask = div(Class="css-slider-mask")
  47. ul_ = ul(Class="css-slider with-responsive-images")
  48. return builder(mask, ul_, arg)
  49. def slider_media(arg):
  50. slider = li(Class="slide", tabindex=1)
  51. outer = span(Class="slide-outer")
  52. inner = span(Class="slide-inner")
  53. gfx = span(Class="slide-gfx")(arg)
  54. return builder(slider, outer, inner, gfx)
  55. def nsfw_label(arg):
  56. return label(input_(Class="nsfw", type="checkbox"),arg)
  57. def get_thumbnail(data):
  58. thumbnail = g(data, Coalesce("preview.images.-1.source.url",
  59. "secure_media.oembed.thumbnail_url",
  60. ), default="")
  61. return f"/proxy/{unescape(thumbnail)}" if thumbnail else None
  62. def get_video(data):
  63. is_gif = g(data, Coalesce("media.reddit_video.is_gif", "preview.reddit_video_preview.is_gif") , default=False)
  64. url = g(data, Coalesce("media.reddit_video.fallback_url", "preview.reddit_video_preview.fallback_url", "url"))
  65. return f"/video/{url}" if not is_gif else f"/proxy/{url}"
@tuplefy
def alternate_video(data, url, over_18=False):
    """Alternate /video/ embed for external URLs.

    Currently disabled: returns None immediately. The body below is kept
    deliberately for when the feature is re-enabled.
    """
    return None # disabling for now
    opts = {}
    opts["src"] = f"/video/{url}"
    opts["controls"] = ""
    # NSFW and gated: never preload; otherwise defer with a poster if we have one.
    if nsfw(data) and over_18:
        opts["preload"] = "none"
    elif thumbnail := get_thumbnail(data):
        opts["preload"] = "none"
        opts["poster"] = thumbnail
    else:
        opts["preload"] = "metadata"
    video_ = media_div(video(**opts))
    return video_
  81. def nsfw(data):
  82. return data.get("over_18")
  83. @tuplefy
  84. def reddit_video(data, over_18=False):
  85. opts = {"controls":""}
  86. opts["preload"] = "none"
  87. opts["src"] = get_video(data)
  88. if not (nsfw(data) and over_18):
  89. opts["poster"] = get_thumbnail(data)
  90. video_ = video(**opts)
  91. output = media_div(video_)
  92. return output
  93. @tuplefy
  94. def reddit_embed_video(url, over_18=False):
  95. opts = {"controls":""}
  96. opts["preload"] = "none" if over_18 else "auto"
  97. opts["src"] = f'/video/{url}'
  98. video_ = video(**opts)
  99. output = media_div(video_)
  100. return output
  101. @tuplefy
  102. def reddit_image(data, url=None, over_18=False, text=None):
  103. url = url or unescape(g(data, Coalesce("preview.images.-1.variants.gif.source.url", "preview.images.-1.source.url", "url")))
  104. image_ = media_div(img(src=f'/proxy/{url}', loading="lazy"), em(text))
  105. if nsfw(data) and over_18:
  106. output = nsfw_label(image_)
  107. else:
  108. output = image_
  109. return output
  110. def gallery(data, over_18=False):
  111. output = ()
  112. images = ()
  113. for item in reversed(g(data,"gallery_data.items", default=[])):
  114. media_id = item["media_id"]
  115. url = get_metadata(data, media_id)
  116. if url:
  117. images += reddit_image(data, url, over_18)
  118. if images:
  119. output += slider((slider_media(media) for media in images))
  120. return output
  121. def page(title_, header_, content_):
  122. head_ = head(title(unescape(title_)), default_head)
  123. body_ = (header_div(header_), container_div(content_div(content_)))
  124. output = html(head_, body_)
  125. return output
def post_content(data, over_18):
    """Selftext body with reddit-hosted media links swapped for inline embeds.

    Parses `selftext_html` and rewrites three patterns in place:
    v.redd.it anchors -> <video>, preview-image anchors -> proxied <img>,
    and "processing" <em> placeholders -> images resolved via media metadata.
    Returns a one-element tuple holding the post-content div.
    """
    output = ()
    text = unescape(data["selftext_html"])
    soup = BeautifulSoup(text, "html.parser")
    # Inline videos: the anchor's *parent* element is replaced, so the link text is dropped.
    for video_link in soup.find_all("a", href=video_re):
        url = video_link.attrs["href"]
        name = video_re.match(url).group(1)
        r_video = reddit_embed_video(f"https://v.redd.it/{name}", over_18=over_18)
        replace_tag(video_link.parent, r_video)
    # Preview images: keep the link text as a caption unless it is just the URL itself.
    for preview_link in soup.find_all("a", href=preview_re):
        url = preview_link.attrs["href"]
        preview_text = preview_link.text
        caption = preview_text if preview_text != url else None
        r_image = reddit_image(data, url, over_18, text=caption)
        replace_tag(preview_link.parent, r_image)
    # "Processing" placeholders: resolve the media id against the post's metadata.
    for preview_em in soup.find_all("em", string=processing_re):
        name = processing_re.match(preview_em.text).group(1)
        if url := get_metadata(data, name):
            r_image = reddit_image(data, url, over_18)
            replace_tag(preview_em, r_image)
    output += (post_content_div(Safe(str(soup))),)
    return output
def comment_content(data, over_18):
    """Comment body HTML with preview/external images inlined.

    Rewrites preview-image and direct-image anchors into proxied <img>
    embeds (replacing the anchor itself — unlike post_content, which
    replaces the anchor's parent), routes external-preview <img> srcs
    through /proxy/, and resolves "processing" <em> placeholders via
    media metadata. Returns the comment-content div.
    """
    text = unescape(data["body_html"])
    soup = BeautifulSoup(text, "html.parser")
    for preview_link in soup.find_all("a", href=preview_re):
        url = preview_link.attrs["href"]
        preview_text = preview_link.text
        # Only keep the link text as a caption when it differs from the URL.
        caption = preview_text if preview_text != url else None
        r_image = reddit_image(data, url, over_18, text=caption)
        replace_tag(preview_link, r_image)
    for image_link in soup.find_all("a", href=image_re):
        url = image_link.attrs["href"]
        preview_text = image_link.text
        caption = preview_text if preview_text != url else None
        r_image = reddit_image(data, url, over_18, text=caption)
        replace_tag(image_link, r_image)
    # External previews keep their <img> tag; only the src is proxied.
    for preview_img in soup.find_all("img", src=external_preview_re):
        url = preview_img.attrs["src"]
        preview_img.attrs["src"] = f'/proxy/{url}'
    for preview_em in soup.find_all("em", string=processing_re):
        name = processing_re.match(preview_em.text).group(1)
        if url := get_metadata(data, name):
            r_image = reddit_image(data, url, over_18)
            replace_tag(preview_em, r_image)
    return builder(comment_content_div, Safe, str, soup)
  172. @tuplefy
  173. def subreddit_menu(option, subreddit):
  174. output = []
  175. focused = option or DEFAULT_OPTION
  176. for o in SUBREDDIT_OPTIONS:
  177. focus = o == focused
  178. sub = f"/r/{subreddit}" if subreddit else ""
  179. url = f"{sub}/{o}"
  180. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  181. output.append(a_)
  182. return menu_div(output)
  183. @tuplefy
  184. def search_sort_menu(subreddit, params):
  185. output = []
  186. focused = params.get("sort", "relevance")
  187. for o in SEARCH_SORT:
  188. query = params.copy()
  189. query["sort"] = o
  190. focus = o == focused
  191. sub = f"/r/{subreddit}" if subreddit else ""
  192. url = f"{sub}/search?{urlencode(query)}"
  193. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  194. output.append(a_)
  195. return menu_div(output)
  196. @tuplefy
  197. def search_time_menu(subreddit, params):
  198. output = []
  199. focused = params.get("t", "hour")
  200. for i, v in TIME_OPTIONS.items():
  201. query = params.copy()
  202. query["t"] = i
  203. focus = i == focused
  204. sub = f"/r/{subreddit}" if subreddit else ""
  205. url = f"{sub}/search?{urlencode(query)}"
  206. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  207. output.append(a_)
  208. return menu_div(output)
  209. @tuplefy
  210. def domain_menu(option, domain):
  211. output = []
  212. focused = option or DEFAULT_OPTION
  213. for o in SUBREDDIT_OPTIONS:
  214. focus = o == focused
  215. url = f"/domain/{domain}/{o}"
  216. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  217. output.append(a_)
  218. return menu_div(output)
  219. @tuplefy
  220. def subreddit_sort_menu(subreddit, option, time=None):
  221. p = f"/r/{subreddit}" if subreddit else ""
  222. focused = time or "hour"
  223. output = []
  224. for i, v in TIME_OPTIONS.items():
  225. focus = i == focused
  226. url = f'{p}/{option}?t={i}'
  227. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  228. output.append(a_)
  229. return menu_div(output)
  230. @tuplefy
  231. def domain_sort_menu(domain, option, time=None):
  232. output = []
  233. focused = time or "hour"
  234. for i, v in TIME_OPTIONS.items():
  235. focus = i == focused
  236. url = f"/domain/{domain}/{option}?t={i}"
  237. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  238. output.append(a_)
  239. return menu_div(output)
  240. @tuplefy
  241. def multi_sort_menu(user, multi, option, time=None):
  242. p = f"/u/{user}/m/{multi}"
  243. focused = time or "hour"
  244. output = []
  245. for i, v in TIME_OPTIONS.items():
  246. focus = i == focused
  247. url = f'{p}/{option}?t={i}'
  248. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  249. output.append(a_)
  250. return menu_div(output)
  251. @tuplefy
  252. def user_menu(option, user):
  253. output = []
  254. for o in USER_OPTIONS:
  255. focus = option == o or (not option and o == DEFAULT_OPTION)
  256. link_ = f"/u/{user}/{o}"
  257. if focus:
  258. a_ = a(href=link_, Class="focus")(o)
  259. else:
  260. a_ = a(href=link_)(o)
  261. output.append(a_)
  262. return menu_div(output)
  263. @tuplefy
  264. def user_sort_menu(option, sort, user):
  265. output = []
  266. focused = sort or DEFAULT_OPTION
  267. for o in USER_SORT:
  268. focus = o == focused
  269. link_ = f"/u/{user}/{option}/?sort={o}"
  270. a_ = a(href=link_, Class="focus")(o) if focus else a(href=link_)(o)
  271. output.append(a_)
  272. return menu_div(output)
  273. @tuplefy
  274. def user_comments_sort_menu(path, sort):
  275. output = []
  276. focused = sort or DEFAULT_OPTION
  277. for o in USER_COMMENT_SORT:
  278. focus = o == focused
  279. link_ = f"{path}/?sort={o}"
  280. a_ = a(href=link_, Class="focus")(o) if focus else a(href=link_)(o)
  281. output.append(a_)
  282. return menu_div(output)
  283. @tuplefy
  284. def multi_menu(option, user, multi):
  285. output = []
  286. for o in SUBREDDIT_OPTIONS:
  287. focus = option == o or (not option and o == DEFAULT_OPTION)
  288. link_ = f"/u/{user}/m/{multi}/{o}"
  289. if focus:
  290. a_ = a(href=link_, Class="focus")(o)
  291. else:
  292. a_ = a(href=link_)(o)
  293. output.append(a_)
  294. return menu_div(output)
  295. @tuplefy
  296. def before_link(data, target, option, t=None):
  297. option = option or ""
  298. sub = f"/{target}" if target else ""
  299. time = f"t={t}&" if t else ""
  300. url = f'{sub}/{option}?{time}count=25&before={data["data"]["before"]}'
  301. a_ = a(Class="button", href=url)("<prev")
  302. return a_
  303. @tuplefy
  304. def after_link(data, target, option, t=None):
  305. option = option or ""
  306. sub = f"/{target}" if target else ""
  307. time = f"t={t}&" if t else ""
  308. url = f'{sub}/{option}?{time}count=25&after={data["data"]["after"]}'
  309. a_ = a(Class="button", href=url)("next>")
  310. return a_
  311. @tuplefy
  312. def search_before_link(data, target, params):
  313. query = params.copy()
  314. query.pop("after", None)
  315. query["before"] = g(data,"data.before")
  316. url = f'{target}/?{urlencode(query)}'
  317. a_ = a(Class="button", href=url)("<prev")
  318. return a_
  319. @tuplefy
  320. def search_after_link(data, target, params):
  321. query = params.copy()
  322. query.pop("before", None)
  323. query["after"] = g(data, "data.after")
  324. url = f'{target}/?{urlencode(query)}'
  325. a_ = a(Class="button", href=url)("next>")
  326. return a_
  327. @tuplefy
  328. def user_before_link(data, target, option, sort=None):
  329. option = option or ""
  330. sub = f"/{target}" if target else ""
  331. time = f"sort={sort}&" if sort else ""
  332. url = f'{sub}/{option}?{time}count=25&before={data["data"]["before"]}'
  333. a_ = a(Class="button", href=url)("<prev")
  334. return a_
  335. @tuplefy
  336. def user_after_link(data, target, option, sort=None):
  337. option = option or ""
  338. sub = f"/{target}" if target else ""
  339. time = f"sort={sort}&" if sort else ""
  340. url = f'{sub}/{option}?{time}count=25&after={data["data"]["after"]}'
  341. a_ = a(Class="button", href=url)("next>")
  342. return a_
def reddit_media(data, over_18):
    """Primary media element for a post: video, image, or nothing.

    NOTE(review): the middle branch returns a bare empty tuple, while the
    other paths wrap the result in a post_content_div — confirm callers
    accept both shapes.
    """
    output = ()
    if data["is_video"] or g(data, "preview.reddit_video_preview", default=None):
        output += reddit_video(data, over_18=over_18)
    elif (data.get("post_hint") and data.get("post_hint") != "image") or not data.get('is_reddit_media_domain'):
        # Non-image hint (e.g. "link") or media not hosted by reddit: nothing embeddable.
        return output
    else:
        output += reddit_image(data, over_18=over_18)
    return post_content_div(output)
def reddit_content(data, over_18=False):
    """Dispatch a post's media rendering: gallery, single media element, or None.

    NOTE(review): the elif mixes `and`/`or` without parentheses — the
    trailing `or is_reddit_media_domain` applies against the entire
    preceding conjunction; confirm this precedence is intended.
    """
    if data.get("is_gallery"):
        output = gallery(data, over_18=over_18)
    elif not data.get("is_self") and (data.get("thumbnail") and data.get("thumbnail") not in ("self", "spoiler")) or data.get("is_reddit_media_domain"):
        output = reddit_media(data, over_18)
    else:
        output = None
    return output
  360. def rich_text(richtext, text):
  361. for item in richtext:
  362. a_ = item.get("a")
  363. u = item.get("u")
  364. if not (a_ or u):
  365. continue
  366. text = text.replace(a_, f'<span class="flair-emoji" style="background-image:url(/proxy/{u});"></span>')
  367. return text
  368. def domain_link(data):
  369. if data.get("is_self"):
  370. return None
  371. elif data.get("author") == "[deleted]":
  372. return None
  373. elif data.get("crosspost_parent_list"):
  374. return None
  375. domain = data.get("domain")
  376. domain_url = f"/domain/{domain}"
  377. return ("(", a(href=domain_url)(f"{domain}"), ")")
@tuplefy
def post(data, over_18=False, from_user=False):
    """Render one post card: votes column plus info line, title, flair, and content."""
    content = ()
    # External submissions (not self-text, not crossposts) lead with the raw link.
    if not data.get("is_self") and not data.get("crosspost_parent_list"):
        content += (a(Class="post-link", href=data["url"])(data["url"]),)
    if data.get("selftext_html"):
        content += post_content(data, over_18)
    if data.get("crosspost_parent_list"):
        # NOTE(review): the positional True sets over_18 (not from_user) for the
        # embedded crosspost — confirm this is intended.
        content += post(data['crosspost_parent_list'][0], True)
    elif data.get("poll_data"):
        content += poll(data)
    elif data.get("removed_by_category") or (data.get("author") == "[deleted]"):
        pass  # removed/deleted posts get no media body
    elif result := reddit_content(data, over_18):
        content += (result,)
    author = data.get("author")
    permalink = data.get("permalink")
    title_ = unescape(data.get("title"))
    domain = domain_link(data)
    votes = human_format(int(data.get("ups") or data.get("downs")))
    author_info = ("Posted by", a(href=f'/u/{author}')(f'u/{author}'))
    title_link = builder(a(href=permalink), Safe, b, title_)
    info_args = (subreddit_link(data["subreddit"]), "•", author_info, get_time(data["created"]), domain)
    if from_user:
        # On user pages, add a direct link to the post's comment thread.
        user_comment_url = f"/user/{author}/comments/{data['id']}/_"
        info_args += (a(href=user_comment_url)("🔗"),)
    post_info = post_info_div(*info_args)
    flair = post_flair(data)
    inner = (title_link, flair, content)
    # `votes` is rebound here: from the formatted count string to the rendered column.
    votes = div(Class="votes")(
        span(Class="icon icon-upvote"),
        votes,
    )
    return post_div(votes, inner_post_div(post_info, inner))
@tuplefy
def poll(data):
    """Poll block: <progress> bars when vote counts are visible, disabled radio rows otherwise."""
    poll_options = ()
    tvotes = g(data, "poll_data.total_vote_count")
    for opt in data["poll_data"]["options"]:
        if "vote_count" in opt:
            votes = opt["vote_count"]
            cin = (
                p(f'{opt["text"]} : {votes}'),
                progress(
                    value=votes,
                    max=tvotes))
            poll_options += cin
        else:
            # NOTE(review): these parentheses do not form a tuple — `cin` is a
            # single p(...) element, wrapped as a 1-tuple below. Confirm intended.
            cin = (p(input_(disabled="", type="radio"), opt["text"]))
            poll_options += (cin,)
    div_ = div(Class="poll")(poll_options)
    return div_
  430. def posts(data, over_18=False):
  431. posts_ = ()
  432. for children in g(data, "data.children"):
  433. data = children["data"]
  434. posts_ += post(data, over_18)
  435. return posts_
  436. @tuplefy
  437. def mixed_content(data, over_18, from_user = False):
  438. output = ()
  439. for children in g(data, "data.children"):
  440. if children["kind"] == "t1":
  441. output += (comment(children, False, from_user),)
  442. elif children["kind"] == "t3":
  443. output += (post(children["data"], over_18, from_user),)
  444. return output
def comment_flair(data):
    """Flair <span> for a comment author, or None when there is no flair text.

    NOTE(review): when richtext exists but `author_flair_text` is None,
    rich_text receives None as the base string — confirm upstream always
    pairs the two fields.
    """
    flair_text = g(data, "author_flair_text", default=None)
    if flair_richtext := data.get("author_flair_richtext"):
        flair_text = rich_text(flair_richtext, flair_text)
    return builder(span(Class="flair"), Safe, unescape, flair_text) if flair_text else None
def post_flair(data):
    """Flair <span> for a post, or None when there is no flair text.

    NOTE(review): mirrors comment_flair — if `link_flair_text` is None while
    richtext exists, rich_text receives None; confirm the fields are paired.
    """
    flair_text = g(data, "link_flair_text", default=None)
    if flair_richtext := data.get("link_flair_richtext"):
        flair_text = rich_text(flair_richtext, flair_text)
    return builder(span(Class="flair"), Safe, unescape, flair_text) if flair_text else None
  455. def comment(data, full=False, from_user=False):
  456. comment_ = data["data"]
  457. flair = comment_flair(comment_)
  458. if full:
  459. title_ = comment_["link_title"]
  460. header_ = ()
  461. header_ += ("by", a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}'),flair)
  462. header_ += ("in", subreddit_link(comment_["subreddit"]))
  463. header_ += (get_time(comment_["created"]),)
  464. if from_user:
  465. user_comment_url = f"/u/{comment_['author']}/comments/{data['id']}/comment/{comment_['id']}"
  466. header_ += a(href=user_comment_url)("🔗")
  467. inner = (
  468. a(href=comment_["permalink"])(b(title_)),
  469. div(Class="comment-info")(header_),
  470. comment_content(comment_, True)
  471. )
  472. return div(Class="comment")(inner)
  473. else:
  474. replies_ = replies(data)
  475. a_ = a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}')
  476. link_ = a(href=comment_["permalink"])("🔗")
  477. points = (span(human_format(int(comment_["ups"] or comment_["downs"]))), "points", "·" )
  478. inner = (div(Class="comment-info")(
  479. a_,flair, points,
  480. get_time(comment_["created"]), link_),
  481. comment_content(comment_, True),
  482. replies_)
  483. return div(Class="comment")(inner)
  484. @tuplefy
  485. def reply(data):
  486. comment_ = data["data"]
  487. flair = comment_flair(comment_)
  488. replies_ = replies(data)
  489. a_ = a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}')
  490. link_ = a(href=comment_["permalink"])("🔗")
  491. points = (span(human_format(int(comment_["ups"] or comment_["downs"]))), "points", "·" )
  492. inner = (div(Class="comment-info")(
  493. a_,flair, points,
  494. get_time(comment_["created"]), link_),
  495. comment_content(comment_, True),
  496. replies_)
  497. return div(Class="reply")(inner)
  498. @tuplefy
  499. def comments(data_list, from_user=False):
  500. comments = ()
  501. for data in data_list:
  502. if data['kind'] == "more":
  503. comments += (p("..."),)
  504. else:
  505. comments += (comment(data, False, from_user),)
  506. return div(Class="comments")(comments)
  507. def replies(data):
  508. replies_ = ()
  509. if data['kind'] == "more":
  510. replies_ += (p("..."),)
  511. for children in g(data, "data.replies.data.children", default=[]):
  512. if children['kind'] == "more":
  513. replies_ += (p("..."),)
  514. else:
  515. replies_ += reply(children)
  516. return ul(replies_) if replies else None
  517. @tuplefy
  518. def subreddit_nav(data, subreddit, option=None, time=None):
  519. buttons = ()
  520. target = f"r/{subreddit}" if subreddit else ""
  521. if data["data"]["before"]:
  522. buttons += before_link(data, target, option, time)
  523. if data["data"]["after"]:
  524. buttons += after_link(data, target, option, time)
  525. return div(Class="nav")(buttons) if buttons else ()
  526. @tuplefy
  527. def search_nav(data, subreddit, params):
  528. buttons = ()
  529. target = f"/r/{subreddit}/search" if subreddit else "/search"
  530. if g(data, "data.before"):
  531. buttons += search_before_link(data, target, params)
  532. if g(data, "data.after"):
  533. buttons += search_after_link(data, target, params)
  534. return div(Class="nav")(buttons) if buttons else None
  535. @tuplefy
  536. def domain_nav(data, domain, option=None, time=None):
  537. buttons = ()
  538. target = f"domain/{domain}"
  539. if data["data"]["before"]:
  540. buttons += before_link(data, target, option, time)
  541. if data["data"]["after"]:
  542. buttons += after_link(data, target, option, time)
  543. return div(Class="nav")(buttons) if buttons else ()
  544. @tuplefy
  545. def user_nav(data, user, option=None, time=None):
  546. buttons = ()
  547. target = f"u/{user}"
  548. if data["data"]["before"]:
  549. buttons += user_before_link(data, target, option, time)
  550. if data["data"]["after"]:
  551. buttons += user_after_link(data, target, option, time)
  552. return div(Class="nav")(buttons) if buttons else ()
  553. @tuplefy
  554. def multi_nav(data, user, multi, option=None, time=None):
  555. buttons = ()
  556. target = f"u/{user}/m/{multi}"
  557. if data["data"]["before"]:
  558. buttons += user_before_link(data, target, option, time)
  559. if data["data"]["after"]:
  560. buttons += user_after_link(data, target, option, time)
  561. return div(Class="nav")(buttons) if buttons else ()
  562. def page_header(subreddit=None, user=None, multi=None, domain=None):
  563. header_ = (a(Class="main-link",href="/")("kddit"),)
  564. if subreddit:
  565. header_ += (a(Class="subreddit-link", href=f"/r/{subreddit}")(f"r/{subreddit}"),)
  566. elif multi and user:
  567. header_ += (a(Class="subreddit-link", href=f"/u/{user}/m/{multi}")(f"u/{user}/m/{multi}"),)
  568. elif user:
  569. header_ += (a(Class="subreddit-link", href=f"/u/{user}")(f"u/{user}"),)
  570. elif domain:
  571. header_ += (a(Class="subreddit-link", href=f"/domain/{domain}")(f"domain/{domain}"),)
  572. return header_
  573. def error_page(error):
  574. title_ = f"{error.status}!"
  575. output = h1(title_)
  576. header_ = page_header()
  577. return page(title_, header_, output)