html.py 19 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635
  1. from pyhtml import *
  2. from html import unescape, escape
  3. from bs4 import BeautifulSoup
  4. from glom import glom as g
  5. from glom import Coalesce
  6. from kddit.settings import *
  7. from urllib.parse import urlparse, parse_qs, urlencode
  8. from kddit.utils import get_time, human_format, preview_re, builder, processing_re
  9. from kddit.utils import tuplefy, get_metadata, replace_tag
# Shared page fragments: empty-listing notice plus the <head> tags
# (stylesheets, favicon, viewport) bundled into default_head for every page.
nothing = (p("there doesn't seem to be anything here"),)
style_css = link(rel="stylesheet", type="text/css", href="/static/style.css")
slider_css = link(rel="stylesheet", type="text/css", href="/static/slider.css")
favicon = link(rel="icon", href="/static/favicon.svg")
viewport = meta(name="viewport", content_="width=device-width, initial-scale=1.0")
default_head = (style_css, slider_css, favicon, viewport)
  16. class progress(Tag):
  17. self_closing = False
  18. class svg(Tag):
  19. self_closing = False
  20. class path(Tag):
  21. self_closing = False
  22. def subreddit_link(sub):
  23. return a(Class="sub-link", href=f"/r/{sub}")(f"r/{sub}")
  24. def header_div(*args):
  25. return div(Class="header")(*args)
  26. def container_div(*args):
  27. return div(Class="container")(*args)
  28. def content_div(*args):
  29. return div(Class="content")(*args)
  30. def post_div(*args):
  31. return div(Class="post")(*args)
  32. def inner_post_div(*args):
  33. return div(Class="inner-post")(*args)
  34. def media_div(*args):
  35. return div(Class="media")(*args)
  36. def menu_div(*args):
  37. return div(Class="menu")(*args)
  38. def awards_div(*args):
  39. return div(Class="awards")(*args)
  40. def post_info_div(*args):
  41. return div(Class="post-info")(*args)
  42. def post_content_div(*args):
  43. return div(Class="post-content")(*args)
  44. def comment_content_div(*args):
  45. return div(Class="comment-content")(*args)
  46. def slider(arg):
  47. mask = div(Class="css-slider-mask")
  48. ul_ = ul(Class="css-slider with-responsive-images")
  49. return builder(mask, ul_, arg)
  50. def slider_media(arg):
  51. slider = li(Class="slide", tabindex=1)
  52. outer = span(Class="slide-outer")
  53. inner = span(Class="slide-inner")
  54. gfx = span(Class="slide-gfx")(arg)
  55. return builder(slider, outer, inner, gfx)
  56. def nsfw_label(arg):
  57. return label(input_(Class="nsfw", type="checkbox"),arg)
  58. def get_thumbnail(data):
  59. thumbnail = g(data, Coalesce("secure_media.oembed.thumbnail_url", "preview.images.0.source.url"), default="")
  60. return unescape(thumbnail)
@tuplefy
def alternate_video(data, url, safe=False):
    """Proxied <video> element for an external video URL.

    Currently disabled: returns None immediately.  Everything after the
    early return is dead code, kept for when the feature is re-enabled.
    """
    return None # disabling for now
    opts = {}
    opts["src"] = f"/video/{url}"
    opts["controls"] = ""
    thumbnail = get_thumbnail(data)
    if nsfw(data) and safe:
        # NSFW in safe mode: no poster, don't preload anything
        opts["preload"] = "none"
    elif thumbnail:
        opts["preload"] = "none"
        opts["poster"] = f"/proxy/{thumbnail}"
    else:
        opts["preload"] = "metadata"
    video_ = media_div(video(**opts))
    return video_
  77. def nsfw(data):
  78. return data.get("over_18")
  79. @tuplefy
  80. def reddit_video(data, thumbnail=None, safe=False):
  81. url = g(data, "url")
  82. opts = {"controls":"", "src":f"/video/{url}"}
  83. opts["preload"] = "none"
  84. if not (nsfw(data) and safe):
  85. thumbnail = get_thumbnail(data)
  86. opts["poster"] = f"/proxy/{thumbnail}"
  87. video_ = video(**opts)
  88. output = media_div(video_)
  89. return output
  90. @tuplefy
  91. def reddit_image(data, url=None, safe=False, text=None):
  92. url = url or data["url"]
  93. image_ = media_div(img(src=f'/proxy/{url}'), em(text))
  94. if nsfw(data) and safe:
  95. output = nsfw_label(image_)
  96. else:
  97. output = image_
  98. return output
  99. def gallery(data, safe=False):
  100. output = (a(Class="post-link",href=data["url"])(data["url"]),)
  101. images = ()
  102. for item in reversed(g(data,"gallery_data.items", default=[])):
  103. media_id = item["media_id"]
  104. url = get_metadata(data, media_id)
  105. if url:
  106. images += reddit_image(data, url, safe)
  107. if images:
  108. output += slider((slider_media(media) for media in images))
  109. return output
  110. def page(title_, header_, content_):
  111. head_ = head(title(unescape(title_)), default_head)
  112. body_ = (header_div(header_), container_div(content_div(content_)))
  113. output = html(head_, body_)
  114. return output
def post_content(data, safe):
    """Render a self-post body, swapping reddit preview links and
    "processing" placeholders for inline proxied images.

    Parses the unescaped selftext HTML, then:
      * replaces each <a href=preview…> link's parent tag with a proxied
        image, keeping the anchor text as a caption when it differs from
        the URL itself;
      * replaces <em> placeholders matching processing_re with the image
        resolved from the post's media metadata (skipped if unresolved).
    """
    text = unescape(data["selftext_html"])
    soup = BeautifulSoup(text, "html.parser")
    for preview_link in soup.find_all("a", href=preview_re):
        url = preview_link.attrs["href"]
        preview_text = preview_link.text
        # only caption when the anchor text isn't just the URL repeated
        caption = preview_text if preview_text != url else None
        r_image = reddit_image(data, url, safe, text=caption)
        replace_tag(preview_link.parent, r_image)
    for preview_em in soup.find_all("em", string=processing_re):
        name = processing_re.match(preview_em.text).group(1)
        if url := get_metadata(data, name):
            r_image = reddit_image(data, url, safe)
            replace_tag(preview_em , r_image)
    return builder(post_content_div, Safe,str,soup)
  130. def awards(data):
  131. if not "all_awardings" in data:
  132. return
  133. output = []
  134. url = f'/{data["subreddit_name_prefixed"]}/gilded'
  135. for awarding in data["all_awardings"]:
  136. award = [img(src=f'/proxy/{unescape(awarding["resized_icons"][0]["url"])}', alt=awarding["name"])]
  137. count = awarding["count"]
  138. name = escape(awarding["name"])
  139. if count > 1:
  140. award.append(span(count))
  141. a_ = a(href=url, Class="awarding-icon", title=name)(award)
  142. output.append(a_)
  143. return awards_div(output)
  144. @tuplefy
  145. def subreddit_menu(option, subreddit):
  146. output = []
  147. focused = option or DEFAULT_OPTION
  148. for o in SUBREDDIT_OPTIONS:
  149. focus = o == focused
  150. sub = f"/r/{subreddit}" if subreddit else ""
  151. url = f"{sub}/{o}"
  152. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  153. output.append(a_)
  154. return menu_div(output)
  155. @tuplefy
  156. def search_sort_menu(subreddit, params):
  157. output = []
  158. focused = params.get("sort", "relevance")
  159. for o in SEARCH_SORT:
  160. query = params.copy()
  161. query["sort"] = o
  162. focus = o == focused
  163. sub = f"/r/{subreddit}" if subreddit else ""
  164. url = f"{sub}/search?{urlencode(query)}"
  165. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  166. output.append(a_)
  167. return menu_div(output)
  168. @tuplefy
  169. def search_time_menu(subreddit, params):
  170. output = []
  171. focused = params.get("t", "hour")
  172. for i, v in TIME_OPTIONS.items():
  173. query = params.copy()
  174. query["t"] = i
  175. focus = i == focused
  176. sub = f"/r/{subreddit}" if subreddit else ""
  177. url = f"{sub}/search?{urlencode(query)}"
  178. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  179. output.append(a_)
  180. return menu_div(output)
  181. @tuplefy
  182. def domain_menu(option, domain):
  183. output = []
  184. focused = option or DEFAULT_OPTION
  185. for o in SUBREDDIT_OPTIONS:
  186. focus = o == focused
  187. url = f"/domain/{domain}/{o}"
  188. a_ = a(href=url, Class="focus")(o) if focus else a(href=url)(o)
  189. output.append(a_)
  190. return menu_div(output)
  191. @tuplefy
  192. def subreddit_sort_menu(subreddit, option, time=None):
  193. p = f"/r/{subreddit}" if subreddit else ""
  194. focused = time or "hour"
  195. output = []
  196. for i, v in TIME_OPTIONS.items():
  197. focus = i == focused
  198. url = f'{p}/{option}?t={i}'
  199. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  200. output.append(a_)
  201. return menu_div(output)
  202. @tuplefy
  203. def domain_sort_menu(domain, option, time=None):
  204. output = []
  205. focused = time or "hour"
  206. for i, v in TIME_OPTIONS.items():
  207. focus = i == focused
  208. url = f"/domain/{domain}/{option}?t={i}"
  209. a_ = a(Class="focus",href=url)(v) if focus else a(href=url)(v)
  210. output.append(a_)
  211. return menu_div(output)
  212. @tuplefy
  213. def user_menu(option, user):
  214. output = []
  215. for o in USER_OPTIONS:
  216. focus = option == o or (not option and o == DEFAULT_OPTION)
  217. link_ = f"/u/{user}/{o}"
  218. if focus:
  219. a_ = a(href=link_, Class="focus")(o)
  220. else:
  221. a_ = a(href=link_)(o)
  222. output.append(a_)
  223. return menu_div(output)
  224. @tuplefy
  225. def user_sort_menu(option, sort, user):
  226. output = []
  227. focused = sort or DEFAULT_OPTION
  228. for o in USER_SORT:
  229. focus = o == focused
  230. link_ = f"/u/{user}/{option}/?sort={o}"
  231. a_ = a(href=link_, Class="focus")(o) if focus else a(href=link_)(o)
  232. output.append(a_)
  233. return menu_div(output)
  234. @tuplefy
  235. def before_link(data, target, option, t=None):
  236. option = option or ""
  237. sub = f"/{target}" if target else ""
  238. time = f"t={t}&" if t else ""
  239. url = f'{sub}/{option}?{time}count=25&before={data["data"]["before"]}'
  240. a_ = a(Class="button", href=url)("<prev")
  241. return a_
  242. @tuplefy
  243. def after_link(data, target, option, t=None):
  244. option = option or ""
  245. sub = f"/{target}" if target else ""
  246. time = f"t={t}&" if t else ""
  247. url = f'{sub}/{option}?{time}count=25&after={data["data"]["after"]}'
  248. a_ = a(Class="button", href=url)("next>")
  249. return a_
  250. @tuplefy
  251. def search_before_link(data, target, params):
  252. query = params.copy()
  253. query.pop("after", None)
  254. query["before"] = g(data,"data.before")
  255. url = f'{target}/?{urlencode(query)}'
  256. a_ = a(Class="button", href=url)("<prev")
  257. return a_
  258. @tuplefy
  259. def search_after_link(data, target, params):
  260. query = params.copy()
  261. query.pop("before", None)
  262. query["after"] = g(data, "data.after")
  263. url = f'{target}/?{urlencode(query)}'
  264. a_ = a(Class="button", href=url)("next>")
  265. return a_
  266. @tuplefy
  267. def user_before_link(data, target, option, sort=None):
  268. option = option or ""
  269. sub = f"/{target}" if target else ""
  270. time = f"sort={sort}&" if sort else ""
  271. url = f'{sub}/{option}?{time}count=25&before={data["data"]["before"]}'
  272. a_ = a(Class="button", href=url)("<prev")
  273. return a_
  274. @tuplefy
  275. def user_after_link(data, target, option, sort=None):
  276. option = option or ""
  277. sub = f"/{target}" if target else ""
  278. time = f"sort={sort}&" if sort else ""
  279. url = f'{sub}/{option}?{time}count=25&after={data["data"]["after"]}'
  280. a_ = a(Class="button", href=url)("next>")
  281. return a_
  282. def alternate_media(data, safe=False):
  283. pass
  284. def youtube_media(data, url, uri, safe):
  285. output = ()
  286. if uri.netloc == "youtu.be":
  287. output += alternate_video(data, url, safe)
  288. elif v := parse_qs(uri.query).get("v"):
  289. u = f"https://youtu.be/{v[0]}"
  290. output += alternate_video(data, u, safe)
  291. return output
  292. def imgur_media(data, url, safe):
  293. if url.endswith(".gifv"):
  294. output = alternate_video(data, url, safe=safe)
  295. else:
  296. output = reddit_image(data, safe=safe)
  297. return output
  298. def alternate_content(data, safe=False):
  299. url = data["url"]
  300. output = (a(Class="post-link",href=url)(url),)
  301. uri = urlparse(url)
  302. netloc = uri.netloc
  303. if netloc in PROXY_ALLOW["youtube"]:
  304. output += youtube_media(data, url, uri, safe)
  305. elif netloc in PROXY_ALLOW["video"]:
  306. output += alternate_video(data, url, safe=safe)
  307. elif netloc in PROXY_ALLOW["imgur"]:
  308. output += imgur_media(data, url, safe)
  309. elif netloc in PROXY_ALLOW["image"]:
  310. output += reddit_image(data, safe=safe)
  311. return post_content_div(output)
  312. def reddit_media(data, safe):
  313. output = (a(Class="post-link", href=data["url"])(data["url"]),)
  314. if data["is_video"]:
  315. output += reddit_video(data, safe=safe)
  316. else:
  317. output += reddit_image(data, safe=safe)
  318. return post_content_div(output)
  319. def reddit_content(data, safe=False):
  320. if data["selftext_html"]:
  321. output = post_content(data, safe)
  322. elif data["is_reddit_media_domain"] and data["thumbnail"]:
  323. output = reddit_media(data, safe)
  324. elif data.get("is_gallery"):
  325. output = gallery(data, safe=safe)
  326. else:
  327. output = None
  328. return output
  329. def rich_text(richtext, text):
  330. for item in richtext:
  331. a_ = item.get("a")
  332. u = item.get("u")
  333. if not (a_ or u):
  334. continue
  335. text = text.replace(a_, f'<span class="flair-emoji" style="background-image:url(/proxy/{u});"></span>')
  336. return text
  337. def domain_link(data):
  338. if data.get("is_self"):
  339. return None
  340. domain = data.get("domain")
  341. domain_url = f"/domain/{domain}"
  342. return ("(", a(href=domain_url)(f"{domain}"), ")")
@tuplefy
def post(data, safe=False):
    """Render one post card: a vote column next to title, flair and content.

    Crossposts render their first parent; polls render the poll; otherwise
    reddit-hosted content is tried first with external-link content as the
    fallback.
    """
    if data.get("crosspost_parent_list"):
        # NOTE(review): the positional True is `safe` — crosspost bodies are
        # always rendered in safe mode; confirm that is intended.
        content = post(data['crosspost_parent_list'][0], True)
    elif data.get("poll_data"):
        content = poll(data)
    else:
        content = reddit_content(data, safe) or alternate_content(data, safe=safe)
    author = data.get("author")
    permalink = data.get("permalink")
    title_ = unescape(data.get("title"))
    domain = domain_link(data)
    # falls back to "downs" when "ups" is 0 or missing
    votes = human_format(int(data.get("ups") or data.get("downs")))
    author = ("Posted by", a(href=f'/u/{author}')(f'u/{author}'))
    title_link = builder(a(href=permalink),Safe,b,title_)
    post_info = post_info_div(subreddit_link(data["subreddit"]),"•", author, get_time(data["created"]), domain, awards(data))
    flair = post_flair(data)
    inner = (title_link, flair, content)
    votes = div(Class="votes")(span(Class="icon icon-upvote"), votes , span(Class="icon icon-downvote"))
    return post_div(votes, inner_post_div(post_info, inner))
def poll(data):
    """Render a poll: a <progress> bar with a count per voted option, or a
    disabled radio row when the option carries no "vote_count"."""
    options = ()
    tvotes = g(data,"poll_data.total_vote_count")
    for opt in data["poll_data"]["options"]:
        if "vote_count" in opt:
            votes = opt["vote_count"]
            cin = (
                p(f'{opt["text"]} : {votes}'),
                progress(
                    value=votes,
                    max=tvotes))
            options += cin
        else:
            # NOTE(review): these parens make `cin` a single element (not a
            # tuple), which is then wrapped below — confirm that is intended.
            cin = (p(input_(disabled="", type="radio"), opt["text"]))
            options += (cin,)
    div_ = div(Class="poll")(options)
    return div_
  380. def posts(data, safe=False):
  381. posts_ = ()
  382. for children in g(data, "data.children"):
  383. data = children["data"]
  384. posts_ += post(data, safe)
  385. return posts_
@tuplefy
def mixed_content(data, safe):
    """Render a listing mixing comments (kind "t1") and posts (kind "t3").

    NOTE(review): results are wrapped in an extra tuple here, unlike
    posts() which concatenates directly — verify the renderer flattens
    nested tuples.  Also, `safe` is passed as comment()'s second argument,
    which is `full`, not a safety flag — confirm intent.
    """
    output = ()
    for children in g(data, "data.children"):
        if children["kind"] == "t1":
            output += (comment(children, safe),)
        elif children["kind"] == "t3":
            output += (post(children["data"], safe),)
    return output
  395. def comment_flair(data):
  396. flair_text = g(data, "author_flair_text", default=None)
  397. if flair_richtext := data.get("author_flair_richtext"):
  398. flair_text = rich_text(flair_richtext, flair_text )
  399. return builder(span(Class="flair"),Safe,unescape,flair_text) if flair_text else None
  400. def post_flair(data):
  401. flair_text = g(data, "link_flair_text", default=None)
  402. if flair_richtext := data.get("link_flair_richtext"):
  403. flair_text = rich_text(flair_richtext, flair_text )
  404. return builder(span(Class="flair"),Safe,unescape,flair_text) if flair_text else None
def comment(data, full=False):
    """Render one comment.

    With full=True the comment is shown with its post title and subreddit
    context and without its reply subtree; otherwise it is rendered inline
    with author, points, timestamp, permalink and nested replies.
    """
    comment_ = data["data"]
    text = unescape(comment_["body_html"])
    flair = comment_flair(comment_)
    if full:
        title_ = comment_["link_title"]
        header_ = ()
        header_ += ("by", a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}'),flair)
        header_ += ("in", subreddit_link(comment_["subreddit"]))
        header_ += (get_time(comment_["created"]),)
        inner = (
            a(href=comment_["permalink"])(b(title_)),
            div(Class="comment-info")(header_),
            awards(comment_),
            comment_content_div(Safe(text))
        )
        return div(Class="comment")(inner)
    else:
        replies_ = replies(data)
        a_ = a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}')
        link_ = a(href=comment_["permalink"])("🔗")
        # falls back to "downs" when "ups" is 0 or missing
        points = (span(human_format(int(comment_["ups"] or comment_["downs"]))), "points", "·" )
        inner = (div(Class="comment-info")(
            a_,flair, points,
            get_time(comment_["created"]), link_),
            awards(comment_),
            comment_content_div(Safe(text)),
            replies_)
        return div(Class="comment")(inner)
@tuplefy
def reply(data):
    """Render a nested reply: like the inline branch of comment(), but in a
    "reply" div and with the body not wrapped in a comment-content div."""
    comment_ = data["data"]
    text = unescape(comment_["body_html"])
    flair = comment_flair(comment_)
    replies_ = replies(data)
    a_ = a(href=f'/u/{comment_["author"]}')(f'u/{comment_["author"]}')
    link_ = a(href=comment_["permalink"])("🔗")
    # falls back to "downs" when "ups" is 0 or missing
    points = (span(human_format(int(comment_["ups"] or comment_["downs"]))), "points", "·" )
    inner = (div(Class="comment-info")(
        a_,flair, points,
        get_time(comment_["created"]), link_),
        awards(comment_),
        Safe(text),
        replies_)
    return div(Class="reply")(inner)
  450. def comments(data_list):
  451. comments = ()
  452. for data in data_list:
  453. if data['kind'] == "more":
  454. comments += (p("..."),)
  455. else:
  456. comments += (comment(data),)
  457. return div(Class="comments")(comments)
  458. def replies(data):
  459. replies_ = ()
  460. if data['kind'] == "more":
  461. replies_ += (p("..."),)
  462. for children in g(data, "data.replies.data.children", default=[]):
  463. if children['kind'] == "more":
  464. replies_ += (p("..."),)
  465. else:
  466. replies_ += reply(children)
  467. return ul(replies_) if replies else None
  468. @tuplefy
  469. def subreddit_nav(data, subreddit, option=None, time=None):
  470. buttons = ()
  471. target = f"r/{subreddit}" if subreddit else ""
  472. if data["data"]["before"]:
  473. buttons += before_link(data, target, option, time)
  474. if data["data"]["after"]:
  475. buttons += after_link(data, target, option, time)
  476. return div(Class="nav")(buttons) if buttons else ()
  477. @tuplefy
  478. def search_nav(data, subreddit, params):
  479. buttons = ()
  480. target = f"/r/{subreddit}/search" if subreddit else "/search"
  481. if g(data, "data.before"):
  482. buttons += search_before_link(data, target, params)
  483. if g(data, "data.after"):
  484. buttons += search_after_link(data, target, params)
  485. return div(Class="nav")(buttons) if buttons else None
  486. @tuplefy
  487. def domain_nav(data, domain, option=None, time=None):
  488. buttons = ()
  489. target = f"domain/{domain}"
  490. if data["data"]["before"]:
  491. buttons += before_link(data, target, option, time)
  492. if data["data"]["after"]:
  493. buttons += after_link(data, target, option, time)
  494. return div(Class="nav")(buttons) if buttons else ()
  495. @tuplefy
  496. def user_nav(data, user, option=None, time=None):
  497. buttons = ()
  498. target = f"u/{user}"
  499. if data["data"]["before"]:
  500. buttons += user_before_link(data, target, option, time)
  501. if data["data"]["after"]:
  502. buttons += user_after_link(data, target, option, time)
  503. return div(Class="nav")(buttons) if buttons else ()
def page_header(subreddit=None, user=None, domain=None, option=None, q=""):
    """Site header: "kddit" home link, plus a subreddit link when given.

    The search form is currently disabled (commented out below); the
    placeholder/action/button locals exist only for it.  `user`, `domain`,
    `option` and `q` are unused while the form stays disabled.
    """
    placeholder = "search"
    action = f"/r/{subreddit}/search" if subreddit else "/search"
    button = input_(Class="button", type="submit", value="")
    header_ = (a(Class="main-link",href="/")("kddit"),)
    if subreddit:
        header_ += (a(Class="subreddit-link", href=f"/r/{subreddit}")(f"/r/{subreddit}"),)
    #header_ += (form(method="GET", action=action)(input_(name="q", required="", id="search-bar", placeholder=q or placeholder, value=q), button),)
    return header_
  513. def error_page(error):
  514. title_ = f"{error.status}!"
  515. output = h1(title_)
  516. header_ = page_header()
  517. return page(title_, header_, output)