# targets.py / Template engine for my website
# Joshua Stockin / josh@joshstock.in / https://joshstock.in

import os
import html
from datetime import datetime, timezone, timedelta

import markdown2
import htmlgenerator as hg
import readtime
import sass
from feedgen.feed import FeedGenerator

from _utils import dotdict as namespace, current_dir, load_generators, list_files

# Site generation metadata
CONTENT_DIRECTORY = os.path.join(current_dir(), "content")
SASS_DIRECTORY = os.path.join(current_dir(), "style")
STATIC_DIRECTORY = os.path.join(current_dir(), "static")

blog_description = "Barely coherent ramblings about engineering projects, software, hardware, and other things."

# Fetch generator functions
GENERATORS_MODULE = "generators"
GENERATORS = [
    "head.head",
    "header",
    "footer",
    "blog.article",
    "blog.index",
    "blog.listing",
]

generate = load_generators(GENERATORS_MODULE, GENERATORS)
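# Note: load_generators presumably returns a dispatcher, so that
# generate("name", *args) calls the generator module loaded under "name" and
# returns htmlgenerator element(s). Inferred from usage below, not verified
# against _utils.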

# Site template implementation; returns dict mapping output path -> file contents (bytes)
def template() -> dict[str, bytes]:
    files = {}

    articles_list = []
    fg = FeedGenerator()
    fg.id("https://joshstock.in/blog")
    fg.title("Blog - Josh Stockin")
    fg.author({"name": "Josh Stockin", "email": "josh@joshstock.in", "uri": "https://joshstock.in"})
    fg.link(href="https://joshstock.in/blog", rel="alternate")
    fg.subtitle(blog_description)
    fg.link(href="https://joshstock.in/atom", rel="self")
    fg.language("en")

    for content_file in list_files(CONTENT_DIRECTORY, ".md"):
        with open(content_file, "r") as f:
            data = f.read()

        content_html = markdown2.markdown(
            data,
            safe_mode=False,
            extras=[
                "code-friendly",
                "cuddled-lists",
                "fenced-code-blocks",
                "footnotes",
                "header-ids",
                "metadata",
                "strike",
                "tables",
                "wiki-tables",
                "tag-friendly",
                "target-blank-links",
            ],
        )
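        # markdown2's "metadata" extra parses the document's front matter and
        # exposes it as a dict on the returned string's .metadata attribute.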

        page_data = namespace(content_html.metadata)
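        # namespace (dotdict) gives attribute-style access to the front-matter
        # fields; missing keys presumably resolve to None (hence the `or`
        # fallbacks below). Assumption based on usage, not checked in _utils.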

        page_data.link = page_data.link or ""

        if page_data.type == "website":
            page_generator = hg.HTML(
                generate("head.head", page_data),
                hg.BODY(
                    *generate("header", page_data),
                    hg.DIV(
                        hg.DIV(hg.mark_safe(content_html), _class="content-body"),
                        hg.DIV(_class="vfill"),
                        generate("footer"),
                        _class="content-container",
                    ),
                    onscroll="scroll()",
                ),
            )
            files[page_data.index] = hg.render(page_generator, {}).encode("utf-8")
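            # For plain website pages the `index` front-matter field supplies
            # the output path (e.g. "index.html"; example value is an assumption).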

        elif page_data.type == "article":  # Blog article handling
            page_data.readtime = readtime.of_html(content_html, wpm=150)
            page_data.thumbnail = page_data.banner_image
            page_data.link = "/blog/" + page_data.identifier
            page_data.links = page_data.links or {}
            articles_list += [generate("blog.listing", page_data)]
            page_data.content = content_html

            fe = fg.add_entry()
            fe.id("https://joshstock.in/blog/" + page_data.identifier)
            fe.author({"name": "Josh Stockin", "email": "josh@joshstock.in", "uri": "https://joshstock.in"})
            fe.title(page_data.title)
            fe.summary(page_data.description + " / https://joshstock.in/blog/" + page_data.identifier)
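            # Front matter carries only a date, so the published/updated
            # timestamps are pinned to midnight UTC-6 (US Central Standard Time).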
            datetime_pub = datetime.strptime(page_data.datestring, "%Y-%m-%d").replace(tzinfo=timezone(-timedelta(hours=6)))
            fe.published(datetime_pub)
            fe.updated(datetime_pub)
            fe.link(href="https://joshstock.in/blog/" + page_data.identifier)

            page_generator = hg.HTML(
                generate("head.head", page_data),
                hg.BODY(
                    *generate("header", page_data),
                    hg.DIV(
                        hg.DIV(
                            *generate("blog.article", page_data), _class="content-body"
                        ),
                        hg.DIV(_class="vfill"),
                        generate("footer"),
                        _class="content-container",
                    ),
                    onscroll="scroll()",
                ),
            )

            files["blog/" + page_data.identifier + ".html"] = hg.render(
                page_generator, {}
            ).encode("utf-8")

    # Create blog index page
    blog_page_data = namespace(
        title="Blog",
        banner_image="",
        thumbnail="",
        link="/blog",
        description=fg.subtitle(),
    )
    blog_page_generator = hg.HTML(
        generate("head.head", blog_page_data),
        hg.BODY(
            *generate("header", blog_page_data),
            hg.DIV(
                hg.DIV(
                    hg.DIV(
                        hg.H1("Blog ", hg.IMG(src="/static/svg/memo.svg", _class="inline svg")),
                        hg.P(
                            fg.subtitle(),
                            hg.BR(),
                            hg.SPAN("[", hg.A("Atom feed", href="/atom"), "] ", style="font-size: 0.75em; color: var(--caption-color)"),
                            hg.SPAN("[", hg.A("RSS feed", href="/rss"), "]", style="font-size: 0.75em; color: var(--caption-color)"),
                        ),
                    ),
                    *articles_list,
                    _class="content-body",
                ),
                hg.DIV(_class="vfill"),
                generate("footer"),
                _class="content-container",
            ),
            onscroll="scroll()",
        ),
    )
    files["blog.html"] = hg.render(blog_page_generator, {}).encode("utf-8")

    # Feeds
    files["atom.xml"] = fg.atom_str(pretty=True)
    fg.link(href="https://joshstock.in/rss", rel="self", replace=True)
    files["rss.xml"] = fg.rss_str(pretty=True)

    # Compile Sass stylesheets
    for stylesheet_file in list_files(SASS_DIRECTORY, ".scss"):
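        # Skip Sass partials; underscore-prefixed files exist only to be
        # imported by other stylesheets.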
        if os.path.basename(stylesheet_file)[0] != "_":
            files[
                os.path.join(
                    "static",
                    "style",
                    os.path.splitext(os.path.relpath(stylesheet_file, SASS_DIRECTORY))[0] + ".css",
                )
            ] = sass.compile(filename=stylesheet_file, output_style="compressed").encode("utf-8")

    # Copy content from static files
    for static_file in list_files(STATIC_DIRECTORY):
        with open(static_file, "rb") as f:
            data = f.read()

        files[os.path.join("static", os.path.relpath(static_file, STATIC_DIRECTORY))] = data

    return files
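

# Example usage (a sketch, not part of the original build pipeline): a build
# script is assumed to import template() and write its output to disk; the
# output directory name below is hypothetical.
if __name__ == "__main__":
    OUTPUT_DIRECTORY = os.path.join(current_dir(), "out")  # hypothetical path
    for path, content in template().items():
        target = os.path.join(OUTPUT_DIRECTORY, path)
        os.makedirs(os.path.dirname(target), exist_ok=True)
        with open(target, "wb") as f:
            f.write(content)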