Fix: ablog feeds

Kujiu 2021-02-02 15:58:27 +01:00
parent dc975e0bd1
commit f510a9b2cc
Signed by: kujiu
GPG Key ID: ABBB2CAC6855599F
2 changed files with 96 additions and 27 deletions


@@ -22,6 +22,40 @@ from ablog.blog import Blog, os_path_join, revise_pending_xrefs
 logger = logging.getLogger(__name__)
 text_type = str
 
+
+class Page:
+    """ Mini translator for gemini """
+
+    def __init__(self, builder, docname: str) -> None:
+        self.body = ''
+        self.docname = docname
+        self.builder = builder
+
+    def add_title(self, text: str, level: int = 1) -> None:
+        self.body += '# ' * level
+        self.body += text + '\n'
+
+    def add_link(self, uri: str, desc: str = None) -> None:
+        self.body += '=> %s' % uri
+        if desc:
+            self.body += ' %s' % desc
+        self.body += '\n'
+
+    def end_block(self) -> None:
+        self.body += '\n'
+
+    def add_paragraph(self, text: str) -> None:
+        self.body += text
+        self.body += '\n\n'
+
+    def write(self):
+        path = os.path.join(self.builder.outdir, self.docname)
+        folder = os.path.dirname(path)
+        if not os.path.exists(folder):
+            os.makedirs(folder)
+        with open(path, "w", encoding="utf-8") as out:
+            out.write(self.body)
+
 
 def to_gemini(builder, post, pagename, fulltext=False):
     """
@@ -57,33 +91,45 @@ def generate_archive_pages(builder)
     Generate archive pages for all posts, categories, tags, authors, and
     drafts (from ablog).
     """
     if not ablog.builder_support(builder.app):
         return
 
     blog = Blog(builder.app)
+    all_contexts = []
 
     for post in blog.posts:
         for redirect in post.redirect:
-            yield (redirect, {"redirect": post.docname, "post": post}, "redirect.gmi")
+            doc = Page(builder, redirect)
+            doc.add_title(post.title)
+            doc.add_link(
+                relative_uri(post.uri, redirect),
+                _("Resource has been moved. Go here.")
+            )
+            doc.write()
 
     found_docs = builder.env.found_docs
-    atom_feed = bool(blog.blog_baseurl)
+    atom_feed = bool(builder.config.gemini_baseurl)
     feed_archives = blog.blog_feed_archives
     blog_path = blog.blog_path
 
     for title, header, catalog in [
-        (_("Authors"), _("Posts by"), blog.author),
-        (_("Locations"), _("Posts from"), blog.location),
-        (_("Languages"), _("Posts in"), blog.language),
-        (_("Categories"), _("Posts in"), blog.category),
-        (_("All posts"), _("Posted in"), blog.archive),
+        (_("Authors"), _("Posts by author"), blog.author),
+        (_("Locations"), _("Posts from location"), blog.location),
+        (_("Languages"), _("Posts in language"), blog.language),
+        (_("Categories"), _("Posts in category"), blog.category),
+        (_("All posts"), _("Posted in archive"), blog.archive),
         (_("Tags"), _("Posts tagged"), blog.tags),
     ]:
         if not catalog:
             continue
 
-        context = {"parents": [], "title": title, "header": header, "catalog": catalog, "summary": True}
+        context = {
+            "atom_feed": False,
+            "parents": [],
+            "title": title,
+            "header": header,
+            "collection": catalog,
+            "summary": True,
+            "docname": catalog.docname,
+        }
 
         if catalog.docname not in found_docs:
             yield (catalog.docname, context, "catalog.gmi")
+        all_contexts.append(context)
 
         for collection in catalog:
@@ -96,11 +142,12 @@ def generate_archive_pages(builder)
                 "collection": collection,
                 "summary": True,
                 "feed_path": collection.path if feed_archives else blog_path,
-                "archive_feed": atom_feed and feed_archives,
+                "atom_feed": atom_feed and feed_archives,
+                "docname": collection.docname,
             }
             context["feed_title"] = context["title"]
             if collection.docname not in found_docs:
                 yield (collection.docname, context, "collection.gmi")
+            all_contexts.append(context)
 
     context = {
         "parents": [],
@@ -110,12 +157,32 @@ def generate_archive_pages(builder)
         "summary": True,
         "atom_feed": atom_feed,
         "feed_path": blog.blog_path,
+        "docname": "blog/feeds",
     }
     docname = blog.posts.docname
     yield (docname, context, "collection.gmi")
+    all_contexts.append(context)
+
+    context = {
+        "parents": [],
+        "atom_feed": False,
+        "title": _("Drafts"),
+        "collection": blog.drafts,
+        "summary": True,
+        "docname": "blog/drafts",
+    }
+    all_contexts.append(context)
+
+    for context in all_contexts:
+        collection = context["collection"]
+        doc = Page(builder, context["docname"])
+        doc.add_title(str(collection))
+        if context["atom_feed"]:
+            doc.add_link(
+                collection.path + "/atom.xml",
+                _("Atom feed")
+            )
+        doc.end_block()
+        doc.write()
 
-    context = {"parents": [], "title": _("Drafts"), "collection": blog.drafts, "summary": True}
-    yield (blog.drafts.docname, context, "collection.gmi")
 
 
 def generate_atom_feeds(builder):
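
Not part of the commit: a rough sketch of the gemtext the new write loop above produces for one collected context, assuming `str(collection)` is "python", `collection.path` is "blog/tag/python", and Atom feeds are enabled (all values illustrative).

    # Illustrative only: body assembled by the loop above for one context.
    expected_body = (
        "# python\n"                               # doc.add_title(str(collection))
        "=> blog/tag/python/atom.xml Atom feed\n"  # only when context["atom_feed"]
        "\n"                                       # doc.end_block()
    )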
@@ -125,7 +192,7 @@ def generate_atom_feeds(builder)
     """
     blog = Blog(builder.app)
-    url = blog.blog_baseurl
+    url = builder.config.gemini_baseurl
     if not url:
         return
@@ -143,11 +210,11 @@ def generate_atom_feeds(builder)
     if blog.blog_feed_archives:
         for header, catalog in [
-            (_("Posts by"), blog.author),
-            (_("Posts from"), blog.location),
-            (_("Posts in"), blog.language),
-            (_("Posts in"), blog.category),
-            (_("Posted in"), blog.archive),
+            (_("Posts by author"), blog.author),
+            (_("Posts from location"), blog.location),
+            (_("Posts in language"), blog.language),
+            (_("Posts in category"), blog.category),
+            (_("Posted in archive"), blog.archive),
             (_("Posts tagged"), blog.tags),
         ]:
@@ -176,7 +243,7 @@ def generate_atom_feeds(builder)
     for feed_posts, pagename, feed_path, feed_title, feed_url in feeds:
         feed = FeedGenerator()
-        feed.id(blog.blog_baseurl)
+        feed.id(builder.config.gemini_baseurl)
         feed.title(feed_title)
         feed.link(href=url)
         feed.subtitle(blog.blog_feed_subtitle)
@@ -187,7 +254,7 @@ def generate_atom_feeds(builder)
         for i, post in enumerate(feed_posts):
             if feed_length and i == feed_length:
                 break
-            post_url = os_path_join(url, builder.get_target_uri(post.docname))
+            post_url = builder.get_target_uri(post.docname)
 
             if blog.blog_feed_titles:
                 content = None
@@ -204,7 +271,7 @@ def generate_atom_feeds(builder)
             feed_entry.content(content=content, type="text/gemini")
 
         parent_dir = os.path.dirname(feed_path)
-        if not os.path.isdir(parent_dir):
+        if not os.path.exists(parent_dir):
             os.makedirs(parent_dir)
         with open(feed_path, "w", encoding="utf-8") as out:
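
Not part of the commit: for the gemini_baseurl lookups above to yield absolute feed IDs, the option has to be set in the project's conf.py; a minimal sketch with an illustrative capsule URL.

    # conf.py (illustrative values)
    gemini_baseurl = "gemini://example.org/"   # read as builder.config.gemini_baseurl
    blog_baseurl = "https://example.org/"      # ablog's base URL for the HTML site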


@@ -402,6 +402,8 @@ class GeminiTranslator(SphinxTranslator):
     def visit_image(self, node: Element) -> None:
         self.end_block()
         uri = relative_uri(self.builder.current_docname, node['uri'])
+        if self.builder.config.gemini_baseurl:
+            uri = self.builder.config.gemini_baseurl + node['uri']
         if 'alt' in node.attributes:
             self.add_link(uri, __('[image: %s]') % node['alt'])
         else:
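
Not part of the commit: a small sketch of how visit_image resolves an image URI after this change, with an illustrative base URL, docname, and image path.

    from sphinx.util.osutil import relative_uri

    # Illustrative only: mirrors the branch in visit_image above for one image.
    gemini_baseurl = "gemini://example.org/"               # builder.config.gemini_baseurl
    uri = relative_uri("blog/post", "_images/plot.png")    # relative link by default
    if gemini_baseurl:
        uri = gemini_baseurl + "_images/plot.png"          # absolute capsule URL
    # Emitted gemtext: => gemini://example.org/_images/plot.png [image: plot]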