import argparse
import html
import json
import re
import xml.sax.saxutils as xml

from abc import abstractmethod
from collections.abc import Mapping, Sequence
from pathlib import Path
from typing import Any, cast, ClassVar, Generic, get_args, NamedTuple

from markdown_it.token import Token

from . import md, options
from .docbook import DocBookRenderer, Heading, make_xml_id
from .html import HTMLRenderer, UnresolvedXrefError
from .manual_structure import check_structure, FragmentType, is_include, TocEntry, TocEntryType, XrefTarget
from .md import Converter, Renderer

class BaseConverter(Converter[md.TR], Generic[md.TR]):
# per-converter configuration for ns:arg=value arguments to include blocks, following
# the include type. html converters need something like this to support chunking, or
# another external method like the chunktocs docbook uses (but block options seem like
    # a much nicer way of doing this).
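    #
    # include blocks in the manual sources are fenced blocks whose info string starts
    # with `{=include=}` followed by the include type and optional ns:arg=value pairs,
    # roughly like this (names and paths are illustrative only):
    #
    #   ```{=include=} chapters
    #   chapters/some-chapter.md
    #   ```
    #
    # the block contents are file paths for structural includes and key: value
    # directives for options includes; see _parse below.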
INCLUDE_ARGS_NS: ClassVar[str]
INCLUDE_FRAGMENT_ALLOWED_ARGS: ClassVar[set[str]] = set()
INCLUDE_OPTIONS_ALLOWED_ARGS: ClassVar[set[str]] = set()
_base_paths: list[Path]
_current_type: list[TocEntryType]
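
    # entry point: parse the manual source, let subclasses post-process the token
    # stream, render it, and write the result to the output file.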
def convert(self, infile: Path, outfile: Path) -> None:
self._base_paths = [ infile ]
self._current_type = ['book']
try:
tokens = self._parse(infile.read_text())
self._postprocess(infile, outfile, tokens)
converted = self._renderer.render(tokens)
outfile.write_text(converted)
except Exception as e:
raise RuntimeError(f"failed to render manual {infile}") from e
def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
pass
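
    # parse a manual fragment. include blocks are recognized here: their token type
    # is rewritten to included_options or included_<type>, their ns:arg=value
    # arguments are validated, and their contents are parsed recursively.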
def _parse(self, src: str) -> list[Token]:
tokens = super()._parse(src)
check_structure(self._current_type[-1], tokens)
for token in tokens:
if not is_include(token):
continue
directive = token.info[12:].split()
if not directive:
continue
args = { k: v for k, _sep, v in map(lambda s: s.partition('='), directive[1:]) }
typ = directive[0]
if typ == 'options':
token.type = 'included_options'
self._process_include_args(token, args, self.INCLUDE_OPTIONS_ALLOWED_ARGS)
self._parse_options(token, args)
else:
fragment_type = typ.removesuffix('s')
if fragment_type not in get_args(FragmentType):
raise RuntimeError(f"unsupported structural include type '{typ}'")
self._current_type.append(cast(FragmentType, fragment_type))
token.type = 'included_' + typ
self._process_include_args(token, args, self.INCLUDE_FRAGMENT_ALLOWED_ARGS)
self._parse_included_blocks(token, args)
self._current_type.pop()
return tokens
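
    # keep only arguments in this converter's namespace, strip the prefix, and
    # reject anything that is not explicitly allowed for this include type.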
def _process_include_args(self, token: Token, args: dict[str, str], allowed: set[str]) -> None:
ns = self.INCLUDE_ARGS_NS + ":"
args = { k[len(ns):]: v for k, v in args.items() if k.startswith(ns) }
if unknown := set(args.keys()) - allowed:
assert token.map
raise RuntimeError(f"unrecognized include argument in line {token.map[0] + 1}", unknown)
token.meta['include-args'] = args
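
    # every line of a structural include block names a file to splice in. each file
    # is parsed recursively; _base_paths guards against circular includes.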
def _parse_included_blocks(self, token: Token, block_args: dict[str, str]) -> None:
assert token.map
included = token.meta['included'] = []
for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2):
line = line.strip()
path = self._base_paths[-1].parent / line
if path in self._base_paths:
raise RuntimeError(f"circular include found in line {lnum}")
try:
self._base_paths.append(path)
with open(path, 'r') as f:
tokens = self._parse(f.read())
included.append((tokens, path))
self._base_paths.pop()
except Exception as e:
raise RuntimeError(f"processing included file {path} from line {lnum}") from e
def _parse_options(self, token: Token, block_args: dict[str, str]) -> None:
assert token.map
items = {}
for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2):
if len(args := line.split(":", 1)) != 2:
raise RuntimeError(f"options directive with no argument in line {lnum}")
(k, v) = (args[0].strip(), args[1].strip())
if k in items:
raise RuntimeError(f"duplicate options directive {k} in line {lnum}")
items[k] = v
try:
id_prefix = items.pop('id-prefix')
varlist_id = items.pop('list-id')
source = items.pop('source')
except KeyError as e:
raise RuntimeError(f"options directive {e} missing in block at line {token.map[0] + 1}")
if items.keys():
raise RuntimeError(
f"unsupported options directives in block at line {token.map[0] + 1}",
" ".join(items.keys()))
try:
with open(self._base_paths[-1].parent / source, 'r') as f:
token.meta['id-prefix'] = id_prefix
token.meta['list-id'] = varlist_id
token.meta['source'] = json.load(f)
except Exception as e:
raise RuntimeError(f"processing options block in line {token.map[0] + 1}") from e
class RendererMixin(Renderer):
_toplevel_tag: str
_revision: str

    def __init__(self, toplevel_tag: str, revision: str, *args: Any, **kwargs: Any):
super().__init__(*args, **kwargs)
self._toplevel_tag = toplevel_tag
self._revision = revision
self.rules |= {
'included_sections': lambda *args: self._included_thing("section", *args),
'included_chapters': lambda *args: self._included_thing("chapter", *args),
'included_preface': lambda *args: self._included_thing("preface", *args),
'included_parts': lambda *args: self._included_thing("part", *args),
'included_appendix': lambda *args: self._included_thing("appendix", *args),
'included_options': self.included_options,
}

    def render(self, tokens: Sequence[Token]) -> str:
# books get special handling because they have *two* title tags. doing this with
# generic code is more complicated than it's worth. the checks above have verified
# that both titles actually exist.
if self._toplevel_tag == 'book':
return self._render_book(tokens)
return super().render(tokens)

    @abstractmethod
    def _render_book(self, tokens: Sequence[Token]) -> str:
        raise NotImplementedError()

    @abstractmethod
    def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
        raise NotImplementedError()

    @abstractmethod
    def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        raise NotImplementedError()
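
# renders the manual to DocBook. the included_* fragments and the toplevel book
# element need handling beyond what the generic DocBookRenderer provides.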
class ManualDocBookRenderer(RendererMixin, DocBookRenderer):
def __init__(self, toplevel_tag: str, revision: str, manpage_urls: Mapping[str, str]):
super().__init__(toplevel_tag, revision, manpage_urls)
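
    # the structure checks ensure that a book starts with two title headings, so the
    # fixed token indices below (title inline at 1, subtitle inline at 4) are safe.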
def _render_book(self, tokens: Sequence[Token]) -> str:
assert tokens[1].children
assert tokens[4].children
if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
maybe_id = "xml:id=" + xml.quoteattr(maybe_id)
return (f'