| -rw-r--r-- | .dub/dub.json | 15 |
| -rw-r--r-- | .gitignore | 7 |
| -rw-r--r-- | devnotes.org (renamed from readme.org) | 54 |
| -rw-r--r-- | dub.json | 14 |
| -rw-r--r-- | dub.selections.json | 5 |
| -rwxr-xr-x | lib/sdp.d | 186 |
| -rw-r--r-- | lib/sdp/ao_emitter.d | 1479 |
| -rw-r--r-- | lib/sdp/version.txt | 8 |
| -rw-r--r-- | makefile | 14 |
| -rw-r--r-- | maker.org | 86 |
| -rw-r--r-- | org/ao_abstract_doc_source.org | 12 |
| -rw-r--r-- | org/ao_defaults.org | 8 |
| -rw-r--r-- | org/ao_emitter.org | 1568 |
| -rw-r--r-- | org/ao_output_debugs.org | 10 |
| -rw-r--r-- | org/ao_read_source_files.org | 6 |
| -rw-r--r-- | org/compile_time_info.org | 2 |
| -rw-r--r-- | org/sdp.org | 124 |
| -rw-r--r-- | org/sdp_conf.org | 40 |
| -rw-r--r-- | sdp.org | 385 |
| -rwxr-xr-x | src/sdp.d (renamed from lib/sdp/sdp.d) | 2 |
| -rw-r--r-- | src/sdp/ao_abstract_doc_source.d (renamed from lib/sdp/ao_abstract_doc_source.d) | 8 |
| -rw-r--r-- | src/sdp/ao_ansi_colors.d (renamed from lib/sdp/ao_ansi_colors.d) | 0 |
| -rw-r--r-- | src/sdp/ao_defaults.d (renamed from lib/sdp/ao_defaults.d) | 2 |
| -rw-r--r-- | src/sdp/ao_object_setter.d (renamed from lib/sdp/ao_object_setter.d) | 0 |
| -rw-r--r-- | src/sdp/ao_output_debugs.d (renamed from lib/sdp/ao_output_debugs.d) | 8 |
| -rw-r--r-- | src/sdp/ao_read_source_files.d (renamed from lib/sdp/ao_read_source_files.d) | 0 |
| -rw-r--r-- | src/sdp/ao_rgx.d (renamed from lib/sdp/ao_rgx.d) | 0 |
| -rw-r--r-- | src/sdp/compile_time_info.d (renamed from lib/sdp/compile_time_info.d) | 0 |
| -rw-r--r-- | views/version.txt (renamed from lib/version.txt) | 1 |
29 files changed, 262 insertions, 3782 deletions
diff --git a/.dub/dub.json b/.dub/dub.json
new file mode 100644
index 0000000..41dafd6
--- /dev/null
+++ b/.dub/dub.json
@@ -0,0 +1,15 @@
+{
+	"description": "sisu document parser.",
+	"authors": [
+		"Ralph Amissah"
+	],
+	"license": "GPL-3.0",
+	"dependencies": {},
+	"add-path": "./src/sdp",
+	"dub": {
+		"lastUpgrade": "2016-04-25T06:24:35.4069093",
+		"cachedUpgrades": {}
+	},
+	"homepage": "http://sisudoc.org",
+	"name": "sdp"
+}
diff --git a/.gitignore b/.gitignore
@@ -1,13 +1,18 @@
 # git ls-files --others --exclude-from=.git/info/exclude
+#./.dub/**
+.dub/**
 *
 !.gitignore
 !makefile
+!version.txt
 !tangle
 !*.org
 !*.d
 !*.txt
+!*.json
 !org
-!lib
+!src
+!views
 !**/
 **/.#*
 #!*/
diff --git a/readme.org b/devnotes.org
index 695365c..7b64fa8 100644
--- a/readme.org
+++ b/devnotes.org
@@ -1,4 +1,4 @@
-#+TITLE: Summary of Dotfiles
+#+TITLE: sdp (project) discussion
 #+AUTHOR: Ralph Amissah
 #+EMAIL: ralph.amissah@gmail.com
 #+STARTUP: indent
@@ -11,13 +11,13 @@
 #+EXPORT_EXCLUDE_TAGS: noexport
 #+PRIORITIES: A F E
 #+TAGS: Amissah(A) Class(c) WEB(W) noexport(n)
-#+FILETAGS: :sdp:readme:
+#+FILETAGS: :sdp:dev:notes:
 
 * sdp
 ** debug                                                            :debug:
-  objects
-  header
-  heading
-  poem verse ocn
+objects
+header
+heading
+poem verse ocn
 ** tasks                                                            :tasks:
 *** sisu_loop.d
@@ -92,8 +92,8 @@ search and replace inline ascii markup with less ambiguous utf-8 markers
 clean from object, store under separate key
 consider json
 ****** TODO [#B] book index sort
-    sort algorithm currently sorts A-Za-z need Aa-Zz
-    also main terms references need to follow main term ... placed under _a which is not always sorted first
+sort algorithm currently sorts A-Za-z need Aa-Zz
+also main terms references need to follow main term ... placed under _a which is not always sorted first
 ***** bibliography
 identify and store under separate hash
 consider using json to structure
@@ -154,6 +154,8 @@ rdmd -d-debug sisu_ref0.d ../markup/test0.sst
 
 VER='sdp2' && rdmd --build-only --chatty lib/${VER}/sdp.d
 
+rdmd --makedepend --chatty -debug lib/sdp.d
+
 *** dmd                                                                :dmd:
 
 dmd -de -w -unittest -debug sdp.d
@@ -258,6 +260,22 @@ import path[1] = /usr/include/d/4.9
 
 **** dmd (non-free) install arch?
 **** issues
+** run test                                                        :run:test:
+*** sisu-markup-samples
+cd /home/ralph/grotto/repo/git.repo/doc/sisu-markup-samples/data/samples/current
+cd ~sisumarkupgit/data/samples/current
+
+time ~sdp/bin/sdp --html --no-assert en/viral_spiral.david_bollier.sst
+time ~sdpdev/bin/sdp --html --no-assert en/viral_spiral.david_bollier.sst
+time ~sdpniu/bin/sdp --html --no-assert en/viral_spiral.david_bollier.sst
+
+*** live-manual
+cd ~/live-manual/manual
+
+time ~sdp/bin/sdp --html --no-assert en/live-manual.ssm
+time ~sdpdev/bin/sdp --html --no-assert en/live-manual.ssm
+time ~sdpniu/bin/sdp --html --no-assert en/live-manual.ssm
+
 ** notes                                                              :notes:
 **** read file
 char[][] markup_sourcefile_content = split(cast(char[])read(fn_src), rgx_line_delimiter);
@@ -282,26 +300,30 @@ char[][] markup_sourcefile_content = markupSourceLineArray(markupSourceString(fn
 // emacs ./lib/sdp2/sisu_*.d &
 ** bugs                                                                :bugs:
-ok
-time ~dlang/bin/sdp2 --html --no-assert en/[a-eg-z]*
+**** batch processing issue with ldc, not dmd
+ok with dmd issue with ldc
+time ~dlang/bin/sdp --html --no-assert en/[a-eg-z]*
 not ok
-time ~dlang/bin/sdp2 --html --no-assert en/free_for_all.peter_wayner.sst en/gpl3.fsf.sst
+time ~dlang/bin/sdp --html --no-assert en/free_for_all.peter_wayner.sst en/gpl3.fsf.sst
 works if:
   poems removed from gpl3;
   biblio removed from free for all
-time ~dlang/bin/sdp2 --html --no-assert en/free_for_all.peter_wayner.sst en/gpl2.fsf.sst en/gpl3.fsf.sst
-time ~dlang/bin/sdp2 --html --no-assert en/[fg]*
-time ~dlang/bin/sdp2 --html --no-assert en/[a-z]*
+time ~dlang/bin/sdp --html --no-assert en/free_for_all.peter_wayner.sst en/gpl2.fsf.sst en/gpl3.fsf.sst
+time ~dlang/bin/sdp --html --no-assert en/[fg]*
+time ~dlang/bin/sdp --html --no-assert en/[a-z]*
 leaving out free_for_all seems to make it ok
-time ~dlang/bin/sdp2 --html --no-assert en/[a-eg-z]*
+time ~dlang/bin/sdp --html --no-assert en/[a-eg-z]*
 leaving out any two bibliography entries within free_for_all appears to fix the problem!
 works in dmd not in ldc2
-**** Error in `/home/ralph/grotto/repo/git.repo/utils/d/bin/sdp2': corrupted double-linked list: 0x00000008b905b310 ***
+Error in `/home/ralph/grotto/repo/git.repo/utils/d/bin/sdp2': corrupted double-linked list: 0x00000008b905b310 ***
 in free_for_all bibliography first instance FatBrain ref
 gets stuck after:
 en/free_for_all.peter_wayner.sst
+
+**** master & imports e.g.
live-manual works with dmd not ldc +  ** desc                                                                :desc:  *** process files  .sst (text) regular diff --git a/dub.json b/dub.json new file mode 100644 index 0000000..f23678a --- /dev/null +++ b/dub.json @@ -0,0 +1,14 @@ + +{ +  "name": "sdp", +  "targetType": "executable", +  "targetName": "sdp", +  "targetPath": "bin", +  "description": "sisu document parser.", +  "authors": ["Ralph Amissah"], +  "homepage": "http://sisudoc.org", +  "license": "GPL-3.0", +  "add-path": "./src/sdp", +  "dependencies": { +  } +} diff --git a/dub.selections.json b/dub.selections.json new file mode 100644 index 0000000..78166ff --- /dev/null +++ b/dub.selections.json @@ -0,0 +1,5 @@ + +{ +	"fileVersion": 1, +	"versions": {} +} diff --git a/lib/sdp.d b/lib/sdp.d deleted file mode 100755 index 4903be3..0000000 --- a/lib/sdp.d +++ /dev/null @@ -1,186 +0,0 @@ -#!/usr/bin/env rdmd -/+ -  sdp -  sdp.d -+/ -/+ sdp  sisu document parser +/ -private import -  std.getopt, -  std.process, -  std.stdio, -  std.algorithm, -  std.array, -  std.container, -  std.exception, -  std.json, -  // std.path, -  std.range, -  std.regex, -  // std.stdio, -  std.string, -  std.traits, -  std.typecons, -  std.utf, -  // std.variant, -  std.conv : to; -/+ sdp  sisu document parser +/ -import -  lib.sdp.compile_time_info,            // sdp/compile_time_info.d -  lib.sdp.ao_abstract_doc_source,       // sdp/ao_abstract_doc_source.d -  lib.sdp.ao_defaults,                  // sdp/ao_defaults.d -  lib.sdp.ao_read_source_files,         // sdp/ao_read_source_files.d -  lib.sdp.ao_output_debugs,             // sdp/ao_output_debugs.d -  lib.sdp.ao_rgx,                       // sdp/ao_rgx.d -  lib.sdp.ao_ansi_colors;               // sdp/ao_ansi_colors.d -  // std.conv; -mixin(import("version.txt")); -mixin CompileTimeInfo; -mixin RgxInit; -void main(string[] args) { -   -  mixin SiSUheader; -  mixin SiSUbiblio; -  mixin SiSUrgxInitFlags; -  mixin SiSUmarkupRaw; -  mixin SiSUdocAbstraction; -  mixin SiSUoutputDebugs; -  mixin ScreenTxtColors; -  auto raw = MarkupRaw(); -  auto abs = Abstraction(); -  auto dbg = SDPoutputDebugs(); -  /+ -  struct DocumentParts { -    string[string][] contents; -    JSONValue[string] metadata_json; -    JSONValue[string] make_json; -    string[][string][string] bookindex_unordered_hashes; -    JSONValue[] biblio; -  } -  +/ -  string[] fns_src; -  string flag_action; -  string arg_unrecognized; -  auto rgx = Rgx(); -  scope(success) { -    debug(checkdoc) { -      writefln( -        "%s~ run complete, ok ~ %s (sdp-%s.%s.%s, %s v%s, %s %s)", -        scr_txt_color["cyan"], scr_txt_color["off"], -        ver.major, ver.minor, ver.patch, -        __VENDOR__, __VERSION__, -        bits, os, -      ); -    } -    // stderr.writeln("0"); -  } -  scope(failure) { -    debug(checkdoc) { -      stderr.writefln( -        "%s~ run failure ~%s", -         scr_txt_color["fuchsia"], scr_txt_color["off"], -      ); -    } -  } -   -  bool[string] opt_action_bool = [ -    "assertions"          : false, -    "html"                : false, -    "no_obj_cite_number"  : false, -    "verbose"             : false, -  ]; -  auto helpInfo = getopt(args, -    std.getopt.config.passThrough, -    "assert",    "--assert set optional assertions on",          &opt_action_bool["assertions"], -    "html",      "--html process html output",                   &opt_action_bool["html"], -    "no-ocn",    "--no-ocn suppress object cite number output",  &opt_action_bool["no_obj_cite_number"], 
-    "verbose|v", "--verbose output to terminal",                 &opt_action_bool["verbose"], -  ); -  if (helpInfo.helpWanted) { -    defaultGetoptPrinter("Some information about the program.", helpInfo.options); -  } -  foreach(arg; args) { -    if (match(arg, rgx.flag_action)) { -      flag_action ~= " " ~ arg;   // flags not taken by getopt -    } else if (match(arg, rgx.src_pth)) { -      fns_src ~= arg;             // gather input markup source file names for processing -    } else {                      // anything remaining, unused -      arg_unrecognized ~= " " ~ arg; -    } -  } -  foreach(fn_src; fns_src) { -  // foreach(fn_src; fns_src) { -    if (!empty(fn_src)) { -      scope(success) { -        debug(checkdoc) { -          writefln( -            "%s~ document complete, ok ~%s", -            scr_txt_color["green"], scr_txt_color["off"], -          ); -        } -        // stderr.writeln("0"); -      } -      scope(failure) { -        debug(checkdoc) { -          stderr.writefln( -            "%s~ document run failure ~%s (%s  v%s)\n\t%s", -            scr_txt_color["red"], scr_txt_color["off"], -            __VENDOR__, __VERSION__, -            fn_src -          ); -        } -        // stderr.writeln("1"); -      } -      enforce( -        match(fn_src, rgx.src_pth), -        "not a sisu markup filename" -      ); -      /+ ↓ read file +/ -      auto sourcefile_content = -        raw.sourceContent(fn_src); -      /+ ↓ porcess document, return abstraction as tuple +/ -      auto t = -        abs.abstract_doc_source(sourcefile_content); -      static assert(!isTypeTuple!(t)); -      auto doc_ao_contents = t[0]; // contents ~ endnotes ~ bookindex; -      // static assert(!isIterable!(doc_ao_contents)); -      auto doc_ao_metadata_json = t[1]; -      auto doc_ao_make_json = t[2]; -      auto doc_ao_bookindex_unordered_hashes = t[3]; -      auto doc_ao_biblio = t[4]; -      // destroy(t); -      /+ ↓ document parts +/ -      debug(checkdoc) { // checkbook & dumpdoc -        dbg.abstract_doc_source_debugs( -          doc_ao_contents, -          doc_ao_make_json, -          doc_ao_metadata_json, -          doc_ao_bookindex_unordered_hashes, -          doc_ao_biblio, -          fn_src, -          opt_action_bool -        ); -      } -      scope(exit) { -        debug(checkdoc) { -          writefln( -            "processed file: %s", -            fn_src -          ); -        } -        destroy(sourcefile_content); -        destroy(t); -        destroy(doc_ao_contents); -        destroy(doc_ao_make_json); -        destroy(doc_ao_metadata_json); -        destroy(doc_ao_bookindex_unordered_hashes); -        destroy(doc_ao_biblio); -        destroy(fn_src); -      } -    } else { -      /+ no recognized filename provided +/ -      writeln("no recognized filename"); -      break; -      // terminate, stop -    } -  } -} diff --git a/lib/sdp/ao_emitter.d b/lib/sdp/ao_emitter.d deleted file mode 100644 index 7ed9fa8..0000000 --- a/lib/sdp/ao_emitter.d +++ /dev/null @@ -1,1479 +0,0 @@ -/+ -  emitters -  ao_emitters.d -+/ -mixin template Emitters() { -  mixin InternalMarkup; -  struct CLI { -    string[string] extract_actions(string cmdlnins, string[string] actions) -    in { } -    body { -      switch (cmdlnins) { -      case "--no-assert": -        actions["assert"] = "no"; -        break; -      default: -        break; -      } -      return actions; -    } -  } -  struct OCNemitter { -  // class OCNemitter : AssertOCN { -    int ocn, ocn_; -    int ocn_emitter(int ocn_status_flag) 
-    in { assert(ocn_status_flag <= 2); } -    body { -      if (ocn_status_flag == 0) { -        ocn=++ocn_; -      } else { -        ocn=0; -      } -      assert(ocn >= 0); -      return ocn; -    } -    invariant() { -    } -  } -  struct ObjAttributes { -  // class ObjAttributes : AssertObjAttributes { -    string[string] obj_txt; -    string para_and_blocks(string obj_txt_in) -    in { } -    body { -      auto rgx = Rgx(); -      obj_txt["munge"]=obj_txt_in; -      if (match(obj_txt_in, rgx.para_bullet)) { -        obj_txt["attrib"] =" \"bullet\": \"true\"," -        ~ " \"indent_first\": 0," -        ~ " \"indent_rest\": 0,"; -      } else if (auto m = match(obj_txt_in, rgx.para_bullet_indent)) { -        obj_txt["attrib"] =" \"bullet\": \"true\"," -        ~ " \"indent_first\": " ~ to!string(m.captures[1]) ~ "," -        ~ " \"indent_rest\": " ~ to!string(m.captures[1]) ~ ","; -      } else if (auto m = match(obj_txt_in, rgx.para_indent_hang)) { -        obj_txt["attrib"] =" \"bullet\": \"false\"," -        ~ " \"indent_first\": " ~ to!string(m.captures[1]) ~ "," -        ~ " \"indent_rest\": " ~  to!string(m.captures[2]) ~ ","; -      } else if (auto m = match(obj_txt_in, rgx.para_indent)) { -        obj_txt["attrib"] =" \"bullet\": \"false\"," -        ~ " \"indent_first\": " ~ to!string(m.captures[1]) ~ "," -        ~ " \"indent_rest\": " ~ to!string(m.captures[1]) ~ ","; -      } else { -        obj_txt["attrib"] =" \"bullet\": \"false\"," -        ~ " \"indent_first\": 0," -        ~ " \"indent_rest\": 0,"; -      } -      return obj_txt["attrib"]; -    } -    string para(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"content\"," -      ~ " \"of\": \"para\"," -      ~ " \"is\": \"para\""; -      return obj_txt["attrib"]; -    } -    invariant() { -    } -    string heading(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"content\"," -      ~ " \"of\": \"para\"," -      ~ " \"is\": \"heading\""; -      // obj_txt["struct"]=; -      return obj_txt["attrib"]; -    } -    invariant() { -    } -    string header_make(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"head\"," -      ~ " \"of\": \"header\"," -      ~ " \"is\": \"header_make\""; -      return obj_txt["attrib"]; -    } -    invariant() { -    } -    string header_metadata(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"head\"," -      ~ " \"of\": \"header\"," -      ~ " \"is\": \"header_metadata\""; -      return obj_txt["attrib"]; -    } -    invariant() { -    } -    string code(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"content\"," -      ~ " \"of\": \"block\"," -      ~ " \"is\": \"code\""; -      return obj_txt["attrib"]; -    } -    invariant() { -    } -    string group(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"content\"," -      ~ " \"of\": \"block\"," -      ~ " \"is\": \"group\""; -      return obj_txt["attrib"]; -    } -    invariant() { -    } -    string block(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"content\"," -      ~ " \"of\": \"block\"," -      ~ " \"is\": \"block\""; -      return 
obj_txt["attrib"]; -    } -    invariant() { -    } -    string verse(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"content\"," -      ~ " \"of\": \"block\"," -      ~ " \"is\": \"verse\""; -      return obj_txt["attrib"]; -    } -    invariant() { -    } -    string quote(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"content\"," -      ~ " \"of\": \"block\"," -      ~ " \"is\": \"quote\""; -      return obj_txt["attrib"]; -    } -    invariant() { -    } -    string table(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"content\"," -      ~ " \"of\": \"block\"," -      ~ " \"is\": \"table\""; -      return obj_txt["attrib"]; -    } -    invariant() { -    } -    string comment(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["attrib"] = " \"use\": \"comment\"," -      ~ " \"of\": \"comment\"," -      ~ " \"is\": \"comment\""; -      return obj_txt["attrib"]; -    } -    invariant() { -    } -  } -  struct ObjInlineMarkupMunge { -  // struct ObjInlineMarkupMunge : AssertObjInlineMarkup { -    string[string] obj_txt; -    int n_foot, n_foot_reg, n_foot_sp_asterisk, n_foot_sp_plus; -    string obj_txt_out, tail, note; -    private auto initialize_note_numbers() { -      n_foot = 0; -      n_foot_reg = 0; -      n_foot_sp_asterisk = 0; -      n_foot_sp_plus = 0; -    } -    private auto object_notes_(string obj_txt_in) -    in { } -    body { -      auto rgx = Rgx(); -      auto mkup = InternalMarkup(); -      obj_txt_out = ""; -      tail = ""; -      obj_txt_in = replaceAll( -        obj_txt_in, -        rgx.inline_notes_curly_sp_asterisk, -        (mkup.en_a_o ~ "*" ~ " $1" ~ mkup.en_a_c) -      ); -      obj_txt_in = -        replaceAll( -          obj_txt_in, -          rgx.inline_notes_curly_sp_plus, -          (mkup.en_a_o ~ "+" ~ " $1" ~ mkup.en_a_c) -        ); -      obj_txt_in = -        replaceAll( -          obj_txt_in, -          rgx.inline_notes_curly, -          (mkup.en_a_o ~ " $1" ~ mkup.en_a_c) -        ); -      if (match(obj_txt_in, rgx.inline_notes_al_gen)) { -        foreach(m; matchAll(obj_txt_in, rgx.inline_text_and_note_al)) { -          if (match(obj_txt_in, rgx.inline_al_delimiter_open_asterisk)) { -            n_foot_sp_asterisk++; -            n_foot=n_foot_sp_asterisk; -          } else if (match(obj_txt_in, rgx.inline_al_delimiter_open_plus)) { -            n_foot_sp_plus++; -            n_foot=n_foot_sp_plus; -          } else { -            n_foot_reg++; -            n_foot=n_foot_reg; -          } -          obj_txt_out ~= replaceFirst( -            m.hit, -            rgx.inline_al_delimiter_open_regular, -            (mkup.en_a_o ~ to!string(n_foot)) -          ); -          tail = m.post; -          // if (!empty(m.post)) { -          //   tail = m.post; -          // } else { -          //   tail = ""; -          // } -        } -      } else { -        obj_txt_out = obj_txt_in; -      } -      debug(footnotes) { -        writeln(obj_txt_out, tail); -      } -      obj_txt_out = obj_txt_out ~ tail; -      debug(footnotesdone) { -        foreach(m; matchAll(obj_txt_out, -        (mkup.en_a_o ~ `\s*(.+?)` ~ mkup.en_a_c))) { -          writeln(m.captures[1]); -          writeln(m.hit); -        } -      } -      return obj_txt_out; -    } -    string para(string obj_txt_in) -    in { } -    body { -   
   auto rgx = Rgx(); -      obj_txt["munge"]=obj_txt_in; -      obj_txt["munge"]=replaceFirst(obj_txt["munge"], rgx.para_attribs, ""); -      obj_txt["munge"]=replaceFirst(obj_txt["munge"], rgx.ocn_off_all, ""); -      obj_txt["munge"]=object_notes_(obj_txt["munge"]); -      debug(munge) { -        writeln(__LINE__); -        writeln(obj_txt_in); -        writeln(__LINE__); -        writeln(to!string(obj_txt["munge"])); -      } -      return obj_txt["munge"]; -    } -    string heading(string obj_txt_in) -    in { } -    body { -      auto rgx = Rgx(); -      obj_txt["munge"]=obj_txt_in; -      obj_txt["munge"]=replaceFirst(obj_txt["munge"], rgx.heading, ""); -      obj_txt["munge"]=replaceFirst(obj_txt["munge"], rgx.ocn_off_all, ""); -      obj_txt["munge"]=object_notes_(obj_txt["munge"]); -      debug(munge) { -        writeln(__LINE__); -        writeln(obj_txt_in); -        writeln(__LINE__); -        writeln(to!string(obj_txt["munge"])); -      } -      return obj_txt["munge"]; -    } -    invariant() { -    } -    string header_make(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      return obj_txt["munge"]; -    } -    invariant() { -    } -    string header_metadata(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      return obj_txt["munge"]; -    } -    invariant() { -    } -    string code(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      return obj_txt["munge"]; -    } -    invariant() { -    } -    string group(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["munge"]=object_notes_(obj_txt["munge"]); -      return obj_txt["munge"]; -    } -    invariant() { -    } -    string block(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["munge"]=object_notes_(obj_txt["munge"]); -      return obj_txt["munge"]; -    } -    invariant() { -    } -    string verse(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      obj_txt["munge"]=object_notes_(obj_txt["munge"]); -      return obj_txt["munge"]; -    } -    invariant() { -    } -    string quote(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      return obj_txt["munge"]; -    } -    invariant() { -    } -    string table(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      return obj_txt["munge"]; -    } -    invariant() { -    } -    string comment(string obj_txt_in) -    in { } -    body { -      obj_txt["munge"]=obj_txt_in; -      return obj_txt["munge"]; -    } -    invariant() { -    } -  } -  struct ObjInlineMarkup { -  // struct ObjInlineMarkup : AssertObjInlineMarkup { -    auto munge = ObjInlineMarkupMunge(); -    string[string] obj_txt; -    string obj_inline_markup(string obj_is_, string obj_raw) -    in { } -    body { -      obj_txt["munge"]=obj_raw.dup; -      obj_txt["munge"]=(match(obj_is_, ctRegex!(`verse|code`))) -        ? 
obj_txt["munge"] -        : strip(obj_txt["munge"]); -      switch (obj_is_) { -      case "header_make": -        obj_txt["munge"]=munge.header_make(obj_txt["munge"]); -        break; -      case "header_metadata": -        obj_txt["munge"]=munge.header_metadata(obj_txt["munge"]); -        break; -      case "heading": -        obj_txt["munge"]=munge.heading(obj_txt["munge"]); -        break; -      case "para": -        obj_txt["munge"]=munge.para(obj_txt["munge"]); -        break; -      case "code": -        obj_txt["munge"]=munge.code(obj_txt["munge"]); -        break; -      case "group": -        obj_txt["munge"]=munge.group(obj_txt["munge"]); -        break; -      case "block": -        obj_txt["munge"]=munge.block(obj_txt["munge"]); -        break; -      case "verse": -        obj_txt["munge"]=munge.verse(obj_txt["munge"]); -        break; -      case "quote": -        obj_txt["munge"]=munge.quote(obj_txt["munge"]); -        break; -      case "table": -        obj_txt["munge"]=munge.table(obj_txt["munge"]); -        break; -      case "comment": -        obj_txt["munge"]=munge.comment(obj_txt["munge"]); -        break; -      case "doc_end_reset": -        munge.initialize_note_numbers(); -        break; -      default: -        break; -      } -      return obj_txt["munge"]; -    } -    invariant() { -    } -  } -  struct ObjAttrib { -  // struct ObjAttrib : AssertObjAttrib { -  // auto sink = appender!(char[])(); -    auto attrib = ObjAttributes(); -    string[string] obj_attrib; -    string obj_attributes(string obj_is_, string obj_raw, string node) -    in { } -    body { -      // string s = "{ \"language\": \"D\", \"rating\": 3.14, \"code\": \"42\" }"; -      scope(exit) { -        // destroy(obj_is_); -        destroy(obj_raw); -        destroy(node); -      } -      JSONValue node_j = parseJSON(node); -      obj_attrib.remove("json"); -      obj_attrib["json"] ="{"; -      switch (obj_is_) { -      case "header_make": -        obj_attrib["json"] ~= attrib.header_make(obj_raw); -        break; -      case "header_metadata": -        obj_attrib["json"] ~= attrib.header_metadata(obj_raw); -        break; -      case "heading": -        obj_attrib["json"] ~= attrib.heading(obj_raw); // -        break; -      case "para": -        obj_attrib["json"] ~= attrib.para_and_blocks(obj_raw) -        ~ attrib.para(obj_raw); -        break; -      case "code": -        obj_attrib["json"] ~= attrib.code(obj_raw); -        break; -      case "group": -        obj_attrib["json"] ~= attrib.para_and_blocks(obj_raw) -        ~ attrib.group(obj_raw); -        break; -      case "block": -        obj_attrib["json"] ~= attrib.para_and_blocks(obj_raw) -        ~ attrib.block(obj_raw); -        break; -      case "verse": -        obj_attrib["json"] ~= attrib.verse(obj_raw); -        break; -      case "quote": -        obj_attrib["json"] ~= attrib.quote(obj_raw); -        break; -      case "table": -        obj_attrib["json"] ~= attrib.table(obj_raw); -        break; -      case "comment": -        obj_attrib["json"] ~= attrib.comment(obj_raw); -        break; -      default: -        obj_attrib["json"] ~= attrib.para(obj_raw); -        break; -      } -      obj_attrib["json"] ~=" }"; -      JSONValue oa_j = parseJSON(obj_attrib["json"]); -      assert( -        (oa_j.type == JSON_TYPE.OBJECT) && -        (node_j.type == JSON_TYPE.OBJECT) -      ); -      if (obj_is_ == "heading") { -        oa_j.object["ocn"] = node_j["ocn"]; -        oa_j.object["lvn"] = node_j["lvn"]; -        
oa_j.object["lcn"] = node_j["lcn"]; -        oa_j.object["heading_pointer"] = -          node_j["heading_pointer"]; // check -        oa_j.object["doc_object_pointer"] = -          node_j["doc_object_pointer"]; // check -      } -      oa_j.object["parent_ocn"] = node_j["parent_ocn"]; -      oa_j.object["parent_lvn"] = node_j["parent_lvn"]; -      obj_attrib["json"] = oa_j.toString(); -      debug(structattrib) { -        if (oa_j["is"].str() == "heading") { -          // writeln(__LINE__); -          writeln(obj_attrib["json"]); -          // writeln(node); -          writeln( -            "is: ", oa_j["is"].str(), -            "; ocn: ", oa_j["ocn"].integer() -          ); -        } -      } -      // obj_attrib["json"]="{}"; -      return obj_attrib["json"]; -    } -    invariant() { -    } -  } -  struct HeaderDocMetadataMakeJson { -  // class HeaderMetadataMakeHash : AssertHeaderMetadataMakeJson { -    auto rgx = Rgx(); -    string hm, hs; -    auto header_metadata_and_make_jsonstr( -      string header, -      JSONValue[string] dochead_metadata, -      JSONValue[string] dochead_make -    ) -    in { } -    body { -      scope(exit) { -        destroy(header); -        destroy(dochead_metadata); -        destroy(dochead_make); -      } -      if (auto t = match(header, rgx.head_main)) { -        char[][] obj_spl = split( -          cast(char[]) header, -          rgx.line_delimiter_ws_strip -        ); -        auto hm = to!string(t.captures[1]); -        if (match(hm, rgx.main_headers)) { -          foreach (line; obj_spl) { -            if (auto m = match(line, rgx.head_main)) { -              if (!empty(m.captures[2])) { -                if (hm == "creator") { -                  dochead_metadata[hm]["author"].str = -                    to!string(m.captures[2]); -                } else if (hm == "title") { -                  dochead_metadata[hm]["main"].str = -                    to!string(m.captures[2]); -                } else if (hm == "publisher") { -                  dochead_metadata[hm]["name"].str = -                    to!string(m.captures[2]); -                } -              } -            } else if (auto s = match(line, rgx.head_sub)) { -              if (!empty(s.captures[2])) { -                auto hs = to!string(s.captures[1]); -                if ((hm == "make" ) -                && (dochead_make[hm].type() == JSON_TYPE.OBJECT)) { -                  switch (hm) { -                  case "make": -                    if (match(hs, rgx.subhead_make)) { -                      if (dochead_make[hm][hs].type() == JSON_TYPE.STRING) { -                        dochead_make[hm][hs].str = to!string(s.captures[2]); -                      } -                    } else { -                      writeln("not a valid header type:", hm, ":", hs); -                      destroy(hm); -                      destroy(hs); -                    } -                    break; -                  default: -                    break; -                  } -                } else if (dochead_metadata[hm].type() == JSON_TYPE.OBJECT) { -                  switch (hm) { -                  case "creator": -                    if (match(hs, rgx.subhead_creator)) { -                      if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                        dochead_metadata[hm][hs].str = -                          to!string(s.captures[2]); -                      } -                    } else { -                      writeln("not a valid header type:", hm, ":", hs); -                      
destroy(hm); -                      destroy(hs); -                    } -                    break; -                  case "title": -                    if (match(hs, rgx.subhead_title)) { -                      if ((hs == "subtitle") -                      && (dochead_metadata[hm]["sub"].type() == JSON_TYPE.STRING)) { -                        dochead_metadata[hm]["sub"].str = -                          to!string(s.captures[2]); -                      } else if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                        dochead_metadata[hm][hs].str = -                          to!string(s.captures[2]); -                      } -                    } else { -                      writeln("not a valid header type:", hm, ":", hs); -                      destroy(hm); -                      destroy(hs); -                    } -                    break; -                  case "rights": -                    if (match(hs, rgx.subhead_rights)) { -                      if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                        dochead_metadata[hm][hs].str = -                          to!string(s.captures[2]); -                      } -                    } else { -                      writeln("not a valid header type:", hm, ":", hs); -                      destroy(hm); -                      destroy(hs); -                    } -                    break; -                  case "date": -                    if (match(hs, rgx.subhead_date)) { -                      if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                        dochead_metadata[hm][hs].str = -                          to!string(s.captures[2]); -                      } -                    } else { -                      writeln("not a valid header type:", hm, ":", hs); -                      destroy(hm); -                      destroy(hs); -                    } -                    break; -                  case "original": -                    if (match(hs, rgx.subhead_original)) { -                      if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                        dochead_metadata[hm][hs].str = -                          to!string(s.captures[2]); -                      } -                    } else { -                      writeln("not a valid header type:", hm, ":", hs); -                      destroy(hm); -                      destroy(hs); -                    } -                    break; -                  case "classify": -                    if (match(hs, rgx.subhead_classify)) { -                      if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                        dochead_metadata[hm][hs].str = -                          to!string(s.captures[2]); -                      } -                    } else { -                      writeln("not a valid header type:", hm, ":", hs); -                      destroy(hm); -                      destroy(hs); -                    } -                    break; -                  case "identifier": -                    if (match(hs, rgx.subhead_identifier)) { -                      if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                        dochead_metadata[hm][hs].str = -                          to!string(s.captures[2]); -                      } -                    } else { -                      writeln("not a valid header type:", hm, ":", hs); -                      destroy(hm); -                      destroy(hs); -                    } -                 
   break; -                  case "notes": -                    if (match(hs, rgx.subhead_notes)) { -                      if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                        dochead_metadata[hm][hs].str = -                          to!string(s.captures[2]); -                      } -                    } else { -                      writeln("not a valid header type:", hm, ":", hs); -                      destroy(hm); -                      destroy(hs); -                    } -                    break; -                  case "publisher": -                    if (match(hs, rgx.subhead_publisher)) { -                      if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                        dochead_metadata[hm][hs].str = -                          to!string(s.captures[2]); -                      } -                    } else { -                      writeln("not a valid header type:", hm, ":", hs); -                      destroy(hm); -                      destroy(hs); -                    } -                    break; -                  case "links": -                    destroy(hm); -                    destroy(hs); -                    // if (match(hs, rgx.subhead_links)) { -                    //   if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                    //     dochead_metadata[hm][hs].str = to!string(s.captures[2]); -                    //   } -                    // } else { -                    //   writeln("not a valid header type:", hm, ":", hs); -                    //   destroy(hm); -                    //   destroy(hs); -                    // } -                    break; -                  default: -                    break; -                  } -                } -              } -            } -          } -        } else { -          writeln("not a valid header type:", hm); -        } -      } -      auto t = tuple(dochead_metadata, dochead_make); -      static assert(!isTypeTuple!(t)); -      return t; -    } -    // invariant() { -    // } -  } -  class HeaderMetadataMakeHash { -  // class HeaderMetadataMakeHash : AssertHeaderMetadataMakeHash { -    auto rgx = Rgx(); -    string header_main; -    string[string] head; -    string[string] header_topic_hash(string header) -    in { } -    body { -      if (auto t = match(header, rgx.head_main)) { -        char[][] obj_spl = split( -          cast(char[]) header, -          rgx.line_delimiter_ws_strip -        ); -        auto header_main = to!string(t.captures[1]); -        head[header_main] = "{"; -        foreach (line; obj_spl) { -          if (auto m = match(line, rgx.head_main)) { -            if (!empty(m.captures[2])) { -              head[header_main] ~= -                "\"" ~ header_main ~ -                "\": \"" ~ -                to!string(m.captures[2]) ~ -                "\","; -            } -          } else if (auto s = match(line, rgx.head_sub)) { -            head[header_main] ~= "\"" ~ s.captures[1] ~ "\":"; -            if (!empty(s.captures[2])) { -              head[header_main] ~= "\"" ~ s.captures[2] ~ "\","; -            } -          } -        } -        head[header_main] = replaceFirst( -          head[header_main], -          rgx.tailing_comma, -          "" -        ); -        head[header_main] ~= "}"; -        debug(headerjson) { -          JSONValue j = parseJSON(head[header_main]); -          assert( -            (j.type == JSON_TYPE.OBJECT) -          ); -        } -      } -      return head; -    } -    invariant() 
{ -    } -  } -  struct BookIndexNuggetHash { -  // class BookIndexNuggetHash : AssertBookIndexNuggetHash { -    string main_term, sub_term, sub_term_bits; -    uint ocn_offset, ocn_endpoint; -    string[] ocns; -    string[][string][string] bi; -    string[][string][string] hash_nugget; -    string[] bi_main_terms_split_arr; -    string[][string][string] bookindex_nugget_hash(string bookindex, int ocn) -    in { -      debug(bookindexraw) { -        mixin ScreenTxtColors; -        if (!bookindex.empty) { -          writeln( -            scr_txt_color["blue"], "* [bookindex] ", scr_txt_color["off"], -            "[", to!string(ocn), "] ", bookindex -          ); -        } -      } -    } -    body { -      auto rgx = Rgx(); -      if (!bookindex.empty) { -        auto bi_main_terms_split_arr = -          split(bookindex, rgx.bi_main_terms_split); -        foreach (bi_main_terms_content; bi_main_terms_split_arr) { -          auto bi_main_term_and_rest = -            split(bi_main_terms_content, rgx.bi_main_term_plus_rest_split); -          if (auto m = match( -            bi_main_term_and_rest[0], -            rgx.bi_term_and_ocns_match) -          ) { -            main_term = strip(m.captures[1]); -            ocn_offset = to!uint(m.captures[2]); -            ocn_endpoint=(ocn + ocn_offset); -            ocns ~= (to!string(ocn) ~ "-" ~ to!string(ocn_endpoint)); -          } else { -            main_term = strip(bi_main_term_and_rest[0]); -            ocns ~= to!string(ocn); -          } -          bi[main_term]["_a"] ~= ocns; -          ocns=null; -          if (bi_main_term_and_rest.length > 1) { -            auto bi_sub_terms_split_arr = -              split( -                bi_main_term_and_rest[1], -                rgx.bi_sub_terms_plus_ocn_offset_split -              ); -            foreach (sub_terms_bits; bi_sub_terms_split_arr) { -              if (auto m = match(sub_terms_bits, rgx.bi_term_and_ocns_match)) { -                sub_term = strip(m.captures[1]); -                ocn_offset = to!uint(m.captures[2]); -                ocn_endpoint=(ocn + ocn_offset); -                ocns ~= (to!string(ocn) ~ " - " ~ to!string(ocn_endpoint)); -              } else { -                sub_term = strip(sub_terms_bits); -                ocns ~= to!string(ocn); -              } -              if (!empty(sub_term)) { -                bi[main_term][sub_term] ~= ocns; -              } -              ocns=null; -            } -          } -          // ocns=null; -        } -      } -      hash_nugget = bi; -      // bi=null; // bi.init; // use to empty for each next object; else, harvest hashes at the end of the document -      return hash_nugget; -    } -    invariant() { -    } -  } -  struct BookIndexReport { -  // class BookIndexReport : AssertBookIndexReport { -    int mkn, skn; -    auto bookindex_report_sorted( -      string[][string][string] bookindex_unordered_hashes -    ) { -      auto mainkeys=bookindex_unordered_hashes.byKey.array. -        sort!("toLower(a) < toLower(b)", SwapStrategy.stable).release; -      foreach (mainkey; mainkeys) { -        auto subkeys=bookindex_unordered_hashes[mainkey].byKey.array. 
-          sort!("toLower(a) < toLower(b)", SwapStrategy.stable).release; -        foreach (subkey; subkeys) { -          debug(bookindex) { -            writeln( -              mainkey, ": ", -              subkey, ": ", -              to!string(bookindex_unordered_hashes[mainkey][subkey]) -            ); -          } -          // bookindex_the[mkn][mainkey][skn][subkey] ~= (bookindex_unordered_hashes[mainkey][subkey]); -          skn++; -        } -        mkn++; -      } -      // return bookindex_the; -    } -  } -  struct BookIndexReportIndent { -    int mkn, skn; -    auto bookindex_report_indented( -      string[][string][string] bookindex_unordered_hashes -    ) { -      auto mainkeys= -        bookindex_unordered_hashes.byKey.array.sort().release; -      foreach (mainkey; mainkeys) { -        debug(bookindex) { -          writeln(mainkey); -        } -        auto subkeys= -          bookindex_unordered_hashes[mainkey].byKey.array.sort().release; -        foreach (subkey; subkeys) { -          debug(bookindex) { -            writeln("  ", subkey); -            writeln("    ", to!string( -              bookindex_unordered_hashes[mainkey][subkey] -            )); -          } -          // bookindex_the[mkn][mainkey][skn][subkey] ~= (bookindex_unordered_hashes[mainkey][subkey]); -          skn++; -        } -        mkn++; -      } -    } -  } -  struct BookIndexReportSection { -    mixin ObjectSetters; -    int mkn, skn; -    auto rgx = Rgx(); -    auto bookindex_write_section( -      string[][string][string] bookindex_unordered_hashes -    ) { -      auto mainkeys=bookindex_unordered_hashes.byKey.array.sort().release; -      foreach (mainkey; mainkeys) { -        write("_0_1 !{", mainkey, "}! "); -        foreach (ref_; bookindex_unordered_hashes[mainkey]["_a"]) { -          auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -          write(" {", ref_, "}#", go, ", "); -        } -        writeln(" \\\\"); -        bookindex_unordered_hashes[mainkey].remove("_a"); -        auto subkeys= -          bookindex_unordered_hashes[mainkey].byKey.array.sort().release; -        foreach (subkey; subkeys) { -          write("  ", subkey, ", "); -          foreach (ref_; bookindex_unordered_hashes[mainkey][subkey]) { -            auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -            write(" {", ref_, "}#", go, ", "); -          } -          writeln(" \\\\"); -          skn++; -        } -        mkn++; -      } -    } -    auto bookindex_build_section( -      string[][string][string] bookindex_unordered_hashes, -      int ocn -    ) { -      string type; -      int type_heading; -      string lev, lvn, lcn; -      string attrib; -      string indent_first; -      string indent_second; -      auto set_oa = ObjectAbstractSet(); -      auto mainkeys = -        bookindex_unordered_hashes.byKey.array.sort().release; -      string bi_tmp; -      string[string][] bookindex; -      writeln(mainkeys.length); -      // B~ Book Index -      type_heading=1; -      bi_tmp = "Book Index"; -      attrib=""; -      lev="B"; -      lvn="1"; -      lcn="1"; -      bookindex ~= -        set_oa.contents_heading( -          type_heading, -          bi_tmp, -          attrib, -          ocn, -          lev, -          lvn, -          lcn -        ); -      ocn++; -      mkn++; -      // 1~ Index -      type_heading=1; -      bi_tmp = "Index"; -      attrib=""; -      lev="1"; -      lvn="4"; -      lcn="2"; -      bookindex ~= -        set_oa.contents_heading( -          type_heading, -          
bi_tmp, -          attrib, -          ocn, -          lev, -          lvn, -          lcn -        ); -      ocn++; -      mkn++; -      foreach (mainkey; mainkeys) { -        bi_tmp = "!{" ~ mainkey ~ "}! "; -        // bi_tmp = "_0_1 !{" ~ mainkey ~ "}! "; -        foreach (ref_; bookindex_unordered_hashes[mainkey]["_a"]) { -          auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -          bi_tmp ~= " {" ~ ref_ ~ "}#" ~ go ~ ", "; -        } -        bi_tmp ~= " \\\\\n    "; -        bookindex_unordered_hashes[mainkey].remove("_a"); -        auto subkeys = -          bookindex_unordered_hashes[mainkey].byKey.array.sort().release; -        foreach (subkey; subkeys) { -          bi_tmp ~= subkey ~ ", "; -          foreach (ref_; bookindex_unordered_hashes[mainkey][subkey]) { -            auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -            bi_tmp ~= " {" ~ ref_ ~ "}#" ~ go ~ ", "; -          } -          bi_tmp ~= " \\\\\n    "; -          skn++; -        } -        bi_tmp = replaceFirst(bi_tmp, rgx.trailing_linebreak, ""); -        type="para"; -        attrib=""; -        indent_first = "0"; -        indent_second = "1"; -        attrib=""; -        bookindex ~= -          set_oa.contents_para( -            type, -            bi_tmp, -            attrib, -            ocn, -            indent_first, -            indent_second, -            false -          ); -        ocn++; -        mkn++; -      } -      auto t = tuple(bookindex, ocn); -      return t; -    } -    auto bookindex_build_section_( -      string[][string][string] bookindex_unordered_hashes -    ) { -      auto mainkeys = -        bookindex_unordered_hashes.byKey.array.sort().release; -      string bi_tmp; -      string[] bookindex; -      // int bi_num; -      writeln(mainkeys.length); -      foreach (mainkey; mainkeys) { -        bi_tmp = "_0_1 !{" ~ mainkey ~ "}! 
"; -        foreach (ref_; bookindex_unordered_hashes[mainkey]["_a"]) { -          auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -          bi_tmp ~= " {" ~ ref_ ~ "}#" ~ go ~ ", "; -        } -        bi_tmp ~= " \\\\\n    "; -        bookindex_unordered_hashes[mainkey].remove("_a"); -        auto subkeys = -          bookindex_unordered_hashes[mainkey].byKey.array.sort().release; -        foreach (subkey; subkeys) { -          bi_tmp ~= subkey ~ ", "; -          // bi_tmp ~= "  " ~ subkey ~ ", "; -          foreach (ref_; bookindex_unordered_hashes[mainkey][subkey]) { -            auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -            bi_tmp ~= " {" ~ ref_ ~ "}#" ~ go ~ ", "; -          } -          bi_tmp ~= " \\\\\n    "; -          skn++; -        } -        bi_tmp = replaceFirst(bi_tmp, rgx.trailing_linebreak, ""); -        bookindex ~= bi_tmp; -        mkn++; -      } -      return bookindex; -    } -  } -  struct NotesSection { -    mixin ObjectSetters; -    string object_notes; -    ulong previous_count; -    int mkn; -    auto rgx = Rgx(); -    private auto gather_notes_for_endnote_section( -      string[string][] contents_arbitrary_max_length_set, -      ulong counter -    ) -    in { -      // endnotes/ footnotes for -      // doc objects other than paragraphs & headings -      // various forms of grouped text -      assert((contents_arbitrary_max_length_set[counter]["is"] == "para") -      || (contents_arbitrary_max_length_set[counter]["is"] == "heading")); -      assert(counter > previous_count); -      previous_count=counter; -      assert( -        match(contents_arbitrary_max_length_set[counter]["obj"], -        rgx.inline_notes_delimiter_al_regular_number_note) -      ); -    } -    body { -      foreach(m; -      matchAll(contents_arbitrary_max_length_set[counter]["obj"], -      rgx.inline_notes_delimiter_al_regular_number_note)) { -        debug(endnotes_build) { -          writeln( -            "{^{", m.captures[1], ".}^}#noteref_", m.captures[1], " ", -            m.captures[2]); // sometimes need segment name (segmented html & epub) -          // writeln("{^{", m.captures[1], ".}^}#", contents_arbitrary_max_length_set[counter]["ocn"], " ", m.captures[2]); -        } -        object_notes ~= -          "{^{" ~ m.captures[1] ~ ".}^}#noteref_" ~ -          m.captures[1] ~ " " ~ m.captures[2] ~ "』"; -      } -      return object_notes; -    } -    private auto gathered_notes() -    in { -    } -    body { -      string[] endnotes_; -      if (object_notes.length > 1) { -        endnotes_ = (split(object_notes, rgx.break_string))[0..$-1]; -      } -      return endnotes_; -    } -    private auto endnote_objects(int ocn) -    in { -    } -    body { -      auto set_oa = ObjectAbstractSet(); -      string[string][] endnotes; -      auto endnotes_ = gathered_notes(); -      // auto endnotes_ = (split(object_notes, rgx.break_string))[0..$-1]; -      string type; -      int type_heading; -      string lev, lvn, lcn; -      string attrib; -      string indent_first; -      string indent_second; -      // B~ Endnotes -      type_heading=1; -      attrib=""; -      lev="B"; -      lvn="1"; -      lcn="1"; -      endnotes ~= -        set_oa.contents_heading( -          type_heading, -          "Endnotes", -          attrib, -          ocn, -          lev, -          lvn, -          lcn -        ); -      ocn++; -      mkn++; -      // 1~ Endnotes -      type_heading=1; -      attrib=""; -      lev="1"; -      lvn="4"; -      lcn="2"; -      endnotes ~= -        
set_oa.contents_heading( -          type_heading, -          "Endnotes", -          attrib, -          ocn, -          lev, -          lvn, -          lcn -        ); -      ocn++; -      mkn++; -      foreach (endnote; endnotes_) { -        type="para"; -        attrib=""; -        indent_first = "0"; -        indent_second = "0"; -        attrib=""; -        endnotes ~= -          set_oa.contents_para( -            type, -            endnote, -            attrib, -            ocn, -            indent_first, -            indent_second, -            false -          ); -        ocn++; -        mkn++; -      } -      auto t = tuple(endnotes, ocn); -      return t; -    } -  } -  struct Bibliography { -    public JSONValue[] bibliography(ref string[] biblio_unsorted_incomplete, ref JSONValue[] bib_arr_json) -    in { } -    body { -      JSONValue[] biblio_unsorted = -        biblio_unsorted_complete(biblio_unsorted_incomplete, bib_arr_json); -      JSONValue[] biblio_sorted = biblio_sort(biblio_unsorted); -      biblio_debug(biblio_sorted); -      return biblio_sorted; -    } -    final private JSONValue[] biblio_unsorted_complete( -      string[] biblio_unordered, -      ref JSONValue[] bib_arr_json -    ) { -      // JSONValue[] bib_arr_json; -      // int count_biblio_entry; -      // count_biblio_entry=0; // watch -      foreach (bibent; biblio_unordered) { -        // update bib to include deemed_author, needed for: -        // sort_bibliography_array_by_deemed_author_year_title -        // either: sort on multiple fields, or; create such sort field -        JSONValue j = parseJSON(bibent); -        if (!empty(j["fulltitle"].str)) { -          if (!empty(j["author_raw"].str)) { -            j["deemed_author"]=j["author_arr"][0]; -          } else if (!empty(j["editor_raw"].str)) { -            j["deemed_author"]=j["editor_arr"][0]; -          } -          j["sortby_deemed_author_year_title"] = ( -            j["deemed_author"].str ~ -             "; " ~ -             j["year"].str ~ -             "; "  ~ -             j["fulltitle"].str -          ); -          // bib[count_biblio_entry] = j.toString(); -        } -        bib_arr_json ~= j; -        // count_biblio_entry++; -        // bib_arr_json[count_biblio_entry] = j; -        // count_biblio_entry++; -      } -      JSONValue[] biblio_unsorted_array_of_json_objects = -        bib_arr_json.dup; -      return biblio_unsorted_array_of_json_objects; -    } -    final private JSONValue[] biblio_sort(JSONValue[] biblio_unordered) { -      JSONValue[] biblio_sorted; -      biblio_sorted = -        sort!((a, b){ -          return ((a["sortby_deemed_author_year_title"].str) < (b["sortby_deemed_author_year_title"].str)); -        })(biblio_unordered).array; -      debug(bibliosorted) { -        foreach (j; biblio_sorted) { -          if (!empty(j["fulltitle"].str)) { -            writeln(j["sortby_deemed_author_year_title"]); -            // writeln(j["deemed_author"], " (", j["author"], ") ",  j["fulltitle"]); -          } -        } -      } -      return biblio_sorted; -    } -    auto biblio_debug(JSONValue[] biblio_sorted) { -      debug(biblio) { -        foreach (j; biblio_sorted) { -          if (!empty(j["fulltitle"].str)) { -            writeln(j["sortby_deemed_author_year_title"]); -          } -        } -      } -    } -  } -  struct NodeStructureMetadata { -  // class NodeStructureMetadata : AssertNodeJSON { -    int lv, lv0, lv1, lv2, lv3, lv4, lv5, lv6, lv7; -    uint ocn; -    uint[string] p_; // p_ parent_ -    string 
node; -    string node_emitter( -      string lvn, -      int ocn_, -      int counter_, -      int pointer_, -      string is_ -    ) -    in { -      auto rgx = Rgx(); -      assert(is_ != "heading"); -      assert(to!int(ocn_) >= 0); -    } -    body { -      // scope(failure) { -      //   writeln(__FILE__, ":", __LINE__, " failed here:"); -      //   writeln("  is  : ", is_); -      //   writeln("  node: ", node); -      // } -      assert(is_ != "heading"); // should not be necessary -      assert(to!int(ocn_) >= 0); // should not be necessary -      uint ocn=to!uint(ocn_); -      if (lv7 > 0) { -        p_["lvn"] = 7; p_["ocn"] = lv7; -      } else if (lv6 > 0) { -        p_["lvn"] = 6; p_["ocn"] = lv6; -      } else if (lv5 > 0) { -        p_["lvn"] = 5; p_["ocn"] = lv5; -      } else { -        p_["lvn"] = 4; p_["ocn"] = lv4; -      } -      node=("{ " ~ -        "\"is\": \"" ~ is_ ~ "\"" ~ -        ", \"heading_pointer\": " ~ to!string(pointer_) ~ -        ", \"doc_object_pointer\": " ~ to!string(counter_) ~ -        ", \"ocn\": " ~ to!string(ocn_) ~ -        ", \"parent_ocn\": " ~ to!string(p_["ocn"]) ~ -        ", \"parent_lvn\": " ~ to!string(p_["lvn"]) ~ -        " }" -      ); -      debug(node) { -        mixin ScreenTxtColors; -        if (match(lvn, rgx.levels_numbered_headings)) { -          writeln(scr_txt_marker["yellow"], to!string(node)); -        } else { -          writeln(scr_txt_marker["white"], to!string(node)); -        } -      } -      JSONValue j = parseJSON(node); -      assert(j["parent_lvn"].integer >= 4); -      assert(j["parent_lvn"].integer <= 7); -      assert(j["parent_ocn"].integer >= 0); -      return node; -    } -    invariant() { -    } -    string node_emitter_heading( -      string lvn, -      string lcn, -      int ocn_, -      int counter_, -      int pointer_, -      string is_ -    ) -    in { -      auto rgx = Rgx(); -      assert(is_ == "heading"); -      assert(to!uint(ocn_) >= 0); -      assert( -        match(lvn, rgx.levels_numbered), -        ("not a valid heading level: " ~ lvn ~ " at " ~ to!string(ocn_)) -      ); -      // assert(to!uint(ocn_) >= 0); -      if (match(lvn, rgx.levels_numbered)) { -        if (to!uint(lvn) == 0) { -          assert(to!uint(ocn_) == 1); -          // writeln(lvn); -        } -      } -    } -    body { -      // scope(failure) { -      //   writeln(__FILE__, ":", __LINE__, " failed here:"); -      //   writeln("  is  : ", is_); -      //   writeln("  node: ", node); -      // } -      auto rgx = Rgx(); -      uint ocn=to!uint(ocn_); -      switch (lvn) { // switch (to!string(lv)) { -      case "0": -        lv=0; -        lv0=ocn; lv1=0; lv2=0; lv3=0; lv4=0; lv5=0; lv6=0; lv7=0; -        p_["lvn"] = 0; p_["ocn"] = 0; -        break; -      case "1": -        lv=1; -        lv1=ocn; lv2=0; lv3=0; lv4=0; lv5=0; lv6=0; lv7=0; -        p_["lvn"] = 0; p_["ocn"] = lv0; -        break; -      case "2": -        lv=2; -        lv2=ocn; lv3=0; lv4=0; lv5=0; lv6=0; lv7=0; -        p_["lvn"] = 1; p_["ocn"] = lv1; -        break; -      case "3": -        lv=3; -        lv3=ocn; lv4=0; lv5=0; lv6=0; lv7=0; -        p_["lvn"] = 2; p_["ocn"] = lv2; -        break; -      case "4": -        lv=4; -        lv4=ocn; lv5=0; lv6=0; lv7=0; -        if (lv3 > 0) { -          p_["lvn"] = 3; p_["ocn"] = lv3; -        } else if (lv2 > 0) { -          p_["lvn"] = 2; p_["ocn"] = lv2; -        } else if (lv1 > 0) { -          p_["lvn"] = 1; p_["ocn"] = lv1; -        } else { -          p_["lvn"] = 0; p_["ocn"] = lv0; -        
} -        break; -      case "5": -        lv=5; -        lv5=ocn; lv6=0; lv7=0; -        p_["lvn"] = 4; p_["ocn"] = lv4; -        break; -      case "6": -        lv=6; -        lv6=ocn; lv7=0; -        p_["lvn"] = 5; p_["ocn"] = lv5; -        break; -      case "7": -        lv=7; -        lv7=ocn; -        p_["lvn"] = 6; p_["ocn"] = lv6; -        break; -      default: -        // if (lv7 > 0) { -        //   p_["lvn"] = 7; p_["ocn"] = lv7; -        // } else if (lv6 > 0) { -        //   p_["lvn"] = 6; p_["ocn"] = lv6; -        // } else if (lv5 > 0) { -        //   p_["lvn"] = 5; p_["ocn"] = lv5; -        // } else { -        //   p_["lvn"] = 4; p_["ocn"] = lv4; -        // } -        break; -      } -      node=("{ " ~ -        "\"is\": \"" ~ is_ ~ "\"" ~ -        ", \"heading_pointer\": " ~ to!string(pointer_) ~ -        ", \"doc_object_pointer\": " ~ to!string(counter_) ~ -        ", \"ocn\": " ~ to!string(ocn_) ~ -        ",  \"lvn\": " ~ to!string(lvn) ~ -        ",  \"lcn\": " ~ to!string(lcn) ~ -        ", \"parent_ocn\": " ~ to!string(p_["ocn"]) ~ -        ", \"parent_lvn\": " ~ to!string(p_["lvn"]) ~ -        " }" -      ); -      debug(heading) { -        mixin ScreenTxtColors; -        if (match(lvn, rgx.levels_numbered_headings)) { -          writeln(scr_txt_marker["yellow"], to!string(node)); -        } -      } -      debug(node) { -        mixin ScreenTxtColors; -        if (match(lvn, rgx.levels_numbered_headings)) { -          writeln(scr_txt_marker["yellow"], to!string(node)); -        } else { -          writeln(scr_txt_marker["white"], to!string(node)); -        } -      } -      JSONValue j = parseJSON(node); -      assert(j["parent_lvn"].integer <= 7); -      assert(j["parent_ocn"].integer >= 0); -      if (match(lvn, rgx.levels_numbered_headings)) { -        assert(j["lvn"].integer <= 7); -        assert(j["ocn"].integer >= 0); -        if (j["parent_lvn"].integer > 0) { -          assert(j["parent_lvn"].integer < j["lvn"].integer); -          if (j["ocn"].integer != 0) { -            assert(j["parent_ocn"].integer < j["ocn"].integer); -          } -        } -        if (j["lvn"].integer == 0) { -          assert(j["parent_lvn"].integer == 0); -        } else if  (j["lvn"].integer == 1) { -          assert(j["parent_lvn"].integer == 0); -        } else if  (j["lvn"].integer == 2) { -          assert(j["parent_lvn"].integer == 1); -        } else if  (j["lvn"].integer == 3) { -          assert(j["parent_lvn"].integer == 2); -        } else if  (j["lvn"].integer == 4) { -          assert(j["parent_lvn"].integer <= 3); -        } else if  (j["lvn"].integer == 5) { -          assert(j["parent_lvn"].integer == 4); -        } else if  (j["lvn"].integer == 6) { -          assert(j["parent_lvn"].integer == 5); -        } else if  (j["lvn"].integer == 7) { -          assert(j["parent_lvn"].integer == 6); -        } else if  (j["lvn"].integer == 8) { -          // writeln(j["parent_lvn"].integer); -          // assert(j["parent_lvn"].integer >= 4); -          // assert(j["parent_lvn"].integer <= 7); -        } -      } -      return node; -    } -    invariant() { -    } -  } -} diff --git a/lib/sdp/version.txt b/lib/sdp/version.txt deleted file mode 100644 index f4dde1a..0000000 --- a/lib/sdp/version.txt +++ /dev/null @@ -1,8 +0,0 @@ -/+ obt - org generated file +/ -// [used by rdmd] -struct Version { -  int major; -  int minor; -  int patch; -} -enum ver = Version(1, 0, 0); @@ -1,19 +1,20 @@  DMD=dmd -DMD_FLAGS=-de -w -J./lib +DMD_FLAGS=-de -w -J./views -I./src/sdp  
DMD_FLAGS_RELEASE=-O -release  DMD_FLAG_BINOF=-of  LDC=ldc2 -LDC_FLAGS=-w -J=lib +LDC_FLAGS=-w -J=views -I=src/sdp  LDC_FLAGS_RELEASE=-O4 -release  LDC_FLAG_BINOF=-of=  GDC=gdc  #GDC=gdc-5  GDC_FLAGS=  GDC_FLAGS_RELEASE=-march=native -O3 -pipe -frelease -#GDC_FLAGS_RELEASE=-frelease  GDC_FLAG_BINOF=-o  RDMD=rdmd  RDMD_FLAGS=--build-only --compiler= +DUB=dub +DUB_FLAGS=-v --force --compiler=  # SET_D_COMPILER=(one of: DMD LDC or GDC):  SET_D_COMPILER=DMD  SET_DC_FLAGS_DEBUG_EXTRA=-debug=dumpdoc @@ -33,9 +34,10 @@ endif  ifeq ($(DC), $(GDC))  	DC_FLAGS_DEBUG :=$(shell echo $(DC_FLAGS_DEBUG_SET)| sed -e "s/-debug/-fdebug/g")  endif +DUB_FLAGS_DEBUG :=$(shell echo $(DC_FLAGS_DEBUG_SET)| sed -e "s/-debug/--debug/g"| sed -e "s/-unittest//g")  PRG_NAME=sdp  PRG_SRC=$(PRG_NAME).d -PRG_SRCDIR=./lib +PRG_SRCDIR=./src  PRG_BIN=$(PRG_NAME)  PRG_BINDIR=./bin  # ORG @@ -57,6 +59,10 @@ rebuild: $(PRG_SRCDIR)/$(PRG_SRC) $(PRG_BINDIR)/$(PRG_BIN).o clean build  makefile_new:  	make -k tangle_maker +dub: $(PRG_SRCDIR)/$(PRG_SRC) +	$(DUB) $(DUB_FLAGS)$(DC) +debug_dub: $(PRG_SRCDIR)/$(PRG_SRC) +	$(DUB) $(DUB_FLAGS)$(DC) $(DUB_FLAGS_DEBUG)  rdmd: $(PRG_SRCDIR)/$(PRG_SRC)  	$(RDMD) $(RDMD_FLAGS)$(DC) $(DC_FLAGS) \  	$(DC_FLAG_BINOF)$(PRG_BINDIR)/$(PRG_BIN) \ @@ -20,7 +20,7 @@  #+BEGIN_SRC makefile :tangle makefile  DMD=dmd -DMD_FLAGS=-de -w -J./lib +DMD_FLAGS=-de -w -J./views -I./src/sdp  DMD_FLAGS_RELEASE=-O -release  DMD_FLAG_BINOF=-of  #+END_SRC @@ -29,7 +29,7 @@ DMD_FLAG_BINOF=-of  #+BEGIN_SRC makefile :tangle makefile  LDC=ldc2 -LDC_FLAGS=-w -J=lib +LDC_FLAGS=-w -J=views -I=src/sdp  LDC_FLAGS_RELEASE=-O4 -release  LDC_FLAG_BINOF=-of=  #+END_SRC @@ -41,17 +41,24 @@ GDC=gdc  #GDC=gdc-5  GDC_FLAGS=  GDC_FLAGS_RELEASE=-march=native -O3 -pipe -frelease -#GDC_FLAGS_RELEASE=-frelease  GDC_FLAG_BINOF=-o  #+END_SRC -**** rdmd +*** build tools                                                  :build:tool: +**** rdmd                                                              :rdmd:  #+BEGIN_SRC makefile :tangle makefile  RDMD=rdmd  RDMD_FLAGS=--build-only --compiler=  #+END_SRC +**** dub                                                                :dub: + +#+BEGIN_SRC makefile :tangle makefile +DUB=dub +DUB_FLAGS=-v --force --compiler= +#+END_SRC +  *** TODO set/select: ~D compiler~ & ~debug flags~ [+1] [2/2]   :select:  - [X] Set D_COMPILER (one of DMD LDC or GDC)  - [X] Set debug flags (using DMD standard flag -debug=) @@ -77,6 +84,7 @@ re: dmd "one can easily get 2x (and even more) speedup by simply switching to gd  Set debug flags using DMD standard flag -debug= e.g.:    SET_DC_FLAGS_DEBUG_EXTRA=-debug=headings -debug=bookindex    SET_DC_FLAGS_DEBUG_EXTRA=-debug=headings -debug=footnotes -debug=endnotes +  SET_DC_FLAGS_DEBUG_EXTRA=-debug=headings -debug=biblio  ***** TODO [#A] set debug flags  #+BEGIN_SRC makefile :tangle makefile @@ -163,6 +171,7 @@ endif  ifeq ($(DC), $(GDC))  	DC_FLAGS_DEBUG :=$(shell echo $(DC_FLAGS_DEBUG_SET)| sed -e "s/-debug/-fdebug/g")  endif +DUB_FLAGS_DEBUG :=$(shell echo $(DC_FLAGS_DEBUG_SET)| sed -e "s/-debug/--debug/g"| sed -e "s/-unittest//g")  #+END_SRC  *** Project Details                                             :project:sdp: @@ -170,7 +179,7 @@ endif  #+BEGIN_SRC makefile :tangle makefile  PRG_NAME=sdp  PRG_SRC=$(PRG_NAME).d -PRG_SRCDIR=./lib +PRG_SRCDIR=./src  PRG_BIN=$(PRG_NAME)  PRG_BINDIR=./bin  #+END_SRC @@ -221,6 +230,14 @@ makefile_new:  	make -k tangle_maker  #+END_SRC +**** dub build rebuild +#+BEGIN_SRC makefile :tangle makefile +dub: $(PRG_SRCDIR)/$(PRG_SRC) +	
$(DUB) $(DUB_FLAGS)$(DC) +debug_dub: $(PRG_SRCDIR)/$(PRG_SRC) +	$(DUB) $(DUB_FLAGS)$(DC) $(DUB_FLAGS_DEBUG) +#+END_SRC +  **** rdmd build rebuild  #+BEGIN_SRC makefile :tangle makefile @@ -380,7 +397,7 @@ gitsnapshot: distclean tangle  * sh script to batch process emacs org babel tangle     :shell_script:tangle:    [http://orgmode.org/manual/Batch-execution.html]  creates a shell batch script called "tangle", that will tangle (emacs org -babel tangle) org files in ./org/ to create .d source files in ./lib/sdp/ +babel tangle) org files in ./org/ to create .d source files in ./src/sdp/  (similar functionality is contained within the "makefile" created by this  "maker.org" file make tangle) @@ -409,3 +426,60 @@ emacs --batch -Q -q \    (org-babel-tangle)    (kill-buffer)) '($ORGFILES)))" 2>&1 #|grep tangled  #+END_SRC + +* D build notes                                                       :notes: +** compilers                                                       :compiler: +*** [#A] dmd + +#+BEGIN_SRC sh +dmd -de -w -J./views -I./src/sdp -unittest -debug=checkdoc -debug=summary -debug=dumpdoc -of./bin/sdp ./src/sdp.d +dmd -de -w -J./views -I./src/sdp -of./bin/sdp ./src/sdp.d +dmd -de -w -J./views -I./src/sdp -O -release -of./bin/sdp ./src/sdp.d +#+END_SRC + +*** [#A] ldc2 + +#+BEGIN_SRC sh +ldc2 -de -w -J./views -I./src/sdp -unittest -d-debug=checkdoc -d-debug=summary -d-debug=dumpdoc -of=./bin/sdp ./src/sdp.d +ldc2 -de -w -J./views -I./src/sdp -of=./bin/sdp ./src/sdp.d +ldc2 -de -w -J./views -I./src/sdp -O4 -release -of=./bin/sdp ./src/sdp.d +#+END_SRC + +*** TODO gdc + +not done + +#+BEGIN_SRC sh +#+END_SRC + +** build tools                                                   :build:tool: +*** [#B] rdmd + +flags similar to dmd + +#+BEGIN_SRC sh +rdmd -de -w -J./views -I./src/sdp -unittest -debug=checkdoc -debug=summary -debug=dumpdoc -of./bin/sdp ./src/sdp.d +rdmd -de -w -J./views -I./src/sdp -of./bin/sdp ./src/sdp.d +rdmd -de -w -J./views -I./src/sdp -O -release -of./bin/sdp ./src/sdp.d + +#dmd +rdmd -de -w -J./views -I./src/sdp -unittest -debug=checkdoc -debug=summary -debug=dumpdoc --compiler=ldc2 -of./bin/sdp ./src/sdp.d + +#ldc +rdmd -de -w -J./views -I./src/sdp -unittest -d-debug=checkdoc -d-debug=summary -d-debug=dumpdoc --compiler=ldc2 -of./bin/sdp ./src/sdp.d +#+END_SRC + +*** [#A] dub +https://github.com/dlang/dub/ +http://code.dlang.org/getting_started +http://code.dlang.org/docs/commandline +Every DUB package should contain a [[http://code.dlang.org/package-format?lang=json][dub.json]] or [[http://code.dlang.org/package-format?lang=sdl][dub.sdl]] + +#+BEGIN_SRC sh +dub -v --combined --build-mode allAtOnce --compiler ldc2 --build release # --annotate +dub build sdp -v --combined --build-mode allAtOnce --compiler ldc2 +dub describe +dub build sdp -v --force --debug=checkdoc --debug=dumpdoc +dub build sdp -v --force --debug=checkdoc --debug=dumpdoc --compiler dmd +dub build sdp -v --force --debug=checkdoc --debug=dumpdoc --compiler ldc2 +#+END_SRC diff --git a/org/ao_abstract_doc_source.org b/org/ao_abstract_doc_source.org index cc4fd47..796d6b2 100644 --- a/org/ao_abstract_doc_source.org +++ b/org/ao_abstract_doc_source.org @@ -24,10 +24,10 @@ Process markup document, create document abstraction.  
#+name: abs_imports  #+BEGIN_SRC d  import -  lib.sdp.ao_defaults,                  // sdp/ao_defaults.d -  lib.sdp.ao_object_setter,             // sdp/ao_object_setter.d -  lib.sdp.ao_rgx,                       // sdp/ao_rgx.d -  lib.sdp.ao_ansi_colors;               // sdp/ao_ansi_colors.d +  ao_defaults,                  // sdp/ao_defaults.d +  ao_object_setter,             // sdp/ao_object_setter.d +  ao_rgx,                       // sdp/ao_rgx.d +  ao_ansi_colors;               // sdp/ao_ansi_colors.d  #+END_SRC  *** mixins                                                           :mixins: @@ -4428,7 +4428,7 @@ auto contents_block_obj_cite_number_string(  * tangles (code structure)                                   :tangle:io:file:  ** abstract doc source:                            :ao_abstract_doc_source.d: -#+BEGIN_SRC d :tangle ../lib/sdp/ao_abstract_doc_source.d +#+BEGIN_SRC d :tangle ../src/sdp/ao_abstract_doc_source.d  /+    document abstraction    ao_abstract_doc_source.d @@ -4491,7 +4491,7 @@ template SiSUdocAbstraction() {  #+END_SRC  ** ao_object_setter:                                     :ao_object_setter.d: -#+BEGIN_SRC d :tangle ../lib/sdp/ao_object_setter.d +#+BEGIN_SRC d :tangle ../src/sdp/ao_object_setter.d  /+    object setter    ao_object_setter.d diff --git a/org/ao_defaults.org b/org/ao_defaults.org index ed01540..9d5eb00 100644 --- a/org/ao_defaults.org +++ b/org/ao_defaults.org @@ -264,7 +264,9 @@ template SiSUbiblio() {  #+BEGIN_SRC d  template InternalMarkup() {    struct InlineMarkup { +  // endnote en_a_o: '~{'; en_a_c: '}~'      auto en_a_o = "【"; auto en_a_c = "】"; +  // endnote en_b_o: '~['; en_b_c: ']~'      auto en_b_o = "〖"; auto en_b_c = "〗";      // auto segname_prefix_auto_num_extract = "c";      // auto segname_prefix_auto_num_provide = "s"; @@ -667,7 +669,7 @@ string[string] scr_txt_marker = [  * tangles (code structure)                                           :tangle:  ** defaults:                                              :ao_defaults.d:  #+name: tangle_ao_defaults -#+BEGIN_SRC d :tangle ../lib/sdp/ao_defaults.d +#+BEGIN_SRC d :tangle ../src/sdp/ao_defaults.d  /+    defaults    ao_defaults.d @@ -677,7 +679,7 @@ string[string] scr_txt_marker = [  ** rgx:                                                        :ao_rgx.d:  #+name: tangle_ao_rgx -#+BEGIN_SRC d :tangle ../lib/sdp/ao_rgx.d +#+BEGIN_SRC d :tangle ../src/sdp/ao_rgx.d  /+    regex    ao_rgx.d @@ -690,7 +692,7 @@ template RgxInit() {  #+END_SRC  ** ansi_colors:                                        :ao_ansi_colors.d: -#+BEGIN_SRC d :tangle ../lib/sdp/ao_ansi_colors.d +#+BEGIN_SRC d :tangle ../src/sdp/ao_ansi_colors.d  /+    utils    ao_util.d diff --git a/org/ao_emitter.org b/org/ao_emitter.org deleted file mode 100644 index 0ecbc2e..0000000 --- a/org/ao_emitter.org +++ /dev/null @@ -1,1568 +0,0 @@ -#+TITLE: sdp emitters & interfaces -#+AUTHOR: Ralph Amissah -#+EMAIL: ralph.amissah@gmail.com -#+STARTUP: indent -#+LANGUAGE: en -#+OPTIONS: H:3 num:nil toc:t \n:nil @:t ::t |:t ^:nil _:nil -:t f:t *:t <:t -#+OPTIONS: TeX:t LaTeX:t skip:nil d:nil todo:t pri:nil tags:not-in-toc -#+OPTIONS: author:nil email:nil creator:nil timestamp:nil -#+PROPERTY: header-args :padline no :exports code :noweb yes -#+EXPORT_SELECT_TAGS: export -#+EXPORT_EXCLUDE_TAGS: noexport -#+FILETAGS: :sdp:dev:ao: -#+TAGS: assert(a) class(c) debug(d) mixin(m) sdp(s) tangle(T) template(t) WEB(W) noexport(n) - -* emitters -[[./sdp.org][sdp]] -ao_emitter.d -various emitters and their interfaces (where available) -** 
command line interface/instructions -#+name: ao_emitter -#+BEGIN_SRC d -struct CLI { -  string[string] extract_actions(string cmdlnins, string[string] actions) -  in { } -  body { -    switch (cmdlnins) { -    case "--no-assert": -      actions["assert"] = "no"; -      break; -    default: -      break; -    } -    return actions; -  } -} -#+END_SRC -** ocn -#+name: ao_emitter -#+BEGIN_SRC d -struct OCNemitter { -// class OCNemitter : AssertOCN { -  int ocn, ocn_; -  int ocn_emitter(int ocn_status_flag) -  in { assert(ocn_status_flag <= 2); } -  body { -    if (ocn_status_flag == 0) { -      ocn=++ocn_; -    } else { -      ocn=0; -    } -    assert(ocn >= 0); -    return ocn; -  } -  invariant() { -  } -} -#+END_SRC -** object attributes -#+name: ao_emitter -#+BEGIN_SRC d -struct ObjAttributes { -// class ObjAttributes : AssertObjAttributes { -  string[string] obj_txt; -  string para_and_blocks(string obj_txt_in) -  in { } -  body { -    auto rgx = Rgx(); -    obj_txt["munge"]=obj_txt_in; -    if (match(obj_txt_in, rgx.para_bullet)) { -      obj_txt["attrib"] =" \"bullet\": \"true\"," -      ~ " \"indent_first\": 0," -      ~ " \"indent_rest\": 0,"; -    } else if (auto m = match(obj_txt_in, rgx.para_bullet_indent)) { -      obj_txt["attrib"] =" \"bullet\": \"true\"," -      ~ " \"indent_first\": " ~ to!string(m.captures[1]) ~ "," -      ~ " \"indent_rest\": " ~ to!string(m.captures[1]) ~ ","; -    } else if (auto m = match(obj_txt_in, rgx.para_indent_hang)) { -      obj_txt["attrib"] =" \"bullet\": \"false\"," -      ~ " \"indent_first\": " ~ to!string(m.captures[1]) ~ "," -      ~ " \"indent_rest\": " ~  to!string(m.captures[2]) ~ ","; -    } else if (auto m = match(obj_txt_in, rgx.para_indent)) { -      obj_txt["attrib"] =" \"bullet\": \"false\"," -      ~ " \"indent_first\": " ~ to!string(m.captures[1]) ~ "," -      ~ " \"indent_rest\": " ~ to!string(m.captures[1]) ~ ","; -    } else { -      obj_txt["attrib"] =" \"bullet\": \"false\"," -      ~ " \"indent_first\": 0," -      ~ " \"indent_rest\": 0,"; -    } -    return obj_txt["attrib"]; -  } -  string para(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"content\"," -    ~ " \"of\": \"para\"," -    ~ " \"is\": \"para\""; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -  string heading(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"content\"," -    ~ " \"of\": \"para\"," -    ~ " \"is\": \"heading\""; -    // obj_txt["struct"]=; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -  string header_make(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"head\"," -    ~ " \"of\": \"header\"," -    ~ " \"is\": \"header_make\""; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -  string header_metadata(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"head\"," -    ~ " \"of\": \"header\"," -    ~ " \"is\": \"header_metadata\""; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -  string code(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"content\"," -    ~ " \"of\": \"block\"," -    ~ " \"is\": \"code\""; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -  string group(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"content\"," - 
   ~ " \"of\": \"block\"," -    ~ " \"is\": \"group\""; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -  string block(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"content\"," -    ~ " \"of\": \"block\"," -    ~ " \"is\": \"block\""; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -  string verse(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"content\"," -    ~ " \"of\": \"block\"," -    ~ " \"is\": \"verse\""; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -  string quote(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"content\"," -    ~ " \"of\": \"block\"," -    ~ " \"is\": \"quote\""; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -  string table(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"content\"," -    ~ " \"of\": \"block\"," -    ~ " \"is\": \"table\""; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -  string comment(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["attrib"] = " \"use\": \"comment\"," -    ~ " \"of\": \"comment\"," -    ~ " \"is\": \"comment\""; -    return obj_txt["attrib"]; -  } -  invariant() { -  } -} -#+END_SRC -** object inline markup munge -#+name: ao_emitter -#+BEGIN_SRC d -struct ObjInlineMarkupMunge { -// struct ObjInlineMarkupMunge : AssertObjInlineMarkup { -  string[string] obj_txt; -  int n_foot, n_foot_reg, n_foot_sp_asterisk, n_foot_sp_plus; -  string obj_txt_out, tail, note; -  private auto initialize_note_numbers() { -    n_foot = 0; -    n_foot_reg = 0; -    n_foot_sp_asterisk = 0; -    n_foot_sp_plus = 0; -  } -  private auto object_notes_(string obj_txt_in) -  in { } -  body { -    auto rgx = Rgx(); -    auto mkup = InternalMarkup(); -    obj_txt_out = ""; -    tail = ""; -    obj_txt_in = replaceAll( -      obj_txt_in, -      rgx.inline_notes_curly_sp_asterisk, -      (mkup.en_a_o ~ "*" ~ " $1" ~ mkup.en_a_c) -    ); -    obj_txt_in = -      replaceAll( -        obj_txt_in, -        rgx.inline_notes_curly_sp_plus, -        (mkup.en_a_o ~ "+" ~ " $1" ~ mkup.en_a_c) -      ); -    obj_txt_in = -      replaceAll( -        obj_txt_in, -        rgx.inline_notes_curly, -        (mkup.en_a_o ~ " $1" ~ mkup.en_a_c) -      ); -    if (match(obj_txt_in, rgx.inline_notes_al_gen)) { -      foreach(m; matchAll(obj_txt_in, rgx.inline_text_and_note_al)) { -        if (match(obj_txt_in, rgx.inline_al_delimiter_open_asterisk)) { -          n_foot_sp_asterisk++; -          n_foot=n_foot_sp_asterisk; -        } else if (match(obj_txt_in, rgx.inline_al_delimiter_open_plus)) { -          n_foot_sp_plus++; -          n_foot=n_foot_sp_plus; -        } else { -          n_foot_reg++; -          n_foot=n_foot_reg; -        } -        obj_txt_out ~= replaceFirst( -          m.hit, -          rgx.inline_al_delimiter_open_regular, -          (mkup.en_a_o ~ to!string(n_foot)) -        ); -        tail = m.post; -        // if (!empty(m.post)) { -        //   tail = m.post; -        // } else { -        //   tail = ""; -        // } -      } -    } else { -      obj_txt_out = obj_txt_in; -    } -    debug(footnotes) { -      writeln(obj_txt_out, tail); -    } -    obj_txt_out = obj_txt_out ~ tail; -    debug(footnotesdone) { -      foreach(m; matchAll(obj_txt_out, -      (mkup.en_a_o ~ `\s*(.+?)` ~ mkup.en_a_c))) { -        
writeln(m.captures[1]); -        writeln(m.hit); -      } -    } -    return obj_txt_out; -  } -  string para(string obj_txt_in) -  in { } -  body { -    auto rgx = Rgx(); -    obj_txt["munge"]=obj_txt_in; -    obj_txt["munge"]=replaceFirst(obj_txt["munge"], rgx.para_attribs, ""); -    obj_txt["munge"]=replaceFirst(obj_txt["munge"], rgx.ocn_off_all, ""); -    obj_txt["munge"]=object_notes_(obj_txt["munge"]); -    debug(munge) { -      writeln(__LINE__); -      writeln(obj_txt_in); -      writeln(__LINE__); -      writeln(to!string(obj_txt["munge"])); -    } -    return obj_txt["munge"]; -  } -  string heading(string obj_txt_in) -  in { } -  body { -    auto rgx = Rgx(); -    obj_txt["munge"]=obj_txt_in; -    obj_txt["munge"]=replaceFirst(obj_txt["munge"], rgx.heading, ""); -    obj_txt["munge"]=replaceFirst(obj_txt["munge"], rgx.ocn_off_all, ""); -    obj_txt["munge"]=object_notes_(obj_txt["munge"]); -    debug(munge) { -      writeln(__LINE__); -      writeln(obj_txt_in); -      writeln(__LINE__); -      writeln(to!string(obj_txt["munge"])); -    } -    return obj_txt["munge"]; -  } -  invariant() { -  } -  string header_make(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    return obj_txt["munge"]; -  } -  invariant() { -  } -  string header_metadata(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    return obj_txt["munge"]; -  } -  invariant() { -  } -  string code(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    return obj_txt["munge"]; -  } -  invariant() { -  } -  string group(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["munge"]=object_notes_(obj_txt["munge"]); -    return obj_txt["munge"]; -  } -  invariant() { -  } -  string block(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["munge"]=object_notes_(obj_txt["munge"]); -    return obj_txt["munge"]; -  } -  invariant() { -  } -  string verse(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    obj_txt["munge"]=object_notes_(obj_txt["munge"]); -    return obj_txt["munge"]; -  } -  invariant() { -  } -  string quote(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    return obj_txt["munge"]; -  } -  invariant() { -  } -  string table(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    return obj_txt["munge"]; -  } -  invariant() { -  } -  string comment(string obj_txt_in) -  in { } -  body { -    obj_txt["munge"]=obj_txt_in; -    return obj_txt["munge"]; -  } -  invariant() { -  } -} -#+END_SRC -** object inline markup -#+name: ao_emitter -#+BEGIN_SRC d -struct ObjInlineMarkup { -// struct ObjInlineMarkup : AssertObjInlineMarkup { -  auto munge = ObjInlineMarkupMunge(); -  string[string] obj_txt; -  string obj_inline_markup(string obj_is_, string obj_raw) -  in { } -  body { -    obj_txt["munge"]=obj_raw.dup; -    obj_txt["munge"]=(match(obj_is_, ctRegex!(`verse|code`))) -      ? 
obj_txt["munge"] -      : strip(obj_txt["munge"]); -    switch (obj_is_) { -    case "header_make": -      obj_txt["munge"]=munge.header_make(obj_txt["munge"]); -      break; -    case "header_metadata": -      obj_txt["munge"]=munge.header_metadata(obj_txt["munge"]); -      break; -    case "heading": -      obj_txt["munge"]=munge.heading(obj_txt["munge"]); -      break; -    case "para": -      obj_txt["munge"]=munge.para(obj_txt["munge"]); -      break; -    case "code": -      obj_txt["munge"]=munge.code(obj_txt["munge"]); -      break; -    case "group": -      obj_txt["munge"]=munge.group(obj_txt["munge"]); -      break; -    case "block": -      obj_txt["munge"]=munge.block(obj_txt["munge"]); -      break; -    case "verse": -      obj_txt["munge"]=munge.verse(obj_txt["munge"]); -      break; -    case "quote": -      obj_txt["munge"]=munge.quote(obj_txt["munge"]); -      break; -    case "table": -      obj_txt["munge"]=munge.table(obj_txt["munge"]); -      break; -    case "comment": -      obj_txt["munge"]=munge.comment(obj_txt["munge"]); -      break; -    case "doc_end_reset": -      munge.initialize_note_numbers(); -      break; -    default: -      break; -    } -    return obj_txt["munge"]; -  } -  invariant() { -  } -} -#+END_SRC -** object attrib -#+name: ao_emitter -#+BEGIN_SRC d -struct ObjAttrib { -// struct ObjAttrib : AssertObjAttrib { -// auto sink = appender!(char[])(); -  auto attrib = ObjAttributes(); -  string[string] obj_attrib; -  string obj_attributes(string obj_is_, string obj_raw, string node) -  in { } -  body { -    // string s = "{ \"language\": \"D\", \"rating\": 3.14, \"code\": \"42\" }"; -    scope(exit) { -      // destroy(obj_is_); -      destroy(obj_raw); -      destroy(node); -    } -    JSONValue node_j = parseJSON(node); -    obj_attrib.remove("json"); -    obj_attrib["json"] ="{"; -    switch (obj_is_) { -    case "header_make": -      obj_attrib["json"] ~= attrib.header_make(obj_raw); -      break; -    case "header_metadata": -      obj_attrib["json"] ~= attrib.header_metadata(obj_raw); -      break; -    case "heading": -      obj_attrib["json"] ~= attrib.heading(obj_raw); // -      break; -    case "para": -      obj_attrib["json"] ~= attrib.para_and_blocks(obj_raw) -      ~ attrib.para(obj_raw); -      break; -    case "code": -      obj_attrib["json"] ~= attrib.code(obj_raw); -      break; -    case "group": -      obj_attrib["json"] ~= attrib.para_and_blocks(obj_raw) -      ~ attrib.group(obj_raw); -      break; -    case "block": -      obj_attrib["json"] ~= attrib.para_and_blocks(obj_raw) -      ~ attrib.block(obj_raw); -      break; -    case "verse": -      obj_attrib["json"] ~= attrib.verse(obj_raw); -      break; -    case "quote": -      obj_attrib["json"] ~= attrib.quote(obj_raw); -      break; -    case "table": -      obj_attrib["json"] ~= attrib.table(obj_raw); -      break; -    case "comment": -      obj_attrib["json"] ~= attrib.comment(obj_raw); -      break; -    default: -      obj_attrib["json"] ~= attrib.para(obj_raw); -      break; -    } -    obj_attrib["json"] ~=" }"; -    JSONValue oa_j = parseJSON(obj_attrib["json"]); -    assert( -      (oa_j.type == JSON_TYPE.OBJECT) && -      (node_j.type == JSON_TYPE.OBJECT) -    ); -    if (obj_is_ == "heading") { -      oa_j.object["ocn"] = node_j["ocn"]; -      oa_j.object["lvn"] = node_j["lvn"]; -      oa_j.object["lcn"] = node_j["lcn"]; -      oa_j.object["heading_pointer"] = -        node_j["heading_pointer"]; // check -      oa_j.object["doc_object_pointer"] = -        
node_j["doc_object_pointer"]; // check -    } -    oa_j.object["parent_ocn"] = node_j["parent_ocn"]; -    oa_j.object["parent_lvn"] = node_j["parent_lvn"]; -    obj_attrib["json"] = oa_j.toString(); -    debug(structattrib) { -      if (oa_j["is"].str() == "heading") { -        // writeln(__LINE__); -        writeln(obj_attrib["json"]); -        // writeln(node); -        writeln( -          "is: ", oa_j["is"].str(), -          "; ocn: ", oa_j["ocn"].integer() -        ); -      } -    } -    // obj_attrib["json"]="{}"; -    return obj_attrib["json"]; -  } -  invariant() { -  } -} -#+END_SRC -** header document metadata in json -#+name: ao_emitter -#+BEGIN_SRC d -struct HeaderDocMetadataMakeJson { -// class HeaderMetadataMakeHash : AssertHeaderMetadataMakeJson { -  auto rgx = Rgx(); -  string hm, hs; -  auto header_metadata_and_make_jsonstr( -    string header, -    JSONValue[string] dochead_metadata, -    JSONValue[string] dochead_make -  ) -  in { } -  body { -    scope(exit) { -      destroy(header); -      destroy(dochead_metadata); -      destroy(dochead_make); -    } -    if (auto t = match(header, rgx.head_main)) { -      char[][] obj_spl = split( -        cast(char[]) header, -        rgx.line_delimiter_ws_strip -      ); -      auto hm = to!string(t.captures[1]); -      if (match(hm, rgx.main_headers)) { -        foreach (line; obj_spl) { -          if (auto m = match(line, rgx.head_main)) { -            if (!empty(m.captures[2])) { -              if (hm == "creator") { -                dochead_metadata[hm]["author"].str = -                  to!string(m.captures[2]); -              } else if (hm == "title") { -                dochead_metadata[hm]["main"].str = -                  to!string(m.captures[2]); -              } else if (hm == "publisher") { -                dochead_metadata[hm]["name"].str = -                  to!string(m.captures[2]); -              } -            } -          } else if (auto s = match(line, rgx.head_sub)) { -            if (!empty(s.captures[2])) { -              auto hs = to!string(s.captures[1]); -              if ((hm == "make" ) -              && (dochead_make[hm].type() == JSON_TYPE.OBJECT)) { -                switch (hm) { -                case "make": -                  if (match(hs, rgx.subhead_make)) { -                    if (dochead_make[hm][hs].type() == JSON_TYPE.STRING) { -                      dochead_make[hm][hs].str = to!string(s.captures[2]); -                    } -                  } else { -                    writeln("not a valid header type:", hm, ":", hs); -                    destroy(hm); -                    destroy(hs); -                  } -                  break; -                default: -                  break; -                } -              } else if (dochead_metadata[hm].type() == JSON_TYPE.OBJECT) { -                switch (hm) { -                case "creator": -                  if (match(hs, rgx.subhead_creator)) { -                    if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                      dochead_metadata[hm][hs].str = -                        to!string(s.captures[2]); -                    } -                  } else { -                    writeln("not a valid header type:", hm, ":", hs); -                    destroy(hm); -                    destroy(hs); -                  } -                  break; -                case "title": -                  if (match(hs, rgx.subhead_title)) { -                    if ((hs == "subtitle") -                    && 
(dochead_metadata[hm]["sub"].type() == JSON_TYPE.STRING)) { -                      dochead_metadata[hm]["sub"].str = -                        to!string(s.captures[2]); -                    } else if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                      dochead_metadata[hm][hs].str = -                        to!string(s.captures[2]); -                    } -                  } else { -                    writeln("not a valid header type:", hm, ":", hs); -                    destroy(hm); -                    destroy(hs); -                  } -                  break; -                case "rights": -                  if (match(hs, rgx.subhead_rights)) { -                    if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                      dochead_metadata[hm][hs].str = -                        to!string(s.captures[2]); -                    } -                  } else { -                    writeln("not a valid header type:", hm, ":", hs); -                    destroy(hm); -                    destroy(hs); -                  } -                  break; -                case "date": -                  if (match(hs, rgx.subhead_date)) { -                    if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                      dochead_metadata[hm][hs].str = -                        to!string(s.captures[2]); -                    } -                  } else { -                    writeln("not a valid header type:", hm, ":", hs); -                    destroy(hm); -                    destroy(hs); -                  } -                  break; -                case "original": -                  if (match(hs, rgx.subhead_original)) { -                    if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                      dochead_metadata[hm][hs].str = -                        to!string(s.captures[2]); -                    } -                  } else { -                    writeln("not a valid header type:", hm, ":", hs); -                    destroy(hm); -                    destroy(hs); -                  } -                  break; -                case "classify": -                  if (match(hs, rgx.subhead_classify)) { -                    if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                      dochead_metadata[hm][hs].str = -                        to!string(s.captures[2]); -                    } -                  } else { -                    writeln("not a valid header type:", hm, ":", hs); -                    destroy(hm); -                    destroy(hs); -                  } -                  break; -                case "identifier": -                  if (match(hs, rgx.subhead_identifier)) { -                    if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                      dochead_metadata[hm][hs].str = -                        to!string(s.captures[2]); -                    } -                  } else { -                    writeln("not a valid header type:", hm, ":", hs); -                    destroy(hm); -                    destroy(hs); -                  } -                  break; -                case "notes": -                  if (match(hs, rgx.subhead_notes)) { -                    if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                      dochead_metadata[hm][hs].str = -                        to!string(s.captures[2]); -                    } -                  } else { -                    writeln("not a valid header type:", hm, ":", hs); -  
                  destroy(hm); -                    destroy(hs); -                  } -                  break; -                case "publisher": -                  if (match(hs, rgx.subhead_publisher)) { -                    if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                      dochead_metadata[hm][hs].str = -                        to!string(s.captures[2]); -                    } -                  } else { -                    writeln("not a valid header type:", hm, ":", hs); -                    destroy(hm); -                    destroy(hs); -                  } -                  break; -                case "links": -                  destroy(hm); -                  destroy(hs); -                  // if (match(hs, rgx.subhead_links)) { -                  //   if (dochead_metadata[hm][hs].type() == JSON_TYPE.STRING) { -                  //     dochead_metadata[hm][hs].str = to!string(s.captures[2]); -                  //   } -                  // } else { -                  //   writeln("not a valid header type:", hm, ":", hs); -                  //   destroy(hm); -                  //   destroy(hs); -                  // } -                  break; -                default: -                  break; -                } -              } -            } -          } -        } -      } else { -        writeln("not a valid header type:", hm); -      } -    } -    auto t = tuple(dochead_metadata, dochead_make); -    static assert(!isTypeTuple!(t)); -    return t; -  } -  // invariant() { -  // } -} -#+END_SRC -** header document metadata as hash -#+name: ao_emitter -#+BEGIN_SRC d -class HeaderMetadataMakeHash { -// class HeaderMetadataMakeHash : AssertHeaderMetadataMakeHash { -  auto rgx = Rgx(); -  string header_main; -  string[string] head; -  string[string] header_topic_hash(string header) -  in { } -  body { -    if (auto t = match(header, rgx.head_main)) { -      char[][] obj_spl = split( -        cast(char[]) header, -        rgx.line_delimiter_ws_strip -      ); -      auto header_main = to!string(t.captures[1]); -      head[header_main] = "{"; -      foreach (line; obj_spl) { -        if (auto m = match(line, rgx.head_main)) { -          if (!empty(m.captures[2])) { -            head[header_main] ~= -              "\"" ~ header_main ~ -              "\": \"" ~ -              to!string(m.captures[2]) ~ -              "\","; -          } -        } else if (auto s = match(line, rgx.head_sub)) { -          head[header_main] ~= "\"" ~ s.captures[1] ~ "\":"; -          if (!empty(s.captures[2])) { -            head[header_main] ~= "\"" ~ s.captures[2] ~ "\","; -          } -        } -      } -      head[header_main] = replaceFirst( -        head[header_main], -        rgx.tailing_comma, -        "" -      ); -      head[header_main] ~= "}"; -      debug(headerjson) { -        JSONValue j = parseJSON(head[header_main]); -        assert( -          (j.type == JSON_TYPE.OBJECT) -        ); -      } -    } -    return head; -  } -  invariant() { -  } -} -#+END_SRC -** book index nugget hash -#+name: ao_emitter -#+BEGIN_SRC d -struct BookIndexNuggetHash { -// class BookIndexNuggetHash : AssertBookIndexNuggetHash { -  string main_term, sub_term, sub_term_bits; -  uint ocn_offset, ocn_endpoint; -  string[] ocns; -  string[][string][string] bi; -  string[][string][string] hash_nugget; -  string[] bi_main_terms_split_arr; -  string[][string][string] bookindex_nugget_hash(string bookindex, int ocn) -  in { -    debug(bookindexraw) { -      mixin ScreenTxtColors; -    
  if (!bookindex.empty) { -        writeln( -          scr_txt_color["blue"], "* [bookindex] ", scr_txt_color["off"], -          "[", to!string(ocn), "] ", bookindex -        ); -      } -    } -  } -  body { -    auto rgx = Rgx(); -    if (!bookindex.empty) { -      auto bi_main_terms_split_arr = -        split(bookindex, rgx.bi_main_terms_split); -      foreach (bi_main_terms_content; bi_main_terms_split_arr) { -        auto bi_main_term_and_rest = -          split(bi_main_terms_content, rgx.bi_main_term_plus_rest_split); -        if (auto m = match( -          bi_main_term_and_rest[0], -          rgx.bi_term_and_ocns_match) -        ) { -          main_term = strip(m.captures[1]); -          ocn_offset = to!uint(m.captures[2]); -          ocn_endpoint=(ocn + ocn_offset); -          ocns ~= (to!string(ocn) ~ "-" ~ to!string(ocn_endpoint)); -        } else { -          main_term = strip(bi_main_term_and_rest[0]); -          ocns ~= to!string(ocn); -        } -        bi[main_term]["_a"] ~= ocns; -        ocns=null; -        if (bi_main_term_and_rest.length > 1) { -          auto bi_sub_terms_split_arr = -            split( -              bi_main_term_and_rest[1], -              rgx.bi_sub_terms_plus_ocn_offset_split -            ); -          foreach (sub_terms_bits; bi_sub_terms_split_arr) { -            if (auto m = match(sub_terms_bits, rgx.bi_term_and_ocns_match)) { -              sub_term = strip(m.captures[1]); -              ocn_offset = to!uint(m.captures[2]); -              ocn_endpoint=(ocn + ocn_offset); -              ocns ~= (to!string(ocn) ~ " - " ~ to!string(ocn_endpoint)); -            } else { -              sub_term = strip(sub_terms_bits); -              ocns ~= to!string(ocn); -            } -            if (!empty(sub_term)) { -              bi[main_term][sub_term] ~= ocns; -            } -            ocns=null; -          } -        } -        // ocns=null; -      } -    } -    hash_nugget = bi; -    // bi=null; // bi.init; // use to empty for each next object; else, harvest hashes at the end of the document -    return hash_nugget; -  } -  invariant() { -  } -} -#+END_SRC -** book index report -#+name: ao_emitter -#+BEGIN_SRC d -struct BookIndexReport { -// class BookIndexReport : AssertBookIndexReport { -  int mkn, skn; -  auto bookindex_report_sorted( -    string[][string][string] bookindex_unordered_hashes -  ) { -    auto mainkeys=bookindex_unordered_hashes.byKey.array. -      sort!("toLower(a) < toLower(b)", SwapStrategy.stable).release; -    foreach (mainkey; mainkeys) { -      auto subkeys=bookindex_unordered_hashes[mainkey].byKey.array. 
-        sort!("toLower(a) < toLower(b)", SwapStrategy.stable).release; -      foreach (subkey; subkeys) { -        debug(bookindex) { -          writeln( -            mainkey, ": ", -            subkey, ": ", -            to!string(bookindex_unordered_hashes[mainkey][subkey]) -          ); -        } -        // bookindex_the[mkn][mainkey][skn][subkey] ~= (bookindex_unordered_hashes[mainkey][subkey]); -        skn++; -      } -      mkn++; -    } -    // return bookindex_the; -  } -} -#+END_SRC -** book index report indented -#+name: ao_emitter -#+BEGIN_SRC d -struct BookIndexReportIndent { -  int mkn, skn; -  auto bookindex_report_indented( -    string[][string][string] bookindex_unordered_hashes -  ) { -    auto mainkeys= -      bookindex_unordered_hashes.byKey.array.sort().release; -    foreach (mainkey; mainkeys) { -      debug(bookindex) { -        writeln(mainkey); -      } -      auto subkeys= -        bookindex_unordered_hashes[mainkey].byKey.array.sort().release; -      foreach (subkey; subkeys) { -        debug(bookindex) { -          writeln("  ", subkey); -          writeln("    ", to!string( -            bookindex_unordered_hashes[mainkey][subkey] -          )); -        } -        // bookindex_the[mkn][mainkey][skn][subkey] ~= (bookindex_unordered_hashes[mainkey][subkey]); -        skn++; -      } -      mkn++; -    } -  } -} -#+END_SRC -** book index report section -#+name: ao_emitter -#+BEGIN_SRC d -struct BookIndexReportSection { -  mixin ObjectSetters; -  int mkn, skn; -  auto rgx = Rgx(); -  auto bookindex_write_section( -    string[][string][string] bookindex_unordered_hashes -  ) { -    auto mainkeys=bookindex_unordered_hashes.byKey.array.sort().release; -    foreach (mainkey; mainkeys) { -      write("_0_1 !{", mainkey, "}! "); -      foreach (ref_; bookindex_unordered_hashes[mainkey]["_a"]) { -        auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -        write(" {", ref_, "}#", go, ", "); -      } -      writeln(" \\\\"); -      bookindex_unordered_hashes[mainkey].remove("_a"); -      auto subkeys= -        bookindex_unordered_hashes[mainkey].byKey.array.sort().release; -      foreach (subkey; subkeys) { -        write("  ", subkey, ", "); -        foreach (ref_; bookindex_unordered_hashes[mainkey][subkey]) { -          auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -          write(" {", ref_, "}#", go, ", "); -        } -        writeln(" \\\\"); -        skn++; -      } -      mkn++; -    } -  } -  auto bookindex_build_section( -    string[][string][string] bookindex_unordered_hashes, -    int ocn -  ) { -    string type; -    int type_heading; -    string lev, lvn, lcn; -    string attrib; -    string indent_first; -    string indent_second; -    auto set_oa = ObjectAbstractSet(); -    auto mainkeys = -      bookindex_unordered_hashes.byKey.array.sort().release; -    string bi_tmp; -    string[string][] bookindex; -    writeln(mainkeys.length); -    // B~ Book Index -    type_heading=1; -    bi_tmp = "Book Index"; -    attrib=""; -    lev="B"; -    lvn="1"; -    lcn="1"; -    bookindex ~= -      set_oa.contents_heading( -        type_heading, -        bi_tmp, -        attrib, -        ocn, -        lev, -        lvn, -        lcn -      ); -    ocn++; -    mkn++; -    // 1~ Index -    type_heading=1; -    bi_tmp = "Index"; -    attrib=""; -    lev="1"; -    lvn="4"; -    lcn="2"; -    bookindex ~= -      set_oa.contents_heading( -        type_heading, -        bi_tmp, -        attrib, -        ocn, -        lev, -        lvn, -        lcn -      ); 
-    ocn++; -    mkn++; -    foreach (mainkey; mainkeys) { -      bi_tmp = "!{" ~ mainkey ~ "}! "; -      // bi_tmp = "_0_1 !{" ~ mainkey ~ "}! "; -      foreach (ref_; bookindex_unordered_hashes[mainkey]["_a"]) { -        auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -        bi_tmp ~= " {" ~ ref_ ~ "}#" ~ go ~ ", "; -      } -      bi_tmp ~= " \\\\\n    "; -      bookindex_unordered_hashes[mainkey].remove("_a"); -      auto subkeys = -        bookindex_unordered_hashes[mainkey].byKey.array.sort().release; -      foreach (subkey; subkeys) { -        bi_tmp ~= subkey ~ ", "; -        foreach (ref_; bookindex_unordered_hashes[mainkey][subkey]) { -          auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -          bi_tmp ~= " {" ~ ref_ ~ "}#" ~ go ~ ", "; -        } -        bi_tmp ~= " \\\\\n    "; -        skn++; -      } -      bi_tmp = replaceFirst(bi_tmp, rgx.trailing_linebreak, ""); -      type="para"; -      attrib=""; -      indent_first = "0"; -      indent_second = "1"; -      attrib=""; -      bookindex ~= -        set_oa.contents_para( -          type, -          bi_tmp, -          attrib, -          ocn, -          indent_first, -          indent_second, -          false -        ); -      ocn++; -      mkn++; -    } -    auto t = tuple(bookindex, ocn); -    return t; -  } -  auto bookindex_build_section_( -    string[][string][string] bookindex_unordered_hashes -  ) { -    auto mainkeys = -      bookindex_unordered_hashes.byKey.array.sort().release; -    string bi_tmp; -    string[] bookindex; -    // int bi_num; -    writeln(mainkeys.length); -    foreach (mainkey; mainkeys) { -      bi_tmp = "_0_1 !{" ~ mainkey ~ "}! "; -      foreach (ref_; bookindex_unordered_hashes[mainkey]["_a"]) { -        auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -        bi_tmp ~= " {" ~ ref_ ~ "}#" ~ go ~ ", "; -      } -      bi_tmp ~= " \\\\\n    "; -      bookindex_unordered_hashes[mainkey].remove("_a"); -      auto subkeys = -        bookindex_unordered_hashes[mainkey].byKey.array.sort().release; -      foreach (subkey; subkeys) { -        bi_tmp ~= subkey ~ ", "; -        // bi_tmp ~= "  " ~ subkey ~ ", "; -        foreach (ref_; bookindex_unordered_hashes[mainkey][subkey]) { -          auto go = replaceAll(ref_, rgx.book_index_go, "$1"); -          bi_tmp ~= " {" ~ ref_ ~ "}#" ~ go ~ ", "; -        } -        bi_tmp ~= " \\\\\n    "; -        skn++; -      } -      bi_tmp = replaceFirst(bi_tmp, rgx.trailing_linebreak, ""); -      bookindex ~= bi_tmp; -      mkn++; -    } -    return bookindex; -  } -} -#+END_SRC -** (end)notes section -#+name: ao_emitter -#+BEGIN_SRC d -struct NotesSection { -  mixin ObjectSetters; -  string object_notes; -  ulong previous_count; -  int mkn; -  auto rgx = Rgx(); -  private auto gather_notes_for_endnote_section( -    string[string][] contents_arbitrary_max_length_set, -    ulong counter -  ) -  in { -    // endnotes/ footnotes for -    // doc objects other than paragraphs & headings -    // various forms of grouped text -    assert((contents_arbitrary_max_length_set[counter]["is"] == "para") -    || (contents_arbitrary_max_length_set[counter]["is"] == "heading")); -    assert(counter > previous_count); -    previous_count=counter; -    assert( -      match(contents_arbitrary_max_length_set[counter]["obj"], -      rgx.inline_notes_delimiter_al_regular_number_note) -    ); -  } -  body { -    foreach(m; -    matchAll(contents_arbitrary_max_length_set[counter]["obj"], -    rgx.inline_notes_delimiter_al_regular_number_note)) { -      
debug(endnotes_build) { -        writeln( -          "{^{", m.captures[1], ".}^}#noteref_", m.captures[1], " ", -          m.captures[2]); // sometimes need segment name (segmented html & epub) -        // writeln("{^{", m.captures[1], ".}^}#", contents_arbitrary_max_length_set[counter]["ocn"], " ", m.captures[2]); -      } -      object_notes ~= -        "{^{" ~ m.captures[1] ~ ".}^}#noteref_" ~ -        m.captures[1] ~ " " ~ m.captures[2] ~ "』"; -    } -    return object_notes; -  } -  private auto gathered_notes() -  in { -  } -  body { -    string[] endnotes_; -    if (object_notes.length > 1) { -      endnotes_ = (split(object_notes, rgx.break_string))[0..$-1]; -    } -    return endnotes_; -  } -  private auto endnote_objects(int ocn) -  in { -  } -  body { -    auto set_oa = ObjectAbstractSet(); -    string[string][] endnotes; -    auto endnotes_ = gathered_notes(); -    // auto endnotes_ = (split(object_notes, rgx.break_string))[0..$-1]; -    string type; -    int type_heading; -    string lev, lvn, lcn; -    string attrib; -    string indent_first; -    string indent_second; -    // B~ Endnotes -    type_heading=1; -    attrib=""; -    lev="B"; -    lvn="1"; -    lcn="1"; -    endnotes ~= -      set_oa.contents_heading( -        type_heading, -        "Endnotes", -        attrib, -        ocn, -        lev, -        lvn, -        lcn -      ); -    ocn++; -    mkn++; -    // 1~ Endnotes -    type_heading=1; -    attrib=""; -    lev="1"; -    lvn="4"; -    lcn="2"; -    endnotes ~= -      set_oa.contents_heading( -        type_heading, -        "Endnotes", -        attrib, -        ocn, -        lev, -        lvn, -        lcn -      ); -    ocn++; -    mkn++; -    foreach (endnote; endnotes_) { -      type="para"; -      attrib=""; -      indent_first = "0"; -      indent_second = "0"; -      attrib=""; -      endnotes ~= -        set_oa.contents_para( -          type, -          endnote, -          attrib, -          ocn, -          indent_first, -          indent_second, -          false -        ); -      ocn++; -      mkn++; -    } -    auto t = tuple(endnotes, ocn); -    return t; -  } -} -#+END_SRC -** bibliography -#+name: ao_emitter -#+BEGIN_SRC d -struct Bibliography { -  public JSONValue[] bibliography(ref string[] biblio_unsorted_incomplete, ref JSONValue[] bib_arr_json) -  in { } -  body { -    JSONValue[] biblio_unsorted = -      biblio_unsorted_complete(biblio_unsorted_incomplete, bib_arr_json); -    JSONValue[] biblio_sorted = biblio_sort(biblio_unsorted); -    biblio_debug(biblio_sorted); -    return biblio_sorted; -  } -  final private JSONValue[] biblio_unsorted_complete( -    string[] biblio_unordered, -    ref JSONValue[] bib_arr_json -  ) { -    // JSONValue[] bib_arr_json; -    // int count_biblio_entry; -    // count_biblio_entry=0; // watch -    foreach (bibent; biblio_unordered) { -      // update bib to include deemed_author, needed for: -      // sort_bibliography_array_by_deemed_author_year_title -      // either: sort on multiple fields, or; create such sort field -      JSONValue j = parseJSON(bibent); -      if (!empty(j["fulltitle"].str)) { -        if (!empty(j["author_raw"].str)) { -          j["deemed_author"]=j["author_arr"][0]; -        } else if (!empty(j["editor_raw"].str)) { -          j["deemed_author"]=j["editor_arr"][0]; -        } -        j["sortby_deemed_author_year_title"] = ( -          j["deemed_author"].str ~ -           "; " ~ -           j["year"].str ~ -           "; "  ~ -           j["fulltitle"].str -        ); -        // 
bib[count_biblio_entry] = j.toString(); -      } -      bib_arr_json ~= j; -      // count_biblio_entry++; -      // bib_arr_json[count_biblio_entry] = j; -      // count_biblio_entry++; -    } -    JSONValue[] biblio_unsorted_array_of_json_objects = -      bib_arr_json.dup; -    return biblio_unsorted_array_of_json_objects; -  } -  final private JSONValue[] biblio_sort(JSONValue[] biblio_unordered) { -    JSONValue[] biblio_sorted; -    biblio_sorted = -      sort!((a, b){ -        return ((a["sortby_deemed_author_year_title"].str) < (b["sortby_deemed_author_year_title"].str)); -      })(biblio_unordered).array; -    debug(bibliosorted) { -      foreach (j; biblio_sorted) { -        if (!empty(j["fulltitle"].str)) { -          writeln(j["sortby_deemed_author_year_title"]); -          // writeln(j["deemed_author"], " (", j["author"], ") ",  j["fulltitle"]); -        } -      } -    } -    return biblio_sorted; -  } -  auto biblio_debug(JSONValue[] biblio_sorted) { -    debug(biblio) { -      foreach (j; biblio_sorted) { -        if (!empty(j["fulltitle"].str)) { -          writeln(j["sortby_deemed_author_year_title"]); -        } -      } -    } -  } -} -#+END_SRC -** node structure metadata -#+name: ao_emitter -#+BEGIN_SRC d -struct NodeStructureMetadata { -// class NodeStructureMetadata : AssertNodeJSON { -  int lv, lv0, lv1, lv2, lv3, lv4, lv5, lv6, lv7; -  uint ocn; -  uint[string] p_; // p_ parent_ -  string node; -  string node_emitter( -    string lvn, -    int ocn_, -    int counter_, -    int pointer_, -    string is_ -  ) -  in { -    auto rgx = Rgx(); -    assert(is_ != "heading"); -    assert(to!int(ocn_) >= 0); -  } -  body { -    // scope(failure) { -    //   writeln(__FILE__, ":", __LINE__, " failed here:"); -    //   writeln("  is  : ", is_); -    //   writeln("  node: ", node); -    // } -    assert(is_ != "heading"); // should not be necessary -    assert(to!int(ocn_) >= 0); // should not be necessary -    uint ocn=to!uint(ocn_); -    if (lv7 > 0) { -      p_["lvn"] = 7; p_["ocn"] = lv7; -    } else if (lv6 > 0) { -      p_["lvn"] = 6; p_["ocn"] = lv6; -    } else if (lv5 > 0) { -      p_["lvn"] = 5; p_["ocn"] = lv5; -    } else { -      p_["lvn"] = 4; p_["ocn"] = lv4; -    } -    node=("{ " ~ -      "\"is\": \"" ~ is_ ~ "\"" ~ -      ", \"heading_pointer\": " ~ to!string(pointer_) ~ -      ", \"doc_object_pointer\": " ~ to!string(counter_) ~ -      ", \"ocn\": " ~ to!string(ocn_) ~ -      ", \"parent_ocn\": " ~ to!string(p_["ocn"]) ~ -      ", \"parent_lvn\": " ~ to!string(p_["lvn"]) ~ -      " }" -    ); -    debug(node) { -      mixin ScreenTxtColors; -      if (match(lvn, rgx.levels_numbered_headings)) { -        writeln(scr_txt_marker["yellow"], to!string(node)); -      } else { -        writeln(scr_txt_marker["white"], to!string(node)); -      } -    } -    JSONValue j = parseJSON(node); -    assert(j["parent_lvn"].integer >= 4); -    assert(j["parent_lvn"].integer <= 7); -    assert(j["parent_ocn"].integer >= 0); -    return node; -  } -  invariant() { -  } -#+END_SRC - -#+name: ao_emitter -#+BEGIN_SRC d -  string node_emitter_heading( -    string lvn, -    string lcn, -    int ocn_, -    int counter_, -    int pointer_, -    string is_ -  ) -  in { -    auto rgx = Rgx(); -    assert(is_ == "heading"); -    assert(to!uint(ocn_) >= 0); -    assert( -      match(lvn, rgx.levels_numbered), -      ("not a valid heading level: " ~ lvn ~ " at " ~ to!string(ocn_)) -    ); -    // assert(to!uint(ocn_) >= 0); -    if (match(lvn, rgx.levels_numbered)) { -      if 
(to!uint(lvn) == 0) {
-        assert(to!uint(ocn_) == 1);
-        // writeln(lvn);
-      }
-    }
-  }
-  body {
-    // scope(failure) {
-    //   writeln(__FILE__, ":", __LINE__, " failed here:");
-    //   writeln("  is  : ", is_);
-    //   writeln("  node: ", node);
-    // }
-    auto rgx = Rgx();
-    uint ocn=to!uint(ocn_);
-    switch (lvn) { // switch (to!string(lv)) {
-    case "0":
-      lv=0;
-      lv0=ocn; lv1=0; lv2=0; lv3=0; lv4=0; lv5=0; lv6=0; lv7=0;
-      p_["lvn"] = 0; p_["ocn"] = 0;
-      break;
-    case "1":
-      lv=1;
-      lv1=ocn; lv2=0; lv3=0; lv4=0; lv5=0; lv6=0; lv7=0;
-      p_["lvn"] = 0; p_["ocn"] = lv0;
-      break;
-    case "2":
-      lv=2;
-      lv2=ocn; lv3=0; lv4=0; lv5=0; lv6=0; lv7=0;
-      p_["lvn"] = 1; p_["ocn"] = lv1;
-      break;
-    case "3":
-      lv=3;
-      lv3=ocn; lv4=0; lv5=0; lv6=0; lv7=0;
-      p_["lvn"] = 2; p_["ocn"] = lv2;
-      break;
-    case "4":
-      lv=4;
-      lv4=ocn; lv5=0; lv6=0; lv7=0;
-      if (lv3 > 0) {
-        p_["lvn"] = 3; p_["ocn"] = lv3;
-      } else if (lv2 > 0) {
-        p_["lvn"] = 2; p_["ocn"] = lv2;
-      } else if (lv1 > 0) {
-        p_["lvn"] = 1; p_["ocn"] = lv1;
-      } else {
-        p_["lvn"] = 0; p_["ocn"] = lv0;
-      }
-      break;
-    case "5":
-      lv=5;
-      lv5=ocn; lv6=0; lv7=0;
-      p_["lvn"] = 4; p_["ocn"] = lv4;
-      break;
-    case "6":
-      lv=6;
-      lv6=ocn; lv7=0;
-      p_["lvn"] = 5; p_["ocn"] = lv5;
-      break;
-    case "7":
-      lv=7;
-      lv7=ocn;
-      p_["lvn"] = 6; p_["ocn"] = lv6;
-      break;
-    default:
-      // if (lv7 > 0) {
-      //   p_["lvn"] = 7; p_["ocn"] = lv7;
-      // } else if (lv6 > 0) {
-      //   p_["lvn"] = 6; p_["ocn"] = lv6;
-      // } else if (lv5 > 0) {
-      //   p_["lvn"] = 5; p_["ocn"] = lv5;
-      // } else {
-      //   p_["lvn"] = 4; p_["ocn"] = lv4;
-      // }
-      break;
-    }
-    node=("{ " ~
-      "\"is\": \"" ~ is_ ~ "\"" ~
-      ", \"heading_pointer\": " ~ to!string(pointer_) ~
-      ", \"doc_object_pointer\": " ~ to!string(counter_) ~
-      ", \"ocn\": " ~ to!string(ocn_) ~
-      ",  \"lvn\": " ~ to!string(lvn) ~
-      ",  \"lcn\": " ~ to!string(lcn) ~
-      ", \"parent_ocn\": " ~ to!string(p_["ocn"]) ~
-      ", \"parent_lvn\": " ~ to!string(p_["lvn"]) ~
-      " }"
-    );
-    debug(heading) {
-      mixin ScreenTxtColors;
-      if (match(lvn, rgx.levels_numbered_headings)) {
-        writeln(scr_txt_marker["yellow"], to!string(node));
-      }
-    }
-    debug(node) {
-      mixin ScreenTxtColors;
-      if (match(lvn, rgx.levels_numbered_headings)) {
-        writeln(scr_txt_marker["yellow"], to!string(node));
-      } else {
-        writeln(scr_txt_marker["white"], to!string(node));
-      }
-    }
-    JSONValue j = parseJSON(node);
-    assert(j["parent_lvn"].integer <= 7);
-    assert(j["parent_ocn"].integer >= 0);
-    if (match(lvn, rgx.levels_numbered_headings)) {
-      assert(j["lvn"].integer <= 7);
-      assert(j["ocn"].integer >= 0);
-      if (j["parent_lvn"].integer > 0) {
-        assert(j["parent_lvn"].integer < j["lvn"].integer);
-        if (j["ocn"].integer != 0) {
-          assert(j["parent_ocn"].integer < j["ocn"].integer);
-        }
-      }
-      if (j["lvn"].integer == 0) {
-        assert(j["parent_lvn"].integer == 0);
-      } else if  (j["lvn"].integer == 1) {
-        assert(j["parent_lvn"].integer == 0);
-      } else if  (j["lvn"].integer == 2) {
-        assert(j["parent_lvn"].integer == 1);
-      } else if  (j["lvn"].integer == 3) {
-        assert(j["parent_lvn"].integer == 2);
-      } else if  (j["lvn"].integer == 4) {
-        assert(j["parent_lvn"].integer <= 3);
-      } else if  (j["lvn"].integer == 5) {
-        assert(j["parent_lvn"].integer == 4);
-      } else if  (j["lvn"].integer == 6) {
-        assert(j["parent_lvn"].integer == 5);
-      } else if  (j["lvn"].integer == 7) {
-        assert(j["parent_lvn"].integer == 6);
-      } else if  (j["lvn"].integer == 8) {
-        // writeln(j["parent_lvn"].integer);
-        // assert(j["parent_lvn"].integer >= 4);
-        // assert(j["parent_lvn"].integer <= 7);
-      }
-    }
-    return node;
-  }
-  invariant() {
-  }
-}
-#+END_SRC
-
-* tangles                                                            :tangle:
-** code structure:                                                :ao_emitter.d:
-#+name: tangle_ao_emitter
-#+BEGIN_SRC d :tangle ../lib/sdp/ao_emitter.d
-/+
-  emitters
-  ao_emitters.d
-+/
-mixin template Emitters() {
-  mixin InternalMarkup;
-  <<ao_emitter>>
-}
-#+END_SRC
diff --git a/org/ao_output_debugs.org b/org/ao_output_debugs.org
index 72ca80e..51054b7 100644
--- a/org/ao_output_debugs.org
+++ b/org/ao_output_debugs.org
@@ -449,19 +449,15 @@ debug(checkdoc) {
 * tangles                                                            :tangle:
 ** code structure:                                          :ao_output_debugs.d:
 #+name: tangle_ao_output_debugs
-#+BEGIN_SRC d :tangle ../lib/sdp/ao_output_debugs.d
+#+BEGIN_SRC d :tangle ../src/sdp/ao_output_debugs.d
 /+
   output debugs
   ao_output_debugs.d
 +/
 template SiSUoutputDebugs() {
   struct SDPoutputDebugs {
-    auto tst_debugs(S)(auto ref const S s) {
-      mixin RgxInit;
-      mixin ScreenTxtColors;
-      auto rgx = Rgx();
-    }
-    auto abstract_doc_source_debugs(S)(auto ref const S contents,
+    auto abstract_doc_source_debugs(S)(
+      auto ref const S         contents,
       JSONValue[string]        docmake,
       JSONValue[string]        dochead,
       string[][string][string] bookindex_unordered_hashes,
diff --git a/org/ao_read_source_files.org b/org/ao_read_source_files.org
index c03ff94..3609fb5 100644
--- a/org/ao_read_source_files.org
+++ b/org/ao_read_source_files.org
@@ -12,10 +12,10 @@
 #+FILETAGS: :sdp:niu:ao:
 #+TAGS: assert(a) class(c) debug(d) mixin(m) sdp(s) tangle(T) template(t) WEB(W) noexport(n)
-* markup source raw                                                  :markup:
+* read file, get raw markup source                                   :markup:
 [[./sdp.org][sdp]]
 [[./][org/]]
-** source string                                                     :string:
+** [#A] read file, source string                                     :string:
 #+name: ao_markup_source_raw
 #+BEGIN_SRC d
 final private string readInMarkupSource(in string fn_src) {
@@ -319,7 +319,7 @@ return contents;
 * tangles (code structure)                                           :tangle:
 ** ao_markup_source_raw.d:   :ao_markup_source_raw.d:
-#+BEGIN_SRC d :tangle ../lib/sdp/ao_read_source_files.d
+#+BEGIN_SRC d :tangle ../src/sdp/ao_read_source_files.d
 /+
   ao_read_source_files.d
   - open markup files
diff --git a/org/compile_time_info.org b/org/compile_time_info.org
index cc1ac62..1c3ab0d 100644
--- a/org/compile_time_info.org
+++ b/org/compile_time_info.org
@@ -90,7 +90,7 @@ version(D_LP64) {
 * tangles                                                            :tangle:
 ** compile_time_info:                                                      :compile_time_info.d:
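Note on the deleted ao_emitter code above: each heading node is emitted as a small JSON string, and the assertions enforce that a heading's parent sits at a strictly lower level (level 4 may attach to whichever of levels 0-3 was seen last) and carries an earlier ocn. A minimal sketch of that invariant check against a hand-written node string (the values are illustrative, not from a real document):

#+BEGIN_SRC d
unittest {
  import std.json;
  // illustrative node; the emitter assembles this from lvn, ocn and the p_ hash
  string node = `{ "is": "heading", "ocn": 9, "lvn": 4, "parent_ocn": 5, "parent_lvn": 2 }`;
  JSONValue j = parseJSON(node);
  assert(j["lvn"].integer <= 7 && j["ocn"].integer >= 0);
  if (j["parent_lvn"].integer > 0) {
    // a parent heading sits at a lower level and was issued an earlier ocn
    assert(j["parent_lvn"].integer < j["lvn"].integer);
    assert(j["parent_ocn"].integer < j["ocn"].integer);
  }
}
#+END_SRC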
-#+begin_src d  :tangle ../lib/sdp/compile_time_info.d
+#+begin_src d  :tangle ../src/sdp/compile_time_info.d
 /+
   compile_time_info
   compile_time_info.d
diff --git a/org/sdp.org b/org/sdp.org
index a48fbfa..fe66ef3 100644
--- a/org/sdp.org
+++ b/org/sdp.org
@@ -65,41 +65,18 @@ private import
 #+END_SRC
 **** sdp                                                      :import:sdp:
-***** TODO lib/sdp.d
+***** TODO src/sdp.d
-├── lib_
+├── src
 │   ├── sdp.d
-    ├── version.txt
-    └── sdp
-        ├── ao_abstract_doc_source.d
-        ├── ...
-        └── compile_time_info.d
-
-#+NAME: sdp_imports_use
-#+BEGIN_SRC d
-/+ sdp  sisu document parser +/
-import
-  lib.sdp.compile_time_info,            // sdp/compile_time_info.d
-  lib.sdp.ao_abstract_doc_source,       // sdp/ao_abstract_doc_source.d
-  lib.sdp.ao_defaults,                  // sdp/ao_defaults.d
-  lib.sdp.ao_read_source_files,         // sdp/ao_read_source_files.d
-  lib.sdp.ao_output_debugs,             // sdp/ao_output_debugs.d
-  lib.sdp.ao_rgx,                       // sdp/ao_rgx.d
-  lib.sdp.ao_ansi_colors;               // sdp/ao_ansi_colors.d
-  // std.conv;
-#+END_SRC
-
-***** TODO lib/sdp/std.d (rdmd)                                    :rdmd:
-
-├── lib_
 │   └── sdp
-        ├── ao_abstract_doc_source.d
-        ├── ...
-        ├── compile_time_info.d
-        ├── sdp.d
-        └── version.txt
+│       ├── ao_abstract_doc_source.d
+│       ├── ...
+│       └── compile_time_info.d
+└── views
+    └── version.txt
-#+NAME: sdp_imports_rdmd
+#+NAME: sdp_imports_use
 #+BEGIN_SRC d
 /+ sdp  sisu document parser +/
 import
@@ -324,25 +301,27 @@ break;
 * tangles (code structure)                                           :tangle:
 ** sdp                                                              :sdp.d:
-*** TODO lib/sdp.d
+*** TODO src/sdp.d
-├── lib_
+├── src
 │   ├── sdp.d
-    ├── version.txt
-    └── sdp
-        ├── ao_abstract_doc_source.d
-        ├── ...
-        └── compile_time_info.d
-
-├── lib_
+│   └── sdp
+│       ├── ao_abstract_doc_source.d
+│       ├── ...
+│       └── compile_time_info.d
+├── views
+│   └── version.txt
+
+├── src
 │   ├── sdp
-    │   ├── ao_abstract_doc_source.d
-    │   ├── ...
-    │   └── compile_time_info.d
-    ├── sdp.d
-    └── version.txt
-
-#+BEGIN_SRC d  :tangle ../lib/sdp.d :shebang #!/usr/bin/env rdmd
+#+BEGIN_SRC d  :tangle ../src/sdp.d :shebang #!/usr/bin/env rdmd
 /+
   sdp
   sdp.d
@@ -366,57 +345,6 @@ void main(string[] args) {
 }
 #+END_SRC
-*** TODO lib/sdp/sdp.d                                                 :rdmd:
-
-├── lib_
-│   └── sdp
-        ├── ao_abstract_doc_source.d
-        ├── ...
-        ├── compile_time_info.d
-        ├── sdp.d
-        └── version.txt
-
-rdmd needs different paths (simple structure) to build, this solution could be
-used by both but so far I prefer the other for dmd & ldc
-
-#+BEGIN_SRC d  :tangle ../lib/sdp/sdp.d :shebang #!/usr/bin/env rdmd
-// [used by rdmd]
-/+
-  sdp
-  sdp.d
-+/
-<<sdp_imports>>
-<<sdp_imports_rdmd>>
-<<sdp_version_mixin>>
-mixin CompileTimeInfo;
-mixin RgxInit;
-void main(string[] args) {
-  <<sdp_compilation>>
-  <<sdp_args>>
-  foreach(fn_src; fns_src) {
-    if (!empty(fn_src)) {
-      <<sdp_each_file_do>>
-    } else {
-      <<sdp_no_filename_provided>>
-    }
-  }
-}
-#+END_SRC
-
-** version.txt                                                      :version:
-
-*** TODO lib/version.txt
-#+BEGIN_SRC d  :tangle ../lib/version.txt
-/+ obt - org generated file +/
-<<version_txt>>
-#+END_SRC
-
-*** TODO lib/sdp/version.txt                                           :rdmd:
-#+BEGIN_SRC d  :tangle ../lib/sdp/version.txt
-/+ obt - org generated file +/
-// [used by rdmd]
-<<version_txt>>
-#+END_SRC
 * TODO work on
 - figure out best program dir structure, issue with rdmd
diff --git a/org/sdp_conf.org b/org/sdp_conf.org
new file mode 100644
index 0000000..a078048
--- /dev/null
+++ b/org/sdp_conf.org
@@ -0,0 +1,40 @@
+* configuration tangles
+** TODO version.txt: set version                                    :version:
+
+#+BEGIN_SRC d  :tangle ../views/version.txt
+/+ obt - org generated file +/
+struct Version {
+  int major;
+  int minor;
+  int patch;
+}
+enum ver = Version(1, 0, 0);
+#+END_SRC
+
+** dub
+*** dub.json                                                :dub:description:
+
+#+BEGIN_SRC json  :tangle ../dub.json
+{
+  "name": "sdp",
+  "targetType": "executable",
+  "targetName": "sdp",
+  "targetPath": "bin",
+  "description": "sisu document parser.",
+  "authors": ["Ralph Amissah"],
+  "homepage": "http://sisudoc.org",
+  "license": "GPL-3.0",
+  "add-path": "./src/sdp",
+  "dependencies": {
+  }
+}
+#+END_SRC
+
+*** dub.selections.json                              :dub:selections:version:
+
+#+BEGIN_SRC json  :tangle ../dub.selections.json
+{
+	"fileVersion": 1,
+	"versions": {}
+}
+#+END_SRC
diff --git a/sdp.org b/sdp.org
deleted file mode 100644
index 4c2afcc..0000000
--- a/sdp.org
+++ /dev/null
@@ -1,385 +0,0 @@
-#+TITLE: sdp (project) discussion
-#+AUTHOR: Ralph Amissah
-#+EMAIL: ralph.amissah@gmail.com
-#+STARTUP: indent
-#+LANGUAGE: en
-#+OPTIONS: H:3 num:nil toc:t \n:nil @:t ::t |:t ^:nil _:nil -:t f:t *:t <:t
-#+OPTIONS: TeX:t LaTeX:t skip:nil d:nil todo:t pri:nil tags:not-in-toc
-#+OPTIONS: author:nil email:nil creator:nil timestamp:nil
-#+PROPERTY: header-args :padline no :exports none :noweb yes
-#+EXPORT_SELECT_TAGS: export
-#+EXPORT_EXCLUDE_TAGS: noexport
-#+PRIORITIES: A F E
-#+FILETAGS: :sdp:rel:makefile:
-#+TAGS: assert(a) class(c) debug(d) mixin(m) sdp(s) tangle(T) template(t) WEB(W) noexport(n)
-
-* sdp                                                                 :sdp:
-** debug                                                            :debug:
-  objects
-  header
-  heading
-  poem verse ocn
-
-** tasks                                                            :tasks:
-*** sisu_loop.d
-**** TODO [#A] header extraction (make instructions & metadata)
-***** metadata
-title & author heading
-***** make
-****** header make: search and replace
-****** auto-identify structure from make instructions
-****** auto-number headings, with starting level as instructed in header make
-**** markup
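Note on the new sdp_conf.org tangles above: dub's default string-import path is views/, so the tangled version.txt can be compiled straight into the binary. A minimal sketch of one way to pull it in (an assumption for illustration; the project's own wiring is the sdp_version_mixin noweb block, which is not reproduced here):

#+BEGIN_SRC d
// sketch: mixes in the declarations tangled to views/version.txt
// (struct Version and enum ver), then reports them at compile time
mixin(import("version.txt"));
pragma(msg, "sdp version ", ver.major, ".", ver.minor, ".", ver.patch);
#+END_SRC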
-***** para markers: extract (hash store) & clean (remove from text)
-place in hash
-  heading level,                 numeric
-  indent info,                   numeric: first, rest
-  bullet,                        bool
-issue representing complex structures, consider using bits of json!
-string h = "{ \"heading_level\": \"A\" }";
-string para_attrib = "{ \"indent_first\": 0, \"indent_rest\": 0, \"bullet\": false }";
-# string s = "{
-#   \"indent_first\": 0,
-#   \"indent_rest\": 0,
-#   \"bullet\": false,
-#   \"name\": "",
-#   \"tags\": "",
-#   \"digest\": "",
-# }";
-string para_attrib = "{
-  \"indent_first\": 0,
-  \"indent_rest\": 0,
-  \"bullet\": false,
-}";
-string obj_ids = "{
-  \"name\": \"\",
-  \"tags\": \"\",
-  \"digest\": \"\",
-}";
-string obj_lv = "{
-  \"markup\": \"\",         // [A-D1-4] lv
-  \"num_markup\": 0,      // [0-7]    ln
-  \"num_collapsed\": 0,   // [0-7]    lc
-}";
-string obj_citation = "{
-  \"ocn\": 0,             //
-  \"on\": true,           //
-}";
-
-***** structure as json?
-odv
-osp
-node
-parent
-
-you could keep ocn info (bool & number?) this way, consider
-
-also clean
-  block markers
-  book index!
-***** inline markup of text for subsequent processing
-regex substitution
-search and replace inline ascii markup with less ambiguous utf-8 markers
-****** footnotes count/number
-**** TODO [#B] endnotes/ footnotes for doc objects other than paragraphs & headings various forms of grouped text
-**** TODO [#C] ocn (on & off)
-**** TODO [#B] headings
-***** heading numbering?
-
-***** segment naming & id tags
-
-**** TODO [#B] backmatter
-***** book index
-clean from object, store under separate key
-consider json
-****** TODO [#B] book index sort
-    sort algorithm currently sorts A-Za-z need Aa-Zz
-    also main terms references need to follow main term ... placed under _a which is not always sorted first
-***** bibliography
-identify and store under separate hash
-consider using json to structure
-***** glossary
-identify and store under separate hash?
-**** DONE [#B] composite documents
-import files
-
-<< path_to/file.sst
-
-*** cleanup
-
-*** ranges & node structure info: structure information (levels, ranges & relationships)
-**** heading pointers
-***** headings_array          heading pointer
-***** data_abstration_array   heading pointer
-**** ocn
-**** heading
-***** markup level            [A-D1-4]
-***** collapsed level         [0-7]
-**** parent
-***** heading markup level    [A-D1-4]
-***** heading ocn
-**** range, heading & children (till next heading of same level or higher (close level mark))
-***** array range             (x..y)
-includes sub headings and non-heading objects till next heading
-debate whether to use relative or absolute values (i.e. array points)
-***** ocn range               (x..y)
-
-NodeStructureMetadata see node_jstr
-abstract_doc: obj_att\|node_jstr\|node
-emitter: ObjAttrib
-heading_pointer
-*** misc
-**** temporarily turn off ocn
-
---~#
---#
--+#
-
-~# & -#
-
-**** parent & children
-heading parent & children
-paragraph parent
-
-**** dir (read using dlang lib)
-**** how to deal with complex data structures?
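Note on the para-marker hashes above: keeping per-paragraph attributes as small JSON strings, as mooted in the deleted notes, leaves the hash values as plain strings while still carrying structure. A minimal sketch with std.json (field values illustrative):

#+BEGIN_SRC d
unittest {
  import std.json;
  // per-paragraph attributes carried as a small JSON string
  string para_attrib = `{ "indent_first": 1, "indent_rest": 0, "bullet": true }`;
  JSONValue attrib = parseJSON(para_attrib);
  assert(attrib["indent_first"].integer == 1);
  assert(attrib["indent_rest"].integer == 0);
  assert(attrib["bullet"].type == JSON_TYPE.TRUE);
}
#+END_SRC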
-try hashes with json
-
-**** read files
-***** config
-***** make
-** compile                                                        :compile:
-  [[http://www.dprogramming.com/tutorial.php][tutorial]]
-  [[http://www.dprogramming.com/tutorial.php#newusers][tutorial new users]]
-*** rdmd
-rdmd --build-only --chatty -d-debug sdp.d
-rdmd -d-debug sisu_ref0.d ../markup/test0.sst
-
-VER='sdp2' && rdmd --build-only --chatty lib/${VER}/sdp.d
-
-*** dmd                                                                :dmd:
-dmd -de -w -unittest -debug sdp.d
-
-VER='sdp2'
-dmd -debug -of./bin/${VER} lib/${VER}/sdp.d
-VER='sdp2' && dmd -debug=checkdoc -of./bin/${VER} lib/${VER}/sdp.d
-
-VER='2' && dmd -debug=checkdoc -debug=summary -of./bin/sdp${VER} lib/sdp${VER}/sdp.d
-
-*** ldc2                                                                :ldc:
-ldc2 -d-debug=checkdoc -d-debug=summary -of=./bin/sdp lib/sdp/sdp.d
-
-ldc2 -de -w -unittest -d-debug sdp.d
-
-VER='2' && ldc2 -d-debug=checkdoc -d-debug=summary -of=./bin/sdp${VER} ./lib/sdp${VER}/sdp.d
-
-VER='2' && ldc2 -unittest -d-debug=summary -of=./bin/sdp${VER} lib/sdp${VER}/sdp.d
-VER='2' && ldc2 -d-debug=checkdoc -d-debug=summary -of=./bin/sdp${VER} lib/sdp${VER}/sdp.d
-VER='2' && ldc2 -d-debug=checkdoc -d-debug=summary -of=./bin/sdp${VER} lib/sdp${VER}/sdp.d
-
-VER='sdp2' && ldc2 -d-debug=objects -d-debug=summary -of=./bin/${VER} lib/${VER}/sdp.d
-
-VER='sdp0'
-VER='sdp1'
-VER='sdp2'
-VER='sdp3'
-VER='sdp'
-ldc2 -d-debug -of=./bin/sisu_${VER} lib/${VER}/sdp.d
-ldc2 -d-debug -of=./bin/sisu_${VER} lib/${VER}/sdp.d
-ldc2 -d-debug=heading -of=./bin/sisu_${VER} lib/${VER}/sdp.d
-ldc2 -d-debug=objects -of=./bin/sisu_${VER} lib/${VER}/sdp.d
-
-VER='sdp2' && ldc2 -d-debug=objects -d-debug=summary -of=./bin/sdp lib/${VER}/sdp.d
-
-// VER='2' && ldc2 -unittest -d-debug=insert -d-debug=objects -d-debug=headings -d-debug=summary -d-debug=checkdoc -d-debug=subheader -of=./bin/sdp${VER} lib/sdp${VER}/sdp.d
-
-**** remove later
-binbuild="sdp1"; ldc2 -d-debug ./${binbuild}.d && time ./${binbuild} markup/test0.sst
-binbuild="sdp1"; gdc -fdebug -o ./${binbuild} ./${binbuild}.d && time ./${binbuild} markup/test0.sst
-
-binbuild="sdp2"
-ldc2 -release ./${binbuild}.d && time ./${binbuild} markup/test0.sst
-
-#cd lib
-#ldc2 -d-debug -of=../bin/sdp0 sdp/sdp.d
-
-*** gdc                                                                 :gdc:
-  [[http://wiki.dlang.org/GDC/Using_GDC][Using GDC]]
-copy/symlink deps, and then "*.d **/*.d"
-
-gdc -o ./bin/sdp0 lib/sdp.d
-VER='sdp2' &&
-gdc -o ./bin/${VER}  lib/${VER}/sdp.d
-
-VER='sdp2' && gdc-5 -o ./bin/${VER} ./lib/${VER}/sdp.d
-
-VER='sdp2' && gdc-5 -o ./bin/${VER} ./lib/${VER}/sdp.d ./lib/${VER}/*.d
-
-# VER='sdp2' &&  gdc -o -d-debug=objects -d-debug=summary -of=./bin/${VER} lib/${VER}/sdp.d
-
-# check flags
-# VER='sdp2' && gdc-5 -I. -O3 -fPIC -c -o ./bin/${VER} ./lib/${VER}/sdp.d
-
-**** remove later
-binbuild="sdp1"
-ldc2 -d-debug ${binbuild}.d && time ${binbuild} markup/test0.sst
-gdc -fdebug -o ${binbuild} ${binbuild}.d && time ${binbuild} markup/test0.sst
-
-// gdc -release -o ./${binbuild} ./${binbuild}.d && time ./${binbuild} markup/test0.sst
-
-**** bug
-
-[http://forum.dlang.org/thread/mailman.284.1442659522.22025.digitalmars-d-learn@puremagic.com?page=3]
-
-[http://www.mail-archive.com/digitalmars-d-learn@puremagic.com/msg61467.html]
-
-??
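Note on the build lines above: the -debug=IDENT (dmd) and -d-debug=IDENT (ldc2) switches select named debug blocks in the source; anything inside such a block is compiled out unless the matching identifier is passed. A minimal sketch of the pattern (identifier and message are illustrative):

#+BEGIN_SRC d
debug(checkdoc) {
  // present only when built with -debug=checkdoc (dmd) or -d-debug=checkdoc (ldc2)
  void reportCheckdoc(string msg) {
    import std.stdio : writeln;
    writeln("checkdoc: ", msg);
  }
}
#+END_SRC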
-[https://github.com/Dicebot/Arch-PKGBUILDs/blob/master/gdc/folders.diff]
-from
-[https://www.mail-archive.com/d.gnu@puremagic.com/msg03844.html]
-
-[https://www.mail-archive.com/digitalmars-d-learn@puremagic.com/msg61470.html]
-
-*** run                                                                 :run:
-
-sdp0 markup/test0.sst
-
-~utils/d/bin/sdp0 filename.sst
-~utils/d/bin/sdp1 filename.sst
-
-cd markup
-sdp0 test0.sst
-
-*** compile notes
-**** ldc
-import path[0] = /usr/include/d/ldc
-import path[1] = /usr/include/d
-**** gdc
-gdmd -help
-import path[0] = /usr/include/d/4.9/x86_64-linux-gnu
-import path[1] = /usr/include/d/4.9
-**** dmd (non-free) install arch?
-**** issues
-
-** notes                                                              :notes:
-**** read file
-char[][] markup_sourcefile_content = split(cast(char[])read(fn_src), rgx_line_delimiter);
-char[][] markup_sourcefile_content = markupSourceLineArray(markupSourceString(fn_src));
-*** build
-
-** book index
-// [http://forum.dlang.org/post/k8of07$1bgu$1@digitalmars.com]
-// [http://forum.dlang.org/post/dfyowpjhdaemhxhepfmk@forum.dlang.org]
-  // recast ---
-  // ocns ; sub  ; main
-  string[][string][string] bookindex;
-  // as  ---
-  // ocns ; sub  ; main
-  string[]string[][string][] bookindex_the;
-  // with sorted main & sub
-
-// vim ./lib/sdp2/sisu_*.d
-// vim **/sdp2/sisu_*.d
-
-// emacs **/sdp2/sisu_*.d &
-// emacs ./lib/sdp2/sisu_*.d &
-
-** bugs                                                                :bugs:
-ok
-time ~dlang/bin/sdp2 --html --no-assert en/[a-eg-z]*
-not ok
-time ~dlang/bin/sdp2 --html --no-assert en/free_for_all.peter_wayner.sst en/gpl3.fsf.sst
-works if:
-  poems removed from gpl3;
-  biblio removed from free for all
-time ~dlang/bin/sdp2 --html --no-assert en/free_for_all.peter_wayner.sst en/gpl2.fsf.sst en/gpl3.fsf.sst
-time ~dlang/bin/sdp2 --html --no-assert en/[fg]*
-time ~dlang/bin/sdp2 --html --no-assert en/[a-z]*
-leaving out free_for_all seems to make it ok
-time ~dlang/bin/sdp2 --html --no-assert en/[a-eg-z]*
-leaving out any two bibliography entries within free_for_all appears to fix the problem!
-
-works in dmd not in ldc2
-
-**** Error in `/home/ralph/grotto/repo/git.repo/utils/d/bin/sdp2': corrupted double-linked list: 0x00000008b905b310 ***
-in free_for_all bibliography first instance FatBrain ref
-gets stuck after:
-en/free_for_all.peter_wayner.sst
-** desc                                                                :desc:
-*** process files
-.sst (text) regular
-.ssm (master) contains either .sst or .ssi
-.ssi (insert) processed within .ssm (or called into a .ssm by another .ssi)
-*** header
-**** metadata
-**** make (@make:)
-cover_image
-home_button_image
-home_button_text
-footer
-headings
-num_top
-breaks
-substitute
-bold
-italics
-emphasis
-texpdf_font
-css
-
-*** structure
-document structure is determined by headings of different levels
-headings must either
-(a) be explicitly marked as such, or
-(b) given by a regex (in the appropriate make header) that allows the program determine headings within text
-types of heading:
-**** document separators (A-D)
-level A is the title
-**** text headings       (1-4)
-**** header make heading regex
-***** heading levels
-****** markup level [A-D1-4]
-****** markup level numbers [0-7] or [1-8]
-****** collapsed level numbers [0-7] or [1-8]
-****** nodes
-****** json search segments?
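Note on the book index hashes above: with the nested hash sketched in the deleted notes (main term -> sub term -> ocn references, "_a" holding the main term's own references), a case-insensitive comparison gives the wanted Aa-Zz ordering of main terms instead of the default A-Za-z. A minimal sketch (terms and ocns illustrative):

#+BEGIN_SRC d
unittest {
  import std.algorithm : sort;
  import std.uni : icmp;
  // main term -> sub term -> ocn references; "_a" holds the main term's own refs
  string[][string][string] bookindex;
  bookindex["Zebra"] = ["_a": ["3"]];
  bookindex["apple"] = ["_a": ["7"], "cider": ["12"]];
  bookindex["ASCII"] = ["_a": ["21"]];
  auto mains = bookindex.keys;
  sort(mains);                              // A-Za-z: ["ASCII", "Zebra", "apple"]
  sort!((a, b) => icmp(a, b) < 0)(mains);   // Aa-Zz:  ["apple", "ASCII", "Zebra"]
  assert(mains == ["apple", "ASCII", "Zebra"]);
}
#+END_SRC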
-chapter equivalent, decide
-***** switches, ocn on off (dummy header)
-*** object attributes
-types of object:
-**** headings (document structure objects)
-***** level
-***** segment name
-***** numbering
-**** paragraphs
-**** blocks
-types of block object:
-***** group
-***** block
-***** poem (verse)
-***** code
-***** table
-***** quote
-***** TODO alt?
-*** paragraph attributes
-types of paragraph attribute:
-**** indent
-***** paragraph (1 - 9)
-***** first line level (1 - 9), & subsequent text level (1 - 9)
-indent (first, rest), bullet
-**** bullets
-*** inline text (paragraph) attributes
-bold, italics, emphasis, superscript, substcript, strike, add, monospace, footnote (number them)
-types of text (within paragraph) attribute:
-**** bold
-**** italics
-**** emphasis
-**** underscore
-**** strike
-**** superscript
-**** subscript
-** package
-*** dub
-  against dub:
-  [[http://forum.dlang.org/post/hmdyrzbbhbcgqwqznqwz@forum.dlang.org][against dub]]
-** dlang general
-*** books
-   [[http://wiki.dlang.org/Bookshttp://wiki.dlang.org/Books][D books]]
-   [[http://ddili.org/ders/d.en/index.html][programming in D]]
diff --git a/lib/sdp/sdp.d b/src/sdp.d
index 8c44d16..725c018 100755
--- a/lib/sdp/sdp.d
+++ b/src/sdp.d
@@ -1,5 +1,4 @@
 #!/usr/bin/env rdmd
-// [used by rdmd]
 /+
   sdp
   sdp.d
@@ -109,6 +108,7 @@ void main(string[] args) {
     }
   }
   foreach(fn_src; fns_src) {
+  // foreach(fn_src; fns_src) {
     if (!empty(fn_src)) {
       scope(success) {
         debug(checkdoc) {
diff --git a/lib/sdp/ao_abstract_doc_source.d b/src/sdp/ao_abstract_doc_source.d
index 00cff5c..c1566d2 100644
--- a/lib/sdp/ao_abstract_doc_source.d
+++ b/src/sdp/ao_abstract_doc_source.d
@@ -8,10 +8,10 @@ template SiSUdocAbstraction() {
     /+ ↓ abstraction imports +/
     import
-      lib.sdp.ao_defaults,                  // sdp/ao_defaults.d
-      lib.sdp.ao_object_setter,             // sdp/ao_object_setter.d
-      lib.sdp.ao_rgx,                       // sdp/ao_rgx.d
-      lib.sdp.ao_ansi_colors;               // sdp/ao_ansi_colors.d
+      ao_defaults,                  // sdp/ao_defaults.d
+      ao_object_setter,             // sdp/ao_object_setter.d
+      ao_rgx,                       // sdp/ao_rgx.d
+      ao_ansi_colors;               // sdp/ao_ansi_colors.d
     /+ ↓ abstraction mixins +/
     mixin ObjectSetter;
diff --git a/lib/sdp/ao_ansi_colors.d b/src/sdp/ao_ansi_colors.d
index e5a46f9..e5a46f9 100644
--- a/lib/sdp/ao_ansi_colors.d
+++ b/src/sdp/ao_ansi_colors.d
diff --git a/lib/sdp/ao_defaults.d b/src/sdp/ao_defaults.d
index 04f5506..314635c 100644
--- a/lib/sdp/ao_defaults.d
+++ b/src/sdp/ao_defaults.d
@@ -235,7 +235,9 @@ template SiSUbiblio() {
 }
 template InternalMarkup() {
   struct InlineMarkup {
+  // endnote en_a_o: '~{'; en_a_c: '}~'
     auto en_a_o = "【"; auto en_a_c = "】";
+  // endnote en_b_o: '~['; en_b_c: ']~'
     auto en_b_o = "〖"; auto en_b_c = "〗";
     // auto segname_prefix_auto_num_extract = "c";
     // auto segname_prefix_auto_num_provide = "s";
diff --git a/lib/sdp/ao_object_setter.d b/src/sdp/ao_object_setter.d
index 4492e8a..4492e8a 100644
--- a/lib/sdp/ao_object_setter.d
+++ b/src/sdp/ao_object_setter.d
diff --git a/lib/sdp/ao_output_debugs.d b/src/sdp/ao_output_debugs.d
index 9c66312..8c655e8 100644
--- a/lib/sdp/ao_output_debugs.d
+++ b/src/sdp/ao_output_debugs.d
@@ -4,12 +4,8 @@
 +/
 template SiSUoutputDebugs() {
   struct SDPoutputDebugs {
-    auto tst_debugs(S)(auto ref const S s) {
-      mixin RgxInit;
-      mixin ScreenTxtColors;
-      auto rgx = Rgx();
-    }
-    auto abstract_doc_source_debugs(S)(auto ref const S contents,
+    auto abstract_doc_source_debugs(S)(
+      auto ref const S         contents,
       JSONValue[string]        docmake,
       JSONValue[string]        dochead,
       string[][string][string] bookindex_unordered_hashes,
diff --git a/lib/sdp/ao_read_source_files.d b/src/sdp/ao_read_source_files.d
index a5ca084..a5ca084 100644
--- a/lib/sdp/ao_read_source_files.d
+++ b/src/sdp/ao_read_source_files.d
diff --git a/lib/sdp/ao_rgx.d b/src/sdp/ao_rgx.d
index e675ca1..e675ca1 100644
--- a/lib/sdp/ao_rgx.d
+++ b/src/sdp/ao_rgx.d
diff --git a/lib/sdp/compile_time_info.d b/src/sdp/compile_time_info.d
index 783ac62..783ac62 100644
--- a/lib/sdp/compile_time_info.d
+++ b/src/sdp/compile_time_info.d
diff --git a/lib/version.txt b/views/version.txt
index 33ae56d..7fc1957 100644
--- a/lib/version.txt
+++ b/views/version.txt
@@ -1,3 +1,4 @@
+
 /+ obt - org generated file +/
 struct Version {
   int major;
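Note on the InlineMarkup comments added above: they record the ascii endnote forms ('~{ ... }~' and '~[ ... ]~') that the utf-8 delimiters stand in for; the deleted notes describe replacing the ambiguous ascii markers with these less ambiguous utf-8 markers during abstraction. A minimal sketch of that substitution (regex and sample text are illustrative):

#+BEGIN_SRC d
unittest {
  import std.regex;
  auto en_a_o = "【"; auto en_a_c = "】";
  // rewrite the ascii endnote pair '~{ ... }~' to the internal utf-8 delimiters
  string para = "text~{ an endnote }~ more text";
  string marked = replaceAll(para, regex(r"~\{\s*(.+?)\s*\}~"), en_a_o ~ "$1" ~ en_a_c);
  assert(marked == "text【an endnote】 more text");
}
#+END_SRC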
