From ff590479eab42d472a9e219a41459f67ee8fcd3d Mon Sep 17 00:00:00 2001
From: Ralph Amissah
Date: Sun, 1 Aug 2021 05:20:29 -0400
Subject: spine_search
---
org/spine_build_scaffold.org | 2511 +++++++++++++++++++++++++++++++++++++++---
1 file changed, 2373 insertions(+), 138 deletions(-)
diff --git a/org/spine_build_scaffold.org b/org/spine_build_scaffold.org
index a91caf1..6cc4c6d 100644
--- a/org/spine_build_scaffold.org
+++ b/org/spine_build_scaffold.org
@@ -19,15 +19,16 @@
[[./spine.org][spine.org]] VERSION
[[./spine_info.org][spine_info.org]] [[../org/][org/]]
-* version info VERSION :version:set:project:
-** project
+* spine
+** version info VERSION :version:set:project:
+*** project
#+NAME: version_spine
#+BEGIN_SRC sh
0.11.3
#+END_SRC
-** subprojects
+*** subprojects
- d2sqlite3
https://code.dlang.org/packages/d2sqlite3
@@ -61,7 +62,7 @@
0.2.0
#+END_SRC
-** build tools if specified
+*** build tools if specified
- meson
@@ -77,8 +78,8 @@
0
#+END_SRC
-* makefile :makefile:
-** tangle
+** makefile :makefile:
+*** tangle
#+HEADER: :tangle ../makefile
#+BEGIN_SRC makefile
@@ -139,8 +140,8 @@
<>
#+END_SRC
-** settings [+2] :settings:
-*** git version stamp :git:version:
+*** settings [+2] :settings:
+**** git version stamp :git:version:
#+NAME: make_set_var_0_git_version
#+BEGIN_SRC makefile
@@ -148,7 +149,7 @@ PROG_VER_GIT :=$(shell echo `git describe --long --tags | sed -e "s/^[ a-z_-]\+\
PROG_VER_DECLARED :=$(shell echo `cat ./views/version.txt | grep --color=never "enum" | sed 's/.\+(\([0-9]\+\),[ \t]\+\([0-9]\+\),[ \t]\+\([0-9]\+\)[ \t]*).\+/\1.\2.\3/g'`)
#+END_SRC
-*** dub (build tool) :dub:
+**** dub (build tool) :dub:
#+NAME: make_set_var_1_dub
#+BEGIN_SRC makefile
@@ -156,7 +157,7 @@ DUB=dub
DUB_FLAGS=-v --force --compiler=
#+END_SRC
-*** Project Details :project:spine:
+**** Project Details :project:spine:
#+NAME: make_set_var_2_project_details
#+BEGIN_SRC makefile
@@ -170,7 +171,7 @@ PRG_BINDIR=./bin
PRG_DOCDIR=./docs
#+END_SRC
-*** Emacs Org settings :settings:emacs:org:tangle:
+**** Emacs Org settings :settings:emacs:org:tangle:
#+NAME: make_set_var_3_emacs_org
#+BEGIN_SRC makefile
@@ -184,8 +185,8 @@ ORGFILES=""
ORGDIR :=$(shell echo `pwd`)
#+END_SRC
-*** Markup Samples
-**** pods
+**** Markup Samples
+***** pods
#+NAME: make_set_var_4_markup_samples_pods
#+BEGIN_SRC makefile
@@ -221,7 +222,7 @@ data/pod/un_contracts_international_sale_of_goods_convention_1980 \
data/pod/viral_spiral.david_bollier
#+END_SRC
-**** dir
+***** dir
#+NAME: make_set_var_5_markup_samples_search_dirs
#+BEGIN_SRC makefile
@@ -257,7 +258,7 @@ data/sisudir/media/text/un_contracts_international_sale_of_goods_convention_1980
data/sisudir/media/text/viral_spiral.david_bollier.sst
#+END_SRC
-******* sample markup file list
+******** sample markup file list
#+NAME: make_find_markup_samples_0_pod_and_dir
#+BEGIN_SRC makefile
@@ -278,7 +279,7 @@ markup_dir_samples:
find data/sisudir/media/text -name *.ss[tm] | sort
#+END_SRC
-** make archive
+*** make archive
#+NAME: make_project_zip_archive
#+BEGIN_SRC makefile
@@ -290,7 +291,7 @@ gitArchive:
echo "to unzip: tar -xzf spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT).tar.gz"
#+END_SRC
-** _make_ commands [+2] :make:commands:
+*** _make_ commands [+2] :make:commands:
- build commands
- build
@@ -307,7 +308,7 @@ gitArchive:
- git snapshot
- gitsnapshot
-*** version tag, stamp
+**** version tag, stamp
#+NAME: make_version_tag
#+BEGIN_SRC makefile
@@ -316,7 +317,7 @@ version_tag:
echo "git tag -f doc-reform_v<> -m\"doc-reform spine-<>\" HEAD"
#+END_SRC
-*** changelog
+**** changelog
#+NAME: make_changelog
#+BEGIN_SRC makefile
@@ -324,11 +325,11 @@ changelog:
git log --pretty=format:'---%+s %+as %ae%+h%d%+b' --no-merges | sed "/^\\s*$$/d" | sed "s/^---$$//" | sed "s/^\(\*\)\+/-/" | sed "s/ \+$$//" > CHANGELOG_
#+END_SRC
-*** build commands [+1] :build:compile:
-**** _dub_ build rebuild :dub:
-***** all compiler builds :all:dmd:gdc:ldc:
-****** all builds
-******* default
+**** build commands [+1] :build:compile:
+***** _dub_ build rebuild :dub:
+****** all compiler builds :all:dmd:gdc:ldc:
+******* all builds
+******** default
#+NAME: make_dub_upgrade
#+BEGIN_SRC makefile
@@ -341,22 +342,22 @@ dub_upgrade:
default: ldc
#+END_SRC
-******* quick :quick:
-******** default :default:
+******** quick :quick:
+********* default :default:
#+NAME: make_compile_1_compiler_all
#+BEGIN_SRC makefile
all: dmd ldc gdc
#+END_SRC
-******** version :version:
+********* version :version:
#+NAME: make_compile_2_compiler_all_versioned
#+BEGIN_SRC makefile
all_ver: dmd_ver ldc_ver gdc_ver
#+END_SRC
-******** get project dependencies
+********* get project dependencies
#+NAME: make_get_project_dependencies_github
#+BEGIN_SRC makefile
@@ -417,14 +418,14 @@ set_latest: set_depends flake_update rm_flakelock
gitDir=dub2nix; git clone --depth=1 https://github.com/lionello/${gitDir} | rm -rf ${gitDir}/.git
-******** debug :debug:
+********* debug :debug:
#+NAME: make_compile_3_compiler_all_debug
#+BEGIN_SRC makefile
all_debug: dmd_debug gdc_debug ldc_debug
#+END_SRC
-******** _meson_ build :meson:
+********* _meson_ build :meson:
meson using dub
- works & looks pretty clean
@@ -443,22 +444,22 @@ meson_project_build_clean: clean skel tangle dub_upgrade meson_build
meson: meson_clean_build_dir dub_upgrade meson_build
#+END_SRC
-******* clean & tangle :clean:tangle:
-******** default :default:
+******** clean & tangle :clean:tangle:
+********* default :default:
#+NAME: make_clean_tangle_compile_0_all_clean
#+BEGIN_SRC makefile
all_clean: clean tangle dmd ldc gdc
#+END_SRC
-******** version :version:
+********* version :version:
#+NAME: make_clean_tangle_compile_1_all_clean_versioned
#+BEGIN_SRC makefile
all_ver_clean: clean tangle dmd_ver ldc_ver gdc_ver
#+END_SRC
-******** debug :debug:
+********* debug :debug:
#+NAME: make_clean_tangle_compile_2_all_clean_debug
#+BEGIN_SRC makefile
@@ -466,10 +467,10 @@ all_debug_ver: dmd_debug_ver gdc_debug_ver ldc_debug_ver
all_debug_clean_ver: clean tangle dmd_debug_ver gdc_debug_ver ldc_debug_ver
#+END_SRC
-***** individual compiler builds :each:
-****** dmd :dmd:
-******* quick :quick:
-******** default :default:
+****** individual compiler builds :each:
+******* dmd :dmd:
+******** quick :quick:
+********* default :default:
#+NAME: make_dub_compile_dmd_0_default
#+BEGIN_SRC makefile
@@ -478,7 +479,7 @@ dmd: dub_upgrade
notify-send -t 0 'D dmd compiled test release executable ready' 'spine-dmd'
#+END_SRC
-******** debug :debug:
+********* debug :debug:
#+NAME: make_dub_compile_dmd_1_debug
#+BEGIN_SRC makefile
@@ -486,7 +487,7 @@ dmd_debug:
$(DUB) --compiler=dmd --config=dmd --build=debug
#+END_SRC
-******** version :version:
+********* version :version:
- assumes git tags with program version
@@ -499,7 +500,7 @@ dmd_ver: dub_upgrade
dmd_clean_ver: clean tangle dmd_ver
#+END_SRC
-******* clean & tangle :clean:tangle:
+******** clean & tangle :clean:tangle:
#+NAME: make_dub_compile_dmd_3_release
#+BEGIN_SRC makefile
@@ -512,9 +513,9 @@ dmd_debug_tangle: tangle
$(DUB) --compiler=dmd --config=spine-dmd-debug
#+END_SRC
-****** gdc :gdc:
-******* quick :quick:
-******** default :default:
+******* gdc :gdc:
+******** quick :quick:
+********* default :default:
#+NAME: make_dub_compile_gdc_0_default
#+BEGIN_SRC makefile
@@ -523,7 +524,7 @@ gdc: dub_upgrade
notify-send -t 0 'D gdc compiled test release executable ready' 'spine-gdc'
#+END_SRC
-******** debug :debug:
+********* debug :debug:
#+NAME: make_dub_compile_gdc_1_debug
#+BEGIN_SRC makefile
@@ -531,7 +532,7 @@ gdc_debug:
$(DUB) --compiler=gdc --config=gdc --build=debug
#+END_SRC
-******** version :version:
+********* version :version:
- assumes git tags with program version
@@ -544,7 +545,7 @@ gdc_ver: dub_upgrade
gdc_clean_ver: clean tangle gdc_ver
#+END_SRC
-******* clean & tangle :clean:tangle:
+******** clean & tangle :clean:tangle:
#+NAME: make_dub_compile_gdc_3_release
#+BEGIN_SRC makefile
@@ -557,9 +558,9 @@ gdc_debug_tangle: tangle
$(DUB) --compiler=gdc --config=spine-gdc-debug
#+END_SRC
-****** ldc :ldc:
-******* quick :quick:
-******** default :default:
+******* ldc :ldc:
+******** quick :quick:
+********* default :default:
#+NAME: make_dub_compile_ldc_0_default
#+BEGIN_SRC makefile
@@ -568,7 +569,7 @@ ldc: dub_upgrade
notify-send -t 0 'D ldc compiled test release executable ready' 'spine-ldc'
#+END_SRC
-******** debug :debug:
+********* debug :debug:
#+NAME: make_dub_compile_ldc_1_debug
#+BEGIN_SRC makefile
@@ -576,7 +577,7 @@ ldc_debug:
$(DUB) --compiler=ldc2 --config=ldc --build=debug
#+END_SRC
-******** version :version:
+********* version :version:
- assumes git tags with program version
@@ -589,7 +590,7 @@ ldc_ver: dub_upgrade
ldc_clean_ver: clean tangle ldc_ver
#+END_SRC
-******* clean & tangle :clean:tangle:
+******** clean & tangle :clean:tangle:
#+NAME: make_dub_compile_ldc_3_release
#+BEGIN_SRC makefile
@@ -602,8 +603,8 @@ ldc_debug_tangle: tangle
$(DUB) --compiler=ldc2 --config=spine-ldc-debug
#+END_SRC
-***** generic
-****** init clean distclean etc. :clean:
+****** generic
+******* init clean distclean etc. :clean:
#+NAME: make_initialization_operations_0_parts
#+BEGIN_SRC makefile
@@ -661,7 +662,7 @@ distclean_and_init: expunge
mkdir -p $(PRG_BINDIR);
#+END_SRC
-****** version :version:
+******* version :version:
#+NAME: make_initialization_operations_1_git_version
#+BEGIN_SRC makefile
@@ -669,7 +670,7 @@ ver:
echo spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT)
#+END_SRC
-****** tangle build rebuild :clean:tangle:build:
+******* tangle build rebuild :clean:tangle:build:
#+NAME: make_initialization_operations_2_assemble
#+BEGIN_SRC makefile
@@ -685,9 +686,9 @@ makefile_new:
restart: clean tangle
#+END_SRC
-*** testrun (program against document markup) :markup:sample:
-**** pods :pod:
-****** find files
+**** testrun (program against document markup) :markup:sample:
+***** pods :pod:
+******* find files
#+NAME: make_project_testrun_0_find_pods
#+BEGIN_SRC makefile
@@ -695,8 +696,8 @@ find_pods:
$(SiSU_MARKUP_SAMPLES_FIND_PODS)
#+END_SRC
-***** dmd
-****** find files
+****** dmd
+******* find files
#+NAME: make_project_testrun_1
#+BEGIN_SRC makefile
@@ -728,7 +729,7 @@ dmd_testrun_find_pod_all:
--output-dir=tmp/program_output_pod
#+END_SRC
-****** path list
+******* path list
#+NAME: make_project_testrun_2
#+BEGIN_SRC makefile
@@ -755,8 +756,8 @@ dmd_testrun_paths_pod_all:
$(SiSU_MARKUP_SAMPLES_POD)
#+END_SRC
-***** gdc
-****** find files
+****** gdc
+******* find files
#+NAME: make_project_testrun_3
#+BEGIN_SRC makefile
@@ -788,7 +789,7 @@ gdc_testrun_find_pod_all:
--output-dir=tmp/program_output_pod
#+END_SRC
-****** path list
+******* path list
#+NAME: make_project_testrun_4
#+BEGIN_SRC makefile
@@ -820,8 +821,8 @@ gdc_testrun_paths_pod_all:
$(SiSU_MARKUP_SAMPLES_POD)
#+END_SRC
-***** ldc
-****** find files
+****** ldc
+******* find files
#+NAME: make_project_testrun_5
#+BEGIN_SRC makefile
@@ -853,7 +854,7 @@ ldc_testrun_find_pod_all:
--output-dir=tmp/program_output_pod
#+END_SRC
-****** path list
+******* path list
#+NAME: make_project_testrun_6
#+BEGIN_SRC makefile
@@ -885,9 +886,9 @@ ldc_testrun_paths_pod_all:
$(SiSU_MARKUP_SAMPLES_POD)
#+END_SRC
-**** dir :dir:
-***** dmd
-****** find files
+***** dir :dir:
+****** dmd
+******* find files
#+NAME: make_project_testrun_7
#+BEGIN_SRC makefile
@@ -914,7 +915,7 @@ dmd_testrun_find_dir_all:
--output-dir=tmp/program_output_dir
#+END_SRC
-****** file list
+******* file list
#+NAME: make_project_testrun_8
#+BEGIN_SRC makefile
@@ -941,8 +942,8 @@ dmd_testrun_filelist_dir_all:
$(SiSU_MARKUP_SAMPLES_DIR)
#+END_SRC
-***** gdc
-****** find files
+****** gdc
+******* find files
#+NAME: make_project_testrun_9
#+BEGIN_SRC makefile
@@ -969,7 +970,7 @@ gdc_testrun_find_dir_all:
--output-dir=tmp/program_output_dir
#+END_SRC
-****** file list
+******* file list
#+NAME: make_project_testrun_10
#+BEGIN_SRC makefile
@@ -996,8 +997,8 @@ gdc_testrun_filelist_dir_all:
$(SiSU_MARKUP_SAMPLES_DIR)
#+END_SRC
-***** ldc
-****** find files
+****** ldc
+******* find files
#+NAME: make_project_testrun_11
#+BEGIN_SRC makefile
@@ -1024,7 +1025,7 @@ ldc_testrun_find_dir_all:
--output-dir=tmp/program_output_dir
#+END_SRC
-****** file list
+******* file list
#+NAME: make_project_testrun_12
#+BEGIN_SRC makefile
@@ -1051,8 +1052,8 @@ ldc_testrun_filelist_dir_all:
$(SiSU_MARKUP_SAMPLES_DIR)
#+END_SRC
-*** org babel tangle batch process command :tangle:
-**** tangle: org babel tangle *.org
+**** org babel tangle batch process command :tangle:
+***** tangle: org babel tangle *.org
#+NAME: make_tangle_0
#+BEGIN_SRC makefile
@@ -1076,7 +1077,7 @@ tangle: skel
(kill-buffer)) '($$ORGFILES)))" 2>&1
#+END_SRC
-**** tangle maker: org babel tangle makefile new
+***** tangle maker: org babel tangle makefile new
#+NAME: make_tangle_1
#+BEGIN_SRC makefile
@@ -1100,7 +1101,7 @@ tangle_maker:
(kill-buffer)) '($$ORGFILES)))" 2>&1
#+END_SRC
-**** tangle nix project
+***** tangle nix project
#+NAME: make_nix_project
#+BEGIN_SRC makefile
@@ -1118,7 +1119,7 @@ nix-build_default:
nix-build default.nix
#+END_SRC
-*** git snapshot
+**** git snapshot
#+NAME: make_git_snapshot
#+BEGIN_SRC makefile
@@ -1126,7 +1127,7 @@ gitsnapshot: distclean tangle
git commit -a
#+END_SRC
-** phony :phony:
+*** phony :phony:
#+NAME: make_phony
#+BEGIN_SRC makefile
@@ -1135,7 +1136,8 @@ gitsnapshot: distclean tangle
tangle gitsnapshot
#+END_SRC
-* dub.settings.json :dub:config:json:
+** dub
+*** dub.settings.json :dub:config:json:
#+HEADER: :NO-tangle ../dub.settings.json
#+BEGIN_SRC json
@@ -1145,9 +1147,9 @@ gitsnapshot: distclean tangle
}
#+END_SRC
-* dub.json :dub:config:json:
-** dub.json
-*** file-system download external dependencies
+*** dub.json :dub:config:json:
+**** dub.json
+***** file-system download external dependencies
- d2sqlite3
- dyaml
@@ -1260,7 +1262,7 @@ gitsnapshot: distclean tangle
}
#+END_SRC
-*** git (interim)
+***** git (interim)
#+BEGIN_SRC json
{
@@ -1375,7 +1377,7 @@ gitsnapshot: distclean tangle
}
#+END_SRC
-*** git fetched remote external dependencies (used previously)
+***** git fetched remote external dependencies (used previously)
#+BEGIN_SRC json
{
@@ -1432,7 +1434,7 @@ gitsnapshot: distclean tangle
}
#+END_SRC
-** +check dub.json+
+**** +check dub.json+
#+HEADER: :NO-tangle ../dub.json
#+BEGIN_SRC sh
@@ -1489,12 +1491,12 @@ gitsnapshot: distclean tangle
}
#+END_SRC
-* dub.sdl REFERENCE :dub:config:sdl:
+*** dub.sdl REFERENCE UNUSED :dub:config:sdl:
Every DUB package should contain a [[https://code.dlang.org/package-format?lang=json][dub.json]] (or [[https://code.dlang.org/package-format?lang=sdl][dub.sdl]])
-not configured here using dub.json for the moment
+not configured here, using dub.json for the moment
-** header (including dependencies) :header:
+**** header (including dependencies) :header:
#+HEADER: :NO-tangle ../dub.sdl
#+HEADER: :tangle-mode (identity #o755)
@@ -1519,7 +1521,7 @@ dependency "dyaml" version="~><>" # htt
dependency "tinyendian" version="~><>" # https://code.dlang.org/packages/tinyendian https://github.com/dlang-community/tinyendian // dyaml dependency
#+END_SRC
-** default
+**** default
time (dub --compiler=dmd --build=release)
time (dub --compiler=ldc2 --build=release)
@@ -1533,7 +1535,7 @@ configuration "default" {
}
#+END_SRC
-** dmd :dmd:
+**** dmd :dmd:
time (dub --compiler=dmd -color --config=dmd --build=dmd)
time (dub --compiler=dmd -color --config=dmd --build=dmd-release)
@@ -1565,7 +1567,7 @@ configuration "dmd-version" {
buildOptions "verbose" "optimize" "inline" // ok works takes ages to compile
buildOptions "verbose" "releaseMode" "optimize" "inline" // broken, runtime errors
-** ldc :ldc:
+**** ldc :ldc:
time (dub --compiler=ldc2 -color --config=ldc --build=ldc)
time (dub --compiler=ldc2 -color --config=ldc --build=ldc-release)
@@ -1598,7 +1600,7 @@ configuration "ldc-version" {
}
#+END_SRC
-** gdc :gdc:
+**** gdc :gdc:
time (dub --compiler=gdc --config=gdc --build=gdc)
time (dub --compiler=gdc --config=gdc --build=gdc-release)
@@ -1629,8 +1631,8 @@ configuration "gdc-version" {
}
#+END_SRC
-*** generic :generic:
-**** build (sdp) :release:
+***** generic :generic:
+****** build (sdp) :release:
#+BEGIN_SRC sh
configuration "build" {
@@ -1644,8 +1646,8 @@ configuration "build" {
}
#+END_SRC
-* meson REVISIT :meson:build:
-** notes
+** meson REVISIT :meson:build:
+*** notes
https://mesonbuild.com/D.html
https://mesonbuild.com/Dependencies.html#Dub
@@ -1667,7 +1669,7 @@ https://mesonbuild.com/Dependencies.html#Dub
- flags
DFLAGS= DC=ldc2 meson ..
-*** projects :project:
+**** projects :project:
ls -1 ./src/*/**/*.d
tree -fi ./src |rg "\.d$"
@@ -1678,7 +1680,7 @@ for i in spine_sources.txt; do; \
sed -i "s/\(.\+\.d$\)/ '\1',/g" $i; done && \
cat spine_sources.txt
-**** project meson.build :filelist:
+***** project meson.build :filelist:
#+HEADER: :tangle ../meson.build
#+BEGIN_SRC sh
@@ -1753,8 +1755,8 @@ spine_exe = executable('spine',
)
#+END_SRC
-** meson subprojects [taken care of using dub] :subprojects:
-*** README
+*** meson subprojects [taken care of using dub] :subprojects:
+**** README
#+HEADER: :tangle ../subprojects/README
#+BEGIN_SRC txt
@@ -1782,8 +1784,8 @@ spine_exe = executable('spine',
- https://github.com/dlang-community/tinyendian.git
#+END_SRC
-*** d2sqlite3 :d2sqlite3:
-**** wrap
+**** d2sqlite3 :d2sqlite3:
+***** wrap
#+HEADER: :tangle ../subprojects/d2sqlite3.wrap
#+BEGIN_SRC sh
@@ -1793,7 +1795,7 @@ url = https://github.com/dlang-community/d2sqlite3.git
revision = head
#+END_SRC
-**** meson.build
+***** meson.build
#+HEADER: :NO-tangle ../subprojects/d2sqlite3.meson.build
#+BEGIN_SRC sh
@@ -1847,8 +1849,8 @@ d2sqlite3_dep = declare_dependency(
)
#+END_SRC
-*** dyaml :dyaml:
-**** wrap
+**** dyaml :dyaml:
+***** wrap
#+HEADER: :tangle ../subprojects/dyaml.wrap
#+BEGIN_SRC sh
@@ -1858,7 +1860,7 @@ url = https://github.com/dlang-community/D-YAML.git
revision = head
#+END_SRC
-**** meson.build (provided by upstream)
+***** meson.build (provided by upstream)
Upstream provides meson.build
@@ -1936,8 +1938,8 @@ dyaml_dep = declare_dependency(
)
#+END_SRC
-*** imageformats :imageformats:
-**** wrap
+**** imageformats :imageformats:
+***** wrap
#+HEADER: :tangle ../subprojects/imageformats.wrap
#+BEGIN_SRC sh
@@ -1947,7 +1949,7 @@ url = https://github.com/lgvz/imageformats.git
revision = head
#+END_SRC
-**** meson.build
+***** meson.build
#+HEADER: :NO-tangle ../subprojects/imageformats.meson.build
#+BEGIN_SRC sh
@@ -1993,8 +1995,8 @@ imageformats_dep = declare_dependency(
)
#+END_SRC
-*** tinyendian (dyaml dependency) :tinyendian:
-**** wrap
+**** tinyendian (dyaml dependency) :tinyendian:
+***** wrap
#+HEADER: :tangle ../subprojects/tinyendian.wrap
#+BEGIN_SRC sh
@@ -2004,7 +2006,7 @@ url = https://github.com/dlang-community/tinyendian.git
revision = head
#+END_SRC
-**** meson.build (provided by upstream)
+***** meson.build (provided by upstream)
Upstream provides meson.build
@@ -2053,13 +2055,13 @@ tinyendian_dep = declare_dependency(
)
#+END_SRC
-* nix :nix:
+** nix :nix:
- default.nix
- shell.nix
-** envrc :envrc:
-*** .envrc
+*** envrc :envrc:
+**** .envrc
#+HEADER: :tangle ../.envrc
#+BEGIN_SRC sh
@@ -2118,7 +2120,7 @@ else
fi
#+END_SRC
-*** .envrc-local CHECK MODIFY
+**** .envrc-local CHECK MODIFY
- bespoke modify appropriately and generate if needed
@@ -2127,7 +2129,7 @@ fi
export NIX_PATH=<>
#export NIX_PATH=<>
## reload when these files change
-use flake
+#use flake
watch_file flake.nix
# watch_file flake.lock
## load the flake devShell
@@ -2260,15 +2262,15 @@ cat SHELL_NIX_NOTE_
echo "cat SHELL_NIX_NOTE_"
#+END_SRC
-**** nixpkgs_path SETUP
-***** nixpkgs select path SELECT
+***** nixpkgs_path SETUP
+****** nixpkgs select path SELECT
#+NAME: nixpkgs_path
#+BEGIN_SRC nix
<>
#+END_SRC
-***** nixpkgs path options
+****** nixpkgs path options
-
@@ -2284,7 +2286,7 @@ echo "cat SHELL_NIX_NOTE_"
/nixpkgs-ra/nixpkgs
#+END_SRC
-***** project path options
+****** project path options
- local path SET WARN
@@ -2333,7 +2335,7 @@ nixpkgs=<>
/var/www
#+END_SRC
-** flake :flake:
+*** flake :flake:
- flake.nix
/nixpkgs-ra/nixpkgs
@@ -2456,7 +2458,7 @@ github:nixos/nixpkgs
}
#+END_SRC
-** shell.nix :shell:
+*** shell.nix :shell:
nix-shell
@@ -2540,7 +2542,7 @@ pkgs.mkShell {
}
#+END_SRC
-** default.nix :default:
+*** default.nix :default:
- default.nix -I nixpkgs=
e.g. default.nix -I nixpkgs=/nixpkgs-ra/nixpkgs
@@ -2678,7 +2680,7 @@ installPhase = ''
'';
#+END_SRC
-** project meta
+*** project meta
#+NAME: nix_project_meta
#+BEGIN_SRC nix
@@ -2691,7 +2693,7 @@ meta = with pkgs.lib; {
};
#+END_SRC
-** dub.selections.json
+*** dub.selections.json
#+HEADER: :NO-tangle ../dub.selections.json
#+BEGIN_SRC nix
@@ -2706,7 +2708,7 @@ meta = with pkgs.lib; {
}
#+END_SRC
-** dub.selections.nix
+*** dub.selections.nix
#+HEADER: :NO-tangle ../nix/dub.selections.nix
#+BEGIN_SRC nix
@@ -2762,8 +2764,8 @@ meta = with pkgs.lib; {
} ]
#+END_SRC
-** dub2nix & shared pkgs SHARED
-*** dub2nix with pkgs shared
+*** dub2nix & shared pkgs SHARED
+**** dub2nix with pkgs shared
#+NAME: nix_with_pkgs
#+BEGIN_SRC nix
@@ -2772,7 +2774,7 @@ with pkgs; [
]
#+END_SRC
-*** with pkgs list
+**** with pkgs list
#+NAME: nix_shell_with_pkgs_list
#+BEGIN_SRC nix
@@ -2787,7 +2789,7 @@ jq
git
#+END_SRC
-* .gitignore :gitignore:
+** .gitignore :gitignore:
#+HEADER: :tangle "../.gitignore"
#+BEGIN_SRC sh
@@ -2866,6 +2868,2239 @@ tmp/**
#+END_SRC
* sundry misc
+** spine search cgi (in ./sundry)
+
+*** shell.nix
+
+#+HEADER: :tangle "../sundry/spine_search_cgi/shell.nix"
+#+HEADER: :tangle-mode (identity #o755)
+#+HEADER: :shebang "#!/usr/bin/env -S nix-shell --pure"
+#+BEGIN_SRC nix
+{ pkgs ? import <nixpkgs> {} }:
+pkgs.mkShell {
+ buildInputs = with pkgs; [(
+ with pkgs; [
+ nixFlakes
+ rund
+ dub
+ ldc
+ sqlite
+ nix-prefetch-git
+ validatePkgConfig
+ jq
+ git
+ ]
+ )];
+ shellHook = ''
+ if [[ -e ".envrc" ]]; then
+ source .envrc
+ fi
+ '';
+}
+#+END_SRC
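+
+A minimal usage sketch (assuming the block is tangled to ../sundry/spine_search_cgi/shell.nix as the header specifies): the :shebang and :tangle-mode headers make the tangled file directly executable.
+
+#+BEGIN_SRC sh
+# enter the pinned build environment described by shell.nix
+nix-shell --pure
+# or, thanks to the "#!/usr/bin/env -S nix-shell --pure" shebang, run it directly
+./shell.nix
+#+END_SRC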
+
+*** default.nix
+
+#+HEADER: :tangle "../sundry/spine_search_cgi/default.nix"
+#+HEADER: :tangle-mode (identity #o755)
+#+HEADER: :shebang "#!/usr/bin/env -S nix-build"
+#+BEGIN_SRC nix
+{ pkgs ? import <nixpkgs> {},
+ stdenv ? pkgs.stdenv,
+ lib ? pkgs.lib,
+ ldc ? null,
+ dcompiler ? pkgs.ldc,
+ dub ? pkgs.dub
+}:
+assert dcompiler != null;
+with (
+ assert dcompiler != null;
+ with lib;
+ let
+ # Filter function to remove the .dub package folder from src
+ filterDub = name: type: let baseName = baseNameOf (toString name); in ! (
+ type == "directory" && baseName == ".dub"
+ );
+ targetOf = package: "${package.targetPath or "."}/${package.targetName or package.name}";
+ # Remove reference to build tools and library sources
+ disallowedReferences = deps: [ dcompiler dub ];
+ removeExpr = refs: ''remove-references-to ${lib.concatMapStrings (ref: " -t ${ref}") refs}'';
+ in {
+ mkDubDerivation = lib.makeOverridable ({
+ src,
+ nativeBuildInputs ? [],
+ dubJSON ? src + "/dub.json",
+ passthru ? {},
+ package ? lib.importJSON dubJSON,
+ ...
+ } @ attrs: stdenv.mkDerivation (attrs // {
+ pname = package.name;
+ nativeBuildInputs = [ dcompiler dub pkgs.removeReferencesTo ] ++ nativeBuildInputs;
+ disallowedReferences = disallowedReferences deps;
+ passthru = passthru // {
+ inherit dub dcompiler pkgs;
+ };
+ src = lib.cleanSourceWith {
+ filter = filterDub;
+ src = lib.cleanSource src;
+ };
+ preFixup = ''
+ find $out/share/cgi-bin -type f -exec ${removeExpr (disallowedReferences deps)} '{}' + || true
+ '';
+ buildPhase = ''
+ runHook preBuild
+ export HOME=$PWD
+ for dc_ in dmd ldmd2 gdmd; do
+ echo "- check for D compiler $dc_"
+ dc=$(type -P $dc_ || echo "")
+ if [ ! "$dc" == "" ]; then
+ break
+ fi
+ done
+ if [ "$dc" == "" ]; then
+ exit "Error: could not find D compiler"
+ fi
+ echo "$dc_ used as D compiler to build $pname"
+ dub build --compiler=$dc --build=release --combined --skip-registry=all
+ runHook postBuild
+ '';
+ checkPhase = ''
+ runHook preCheck
+ export HOME=$PWD
+ dub test --combined --skip-registry=all
+ runHook postCheck
+ '';
+ installPhase = ''
+ runHook preInstall
+ mkdir -p $out/share/cgi-bin
+ cp -r "${targetOf package}" $out/share/cgi-bin
+ install -m755 -D $out/share/cgi-bin/spine_search spine_search
+ runHook postInstall
+ '';
+ postInstall = ''
+ echo "HERE ${targetOf package} $out/share/cgi-bin"
+ echo `ls -la $out/share/cgi-bin/spine_search`
+ '';
+ meta = lib.optionalAttrs (package ? description) {
+ description = package.description;
+ } // attrs.meta or {};
+ } // lib.optionalAttrs (!(attrs ? version)) {
+ # Use name from dub.json, unless pname and version are specified
+ name = package.name;
+ }));
+ }
+);
+mkDubDerivation rec {
+ name = "spine-search-${version}";
+ version = "0.11.3";
+ src = ./.;
+ buildInputs = [
+ pkgs.sqlite (
+ with pkgs; [
+ nixFlakes
+ rund
+ dub
+ ldc
+ sqlite
+ ]
+ )
+ ];
+ # # buildPhase = [ ];
+ # installPhase = ''
+ # install -m755 -D spine_search $out/bin/spine-search
+ # echo "built $out/bin/spine-search"
+ # '';
+ meta = with pkgs.lib; {
+ homepage = https://sisudoc.org;
+ description = "a sisu like document parser";
+ license = licenses.agpl3Plus;
+ platforms = platforms.linux;
+ maintainers = [ RalphAmissah ];
+ };
+}
+#+END_SRC
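+
+A build sketch for the derivation above (the result path follows from the installPhase; the copy into a served cgi-bin directory is an example step, adjust to the local web server layout):
+
+#+BEGIN_SRC sh
+# build the cgi derivation; nix leaves a ./result symlink
+nix-build default.nix
+# the installPhase above places the binary under share/cgi-bin
+ls -la result/share/cgi-bin/spine_search
+# example deployment location, as suggested in views/configuration_suggested.txt
+#cp result/share/cgi-bin/spine_search /var/www/cgi/cgi-bin/
+#+END_SRC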
+
+*** .envrc
+
+#+HEADER: :tangle "../sundry/spine_search_cgi/.envrc"
+#+BEGIN_SRC sh
+NIX_ENFORCE_PURITY=0
+if [ -e .envrc-local ]; then # source an additional user-specific .envrc in ./.envrc-local
+ source .envrc-local
+fi
+#+END_SRC
+
+*** .envrc-local
+
+#+HEADER: :tangle "../sundry/spine_search_cgi/.envrc-local_"
+#+BEGIN_SRC sh
+#export NIX_PATH=/nix/var/nix/profiles/per-user/root/channels/nixos
+export NIX_PATH=nixpkgs=/nixpkgs-ra/nixpkgs
+## reload when these files change
+#use flake
+watch_file flake.nix
+# watch_file flake.lock
+## load the flake devShell
+#lorri direnv
+eval "$(nix print-dev-env)"
+# echo $NIX_BUILD_TOP
+export SpineVER=$(git describe --long --tags | sed 's/^[ a-z_-]\+\([0-9.]\+\)/\1/;s/\([^-]*-g\)/r\1/;s/-/./g')
+export SpineSRC=/grotto-ra/repo/git.repo/projects/project-spine/doc-reform
+export SpineDOC=/grotto-ra/repo/git.repo/projects/project-spine/doc-reform-markup/spine-markup-samples
+export SpineOUT=/tmp/spine/$SpineVER/www
+export SpineOUTstatic=/var/www
+
+echo '-*- mode: org -*-
+
+,* nixpkgs path?
+
+ eval "$(nix print-dev-env)"
+' > SHELL_NIX_NOTE_
+
+echo " == `nix-instantiate --find-file nixpkgs`" >> SHELL_NIX_NOTE_
+
+echo '
+,* nix build and show derivation
+
+ nix-shell --pure
+
+ nix-build
+ nix build -f default.nix
+ nix shell -f default.nix
+ nix-instantiate | nix-build
+ nix build `nix-instantiate`
+
+ nix develop
+
+ nix-instantiate | nix show-derivation | jq
+ nix-instantiate | nix show-derivation --recursive | jq
+
+ nix search --json 2>/dev/null |jq
+
+,* version and build info
+' >> SHELL_NIX_NOTE_
+
+echo " spine version (git) == $SpineVER" >> SHELL_NIX_NOTE_
+echo " nix-instantiate == `nix-instantiate`" >> SHELL_NIX_NOTE_
+
+echo '
+,* initialised shell variables
+
+ SpineSRC=/grotto-ra/repo/git.repo/projects/project-spine/doc-reform
+ SpineDOC=/grotto-ra/repo/git.repo/projects/project-spine/doc-reform-markup/spine-markup-samples
+ SpineOUT=/tmp/spine/$SpineVER/www
+ SpineOUTstatic=/var/www
+
+,* spine run instruction examples
+
+,** cgi operations (output to $SpineOUTstatic /var/www)
+
+ $SpineSRC/result/bin/spine --very-verbose --sqlite-db-create --output="$SpineOUTstatic" $SpineDOC/markup/pod/*
+
+ $SpineSRC/result/bin/spine -v --cgi-search-form-codegen --output=$SpineOUTstatic $SpineDOC/markup/pod/*
+
+ $SpineSRC/result/bin/spine -v --show-config --config=$SpineDOC/markup/pod/.dr
+
+ $SpineSRC/result/bin/spine --html $SpineDOC/markup/pod/*
+
+ $SpineSRC/result/bin/spine -v --sqlite-db-create --sqlite-db-filename="spine.search.db" --output="$SpineOUTstatic" $SpineDOC/markup/pod/*
+
+ $SpineSRC/result/bin/spine -v --sqlite-db-create --sqlite-db-filename="spine.search.db" --cgi-sqlite-search-filename="spine-search" --output=$SpineOUTstatic $SpineDOC/markup/pod/*
+ $SpineSRC/result/bin/spine -v --sqlite-db-recreate --sqlite-db-filename="spine.search.db" --cgi-sqlite-search-filename="spine-search" --output=$SpineOUTstatic $SpineDOC/markup/pod/*
+
+ $SpineSRC/result/bin/spine -v --sqlite-update --sqlite-db-filename="spine.search.db" --output=$SpineOUTstatic $SpineDOC/markup/pod/*
+
+ $SpineSRC/result/bin/spine -v --cgi-search-form-codegen --config=$SpineDOC/markup/pod/.dr/config_local_site
+
+ $SpineSRC/result/bin/spine -v --html --html-link-search --html-link-harvest --harvest --output=$SpineOUTstatic $SpineDOC/markup/pod/*
+
+ - generate html linked to search form
+ $SpineSRC/result/bin/spine -v --sqlite-db-recreate --sqlite-db-filename="spine.search.db" --cgi-sqlite-search-filename="spine-search" --sqlite-update --output=$SpineOUTstatic $SpineDOC/markup/pod/*
+ - make search form
+ $SpineSRC/result/bin/spine -v --cgi-search-form-codegen --config=$SpineDOC/markup/pod/.dr/config_local_site
+ #$SpineSRC/result/bin/spine -v --cgi-search-form-codegen --output=$SpineOUTstatic $SpineDOC/markup/pod/*
+
+' >> SHELL_NIX_NOTE_
+cat SHELL_NIX_NOTE_
+echo "see SHELL_NIX_NOTE"
+#+END_SRC
+
+*** dub.json
+
+#+HEADER: :tangle "../sundry/spine_search_cgi/dub.json"
+#+BEGIN_SRC json
+{
+ "authors": [
+ "Ralph Amissah"
+ ],
+ "copyright": "Copyright © 2015 - 2021 Ralph Amissah",
+ "name": "spine_search",
+ "version": "0.11.3",
+ "description": "cgi search for spine, a sisu like document parser",
+ "homepage": "https://sisudoc.org",
+ "license": "AGPL-3.0+",
+ "targetPath": "./cgi-bin",
+ "sourcePaths": [ "./src" ],
+ "stringImportPaths": [ "./views" ],
+ "buildRequirements": [ "allowWarnings" ],
+ "targetType": "executable",
+ "platforms": [ "posix" ],
+ "buildTypes": {
+ "dmd": {
+ "dflags": [ "-J=views", "-I=src" ],
+ "buildOptions": [ "verbose", "inline" ],
+ "buildRequirements": [ "allowWarnings" ]
+ },
+ "ldc": {
+ "dflags": [ "-O2", "-J=views", "-I=src" ],
+ "buildOptions": [ "verbose", "optimize", "inline" ],
+ "buildRequirements": [ "allowWarnings" ]
+ }
+ },
+ "dependencies": {
+ "spine_search:arsd.cgi": "*",
+ "spine_search:d2sqlite3": "*"
+ },
+ "subPackages": [
+ {
+ "name": "arsd.cgi",
+ "description": "cgi",
+ "homepage": "https://github.com/dlang-community/d2sqlite3",
+ "authors": [ "Aadam Ruppee" ],
+ "copyright": "Copyright 2011-18 Aadam Ruppee",
+ "license": "BSL-1.0",
+ "sourcePaths": [ "./src/ext_depends_cgi/arsd" ],
+ "configurations": [
+ {
+ "name": "cgi",
+ "cgi": "with-lib",
+ "targetType": "library",
+ "systemDependencies": "Arsd version >= 0.8.7"
+ }
+ ]
+ },
+ {
+ "name": "d2sqlite3",
+ "description": "A thin wrapper around SQLite 3",
+ "homepage": "https://github.com/dlang-community/d2sqlite3",
+ "authors": [ "Nicolas Sicard", "Other contributors: see Github repo" ],
+ "copyright": "Copyright 2011-18 Nicolas Sicard",
+ "license": "BSL-1.0",
+ "sourcePaths": [ "./src/ext_depends_cgi/d2sqlite3/source" ],
+ "configurations": [
+ {
+ "name": "d2sqlite3",
+ "d2sqlite3": "with-lib",
+ "targetType": "library",
+ "systemDependencies": "SQLite version >= 3.8.7",
+ "libs": [ "sqlite3" ],
+ "excludedSourceFiles": [ "source/tests.d" ]
+ }
+ ]
+ }
+ ],
+ "configurations": [
+ {
+ "name": "default",
+ "targetName": "spine_search"
+ },
+ {
+ "name": "ldc",
+ "targetName": "spine-search-ldc.cgi"
+ },
+ {
+ "name": "dmd",
+ "targetName": "spine-search-dmd.cgi"
+ }
+ ],
+ "subConfigurations": {
+ "d2sqlite3": "with-lib"
+ }
+}
+#+END_SRC
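+
+The configurations declared above can be exercised directly with dub (a sketch; it assumes the ldc2 and dmd compilers are on PATH, e.g. via shell.nix):
+
+#+BEGIN_SRC sh
+# "default" configuration, written to ./cgi-bin/spine_search (targetPath/targetName above)
+dub build
+# named configurations, producing spine-search-ldc.cgi and spine-search-dmd.cgi
+dub build --compiler=ldc2 --config=ldc
+dub build --compiler=dmd --config=dmd
+#+END_SRC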
+
+*** spine_search.d
+**** settings for spine search SET
+
+#+NAME: url_doc_root
+#+BEGIN_SRC text
+/srv/www
+#+END_SRC
+
+#+NAME: doc_root_path
+#+BEGIN_SRC text
+_cfg.doc_root_www
+#+END_SRC
+#+BEGIN_SRC text
+/var/www/html
+#+END_SRC
+
+#+NAME: cgi_root_path
+#+BEGIN_SRC text
+_cfg.doc_root_cgi
+#+END_SRC
+#+BEGIN_SRC text
+/usr/lib/cgi-bin/
+#+END_SRC
+
+#+NAME: db_root_path
+#+BEGIN_SRC text
+_cfg.doc_root_db
+#+END_SRC
+#+BEGIN_SRC text
+<<url_doc_root>>/spine/static/sqlite/
+#+END_SRC
+
+**** spine_search configuration.txt
+
+#+HEADER: :tangle "../sundry/spine_search_cgi/views/configuration_suggested.txt"
+#+BEGIN_SRC d
+/+ obt - org-mode generated file +/
+struct Cfg {
+ string doc_root_www = "/srv/www/spine/static";
+ string doc_root_cgi = "/var/www/cgi/cgi-bin/";
+ string doc_root_db = "/srv/www/spine/static/sqlite/";
+ string filename_cgi = "spine_search";
+ string filename_db = "spine.search.db";
+}
+enum _cfg = Cfg();
+#+END_SRC
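+
+spine_search.d pulls this struct in at compile time with mixin(import("configuration.txt")), resolved against the "./views" string import path set in dub.json, so a site-specific copy named configuration.txt is expected beside the suggested file (a sketch; the copy step is an assumption about local setup):
+
+#+BEGIN_SRC sh
+# provide the compile-time configuration the cgi expects
+cp views/configuration_suggested.txt views/configuration.txt
+#+END_SRC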
+
+**** spine_search.d
+
+#+HEADER: :tangle "../sundry/spine_search_cgi/src/spine_search.d_"
+#+BEGIN_SRC d
+/+ dub.sdl
+ name "spine search"
+ description "spine cgi search"
+ /+ obt - org-mode generated file +/
++/
+import std.format;
+import std.range;
+import std.regex;
+import arsd.cgi;
+import d2sqlite3;
+import std.process : environment;
+void cgi_function_intro(Cgi cgi) {
+ mixin(import("configuration.txt"));
+ string header;
+ string table;
+ string form;
+ struct Config {
+ string http_request_type;
+ string http_host;
+ // string server_name;
+ string web_doc_root_path;
+ string doc_collection_sub_root;
+ string cgi_root;
+ string cgi_script;
+ string data_path_html;
+ string db_path;
+ string query_string;
+ string http_url;
+ string request_method;
+ }
+ auto conf = Config(); // SET
+ conf.http_request_type = environment.get("REQUEST_SCHEME", "http");
+ conf.http_host = environment.get("HTTP_HOST", "localhost");
+ // conf.server_name = environment.get("SERVER_NAME", "localhost");
+ conf.web_doc_root_path = environment.get("DOCUMENT_ROOT", <<doc_root_path>>);
+ conf.doc_collection_sub_root = <>; // (output_path - web_doc_root_path)
+ // conf.doc_collection_sub_root = "<>/spine/static"; // (output_path - web_doc_root_path) // problem FIX
+ conf.cgi_root = environment.get("CONTEXT_DOCUMENT_ROOT", <<cgi_root_path>>);
+ // conf.cgi_script = environment.get("SCRIPT_NAME", "/cgi-bin/spine-search");
+ conf.query_string = environment.get("QUERY_STRING", "");
+ conf.http_url = environment.get("HTTP_REFERER", conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ conf.query_string);
+ conf.db_path = <<db_root_path>>; // (output_path + /sqlite)
+ conf.request_method = environment.get("REQUEST_METHOD", "POST");
+ struct CGI_val {
+ string db_selected = "";
+ string sql_match_limit = ""; // radio: ( 1000 | 2500 )
+ string sql_match_offset = "";
+ string search_text = "";
+ string results_type = ""; // index
+ bool checked_echo = false;
+ bool checked_stats = false;
+ bool checked_url = false;
+ bool checked_searched = false;
+ bool checked_tip = false;
+ bool checked_sql = false;
+ }
+ auto cv = CGI_val();
+ cv.db_selected = _cfg.filename_db;
+ // cv.db_selected = "spine.search.db";
+ auto text_fields() {
+ string canned_query_str = environment.get("QUERY_STRING", "");
+ if ("query_string" in cgi.post) {
+ canned_query_str = environment.get("QUERY_STRING", "");
+ }
+ string[string] canned_query;
+ if (conf.request_method == "POST") {
+ } else if (conf.request_method == "GET") {
+ foreach (pair_str; canned_query_str.split("&")) {
+ // cgi.write(pair_str ~ "
");
+ string[] pair = pair_str.split("=");
+ canned_query[pair[0]] = pair[1];
+ }
+ // foreach (field, content; canned_query) {
+ // cgi.write(field ~ ": " ~ content ~ "
");
+ // }
+ }
+ static struct Rgx {
+ // static canned_query = ctRegex!(`\A(?P<matched>.+)\Z`, "m");
+ static search_text_area = ctRegex!(`\A(?P<matched>.+)\Z`, "m");
+ // static fulltext = ctRegex!(`\A(?P<matched>.+)\Z`, "m");
+ static line = ctRegex!(`^(?P<matched>.+?)(?: ~|$)`, "m");
+ static text = ctRegex!(`(?:^|\s~\s*)text:\s+(?P<matched>.+?)(?: ~|$)`, "m");
+ static author = ctRegex!(`(?:^|\s~\s*)author:\s+(?P<matched>.+)$`, "m");
+ static title = ctRegex!(`(?:^|\s~\s*)title:\s+(?P<matched>.+)$`, "m");
+ static uid = ctRegex!(`(?:^|\s~\s*)uid:\s+(?P<matched>.+)$`, "m");
+ static fn = ctRegex!(`(?:^|\s~\s*)fn:\s+(?P<matched>.+)$`, "m");
+ static keywords = ctRegex!(`(?:^|\s~\s*)keywords:\s+(?P<matched>.+)$`, "m");
+ static topic_register = ctRegex!(`(?:^|\s~\s*)topic_register:\s+(?P<matched>.+)$`, "m");
+ static subject = ctRegex!(`(?:^|\s~\s*)subject:\s+(?P<matched>.+)$`, "m");
+ static description = ctRegex!(`(?:^|\s~\s*)description:\s+(?P<matched>.+)$`, "m");
+ static publisher = ctRegex!(`(?:^|\s~\s*)publisher:\s+(?P<matched>.+)$`, "m");
+ static editor = ctRegex!(`(?:^|\s~\s*)editor:\s+(?P<matched>.+)$`, "m");
+ static contributor = ctRegex!(`(?:^|\s~\s*)contributor:\s+(?P<matched>.+)$`, "m");
+ static date = ctRegex!(`(?:^|\s~\s*)date:\s+(?P<matched>.+)$`, "m");
+ static results_type = ctRegex!(`(?:^|\s~\s*)type:\s+(?P<matched>.+)$`, "m");
+ static format = ctRegex!(`(?:^|\s~\s*)format:\s+(?P<matched>.+)$`, "m");
+ static source = ctRegex!(`(?:^|\s~\s*)source:\s+(?P<matched>.+)$`, "m");
+ static language = ctRegex!(`(?:^|\s~\s*)language:\s+(?P<matched>.+)$`, "m");
+ static relation = ctRegex!(`(?:^|\s~\s*)relation:\s+(?P<matched>.+)$`, "m");
+ static coverage = ctRegex!(`(?:^|\s~\s*)coverage:\s+(?P<matched>.+)$`, "m");
+ static rights = ctRegex!(`(?:^|\s~\s*)rights:\s+(?P<matched>.+)$`, "m");
+ static comment = ctRegex!(`(?:^|\s~\s*)comment:\s+(?P<matched>.+)$`, "m");
+ // static abstract_ = ctRegex!(`(?:^|\s~\s*)abstract:\s+(?P<matched>.+)$`, "m");
+ static src_filename_base = ctRegex!(`^src_filename_base:\s+(?P<matched>.+)$`, "m");
+ }
+ struct searchFields {
+ string canned_query = ""; // GET canned_query == cq
+ string search_text_area = ""; // POST search_text_area == tsa
+ string text = ""; // text == txt
+ string author = ""; // author == au
+ string title = ""; // title == ti
+ string uid = ""; // uid == uid
+ string fn = ""; // fn == fn
+ string keywords = ""; // keywords == kw
+ string topic_register = ""; // topic_register == tr
+ string subject = ""; // subject == su
+ string description = ""; // description == de
+ string publisher = ""; // publisher == pb
+ string editor = ""; // editor == ed
+ string contributor = ""; // contributor == ct
+ string date = ""; // date == dt
+ string format = ""; // format == fmt
+ string source = ""; // source == src sfn
+ string language = ""; // language == lng
+ string relation = ""; // relation == rl
+ string coverage = ""; // coverage == cv
+ string rights = ""; // rights == rgt
+ string comment = ""; // comment == cmt
+ // string abstract = "";
+ string src_filename_base = ""; // src_filename_base == bfn
+ string results_type = ""; // results_type == rt radio
+ string sql_match_limit = ""; // sql_match_limit == sml radio
+ string sql_match_offset = ""; // sql_match_offset == smo
+ string stats = ""; // stats == sts checked
+ string echo = ""; // echo == ec checked
+ string url = ""; // url == url checked
+ string searched = ""; // searched == se checked
+ string sql = ""; // sql == sql checked
+ }
+ auto rgx = Rgx();
+ auto got = searchFields();
+ if (environment.get("REQUEST_METHOD", "POST") == "POST") {
+ if ("sf" in cgi.post) {
+ got.search_text_area = cgi.post["sf"];
+ if (auto m = got.search_text_area.matchFirst(rgx.text)) {
+ got.text = m["matched"];
+ got.canned_query ~= "sf=" ~ m["matched"];
+ } else if (auto m = got.search_text_area.matchFirst(rgx.line)) {
+ if (
+ !(m["matched"].matchFirst(rgx.author))
+ && !(m["matched"].matchFirst(rgx.title))
+ ) {
+ got.text = m["matched"];
+ got.canned_query ~= "sf=" ~ m["matched"];
+ }
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.author)) {
+ got.author = m["matched"];
+ got.canned_query ~= "&au=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.title)) {
+ got.title = m["matched"];
+ got.canned_query ~= "&ti=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.uid)) {
+ got.uid = m["matched"];
+ got.canned_query ~= "&uid=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.fn)) {
+ got.fn = m["matched"];
+ got.canned_query ~= "&fn=" ~ m["matched"];
+ } else if ("fn" in cgi.post) {
+ got.search_text_area ~= "\nfn: " ~ cgi.post["fn"] ~ "\n";
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.keywords)) {
+ got.keywords = m["matched"];
+ got.canned_query ~= "&kw=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.topic_register)) {
+ got.topic_register = m["matched"];
+ got.canned_query ~= "&tr=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.subject)) {
+ got.subject = m["matched"];
+ got.canned_query ~= "&su=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.description)) {
+ got.description = m["matched"];
+ got.canned_query ~= "&de=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.publisher)) {
+ got.publisher = m["matched"];
+ got.canned_query ~= "&pb=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.editor)) {
+ got.editor = m["matched"];
+ got.canned_query ~= "&ed=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.contributor)) {
+ got.contributor = m["matched"];
+ got.canned_query ~= "&ct=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.date)) {
+ got.date = m["matched"];
+ got.canned_query ~= "&dt=" ~ m["matched"];
+ }
+ // if (auto m = got.search_text_area.matchFirst(rgx.results_type)) {
+ // got.results_type = m["matched"];
+ // got.canned_query ~= "&rt=" ~ m["matched"];
+ // }
+ if (auto m = got.search_text_area.matchFirst(rgx.format)) {
+ got.format = m["matched"];
+ got.canned_query ~= "&fmt=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.source)) {
+ got.source = m["matched"];
+ got.canned_query ~= "&src=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.language)) {
+ got.language = m["matched"];
+ got.canned_query ~= "&lng=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.relation)) {
+ got.relation = m["matched"];
+ got.canned_query ~= "&rl=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.coverage)) {
+ got.coverage = m["matched"];
+ got.canned_query ~= "&cv=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.rights)) {
+ got.rights = m["matched"];
+ got.canned_query ~= "&rgt=" ~ m["matched"];
+ }
+ if (auto m = got.search_text_area.matchFirst(rgx.comment)) {
+ got.comment = m["matched"];
+ got.canned_query ~= "&cmt=" ~ m["matched"];
+ }
+ // if (auto m = search_text_area.matchFirst(rgx.abstract)) {
+ // got.abstract = m["matched"];
+ // }
+ if (auto m = got.search_text_area.matchFirst(rgx.src_filename_base)) {
+ got.src_filename_base = m["matched"];
+ got.canned_query ~= "&bfn=" ~ m["matched"];
+ }
+ }
+ if ("fn" in cgi.post) {
+ got.fn = cgi.post["fn"];
+ got.canned_query ~= "&fn=" ~ cgi.post["fn"];
+ }
+ if ("rt" in cgi.post) {
+ got.results_type = cgi.post["rt"];
+ got.canned_query ~= "&rt=" ~ cgi.post["rt"];
+ }
+ if ("sts" in cgi.post) {
+ got.stats = cgi.post["sts"];
+ got.canned_query ~= "&sts=" ~ cgi.post["sts"];
+ }
+ if ("ec" in cgi.post) {
+ got.echo = cgi.post["ec"];
+ got.canned_query ~= "&ec=" ~ cgi.post["ec"];
+ }
+ if ("url" in cgi.post) {
+ got.url = cgi.post["url"];
+ got.canned_query ~= "&url=" ~ cgi.post["url"];
+ }
+ if ("se" in cgi.post) {
+ got.searched = cgi.post["se"];
+ got.canned_query ~= "&se=" ~ cgi.post["se"];
+ }
+ if ("sql" in cgi.post) {
+ got.sql = cgi.post["sql"];
+ got.canned_query ~= "&sql=" ~ cgi.post["sql"];
+ }
+ if ("sml" in cgi.post) {
+ got.sql_match_limit = cgi.post["sml"];
+ got.canned_query ~= "&sml=" ~ cgi.post["sml"];
+ }
+ if ("smo" in cgi.post) {
+ got.sql_match_offset = "0"; // cgi.post["smo"];
+ got.canned_query ~= "&smo=0"; // ~ cgi.post["smo"];
+ }
+ got.canned_query = got.canned_query.strip.split(" ").join("%20");
+ conf.query_string = got.canned_query;
+ // cgi.write("f.canned_query: " ~ got.canned_query ~ "
");
+ } else if (environment.get("REQUEST_METHOD", "POST") == "GET") {
+ got.canned_query = environment.get("QUERY_STRING", "");
+ // cgi.write("f.canned_query: " ~ got.canned_query ~ "
");
+ got.search_text_area = "";
+ if ("sf" in canned_query && !(canned_query["sf"]).empty) {
+ got.text = canned_query["sf"].split("%20").join(" ");
+ got.search_text_area ~= "text: " ~ got.text ~ "\n";
+ }
+ if ("au" in canned_query && !(canned_query["au"]).empty) {
+ got.author = canned_query["au"].split("%20").join(" ");
+ got.search_text_area ~= "author: " ~ got.author ~ "\n";
+ }
+ if ("ti" in canned_query && !(canned_query["ti"]).empty) {
+ got.title = canned_query["ti"].split("%20").join(" ");
+ got.search_text_area ~= "title: " ~ got.title ~ "\n";
+ }
+ if ("uid" in canned_query && !(canned_query["uid"]).empty) {
+ got.uid = canned_query["uid"].split("%20").join(" ");
+ got.search_text_area ~= "uid: " ~ got.uid ~ "\n";
+ }
+ if ("fn" in canned_query && !(canned_query["fn"]).empty) {
+ got.fn = canned_query["fn"].split("%20").join(" ");
+ got.search_text_area ~= "fn: " ~ got.fn ~ "\n";
+ }
+ if ("kw" in canned_query && !(canned_query["kw"]).empty) {
+ got.keywords = canned_query["kw"].split("%20").join(" ");
+ got.search_text_area ~= "keywords: " ~ got.keywords ~ "\n";
+ }
+ if ("tr" in canned_query && !(canned_query["tr"]).empty) {
+ got.topic_register = canned_query["tr"].split("%20").join(" ");
+ got.search_text_area ~= "topic_register: " ~ got.topic_register ~ "\n";
+ }
+ if ("su" in canned_query && !(canned_query["su"]).empty) {
+ got.subject = canned_query["su"].split("%20").join(" ");
+ got.search_text_area ~= "subject: " ~ got.subject ~ "\n";
+ }
+ if ("de" in canned_query && !(canned_query["de"]).empty) {
+ got.description = canned_query["de"].split("%20").join(" ");
+ got.search_text_area ~= "description: " ~ got.description ~ "\n";
+ }
+ if ("pb" in canned_query && !(canned_query["pb"]).empty) {
+ got.publisher = canned_query["pb"].split("%20").join(" ");
+ got.search_text_area ~= "publisher: " ~ got.publisher ~ "\n";
+ }
+ if ("ed" in canned_query && !(canned_query["ed"]).empty) {
+ got.editor = canned_query["ed"].split("%20").join(" ");
+ got.search_text_area ~= "editor: " ~ got.editor ~ "\n";
+ }
+ if ("ct" in canned_query && !(canned_query["ct"]).empty) {
+ got.contributor = canned_query["ct"].split("%20").join(" ");
+ got.search_text_area ~= "contributor: " ~ got.contributor ~ "\n";
+ }
+ if ("dt" in canned_query && !(canned_query["dt"]).empty) {
+ got.date = canned_query["dt"].split("%20").join(" ");
+ got.search_text_area ~= "date: " ~ got.date ~ "\n";
+ }
+ if ("rt" in canned_query && !(canned_query["rt"]).empty) {
+ got.results_type = canned_query["rt"].split("%20").join(" ");
+ // got.search_text_area ~= "results_type: " ~ got.results_type ~ "\n";
+ }
+ if ("fmt" in canned_query && !(canned_query["fmt"]).empty) {
+ got.format = canned_query["fmt"].split("%20").join(" ");
+ got.search_text_area ~= "format: " ~ got.format ~ "\n";
+ }
+ if ("src" in canned_query && !(canned_query["src"]).empty) {
+ got.source = canned_query["src"].split("%20").join(" ");
+ got.search_text_area ~= "source: " ~ got.source ~ "\n";
+ }
+ if ("lng" in canned_query && !(canned_query["lng"]).empty) {
+ got.language = canned_query["lng"].split("%20").join(" ");
+ got.search_text_area ~= "language: " ~ got.language ~ "\n";
+ }
+ if ("rl" in canned_query && !(canned_query["rl"]).empty) {
+ got.relation = canned_query["rl"].split("%20").join(" ");
+ got.search_text_area ~= "relation: " ~ got.relation ~ "\n";
+ }
+ if ("cv" in canned_query && !(canned_query["cv"]).empty) {
+ got.coverage = canned_query["cv"].split("%20").join(" ");
+ got.search_text_area ~= "coverage: " ~ got.coverage ~ "\n";
+ }
+ if ("rgt" in canned_query && !(canned_query["rgt"]).empty) {
+ got.rights = canned_query["rgt"].split("%20").join(" ");
+ got.search_text_area ~= "rights: " ~ got.rights ~ "\n";
+ }
+ if ("cmt" in canned_query && !(canned_query["cmt"]).empty) {
+ got.comment = canned_query["cmt"].split("%20").join(" ");
+ got.search_text_area ~= "comment: " ~ got.comment ~ "\n";
+ }
+ // if ("abstract" in canned_query && !(canned_query["abstract"]).empty) {
+ // got.abstract = canned_query["abstract"];
+ // }
+ if ("bfn" in canned_query && !(canned_query["bfn"]).empty) { // search_field
+ got.src_filename_base = canned_query["bfn"].split("%20").join(" ");
+ got.search_text_area ~= "src_filename_base: " ~ got.src_filename_base ~ "\n";
+ }
+ if ("sml" in canned_query && !(canned_query["sml"]).empty) {
+ got.sql_match_limit = canned_query["sml"].split("%20").join(" ");
+ // got.search_text_area ~= "sql_match_limit: " ~ got.sql_match_limit ~ "\n";
+ }
+ // cgi.write("f.search_text_area: " ~ got.search_text_area ~ "
");
+ }
+ return got;
+ }
+ auto tf = text_fields; //
+ struct SQL_select {
+ string the_body = "";
+ string the_range = "";
+ }
+ auto sql_select = SQL_select();
+ string canned_url () {
+ string _url = "";
+ if (environment.get("REQUEST_METHOD", "POST") == "POST") {
+ _url = conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ tf.canned_query;
+ } else if (environment.get("REQUEST_METHOD", "POST") == "GET") {
+ _url = conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ environment.get("QUERY_STRING", "");
+ }
+ return _url;
+ }
+ auto regex_canned_search () {
+ static struct RgxCS {
+ static track_offset = ctRegex!(`(?P<offset_key>[&]smo=)(?P<offset_val>[0-9]+)`);
+ static results_type = ctRegex!(`[&]rt=(?Pidx|txt)`);
+ static results_type_index = ctRegex!(`[&]rt=idx`);
+ static results_type_text = ctRegex!(`[&]rt=txt`);
+ static fn = ctRegex!(`[&]fn=(?P[^&]+)`);
+ }
+ return RgxCS();
+ }
+ string show_matched_objects (string fn) {
+ auto rgx = regex_canned_search;
+ string _matched_objects_text = "";
+ string _url = canned_url;
+ string _url_new = "";
+ string _matches_show_text = "&rt=txt";
+ string _matches_show_index = "&rt=idx";
+ string _fn = "&fn=" ~ fn;
+ _url_new = _url;
+ if (_url_new.match(rgx.results_type_index)) {
+ _url_new = _url_new.replace(rgx.results_type_index, _matches_show_text);
+ } else if (_url.match(rgx.results_type_text)) {
+ _url_new = _url_new.replace(rgx.results_type_text, _matches_show_index);
+ } else {
+ if (!(_url.match(rgx.results_type))) {
+ _url_new = _url ~ _matches_show_text;
+ }
+ }
+ if (!(_url_new.match(rgx.fn))) {
+ _url_new = _url_new ~ _fn;
+ }
+ _matched_objects_text =
+ ""
+ ~ ""
+ ~ "※"
+ ~ "";
+ return _matched_objects_text;
+ }
+ string base; // = "";
+ string tip; // = "";
+ string search_note; // = "";
+ uint sql_match_offset_count = 0;
+ string previous_next () {
+ auto rgx = regex_canned_search;
+ string _previous_next = "";
+ int _current_offset_value = 0;
+ string _set_offset_next = "";
+ string _set_offset_previous = "";
+ string _url = canned_url;
+ string _url_previous = "";
+ string _url_next = "";
+ string arrow_previous = "";
+ string arrow_next = "";
+ if (auto m = _url.matchFirst(rgx.track_offset)) {
+ _current_offset_value = m.captures["offset_val"].to!int;
+ _set_offset_next = m.captures["offset_key"] ~ ((m.captures["offset_val"]).to!int + cv.sql_match_limit.to!int).to!string;
+ _url_next = _url.replace(rgx.track_offset, _set_offset_next);
+ if (_current_offset_value < cv.sql_match_limit.to!int) {
+ _url_previous = "";
+ } else {
+ _url_previous = "";
+ _set_offset_previous = m.captures["offset_key"] ~ ((m.captures["offset_val"]).to!int - cv.sql_match_limit.to!int).to!string;
+ _url_previous = _url.replace(rgx.track_offset, _set_offset_previous);
+ }
+ } else {// _current_offset_value = 0;
+ _url_next = _url ~= "&smo=" ~ cv.sql_match_limit.to!string;
+ }
+ if (_url_previous.empty) {
+ arrow_previous = "";
+ } else {
+ arrow_previous =
+ ""
+ ~ ""
+ ~ "<< prev"
+ ~ " || ";
+ }
+ arrow_next =
+ ""
+ ~ ""
+ ~ "next >>"
+ ~ "";
+ _previous_next = "
" ~ arrow_previous ~ arrow_next;
+ return _previous_next;
+ }
+ {
+ header = format(q"┃
+
+
+
+
+
+ ≅ SiSU spine search form
+
+
+
+
+
+
+
+┃",
+ conf.http_host,
+ );
+ }
+ {
+ table = format(q"┃
+
+
+
+
+ ≅ SiSU spine search form
+ |
+
+ |
+
+ |
+
+ ┃");
+ }
+ {
+ string post_value(string field_name, string type="box", string set="on") {
+ string val = "";
+ switch (type) {
+ case "field":
+ val = ((field_name in cgi.post && !(cgi.post[field_name]).empty)
+ ? cgi.post[field_name]
+ : (field_name in cgi.get)
+ ? cgi.get[field_name]
+ : "");
+ val = tf.search_text_area;
+ break;
+ case "box": // generic for checkbox or radio; checkbox set == "on" radio set == "name set"
+ val = ((field_name in cgi.post && !(cgi.post[field_name]).empty)
+ ? (cgi.post[field_name] == set ? "checked" : "off")
+ : (field_name in cgi.get)
+ ? (cgi.get[field_name] == set ? "checked" : "off")
+ : "off");
+ break;
+ case "radio": // used generic bo
+ val = ((field_name in cgi.post && !(cgi.post[field_name]).empty)
+ ? (cgi.post[field_name] == set ? "checked" : "off")
+ : (field_name in cgi.get)
+ ? (cgi.get[field_name] == set ? "checked" : "off")
+ : "checked");
+ break;
+ case "checkbox": // used generic bo
+ val = ((field_name in cgi.post && !(cgi.post[field_name]).empty)
+ ? (cgi.post[field_name] == set ? "checked" : "off")
+ : (field_name in cgi.get)
+ ? (cgi.get[field_name] == set ? "checked" : "off")
+ : "checked");
+ break;
+ default:
+ }
+ return val;
+ }
+ string the_can(string fv) {
+ string show_the_can = post_value("url");
+ string _the_can = "";
+ if (show_the_can == "checked") {
+ tf = text_fields;
+ string method_get_url = conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ environment.get("QUERY_STRING", "");
+ string method_post_url_construct = conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ tf.canned_query;
+ // assert(method_get_url == environment.get("HTTP_REFERER", conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ conf.query_string));
+ if (conf.request_method == "POST") {
+ _the_can =
+ ""
+ ~ "POST: "
+ ~ ""
+ ~ method_post_url_construct
+ ~ ""
+ ~ "
";
+ } else if (conf.request_method == "GET") {
+ _the_can =
+ ""
+ ~ "GET: "
+ ~ ""
+ ~ method_get_url
+ ~ "";
+ }
+ conf.http_url = conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ tf.canned_query;
+ }
+ return _the_can;
+ }
+ string provide_tip() {
+ string searched_tip = post_value("se");
+ string tip = "";
+ if (searched_tip == "checked") {
+ string search_field = post_value("sf", "field");
+ tf = text_fields;
+ tip = format(q"┃
+
+database: %s; selected view: index
+search string: %s %s %s %s %s %s
+%s %s %s %s %s %s
+
+┃",
+ cv.db_selected,
+ (tf.text.empty ? "" : "\"text: " ~ tf.text ~ "; "),
+ (tf.title.empty ? "" : "\"title: " ~ tf.title ~ "; "),
+ (tf.author.empty ? "" : "\"author: " ~ tf.author ~ "; "),
+ (tf.date.empty ? "" : "\"date " ~ tf.date ~ "; "),
+ (tf.uid.empty ? "" : "\"uid: " ~ tf.uid ~ "; "),
+ (tf.fn.empty ? "" : "\"fn: " ~ tf.fn ~ "; "),
+ (tf.text.empty ? "" : "text: " ~ tf.text ~ "
"),
+ (tf.title.empty ? "" : "title: " ~ tf.title ~ "
"),
+ (tf.author.empty ? "" : "author: " ~ tf.author ~ "
"),
+ (tf.date.empty ? "" : "date: " ~ tf.date ~ "
"),
+ (tf.uid.empty ? "" : "\"uid: " ~ tf.uid ~ "; "),
+ (tf.fn.empty ? "" : "\"fn: " ~ tf.fn ~ "; "),
+ );
+ }
+ return tip;
+ }
+ form = format(q"┃
+
+┃",
+ _cfg.filename_cgi,
+ (post_value("ec") == "checked") ? post_value("sf", "field") : "",
+ provide_tip,
+ search_note,
+ the_can(post_value("sf", "field")),
+ cv.db_selected,
+ post_value("rt", "box", "idx"),
+ post_value("rt", "box", "txt"),
+ post_value("sml", "box", "1000"),
+ post_value("sml", "box", "2500"),
+ post_value("ec"),
+ post_value("url"),
+ post_value("se"),
+ post_value("sql"),
+ );
+ {
+ string set_value(string field_name, string default_val) {
+ string val;
+ if (field_name in cgi.post) {
+ val = cgi.post[field_name];
+ } else if (field_name in cgi.get) {
+ val = cgi.get[field_name];
+ } else { val = default_val; }
+ return val;
+ }
+ bool set_bool(string field_name) {
+ bool val;
+ if (field_name in cgi.post
+ && cgi.post[field_name] == "on") {
+ val = true;
+ } else if (field_name in cgi.get
+ && cgi.get[field_name] == "on") {
+ val = true;
+ } else { val = false; }
+ return val;
+ }
+ cv.db_selected = set_value("selected_db", _cfg.filename_db); // selected_db_name == db (spine.search.db or whatever)
+ cv.sql_match_limit = set_value("sml", "1000");
+ cv.sql_match_offset = set_value("smo", "0");
+ cv.search_text = set_value("sf", "");
+ cv.results_type = set_value("rt", "idx");
+ cv.checked_echo = set_bool("ec");
+ cv.checked_stats = set_bool("sts");
+ cv.checked_url = set_bool("url");
+ cv.checked_searched = set_bool("se");
+ cv.checked_tip = set_bool("tip");
+ cv.checked_sql = set_bool("sql");
+ tf = text_fields;
+ }
+ }
+ {
+ cgi.write(header);
+ cgi.write(table);
+ cgi.write(form);
+ // cgi.write(previous_next);
+ { // debug environment
+ // foreach (k, d; environment.toAA) {
+ // cgi.write(k ~ ": " ~ d ~ "
");
+ // }
+ }
+ { // debug cgi info
+ // cgi.write("db_selected: " ~ cv.db_selected ~ "
\n");
+ // cgi.write("search_text: " ~ cv.search_text ~ "
\n");
+ // cgi.write("sql_match_limit: " ~ cv.sql_match_limit ~ ";\n");
+ // cgi.write("sql_match_offset: " ~ cv.sql_match_offset ~ ";\n");
+ // cgi.write("results_type: " ~ cv.results_type ~ "
\n");
+ // cgi.write("cv.checked_echo: " ~ (cv.checked_echo ? "checked" : "off") ~ "; \n");
+ // cgi.write("cv.checked_stats: " ~ (cv.checked_stats ? "checked" : "off") ~ "; \n");
+ // cgi.write("cv.checked_url: " ~ (cv.checked_url ? "checked" : "off") ~ "; \n");
+ // cgi.write("cv.checked_searched: " ~ (cv.checked_searched ? "checked" : "off") ~ ";
\n");
+ // cgi.write("cv.checked_tip: " ~ (cv.checked_tip ? "checked" : "off") ~ "; \n");
+ // cgi.write("cv.checked_sql: " ~ (cv.checked_sql ? "checked" : "off") ~ "
\n");
+ }
+ }
+ auto db = Database(conf.db_path ~ cv.db_selected);
+ {
+ uint sql_match_offset_counter(T)(T cv) {
+ sql_match_offset_count += cv.sql_match_limit.to!uint;
+ return sql_match_offset_count;
+ }
+ void sql_search_query() {
+ string highlight_text_matched(string _txt, string search_field) {
+ string _mark_open = "┤";
+ string _mark_close = "├";
+ string _span_match = "";
+ string _span_close = "";
+ string _sf_str = search_field.strip.split("%20").join(" ").strip;
+ string[] _sf_arr = _sf_str.split(regex(r"\s+AND\s+|\s+OR\s+"));
+ auto rgx_url = regex(r"]+?>");
+ foreach (_sf; _sf_arr) {
+ auto rgx_matched_text = regex(_sf, "i");
+ auto rgx_marked_pair = regex(r"┤(?P<keep>" ~ _sf ~ ")├", "i");
+ if (auto m = _txt.matchFirst(rgx_url)) {
+ _txt = replaceAll!(m =>
+ _mark_open
+ ~ m.captures[0]
+ ~ _mark_close
+ )(_txt, rgx_matched_text);
+ _txt = replaceAll!(m =>
+ replaceAll!(u =>
+ u["keep"]
+ )(m.hit, rgx_marked_pair)
+ )(_txt, rgx_url);
+ _txt = replaceAll!(m =>
+ _span_match
+ ~ m["keep"]
+ ~ _span_close
+ )(_txt, rgx_marked_pair);
+ } else {
+ _txt = replaceAll!(m =>
+ _span_match
+ ~ m.captures[0]
+ ~ _span_close
+ )(_txt, rgx_matched_text);
+ }
+ }
+ return _txt;
+ }
+ string select_field_like(string db_field, string search_field) {
+ string where_ = "";
+ if (!(search_field.empty)) {
+ string _sf = search_field.strip.split("%20").join(" ");
+ if (_sf.match(r" OR ")) {
+ _sf = _sf.split(" OR ").join("%' OR " ~ db_field ~ " LIKE '%");
+ }
+ if (_sf.match(r" AND ")) {
+ _sf = _sf.split(" AND ").join("%' AND " ~ db_field ~ " LIKE '%");
+ }
+ _sf = "( " ~ db_field ~ " LIKE\n '%" ~ _sf ~ "%' )";
+ where_ ~= format(q"┃
+ %s
+┃",
+ _sf
+ );
+ }
+ return where_;
+ }
+ string[] _fields;
+ _fields ~= select_field_like("doc_objects.clean", tf.text);
+ _fields ~= select_field_like("metadata_and_text.title", tf.title);
+ _fields ~= select_field_like("metadata_and_text.creator_author", tf.author);
+ _fields ~= select_field_like("metadata_and_text.uid", tf.uid);
+ _fields ~= select_field_like("metadata_and_text.src_filename_base", tf.fn);
+ _fields ~= select_field_like("metadata_and_text.src_filename_base", tf.src_filename_base);
+ _fields ~= select_field_like("metadata_and_text.language_document_char", tf.language);
+ _fields ~= select_field_like("metadata_and_text.date_published", tf.date);
+ _fields ~= select_field_like("metadata_and_text.classify_keywords", tf.keywords);
+ _fields ~= select_field_like("metadata_and_text.classify_topic_register", tf.topic_register);
+ string[] fields;
+ foreach (f; _fields) {
+ if (!(f.empty)) { fields ~= f; }
+ }
+ string fields_str = "";
+ fields_str ~= fields.join(" AND ");
+ sql_select.the_body ~= format(q"┃
+SELECT
+ metadata_and_text.uid,
+ metadata_and_text.title,
+ metadata_and_text.creator_author_last_first,
+ metadata_and_text.creator_author,
+ metadata_and_text.src_filename_base,
+ metadata_and_text.language_document_char,
+ metadata_and_text.date_published,
+ metadata_and_text.classify_keywords,
+ metadata_and_text.classify_topic_register,
+ doc_objects.body,
+ doc_objects.seg_name,
+ doc_objects.ocn,
+ metadata_and_text.uid
+FROM
+ doc_objects,
+ metadata_and_text
+WHERE (
+ %s
+ )
+AND
+ doc_objects.uid_metadata_and_text = metadata_and_text.uid
+ORDER BY
+ metadata_and_text.creator_author_last_first,
+ metadata_and_text.date_published DESC,
+ metadata_and_text.title,
+ metadata_and_text.language_document_char,
+ metadata_and_text.src_filename_base,
+ doc_objects.ocn
+LIMIT %s OFFSET %s
+;┃",
+ fields_str,
+ cv.sql_match_limit,
+ cv.sql_match_offset,
+ );
+ (cv.checked_sql)
+ ? cgi.write(previous_next
+ ~ "
"
+ ~ sql_select.the_body.strip.split("\n ").join(" ").split("\n").join("
")
+ ~ "\n"
+ )
+ : "";
+ cgi.write(previous_next);
+ auto select_query_results = db.execute(sql_select.the_body).cached;
+ string _old_uid = "";
+ if (!select_query_results.empty) {
+ string _date_published = "0000";
+ string _close_para = "";
+ string _matched_ocn_open = "";
+ foreach (idx, row; select_query_results) {
+ if (row["uid"].as!string != _old_uid) {
+ _close_para = (idx == 1) ? "" : "
";
+ _matched_ocn_open = (idx == 1) ? "" : "";
+ _old_uid = row["uid"].as!string;
+ _date_published = (row["date_published"].as!string.match(regex(r"^([0-9]{4})")))
+ ? row["date_published"].as!string : "0000"; // used in regex that breaks if no match
+ auto m = _date_published.match(regex(r"^([0-9]{4})"));
+ string _date = (m.hit == "0000") ? "(year?) " : "(" ~ m.hit ~ ") ";
+ cgi.write(
+ _close_para
+ ~ "
"
+ ~ "
\""
+ ~ row["title"].as!string ~ "\""
+ ~ " "
+ ~ _date
+ ~ "[" ~ row["language_document_char"].as!string ~ "] "
+ ~ row["creator_author_last_first"].as!string
+ ~ " "
+ ~ show_matched_objects(row["src_filename_base"].as!string)
+ ~ "
"
+ ~ "
"
+ );
+ }
+ if (cv.results_type == "txt") {
+ if (row["ocn"].as!string != "0") {
+ cgi.write(
+ ""
+ ~ "
"
+ ~ "
"
+ ~ highlight_text_matched(row["body"].as!string, tf.text)
+ ~ "
"
+ ~ "
"
+ );
+ } else {
+ cgi.write(
+ ""
+ ~ "
"
+ ~ "
"
+ ~ highlight_text_matched(row["body"].as!string, tf.text)
+ ~ "
"
+ ~ "
"
+ );
+ }
+ } else {
+ if (row["ocn"].as!string != "0") {
+ cgi.write(
+ _matched_ocn_open
+ ~ ""
+ ~ row["ocn"].as!string
+ ~ ", "
+ );
+ } else {
+ cgi.write(
+ _matched_ocn_open
+ ~ ""
+ ~ row["ocn"].as!string
+ ~ ", "
+ );
+ }
+ _matched_ocn_open = "";
+ }
+ }
+ cgi.write( previous_next);
+
+ } else { // offset_not_beyond_limit = false;
+ cgi.write("select_query_results empty\n");
+ }
+ cgi.write("
+
+
+
+ git
+
+");
+ }
+ sql_search_query;
+ }
+ {
+ db.close;
+ }
+ {
+ string tail = format(q"┃
+
+┃");
+ cgi.write(tail);
+ }
+}
+mixin GenericMain!cgi_function_intro;
+#+END_SRC
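+
+The GET branch above reads the same abbreviated keys the POST handler folds into its canned query (sf, au, ti, fn, rt, sml, smo, ...), with spaces encoded as %20, so a search can be replayed outside the form (a sketch; host and cgi path are assumptions for a local test install):
+
+#+BEGIN_SRC sh
+# replay a canned query against a locally served copy of the cgi
+curl "http://localhost/cgi-bin/spine_search?sf=document%20structure&rt=idx&sml=1000&smo=0"
+#+END_SRC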
+
+*** .gitignore :gitignore:
+
+#+HEADER: :tangle "../sundry/spine_search_cgi/.gitignore"
+#+BEGIN_SRC sh
+# git ls-files --others --exclude-from=.git/info/exclude
+,*
+!.gitignore
+!README.md
+!COPYRIGHT
+!CHANGELOG
+!makefile
+!version.txt
+!*.json
+!*.sdl
+!meson.build
+!tangle
+!*.org
+!*.d
+!*.rb
+!*.txt
+!conf.sdl
+!*.nix
+!nix
+!nix/**
+!.envrc
+!src
+!src/**
+!*.sst
+!*.ssm
+!**/*.sst
+!**/*.ssm
+!config_local_site
+!views
+.dub/**
+,**/.dub/**
+,**/cgi-bin/**
+tmp/**
+,*_.org
+,*_.d
+,*_.txt
+,*_
+,*.swp
+,*~
+,*~
+\#*
+,*.\#*
+#!*/
+#\#*
+#*.\#*
+#.reggae/**
+#+END_SRC
+
** sh script to batch process _emacs org babel tangle_ :shell_script:tangle:
[[https://orgmode.org/manual/Batch-execution.html]]
creates a shell batch script called "tangle", that will tangle (emacs org
--
cgit v1.2.3