diff --git a/.commitlog.release b/.commitlog.release
index c714c07..a898134 100755
--- a/.commitlog.release
+++ b/.commitlog.release
@@ -1 +1 @@
-v0.2.15
\ No newline at end of file
+v0.2.16-beta.0
\ No newline at end of file
diff --git a/Makefile b/Makefile
index dbf92fa..3112c61 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,3 @@
-
.PHONY.: all
all: clean build
@@ -16,7 +15,7 @@ docs: build
./alvu --path="docs" --baseurl="/alvu/" --highlight --hard-wrap=false
docs_dev: build
- ./alvu --highlight --hard-wrap=false --serve --path='./docs'
+ DEBUG=true ./alvu --highlight --hard-wrap=false --serve --path='./docs'
pages: docs
rm -rf alvu
diff --git a/commands/init.go b/commands/init.go
new file mode 100644
index 0000000..a60a1ea
--- /dev/null
+++ b/commands/init.go
@@ -0,0 +1,159 @@
+package commands
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/barelyhuman/alvu/pkg/alvu"
+ "github.com/barelyhuman/go/color"
+ "github.com/urfave/cli/v2"
+)
+
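+// AlvuInit scaffolds a new alvu project at the given path, refusing to
+// overwrite an existing directory unless the --force flag is set.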
+func AlvuInit(c *cli.Context) (err error) {
+ basePath := c.Args().First()
+ forceFlag := c.Bool("force")
+ logger := alvu.NewLogger()
+ logger.LogPrefix = "[alvu]"
+
+ fileInfo, err := os.Stat(basePath)
+
+ if err == nil {
+ if fileInfo.IsDir() && !forceFlag {
+ logger.Error(fmt.Sprintf("Directory %v already exists and cannot be overwritten; pass the -f flag to the `init` command to force overwrite", basePath))
+ os.Exit(1)
+ }
+ }
+
+ mustCreateDir(basePath, "public")
+ mustCreateDir(basePath, "hooks")
+ mustCreateDir(basePath, "pages")
+ prepareBaseStyles(basePath)
+ preparePages(basePath)
+
+ logger.Success(
+ fmt.Sprintf("Alvu initialized in: %v", basePath),
+ )
+
+ runStr := color.ColorString{}
+
+ fmt.Println(runStr.Dim("\n> Run the following to get started").String())
+
+ commandStr := color.ColorString{}
+ commandStr.Cyan(
+ fmt.Sprintf("\n alvu -s -path %v\n", basePath),
+ )
+ fmt.Println(commandStr.String())
+ return
+}
+
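+// mustCreateDir creates the directory root/dir (and any parents), panicking if creation fails.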
+func mustCreateDir(root, dir string) {
+ pathToCreate := filepath.Join(root, dir)
+ err := os.MkdirAll(pathToCreate, os.ModePerm)
+ if err != nil {
+ panic(fmt.Sprintf("Failed to create %v due to error: %v\n", pathToCreate, err))
+ }
+}
+
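+// prepareBaseStyles writes a minimal reset and color-scheme stylesheet to public/styles.css.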
+func prepareBaseStyles(root string) {
+ fileHandle, err := os.OpenFile(filepath.Join(root, "public", "styles.css"), os.O_CREATE|os.O_RDWR, os.ModePerm)
+ if err != nil {
+ fmt.Printf("Failed to open file public/styles.css with error: %v", err)
+ }
+ defer fileHandle.Sync()
+ defer fileHandle.Close()
+
+ fileHandle.WriteString(`
+/* Resets */
+html {
+ box-sizing: border-box;
+ font-size: 16px;
+ font-family: -apple-system, BlinkMacSystemFont, avenir next, avenir, segoe ui, helvetica neue, helvetica, Cantarell, Ubuntu, roboto, noto, arial, sans-serif;
+}
+
+*, *:before, *:after {
+ box-sizing: inherit;
+}
+
+body, h1, h2, h3, h4, h5, h6, p {
+ margin: 0;
+ padding: 0;
+ font-weight: normal;
+}
+
+img {
+ max-width: 100%;
+ height: auto;
+}
+
+/* Styles */
+
+:root {
+ --base: #efefef;
+ --text: #181819;
+}
+
+@media (prefers-color-scheme: dark) {
+ :root {
+ --base: #181819;
+ --text: #efefef;
+ }
+}
+
+body {
+
+ background: var(--base);
+ color: var(--text);
+
+ max-width: 900px;
+ margin: 0 auto;
+ padding: 4px;
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+ min-height: 100vh;
+ }
+
+
+ ol,ul,p{
+ line-height: 1.7;
+ }
+
+ `)
+
+}
+
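+// preparePages writes the starter pages/_layout.html and pages/index.md files.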
+func preparePages(root string) {
+ layoutHandle, _ := os.OpenFile(filepath.Join(root, "pages", "_layout.html"), os.O_CREATE|os.O_RDWR, os.ModePerm)
+ defer layoutHandle.Sync()
+ defer layoutHandle.Close()
+
+ rootPageHandle, _ := os.OpenFile(filepath.Join(root, "pages", "index.md"), os.O_CREATE|os.O_RDWR, os.ModePerm)
+
+ defer rootPageHandle.Sync()
+ defer rootPageHandle.Close()
+
+ layoutHandle.WriteString(`
+
+
+
+
+ Alvu | Minimal Starter
+
+
+
+
+
+`)
+
+ rootPageHandle.WriteString(`# Alvu
+
+- Scriptable
+- Fast
+- Tiny
+
+In whatever order you'd like...
+
+`)
+
+}
diff --git a/commands/root.go b/commands/root.go
new file mode 100644
index 0000000..ece0e6a
--- /dev/null
+++ b/commands/root.go
@@ -0,0 +1,37 @@
+package commands
+
+import (
+ "os"
+
+ "github.com/barelyhuman/alvu/pkg/alvu"
+ "github.com/joho/godotenv"
+ "github.com/urfave/cli/v2"
+)
+
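+// Alvu loads the optional env file, copies the CLI flags into an
+// AlvuConfig and runs the build.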
+func Alvu(c *cli.Context) (err error) {
+ // Prepare Environment
+ envFilePath := c.String("env")
+ if _, err := os.Stat(envFilePath); err == nil {
+ godotenv.Load(envFilePath)
+ }
+
+ baseConfig := alvu.AlvuConfig{}
+
+ // Basics
+ baseConfig.HookDir = c.String("hooks")
+ baseConfig.OutDir = c.String("out")
+ baseConfig.RootPath = c.String("path")
+
+ // Transformation Config
+ baseConfig.BaseURL = c.String("baseurl")
+ baseConfig.EnableHardWrap = c.Bool("hard-wrap")
+ baseConfig.EnableHighlighting = c.Bool("highlight")
+ baseConfig.HighlightingTheme = c.String("highlight-theme")
+
+ // Serve config
+ baseConfig.Serve = c.Bool("serve")
+ baseConfig.PollDuration = c.Int("poll")
+ baseConfig.PortNumber = c.String("port")
+
+ return baseConfig.Run()
+}
diff --git a/docs/hooks/00-copy-readme.lua b/docs/hooks/00-copy-readme.lua
index dbc53c5..4596386 100644
--- a/docs/hooks/00-copy-readme.lua
+++ b/docs/hooks/00-copy-readme.lua
@@ -6,13 +6,12 @@ local json = require("json")
ForFile = "00-readme.md"
function Writer(filedata)
- local sourcedata = json.decode(filedata)
- if sourcedata.name == "00-readme.html"
- then
- local f = assert(io.open(wdir.."/../readme.md", "rb"))
- local content = f:read("*all")
- f:close()
- sourcedata.content = content
- end
- return json.encode(sourcedata)
-end
\ No newline at end of file
+ local sourcedata = json.decode(filedata)
+ if sourcedata.name == "00-readme.html" then
+ local f = assert(io.open(wdir .. "/../readme.md", "rb"))
+ local content = f:read("*all")
+ f:close()
+ sourcedata.content = content
+ end
+ return json.encode(sourcedata)
+end
diff --git a/docs/hooks/01-add-navigation.lua b/docs/hooks/01-add-navigation.lua
index 1c1bfc0..5be4761 100644
--- a/docs/hooks/01-add-navigation.lua
+++ b/docs/hooks/01-add-navigation.lua
@@ -1,5 +1,6 @@
---@diagnostic disable-next-line: undefined-global
local wdir = workingdir
+
package.path = package.path .. ";" .. wdir .. "/lib/?.lua"
local json = require("json")
@@ -7,33 +8,34 @@ local alvu = require("alvu")
local utils = require(wdir .. ".lib.utils")
function Writer(filedata)
- local pagesPath = wdir .. "/pages"
- local index = {}
- local files = alvu.files(pagesPath)
-
- for fileIndex = 1, #files do
- local file_name = files[fileIndex]
- if not (file_name == "_layout.html" or file_name == "index.md" or utils.starts_with(file_name,"concepts/"))
- then
- local name = string.gsub(file_name, ".md", "")
- name = string.gsub(name, ".html", "")
- local title, _ = utils.normalize(name):lower()
-
- table.insert(index, {
- name = title,
- slug = name
- })
- end
- end
-
- table.insert(index, 1, {
- name = "..",
- slug = "index"
- })
-
- local source_data = json.decode(filedata)
-
- local template = [[
+ local pagesPath = wdir .. "/pages"
+ local index = {}
+ local files = alvu.files(pagesPath)
+
+ for fileIndex = 1, #files do
+ local file_name = files[fileIndex]
+ if
+ not (file_name == "_layout.html" or file_name == "index.md" or utils.starts_with(file_name, "concepts/"))
+ then
+ local name = string.gsub(file_name, ".md", "")
+ name = string.gsub(name, ".html", "")
+ local title, _ = utils.normalize(name):lower()
+
+ table.insert(index, {
+ name = title,
+ slug = name,
+ })
+ end
+ end
+
+ table.insert(index, 1, {
+ name = "..",
+ slug = "index",
+ })
+
+ local source_data = json.decode(filedata)
+
+ local template = [[
-
]]
- source_data.content = template .. "\n" .. source_data.content .. ""
- source_data.data = {
- index = index
- }
+ source_data.content = template .. "\n" .. source_data.content
+ source_data.data = {
+ index = index,
+ }
- return json.encode(source_data)
+ return json.encode(source_data)
end
diff --git a/docs/pages/01-basics.md b/docs/pages/01-basics.md
index 53c5f8c..19cae64 100644
--- a/docs/pages/01-basics.md
+++ b/docs/pages/01-basics.md
@@ -47,7 +47,7 @@ can be defined as shown below
- { { .Content } }
+ <slot></slot>
```
diff --git a/docs/pages/_layout.html b/docs/pages/_layout.html
index 7545bfd..3460ec8 100644
--- a/docs/pages/_layout.html
+++ b/docs/pages/_layout.html
@@ -15,7 +15,9 @@
- {{.Content}}
+
+
+
diff --git a/docs/pages/index.md b/docs/pages/index.md
index ff7ca63..ec4c5a0 100644
--- a/docs/pages/index.md
+++ b/docs/pages/index.md
@@ -16,4 +16,4 @@ rather tight flow made me think this'd work well.
As always, a tiny little tool built for me and hopefully someday someone else
might like it.
-Well, let's head to [the basics →]({{.Meta.BaseURL}}01-basics)
+Well, let's head to [the basics →](/01-basics)
diff --git a/go.mod b/go.mod
index 648c2cd..fa135a7 100644
--- a/go.mod
+++ b/go.mod
@@ -3,22 +3,24 @@ module github.com/barelyhuman/alvu
go 1.18
require (
- github.com/barelyhuman/go v0.2.2-0.20230713173609-2ee88bb52634
+ github.com/barelyhuman/go v0.2.3-0.20240516192751-30a6c804e4e5
github.com/cjoudrey/gluahttp v0.0.0-20201111170219-25003d9adfa9
github.com/joho/godotenv v1.5.1
- github.com/otiai10/copy v1.9.0
- github.com/vadv/gopher-lua-libs v0.4.1
- github.com/yuin/goldmark v1.5.4
+ github.com/urfave/cli/v2 v2.27.1
+ github.com/vadv/gopher-lua-libs v0.5.0
+ github.com/yuin/goldmark v1.7.1
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594
github.com/yuin/gopher-lua v1.1.0
- golang.org/x/net v0.0.0-20200202094626-16171245cfb2
+ golang.org/x/net v0.17.0
gopkg.in/yaml.v3 v3.0.1
layeh.com/gopher-json v0.0.0-20201124131017-552bb3c4c3bf
)
require (
github.com/alecthomas/chroma v0.10.0 // indirect
+ github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
github.com/dlclark/regexp2 v1.4.0 // indirect
- golang.org/x/sys v0.0.0-20220908164124-27713097b956 // indirect
+ github.com/russross/blackfriday/v2 v2.1.0 // indirect
+ github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
gopkg.in/yaml.v2 v2.3.0 // indirect
)
diff --git a/go.sum b/go.sum
index b123d5a..c7ecdcf 100644
--- a/go.sum
+++ b/go.sum
@@ -1,3 +1,4 @@
+cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
github.com/VividCortex/ewma v1.1.1/go.mod h1:2Tkkvm3sRDVXaiyucHiACn4cqf7DpdyLvmxzcbUokwA=
github.com/alecthomas/chroma v0.10.0 h1:7XDcGkCQopCNKjZHfYrNLraA+M7e0fMiJ/Mfikbfjek=
github.com/alecthomas/chroma v0.10.0/go.mod h1:jtJATyUxlIORhUOFNA9NZDWGAQ8wpxQQqNSB4rjA/1s=
@@ -5,10 +6,11 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
+github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30=
-github.com/aws/aws-sdk-go v1.33.0/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
-github.com/barelyhuman/go v0.2.2-0.20230713173609-2ee88bb52634 h1:a53Bc1LuSAB9rGbQkBopsYFJNVTgeoUSgnd0do7PDxw=
-github.com/barelyhuman/go v0.2.2-0.20230713173609-2ee88bb52634/go.mod h1:hox2iDYZAarjpS7jKQeYIi2F+qMA8KLMtCws++L2sSY=
+github.com/aws/aws-sdk-go v1.34.0/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
+github.com/barelyhuman/go v0.2.3-0.20240516192751-30a6c804e4e5 h1:AbJ6ZaRkEc6CguQ6rXe0epKmFe/TmwSSFX5N6hsNO+A=
+github.com/barelyhuman/go v0.2.3-0.20240516192751-30a6c804e4e5/go.mod h1:hox2iDYZAarjpS7jKQeYIi2F+qMA8KLMtCws++L2sSY=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
@@ -20,6 +22,8 @@ github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5P
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/cjoudrey/gluahttp v0.0.0-20201111170219-25003d9adfa9 h1:rdWOzitWlNYeUsXmz+IQfa9NkGEq3gA/qQ3mOEqBU6o=
github.com/cjoudrey/gluahttp v0.0.0-20201111170219-25003d9adfa9/go.mod h1:X97UjDTXp+7bayQSFZk2hPvCTmTZIicUjZQRtkwgAKY=
+github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
+github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -29,24 +33,40 @@ github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25Kn
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
+github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
+github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
+github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
+github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
+github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
+github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
+github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
+github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
-github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
+github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@@ -67,13 +87,7 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/montanaflynn/stats v0.6.3/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/otiai10/copy v1.9.0 h1:7KFNiCgZ91Ru4qW4CWPf/7jqtxLagGRmIxWldPP9VY4=
-github.com/otiai10/copy v1.9.0/go.mod h1:hsfX19wcn0UWIHUQ3/4fHuehhk2UyArQ9dVFAn3FczI=
-github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
-github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs=
-github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo=
-github.com/otiai10/mint v1.4.0 h1:umwcf7gbpEwf7WFzqmWwSv0CzbeMsae2u9ZvpP8j2q4=
-github.com/otiai10/mint v1.4.0/go.mod h1:gifjb2MYOoULtKLqUAEILUG/9KONW6f7YsJ6vQLTlFI=
+github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@@ -81,17 +95,23 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
-github.com/prometheus/client_golang v1.5.1/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
+github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
+github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
-github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
+github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
+github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
-github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
+github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
+github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
+github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
+github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
@@ -101,12 +121,16 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/technoweenie/multipartstreamer v1.0.1/go.mod h1:jNVxdtShOxzAsukZwTSw6MDx5eUJoiEBsSvzDU9uzog=
-github.com/vadv/gopher-lua-libs v0.4.1 h1:NgxYEQ0C027X1U348GnFBxf6S8nqYtgHUEuZnA6w2bU=
-github.com/vadv/gopher-lua-libs v0.4.1/go.mod h1:j16bcBLqJUwpQT75QztdmfOa8J7CXMmf8BLbtvAR9NY=
+github.com/urfave/cli/v2 v2.27.1 h1:8xSQ6szndafKVRmfyeUMxkNUJQMjL1F2zmsZ+qHpfho=
+github.com/urfave/cli/v2 v2.27.1/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ=
+github.com/vadv/gopher-lua-libs v0.5.0 h1:m0hhWia1A1U3PIRmtdHWBj88ogzuIjm6HUBmtUa0Tz4=
+github.com/vadv/gopher-lua-libs v0.5.0/go.mod h1:mlSOxmrjug7DwisiH7xBFnBellHobPbvAIhVeI/4SYY=
+github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU=
+github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8=
github.com/yuin/gluamapper v0.0.0-20150323120927-d836955830e7/go.mod h1:bbMEM6aU1WDF1ErA5YJ0p91652pGv140gGw4Ww3RGp8=
github.com/yuin/goldmark v1.4.5/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg=
-github.com/yuin/goldmark v1.5.4 h1:2uY/xC0roWy8IBEGLgB1ywIoEJFGmRrX21YQcvGZzjU=
-github.com/yuin/goldmark v1.5.4/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+github.com/yuin/goldmark v1.7.1 h1:3bajkSilaCbjdKVsKdZjZCLBNPL9pYzrCakKaf4U49U=
+github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 h1:yHfZyN55+5dp1wG7wDKv8HQ044moxkyGq12KFFMFDxg=
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594/go.mod h1:U9ihbh+1ZN7fR5Se3daSPoz1CGF9IYtSvWwVQtnzGHU=
github.com/yuin/gopher-lua v0.0.0-20200816102855-ee81675732da/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA=
@@ -114,28 +138,48 @@ github.com/yuin/gopher-lua v1.1.0 h1:BojcDhfyDWgU2f2TOzYK/g5p2gxMrku8oupLDqlnSqE
github.com/yuin/gopher-lua v1.1.0/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
+golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
+golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220908164124-27713097b956 h1:XeJjHH1KiLpKGb6lvMiksZ9l0fVUh+AmGcm0nOMEBOY=
-golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
+google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
+google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
+google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
diff --git a/main.go b/main.go
index 660a0a9..bea4a29 100644
--- a/main.go
+++ b/main.go
@@ -1,1043 +1,104 @@
package main
import (
- "bytes"
- "encoding/json"
- "errors"
- "flag"
+ _ "embed"
"fmt"
- "html/template"
- textTmpl "text/template"
-
- "io"
- "io/fs"
- "log"
- "net/http"
"os"
- "path"
- "path/filepath"
- "regexp"
- "runtime"
- "strings"
- "sync"
-
- _ "embed"
-
- "github.com/barelyhuman/go/env"
- "github.com/barelyhuman/go/poller"
- ghttp "github.com/cjoudrey/gluahttp"
-
- "github.com/barelyhuman/go/color"
- cp "github.com/otiai10/copy"
-
- stringsLib "github.com/vadv/gopher-lua-libs/strings"
-
- yamlLib "github.com/vadv/gopher-lua-libs/yaml"
- "github.com/yuin/goldmark"
- "github.com/yuin/goldmark/extension"
- "github.com/yuin/goldmark/parser"
- "github.com/yuin/goldmark/renderer"
- "github.com/yuin/goldmark/renderer/html"
-
- highlighting "github.com/yuin/goldmark-highlighting"
-
- lua "github.com/yuin/gopher-lua"
- "gopkg.in/yaml.v3"
+ "time"
- luaAlvu "github.com/barelyhuman/alvu/lua/alvu"
- "golang.org/x/net/websocket"
- luajson "layeh.com/gopher-json"
+ "github.com/barelyhuman/alvu/commands"
+ "github.com/urfave/cli/v2"
)
-const logPrefix = "[alvu] "
-
-var mdProcessor goldmark.Markdown
-var baseurl string
-var basePath string
-var outPath string
-var hardWraps bool
-var hookCollection HookCollection
-var reloadCh = []chan bool{}
-var serveFlag *bool
-var notFoundPageExists bool
-
//go:embed .commitlog.release
-var release string
+var version string
-var layoutFiles []string = []string{"_head.html", "_tail.html", "_layout.html"}
-
-type SiteMeta struct {
- BaseURL string
-}
-
-type PageRenderData struct {
- Meta SiteMeta
- Data map[string]interface{}
- Extras map[string]interface{}
-}
-
-type LayoutRenderData struct {
- PageRenderData
- Content template.HTML
-}
-
-// TODO: move stuff into the alvu struct type
-// on each newly added feature or during improving
-// older features.
-type Alvu struct {
- publicPath string
- files []*AlvuFile
- filesIndex []string
-}
-
-func (al *Alvu) AddFile(file *AlvuFile) {
- al.files = append(al.files, file)
- al.filesIndex = append(al.filesIndex, file.sourcePath)
-}
-
-func (al *Alvu) IsAlvuFile(filePath string) bool {
- for _, af := range al.filesIndex {
- if af == filePath {
- return true
- }
- }
- return false
-}
-
-func (al *Alvu) Build() {
- for ind := range al.files {
- alvuFile := al.files[ind]
- alvuFile.Build()
- }
-
- onDebug(func() {
- debugInfo("Run all OnFinish Hooks")
- memuse()
- })
-
- // right before completion run all hooks again but for the onFinish
- hookCollection.RunAll("OnFinish")
-}
-
-func (al *Alvu) CopyPublic() {
- onDebug(func() {
- debugInfo("Before copying files")
- memuse()
- })
- // copy public to out
- _, err := os.Stat(al.publicPath)
- if err == nil {
- err = cp.Copy(al.publicPath, outPath)
- if err != nil {
- bail(err)
- }
- }
- onDebug(func() {
- debugInfo("After copying files")
- memuse()
- })
-}
+const logPrefix string = "[alvu] %v"
func main() {
- onDebug(func() {
- debugInfo("Before Exec")
- memuse()
- })
-
- var versionFlag bool
-
- flag.BoolVar(&versionFlag, "version", false, "version info")
- flag.BoolVar(&versionFlag, "v", false, "version info")
- basePathFlag := flag.String("path", ".", "`DIR` to search for the needed folders in")
- outPathFlag := flag.String("out", "./dist", "`DIR` to output the compiled files to")
- baseurlFlag := flag.String("baseurl", "/", "`URL` to be used as the root of the project")
- hooksPathFlag := flag.String("hooks", "./hooks", "`DIR` that contains hooks for the content")
- enableHighlightingFlag := flag.Bool("highlight", false, "enable highlighting for markdown files")
- highlightThemeFlag := flag.String("highlight-theme", "bw", "`THEME` to use for highlighting (supports most themes from pygments)")
- serveFlag = flag.Bool("serve", false, "start a local server")
- hardWrapsFlag := flag.Bool("hard-wrap", true, "enable hard wrapping of elements with `<br>`")
- portFlag := flag.String("port", "3000", "`PORT` to start the server on")
- pollDurationFlag := flag.Int("poll", 350, "Polling duration for file changes in milliseconds")
-
- flag.Parse()
-
- // Show version and exit
- if versionFlag {
- println(release)
- os.Exit(0)
- }
-
- baseurl = *baseurlFlag
- basePath = path.Join(*basePathFlag)
- pagesPath := path.Join(*basePathFlag, "pages")
- publicPath := path.Join(*basePathFlag, "public")
- headFilePath := path.Join(pagesPath, "_head.html")
- baseFilePath := path.Join(pagesPath, "_layout.html")
- tailFilePath := path.Join(pagesPath, "_tail.html")
- notFoundFilePath := path.Join(pagesPath, "404.html")
- outPath = path.Join(*outPathFlag)
- hooksPath := path.Join(*basePathFlag, *hooksPathFlag)
- hardWraps = *hardWrapsFlag
-
- headTailDeprecationWarning := color.ColorString{}
- headTailDeprecationWarning.Yellow(logPrefix).Yellow("[WARN] use of _tail.html and _head.html is deprecated, please use _layout.html instead")
-
- os.MkdirAll(publicPath, os.ModePerm)
-
- alvuApp := &Alvu{
- publicPath: publicPath,
- }
-
- watcher := NewWatcher(alvuApp, *pollDurationFlag)
-
- if *serveFlag {
- watcher.AddDir(pagesPath)
- watcher.AddDir(publicPath)
- }
-
- onDebug(func() {
- debugInfo("Opening _head")
- memuse()
- })
- headFileFd, err := os.Open(headFilePath)
- if err != nil {
- if err == fs.ErrNotExist {
- log.Println("no _head.html found,skipping")
- }
- } else {
- fmt.Println(headTailDeprecationWarning.String())
- }
-
- onDebug(func() {
- debugInfo("Opening _layout")
- memuse()
- })
- baseFileFd, err := os.Open(baseFilePath)
- if err != nil {
- if err == fs.ErrNotExist {
- log.Println("no _layout.html found,skipping")
- }
- }
-
- onDebug(func() {
- debugInfo("Opening _tail")
- memuse()
- })
- tailFileFd, err := os.Open(tailFilePath)
- if err != nil {
- if err == fs.ErrNotExist {
- log.Println("no _tail.html found, skipping")
- }
- } else {
- fmt.Println(headTailDeprecationWarning.String())
- }
-
- onDebug(func() {
- debugInfo("Checking if 404.html exists")
- memuse()
- })
- if _, err := os.Stat(notFoundFilePath); errors.Is(err, os.ErrNotExist) {
- notFoundPageExists = false
- log.Println("no 404.html found, skipping")
- } else {
- notFoundPageExists = true
- }
-
- alvuApp.CopyPublic()
-
- onDebug(func() {
- debugInfo("Reading hook and to process files")
- memuse()
- })
- CollectHooks(basePath, hooksPath)
- toProcess := CollectFilesToProcess(pagesPath)
- onDebug(func() {
- log.Println("printing files to process")
- log.Println(toProcess)
- })
-
- initMDProcessor(*enableHighlightingFlag, *highlightThemeFlag)
-
- onDebug(func() {
- debugInfo("Running all OnStart hooks")
- memuse()
- })
-
- hookCollection.RunAll("OnStart")
-
- prefixSlashPath := regexp.MustCompile(`^\/`)
-
- onDebug(func() {
- debugInfo("Creating Alvu Files")
- memuse()
- })
- for _, toProcessItem := range toProcess {
- fileName := strings.Replace(toProcessItem, pagesPath, "", 1)
- fileName = prefixSlashPath.ReplaceAllString(fileName, "")
- destFilePath := strings.Replace(toProcessItem, pagesPath, outPath, 1)
- isHTML := strings.HasSuffix(fileName, ".html")
-
- alvuFile := &AlvuFile{
- lock: &sync.Mutex{},
- sourcePath: toProcessItem,
- hooks: hookCollection,
- destPath: destFilePath,
- name: fileName,
- isHTML: isHTML,
- headFile: headFileFd,
- tailFile: tailFileFd,
- baseTemplate: baseFileFd,
- data: map[string]interface{}{},
- extras: map[string]interface{}{},
- }
-
- alvuApp.AddFile(alvuFile)
-
- // If serving, also add the nested path into it
- if *serveFlag {
- watcher.AddDir(path.Dir(alvuFile.sourcePath))
- }
- }
-
- alvuApp.Build()
-
- onDebug(func() {
- runtime.GC()
- debugInfo("On Completions")
- memuse()
- })
-
- cs := &color.ColorString{}
- fmt.Println(cs.Blue(logPrefix).Green("Compiled ").Cyan("\"" + basePath + "\"").Green(" to ").Cyan("\"" + outPath + "\"").String())
-
- if *serveFlag {
- watcher.StartWatching()
- runServer(*portFlag)
- }
-
- hookCollection.Shutdown()
-}
-
-func runServer(port string) {
- normalizedPort := port
-
- if !strings.HasPrefix(normalizedPort, ":") {
- normalizedPort = ":" + normalizedPort
- }
-
- cs := &color.ColorString{}
- cs.Blue(logPrefix).Green("Serving on").Reset(" ").Cyan(normalizedPort)
- fmt.Println(cs.String())
-
- http.Handle("/", http.HandlerFunc(ServeHandler))
- AddWebsocketHandler()
-
- err := http.ListenAndServe(normalizedPort, nil)
-
- if strings.Contains(err.Error(), "address already in use") {
- bail(errors.New("port already in use, use another port with the `-port` flag instead"))
- }
-}
-
-func CollectFilesToProcess(basepath string) []string {
- files := []string{}
-
- pathstoprocess, err := os.ReadDir(basepath)
- if err != nil {
- panic(err)
- }
-
- for _, pathInfo := range pathstoprocess {
- _path := path.Join(basepath, pathInfo.Name())
-
- if Contains(layoutFiles, pathInfo.Name()) {
- continue
- }
-
- if pathInfo.IsDir() {
- files = append(files, CollectFilesToProcess(_path)...)
- } else {
- files = append(files, _path)
- }
-
- }
-
- return files
-}
-
-func CollectHooks(basePath, hooksBasePath string) {
- if _, err := os.Stat(hooksBasePath); err != nil {
- return
- }
- pathsToProcess, err := os.ReadDir(hooksBasePath)
- if err != nil {
- panic(err)
- }
-
- for _, pathInfo := range pathsToProcess {
- if !strings.HasSuffix(pathInfo.Name(), ".lua") {
- continue
- }
- hook := NewHook()
- hookPath := path.Join(hooksBasePath, pathInfo.Name())
- if err := hook.DoFile(hookPath); err != nil {
- panic(err)
- }
- hookCollection = append(hookCollection, &Hook{
- path: hookPath,
- state: hook,
- })
- }
-
-}
-
-func initMDProcessor(highlight bool, theme string) {
-
- rendererOptions := []renderer.Option{
- html.WithXHTML(),
- html.WithUnsafe(),
- }
-
- if hardWraps {
- rendererOptions = append(rendererOptions, html.WithHardWraps())
- }
- gmPlugins := []goldmark.Option{
- goldmark.WithExtensions(extension.GFM, extension.Footnote),
- goldmark.WithParserOptions(
- parser.WithAutoHeadingID(),
- ),
- goldmark.WithRendererOptions(
- rendererOptions...,
- ),
- }
-
- if highlight {
- gmPlugins = append(gmPlugins, goldmark.WithExtensions(
- highlighting.NewHighlighting(
- highlighting.WithStyle(theme),
- ),
- ))
- }
-
- mdProcessor = goldmark.New(gmPlugins...)
-}
-
-type Hook struct {
- path string
- state *lua.LState
-}
-
-type HookCollection []*Hook
-
-func (hc HookCollection) Shutdown() {
- for _, hook := range hc {
- hook.state.Close()
- }
-}
-
-func (hc HookCollection) RunAll(funcName string) {
- for _, hook := range hc {
- hookFunc := hook.state.GetGlobal(funcName)
-
- if hookFunc == lua.LNil {
- continue
- }
-
- if err := hook.state.CallByParam(lua.P{
- Fn: hookFunc,
- NRet: 0,
- Protect: true,
- }); err != nil {
- bail(err)
- }
- }
-}
-
-type AlvuFile struct {
- lock *sync.Mutex
- hooks HookCollection
- name string
- sourcePath string
- isHTML bool
- destPath string
- meta map[string]interface{}
- content []byte
- writeableContent []byte
- headFile *os.File
- tailFile *os.File
- baseTemplate *os.File
- targetName []byte
- data map[string]interface{}
- extras map[string]interface{}
-}
-
-func (alvuFile *AlvuFile) Build() {
- bail(alvuFile.ReadFile())
- bail(alvuFile.ParseMeta())
-
- if len(alvuFile.hooks) == 0 {
- alvuFile.ProcessFile(nil)
- }
-
- for _, hook := range hookCollection {
-
- isForSpecificFile := hook.state.GetGlobal("ForFile")
-
- if isForSpecificFile != lua.LNil {
- if alvuFile.name == isForSpecificFile.String() {
- alvuFile.ProcessFile(hook.state)
- } else {
- bail(alvuFile.ProcessFile(nil))
- }
- } else {
- bail(alvuFile.ProcessFile(hook.state))
- }
- }
-
- alvuFile.FlushFile()
-}
-
-func (af *AlvuFile) ReadFile() error {
- filecontent, err := os.ReadFile(af.sourcePath)
- if err != nil {
- return fmt.Errorf("error reading file, error: %v", err)
- }
- af.content = filecontent
- return nil
-}
-
-func (af *AlvuFile) ParseMeta() error {
- sep := []byte("---")
- if !bytes.HasPrefix(af.content, sep) {
- af.writeableContent = af.content
- return nil
- }
-
- metaParts := bytes.SplitN(af.content, sep, 3)
-
- var meta map[string]interface{}
- err := yaml.Unmarshal([]byte(metaParts[1]), &meta)
- if err != nil {
- return err
- }
-
- af.meta = meta
- af.writeableContent = []byte(metaParts[2])
-
- return nil
-}
-
-func (af *AlvuFile) ProcessFile(hook *lua.LState) error {
- // pre process hook => should return back json with `content` and `data`
- af.lock.Lock()
- defer af.lock.Unlock()
-
- af.targetName = regexp.MustCompile(`\.md$`).ReplaceAll([]byte(af.name), []byte(".html"))
- onDebug(func() {
- debugInfo(af.name + " will be changed to " + string(af.targetName))
- })
-
- buf := bytes.NewBuffer([]byte(""))
- mdToHTML := ""
-
- if filepath.Ext(af.name) == ".md" {
- newName := strings.Replace(af.name, filepath.Ext(af.name), ".html", 1)
- af.targetName = []byte(newName)
- mdProcessor.Convert(af.writeableContent, buf)
- mdToHTML = buf.String()
- }
-
- if hook == nil {
- return nil
- }
-
- hookInput := struct {
- Name string `json:"name"`
- SourcePath string `json:"source_path"`
- DestPath string `json:"dest_path"`
- Meta map[string]interface{} `json:"meta"`
- WriteableContent string `json:"content"`
- HTMLContent string `json:"html"`
- }{
- Name: string(af.targetName),
- SourcePath: af.sourcePath,
- DestPath: af.destPath,
- Meta: af.meta,
- WriteableContent: string(af.writeableContent),
- HTMLContent: mdToHTML,
- }
-
- hookJsonInput, err := json.Marshal(hookInput)
- bail(err)
-
- if err := hook.CallByParam(lua.P{
- Fn: hook.GetGlobal("Writer"),
- NRet: 1,
- Protect: true,
- }, lua.LString(hookJsonInput)); err != nil {
- panic(err)
- }
-
- ret := hook.Get(-1)
-
- var fromPlug map[string]interface{}
-
- err = json.Unmarshal([]byte(ret.String()), &fromPlug)
- bail(err)
-
- if fromPlug["content"] != nil {
- stringVal := fmt.Sprintf("%s", fromPlug["content"])
- af.writeableContent = []byte(stringVal)
- }
-
- if fromPlug["name"] != nil {
- af.targetName = []byte(fmt.Sprintf("%v", fromPlug["name"]))
- }
-
- if fromPlug["data"] != nil {
- af.data = mergeMapWithCheck(af.data, fromPlug["data"])
- }
-
- if fromPlug["extras"] != nil {
- af.extras = mergeMapWithCheck(af.extras, fromPlug["extras"])
- }
-
- hook.Pop(1)
- return nil
-}
-
-func (af *AlvuFile) FlushFile() {
- destFolder := filepath.Dir(af.destPath)
- os.MkdirAll(destFolder, os.ModePerm)
-
- targetFile := strings.Replace(path.Join(af.destPath), af.name, string(af.targetName), 1)
- onDebug(func() {
- debugInfo("flushing for file: " + af.name + string(af.targetName))
- debugInfo("flusing file: " + targetFile)
- })
-
- f, err := os.Create(targetFile)
- bail(err)
- defer f.Sync()
-
- writeHeadTail := false
-
- if af.baseTemplate == nil && (filepath.Ext(af.sourcePath) == ".md" || filepath.Ext(af.sourcePath) == "html") {
- writeHeadTail = true
- }
-
- if writeHeadTail && af.headFile != nil {
- shouldCopyContentsWithReset(af.headFile, f)
- }
-
- renderData := PageRenderData{
- Meta: SiteMeta{
- BaseURL: baseurl,
+ app := &cli.App{
+ Name: "alvu",
+ Usage: "A scriptable static site generator",
+ CommandNotFound: cli.ShowCommandCompletions,
+ Action: func(c *cli.Context) error {
+ return commands.Alvu(c)
+ },
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "hooks",
+ Value: "./hooks",
+ },
+ &cli.StringFlag{
+ Name: "out",
+ Value: "./dist",
+ },
+ &cli.StringFlag{
+ Name: "path",
+ Value: ".",
+ },
+ &cli.StringFlag{
+ Name: "baseurl",
+ Value: "/",
+ },
+ &cli.BoolFlag{
+ Name: "hard-wrap",
+ Value: false,
+ },
+ &cli.BoolFlag{
+ Name: "highlight",
+ Value: false,
+ },
+ &cli.StringFlag{
+ Name: "highlight-theme",
+ Value: "bw",
+ },
+ &cli.BoolFlag{
+ Name: "serve",
+ Value: false,
+ Aliases: []string{"s"},
+ },
+ &cli.IntFlag{
+ Name: "poll",
+ Usage: "Define the poll duration in milliseconds",
+ Value: 1000,
+ },
+ &cli.StringFlag{
+ Name: "env",
+ Usage: "Environment file to load",
+ Value: ".env",
+ },
+ &cli.StringFlag{
+ Name: "port",
+ Usage: "port to use for serving the application",
+ Value: "3000",
+ Aliases: []string{"p"},
+ },
+ },
+ Version: version,
+ Compiled: time.Now(),
+ HideVersion: false,
+ Commands: []*cli.Command{
+ {
+ Name: "init",
+ Description: "Initialise a new alvu Project",
+ Args: true,
+ ArgsUsage: "",
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "force",
+ Aliases: []string{"f"},
+ Usage: "Force creation in the directory, overwriting any files that already exist",
+ },
+ },
+ Action: func(ctx *cli.Context) error {
+ return commands.AlvuInit(ctx)
+ },
+ },
},
- Data: af.data,
- Extras: af.extras,
- }
-
- // Run the Markdown file through the conversion
- // process to be able to use template variables in
- // the markdown instead of writing them in
- // raw HTML
- var preConvertHTML bytes.Buffer
- preConvertTmpl := textTmpl.New("temporary_pre_template")
- preConvertTmpl.Parse(string(af.writeableContent))
- err = preConvertTmpl.Execute(&preConvertHTML, renderData)
- bail(err)
-
- var toHtml bytes.Buffer
- if !af.isHTML {
- err = mdProcessor.Convert(preConvertHTML.Bytes(), &toHtml)
- bail(err)
- } else {
- toHtml = preConvertHTML
- }
-
- layoutData := LayoutRenderData{
- PageRenderData: renderData,
- Content: template.HTML(toHtml.Bytes()),
- }
-
- // If a layout file was found
- // write the converted html content into the
- // layout template file
-
- layout := template.New("layout")
- var layoutTemplateData string
- if af.baseTemplate != nil {
- layoutTemplateData = string(readFileToBytes(af.baseTemplate))
- } else {
- layoutTemplateData = `{{.Content}}`
- }
-
- layoutTemplateData = _injectLiveReload(&layoutTemplateData)
- toHtml.Reset()
- layout.Parse(layoutTemplateData)
- layout.Execute(&toHtml, layoutData)
-
- io.Copy(
- f, &toHtml,
- )
-
- if writeHeadTail && af.tailFile != nil && af.baseTemplate == nil {
- shouldCopyContentsWithReset(af.tailFile, f)
- }
-
- data, err := os.ReadFile(targetFile)
- bail(err)
-
- onDebug(func() {
- debugInfo("template path: %v", af.sourcePath)
- })
-
- t := template.New(path.Join(af.sourcePath))
- t.Parse(string(data))
-
- f.Seek(0, 0)
-
- err = t.Execute(f, renderData)
- bail(err)
-}
-
-func NewHook() *lua.LState {
- lState := lua.NewState()
- luaAlvu.Preload(lState)
- luajson.Preload(lState)
- yamlLib.Preload(lState)
- stringsLib.Preload(lState)
- lState.PreloadModule("http", ghttp.NewHttpModule(&http.Client{}).Loader)
- if basePath == "." {
- lState.SetGlobal("workingdir", lua.LString(""))
- } else {
- lState.SetGlobal("workingdir", lua.LString(basePath))
- }
- return lState
-}
-
-// UTILS
-func memuse() {
- var m runtime.MemStats
- runtime.ReadMemStats(&m)
- fmt.Printf("heap: %v MiB\n", bytesToMB(m.HeapAlloc))
-}
-
-func bytesToMB(inBytes uint64) uint64 {
- return inBytes / 1024 / 1024
-}
-
-func bail(err error) {
- if err == nil {
- return
- }
- cs := &color.ColorString{}
- fmt.Fprintln(os.Stderr, cs.Red(logPrefix).Red(": "+err.Error()).String())
- panic("")
-}
-
-func debugInfo(msg string, a ...any) {
- cs := &color.ColorString{}
- prefix := logPrefix
- baseMessage := cs.Reset("").Yellow(prefix).Reset(" ").Gray(msg).String()
- fmt.Fprintf(os.Stdout, baseMessage+" \n", a...)
-}
-
-func showDebug() bool {
- showInfo := env.Get("DEBUG_ALVU", "")
- return len(showInfo) != 0
-}
-
-func onDebug(fn func()) {
- if !showDebug() {
- return
- }
-
- fn()
-}
-
-func mergeMapWithCheck(maps ...any) (source map[string]interface{}) {
- source = map[string]interface{}{}
- for _, toCheck := range maps {
- if pairs, ok := toCheck.(map[string]interface{}); ok {
- for k, v := range pairs {
- source[k] = v
- }
- }
- }
- return source
-}
-
-func readFileToBytes(fd *os.File) []byte {
- buf := &bytes.Buffer{}
- fd.Seek(0, 0)
- _, err := io.Copy(buf, fd)
- bail(err)
- return buf.Bytes()
-}
-
-func shouldCopyContentsWithReset(src *os.File, target *os.File) {
- src.Seek(0, 0)
- _, err := io.Copy(target, src)
- bail(err)
-}
-
-func ServeHandler(rw http.ResponseWriter, req *http.Request) {
- path := req.URL.Path
-
- if path == "/" {
- path = filepath.Join(outPath, "index.html")
- http.ServeFile(rw, req, path)
- return
- }
-
- // check if the requested file already exists
- file := filepath.Join(outPath, path)
- info, err := os.Stat(file)
-
- // if not, check if it's a directory
- // and if it's a directory, we look for
- // a index.html inside the directory to return instead
- if err == nil {
- if info.Mode().IsDir() {
- file = filepath.Join(outPath, path, "index.html")
- _, err := os.Stat(file)
- if err != nil {
- notFoundHandler(rw, req)
- return
- }
- }
-
- http.ServeFile(rw, req, file)
- return
}
- // if neither a directory or file was found
- // try a secondary case where the file might be missing
- // a `.html` extension for cleaner url so append a .html
- // to look for the file.
+ err := app.Run(os.Args)
if err != nil {
- file := filepath.Join(outPath, normalizeFilePath(path))
- _, err := os.Stat(file)
-
- if err != nil {
- notFoundHandler(rw, req)
- return
- }
-
- http.ServeFile(rw, req, file)
- return
- }
-
- notFoundHandler(rw, req)
-}
-
-// _webSocketHandler Internal function to setup a listener loop
-// for the live reload setup
-func _webSocketHandler(ws *websocket.Conn) {
- reloadCh = append(reloadCh, make(chan bool, 1))
- currIndex := len(reloadCh) - 1
-
- defer ws.Close()
-
- for range reloadCh[currIndex] {
- err := websocket.Message.Send(ws, "reload")
- if err != nil {
- // For debug only
- // log.Printf("Error sending message: %s", err.Error())
- break
- }
- onDebug(func() {
- debugInfo("Reload message sent")
- })
- }
-
-}
-
-func AddWebsocketHandler() {
- wsHandler := websocket.Handler(_webSocketHandler)
-
- // Use a custom HTTP handler function to upgrade the HTTP request to WebSocket
- http.HandleFunc("/ws", func(w http.ResponseWriter, r *http.Request) {
- // Check the request's 'Upgrade' header to see if it's a WebSocket request
- if r.Header.Get("Upgrade") != "websocket" {
- http.Error(w, "Not a WebSocket handshake request", http.StatusBadRequest)
- return
- }
-
- // Upgrade the HTTP connection to a WebSocket connection
- wsHandler.ServeHTTP(w, r)
- })
-
-}
-
-// _clientNotifyReload Internal function to
-// report changes to all possible reload channels
-func _clientNotifyReload() {
- for ind := range reloadCh {
- reloadCh[ind] <- true
- }
- reloadCh = []chan bool{}
-}
-
-func normalizeFilePath(path string) string {
- if strings.HasSuffix(path, ".html") {
- return path
+ fmt.Fprintf(os.Stderr, logPrefix, err)
}
- return path + ".html"
-}
-
-func notFoundHandler(w http.ResponseWriter, r *http.Request) {
- if notFoundPageExists {
- compiledNotFoundFile := filepath.Join(outPath, "404.html")
- notFoundFile, err := os.ReadFile(compiledNotFoundFile)
- if err != nil {
- http.Error(w, "404, Page not found....", http.StatusNotFound)
- return
- }
- w.WriteHeader(http.StatusNotFound)
- w.Header().Set("Content-Type", "text/html; charset=utf-8")
- w.Write(notFoundFile)
- return
- }
- http.Error(w, "404, Page not found....", http.StatusNotFound)
-}
-
-func Contains(collection []string, item string) bool {
- for _, x := range collection {
- if item == x {
- return true
- }
- }
- return false
-}
-
-// Watcher , create an interface over the fsnotify watcher
-// to be able to run alvu compile processes again
-// FIXME: redundant compile process for the files
-type Watcher struct {
- alvu *Alvu
- poller *poller.Poller
- dirs []string
-}
-
-func NewWatcher(alvu *Alvu, interval int) *Watcher {
- watcher := &Watcher{
- alvu: alvu,
- poller: poller.NewPollWatcher(interval),
- }
-
- return watcher
-}
-
-func (w *Watcher) AddDir(dirPath string) {
-
- for _, pth := range w.dirs {
- if pth == dirPath {
- return
- }
- }
-
- w.dirs = append(w.dirs, dirPath)
- w.poller.Add(dirPath)
-}
-
-func (w *Watcher) RebuildAlvu() {
- onDebug(func() {
- debugInfo("Rebuild Started")
- })
- w.alvu.CopyPublic()
- w.alvu.Build()
- onDebug(func() {
- debugInfo("Build Completed")
- })
-}
-
-func (w *Watcher) RebuildFile(filePath string) {
- onDebug(func() {
- debugInfo("RebuildFile Started")
- })
- for i, af := range w.alvu.files {
- if af.sourcePath != filePath {
- continue
- }
-
- w.alvu.files[i].Build()
- break
- }
- onDebug(func() {
- debugInfo("RebuildFile Completed")
- })
-}
-
-func (w *Watcher) StartWatching() {
- go w.poller.StartPoller()
- go func() {
- for {
- select {
- case evt := <-w.poller.Events:
- onDebug(func() {
- debugInfo("Events registered")
- })
-
- recompiledText := &color.ColorString{}
- recompiledText.Blue(logPrefix).Green("Recompiled!").Reset(" ")
-
- _, err := os.Stat(evt.Path)
-
- // Do nothing if the file doesn't exit, just continue
- if err != nil {
- continue
- }
-
- // If alvu file then just build the file, else
- // just rebuilt the whole folder since it could
- // be a file from the public folder or the _layout file
- if w.alvu.IsAlvuFile(evt.Path) {
- recompilingText := &color.ColorString{}
- recompilingText.Blue(logPrefix).Cyan("Recompiling: ").Gray(evt.Path).Reset(" ")
- fmt.Println(recompilingText.String())
- w.RebuildFile(evt.Path)
- } else {
- recompilingText := &color.ColorString{}
- recompilingText.Blue(logPrefix).Cyan("Recompiling: ").Gray("All").Reset(" ")
- fmt.Println(recompilingText.String())
- w.RebuildAlvu()
- }
-
- _clientNotifyReload()
- fmt.Println(recompiledText.String())
- continue
-
- case err := <-w.poller.Errors:
- // If the poller has an error, just crash,
- // digesting polling issues without killing the program would make it complicated
- // to handle cleanup of all the kind of files that are being maintained by alvu
- bail(err)
- }
- }
- }()
-}
-
-func _injectLiveReload(layoutHTML *string) string {
- if !*serveFlag {
- return *layoutHTML
- }
- return *layoutHTML + ``
}
diff --git a/pkg/alvu/alvu.go b/pkg/alvu/alvu.go
new file mode 100644
index 0000000..6926661
--- /dev/null
+++ b/pkg/alvu/alvu.go
@@ -0,0 +1,627 @@
+package alvu
+
+import (
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ templateHTML "html/template"
+
+ "github.com/barelyhuman/alvu/transformers"
+ "github.com/barelyhuman/alvu/transformers/markdown"
+ "golang.org/x/net/websocket"
+
+ htmlT "github.com/barelyhuman/alvu/transformers/html"
+)
+
+// Constants
+const slotStartTag = "<slot>"
+const slotEndTag = "</slot>"
+const contentTag = "{{.Content}}"
+
+type SiteMeta struct {
+ BaseURL string
+}
+
+type PageRenderData struct {
+ Meta SiteMeta
+ Data map[string]interface{}
+ Extras map[string]interface{}
+}
+
+type AlvuConfig struct {
+ HookDir string
+ OutDir string
+ RootPath string
+
+ BaseURL string
+ EnableHardWrap bool
+ EnableHighlighting bool
+ HighlightingTheme string
+
+ Serve bool
+ PollDuration int
+ PortNumber string
+
+ Transformers map[string][]transformers.Transfomer
+
+ // Internals
+ logger Logger
+ hookHandler *Hooks
+ watcher *Watcher
+
+ // Internals - Websockets // can be separated
+ shouldRebuild bool
+ rebuildLock sync.Mutex
+ rebuildCond *sync.Cond
+ connections map[*websocket.Conn]struct{}
+}
+
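+// Rebuild runs a full build after a watched file changes and wakes the
+// live-reload notifier so connected clients are told to refresh.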
+func (ac *AlvuConfig) Rebuild(path string) {
+ ac.rebuildLock.Lock()
+ ac.logger.Info(fmt.Sprintf("Changed: %v, Recompiling.", path))
+ err := ac.Build()
+ if err != nil {
+ ac.logger.Error(err.Error())
+ }
+ ac.shouldRebuild = true
+ ac.rebuildLock.Unlock()
+ ac.rebuildCond.Broadcast()
+}
+
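+// Run sets up logging, the file watcher (in serve mode) and the
+// live-reload bookkeeping, performs an initial build and then hands
+// control to StartServer.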
+func (ac *AlvuConfig) Run() error {
+ ac.logger = NewLogger()
+ ac.logger.LogPrefix = "[alvu]"
+
+ ac.rebuildCond = sync.NewCond(&ac.rebuildLock)
+ ac.connections = make(map[*websocket.Conn]struct{})
+
+ if ac.Serve {
+ ac.watcher = NewWatcher()
+ ac.watcher.logger = ac.logger
+
+ go func(ac *AlvuConfig) {
+ for path := range ac.watcher.recompile {
+ ac.Rebuild(path)
+ }
+ }(ac)
+
+ ac.watcher.Start()
+ ac.monitorRebuilds()
+ }
+
+ err := ac.Build()
+ if err != nil {
+ return err
+ }
+
+ return ac.StartServer()
+}
+
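+// monitorRebuilds waits on the rebuild condition and sends a "reload"
+// message to every open websocket connection, dropping connections that fail.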
+func (ac *AlvuConfig) monitorRebuilds() {
+ go func() {
+ for {
+ ac.rebuildCond.L.Lock()
+ for !ac.shouldRebuild {
+ ac.rebuildCond.Wait()
+ }
+ for conn := range ac.connections {
+ err := websocket.Message.Send(conn, "reload")
+ if err != nil {
+ delete(ac.connections, conn)
+ }
+ }
+ ac.shouldRebuild = false
+ ac.rebuildCond.L.Unlock()
+ }
+ }()
+}
+
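+// Build performs a single compile pass: it registers the per-extension
+// transformers, collects page and public files, runs them through the
+// transformers and lua hooks, copies public assets and flushes the
+// rendered pages to the output directory.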
+func (ac *AlvuConfig) Build() error {
+ hooksHandler := Hooks{
+ ac: ac,
+ }
+ ac.hookHandler = &hooksHandler
+
+ ac.Transformers = map[string][]transformers.Transfomer{
+ ".html": {
+ &htmlT.HTMLTransformer{},
+ },
+ ".md": {
+ &markdown.MarkdownTransformer{
+ EnableHardWrap: ac.EnableHardWrap,
+ EnableHighlighting: ac.EnableHighlighting,
+ HighlightingTheme: ac.HighlightingTheme,
+ BaseURL: ac.BaseURL,
+ },
+ },
+ }
+
+ pageDir := filepath.Join(ac.RootPath, "pages")
+ publicDir := filepath.Join(ac.RootPath, "public")
+ hooksDir := filepath.Join(ac.RootPath, ac.HookDir)
+
+ filesToProcess, err := ac.ReadDir(pageDir)
+ if err != nil {
+ return err
+ }
+
+ ac.logger.Debug(fmt.Sprintf("filesToProcess: %v", filesToProcess))
+
+ publicFiles, err := ac.ReadDir(publicDir)
+ if err != nil {
+ return err
+ }
+
+ if ac.Serve {
+ ac.watcher.AddDir(pageDir)
+ ac.watcher.AddDir(publicDir)
+ ac.watcher.AddDir(hooksDir)
+ }
+
+ normalizedFiles, err := runTransfomers(filesToProcess, ac)
+ if err != nil {
+ return err
+ }
+
+ var processedFiles []HookedFile
+
+ ac.hookHandler.Load()
+
+ ac.hookHandler.runLifeCycleHooks("OnStart")
+
+ for _, tf := range normalizedFiles {
+ processedFiles = append(processedFiles, hooksHandler.ProcessFile(tf))
+ }
+
+ ac.HandlePublicFiles(publicFiles)
+ return ac.FlushFiles(processedFiles)
+}
+
+func (ac *AlvuConfig) ReadLayout() string {
+ layoutFilePath := filepath.Join(ac.RootPath, "pages", "_layout.html")
+ fileInfo, err := os.Stat(layoutFilePath)
+ defaultLayout := ""
+
+ if ac.Serve {
+ defaultLayout = injectWebsocketConnection(defaultLayout, ac.PortNumber)
+ }
+
+ // fall back to the default layout when the layout file is missing,
+ // unreadable, or is a directory
+ if err != nil {
+ return defaultLayout
+ }
+ if fileInfo.IsDir() {
+ return defaultLayout
+ }
+ data, _ := os.ReadFile(
+ layoutFilePath,
+ )
+
+ if ac.Serve {
+ return injectWebsocketConnection(string(data), ac.PortNumber)
+ }
+
+ return string(data)
+}
+
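+// HandlePublicFiles copies everything under the public directory into OutDir
+// concurrently, preserving the relative directory structure.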
+func (ac *AlvuConfig) HandlePublicFiles(files []string) (err error) {
+ var wg sync.WaitGroup
+ for _, v := range files {
+ wg.Add(1)
+ file := v
+ go func() {
+ defer wg.Done()
+ destFile := filepath.Clean(file)
+ destFile = strings.TrimPrefix(destFile, filepath.Join(ac.RootPath, "public"))
+ destFile = filepath.Join(ac.OutDir, destFile)
+ os.MkdirAll(filepath.Dir(destFile), os.ModePerm)
+
+ fileToCreate, _ := os.Create(destFile)
+ reader, _ := os.OpenFile(file, os.O_RDONLY, os.ModePerm)
+ io.Copy(fileToCreate, reader)
+ // close the handles so the copied assets are flushed to disk
+ fileToCreate.Close()
+ reader.Close()
+ }()
+ }
+ wg.Wait()
+ return
+}
+
+func (ac *AlvuConfig) createTransformedFile(filePath string, content string) (transformedFile transformers.TransformedFile, err error) {
+ fileExt := filepath.Ext(filePath)
+ fileWriter, err := os.CreateTemp("", "alvu-")
+ if err != nil {
+ return
+ }
+ defer fileWriter.Close()
+
+ _, err = fileWriter.WriteString(content)
+ if err != nil {
+ return
+ }
+ transformedFile.TransformedFile = fileWriter.Name()
+ transformedFile.SourcePath = filePath
+ transformedFile.Extension = fileExt
+ return
+}
+
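+// FlushFiles writes every processed page into OutDir: the page content is
+// injected into the layout's slot, the result is parsed as an html/template
+// exposing a `transform` helper, and then rendered with the page's meta,
+// data and extras. An illustrative (not shipped) layout snippet:
+//
+//	<nav>{{ transform ".md" "[home](/)" }}</nav>
+//	<slot></slot>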
+func (ac *AlvuConfig) FlushFiles(files []HookedFile) error {
+ if err := os.MkdirAll(ac.OutDir, os.ModePerm); err != nil {
+ return err
+ }
+
+ if hasLegacySlot(ac.ReadLayout()) {
+ ac.logger.Warning("Please use `<slot></slot>` instead of `{{.Content}}` in _layout.html")
+ }
+
+ for i := range files {
+ hookedFile := files[i]
+ originalDir, baseFile := filepath.Split(hookedFile.SourcePath)
+ newDir := strings.TrimPrefix(originalDir, filepath.Join(ac.RootPath, "pages"))
+ fileWithNewExtension := strings.TrimSuffix(baseFile, hookedFile.Extension) + ".html"
+ destFile := filepath.Join(
+ ac.OutDir,
+ newDir,
+ fileWithNewExtension,
+ )
+
+ ac.logger.Debug(fmt.Sprintf("originalFile: %v, destFile: %v", hookedFile.SourcePath, destFile))
+
+ err := os.MkdirAll(filepath.Dir(destFile), os.ModePerm)
+ if err != nil {
+ return err
+ }
+
+ destWriter, err := os.Create(destFile)
+ if err != nil {
+ return err
+ }
+ defer destWriter.Close()
+
+ if len(hookedFile.transform) > 1 {
+ for _, t := range ac.Transformers[hookedFile.transform] {
+ afterTransform, err := t.TransformContent(hookedFile.content)
+ if err != nil {
+ return err
+ }
+ hookedFile.content = afterTransform
+ }
+ }
+
+ replaced, err := ac.injectInSlot(
+ ac.ReadLayout(),
+ string(hookedFile.content),
+ )
+
+ if err != nil {
+ return err
+ }
+
+ template := templateHTML.New("temporaryTemplate")
+ template = template.Funcs(templateHTML.FuncMap{
+ "transform": func(extension string, content string) templateHTML.HTML {
+ var transformed []byte = []byte(content)
+ for _, t := range ac.Transformers[extension] {
+ transformed, _ = t.TransformContent(transformed)
+ }
+ return templateHTML.HTML(transformed)
+ },
+ })
+ template, err = template.Parse(replaced)
+ if err != nil {
+ ac.logger.Error(fmt.Sprintf("Failed to parse the page template with error: %v", err))
+ panic("")
+ }
+
+ renderData := PageRenderData{
+ Meta: SiteMeta{
+ BaseURL: ac.BaseURL,
+ },
+ Data: hookedFile.data,
+ Extras: hookedFile.extras,
+ }
+
+ err = template.Execute(destWriter, renderData)
+ if err != nil {
+ return err
+ }
+ }
+
+ ac.logger.Info("Output in: " + ac.OutDir)
+ ac.logger.Success("Done")
+ ac.hookHandler.runLifeCycleHooks("OnFinish")
+
+ return nil
+}
+
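+// runTransfomers pushes every page through the transformer chain registered
+// for its extension, captures any frontmatter metadata along the way, and
+// writes the intermediate output to a temporary file for the hooks to consume.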
+func runTransfomers(filesToProcess []string, ac *AlvuConfig) ([]transformers.TransformedFile, error) {
+ normalizedFiles := []transformers.TransformedFile{}
+
+ for _, fileToNormalize := range filesToProcess {
+ extension := filepath.Ext(fileToNormalize)
+
+ if len(ac.Transformers[extension]) < 1 {
+ continue
+ }
+
+ originalContent, err := os.ReadFile(fileToNormalize)
+ mutableContent := originalContent
+ if err != nil {
+ return nil, fmt.Errorf("failed to read file %v with error %v", fileToNormalize, err)
+ }
+
+ var meta map[string]interface{}
+ for _, transformer := range ac.Transformers[extension] {
+ nextContent, err := transformer.TransformContent(mutableContent)
+ if err != nil {
+ return nil, fmt.Errorf("failed to transform file: %v, with error: %v", fileToNormalize, err)
+ }
+ newMeta, _, err := transformer.ExtractMeta(originalContent)
+ if err != nil {
+ return nil, fmt.Errorf("failed to extract meta from file: %v, with error: %v", fileToNormalize, err)
+ }
+ if hasKeys(newMeta) {
+ meta = newMeta
+ }
+ mutableContent = nextContent
+ }
+
+ transformedFile, err := ac.createTransformedFile(fileToNormalize, string(mutableContent))
+ if err != nil {
+ return nil, fmt.Errorf("failed to transform file: %v, with error: %v", fileToNormalize, err)
+ }
+
+ transformedFile.Meta = meta
+ normalizedFiles = append(normalizedFiles, transformedFile)
+ }
+ return normalizedFiles, nil
+}
+
+func (ac *AlvuConfig) ReadDir(dir string) (filepaths []string, err error) {
+ readFilepaths, err := recursiveRead(dir)
+ if err != nil {
+ return
+ }
+ sanitizedCollection := []string{}
+ for _, v := range readFilepaths {
+ if filepath.Base(v) == "_layout.html" {
+ continue
+ }
+ sanitizedCollection = append(sanitizedCollection, v)
+ }
+ return sanitizedCollection, nil
+}
+
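+// injectInSlot replaces the layout's <slot></slot> pair (or the legacy
+// {{.Content}} marker) with the rendered page content; layouts without a
+// slot are returned untouched.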
+func (ac *AlvuConfig) injectInSlot(htmlString string, replacement string) (string, error) {
+ if hasLegacySlot(htmlString) {
+ return injectInLegacySlot(htmlString, replacement), nil
+ }
+ slotStartPos := strings.Index(htmlString, slotStartTag)
+ slotEndPos := strings.Index(htmlString, slotEndTag)
+ if slotStartPos == -1 && slotEndPos == -1 {
+ return htmlString, nil
+ }
+ baseString := strings.Replace(htmlString, slotEndTag, "", 1)
+ return strings.Replace(baseString, slotStartTag, replacement, 1), nil
+}
+
+func (ac *AlvuConfig) NormalisePort(port string) string {
+ normalisedPort := port
+
+ if !strings.HasPrefix(normalisedPort, ":") {
+ normalisedPort = ":" + normalisedPort
+ }
+
+ return normalisedPort
+}
+
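+// StartServer exposes the compiled OutDir over HTTP and, while serving, a
+// /ws websocket endpoint that the injected live-reload snippet connects to.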
+func (ac *AlvuConfig) StartServer() error {
+ if !ac.Serve {
+ return nil
+ }
+
+ wsHandler := websocket.Handler(ac._webSocketHandler)
+ http.HandleFunc("/ws", func(w http.ResponseWriter, r *http.Request) {
+ // Check the request's 'Upgrade' header to see if it's a WebSocket request
+ if r.Header.Get("Upgrade") != "websocket" {
+ http.Error(w, "Not a WebSocket handshake request", http.StatusBadRequest)
+ return
+ }
+
+ // Upgrade the HTTP connection to a WebSocket connection
+ wsHandler.ServeHTTP(w, r)
+ })
+
+ normalisedPort := ac.NormalisePort(ac.PortNumber)
+
+ http.Handle("/", http.HandlerFunc(ac.ServeHandler))
+ ac.logger.Info(fmt.Sprintf("Starting Server - %v:%v", "http://localhost", ac.PortNumber))
+
+ err := http.ListenAndServe(normalisedPort, nil)
+ if err != nil {
+ if strings.Contains(err.Error(), "address already in use") {
+ ac.logger.Error("port already in use, use another port with the `-port` flag instead")
+ }
+ return err
+ }
+
+ return nil
+}
+
+// _webSocketHandler Internal function to setup a listener loop
+// for the live reload setup
+func (ac *AlvuConfig) _webSocketHandler(ws *websocket.Conn) {
+ // collect connections
+ ac.connections[ws] = struct{}{}
+
+ defer ws.Close()
+ // message loop, till connection breaks
+ for {
+ var msg string
+ err := websocket.Message.Receive(ws, &msg)
+ if err != nil {
+ delete(ac.connections, ws)
+ break
+ }
+ }
+}
+
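+// ServeHandler resolves a request in order: the exact file inside OutDir, an
+// index.html inside a matching directory, the path with a .html suffix for
+// clean URLs, and finally the 404 handler.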
+func (ac *AlvuConfig) ServeHandler(rw http.ResponseWriter, req *http.Request) {
+ path := req.URL.Path
+
+ if path == "/" {
+ path = filepath.Join(ac.OutDir, "index.html")
+ http.ServeFile(rw, req, path)
+ return
+ }
+
+ // check if the requested file already exists
+ file := filepath.Join(ac.OutDir, path)
+ info, err := os.Stat(file)
+
+ // if not, check if it's a directory
+ // and if it's a directory, we look for
+ // a index.html inside the directory to return instead
+ if err == nil {
+ if info.Mode().IsDir() {
+ file = filepath.Join(ac.OutDir, path, "index.html")
+ _, err := os.Stat(file)
+ if err != nil {
+ ac.notFoundHandler(rw, req)
+ return
+ }
+ }
+
+ http.ServeFile(rw, req, file)
+ return
+ }
+
+ // if neither a directory or file was found
+ // try a secondary case where the file might be missing
+ // a `.html` extension for cleaner url so append a .html
+ // to look for the file.
+ if err != nil {
+ file := filepath.Join(ac.OutDir, normalizeStaticLookupPath(path))
+ _, err := os.Stat(file)
+
+ if err != nil {
+ ac.notFoundHandler(rw, req)
+ return
+ }
+
+ http.ServeFile(rw, req, file)
+ return
+ }
+
+ ac.notFoundHandler(rw, req)
+}
+
+func (ac *AlvuConfig) notFoundHandler(w http.ResponseWriter, r *http.Request) {
+ var notFoundPageExists bool
+ filePointer, err := os.Stat(filepath.Join(ac.OutDir, "404.html"))
+
+ // only report a custom 404 page when the stat succeeded and the file has
+ // content; otherwise filePointer is nil and must not be dereferenced
+ if err == nil && filePointer.Size() > 0 {
+ notFoundPageExists = true
+ }
+
+ if notFoundPageExists {
+ compiledNotFoundFile := filepath.Join(ac.OutDir, "404.html")
+ notFoundFile, err := os.ReadFile(compiledNotFoundFile)
+ if err != nil {
+ http.Error(w, "404, Page not found....", http.StatusNotFound)
+ return
+ }
+ w.Header().Set("Content-Type", "text/html; charset=utf-8")
+ w.WriteHeader(http.StatusNotFound)
+ w.Write(notFoundFile)
+ return
+ }
+
+ http.Error(w, "404, Page not found....", http.StatusNotFound)
+}
+
+func normalizeStaticLookupPath(path string) string {
+ if strings.HasSuffix(path, ".html") {
+ return path
+ }
+ return path + ".html"
+}
+
+func recursiveRead(dir string) (filepaths []string, err error) {
+ dirEntry, err := os.ReadDir(
+ dir,
+ )
+
+ if err != nil {
+ return
+ }
+
+ for _, de := range dirEntry {
+ if de.IsDir() {
+ subDirs, err := recursiveRead(filepath.Join(dir, de.Name()))
+ if err != nil {
+ return filepaths, err
+ }
+ filepaths = append(filepaths, subDirs...)
+ } else {
+ filepaths = append(filepaths, filepath.Join(dir, de.Name()))
+ }
+ }
+
+ return
+}
+
+func hasLegacySlot(htmlString string) bool {
+ return strings.Contains(htmlString, contentTag)
+}
+
+func injectInLegacySlot(htmlString string, replacement string) string {
+ contentTagPos := strings.Index(htmlString, contentTag)
+ if contentTagPos == -1 {
+ return htmlString
+ }
+ return strings.Replace(htmlString, contentTag, replacement, 1)
+}
+
+func injectWebsocketConnection(htmlString string, port string) string {
+ // minimal live-reload client (reconstructed sketch): reloads the page when the /ws endpoint sends "reload"
+ return htmlString + fmt.Sprintf(`<script>new WebSocket("ws://localhost:%v/ws").addEventListener("message", function (e) { if (e.data === "reload") { window.location.reload() } })</script>`, port)
+}
+
+func hasKeys(i map[string]interface{}) bool {
+ return len(i) > 0
+}
diff --git a/pkg/alvu/hooks.go b/pkg/alvu/hooks.go
new file mode 100644
index 0000000..60df1f4
--- /dev/null
+++ b/pkg/alvu/hooks.go
@@ -0,0 +1,287 @@
+package alvu
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "io/fs"
+ "net/http"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/barelyhuman/alvu/transformers"
+ lua "github.com/yuin/gopher-lua"
+
+ luaAlvu "github.com/barelyhuman/alvu/lua/alvu"
+ ghttp "github.com/cjoudrey/gluahttp"
+ stringsLib "github.com/vadv/gopher-lua-libs/strings"
+ yamlLib "github.com/vadv/gopher-lua-libs/yaml"
+ luajson "layeh.com/gopher-json"
+)
+
+type HookSource struct {
+ luaState *lua.LState
+ filename string
+ ForSingleFile bool
+ ForFile string
+}
+
+type Hooks struct {
+ ac *AlvuConfig
+ collection []*HookSource
+ forSpecificFiles map[string][]*HookSource
+
+ _legacyTransformLogSent bool
+}
+
+type HookedFile struct {
+ transformers.TransformedFile
+ content []byte
+ transform string
+ data map[string]interface{}
+ extras map[string]interface{}
+}
+
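+// Load discovers every .lua file under the configured hooks directory and
+// splits them into hooks that run for all files and hooks that declare a
+// `ForFile` global to target a single page.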
+func (h *Hooks) Load() {
+ hookDir := filepath.Clean(filepath.Join(h.ac.RootPath, h.ac.HookDir))
+ h.ac.logger.Debug(fmt.Sprintf("hookDir: %v\n", hookDir))
+ hookFiles := []string{}
+ _, err := os.Stat(hookDir)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return
+ }
+
+ readHookDirError(err, hookDir, h.ac.logger)
+ }
+
+ file, err := os.Open(hookDir)
+ readHookDirError(err, hookDir, h.ac.logger)
+ defer file.Close()
+ childs, err := file.Readdirnames(1)
+ if err != nil {
+ if err == io.EOF {
+ return
+ }
+ readHookDirError(err, hookDir, h.ac.logger)
+ }
+
+ if len(childs) == 0 {
+ return
+ }
+
+ filepath.WalkDir(hookDir, func(path string, d fs.DirEntry, err error) error {
+ if err != nil {
+ h.ac.logger.Error(fmt.Sprintf("Issue reading %v, with error: %v", path, err))
+ return nil
+ }
+ if d.IsDir() {
+ return nil
+ }
+ if filepath.Ext(path) != ".lua" {
+ return nil
+ }
+ hookFiles = append(hookFiles, filepath.Join(path))
+ return nil
+ })
+
+ h.forSpecificFiles = map[string][]*HookSource{}
+ for _, filename := range hookFiles {
+ hookSource := h.readHookFile(filename, h.ac.RootPath, h.ac.logger)
+ if hookSource.ForSingleFile {
+ if h.forSpecificFiles[hookSource.ForFile] == nil {
+ h.forSpecificFiles[hookSource.ForFile] = []*HookSource{}
+ }
+ h.forSpecificFiles[hookSource.ForFile] = append(h.forSpecificFiles[hookSource.ForFile], hookSource)
+ } else {
+ h.collection = append(h.collection, hookSource)
+ }
+ }
+}
+
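+// runLifeCycleHooks calls the named lifecycle function (for example OnStart
+// or OnFinish) on every loaded hook in filename order, skipping hooks that
+// do not define it.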
+func (h *Hooks) runLifeCycleHooks(hookType string) error {
+ keys := make([]string, 0, len(h.forSpecificFiles))
+ for k := range h.forSpecificFiles {
+ keys = append(keys, k)
+ }
+
+ localCollection := []*HookSource{}
+ localCollection = append(localCollection, h.collection...)
+
+ for _, v := range keys {
+ localCollection = append(localCollection, h.forSpecificFiles[v]...)
+ }
+
+ sort.Slice(localCollection, func(i, j int) bool {
+ return strings.Compare(localCollection[i].filename, localCollection[j].filename) == -1
+ })
+
+ for i := range localCollection {
+ s := localCollection[i]
+ hookFunc := s.luaState.GetGlobal(hookType)
+
+ if hookFunc == lua.LNil {
+ continue
+ }
+
+ if err := s.luaState.CallByParam(lua.P{
+ Fn: hookFunc,
+ NRet: 0,
+ Protect: true,
+ }); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
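+// readHookFile boots a fresh lua state for a hook file, preloading the alvu,
+// json, yaml, strings and http modules, and records whether the hook scopes
+// itself to a single file through the `ForFile` global.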
+func (h *Hooks) readHookFile(filename string, basepath string, logger Logger) *HookSource {
+ lState := lua.NewState()
+ luaAlvu.Preload(lState)
+ luajson.Preload(lState)
+ yamlLib.Preload(lState)
+ stringsLib.Preload(lState)
+ lState.PreloadModule("http", ghttp.NewHttpModule(&http.Client{}).Loader)
+
+ if basepath == "." {
+ lState.SetGlobal("workingdir", lua.LString(""))
+ } else {
+ lState.SetGlobal("workingdir", lua.LString(basepath))
+ }
+
+ if err := lState.DoFile(filename); err != nil {
+ logger.Error(fmt.Sprintf("Failed to execute hook: %v, with error: %v\n", filename, err))
+ panic("")
+ }
+ forFile := lState.GetGlobal("ForFile")
+ forFileValue := forFile.String()
+ return &HookSource{
+ filename: filename,
+ luaState: lState,
+ ForSingleFile: forFileValue != "nil",
+ ForFile: forFileValue,
+ }
+}
+
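+// ProcessFile runs each applicable hook's `Writer` function against a
+// transformed file. A hook receives a JSON payload with `name`,
+// `source_path`, `meta` and `content`, and may return JSON carrying new
+// `content`, a `transform` extension, plus `data` and `extras` maps that are
+// merged into the page's render data.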
+func (h *Hooks) ProcessFile(file transformers.TransformedFile) (hookedFile HookedFile) {
+ hookedFile.TransformedFile = file
+
+ fileData, _ := os.ReadFile(file.TransformedFile)
+ hookedFile.content = fileData
+
+ fileTargetName := strings.TrimPrefix(
+ file.SourcePath,
+ filepath.Join(h.ac.RootPath, "pages"),
+ )
+ fileTargetName = filepath.Clean(strings.TrimPrefix(fileTargetName, "/"))
+ fileTargetName = strings.Replace(fileTargetName, filepath.Ext(fileTargetName), ".html", 1)
+ fileTargetName = strings.TrimSpace(fileTargetName)
+
+ hookInput := struct {
+ Name string `json:"name"`
+ SourcePath string `json:"source_path"`
+ // DestPath string `json:"dest_path"`
+ Meta map[string]interface{} `json:"meta"`
+ WriteableContent string `json:"content"`
+ // HTMLContent string `json:"html"`
+ }{
+ Name: fileTargetName,
+ SourcePath: file.SourcePath,
+ Meta: file.Meta,
+ }
+
+ localCollection := []*HookSource{}
+
+ nonRootPath := strings.TrimPrefix(file.SourcePath, filepath.Join(h.ac.RootPath, "pages"))
+ nonRootPath = strings.TrimPrefix(nonRootPath, "/")
+
+ if len(h.forSpecificFiles[nonRootPath]) > 0 {
+ localCollection = append(localCollection, h.forSpecificFiles[nonRootPath]...)
+ }
+ localCollection = append(localCollection, h.collection...)
+
+ sort.Slice(localCollection, func(i, j int) bool {
+ return strings.Compare(localCollection[i].filename, localCollection[j].filename) == -1
+ })
+
+ for i := range localCollection {
+ hook := localCollection[i]
+ hookFunc := hook.luaState.GetGlobal("Writer")
+
+ hookInput.WriteableContent = string(hookedFile.content)
+ hookJsonInput, _ := json.Marshal(hookInput)
+
+ if hookFunc == lua.LNil {
+ continue
+ }
+
+ if err := hook.luaState.CallByParam(lua.P{
+ Fn: hookFunc,
+ NRet: 1,
+ Protect: true,
+ }, lua.LString(hookJsonInput)); err != nil {
+ h.ac.logger.Error(fmt.Sprintf("Failed to execute %v's Writer on %v, with err: %v", hook.filename, file.SourcePath, err))
+ panic("")
+ }
+
+ ret := hook.luaState.Get(-1)
+
+ var fromPlug map[string]interface{}
+
+ err := json.Unmarshal([]byte(ret.String()), &fromPlug)
+ if err != nil {
+ h.ac.logger.Error(fmt.Sprintf("Invalid return value in hook %v", hook.filename))
+ return
+ }
+
+ if fromPlug["content"] != nil {
+ stringVal := fmt.Sprintf("%s", fromPlug["content"])
+ hookedFile.content = []byte(stringVal)
+ }
+
+ if fromPlug["transform"] != nil {
+ hookedFile.transform = fmt.Sprintf("%v", fromPlug["transform"])
+ } else {
+ if !h._legacyTransformLogSent {
+ h.ac.logger.Warning("Auto transformation of content returned from the hooks will be removed in v0.3,\n please return a `transform` property from the hooks instead.")
+ h._legacyTransformLogSent = true
+ }
+ hookedFile.transform = ".md"
+ }
+
+ if fromPlug["data"] != nil {
+ hookedFile.data = mergeMapWithCheck(hookedFile.data, fromPlug["data"])
+ }
+
+ if fromPlug["extras"] != nil {
+ hookedFile.extras = mergeMapWithCheck(hookedFile.extras, fromPlug["extras"])
+ }
+
+ hook.luaState.Pop(1)
+ }
+ return
+}
+
+func readHookDirError(err error, directory string, logger Logger) {
+ if err == nil {
+ return
+ }
+ logger.Error(
+ fmt.Sprintf("Failed to read the hooks dir: %v, with error: %v\n", directory, err),
+ )
+ panic("")
+}
+
+func mergeMapWithCheck(maps ...any) (source map[string]interface{}) {
+ source = map[string]interface{}{}
+ for _, toCheck := range maps {
+ if pairs, ok := toCheck.(map[string]interface{}); ok {
+ for k, v := range pairs {
+ source[k] = v
+ }
+ }
+ }
+ return source
+}
diff --git a/pkg/alvu/logger.go b/pkg/alvu/logger.go
new file mode 100644
index 0000000..67511bd
--- /dev/null
+++ b/pkg/alvu/logger.go
@@ -0,0 +1,50 @@
+package alvu
+
+import (
+ "fmt"
+
+ "github.com/barelyhuman/go/color"
+ "github.com/barelyhuman/go/env"
+)
+
+type Logger struct {
+ LogPrefix string
+}
+
+func NewLogger() Logger {
+ return Logger{}
+}
+
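+// Debug prints only when the DEBUG environment variable is set to a value
+// other than "false".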
+func (l *Logger) Debug(msg string) {
+ env := env.Get("DEBUG", "false")
+ if env == "false" {
+ return
+ }
+ cs := color.ColorString{}
+ cs.Gray(l.LogPrefix).Reset(" ").Gray("-").Reset(" ").Gray(msg)
+ fmt.Println(cs.String())
+}
+
+func (l *Logger) Success(msg string) {
+ cs := color.ColorString{}
+ cs.Gray(l.LogPrefix).Reset(" ").Green("✔").Reset(" ").Green(msg)
+ fmt.Println(cs.String())
+}
+
+func (l *Logger) Info(msg string) {
+ cs := color.ColorString{}
+ cs.Gray(l.LogPrefix).Reset(" ").Cyan("ℹ").Reset(" ").Cyan(msg)
+ fmt.Println(cs.String())
+}
+
+func (l *Logger) Warning(msg string) {
+ cs := color.ColorString{}
+ cs.Gray(l.LogPrefix).Reset(" ").Yellow(msg)
+ fmt.Println(cs.String())
+}
+
+func (l *Logger) Error(msg string) {
+ cs := color.ColorString{}
+ cs.Gray(l.LogPrefix).Reset(" ").Red(msg)
+ fmt.Println(cs.String())
+}
diff --git a/pkg/alvu/watcher.go b/pkg/alvu/watcher.go
new file mode 100644
index 0000000..0ad0f89
--- /dev/null
+++ b/pkg/alvu/watcher.go
@@ -0,0 +1,54 @@
+package alvu
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/barelyhuman/go/poller"
+)
+
+type Watcher struct {
+ poller *poller.Poller
+ logger Logger
+ recompile chan string
+}
+
+type HookFn func(path string)
+
+func NewWatcher() *Watcher {
+ return &Watcher{
+ poller: poller.NewPollWatcher(2000),
+ recompile: make(chan string, 1),
+ }
+}
+
+func (p *Watcher) AddDir(path string) {
+ p.poller.Add(path)
+}
+
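+// Start begins polling the registered directories and forwards every change
+// event for a still-existing file onto the recompile channel.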
+func (p *Watcher) Start() {
+ go p.poller.Start()
+ go func() {
+ for {
+ select {
+ case evt := <-p.poller.Events:
+ _, err := os.Stat(evt.Path)
+
+ p.logger.Debug(fmt.Sprintf("Change Event: %v", evt.Path))
+
+ // Do nothing if the file doesn't exist, just continue
+ if err != nil {
+ if os.IsNotExist(err) {
+ continue
+ }
+ p.logger.Error(err.Error())
+ }
+
+ p.recompile <- evt.Path
+ continue
+ case err := <-p.poller.Errors:
+ p.logger.Error(err.Error())
+ }
+ }
+ }()
+}
diff --git a/transformers/html/html.go b/transformers/html/html.go
new file mode 100644
index 0000000..64d5391
--- /dev/null
+++ b/transformers/html/html.go
@@ -0,0 +1,15 @@
+package html
+
+type HTMLTransformer struct{}
+
+func (mt *HTMLTransformer) TransformContent(input []byte) (result []byte, err error) {
+ result = input
+ return
+}
+
+func (mt *HTMLTransformer) ExtractMeta(input []byte) (result map[string]interface{}, content []byte, err error) {
+ result = map[string]interface{}{}
+ return
+}
+
+func (mt *HTMLTransformer) Init() {}
diff --git a/transformers/markdown/markdown.go b/transformers/markdown/markdown.go
new file mode 100644
index 0000000..da10257
--- /dev/null
+++ b/transformers/markdown/markdown.go
@@ -0,0 +1,164 @@
+package markdown
+
+import (
+ "bytes"
+ "fmt"
+ "net/url"
+ "strings"
+
+ "github.com/barelyhuman/go/color"
+ "github.com/yuin/goldmark"
+ highlighting "github.com/yuin/goldmark-highlighting"
+ "github.com/yuin/goldmark/ast"
+ "github.com/yuin/goldmark/extension"
+ "github.com/yuin/goldmark/parser"
+ "github.com/yuin/goldmark/renderer"
+ "github.com/yuin/goldmark/renderer/html"
+ "github.com/yuin/goldmark/text"
+ "github.com/yuin/goldmark/util"
+ "gopkg.in/yaml.v3"
+)
+
+type MarkdownTransformer struct {
+ processor goldmark.Markdown
+ EnableHardWrap bool
+ EnableHighlighting bool
+ HighlightingTheme string
+ BaseURL string
+}
+
+func (mt *MarkdownTransformer) TransformContent(input []byte) (result []byte, err error) {
+ mt.EnsureProcessor()
+
+ var buffer bytes.Buffer
+ _, content, err := mt.ExtractMeta(input)
+
+ if err != nil {
+ return
+ }
+
+ err = mt.processor.Convert(content, &buffer)
+ if err != nil {
+ return
+ }
+
+ result = buffer.Bytes()
+ return
+}
+
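+// ExtractMeta splits optional YAML frontmatter, delimited by `---` markers at
+// the top of the file, from the markdown body. An illustrative page:
+//
+//	---
+//	title: Hello
+//	---
+//	# Page body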
+func (mt *MarkdownTransformer) ExtractMeta(input []byte) (result map[string]interface{}, content []byte, err error) {
+ result = map[string]interface{}{}
+ sep := []byte("---")
+
+ content = input
+
+ if !bytes.HasPrefix(input, sep) {
+ return
+ }
+
+ metaParts := bytes.SplitN(content, sep, 3)
+ if len(metaParts) > 2 {
+ err = yaml.Unmarshal([]byte(metaParts[1]), &result)
+ if err != nil {
+ return
+ }
+ content = metaParts[2]
+ }
+ return
+}
+
+func (mt *MarkdownTransformer) EnsureProcessor() {
+ if mt.processor != nil {
+ return
+ }
+
+ rendererOptions := []renderer.Option{
+ html.WithXHTML(),
+ html.WithUnsafe(),
+ }
+
+ if mt.EnableHardWrap {
+ rendererOptions = append(rendererOptions, html.WithHardWraps())
+ }
+
+ linkRewriter := &relativeLinkRewriter{
+ baseURL: mt.BaseURL,
+ }
+
+ gmPlugins := []goldmark.Option{
+ goldmark.WithExtensions(extension.GFM, extension.Footnote),
+ goldmark.WithParserOptions(
+ parser.WithASTTransformers(util.Prioritized(linkRewriter, 100)),
+ parser.WithAutoHeadingID(),
+ ),
+ goldmark.WithRendererOptions(
+ rendererOptions...,
+ ),
+ }
+
+ if mt.EnableHighlighting {
+ gmPlugins = append(gmPlugins, goldmark.WithExtensions(
+ highlighting.NewHighlighting(
+ highlighting.WithStyle(mt.HighlightingTheme),
+ ),
+ ))
+ }
+
+ mt.processor = goldmark.New(gmPlugins...)
+}
+
+type relativeLinkRewriter struct {
+ baseURL string
+}
+
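+// Transform rewrites relative markdown links while walking the AST: links
+// starting with "/" get the configured base URL prefixed, and the legacy
+// {{.Meta.BaseURL}} prefix is stripped (with a deprecation warning) before
+// the same rewrite is applied.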
+func (rlr *relativeLinkRewriter) Transform(doc *ast.Document, reader text.Reader, pctx parser.Context) {
+ ast.Walk(doc, func(node ast.Node, enter bool) (ast.WalkStatus, error) {
+ if !enter {
+ return ast.WalkContinue, nil
+ }
+
+ link, ok := node.(*ast.Link)
+ if !ok {
+ return ast.WalkContinue, nil
+ }
+
+ validURL, err := url.Parse(string(link.Destination))
+
+ // skip destinations that can't be parsed or that already point to an external target
+ if err != nil || validURL.Scheme == "http" || validURL.Scheme == "https" || validURL.Scheme == "mailto" {
+ return ast.WalkContinue, nil
+ }
+
+ if strings.HasPrefix(validURL.Path, "{{.Meta.BaseURL}}") {
+ newDestination, _ := url.JoinPath(
+ rlr.baseURL,
+ strings.TrimPrefix(validURL.Path, "{{.Meta.BaseURL}}"),
+ )
+ link.Destination = []byte(newDestination)
+ printMetaLinkWarning()
+ } else if strings.HasPrefix(validURL.Path, "/") {
+ // from root
+ newDestination, _ := url.JoinPath(
+ rlr.baseURL,
+ validURL.Path,
+ )
+ link.Destination = []byte(newDestination)
+ }
+
+ return ast.WalkSkipChildren, nil
+ })
+}
+
+// TODO: remove in v0.3
+var _warningPrinted bool = false
+
+// TODO: remove in v0.3
+func printMetaLinkWarning() {
+ if _warningPrinted {
+ return
+ }
+ _warningPrinted = true
+ warning := "{{.Meta.BaseURL}} is no longer needed in markdown files; links will be rewritten automatically.\n Use root-first links, e.g. pages/docs/some-topic.md would be linked as /docs/some-topic"
+ cs := color.ColorString{}
+ cs.Reset(" ").Yellow(warning)
+ fmt.Println(cs.String())
+}
diff --git a/transformers/transformers.go b/transformers/transformers.go
new file mode 100644
index 0000000..bbd8638
--- /dev/null
+++ b/transformers/transformers.go
@@ -0,0 +1,13 @@
+package transformers
+
+type TransformedFile struct {
+ SourcePath string
+ TransformedFile string
+ Extension string
+ Meta map[string]interface{}
+}
+
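+// Transfomer is the contract every file transformer implements:
+// TransformContent converts raw source bytes into HTML, and ExtractMeta
+// returns any frontmatter metadata along with the remaining content.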
+type Transfomer interface {
+ TransformContent(data []byte) ([]byte, error)
+ ExtractMeta(data []byte) (map[string]interface{}, []byte, error)
+}