Skip to content

Commit

Permalink
Merge branch 'uploads'
Browse files Browse the repository at this point in the history
  • Loading branch information
thijzert committed Feb 16, 2023
2 parents a0d68da + 4cbf8f2 commit f5a6d1f
Show file tree
Hide file tree
Showing 5 changed files with 253 additions and 9 deletions.
1 change: 1 addition & 0 deletions bin/journal-server/assets/js/app.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@ import "./update-timestamp.js";
import "./set-post-body-height.js";
import "./autosave-draft.js";
import "./wordcount.js";
import "./file-uploads.js";
99 changes: 99 additions & 0 deletions bin/journal-server/assets/js/file-uploads.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
(() => {
	// Chunked attachment uploads for the journal editor: files picked in
	// #ipt-file-upload are hashed (SHA-256), uploaded in CHUNKSIZE-byte POSTs
	// to journal/attachment, and a wiki-style link for each file is inserted
	// at the caret position in #ipt-body.
	const body_ipt = document.getElementById("ipt-body");
	const file_ipt = document.getElementById("ipt-file-upload");
	const file_list = document.getElementById("list-of-attached-files");

	if ( !file_ipt || !file_ipt.files || !file_list ) {
		return;
	}

	const CHUNKSIZE = 125000;

	let attachments = {};

	// upload_buf uploads the next chunk of the attachment identified by hash,
	// updates its progress bar, and recurses until the buffer is exhausted.
	const upload_buf = async (hash) => {
		const file = attachments[hash];
		if ( !file ) {
			return;
		}

		let chunk = file.buf.slice(file.offset, file.offset+CHUNKSIZE);
		if ( chunk.byteLength == 0 ) {
			// All chunks sent: swap the progress bar for a check mark
			file.pr.parentNode.appendChild(document.createTextNode("√"));
			file.pr.remove();
			return;
		}

		// Carry over the current query string (e.g. the API key parameter)
		// to the upload URL, then tack on the attachment hash.
		let this_url = new URL(location.href);
		let att_url = new URL("journal/attachment", location.href);
		for ( let k of this_url.searchParams.keys() ) {
			att_url.searchParams.set(k, this_url.searchParams.get(k));
		}
		att_url.searchParams.set("att_hash", file.hash);

		let q = await fetch(att_url, {method: "POST", body: chunk});
		q = await q.json();
		if ( !q.ok ) {
			// Server rejected the chunk: mark the upload as failed
			console.error(q);
			file.pr.parentNode.appendChild(document.createTextNode("×"));
			file.pr.remove();
			return;
		}

		file.pr.max = Math.ceil(file.buf.byteLength / CHUNKSIZE);
		file.pr.value = Math.floor(file.offset / CHUNKSIZE);
		file.offset += chunk.byteLength;

		return await upload_buf(hash);
	}

	file_ipt.addEventListener("input", async (e) => {
		// Snapshot the FileList before clearing the input
		let files = [];
		for ( let f of file_ipt.files ) {
			files.push(f);
		}
		file_ipt.value = null;

		let insert_text = "";
		let headstart = 0;
		for ( let f of files ) {
			let buf = await f.arrayBuffer();
			let hashBuffer = await crypto.subtle.digest('SHA-256', buf);
			let hash = Array.from(new Uint8Array(hashBuffer)).map((b) => b.toString(16).padStart(2, '0')).join('');

			// Already queued/uploaded: skip duplicates
			if ( hash in attachments ) {
				continue;
			}

			// Images get an inline-image link; everything else a named link
			if ( f.type.slice(0,6) == "image/" ) {
				insert_text += `\n{{./${hash}}}`
			} else {
				insert_text += `\n[[./${hash}|${f.name}]]`
			}

			// List entry: checkbox (named attachment-<hash>, so the save
			// handler knows which attachments to keep) plus a progress bar.
			let li = document.createElement("li");
			let lbl = document.createElement("label");
			lbl.textContent = f.name;
			let check = document.createElement("input");
			check.type = "checkbox";
			check.name = "attachment-"+hash;
			check.checked = true;
			lbl.insertBefore(check, lbl.firstChild);
			li.appendChild(lbl);

			let pr = document.createElement("progress");
			li.appendChild(pr);

			file_list.appendChild(li);
			let offset = 0;
			attachments[hash] = {hash, buf, pr, li, offset};
			// Stagger upload starts slightly so they don't all fire at once
			window.setTimeout(() => { upload_buf(hash); }, headstart);
			headstart += 200;
		}

		// Insert the links at the caret position.
		// BUG FIX: the original sliced (0, body_ipt.selectedIndex) for BOTH
		// halves — and selectedIndex is undefined on a textarea (it belongs
		// to <select>), so slice(0, undefined) returned the whole value and
		// the entire body text was duplicated. Use selectionStart and keep
		// the tail after the caret.
		if ( insert_text != "" && body_ipt ) {
			const caret = (typeof body_ipt.selectionStart == "number") ? body_ipt.selectionStart : body_ipt.value.length;
			body_ipt.value = body_ipt.value.slice(0, caret) + insert_text + body_ipt.value.slice(caret);
		}
	})
})();
8 changes: 8 additions & 0 deletions bin/journal-server/assets/templates/editor.html
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,14 @@
<input type="hidden" id="ipt-draft-id" name="draft_id" />
<input type="submit" value="Save" />
</p>
{{if .CanAttachFiles}}
<ul id="list-of-attached-files"></ul>
<p>
<label>Attach a file: <input type="file" multiple id="ipt-file-upload" placeholder="Attach files..." /></label>
</p>
{{end}}
</form>
<section class="indicator-tray">
<aside class="save-status"></aside>
Expand Down
134 changes: 125 additions & 9 deletions bin/journal-server/journal-server.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@ import (
"context"
"crypto/rand"
"encoding/hex"
"encoding/json"
"errors"
"flag"
"fmt"
"io"
"log"
"net"
"net/http"
Expand All @@ -27,6 +28,7 @@ var (
journal_file = flag.String("journal_file", "journal.txt", "Use this file for Journal storage")
password_file = flag.String("password_file", ".htpasswd", "File containing passwords")
secret_parameter = flag.String("secret_parameter", "apikey", "Parameter name containing the API key")
attachments_dir = flag.String("attachments_dir", "", "Directory for storing attached files")
)

// DraftTimeout measures how long it takes for an unsaved draft to get added to the journal.
Expand All @@ -46,9 +48,26 @@ var (
drafts map[string]draftEntry
)

// AttachmentTimeout measures how long an attachment should remain cached
const AttachmentTimeout time.Duration = 30 * time.Minute

// AttachmentPurgeInterval is how often the purge loop checks for expired attachments
const AttachmentPurgeInterval time.Duration = 5 * time.Minute

// attachmentEntry is an in-memory attachment upload in progress: the bytes
// accumulated so far and the deadline after which it may be purged.
type attachmentEntry struct {
	PurgeAt time.Time
	Buf []byte
}

var (
	// attachmentMutex guards all access to the attachments map.
	attachmentMutex sync.Mutex
	attachments map[string]attachmentEntry
)

// init parses command-line flags and initialises the package-level maps.
// NOTE(review): calling flag.Parse in init() runs before main and makes the
// package awkward to test; consider moving the Parse call into main().
func init() {
	flag.Parse()
	drafts = make(map[string]draftEntry)
	attachments = make(map[string]attachmentEntry)
}

func main() {
Expand All @@ -59,6 +78,7 @@ func main() {
}
func run() error {
r := mux.NewRouter()
r.Methods("POST").Path("/journal/attachment").HandlerFunc(RequireLoggedIn(FileUploadHandler))
r.Methods("POST").Path("/journal/draft").HandlerFunc(RequireLoggedIn(SaveDraftHandler))
r.Methods("GET").Path("/journal").HandlerFunc(RequireLoggedIn(WriterHandler))
r.Methods("POST").Path("/journal").HandlerFunc(RequireLoggedIn(SaveHandler))
Expand All @@ -80,6 +100,7 @@ func run() error {
defer cancel()

go autoAddDrafts(ctx)
go autoPurgeAttachments(ctx)

var lc net.ListenConfig
var err error
Expand Down Expand Up @@ -159,6 +180,32 @@ func autoAddDrafts(ctx context.Context) {
}
}

// autoPurgeAttachments periodically deletes cached attachments whose PurgeAt
// deadline has passed. It runs until ctx is cancelled.
func autoPurgeAttachments(ctx context.Context) {
	ticker := time.NewTicker(AttachmentPurgeInterval)
	defer ticker.Stop()

	for {
		select {
		case <-ctx.Done():
			// BUG FIX: the original used `break`, which only exits the
			// select statement, not the for loop — the goroutine (and its
			// ticker) leaked on shutdown. `return` actually stops it.
			return
		case <-ticker.C:
			now := time.Now()
			attachmentMutex.Lock()
			// Deleting map entries during range iteration is safe in Go.
			for att_hash, entry := range attachments {
				if entry.PurgeAt.After(now) {
					continue
				}
				log.Printf("Deleting attachment with hash '%s'", att_hash)
				delete(attachments, att_hash)
			}
			attachmentMutex.Unlock()
		}
	}
}

func IndexHandler(w http.ResponseWriter, r *http.Request) {
indexData := struct {
}{}
Expand All @@ -172,16 +219,18 @@ func WriterHandler(w http.ResponseWriter, r *http.Request) {
getv.Del("success")
getv.Del("failure")

homeData := struct {
pageData := struct {
Success, Failure bool
Callback string
CanAttachFiles bool
}{
r.URL.Query().Get("success") != "",
r.URL.Query().Get("failure") != "",
"journal?" + getv.Encode(),
*attachments_dir != "",
}

executeTemplate(editor, homeData, w, r)
executeTemplate(editor, pageData, w, r)
}

func DailyHandler(w http.ResponseWriter, r *http.Request) {
Expand Down Expand Up @@ -223,6 +272,32 @@ func SaveHandler(w http.ResponseWriter, r *http.Request) {
body = body[0 : len(body)-1]
}

var nonFatalError error

if *attachments_dir != "" {
attachmentMutex.Lock()
defer attachmentMutex.Unlock()

for att_hash, entry := range attachments {
if r.PostFormValue("attachment-"+att_hash) == "" {
continue
}

delete(attachments, att_hash)

// Link the attachment in the post body
body = fmt.Sprintf("%s\n@attachment %s", body, att_hash)

f, err := os.Create(path.Join(*attachments_dir, att_hash))
if err != nil {
nonFatalError = err
continue
}
f.Write(entry.Buf)
f.Close()
}
}

err := saveJournalEntry(timestamp, body, starred)
if err != nil {
errorHandler(err, w, r)
Expand All @@ -238,7 +313,12 @@ func SaveHandler(w http.ResponseWriter, r *http.Request) {

getv := r.URL.Query()
getv.Del("failure")
getv.Set("success", "1")
getv.Del("success")
if nonFatalError != nil {
getv.Set("failure", "1")
} else {
getv.Set("success", "1")
}

w.Header().Set("Location", path.Base(r.URL.Path)+"?"+getv.Encode())
w.WriteHeader(http.StatusFound)
Expand Down Expand Up @@ -270,13 +350,49 @@ func SaveDraftHandler(w http.ResponseWriter, r *http.Request) {
}
}

rv := struct {
writeJSON(w, struct {
OK int `json:"ok"`
Message string `json:"_"`
DraftID string `json:"draft_id"`
}{1, "Draft saved", draft_id}
}{1, "Draft saved", draft_id})
}

w.Header().Set("Content-Type", "application/json")
enc := json.NewEncoder(w)
enc.Encode(rv)
// FileUploadHandler accepts one chunk of an attachment upload (the raw POST
// body) and appends it to the in-memory buffer keyed by the client-supplied
// SHA-256 hash, refreshing the purge deadline. It responds with the total
// number of bytes buffered so far for that attachment.
func FileUploadHandler(w http.ResponseWriter, r *http.Request) {
	if *attachments_dir == "" {
		writeJSONError(w, 503, 503, "This feature is not available")
		return
	}

	// The hash must look like a hex-encoded SHA-256 digest (64 hex chars);
	// it is later used as a file name, so reject anything else.
	att_hash := r.URL.Query().Get("att_hash")
	if len(att_hash) != 64 {
		writeJSONError(w, 400, 400, "Invalid attachment hash")
		return
	}
	if _, err := hex.DecodeString(att_hash); err != nil {
		writeJSONError(w, 400, 400, "Invalid attachment hash")
		return
	}

	chunk, err := io.ReadAll(r.Body)
	if err != nil {
		// BUG FIX: the original Printf had a %v verb but no argument
		log.Printf("Error reading chunk: %v", err)
		writeJSONError(w, 400, 400, "Error reading chunk")
		return
	}

	attachmentMutex.Lock()

	e := attachments[att_hash]
	e.PurgeAt = time.Now().Add(AttachmentTimeout)
	e.Buf = append(e.Buf, chunk...)
	file_length := len(e.Buf)
	attachments[att_hash] = e

	attachmentMutex.Unlock()

	writeJSON(w, struct {
		OK int `json:"ok"`
		Message string `json:"_"`
		Length int `json:"file_length"`
	}{1, "Chunk saved", file_length})
}
20 changes: 20 additions & 0 deletions bin/journal-server/plumbing.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package main

import (
"encoding/json"
"flag"
"html/template"
"log"
Expand Down Expand Up @@ -117,3 +118,22 @@ func errorHandler(e error, w http.ResponseWriter, r *http.Request) {
w.Write([]byte("TODO: error handling\n"))
w.Write([]byte(e.Error()))
}

// writeJSON serializes val as JSON to w with the appropriate Content-Type.
// Encoding errors are logged; by the time Encode fails the header has
// already been sent, so there is nothing better to do with the error.
func writeJSON(w http.ResponseWriter, val any) {
	w.Header().Set("Content-Type", "application/json")
	if err := json.NewEncoder(w).Encode(val); err != nil {
		log.Printf("writing JSON response: %v", err)
	}
}

// writeJSONError writes a JSON error document and sets the HTTP status to
// statusCode. errorCode is echoed in the body's "error" field and message in
// its "_" field; "ok" is always 0 so clients can branch on it.
func writeJSONError(w http.ResponseWriter, statusCode int, errorCode int, message string) {
	val := struct {
		Error int `json:"error"`
		Message string `json:"_"`
		OK int `json:"ok"`
	}{errorCode, message, 0}

	// Headers must be set before WriteHeader; the body follows.
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(statusCode)
	if err := json.NewEncoder(w).Encode(val); err != nil {
		// The status line is already gone, so just log the failure
		log.Printf("writing JSON error response: %v", err)
	}
}

0 comments on commit f5a6d1f

Please sign in to comment.