gosora/common/files.go

package common
import (
"bytes"
"compress/gzip"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"io/ioutil"
"mime"
"net/http"
"net/url"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
tmpl "git.tuxpa.in/a/gosora/tmpl_client"
"github.com/andybalholm/brotli"
)
//type SFileList map[string]*SFile
//type SFileListShort map[string]*SFile
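// StaticFiles is the global registry of static files, served under the /s/ prefix.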
var StaticFiles = SFileList{"/s/", make(map[string]*SFile), make(map[string]*SFile)}
//var StaticFilesShort SFileList = make(map[string]*SFile)
var staticFileMutex sync.RWMutex
// ? Is it efficient to have two maps for this?
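// SFileList indexes static files by both their full prefixed path (Long) and their prefix-stripped name (Short).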
type SFileList struct {
Prefix string
Long map[string]*SFile
Short map[string]*SFile
}
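// SFile holds a static file's raw contents, its pre-compressed gzip and brotli variants, checksums used for CSPs and cache busting, and cached length, mimetype and modification-time metadata.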
type SFile struct {
// TODO: Move these to the end?
Data []byte
GzipData []byte
BrData []byte
Sha256 string
Sha256I string
OName string
Pos int64
Length int64
StrLength string
GzipLength int64
StrGzipLength string
BrLength int64
StrBrLength string
Mimetype string
Info os.FileInfo
FormattedModTime string
}
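// CSSData holds the phrase map passed to CSS templates.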
type CSSData struct {
Phrases map[string]string
}
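// JSTmplInit walks ./tmpl_client, rewrites each transpiled .jgo template into a JavaScript function via string replacement, pre-compresses the result, and registers it as a static .js file.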
func (l SFileList) JSTmplInit() error {
DebugLog("Initialising the client side templates")
return filepath.Walk("./tmpl_client", func(path string, f os.FileInfo, err error) error {
if f.IsDir() || strings.HasSuffix(path, "tmpl_list.go") || strings.HasSuffix(path, "stub.go") {
return nil
}
path = strings.Replace(path, "\\", "/", -1)
DebugLog("Processing client template " + path)
data, err := ioutil.ReadFile(path)
if err != nil {
return err
}
path = strings.TrimPrefix(path, "tmpl_client/")
tmplName := strings.TrimSuffix(path, ".jgo")
shortName := strings.TrimPrefix(tmplName, "tmpl_")
replace := func(data []byte, replaceThis, withThis string) []byte {
return bytes.Replace(data, []byte(replaceThis), []byte(withThis), -1)
}
rep := func(replaceThis, withThis string) {
data = replace(data, replaceThis, withThis)
}
startIndex, hasFunc := skipAllUntilCharsExist(data, 0, []byte("if(tmplInits===undefined)"))
if !hasFunc {
return errors.New("no init map found")
}
data = data[startIndex-len([]byte("if(tmplInits===undefined)")):]
rep("// nolint", "")
//rep("func ", "function ")
rep("func ", "function ")
rep(" error {\n", " {\nlet o=\"\"\n")
funcIndex, hasFunc := skipAllUntilCharsExist(data, 0, []byte("function Tmpl_"))
if !hasFunc {
return errors.New("no template function found")
}
spaceIndex, hasSpace := skipUntilIfExists(data, funcIndex, ' ')
if !hasSpace {
return errors.New("no spaces found after the template function name")
}
endBrace, hasBrace := skipUntilIfExists(data, spaceIndex, ')')
if !hasBrace {
return errors.New("no right brace found after the template function name")
}
fmt.Println("spaceIndex: ", spaceIndex)
fmt.Println("endBrace: ", endBrace)
fmt.Println("string(data[spaceIndex:endBrace]): ", string(data[spaceIndex:endBrace]))
preLen := len(data)
rep(string(data[spaceIndex:endBrace]), "")
rep("))\n", " \n")
endBrace -= preLen - len(data) // Offset it as we've deleted portions
fmt.Println("new endBrace: ", endBrace)
fmt.Println("data: ", string(data))
/*showPos := func(data []byte, index int) (out string) {
out = "["
for j, char := range data {
if index == j {
out += "[" + string(char) + "] "
} else {
out += string(char) + " "
}
}
return out + "]"
}*/
// ? Can we just use a regex? I'm considering something more efficient, or outright rolling wasm; this is a temporary hack in a place where performance doesn't particularly matter
each := func(phrase string, h func(index int)) {
//fmt.Println("find each '" + phrase + "'")
index := endBrace
if index < 0 {
panic("index under zero: " + strconv.Itoa(index))
}
var foundIt bool
for {
//fmt.Println("in index: ", index)
//fmt.Println("pos: ", showPos(data, index))
index, foundIt = skipAllUntilCharsExist(data, index, []byte(phrase))
if !foundIt {
break
}
h(index)
}
}
each("strconv.Itoa(", func(index int) {
braceAt, hasEndBrace := skipUntilIfExistsOrLine(data, index, ')')
if hasEndBrace {
data[braceAt] = ' ' // Blank it
}
})
each("[]byte(", func(index int) {
braceAt, hasEndBrace := skipUntilIfExistsOrLine(data, index, ')')
if hasEndBrace {
data[braceAt] = ' ' // Blank it
}
})
each("StringToBytes(", func(index int) {
braceAt, hasEndBrace := skipUntilIfExistsOrLine(data, index, ')')
if hasEndBrace {
data[braceAt] = ' ' // Blank it
}
})
each("w.Write(", func(index int) {
braceAt, hasEndBrace := skipUntilIfExistsOrLine(data, index, ')')
if hasEndBrace {
data[braceAt] = ' ' // Blank it
}
})
each("RelativeTime(", func(index int) {
braceAt, _ := skipUntilIfExistsOrLine(data, index, 10)
if data[braceAt-1] == ' ' {
data[braceAt-1] = ' ' // Blank it
}
})
each("if ", func(index int) {
//fmt.Println("if index: ", index)
braceAt, hasBrace := skipUntilIfExistsOrLine(data, index, '{')
if hasBrace {
if data[braceAt-1] != ' ' {
panic("couldn't find space before brace, found ' " + string(data[braceAt-1]) + "' instead")
}
data[braceAt-1] = ')' // Drop a brace here to satisfy JS
}
})
each("for _, item := range ", func(index int) {
//fmt.Println("for index: ", index)
braceAt, hasBrace := skipUntilIfExists(data, index, '{')
if hasBrace {
if data[braceAt-1] != ' ' {
panic("couldn't find space before brace, found ' " + string(data[braceAt-1]) + "' instead")
}
data[braceAt-1] = ')' // Drop a brace here to satisfy JS
}
})
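// Translate the remaining Go constructs (w.Write calls, range loops, string conversions, phrase lookups) into their JavaScript equivalents via plain string replacement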
rep("for _, item := range ", "for(item of ")
rep("w.Write([]byte(", "o += ")
rep("w.Write(StringToBytes(", "o += ")
rep("w.Write(", "o += ")
rep("+= c.", "+= ")
rep("strconv.Itoa(", "")
rep("strconv.FormatInt(", "")
rep(" c.", "")
rep("phrases.", "")
rep(", 10;", "")
//rep("var plist = GetTmplPhrasesBytes("+shortName+"_tmpl_phrase_id)", "const plist = tmplPhrases[\""+tmplName+"\"];")
//rep("//var plist = GetTmplPhrasesBytes("+shortName+"_tmpl_phrase_id)", "const "+shortName+"_phrase_arr = tmplPhrases[\""+tmplName+"\"];")
rep("//var plist = GetTmplPhrasesBytes("+shortName+"_tmpl_phrase_id)", "const pl=tmplPhrases[\""+tmplName+"\"];")
rep(shortName+"_phrase_arr", "pl")
rep(shortName+"_phrase", "pl")
rep("tmpl_"+shortName+"_vars", "t_v")
rep("var c_v_", "let c_v_")
rep(`t_vars, ok := tmpl_i.`, `/*`)
rep("[]byte(", "")
rep("StringToBytes(", "")
rep("RelativeTime(t_v.", "t_v.Relative")
// TODO: Format dates properly on the client side
rep(".Format(\"2006-01-02 15:04:05\"", "")
rep(", 10", "")
rep("if ", "if(")
rep("return nil", "return o")
rep(" )", ")")
rep(" \n", "\n")
rep("\n", ";\n")
rep("{;", "{")
rep("};", "}")
rep("[;", "[")
rep(",;", ",")
rep("=;", "=")
rep(`,
});
}`, "\n\t];")
rep(`=
}`, "=[]")
rep("o += ", "o+=")
rep(shortName+"_frags[", "fr[")
rep("function Tmpl_"+shortName+"(t_v) {", "var Tmpl_"+shortName+"=(t_v)=>{")
fragset := tmpl.GetFrag(shortName)
if fragset != nil {
//sfrags := []byte("let " + shortName + "_frags=[\n")
sfrags := []byte("{const fr=[")
for i, frags := range fragset {
//sfrags = append(sfrags, []byte(shortName+"_frags.push(`"+string(frags)+"`);\n")...)
//sfrags = append(sfrags, []byte("`"+string(frags)+"`,\n")...)
if i == 0 {
sfrags = append(sfrags, []byte("`"+string(frags)+"`")...)
} else {
sfrags = append(sfrags, []byte(",`"+string(frags)+"`")...)
}
}
//sfrags = append(sfrags, []byte("];\n")...)
sfrags = append(sfrags, []byte("];")...)
data = append(sfrags, data...)
}
rep("\n;", "\n")
rep(";;", ";")
data = append(data, '}')
for name := range Themes {
if strings.HasSuffix(shortName, "_"+name) {
data = append(data, "var Tmpl_"+strings.TrimSuffix(shortName, "_"+name)+"=Tmpl_"+shortName+";"...)
break
}
}
path = tmplName + ".js"
DebugLog("js path: ", path)
ext := filepath.Ext("/tmpl_client/" + path)
brData, err := CompressBytesBrotli(data)
if err != nil {
return err
}
// Don't use Brotli if the gains are meagre, as it takes longer to process the responses
if len(brData) >= (len(data) + 110) {
brData = nil
} else {
diff := len(data) - len(brData)
if diff <= len(data)/100 {
brData = nil
}
}
gzipData, err := CompressBytesGzip(data)
if err != nil {
return err
}
// Don't use Gzip if the gains are meagre, as it takes longer to process the responses
if len(gzipData) >= (len(data) + 120) {
gzipData = nil
} else {
diff := len(data) - len(gzipData)
if diff <= len(data)/100 {
gzipData = nil
}
}
// Get a checksum for CSPs and cache busting
hasher := sha256.New()
hasher.Write(data)
sum := hasher.Sum(nil)
checksum := hex.EncodeToString(sum)
integrity := base64.StdEncoding.EncodeToString(sum)
l.Set(l.Prefix+path, &SFile{data, gzipData, brData, checksum, integrity, l.Prefix + path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
DebugLogf("Added the '%s' static file.", path)
return nil
})
}
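// Init walks ./public and registers every file there as a static asset, pre-compressing text assets with gzip and brotli while skipping JPEG, PNG and GIF images.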
func (l SFileList) Init() error {
return filepath.Walk("./public", func(path string, f os.FileInfo, err error) error {
if f.IsDir() {
return nil
}
path = strings.Replace(path, "\\", "/", -1)
data, err := ioutil.ReadFile(path)
if err != nil {
return err
}
path = strings.TrimPrefix(path, "public/")
ext := filepath.Ext("/public/" + path)
if ext == ".js" {
data = bytes.Replace(data, []byte("\r"), []byte(""), -1)
}
mimetype := mime.TypeByExtension(ext)
// Get a checksum for CSPs and cache busting
hasher := sha256.New()
hasher.Write(data)
sum := hasher.Sum(nil)
checksum := hex.EncodeToString(sum)
integrity := base64.StdEncoding.EncodeToString(sum)
// Avoid double-compressing images
var gzipData, brData []byte
if mimetype != "image/jpeg" && mimetype != "image/png" && mimetype != "image/gif" {
brData, err = CompressBytesBrotli(data)
if err != nil {
return err
}
// Don't use Brotli if the gains are meagre, as it takes longer to process the responses
if len(brData) >= (len(data) + 130) {
brData = nil
} else {
diff := len(data) - len(brData)
if diff <= len(data)/100 {
brData = nil
}
}
gzipData, err = CompressBytesGzip(data)
if err != nil {
return err
}
// Don't use Gzip if the gains are meagre, as it takes longer to process the responses
if len(gzipData) >= (len(data) + 150) {
gzipData = nil
} else {
diff := len(data) - len(gzipData)
if diff <= len(data)/100 {
gzipData = nil
}
}
}
l.Set(l.Prefix+path, &SFile{data, gzipData, brData, checksum, integrity, l.Prefix + path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mimetype, f, f.ModTime().UTC().Format(http.TimeFormat)})
DebugLogf("Added the '%s' static file.", path)
return nil
})
}
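// Add loads a single file from disk, strips the given prefix from its path, pre-compresses it, and registers it under the list's prefix.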
func (l SFileList) Add(path, prefix string) error {
data, err := ioutil.ReadFile(path)
if err != nil {
return err
}
fi, err := os.Open(path)
if err != nil {
return err
}
f, err := fi.Stat()
if err != nil {
return err
}
ext := filepath.Ext(path)
path = strings.TrimPrefix(path, prefix)
brData, err := CompressBytesBrotli(data)
if err != nil {
return err
}
// Don't use Brotli if the gains are meagre, as it takes longer to process the responses
if len(brData) >= (len(data) + 130) {
brData = nil
} else {
diff := len(data) - len(brData)
if diff <= len(data)/100 {
brData = nil
}
}
gzipData, err := CompressBytesGzip(data)
if err != nil {
return err
}
// Don't use Gzip if the gains are meagre, as it takes longer to process the responses
if len(gzipData) >= (len(data) + 150) {
gzipData = nil
} else {
diff := len(data) - len(gzipData)
if diff <= len(data)/100 {
gzipData = nil
}
}
// Get a checksum for CSPs and cache busting
hasher := sha256.New()
hasher.Write(data)
sum := hasher.Sum(nil)
checksum := hex.EncodeToString(sum)
integrity := base64.StdEncoding.EncodeToString(sum)
l.Set(l.Prefix+path, &SFile{data, gzipData, brData, checksum, integrity, l.Prefix + path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
DebugLogf("Added the '%s' static file", path)
return nil
}
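// Get looks up a static file by its full prefixed path.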
func (l SFileList) Get(path string) (file *SFile, exists bool) {
staticFileMutex.RLock()
defer staticFileMutex.RUnlock()
file, exists = l.Long[path]
return file, exists
}
// GetShort fetches a file by its short name (without the /s/ prefix) to avoid allocating in pages.go
func (l SFileList) GetShort(name string) (file *SFile, exists bool) {
staticFileMutex.RLock()
defer staticFileMutex.RUnlock()
file, exists = l.Short[name]
return file, exists
}
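// Set registers a file under both its full path and its prefix-stripped short name.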
func (l SFileList) Set(name string, data *SFile) {
staticFileMutex.Lock()
defer staticFileMutex.Unlock()
// TODO: Propagate errors back up
uurl, err := url.Parse(name)
if err != nil {
return
}
l.Long[uurl.Path] = data
l.Short[strings.TrimPrefix(strings.TrimPrefix(name, l.Prefix), "/")] = data
}
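// gzipBestCompress pools gzip writers configured for best compression so they can be reused across calls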
var gzipBestCompress sync.Pool
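// CompressBytesGzip compresses the given bytes with gzip at the best-compression level, reusing pooled writers.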
func CompressBytesGzip(in []byte) (b []byte, err error) {
var buf bytes.Buffer
ii := gzipBestCompress.Get()
var gz *gzip.Writer
if ii == nil {
gz, err = gzip.NewWriterLevel(&buf, gzip.BestCompression)
if err != nil {
return nil, err
}
} else {
gz = ii.(*gzip.Writer)
gz.Reset(&buf)
}
_, err = gz.Write(in)
if err != nil {
return nil, err
}
err = gz.Close()
if err != nil {
return nil, err
}
gzipBestCompress.Put(gz)
return buf.Bytes(), nil
}
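// CompressBytesBrotli compresses the given bytes with brotli at the best-compression level.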
func CompressBytesBrotli(in []byte) ([]byte, error) {
var buff bytes.Buffer
br := brotli.NewWriterLevel(&buff, brotli.BestCompression)
_, err := br.Write(in)
if err != nil {
return nil, err
}
err = br.Close()
if err != nil {
return nil, err
}
return buff.Bytes(), nil
}