eliminate allocs for getting static resources
optimise two-char emoticons, add 30 parser tests
parent 101b045000
commit ed64b8f29b
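The static-resource change swaps the flat map[string]SFile for a struct holding two maps: one keyed by the full "/s/"-prefixed path and one keyed by the short name, so hot callers such as Header.AddScript can use GetShort(name) instead of Get("/s/" + name) and skip the concatenated-key allocation on every lookup. Below is a minimal sketch of that idea with hypothetical, simplified types (fileList/file stand in for the commit's SFileList/SFile, which also carry gzip/brotli variants, checksums, sizes, and mime data):

package main

import (
	"fmt"
	"strings"
	"sync"
)

type file struct{ OName string }

// fileList mirrors the two-map layout: long is keyed by "/s/name",
// short by the bare name, so reads need no string concatenation.
type fileList struct {
	mu    sync.RWMutex
	long  map[string]*file
	short map[string]*file
}

func (l *fileList) Set(name string, f *file) {
	l.mu.Lock()
	defer l.mu.Unlock()
	l.long[name] = f
	l.short[strings.TrimPrefix(name, "/s/")] = f
}

func (l *fileList) GetShort(name string) (*file, bool) {
	l.mu.RLock()
	defer l.mu.RUnlock()
	f, ok := l.short[name]
	return f, ok
}

func main() {
	l := &fileList{long: map[string]*file{}, short: map[string]*file{}}
	l.Set("/s/global.js", &file{OName: "global.js?h=abc123"})
	if f, ok := l.GetShort("global.js"); ok { // no "/s/"+name allocation on this path
		fmt.Println(f.OName)
	}
}

Storing *SFile rather than SFile values throughout points the same way: handing out a pointer avoids copying the many-field SFile struct out of the map on every Get.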
@@ -20,11 +20,19 @@ import (
 	"github.com/andybalholm/brotli"
 )
 
-type SFileList map[string]SFile
+//type SFileList map[string]*SFile
+//type SFileListShort map[string]*SFile
 
-var StaticFiles SFileList = make(map[string]SFile)
+var StaticFiles = SFileList{make(map[string]*SFile),make(map[string]*SFile)}
+//var StaticFilesShort SFileList = make(map[string]*SFile)
 var staticFileMutex sync.RWMutex
 
+// ? Is it efficient to have two maps for this?
+type SFileList struct {
+	Long  map[string]*SFile
+	Short map[string]*SFile
+}
+
 type SFile struct {
 	// TODO: Move these to the end?
 	Data []byte
@@ -51,7 +59,7 @@ type CSSData struct {
 	Phrases map[string]string
 }
 
-func (list SFileList) JSTmplInit() error {
+func (l SFileList) JSTmplInit() error {
 	DebugLog("Initialising the client side templates")
 	return filepath.Walk("./tmpl_client", func(path string, f os.FileInfo, err error) error {
 		if f.IsDir() || strings.HasSuffix(path, "tmpl_list.go") || strings.HasSuffix(path, "stub.go") {
@@ -297,14 +305,14 @@ func (list SFileList) JSTmplInit() error {
 		hasher.Write(data)
 		checksum := hex.EncodeToString(hasher.Sum(nil))
 
-		list.Set("/s/"+path, SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
+		l.Set("/s/"+path, &SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
 
 		DebugLogf("Added the '%s' static file.", path)
 		return nil
 	})
 }
 
-func (list SFileList) Init() error {
+func (l SFileList) Init() error {
 	return filepath.Walk("./public", func(path string, f os.FileInfo, err error) error {
 		if f.IsDir() {
 			return nil
@@ -359,14 +367,14 @@ func (list SFileList) Init() error {
 			}
 		}
 
-		list.Set("/s/"+path, SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mimetype, f, f.ModTime().UTC().Format(http.TimeFormat)})
+		l.Set("/s/"+path, &SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mimetype, f, f.ModTime().UTC().Format(http.TimeFormat)})
 
 		DebugLogf("Added the '%s' static file.", path)
 		return nil
 	})
 }
 
-func (list SFileList) Add(path, prefix string) error {
+func (l SFileList) Add(path, prefix string) error {
 	data, err := ioutil.ReadFile(path)
 	if err != nil {
 		return err
@@ -416,23 +424,32 @@ func (list SFileList) Add(path, prefix string) error {
 	hasher.Write(data)
 	checksum := hex.EncodeToString(hasher.Sum(nil))
 
-	list.Set("/s/"+path, SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
+	l.Set("/s/"+path, &SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
 
 	DebugLogf("Added the '%s' static file", path)
 	return nil
 }
 
-func (list SFileList) Get(name string) (file SFile, exists bool) {
+func (l SFileList) Get(path string) (file *SFile, exists bool) {
 	staticFileMutex.RLock()
 	defer staticFileMutex.RUnlock()
-	file, exists = list[name]
+	file, exists = l.Long[path]
 	return file, exists
 }
 
-func (list SFileList) Set(name string, data SFile) {
+// fetch without /s/ to avoid allocing in pages.go
+func (l SFileList) GetShort(name string) (file *SFile, exists bool) {
+	staticFileMutex.RLock()
+	defer staticFileMutex.RUnlock()
+	file, exists = l.Short[name]
+	return file, exists
+}
+
+func (l SFileList) Set(name string, data *SFile) {
 	staticFileMutex.Lock()
 	defer staticFileMutex.Unlock()
-	list[name] = data
+	l.Long[name] = data
+	l.Short[strings.TrimPrefix(name, "/s/")] = data
 }
 
 var gzipBestCompress sync.Pool
@@ -55,8 +55,7 @@ type Header struct {
 func (h *Header) AddScript(name string) {
 	if name[0] == '/' && name[1] == '/' {
 	} else {
-		// TODO: Use a secondary static file map to avoid this concatenation?
-		file, ok := StaticFiles.Get("/s/" + name)
+		file, ok := StaticFiles.GetShort(name)
 		if ok {
 			name = file.OName
 		}
@@ -68,7 +67,7 @@ func (h *Header) AddScript(name string) {
 func (h *Header) AddPreScriptAsync(name string) {
 	if name[0] == '/' && name[1] == '/' {
 	} else {
-		file, ok := StaticFiles.Get("/s/" + name)
+		file, ok := StaticFiles.GetShort(name)
 		if ok {
 			name = file.OName
 		}
@@ -79,7 +78,7 @@ func (h *Header) AddPreScriptAsync(name string) {
 func (h *Header) AddScriptAsync(name string) {
 	if name[0] == '/' && name[1] == '/' {
 	} else {
-		file, ok := StaticFiles.Get("/s/" + name)
+		file, ok := StaticFiles.GetShort(name)
 		if ok {
 			name = file.OName
 		}
@@ -94,7 +93,7 @@ func (h *Header) AddScriptAsync(name string) {
 func (h *Header) AddSheet(name string) {
 	if name[0] == '/' && name[1] == '/' {
 	} else {
-		file, ok := StaticFiles.Get("/s/" + name)
+		file, ok := StaticFiles.GetShort(name)
 		if ok {
 			name = file.OName
 		}
@@ -108,7 +107,7 @@ func (h *Header) AddXRes(names ...string) {
 	for i, name := range names {
 		if name[0] == '/' && name[1] == '/' {
 		} else {
-			file, ok := StaticFiles.Get("/s/" + name)
+			file, ok := StaticFiles.GetShort(name)
 			if ok {
 				name = file.OName
 			}
@@ -247,6 +247,8 @@ func PreparseMessage(msg string) string {
 		},
 	}
 	// TODO: Implement a less literal parser
+	// TODO: Use a string builder
+	// TODO: Implement faster emoji parser
 	for i := 0; i < len(runes); i++ {
 		char := runes[i]
 		// TODO: Make the slashes escapable too in case someone means to use a literaly slash, maybe as an example of how to escape elements?
@@ -480,6 +482,9 @@ func ParseMessage(msg string, sectionID int, sectionType string, settings *Parse
 	msg, _ = ParseMessage2(msg, sectionID, sectionType, settings, user)
 	return msg
 }
+var litRepPrefix = []byte{':',';'}
+//var litRep = [][]byte{':':[]byte{')','(','D','O','o','P','p'},';':[]byte{')'}}
+var litRep = [][]string{':':[]string{')':"😀",'(':"😞",'D':"😃",'O':"😲",'o':"😲",'P':"😛",'p':"😛"},';':[]string{')':"😉"}}
 
 // TODO: Write a test for this
 // TODO: We need a lot more hooks here. E.g. To add custom media types and handlers.
@@ -492,7 +497,7 @@ func ParseMessage2(msg string, sectionID int, sectionType string, settings *Pars
 		user = &GuestUser
 	}
 	// TODO: Word boundary detection for these to avoid mangling code
-	rep := func(find, replace string) {
+	/*rep := func(find, replace string) {
 		msg = strings.Replace(msg, find, replace, -1)
 	}
 	rep(":)", "😀")
@@ -502,18 +507,17 @@ func ParseMessage2(msg string, sectionID int, sectionType string, settings *Pars
 	rep(":O", "😲")
 	rep(":p", "😛")
 	rep(":o", "😲")
-	rep(";)", "😉")
-
+	rep(";)", "😉")*/
 	// Word filter list. E.g. Swear words and other things the admins don't like
-	wordFilters, err := WordFilters.GetAll()
+	filters, err := WordFilters.GetAll()
 	if err != nil {
 		LogError(err)
 		return "", false
 	}
-	for _, f := range wordFilters {
+	for _, f := range filters {
 		msg = strings.Replace(msg, f.Find, f.Replace, -1)
 	}
 
 	if len(msg) < 2 {
 		msg = strings.Replace(msg, "\n", "<br>", -1)
 		msg = GetHookTable().Sshook("parse_assign", msg)
@@ -539,6 +543,33 @@ func ParseMessage2(msg string, sectionID int, sectionType string, settings *Pars
 		}
 		//fmt.Println("s2")
 		ch := msg[i]
+
+		// Very short literal matcher
+		if len(litRep) > int(ch) {
+			sl := litRep[ch]
+			if sl != nil {
+				i++
+				ch := msg[i]
+				if len(sl) > int(ch) {
+					val := sl[ch]
+					if val != "" {
+						i--
+						sb.WriteString(msg[lastItem:i])
+						i++
+						sb.WriteString(val)
+						i++
+						lastItem = i
+						i--
+						continue
+					}
+				}
+				i--
+			}
+			//lastItem = i
+			//i--
+			//continue
+		}
+
 		switch ch {
 		case '#':
 			//fmt.Println("msg[i+1]:", msg[i+1])
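The two-character emoticon matcher above indexes a jagged [][]string table directly by byte value: litRep[':']['p'] yields "😛", so a match costs two bounds checks instead of a chain of strings.Replace passes over the whole message. Here is a stand-alone sketch of that lookup with a hypothetical replaceEmoticons helper and simplified index bookkeeping (the real loop shares its strings.Builder and lastItem state with the rest of ParseMessage2):

package main

import (
	"fmt"
	"strings"
)

// litRep[first][second] holds the replacement for a two-byte emoticon like ":p".
// Sparse composite-literal keys leave every unused byte value nil or out of range,
// so non-emoticon characters cost at most two cheap checks.
var litRep = [][]string{
	':': {')': "😀", '(': "😞", 'D': "😃", 'O': "😲", 'o': "😲", 'P': "😛", 'p': "😛"},
	';': {')': "😉"},
}

func replaceEmoticons(msg string) string {
	var sb strings.Builder
	last := 0
	for i := 0; i < len(msg); i++ {
		ch := msg[i]
		if int(ch) >= len(litRep) || litRep[ch] == nil || i+1 >= len(msg) {
			continue
		}
		next := msg[i+1]
		if int(next) < len(litRep[ch]) && litRep[ch][next] != "" {
			sb.WriteString(msg[last:i])       // copy the text before the emoticon
			sb.WriteString(litRep[ch][next])  // emit the replacement
			i++                               // skip the second emoticon byte
			last = i + 1
		}
	}
	sb.WriteString(msg[last:])
	return sb.String()
}

func main() {
	fmt.Println(replaceEmoticons("hi :p bye ;)")) // hi 😛 bye 😉
}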
@@ -284,7 +284,7 @@ func (t *Theme) AddThemeStaticFiles() error {
 		hasher.Write(data)
 		checksum := hex.EncodeToString(hasher.Sum(nil))
 
-		StaticFiles.Set("/s/"+t.Name+path, SFile{data, gzipData, brData, checksum, t.Name + path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
+		StaticFiles.Set("/s/"+t.Name+path, &SFile{data, gzipData, brData, checksum, t.Name + path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
 
 		DebugLog("Added the '/" + t.Name + path + "' static file for theme " + t.Name + ".")
 		return nil
@@ -429,8 +429,7 @@ func (w GzipResponseWriter) Write(b []byte) (int, error) {
 // TODO: Cut the number of types in half
 func (t *Theme) RunTmpl(template string, pi interface{}, w io.Writer) error {
 	// Unpack this to avoid an indirect call
-	gzw, ok := w.(GzipResponseWriter)
-	if ok {
+	if gzw, ok := w.(GzipResponseWriter); ok {
 		w = gzw.Writer
 		gzw.Header().Set("Content-Type", "text/html;charset=utf-8")
 	}
@@ -23,6 +23,12 @@ func TestPreparser(t *testing.T) {
 	l.Add("hi ", "hi")
 	l.Add("hi", "hi")
 	l.Add(":grinning:", "😀")
+	l.Add(":grinning: :grinning:", "😀 😀")
+	l.Add(" :grinning: ", "😀")
+	l.Add(": :grinning: :", ": 😀 :")
+	l.Add("::grinning::", ":😀:")
+	//l.Add("d:grinning:d", "d:grinning:d") // todo
+	l.Add("d :grinning: d", "d 😀 d")
 	l.Add("😀", "😀")
 	l.Add(" ", "")
 	l.Add("<p>", "")
@@ -148,6 +154,27 @@ func TestParser(t *testing.T) {
 	eurl := "<a rel='ugc'href='//" + url + "'>" + url + "</a>"
 	l.Add("", "")
 	l.Add("haha", "haha")
+	l.Add(":P", "😛")
+	l.Add(" :P ", " 😛 ")
+	l.Add(":p", "😛")
+	l.Add("d:p", "d:p")
+	l.Add(":pd", "😛d")
+	l.Add(":pdd", "😛dd")
+	l.Add(":pddd", "😛ddd")
+	l.Add(":p d", "😛 d")
+	l.Add(":p dd", "😛 dd")
+	l.Add(":p ddd", "😛 ddd")
+	//l.Add(":p:p:p", "😛😛😛")
+	l.Add(":p:p:p", "😛:p:p")
+	l.Add(":p :p", "😛 😛")
+	l.Add(":p :p :p", "😛 😛 😛")
+	l.Add(":p :p :p :p", "😛 😛 😛 😛")
+	l.Add(":p :p :p", "😛 😛 😛")
+	l.Add("word:p", "word:p")
+	l.Add("word:pword", "word:pword")
+	l.Add(":pword", "😛word") // TODO: Change the semantics on this to detect the succeeding character?
+	l.Add("word :p", "word 😛")
+	l.Add(":p word", "😛 word")
 	l.Add("<b>t</b>", "<b>t</b>")
 	l.Add("//", "//")
 	l.Add("http://", "<red>[Invalid URL]</red>")
@@ -210,6 +237,12 @@ func TestParser(t *testing.T) {
 	l.Add("\n//"+url+"\n", "<br>"+eurl+"<br>")
 	l.Add("\n//"+url+"\n\n", "<br>"+eurl+"<br><br>")
 	l.Add("//"+url+"\n//"+url, eurl+"<br>"+eurl)
+	l.Add("//"+url+" //"+url, eurl+" "+eurl)
+	l.Add("//"+url+" //"+url, eurl+" "+eurl)
+	//l.Add("//"+url+"//"+url, eurl+""+eurl)
+	//l.Add("//"+url+"|//"+url, eurl+"|"+eurl)
+	l.Add("//"+url+"|//"+url, "<red>[Invalid URL]</red>|//"+url)
+	l.Add("//"+url+"//"+url, "<a rel='ugc'href='//" + url + "//"+url+ "'>" + url +"//"+url+ "</a>")
 	l.Add("//"+url+"\n\n//"+url, eurl+"<br><br>"+eurl)
 
 	pre2 := c.Config.SslSchema
@@ -293,6 +326,7 @@ func TestParser(t *testing.T) {
 	l.Add("//www.youtube.com/watch?v=lalalalala&t=30s", "<iframe class='postIframe'src='https://www.youtube-nocookie.com/embed/lalalalala?start=30'frameborder=0 allowfullscreen></iframe><noscript><a href='https://www.youtube.com/watch?v=lalalalala&t=30s'>https://www.youtube.com/watch?v=lalalalala&t=30s</a></noscript>")
 
 	l.Add("#tid-1", "<a href='/topic/1'>#tid-1</a>")
+	l.Add("#tid-1#tid-1", "<a href='/topic/1'>#tid-1</a>#tid-1")
 	l.Add("##tid-1", "##tid-1")
 	l.Add("#@tid-1", "#@tid-1")
 	l.Add("# #tid-1", "# #tid-1")