diff --git a/common/files.go b/common/files.go
index 86218753..59735109 100644
--- a/common/files.go
+++ b/common/files.go
@@ -20,11 +20,19 @@ import (
"github.com/andybalholm/brotli"
)
-type SFileList map[string]SFile
+//type SFileList map[string]*SFile
+//type SFileListShort map[string]*SFile
-var StaticFiles SFileList = make(map[string]SFile)
+var StaticFiles = SFileList{make(map[string]*SFile), make(map[string]*SFile)}
+//var StaticFilesShort SFileList = make(map[string]*SFile)
var staticFileMutex sync.RWMutex
+// ? Is it efficient to have two maps for this?
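+// Long is keyed by the full "/s/"-prefixed path, while Short is keyed by the same path
+// without the prefix, so hot callers in pages.go can look files up without concatenating.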
+type SFileList struct {
+ Long map[string]*SFile
+ Short map[string]*SFile
+}
+
type SFile struct {
// TODO: Move these to the end?
Data []byte
@@ -51,7 +59,7 @@ type CSSData struct {
Phrases map[string]string
}
-func (list SFileList) JSTmplInit() error {
+func (l SFileList) JSTmplInit() error {
DebugLog("Initialising the client side templates")
return filepath.Walk("./tmpl_client", func(path string, f os.FileInfo, err error) error {
if f.IsDir() || strings.HasSuffix(path, "tmpl_list.go") || strings.HasSuffix(path, "stub.go") {
@@ -297,14 +305,14 @@ func (list SFileList) JSTmplInit() error {
hasher.Write(data)
checksum := hex.EncodeToString(hasher.Sum(nil))
- list.Set("/s/"+path, SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
+ l.Set("/s/"+path, &SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
DebugLogf("Added the '%s' static file.", path)
return nil
})
}
-func (list SFileList) Init() error {
+func (l SFileList) Init() error {
return filepath.Walk("./public", func(path string, f os.FileInfo, err error) error {
if f.IsDir() {
return nil
@@ -359,14 +367,14 @@ func (list SFileList) Init() error {
}
}
- list.Set("/s/"+path, SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mimetype, f, f.ModTime().UTC().Format(http.TimeFormat)})
+ l.Set("/s/"+path, &SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mimetype, f, f.ModTime().UTC().Format(http.TimeFormat)})
DebugLogf("Added the '%s' static file.", path)
return nil
})
}
-func (list SFileList) Add(path, prefix string) error {
+func (l SFileList) Add(path, prefix string) error {
data, err := ioutil.ReadFile(path)
if err != nil {
return err
@@ -416,23 +424,32 @@ func (list SFileList) Add(path, prefix string) error {
hasher.Write(data)
checksum := hex.EncodeToString(hasher.Sum(nil))
- list.Set("/s/"+path, SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
+ l.Set("/s/"+path, &SFile{data, gzipData, brData, checksum, path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
DebugLogf("Added the '%s' static file", path)
return nil
}
-func (list SFileList) Get(name string) (file SFile, exists bool) {
+func (l SFileList) Get(path string) (file *SFile, exists bool) {
staticFileMutex.RLock()
defer staticFileMutex.RUnlock()
- file, exists = list[name]
+ file, exists = l.Long[path]
return file, exists
}
-func (list SFileList) Set(name string, data SFile) {
+// GetShort fetches a file by its path without the /s/ prefix, to avoid allocating in pages.go
+func (l SFileList) GetShort(name string) (file *SFile, exists bool) {
+ staticFileMutex.RLock()
+ defer staticFileMutex.RUnlock()
+ file, exists = l.Short[name]
+ return file, exists
+}
+
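+// Set registers the file under both the full "/s/" path and the prefix-stripped key,
+// keeping the Long and Short maps in sync.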
+func (l SFileList) Set(name string, data *SFile) {
staticFileMutex.Lock()
defer staticFileMutex.Unlock()
- list[name] = data
+ l.Long[name] = data
+ l.Short[strings.TrimPrefix(name, "/s/")] = data
}
var gzipBestCompress sync.Pool
diff --git a/common/pages.go b/common/pages.go
index aef699a1..13da442d 100644
--- a/common/pages.go
+++ b/common/pages.go
@@ -55,8 +55,7 @@ type Header struct {
func (h *Header) AddScript(name string) {
if name[0] == '/' && name[1] == '/' {
} else {
- // TODO: Use a secondary static file map to avoid this concatenation?
- file, ok := StaticFiles.Get("/s/" + name)
+ file, ok := StaticFiles.GetShort(name)
if ok {
name = file.OName
}
@@ -68,7 +67,7 @@ func (h *Header) AddScript(name string) {
func (h *Header) AddPreScriptAsync(name string) {
if name[0] == '/' && name[1] == '/' {
} else {
- file, ok := StaticFiles.Get("/s/" + name)
+ file, ok := StaticFiles.GetShort(name)
if ok {
name = file.OName
}
@@ -79,7 +78,7 @@ func (h *Header) AddPreScriptAsync(name string) {
func (h *Header) AddScriptAsync(name string) {
if name[0] == '/' && name[1] == '/' {
} else {
- file, ok := StaticFiles.Get("/s/" + name)
+ file, ok := StaticFiles.GetShort(name)
if ok {
name = file.OName
}
@@ -94,7 +93,7 @@ func (h *Header) AddScriptAsync(name string) {
func (h *Header) AddSheet(name string) {
if name[0] == '/' && name[1] == '/' {
} else {
- file, ok := StaticFiles.Get("/s/" + name)
+ file, ok := StaticFiles.GetShort(name)
if ok {
name = file.OName
}
@@ -108,7 +107,7 @@ func (h *Header) AddXRes(names ...string) {
for i, name := range names {
if name[0] == '/' && name[1] == '/' {
} else {
- file, ok := StaticFiles.Get("/s/" + name)
+ file, ok := StaticFiles.GetShort(name)
if ok {
name = file.OName
}
diff --git a/common/parser.go b/common/parser.go
index 5b132f9e..47e36116 100644
--- a/common/parser.go
+++ b/common/parser.go
@@ -247,6 +247,8 @@ func PreparseMessage(msg string) string {
},
}
// TODO: Implement a less literal parser
+ // TODO: Use a string builder
+ // TODO: Implement a faster emoji parser
for i := 0; i < len(runes); i++ {
char := runes[i]
// TODO: Make the slashes escapable too in case someone means to use a literal slash, maybe as an example of how to escape elements?
@@ -480,6 +482,9 @@ func ParseMessage(msg string, sectionID int, sectionType string, settings *Parse
msg, _ = ParseMessage2(msg, sectionID, sectionType, settings, user)
return msg
}
+var litRepPrefix = []byte{':',';'}
+//var litRep = [][]byte{':':[]byte{')','(','D','O','o','P','p'},';':[]byte{')'}}
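+// litRep maps two-byte emoticons to emoji: the outer slice is indexed by the first byte
+// (':' or ';') and the inner one by the second byte; an empty entry means no replacement.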
+var litRep = [][]string{':':[]string{')':"😀",'(':"😞",'D':"😃",'O':"😲",'o':"😲",'P':"😛",'p':"😛"},';':[]string{')':"😉"}}
// TODO: Write a test for this
// TODO: We need a lot more hooks here. E.g. To add custom media types and handlers.
@@ -492,7 +497,7 @@ func ParseMessage2(msg string, sectionID int, sectionType string, settings *Pars
user = &GuestUser
}
// TODO: Word boundary detection for these to avoid mangling code
- rep := func(find, replace string) {
+ /*rep := func(find, replace string) {
msg = strings.Replace(msg, find, replace, -1)
}
rep(":)", "π")
@@ -502,18 +507,17 @@ func ParseMessage2(msg string, sectionID int, sectionType string, settings *Pars
rep(":O", "π²")
rep(":p", "π")
rep(":o", "π²")
- rep(";)", "π")
+ rep(";)", "π")*/
// Word filter list. E.g. Swear words and other things the admins don't like
- wordFilters, err := WordFilters.GetAll()
+ filters, err := WordFilters.GetAll()
if err != nil {
LogError(err)
return "", false
}
- for _, f := range wordFilters {
+ for _, f := range filters {
msg = strings.Replace(msg, f.Find, f.Replace, -1)
}
-
if len(msg) < 2 {
msg = strings.Replace(msg, "\n", "<br>", -1)
msg = GetHookTable().Sshook("parse_assign", msg)
@@ -539,6 +543,33 @@ func ParseMessage2(msg string, sectionID int, sectionType string, settings *Pars
}
//fmt.Println("s2")
ch := msg[i]
+
+ // Very short literal matcher
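+ // A byte below len(litRep) may start a two-byte emoticon. On a match we flush the text
+ // before it, write the emoji, and step i back one so the loop's own i++ lands on the
+ // byte just past the emoticon.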
+ if len(litRep) > int(ch) {
+ sl := litRep[ch]
+ if sl != nil && i+1 < len(msg) {
+ i++
+ ch := msg[i]
+ if len(sl) > int(ch) {
+ val := sl[ch]
+ if val != "" {
+ i--
+ sb.WriteString(msg[lastItem:i])
+ i++
+ sb.WriteString(val)
+ i++
+ lastItem = i
+ i--
+ continue
+ }
+ }
+ i--
+ }
+ //lastItem = i
+ //i--
+ //continue
+ }
+
switch ch {
case '#':
//fmt.Println("msg[i+1]:", msg[i+1])
diff --git a/common/theme.go b/common/theme.go
index cfcc4bd5..a2a92f43 100644
--- a/common/theme.go
+++ b/common/theme.go
@@ -284,7 +284,7 @@ func (t *Theme) AddThemeStaticFiles() error {
hasher.Write(data)
checksum := hex.EncodeToString(hasher.Sum(nil))
- StaticFiles.Set("/s/"+t.Name+path, SFile{data, gzipData, brData, checksum, t.Name + path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
+ StaticFiles.Set("/s/"+t.Name+path, &SFile{data, gzipData, brData, checksum, t.Name + path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
DebugLog("Added the '/" + t.Name + path + "' static file for theme " + t.Name + ".")
return nil
@@ -429,8 +429,7 @@ func (w GzipResponseWriter) Write(b []byte) (int, error) {
// TODO: Cut the number of types in half
func (t *Theme) RunTmpl(template string, pi interface{}, w io.Writer) error {
// Unpack this to avoid an indirect call
- gzw, ok := w.(GzipResponseWriter)
- if ok {
+ if gzw, ok := w.(GzipResponseWriter); ok {
w = gzw.Writer
gzw.Header().Set("Content-Type", "text/html;charset=utf-8")
}
diff --git a/parser_test.go b/parser_test.go
index 5f837d35..c7f2f5ca 100644
--- a/parser_test.go
+++ b/parser_test.go
@@ -23,6 +23,12 @@ func TestPreparser(t *testing.T) {
l.Add("hi ", "hi")
l.Add("hi", "hi")
l.Add(":grinning:", "π")
+ l.Add(":grinning: :grinning:", "π π")
+ l.Add(" :grinning: ", "π")
+ l.Add(": :grinning: :", ": π :")
+ l.Add("::grinning::", ":π:")
+ //l.Add("d:grinning:d", "d:grinning:d") // todo
+ l.Add("d :grinning: d", "d π d")
l.Add("π", "π")
l.Add(" ", "")
l.Add("
", "")
@@ -148,6 +154,27 @@ func TestParser(t *testing.T) {
eurl := "" + url + ""
l.Add("", "")
l.Add("haha", "haha")
+ l.Add(":P", "π")
+ l.Add(" :P ", " π ")
+ l.Add(":p", "π")
+ l.Add("d:p", "d:p")
+ l.Add(":pd", "πd")
+ l.Add(":pdd", "πdd")
+ l.Add(":pddd", "πddd")
+ l.Add(":p d", "π d")
+ l.Add(":p dd", "π dd")
+ l.Add(":p ddd", "π ddd")
+ //l.Add(":p:p:p", "πππ")
+ l.Add(":p:p:p", "π:p:p")
+ l.Add(":p :p", "π π")
+ l.Add(":p :p :p", "π π π")
+ l.Add(":p :p :p :p", "π π π π")
+ l.Add(":p :p :p", "π π π")
+ l.Add("word:p", "word:p")
+ l.Add("word:pword", "word:pword")
+ l.Add(":pword", "πword") // TODO: Change the semantics on this to detect the succeeding character?
+ l.Add("word :p", "word π")
+ l.Add(":p word", "π word")
l.Add("t", "t")
l.Add("//", "//")
l.Add("http://", "
"+eurl+"
")
l.Add("\n//"+url+"\n\n", "
"+eurl+"
")
l.Add("//"+url+"\n//"+url, eurl+"
"+eurl)
+ l.Add("//"+url+" //"+url, eurl+" "+eurl)
+ l.Add("//"+url+" //"+url, eurl+" "+eurl)
+ //l.Add("//"+url+"//"+url, eurl+""+eurl)
+ //l.Add("//"+url+"|//"+url, eurl+"|"+eurl)
+ l.Add("//"+url+"|//"+url, "
"+eurl)
pre2 := c.Config.SslSchema
@@ -293,6 +326,7 @@ func TestParser(t *testing.T) {
l.Add("//www.youtube.com/watch?v=lalalalala&t=30s", "")
l.Add("#tid-1", "#tid-1")
+ l.Add("#tid-1#tid-1", "#tid-1#tid-1")
l.Add("##tid-1", "##tid-1")
l.Add("#@tid-1", "#@tid-1")
l.Add("# #tid-1", "# #tid-1")