Use easier web page caching method (Remove ETag and If-Modified-Since)

NI
2022-05-14 21:16:55 +08:00
parent de23d076c6
commit 2340636c94
4 changed files with 2 additions and 235 deletions
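In short, the simplified path drops conditional revalidation (If-None-Match / If-Modified-Since) and always writes the selected body, negotiating only gzip. A minimal sketch of that shape, with illustrative names rather than the project's actual types and handlers:

package main

import (
    "net/http"
    "strconv"
    "strings"
)

// serveStatic is a minimal sketch of the simplified caching path:
// no ETag and no If-Modified-Since check, only gzip negotiation.
// plain, gzipped and contentType are illustrative parameters, not
// the project's staticData fields.
func serveStatic(w http.ResponseWriter, r *http.Request, plain, gzipped []byte, contentType string) error {
    body := plain
    if len(gzipped) > 0 && strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
        body = gzipped
        w.Header().Add("Vary", "Accept-Encoding")
        w.Header().Add("Content-Encoding", "gzip")
    }
    w.Header().Add("Content-Type", contentType)
    w.Header().Add("Content-Length", strconv.Itoa(len(body)))
    _, err := w.Write(body)
    return err
}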

View File

@@ -20,56 +20,9 @@ package controller
import (
    "net/http"
    "strings"
    "time"
)
func clientSupportGZIP(r *http.Request) bool {
    // Should be good enough
    return strings.Contains(r.Header.Get("Accept-Encoding"), "gzip")
}
func clientContentEtagIsValid(r *http.Request, eTag string) bool {
    d := r.Header.Get("If-None-Match")
    if len(d) < 0 {
        return false
    }
    dStart := 0
    qETag := "\"" + eTag + "\""
    for {
        dIdx := strings.Index(d[dStart:], ",")
        if dIdx < 0 {
            return strings.Contains(d[dStart:], qETag) ||
                strings.Contains(d[dStart:], "*")
        }
        if strings.Contains(d[dStart:dStart+dIdx], qETag) {
            return true
        }
        if strings.Contains(d[dStart:dStart+dIdx], "*") {
            return true
        }
        dStart += dIdx + 1
    }
}
func clientContentModifiedSince(r *http.Request, mod time.Time) bool {
    d := r.Header.Get("If-Modified-Since")
    if len(d) < 0 {
        return false
    }
    dt, dtErr := time.Parse(time.RFC1123, d)
    if dtErr != nil {
        return false
    }
    return !mod.Before(dt)
}
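If conditional requests are ever wanted again, Go's standard library already implements this negotiation: http.ServeContent honours If-Modified-Since (against the supplied modification time) and If-None-Match (against a pre-set ETag header) and answers 304 on its own. A hedged sketch, not how this project serves its embedded pages:

package main

import (
    "bytes"
    "net/http"
    "time"
)

// serveWithConditional shows the standard library taking over the
// validator handling that the removed helpers implemented by hand.
func serveWithConditional(w http.ResponseWriter, r *http.Request, name string, modTime time.Time, data []byte) {
    // Setting an ETag beforehand lets ServeContent evaluate If-None-Match too.
    // The value here is an illustrative validator, not a real content hash.
    w.Header().Set("ETag", "\""+name+"\"")
    http.ServeContent(w, r, name, modTime, bytes.NewReader(data))
}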

View File

@@ -16,32 +16,3 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
package controller
import (
    "net/http"
    "testing"
)
func TestClientContentEtagIsValid(t *testing.T) {
    test := func(id int, hd []string, etag string, expected bool) {
        r := http.Request{
            Header: http.Header{
                "If-None-Match": hd,
            },
        }
        rr := clientContentEtagIsValid(&r, etag)
        if rr != expected {
            t.Errorf("Test: %d: Expecting the result to be %v, got %v instead",
                id, expected, rr)
            return
        }
    }
    test(0, []string{""}, "test", false)
    test(1, []string{"*"}, "test", true)
    test(2, []string{"W/\"67ab43\", \"54ed21\", \"7892dd\""}, "54ed21", true)
    test(3, []string{"\"bfc13a64729c4290ef5b2c2730249c88ca92d82d\""},
        "bfc13a64729c4290ef5b2c2730249c88ca92d82d", true)
}

View File

@@ -44,11 +44,9 @@ func (s staticData) hasCompressed() bool {
func staticFileExt(fileName string) string {
    extIdx := strings.LastIndex(fileName, ".")
    if extIdx < 0 {
        return ""
    }
    return strings.ToLower(fileName[extIdx:])
}
@@ -62,7 +60,6 @@ func serveStaticCacheData(
    if fileExt == ".html" || fileExt == ".htm" {
        return ErrNotFound
    }
    return serveStaticCachePage(dataName, w, r, l)
}
@@ -73,56 +70,26 @@ func serveStaticCachePage(
    l log.Logger,
) error {
    d, dFound := staticPages[dataName]
    if !dFound {
        return ErrNotFound
    }
    selectedData := d.data
    selectedDataHash := d.dataHash
    selectedLength := len(d.data)
    compressEnabled := false
    if clientSupportGZIP(r) && d.hasCompressed() {
        selectedData = d.compressd
        selectedDataHash = d.compressdHash
        selectedLength = len(d.compressd)
        compressEnabled = true
        w.Header().Add("Vary", "Accept-Encoding")
    }
    w.Header().Add("Cache-Control", "public, max-age=5184000")
    canUseCache := true
    if !clientContentEtagIsValid(r, selectedDataHash) {
        canUseCache = false
    }
    if clientContentModifiedSince(r, d.created) {
        canUseCache = false
    }
    if canUseCache {
        w.WriteHeader(http.StatusNotModified)
        return nil
    }
    w.Header().Add("Cache-Control", "public, max-age=31536000")
    w.Header().Add("ETag", "\""+selectedDataHash+"\"")
    w.Header().Add("Content-Type", d.contentType)
    if compressEnabled {
        w.Header().Add("Content-Encoding", "gzip")
    }
    w.Header().Add("Content-Length",
        strconv.FormatInt(int64(selectedLength), 10))
    _, wErr := w.Write(selectedData)
    return wErr
}
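The behaviour that remains after this change can be exercised with net/http/httptest. A sketch under the assumption of a handler shaped like the serveStatic example near the top of this page, not the project's actual serveStaticCachePage signature:

package main

import (
    "net/http"
    "net/http/httptest"
    "testing"
)

// TestGzipNegotiation is illustrative: a client that advertises gzip
// should get Content-Encoding: gzip and Vary: Accept-Encoding back.
// serveStatic refers to the sketch above, not the real handler.
func TestGzipNegotiation(t *testing.T) {
    r := httptest.NewRequest(http.MethodGet, "/index.html", nil)
    r.Header.Set("Accept-Encoding", "gzip, deflate")
    w := httptest.NewRecorder()
    serveStatic(w, r, []byte("plain"), []byte("gzipped-bytes"), "text/html")
    if w.Header().Get("Content-Encoding") != "gzip" {
        t.Errorf("expected gzip encoding, got %q", w.Header().Get("Content-Encoding"))
    }
    if w.Header().Get("Vary") != "Accept-Encoding" {
        t.Errorf("expected Vary: Accept-Encoding, got %q", w.Header().Get("Vary"))
    }
}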
@@ -134,36 +101,25 @@ func serveStaticPage(
    l log.Logger,
) error {
    d, dFound := staticPages[dataName]
    if !dFound {
        return ErrNotFound
    }
    selectedData := d.data
    selectedLength := len(d.data)
    compressEnabled := false
    if clientSupportGZIP(r) && d.hasCompressed() {
        selectedData = d.compressd
        selectedLength = len(d.compressd)
        compressEnabled = true
        w.Header().Add("Vary", "Accept-Encoding")
    }
    w.Header().Add("Content-Type", d.contentType)
    if compressEnabled {
        w.Header().Add("Content-Encoding", "gzip")
    }
    w.Header().Add("Content-Length",
        strconv.FormatInt(int64(selectedLength), 10))
    w.WriteHeader(code)
    _, wErr := w.Write(selectedData)
    return wErr
}

View File

@@ -20,8 +20,6 @@ package main
import (
    "bytes"
    "compress/gzip"
    "crypto/sha256"
    "encoding/base64"
    "fmt"
    "io"
    "io/ioutil"
@@ -77,17 +75,13 @@ func parseStaticData(
    fileEnd int,
    compressedStart int,
    compressedEnd int,
    contentHash string,
    compressedHash string,
    creation time.Time,
    data []byte,
    contentType string,
) staticData {
    return staticData{
        data: data[fileStart:fileEnd],
        dataHash: contentHash,
        compressd: data[compressedStart:compressedEnd],
        compressdHash: compressedHash,
        created: creation,
        contentType: contentType,
    }
@@ -98,9 +92,7 @@ func parseStaticData(
import "bytes"
import "fmt"
import "compress/gzip"
import "encoding/base64"
import "time"
import "crypto/sha256"
import "mime"
import "strings"
@@ -111,10 +103,8 @@ func getMimeTypeByExtension(ext string) string {
    switch ext {
    case ".ico":
        return "image/x-icon"
    case ".md":
        return "text/markdown"
    default:
        return mime.TypeByExtension(ext)
    }
@@ -122,64 +112,38 @@ func getMimeTypeByExtension(ext string) string {
func staticFileGen(fileName, filePath string) staticData {
    content, readErr := ioutil.ReadFile(filePath)
    if readErr != nil {
        panic(fmt.Sprintln("Cannot read file:", readErr))
    }
    compressed := bytes.NewBuffer(make([]byte, 0, 1024))
    compresser, compresserBuildErr := gzip.NewWriterLevel(
        compressed, gzip.BestSpeed)
    if compresserBuildErr != nil {
        panic(fmt.Sprintln("Cannot build data compresser:", compresserBuildErr))
    }
    contentLen := len(content)
    _, compressErr := compresser.Write(content)
    if compressErr != nil {
        panic(fmt.Sprintln("Cannot write compressed data:", compressErr))
    }
    compressErr = compresser.Flush()
    if compressErr != nil {
        panic(fmt.Sprintln("Cannot write compressed data:", compressErr))
    }
    content = append(content, compressed.Bytes()...)
    getHash := func(b []byte) []byte {
        h := sha256.New()
        h.Write(b)
        return h.Sum(nil)
    }
    fileExtDotIdx := strings.LastIndex(fileName, ".")
    fileExt := ""
    if fileExtDotIdx >= 0 {
        fileExt = fileName[fileExtDotIdx:len(fileName)]
    }
    mimeType := getMimeTypeByExtension(fileExt)
    if len(mimeType) <= 0 {
        mimeType = "application/binary"
    }
    return staticData{
        data: content[0:contentLen],
        contentType: mimeType,
        dataHash: base64.StdEncoding.EncodeToString(
            getHash(content[0:contentLen])[:8]),
        compressd: content[contentLen:],
        compressdHash: base64.StdEncoding.EncodeToString(
            getHash(content[contentLen:])[:8]),
        created: time.Now(),
    }
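One detail worth noting about the compression above: gzip.Writer.Flush writes any pending compressed data to the underlying writer but does not write the gzip footer; only Close finalises the stream with its CRC and length trailer. A small self-contained sketch of the fully closed form, with generic names rather than this generator's code:

package main

import (
    "bytes"
    "compress/gzip"
    "fmt"
    "log"
)

// gzipBytes compresses b and returns a complete gzip stream.
// Close (not just Flush) is what writes the gzip footer, so the
// result can be decoded by any standard gzip reader.
func gzipBytes(b []byte) ([]byte, error) {
    var buf bytes.Buffer
    zw, err := gzip.NewWriterLevel(&buf, gzip.BestCompression)
    if err != nil {
        return nil, err
    }
    if _, err := zw.Write(b); err != nil {
        return nil, err
    }
    if err := zw.Close(); err != nil { // writes the footer
        return nil, err
    }
    return buf.Bytes(), nil
}

func main() {
    out, err := gzipBytes([]byte("hello, gzip"))
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(len(out), "bytes of gzip output")
}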
@@ -217,22 +181,17 @@ func {{ .GOVariableName }}() (
    int, // FileEnd
    int, // CompressedStart
    int, // CompressedEnd
    string, // ContentHash
    string, // CompressedHash
    time.Time, // Time of creation
    []byte, // Data
    string, // ContentType
) {
    created, createErr := time.Parse(
        time.RFC1123, "{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 MST" }}")
    if createErr != nil {
        panic(createErr)
    }
    return {{ .FileStart }}, {{ .FileEnd }},
        {{ .CompressedStart }}, {{ .CompressedEnd }},
        "{{ .ContentHash }}", "{{ .CompressedHash }}",
        created, []byte({{ .Data }}), "{{ .ContentType }}"
}
`
@@ -255,36 +214,24 @@ type parsedFile struct {
    CompressedStart int
    CompressedEnd int
    ContentType string
    ContentHash string
    CompressedHash string
    Date time.Time
}
func getHash(b []byte) []byte {
    h := sha256.New()
    h.Write(b)
    return h.Sum(nil)
}
func buildListFile(w io.Writer, data interface{}) error {
    tpl := template.Must(template.New(
        "StaticPageList").Parse(staticListTemplate))
    return tpl.Execute(w, data)
}
func buildListFileDev(w io.Writer, data interface{}) error {
    tpl := template.Must(template.New(
        "StaticPageList").Parse(staticListTemplateDev))
    return tpl.Execute(w, data)
}
func buildDataFile(w io.Writer, data interface{}) error {
    tpl := template.Must(template.New(
        "StaticPageData").Parse(staticPageTemplate))
    return tpl.Execute(w, data)
}
@@ -296,22 +243,16 @@ func getMimeTypeByExtension(ext string) string {
    switch ext {
    case ".ico":
        return "image/x-icon"
    case ".md":
        return "text/markdown"
    case ".map":
        return "text/plain"
    case ".txt":
        return "text/plain"
    case ".woff":
        return "application/font-woff"
    case ".woff2":
        return "application/font-woff2"
    default:
        return mime.TypeByExtension(ext)
    }
@@ -320,26 +261,19 @@ func getMimeTypeByExtension(ext string) string {
func parseFile(
    id int, name string, filePath string, packageName string) parsedFile {
    content, readErr := ioutil.ReadFile(filePath)
    if readErr != nil {
        panic(fmt.Sprintln("Cannot read file:", readErr))
    }
    contentLen := len(content)
    fileExtDotIdx := strings.LastIndex(name, ".")
    fileExt := ""
    if fileExtDotIdx >= 0 {
        fileExt = name[fileExtDotIdx:]
    }
    mimeType := getMimeTypeByExtension(fileExt)
    if len(mimeType) <= 0 {
        mimeType = "application/binary"
    }
    if strings.HasPrefix(mimeType, "image/") {
        // Don't compress images
    } else if strings.HasPrefix(mimeType, "application/font-woff") {
@@ -348,32 +282,23 @@ func parseFile(
        // Don't compress plain text
    } else {
        compressed := bytes.NewBuffer(make([]byte, 0, 1024))
        compresser, compresserBuildErr := gzip.NewWriterLevel(
            compressed, gzip.BestCompression)
        if compresserBuildErr != nil {
            panic(fmt.Sprintln(
                "Cannot build data compresser:", compresserBuildErr))
        }
        _, compressErr := compresser.Write(content)
        if compressErr != nil {
            panic(fmt.Sprintln("Cannot write compressed data:", compressErr))
        }
        compressErr = compresser.Flush()
        if compressErr != nil {
            panic(fmt.Sprintln("Cannot write compressed data:", compressErr))
        }
        content = append(content, compressed.Bytes()...)
    }
    goFileName := "Static" + strconv.FormatInt(int64(id), 10)
    return parsedFile{
        Name: name,
        GOVariableName: strings.Title(goFileName),
@@ -386,10 +311,6 @@ func parseFile(
        CompressedStart: contentLen,
        CompressedEnd: len(content),
        ContentType: mimeType,
        ContentHash: base64.StdEncoding.EncodeToString(
            getHash(content[0:contentLen])[:8]),
        CompressedHash: base64.StdEncoding.EncodeToString(
            getHash(content[contentLen:])[:8]),
        Date: time.Now(),
    }
}
@@ -398,126 +319,92 @@ func main() {
    if len(os.Args) < 3 {
        panic("Usage: <Source Folder> <(Destination) List File>")
    }
    sourcePath, sourcePathErr := filepath.Abs(os.Args[1])
    if sourcePathErr != nil {
        panic(fmt.Sprintf("Invalid source folder path %s: %s",
            os.Args[1], sourcePathErr))
    }
    listFilePath, listFilePathErr := filepath.Abs(os.Args[2])
    if listFilePathErr != nil {
        panic(fmt.Sprintf("Invalid destination list file path %s: %s",
            os.Args[2], listFilePathErr))
    }
    listFileName := filepath.Base(listFilePath)
    destFolderPackage := strings.TrimSuffix(
        listFileName, filepath.Ext(listFileName))
    destFolderPath := filepath.Join(
        filepath.Dir(listFilePath), destFolderPackage)
    destFolderPathErr := os.RemoveAll(destFolderPath)
    if destFolderPathErr != nil {
        panic(fmt.Sprintf("Unable to remove data destination folder %s: %s",
            destFolderPath, destFolderPathErr))
    }
    destFolderPathErr = os.Mkdir(destFolderPath, 0777)
    if destFolderPathErr != nil {
        panic(fmt.Sprintf("Unable to build data destination folder %s: %s",
            destFolderPath, destFolderPathErr))
    }
    listFile, listFileErr := os.Create(listFilePath)
    if listFileErr != nil {
        panic(fmt.Sprintf("Unable to open destination list file %s: %s",
            listFilePath, listFileErr))
    }
    defer listFile.Close()
    files, dirOpenErr := ioutil.ReadDir(sourcePath)
    if dirOpenErr != nil {
        panic(fmt.Sprintf("Unable to open dir: %s", dirOpenErr))
    }
    listFile.WriteString(staticListHeader)
    listFile.WriteString("\n// This file is generated by `go generate` at " +
        time.Now().Format(time.RFC1123) + "\n// DO NOT EDIT!\n\n")
    switch os.Getenv("NODE_ENV") {
    case "development":
        type sourceFiles struct {
            Name string
            Path string
        }
        var sources []sourceFiles
        for f := range files {
            if !files[f].Mode().IsRegular() {
                continue
            }
            sources = append(sources, sourceFiles{
                Name: files[f].Name(),
                Path: filepath.Join(sourcePath, files[f].Name()),
            })
        }
        tempBuildErr := buildListFileDev(listFile, sources)
        if tempBuildErr != nil {
            panic(fmt.Sprintf(
                "Unable to build destination file due to error: %s",
                tempBuildErr))
        }
    default:
        var parsedFiles []parsedFile
        for f := range files {
            if !files[f].Mode().IsRegular() {
                continue
            }
            currentFilePath := filepath.Join(sourcePath, files[f].Name())
            parsedFiles = append(parsedFiles, parseFile(
                f, files[f].Name(), currentFilePath, destFolderPackage))
        }
        for f := range parsedFiles {
            fn := filepath.Join(destFolderPath, parsedFiles[f].GOFileName)
            ff, ffErr := os.Create(fn)
            if ffErr != nil {
                panic(fmt.Sprintf("Unable to create static page file %s: %s",
                    fn, ffErr))
            }
            bErr := buildDataFile(ff, parsedFiles[f])
            if bErr != nil {
                panic(fmt.Sprintf("Unable to build static page file %s: %s",
                    fn, bErr))
            }
        }
        listFile.WriteString(
            "\nimport \"" + parentPackage + "/" + destFolderPackage + "\"\n")
        tempBuildErr := buildListFile(listFile, parsedFiles)
        if tempBuildErr != nil {
            panic(fmt.Sprintf(
                "Unable to build destination file due to error: %s",