Use a simpler web page caching method (Remove ETag and If-Modified-Since)

This commit is contained in:
NI
2022-05-14 21:16:55 +08:00
parent de23d076c6
commit 2340636c94
4 changed files with 2 additions and 235 deletions
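In effect, the change below drops the ETag / If-Modified-Since validators and relies on a fixed-lifetime Cache-Control header alone. A minimal sketch of that simplified flow, using made-up names (serveCached, page) rather than the repository's actual handlers:

package main

import (
	"net/http"
	"strconv"
)

// serveCached writes a static body with a fixed-lifetime Cache-Control
// header and no validators (no ETag, no Last-Modified).
func serveCached(w http.ResponseWriter, content []byte, contentType string) error {
	w.Header().Add("Cache-Control", "public, max-age=5184000")
	w.Header().Add("Content-Type", contentType)
	w.Header().Add("Content-Length", strconv.Itoa(len(content)))
	_, err := w.Write(content)
	return err
}

func main() {
	page := []byte("<html><body>hello</body></html>")
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		_ = serveCached(w, page, "text/html; charset=utf-8")
	})
	_ = http.ListenAndServe(":8080", nil)
}

The trade-off is that clients keep reusing their cached copy until max-age expires, with no revalidation round trip.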

View File

@@ -20,56 +20,9 @@ package controller
import (
"net/http"
"strings"
"time"
)
func clientSupportGZIP(r *http.Request) bool {
// Should be good enough
return strings.Contains(r.Header.Get("Accept-Encoding"), "gzip")
}
func clientContentEtagIsValid(r *http.Request, eTag string) bool {
d := r.Header.Get("If-None-Match")
if len(d) == 0 {
return false
}
dStart := 0
qETag := "\"" + eTag + "\""
for {
dIdx := strings.Index(d[dStart:], ",")
if dIdx < 0 {
return strings.Contains(d[dStart:], qETag) ||
strings.Contains(d[dStart:], "*")
}
if strings.Contains(d[dStart:dStart+dIdx], qETag) {
return true
}
if strings.Contains(d[dStart:dStart+dIdx], "*") {
return true
}
dStart += dIdx + 1
}
}
func clientContentModifiedSince(r *http.Request, mod time.Time) bool {
d := r.Header.Get("If-Modified-Since")
if len(d) == 0 {
return false
}
dt, dtErr := time.Parse(time.RFC1123, d)
if dtErr != nil {
return false
}
return !mod.Before(dt)
}
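For reference, the hand-rolled validators removed above duplicate logic the standard library already provides: http.ServeContent answers If-None-Match (when an ETag header is pre-set) and If-Modified-Since with a 304 on its own. A hedged, self-contained sketch, not the approach this repository takes:

package main

import (
	"bytes"
	"net/http"
	"time"
)

func main() {
	body := []byte("static page body")
	created := time.Now()
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("ETag", `"example-hash"`) // illustrative value only
		// ServeContent replies 304 Not Modified when the request's
		// validators match the ETag or the modification time.
		http.ServeContent(w, r, "page.html", created, bytes.NewReader(body))
	})
	_ = http.ListenAndServe(":8080", nil)
}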

View File

@@ -16,32 +16,3 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
package controller
import (
"net/http"
"testing"
)
func TestClientContentEtagIsValid(t *testing.T) {
test := func(id int, hd []string, etag string, expected bool) {
r := http.Request{
Header: http.Header{
"If-None-Match": hd,
},
}
rr := clientContentEtagIsValid(&r, etag)
if rr != expected {
t.Errorf("Test: %d: Expecting the result to be %v, got %v instead",
id, expected, rr)
return
}
}
test(0, []string{""}, "test", false)
test(1, []string{"*"}, "test", true)
test(2, []string{"W/\"67ab43\", \"54ed21\", \"7892dd\""}, "54ed21", true)
test(3, []string{"\"bfc13a64729c4290ef5b2c2730249c88ca92d82d\""},
"bfc13a64729c4290ef5b2c2730249c88ca92d82d", true)
}
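The removed test above covered the ETag parser; clientSupportGZIP stays, so a test in the same spirit could still exercise the Accept-Encoding check. A small sketch (the cases are illustrative):

package controller

import (
	"net/http"
	"testing"
)

func TestClientSupportGZIP(t *testing.T) {
	r := &http.Request{Header: http.Header{}}
	r.Header.Set("Accept-Encoding", "gzip, deflate, br")
	if !clientSupportGZIP(r) {
		t.Error("expected gzip support to be detected")
	}
	r.Header.Set("Accept-Encoding", "identity")
	if clientSupportGZIP(r) {
		t.Error("did not expect gzip support to be detected")
	}
}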

View File

@@ -44,11 +44,9 @@ func (s staticData) hasCompressed() bool {
func staticFileExt(fileName string) string {
extIdx := strings.LastIndex(fileName, ".")
if extIdx < 0 {
return ""
}
return strings.ToLower(fileName[extIdx:])
}
@@ -62,7 +60,6 @@ func serveStaticCacheData(
if fileExt == ".html" || fileExt == ".htm" {
return ErrNotFound
}
return serveStaticCachePage(dataName, w, r, l)
}
@@ -73,56 +70,26 @@ func serveStaticCachePage(
l log.Logger,
) error {
d, dFound := staticPages[dataName]
if !dFound {
return ErrNotFound
}
selectedData := d.data
selectedDataHash := d.dataHash
selectedLength := len(d.data)
compressEnabled := false
if clientSupportGZIP(r) && d.hasCompressed() {
selectedData = d.compressd
selectedDataHash = d.compressdHash
selectedLength = len(d.compressd)
compressEnabled = true
w.Header().Add("Vary", "Accept-Encoding")
}
canUseCache := true
if !clientContentEtagIsValid(r, selectedDataHash) {
canUseCache = false
}
if clientContentModifiedSince(r, d.created) {
canUseCache = false
}
if canUseCache {
w.WriteHeader(http.StatusNotModified)
return nil
}
w.Header().Add("Cache-Control", "public, max-age=31536000")
w.Header().Add("ETag", "\""+selectedDataHash+"\"")
w.Header().Add("Cache-Control", "public, max-age=5184000")
w.Header().Add("Content-Type", d.contentType)
if compressEnabled {
w.Header().Add("Content-Encoding", "gzip")
}
w.Header().Add("Content-Length",
strconv.FormatInt(int64(selectedLength), 10))
_, wErr := w.Write(selectedData)
return wErr
}
@@ -134,36 +101,25 @@ func serveStaticPage(
l log.Logger,
) error {
d, dFound := staticPages[dataName]
if !dFound {
return ErrNotFound
}
selectedData := d.data
selectedLength := len(d.data)
compressEnabled := false
if clientSupportGZIP(r) && d.hasCompressed() {
selectedData = d.compressd
selectedLength = len(d.compressd)
compressEnabled = true
w.Header().Add("Vary", "Accept-Encoding")
}
w.Header().Add("Content-Type", d.contentType)
if compressEnabled {
w.Header().Add("Content-Encoding", "gzip")
}
w.Header().Add("Content-Length",
strconv.FormatInt(int64(selectedLength), 10))
w.WriteHeader(code)
_, wErr := w.Write(selectedData)
return wErr
}

View File

@@ -20,8 +20,6 @@ package main
import (
"bytes"
"compress/gzip"
"crypto/sha256"
"encoding/base64"
"fmt"
"io"
"io/ioutil"
@@ -77,17 +75,13 @@ func parseStaticData(
fileEnd int,
compressedStart int,
compressedEnd int,
contentHash string,
compressedHash string,
creation time.Time,
data []byte,
contentType string,
) staticData {
return staticData{
data: data[fileStart:fileEnd],
dataHash: contentHash,
compressd: data[compressedStart:compressedEnd],
compressdHash: compressedHash,
created: creation,
contentType: contentType,
}
@@ -98,9 +92,7 @@ func parseStaticData(
import "bytes"
import "fmt"
import "compress/gzip"
import "encoding/base64"
import "time"
import "crypto/sha256"
import "mime"
import "strings"
@@ -111,10 +103,8 @@ func getMimeTypeByExtension(ext string) string {
switch ext {
case ".ico":
return "image/x-icon"
case ".md":
return "text/markdown"
default:
return mime.TypeByExtension(ext)
}
@@ -122,64 +112,38 @@ func getMimeTypeByExtension(ext string) string {
func staticFileGen(fileName, filePath string) staticData {
content, readErr := ioutil.ReadFile(filePath)
if readErr != nil {
panic(fmt.Sprintln("Cannot read file:", readErr))
}
compressed := bytes.NewBuffer(make([]byte, 0, 1024))
compresser, compresserBuildErr := gzip.NewWriterLevel(
compressed, gzip.BestSpeed)
if compresserBuildErr != nil {
panic(fmt.Sprintln("Cannot build data compresser:", compresserBuildErr))
}
contentLen := len(content)
_, compressErr := compresser.Write(content)
if compressErr != nil {
panic(fmt.Sprintln("Cannot write compressed data:", compressErr))
}
compressErr = compresser.Flush()
if compressErr != nil {
panic(fmt.Sprintln("Cannot write compressed data:", compressErr))
}
content = append(content, compressed.Bytes()...)
getHash := func(b []byte) []byte {
h := sha256.New()
h.Write(b)
return h.Sum(nil)
}
fileExtDotIdx := strings.LastIndex(fileName, ".")
fileExt := ""
if fileExtDotIdx >= 0 {
fileExt = fileName[fileExtDotIdx:]
}
mimeType := getMimeTypeByExtension(fileExt)
if len(mimeType) <= 0 {
mimeType = "application/binary"
}
return staticData{
data: content[0:contentLen],
contentType: mimeType,
dataHash: base64.StdEncoding.EncodeToString(
getHash(content[0:contentLen])[:8]),
compressd: content[contentLen:],
compressdHash: base64.StdEncoding.EncodeToString(
getHash(content[contentLen:])[:8]),
created: time.Now(),
}
}
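One detail worth noting in the compression above: Flush on a gzip.Writer does not emit the stream trailer, only Close does, so strict decoders may reject the output. A minimal standalone sketch of producing a fully terminated gzip buffer (gzipBytes is a made-up helper name):

package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
)

// gzipBytes compresses src into a complete gzip stream. Unlike Flush,
// Close writes the gzip trailer (CRC-32 and length).
func gzipBytes(src []byte) ([]byte, error) {
	var buf bytes.Buffer
	zw, err := gzip.NewWriterLevel(&buf, gzip.BestCompression)
	if err != nil {
		return nil, err
	}
	if _, err := zw.Write(src); err != nil {
		return nil, err
	}
	if err := zw.Close(); err != nil { // Close, not just Flush
		return nil, err
	}
	return buf.Bytes(), nil
}

func main() {
	out, err := gzipBytes([]byte("example static page content"))
	if err != nil {
		panic(err)
	}
	fmt.Println("compressed bytes:", len(out))
}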
@@ -217,22 +181,17 @@ func {{ .GOVariableName }}() (
int, // FileEnd
int, // CompressedStart
int, // CompressedEnd
string, // ContentHash
string, // CompressedHash
time.Time, // Time of creation
[]byte, // Data
string, // ContentType
) {
created, createErr := time.Parse(
time.RFC1123, "{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 MST" }}")
if createErr != nil {
panic(createErr)
}
return {{ .FileStart }}, {{ .FileEnd }},
{{ .CompressedStart }}, {{ .CompressedEnd }},
"{{ .ContentHash }}", "{{ .CompressedHash }}",
created, []byte({{ .Data }}), "{{ .ContentType }}"
}
`
@@ -255,36 +214,24 @@ type parsedFile struct {
CompressedStart int
CompressedEnd int
ContentType string
ContentHash string
CompressedHash string
Date time.Time
}
func getHash(b []byte) []byte {
h := sha256.New()
h.Write(b)
return h.Sum(nil)
}
func buildListFile(w io.Writer, data interface{}) error {
tpl := template.Must(template.New(
"StaticPageList").Parse(staticListTemplate))
return tpl.Execute(w, data)
}
func buildListFileDev(w io.Writer, data interface{}) error {
tpl := template.Must(template.New(
"StaticPageList").Parse(staticListTemplateDev))
return tpl.Execute(w, data)
}
func buildDataFile(w io.Writer, data interface{}) error {
tpl := template.Must(template.New(
"StaticPageData").Parse(staticPageTemplate))
return tpl.Execute(w, data)
}
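These build helpers follow the usual text/template pattern: parse once with template.Must, then Execute per data set. A minimal standalone illustration, with a made-up template body rather than the repository's:

package main

import (
	"os"
	"text/template"
)

type page struct {
	GOVariableName string
	ContentType    string
}

func main() {
	const tpl = "func {{ .GOVariableName }}() string {\n" +
		"\treturn \"{{ .ContentType }}\"\n}\n"
	t := template.Must(template.New("StaticPageData").Parse(tpl))
	// Writes the rendered Go source fragment to stdout.
	_ = t.Execute(os.Stdout, page{GOVariableName: "Static0", ContentType: "text/html"})
}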
@@ -296,22 +243,16 @@ func getMimeTypeByExtension(ext string) string {
switch ext {
case ".ico":
return "image/x-icon"
case ".md":
return "text/markdown"
case ".map":
return "text/plain"
case ".txt":
return "text/plain"
case ".woff":
return "application/font-woff"
case ".woff2":
return "application/font-woff2"
default:
return mime.TypeByExtension(ext)
}
@@ -320,26 +261,19 @@ func getMimeTypeByExtension(ext string) string {
func parseFile(
id int, name string, filePath string, packageName string) parsedFile {
content, readErr := ioutil.ReadFile(filePath)
if readErr != nil {
panic(fmt.Sprintln("Cannot read file:", readErr))
}
contentLen := len(content)
fileExtDotIdx := strings.LastIndex(name, ".")
fileExt := ""
if fileExtDotIdx >= 0 {
fileExt = name[fileExtDotIdx:]
}
mimeType := getMimeTypeByExtension(fileExt)
if len(mimeType) <= 0 {
mimeType = "application/binary"
}
if strings.HasPrefix(mimeType, "image/") {
// Don't compress images
} else if strings.HasPrefix(mimeType, "application/font-woff") {
@@ -348,32 +282,23 @@ func parseFile(
// Don't compress plain text
} else {
compressed := bytes.NewBuffer(make([]byte, 0, 1024))
compresser, compresserBuildErr := gzip.NewWriterLevel(
compressed, gzip.BestCompression)
if compresserBuildErr != nil {
panic(fmt.Sprintln(
"Cannot build data compresser:", compresserBuildErr))
}
_, compressErr := compresser.Write(content)
if compressErr != nil {
panic(fmt.Sprintln("Cannot write compressed data:", compressErr))
}
compressErr = compresser.Flush()
if compressErr != nil {
panic(fmt.Sprintln("Cannot write compressed data:", compressErr))
}
content = append(content, compressed.Bytes()...)
}
goFileName := "Static" + strconv.FormatInt(int64(id), 10)
return parsedFile{
Name: name,
GOVariableName: strings.Title(goFileName),
@@ -386,11 +311,7 @@ func parseFile(
CompressedStart: contentLen,
CompressedEnd: len(content),
ContentType: mimeType,
ContentHash: base64.StdEncoding.EncodeToString(
getHash(content[0:contentLen])[:8]),
CompressedHash: base64.StdEncoding.EncodeToString(
getHash(content[contentLen:])[:8]),
Date: time.Now(),
}
}
@@ -398,126 +319,92 @@ func main() {
if len(os.Args) < 3 {
panic("Usage: <Source Folder> <(Destination) List File>")
}
sourcePath, sourcePathErr := filepath.Abs(os.Args[1])
if sourcePathErr != nil {
panic(fmt.Sprintf("Invalid source folder path %s: %s",
os.Args[1], sourcePathErr))
}
listFilePath, listFilePathErr := filepath.Abs(os.Args[2])
if listFilePathErr != nil {
panic(fmt.Sprintf("Invalid destination list file path %s: %s",
os.Args[2], listFilePathErr))
}
listFileName := filepath.Base(listFilePath)
destFolderPackage := strings.TrimSuffix(
listFileName, filepath.Ext(listFileName))
destFolderPath := filepath.Join(
filepath.Dir(listFilePath), destFolderPackage)
destFolderPathErr := os.RemoveAll(destFolderPath)
if destFolderPathErr != nil {
panic(fmt.Sprintf("Unable to remove data destination folder %s: %s",
destFolderPath, destFolderPathErr))
}
destFolderPathErr = os.Mkdir(destFolderPath, 0777)
if destFolderPathErr != nil {
panic(fmt.Sprintf("Unable to build data destination folder %s: %s",
destFolderPath, destFolderPathErr))
}
listFile, listFileErr := os.Create(listFilePath)
if listFileErr != nil {
panic(fmt.Sprintf("Unable to open destination list file %s: %s",
listFilePath, listFileErr))
}
defer listFile.Close()
files, dirOpenErr := ioutil.ReadDir(sourcePath)
if dirOpenErr != nil {
panic(fmt.Sprintf("Unable to open dir: %s", dirOpenErr))
}
listFile.WriteString(staticListHeader)
listFile.WriteString("\n// This file is generated by `go generate` at " +
time.Now().Format(time.RFC1123) + "\n// DO NOT EDIT!\n\n")
switch os.Getenv("NODE_ENV") {
case "development":
type sourceFiles struct {
Name string
Path string
}
var sources []sourceFiles
for f := range files {
if !files[f].Mode().IsRegular() {
continue
}
sources = append(sources, sourceFiles{
Name: files[f].Name(),
Path: filepath.Join(sourcePath, files[f].Name()),
})
}
tempBuildErr := buildListFileDev(listFile, sources)
if tempBuildErr != nil {
panic(fmt.Sprintf(
"Unable to build destination file due to error: %s",
tempBuildErr))
}
default:
var parsedFiles []parsedFile
for f := range files {
if !files[f].Mode().IsRegular() {
continue
}
currentFilePath := filepath.Join(sourcePath, files[f].Name())
parsedFiles = append(parsedFiles, parseFile(
f, files[f].Name(), currentFilePath, destFolderPackage))
}
for f := range parsedFiles {
fn := filepath.Join(destFolderPath, parsedFiles[f].GOFileName)
ff, ffErr := os.Create(fn)
if ffErr != nil {
panic(fmt.Sprintf("Unable to create static page file %s: %s",
fn, ffErr))
}
bErr := buildDataFile(ff, parsedFiles[f])
if bErr != nil {
panic(fmt.Sprintf("Unable to build static page file %s: %s",
fn, bErr))
}
}
listFile.WriteString(
"\nimport \"" + parentPackage + "/" + destFolderPackage + "\"\n")
tempBuildErr := buildListFile(listFile, parsedFiles)
if tempBuildErr != nil {
panic(fmt.Sprintf(
"Unable to build destination file due to error: %s",