Co-authored-by: Moritz Marquardt <git@momar.de>
crapStone 2024-11-21 22:10:44 +01:00
parent 23a8e83e80
commit 81bdbaf261
No known key found for this signature in database
GPG key ID: 22D4BF0CF7CC29C8
2 changed files with 102 additions and 110 deletions

@@ -2,6 +2,7 @@ package gitea
 import (
 	"bytes"
+	"encoding/json"
 	"fmt"
 	"io"
 	"net/http"
@@ -34,23 +35,18 @@ const (
 )
 type FileResponse struct {
-	Exists    bool
-	IsSymlink bool
-	ETag      string
-	// uncompressed MIME type
-	MimeType string
-	// raw MIME type (if compressed, type of compression)
-	RawMime string
-	Body    []byte
+	Exists    bool   `json:"exists"`
+	IsSymlink bool   `json:"isSymlink"`
+	ETag      string `json:"eTag"`
+	MimeType  string `json:"mimeType"`
+	Body      []byte `json:"-"` // saved separately
 }
 func (f FileResponse) IsEmpty() bool {
 	return len(f.Body) == 0
 }
-func (f FileResponse) createHttpResponse(cacheKey string, decompress bool) (header http.Header, statusCode int) {
+func (f FileResponse) createHttpResponse(cacheKey string) (header http.Header, statusCode int) {
 	header = make(http.Header)
 	if f.Exists {
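Note on the new struct tags: json.Marshal now emits only the metadata fields, because `json:"-"` keeps Body out of the JSON document; the body is cached under its own key in the Close() hunk further down. A minimal standalone sketch of that behaviour (the struct is copied from this hunk, the example values are made up):

// Standalone sketch: json.Marshal skips Body because of the `json:"-"` tag,
// so only the metadata fields end up in the JSON written to the cache.
package main

import (
	"encoding/json"
	"fmt"
)

type FileResponse struct {
	Exists    bool   `json:"exists"`
	IsSymlink bool   `json:"isSymlink"`
	ETag      string `json:"eTag"`
	MimeType  string `json:"mimeType"`
	Body      []byte `json:"-"` // saved separately
}

func main() {
	fr := FileResponse{Exists: true, ETag: "deadbeef", MimeType: "text/html", Body: []byte("<h1>hi</h1>")}
	out, _ := json.Marshal(fr)
	fmt.Println(string(out)) // {"exists":true,"isSymlink":false,"eTag":"deadbeef","mimeType":"text/html"}
}

Unmarshaling that JSON back leaves Body nil, which is why the Close() hunk below stores the body under a separate "|Body" key.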
@@ -63,12 +59,7 @@ func (f FileResponse) createHttpResponse(cacheKey string, decompress bool) (head
 			header.Set(giteaObjectTypeHeader, objTypeSymlink)
 		}
 		header.Set(ETagHeader, f.ETag)
-		if decompress {
-			header.Set(ContentTypeHeader, f.MimeType)
-		} else {
-			header.Set(ContentTypeHeader, f.RawMime)
-		}
+		header.Set(ContentTypeHeader, f.MimeType)
 		header.Set(ContentLengthHeader, fmt.Sprintf("%d", len(f.Body)))
 		header.Set(PagesCacheIndicatorHeader, "true")
@@ -77,9 +68,8 @@ func (f FileResponse) createHttpResponse(cacheKey string, decompress bool) (head
 }
 type BranchTimestamp struct {
-	Branch    string
-	Timestamp time.Time
-	notFound  bool
+	Branch    string    `json:"branch"`
+	Timestamp time.Time `json:"timestamp"`
 }
 type writeCacheReader struct {
@@ -89,32 +79,46 @@ type writeCacheReader struct {
 	cacheKey   string
 	cache      cache.ICache
 	hasError   bool
+	doNotCache bool
+	complete   bool
 }
 func (t *writeCacheReader) Read(p []byte) (n int, err error) {
+	log.Trace().Msgf("[cache] read %q", t.cacheKey)
 	n, err = t.originalReader.Read(p)
+	if err == io.EOF {
+		t.complete = true
+	}
 	if err != nil && err != io.EOF {
 		log.Trace().Err(err).Msgf("[cache] original reader for %q has returned an error", t.cacheKey)
 		t.hasError = true
 	} else if n > 0 {
-		_, _ = t.buffer.Write(p[:n])
+		if t.buffer.Len()+n > int(fileCacheSizeLimit) {
+			t.doNotCache = true
+			t.buffer.Reset()
+		} else {
+			_, _ = t.buffer.Write(p[:n])
+		}
 	}
 	return
 }
 func (t *writeCacheReader) Close() error {
-	doWrite := !t.hasError
+	doWrite := !t.hasError && !t.doNotCache && t.complete
 	fc := *t.fileResponse
 	fc.Body = t.buffer.Bytes()
+	if fc.IsEmpty() {
+		log.Trace().Msg("[cache] file response is empty")
+		doWrite = false
+	}
 	if doWrite {
-		err := t.cache.Set(t.cacheKey, fc, fileCacheTimeout)
+		jsonToCache, err := json.Marshal(fc)
 		if err != nil {
-			log.Trace().Err(err).Msgf("[cache] writer for %q has returned an error", t.cacheKey)
+			log.Trace().Err(err).Msgf("[cache] marshaling json for %q has returned an error", t.cacheKey+"|Metadata")
 		}
+		err = t.cache.Set(t.cacheKey+"|Metadata", jsonToCache, fileCacheTimeout)
+		if err != nil {
+			log.Trace().Err(err).Msgf("[cache] writer for %q has returned an error", t.cacheKey+"|Metadata")
+		}
+		err = t.cache.Set(t.cacheKey+"|Body", fc.Body, fileCacheTimeout)
+		if err != nil {
+			log.Trace().Err(err).Msgf("[cache] writer for %q has returned an error", t.cacheKey+"|Body")
+		}
 	}
 	log.Trace().Msgf("cacheReader for %q saved=%t closed", t.cacheKey, doWrite)