Mirror of https://codeberg.org/Codeberg/pages-server.git (synced 2025-05-01 16:41:59 +00:00)

Compare commits
No commits in common: "v1.0_php" and "main" have entirely different histories.
86 changed files with 9286 additions and 428 deletions

.ecrc (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "Exclude": [
    ".git",
    "go.mod", "go.sum",
    "vendor",
    "LICENSE",
    "_test.go"
  ]
}

.editorconfig (new file, 17 lines)
@@ -0,0 +1,17 @@
root = true

[*]
indent_style = space
indent_size = 2
tab_width = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.go]
indent_style = tab

[*.md]
trim_trailing_whitespace = false
indent_size = 1

.env-dev (new file, 11 lines)
@@ -0,0 +1,11 @@
ACME_API=https://acme.mock.directory
ACME_ACCEPT_TERMS=true
PAGES_DOMAIN=localhost.mock.directory
RAW_DOMAIN=raw.localhost.mock.directory
PAGES_BRANCHES=pages,master,main
GITEA_ROOT=https://codeberg.org
PORT=4430
HTTP_PORT=8880
ENABLE_HTTP_SERVER=true
LOG_LEVEL=trace
ACME_ACCOUNT_CONFIG=integration/acme-account.json

.envrc (new file, 1 line)
@@ -0,0 +1 @@
use_flake

.gitea/ISSUE_TEMPLATE/config.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
blank_issues_enabled: true
contact_links:
  - name: Codeberg Pages Usage Support
    url: https://codeberg.org/Codeberg/Community/issues/
    about: If you need help with configuring Codeberg Pages on codeberg.org, please go here.

.gitignore (new file, vendored, 12 lines)
@@ -0,0 +1,12 @@
.idea/
.cache/
*.iml
key-database.pogreb/
acme-account.json
build/
vendor/
pages
certs.sqlite
.bash_history
pkg/
.direnv/

.golangci.yml (new file, 34 lines)
@@ -0,0 +1,34 @@
linters-settings:
  gocritic:
    enabled-tags:
      - diagnostic
      - experimental
      - opinionated
      - performance
      - style
    disabled-checks:
      - importShadow
      - ifElseChain
      - hugeParam

linters:
  disable-all: true
  enable:
    - unconvert
    - gocritic
    - gofumpt
    - bidichk
    - errcheck
    - gofmt
    - goimports
    - gosimple
    - govet
    - ineffassign
    - misspell
    - staticcheck
    - typecheck
    - unused
    - whitespace

run:
  timeout: 5m

.prettierrc.json (new file, 8 lines)
@@ -0,0 +1,8 @@
{
  "semi": true,
  "trailingComma": "all",
  "singleQuote": true,
  "printWidth": 120,
  "tabWidth": 2,
  "endOfLine": "lf"
}

.vscode/launch.json (new file, vendored, 26 lines)
@@ -0,0 +1,26 @@
{
  // Use IntelliSense to learn about possible attributes.
  // Hover to view descriptions of existing attributes.
  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Launch PagesServer",
      "type": "go",
      "request": "launch",
      "mode": "auto",
      "program": "${workspaceFolder}/main.go",
      "args": ["sqlite", "sqlite_unlock_notify", "netgo"],
      "envFile": "${workspaceFolder}/.env-dev"
    },
    {
      "name": "Launch PagesServer integration test",
      "type": "go",
      "request": "launch",
      "mode": "auto",
      "program": "${workspaceFolder}/integration/main_test.go",
      "args": ["codeberg.org/codeberg/pages/integration/..."],
      "buildFlags": ["-tags", "'integration sqlite sqlite_unlock_notify netgo'"]
    }
  ]
}

123
.woodpecker/build.yml
Normal file
123
.woodpecker/build.yml
Normal file
|
@ -0,0 +1,123 @@
|
||||||
|
when:
|
||||||
|
- event: [push, pull_request, tag, cron]
|
||||||
|
branch: ${CI_REPO_DEFAULT_BRANCH}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
# use vendor to cache dependencies
|
||||||
|
vendor:
|
||||||
|
image: golang:1.24
|
||||||
|
commands:
|
||||||
|
- go mod vendor
|
||||||
|
|
||||||
|
build:
|
||||||
|
depends_on: vendor
|
||||||
|
image: codeberg.org/6543/docker-images/golang_just:go-1.24
|
||||||
|
commands:
|
||||||
|
- go version
|
||||||
|
- just build
|
||||||
|
when:
|
||||||
|
- event: [push, pull_request, tag]
|
||||||
|
|
||||||
|
docker-dryrun:
|
||||||
|
depends_on: vendor
|
||||||
|
image: woodpeckerci/plugin-docker-buildx:5.2.1
|
||||||
|
settings:
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
platforms: linux/amd64
|
||||||
|
dry-run: true
|
||||||
|
tags: latest
|
||||||
|
when:
|
||||||
|
- event: [pull_request]
|
||||||
|
path: Dockerfile
|
||||||
|
|
||||||
|
build-tag:
|
||||||
|
depends_on: vendor
|
||||||
|
image: codeberg.org/6543/docker-images/golang_just:go-1.24
|
||||||
|
commands:
|
||||||
|
- go version
|
||||||
|
- just build-tag ${CI_COMMIT_TAG##v}
|
||||||
|
when:
|
||||||
|
- event: [tag]
|
||||||
|
|
||||||
|
test:
|
||||||
|
depends_on: build
|
||||||
|
image: codeberg.org/6543/docker-images/golang_just:go-1.24
|
||||||
|
commands:
|
||||||
|
- just test
|
||||||
|
when:
|
||||||
|
- event: [pull_request]
|
||||||
|
|
||||||
|
integration-tests:
|
||||||
|
failure: ignore
|
||||||
|
depends_on: build
|
||||||
|
image: codeberg.org/6543/docker-images/golang_just:go-1.24
|
||||||
|
commands:
|
||||||
|
- just integration
|
||||||
|
environment:
|
||||||
|
ACME_API: https://acme.mock.directory
|
||||||
|
PAGES_DOMAIN: localhost.mock.directory
|
||||||
|
RAW_DOMAIN: raw.localhost.mock.directory
|
||||||
|
PORT: 4430
|
||||||
|
when:
|
||||||
|
- event: [pull_request]
|
||||||
|
|
||||||
|
release:
|
||||||
|
depends_on: build
|
||||||
|
image: woodpeckerci/plugin-release:0.2.5
|
||||||
|
settings:
|
||||||
|
base_url: https://codeberg.org
|
||||||
|
file_exists: overwrite
|
||||||
|
files: build/codeberg-pages-server
|
||||||
|
api_key:
|
||||||
|
from_secret: bot_token
|
||||||
|
when:
|
||||||
|
- event: [tag]
|
||||||
|
|
||||||
|
docker-next:
|
||||||
|
depends_on: vendor
|
||||||
|
image: woodpeckerci/plugin-docker-buildx:5.2.1
|
||||||
|
settings:
|
||||||
|
registry: codeberg.org
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
platforms: linux/amd64,arm64
|
||||||
|
repo: codeberg.org/codeberg/pages-server
|
||||||
|
tags: next
|
||||||
|
username:
|
||||||
|
from_secret: bot_user
|
||||||
|
password:
|
||||||
|
from_secret: bot_token
|
||||||
|
when:
|
||||||
|
- event: [push]
|
||||||
|
|
||||||
|
'Publish PR image':
|
||||||
|
image: woodpeckerci/plugin-docker-buildx:5.2.1
|
||||||
|
depends_on: test
|
||||||
|
settings:
|
||||||
|
registry: codeberg.org
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
platforms: linux/amd64
|
||||||
|
repo: codeberg.org/codeberg/pages-server
|
||||||
|
tags: next
|
||||||
|
username:
|
||||||
|
from_secret: bot_user
|
||||||
|
password:
|
||||||
|
from_secret: bot_token
|
||||||
|
when:
|
||||||
|
evaluate: 'CI_COMMIT_PULL_REQUEST_LABELS contains "build_pr_image"'
|
||||||
|
event: pull_request
|
||||||
|
|
||||||
|
docker-release:
|
||||||
|
depends_on: vendor
|
||||||
|
image: woodpeckerci/plugin-docker-buildx:5.2.1
|
||||||
|
settings:
|
||||||
|
registry: codeberg.org
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
platforms: linux/amd64,arm64
|
||||||
|
repo: codeberg.org/codeberg/pages-server
|
||||||
|
tags: [latest, '${CI_COMMIT_TAG}']
|
||||||
|
username:
|
||||||
|
from_secret: bot_user
|
||||||
|
password:
|
||||||
|
from_secret: bot_token
|
||||||
|
when:
|
||||||
|
- event: [tag]
|
.woodpecker/lint.yml (new file, 30 lines)
@@ -0,0 +1,30 @@
when:
  - event: pull_request
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

steps:
  lint:
    depends_on: []
    image: golangci/golangci-lint:v1.64.8
    commands:
      - go version
      - go install mvdan.cc/gofumpt@latest
      - "[ $(gofumpt -extra -l . | wc -l) != 0 ] && { echo 'code not formatted'; exit 1; }"
      - golangci-lint run --timeout 10m --build-tags integration

  editor-config:
    depends_on: []
    image: mstruebing/editorconfig-checker:v3.2.0

  yamllint:
    image: pipelinecomponents/yamllint:0.33.0
    depends_on: []
    commands:
      - yamllint .

  prettier:
    image: docker.io/woodpeckerci/plugin-prettier:1.3.0
    depends_on: []
    settings:
      version: 3.2.5

.yamllint.yaml (new file, 19 lines)
@@ -0,0 +1,19 @@
extends: default

rules:
  comments:
    require-starting-space: false
    ignore-shebangs: true
    min-spaces-from-content: 1
  braces:
    min-spaces-inside: 1
    max-spaces-inside: 1
  document-start:
    present: false
  indentation:
    spaces: 2
    indent-sequences: true
  line-length:
    max: 256
  new-lines:
    type: unix

Dockerfile (new file, 36 lines)
@@ -0,0 +1,36 @@
# Set the default Go version as a build argument
ARG XGO="go-1.24.x"

# Use xgo (a Go cross-compiler tool) as build image
FROM --platform=$BUILDPLATFORM techknowlogick/xgo:${XGO} AS build

# Set the working directory and copy the source code
WORKDIR /go/src/codeberg.org/codeberg/pages
COPY . /go/src/codeberg.org/codeberg/pages

# Set the target architecture (can be set using --build-arg); buildx sets it automatically
ARG TARGETOS TARGETARCH

# Build the binary using xgo
RUN --mount=type=cache,target=/root/.cache/go-build \
    --mount=type=cache,target=/go/pkg \
    GOOS=${TARGETOS} GOARCH=${TARGETARCH} CGO_ENABLED=1 \
    xgo -x -v --targets=${TARGETOS}/${TARGETARCH} -tags='sqlite sqlite_unlock_notify netgo' -ldflags='-s -w -extldflags "-static" -linkmode external' -out pages .
RUN mv -vf /build/pages-* /go/src/codeberg.org/codeberg/pages/pages

# Use a scratch image as the base image for the final container,
# which will contain only the built binary and the CA certificates
FROM scratch

# Copy the built binary and the CA certificates from the build container to the final container
COPY --from=build /go/src/codeberg.org/codeberg/pages/pages /pages
COPY --from=build \
    /etc/ssl/certs/ca-certificates.crt \
    /etc/ssl/certs/ca-certificates.crt

# Expose ports 80 and 443 for the built binary to listen on
EXPOSE 80/tcp
EXPOSE 443/tcp

# Set the entrypoint for the container to the built binary
ENTRYPOINT ["/pages"]

FEATURES.md (new file, 51 lines)
@@ -0,0 +1,51 @@
# Features

## Custom domains

Custom domains can be used by creating a `.domains` file with the domain name, e.g.:

```text
codeberg.page
```

You also have to set some DNS records; see the [Codeberg Documentation](https://docs.codeberg.org/codeberg-pages/using-custom-domain/).

## Redirects

Redirects can be created with a `_redirects` file in the following format:

```text
# Comment
from to [status]
```

- Lines starting with `#` are ignored
- `from` - the path to redirect from (Note: repository and branch names are removed from request URLs)
- `to` - the path or URL to redirect to
- `status` - status code to use when redirecting (default 301)

### Status codes

- `200` - returns content from the specified path (no external URLs) without changing the URL (rewrite)
- `301` - Moved Permanently (permanent redirect)
- `302` - Found (temporary redirect)

### Examples

#### SPA (single-page application) rewrite

Redirects all paths to `/index.html` for single-page apps.

```text
/* /index.html 200
```

#### Splats

Redirects every path under `/articles` to `/posts` while keeping the path.

```text
/articles/* /posts/:splat 302
```

Example: `/articles/2022/10/12/post-1/` -> `/posts/2022/10/12/post-1/`

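To make the splat rule concrete, here is a minimal Go sketch of the matching behaviour described above. It is illustrative only and not taken from the pages-server code; the `redirectRule` type and `resolveSplat` helper are hypothetical names.

```go
// Illustrative only: resolves a splat rule such as "/articles/* /posts/:splat 302".
package main

import (
	"fmt"
	"strings"
)

type redirectRule struct {
	From   string // e.g. "/articles/*"
	To     string // e.g. "/posts/:splat"
	Status int    // e.g. 302
}

// resolveSplat returns the redirect target for reqPath, or "" if the rule does not match.
func resolveSplat(rule redirectRule, reqPath string) string {
	prefix, ok := strings.CutSuffix(rule.From, "*")
	if !ok || !strings.HasPrefix(reqPath, prefix) {
		return ""
	}
	// Everything matched by "*" is substituted for ":splat" in the target.
	splat := strings.TrimPrefix(reqPath, prefix)
	return strings.ReplaceAll(rule.To, ":splat", splat)
}

func main() {
	rule := redirectRule{From: "/articles/*", To: "/posts/:splat", Status: 302}
	fmt.Println(resolveSplat(rule, "/articles/2022/10/12/post-1/"))
	// Output: /posts/2022/10/12/post-1/
}
```

Running the sketch prints `/posts/2022/10/12/post-1/`, matching the example above.
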
Justfile (new file, 52 lines)
@@ -0,0 +1,52 @@
CGO_FLAGS := '-extldflags "-static" -linkmode external'
TAGS := 'sqlite sqlite_unlock_notify netgo'

dev *FLAGS:
    #!/usr/bin/env bash
    set -euxo pipefail
    set -a # automatically export all variables
    source .env-dev
    set +a
    go run -tags '{{TAGS}}' . {{FLAGS}}

build:
    CGO_ENABLED=1 go build -tags '{{TAGS}}' -ldflags '-s -w {{CGO_FLAGS}}' -v -o build/codeberg-pages-server ./

build-tag VERSION:
    CGO_ENABLED=1 go build -tags '{{TAGS}}' -ldflags '-s -w -X "codeberg.org/codeberg/pages/server/version.Version={{VERSION}}" {{CGO_FLAGS}}' -v -o build/codeberg-pages-server ./

lint: tool-golangci tool-gofumpt
    golangci-lint run --timeout 5m --build-tags integration
    # TODO: run editorconfig-checker

fmt: tool-gofumpt
    gofumpt -w --extra .

clean:
    go clean ./...
    rm -rf build/ integration/certs.sqlite integration/acme-account.json

tool-golangci:
    @hash golangci-lint> /dev/null 2>&1; if [ $? -ne 0 ]; then \
        go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest; \
    fi

tool-gofumpt:
    @hash gofumpt> /dev/null 2>&1; if [ $? -ne 0 ]; then \
        go install mvdan.cc/gofumpt@latest; \
    fi

test:
    go test -race -cover -tags '{{TAGS}}' codeberg.org/codeberg/pages/config/ codeberg.org/codeberg/pages/html/ codeberg.org/codeberg/pages/server/...

test-run TEST:
    go test -race -tags '{{TAGS}}' -run "^{{TEST}}$" codeberg.org/codeberg/pages/config/ codeberg.org/codeberg/pages/html/ codeberg.org/codeberg/pages/server/...

integration:
    go test -race -tags 'integration {{TAGS}}' codeberg.org/codeberg/pages/integration/...

integration-run TEST:
    go test -race -tags 'integration {{TAGS}}' -run "^{{TEST}}$" codeberg.org/codeberg/pages/integration/...

docker:
    docker run --rm -it --user $(id -u) -v $(pwd):/work --workdir /work -e HOME=/work codeberg.org/6543/docker-images/golang_just

LICENSE (new file, 305 lines)
@@ -0,0 +1,305 @@
European Union Public Licence v. 1.2

EUPL © the European Union 2007, 2016

This European Union Public Licence (the 'EUPL') applies to the Work (as defined
below) which is provided under the terms of this Licence. Any use of the Work,
other than as authorised under this Licence is prohibited (to the extent such
use is covered by a right of the copyright holder of the Work).

The Work is provided under the terms of this Licence when the Licensor (as
defined below) has placed the following notice immediately following the copyright
notice for the Work:

    Licensed under the EUPL

or has expressed by any other means his willingness to license under the EUPL.

1. Definitions

In this Licence, the following terms have the following meaning:

— 'The Licence': this Licence.

— 'The Original Work': the work or software distributed or communicated by
the Licensor under this Licence, available as Source Code and also as Executable
Code as the case may be.

— 'Derivative Works': the works or software that could be created by the Licensee,
based upon the Original Work or modifications thereof. This Licence does not
define the extent of modification or dependence on the Original Work required
in order to classify a work as a Derivative Work; this extent is determined
by copyright law applicable in the country mentioned in Article 15.

— 'The Work': the Original Work or its Derivative Works.

— 'The Source Code': the human-readable form of the Work which is the most
convenient for people to study and modify.

— 'The Executable Code': any code which has generally been compiled and which
is meant to be interpreted by a computer as a program.

— 'The Licensor': the natural or legal person that distributes or communicates
the Work under the Licence.

— 'Contributor(s)': any natural or legal person who modifies the Work under
the Licence, or otherwise contributes to the creation of a Derivative Work.

— 'The Licensee' or 'You': any natural or legal person who makes any usage
of the Work under the terms of the Licence.

— 'Distribution' or 'Communication': any act of selling, giving, lending,
renting, distributing, communicating, transmitting, or otherwise making available,
online or offline, copies of the Work or providing access to its essential
functionalities at the disposal of any other natural or legal person.

2. Scope of the rights granted by the Licence

The Licensor hereby grants You a worldwide, royalty-free, non-exclusive, sublicensable
licence to do the following, for the duration of copyright vested in the Original
Work:

— use the Work in any circumstance and for all usage,

— reproduce the Work,

— modify the Work, and make Derivative Works based upon the Work,

— communicate to the public, including the right to make available or display
the Work or copies thereof to the public and perform publicly, as the case
may be, the Work,

— distribute the Work or copies thereof,

— lend and rent the Work or copies thereof,

— sublicense rights in the Work or copies thereof.

Those rights can be exercised on any media, supports and formats, whether
now known or later invented, as far as the applicable law permits so.

In the countries where moral rights apply, the Licensor waives his right to
exercise his moral right to the extent allowed by law in order to make effective
the licence of the economic rights here above listed.

The Licensor grants to the Licensee royalty-free, non-exclusive usage rights
to any patents held by the Licensor, to the extent necessary to make use of
the rights granted on the Work under this Licence.

3. Communication of the Source Code

The Licensor may provide the Work either in its Source Code form, or as Executable
Code. If the Work is provided as Executable Code, the Licensor provides in
addition a machine-readable copy of the Source Code of the Work along with
each copy of the Work that the Licensor distributes or indicates, in a notice
following the copyright notice attached to the Work, a repository where the
Source Code is easily and freely accessible for as long as the Licensor continues
to distribute or communicate the Work.

4. Limitations on copyright

Nothing in this Licence is intended to deprive the Licensee of the benefits
from any exception or limitation to the exclusive rights of the rights owners
in the Work, of the exhaustion of those rights or of other applicable limitations
thereto.

5. Obligations of the Licensee

The grant of the rights mentioned above is subject to some restrictions and
obligations imposed on the Licensee. Those obligations are the following:

Attribution right: The Licensee shall keep intact all copyright, patent or
trademarks notices and all notices that refer to the Licence and to the disclaimer
of warranties. The Licensee must include a copy of such notices and a copy
of the Licence with every copy of the Work he/she distributes or communicates.
The Licensee must cause any Derivative Work to carry prominent notices stating
that the Work has been modified and the date of modification.

Copyleft clause: If the Licensee distributes or communicates copies of the
Original Works or Derivative Works, this Distribution or Communication will
be done under the terms of this Licence or of a later version of this Licence
unless the Original Work is expressly distributed only under this version
of the Licence — for example by communicating 'EUPL v. 1.2 only'. The Licensee
(becoming Licensor) cannot offer or impose any additional terms or conditions
on the Work or Derivative Work that alter or restrict the terms of the Licence.

Compatibility clause: If the Licensee Distributes or Communicates Derivative
Works or copies thereof based upon both the Work and another work licensed
under a Compatible Licence, this Distribution or Communication can be done
under the terms of this Compatible Licence. For the sake of this clause, 'Compatible
Licence' refers to the licences listed in the appendix attached to this Licence.
Should the Licensee's obligations under the Compatible Licence conflict with
his/her obligations under this Licence, the obligations of the Compatible
Licence shall prevail.

Provision of Source Code: When distributing or communicating copies of the
Work, the Licensee will provide a machine-readable copy of the Source Code
or indicate a repository where this Source will be easily and freely available
for as long as the Licensee continues to distribute or communicate the Work.

Legal Protection: This Licence does not grant permission to use the trade
names, trademarks, service marks, or names of the Licensor, except as required
for reasonable and customary use in describing the origin of the Work and
reproducing the content of the copyright notice.

6. Chain of Authorship

The original Licensor warrants that the copyright in the Original Work granted
hereunder is owned by him/her or licensed to him/her and that he/she has the
power and authority to grant the Licence.

Each Contributor warrants that the copyright in the modifications he/she brings
to the Work are owned by him/her or licensed to him/her and that he/she has
the power and authority to grant the Licence.

Each time You accept the Licence, the original Licensor and subsequent Contributors
grant You a licence to their contributions to the Work, under the terms of
this Licence.

7. Disclaimer of Warranty

The Work is a work in progress, which is continuously improved by numerous
Contributors. It is not a finished work and may therefore contain defects
or 'bugs' inherent to this type of development.

For the above reason, the Work is provided under the Licence on an 'as is'
basis and without warranties of any kind concerning the Work, including without
limitation merchantability, fitness for a particular purpose, absence of defects
or errors, accuracy, non-infringement of intellectual property rights other
than copyright as stated in Article 6 of this Licence.

This disclaimer of warranty is an essential part of the Licence and a condition
for the grant of any rights to the Work.

8. Disclaimer of Liability

Except in the cases of wilful misconduct or damages directly caused to natural
persons, the Licensor will in no event be liable for any direct or indirect,
material or moral, damages of any kind, arising out of the Licence or of the
use of the Work, including without limitation, damages for loss of goodwill,
work stoppage, computer failure or malfunction, loss of data or any commercial
damage, even if the Licensor has been advised of the possibility of such damage.
However, the Licensor will be liable under statutory product liability laws
as far such laws apply to the Work.

9. Additional agreements

While distributing the Work, You may choose to conclude an additional agreement,
defining obligations or services consistent with this Licence. However, if
accepting obligations, You may act only on your own behalf and on your sole
responsibility, not on behalf of the original Licensor or any other Contributor,
and only if You agree to indemnify, defend, and hold each Contributor harmless
for any liability incurred by, or claims asserted against such Contributor
by the fact You have accepted any warranty or additional liability.

10. Acceptance of the Licence

The provisions of this Licence can be accepted by clicking on an icon 'I agree'
placed under the bottom of a window displaying the text of this Licence or
by affirming consent in any other similar way, in accordance with the rules
of applicable law. Clicking on that icon indicates your clear and irrevocable
acceptance of this Licence and all of its terms and conditions.

Similarly, you irrevocably accept this Licence and all of its terms and conditions
by exercising any rights granted to You by Article 2 of this Licence, such
as the use of the Work, the creation by You of a Derivative Work or the Distribution
or Communication by You of the Work or copies thereof.

11. Information to the public

In case of any Distribution or Communication of the Work by means of electronic
communication by You (for example, by offering to download the Work from a
remote location) the distribution channel or media (for example, a website)
must at least provide to the public the information requested by the applicable
law regarding the Licensor, the Licence and the way it may be accessible,
concluded, stored and reproduced by the Licensee.

12. Termination of the Licence

The Licence and the rights granted hereunder will terminate automatically
upon any breach by the Licensee of the terms of the Licence.

Such a termination will not terminate the licences of any person who has received
the Work from the Licensee under the Licence, provided such persons remain
in full compliance with the Licence.

13. Miscellaneous

Without prejudice of Article 9 above, the Licence represents the complete
agreement between the Parties as to the Work.

If any provision of the Licence is invalid or unenforceable under applicable
law, this will not affect the validity or enforceability of the Licence as
a whole. Such provision will be construed or reformed so as necessary to make
it valid and enforceable.

The European Commission may publish other linguistic versions or new versions
of this Licence or updated versions of the Appendix, so far this is required
and reasonable, without reducing the scope of the rights granted by the Licence.
New versions of the Licence will be published with a unique version number.

All linguistic versions of this Licence, approved by the European Commission,
have identical value. Parties can take advantage of the linguistic version
of their choice.

14. Jurisdiction

Without prejudice to specific agreement between parties,

— any litigation resulting from the interpretation of this License, arising
between the European Union institutions, bodies, offices or agencies, as a
Licensor, and any Licensee, will be subject to the jurisdiction of the Court
of Justice of the European Union, as laid down in article 272 of the Treaty
on the Functioning of the European Union,

— any litigation arising between other parties and resulting from the interpretation
of this License, will be subject to the exclusive jurisdiction of the competent
court where the Licensor resides or conducts its primary business.

15. Applicable Law

Without prejudice to specific agreement between parties,

— this Licence shall be governed by the law of the European Union Member State
where the Licensor has his seat, resides or has his registered office,

— this licence shall be governed by Belgian law if the Licensor has no seat,
residence or registered office inside a European Union Member State.

Appendix

'Compatible Licences' according to Article 5 EUPL are:

— GNU General Public License (GPL) v. 2, v. 3

— GNU Affero General Public License (AGPL) v. 3

— Open Software License (OSL) v. 2.1, v. 3.0

— Eclipse Public License (EPL) v. 1.0

— CeCILL v. 2.0, v. 2.1

— Mozilla Public Licence (MPL) v. 2

— GNU Lesser General Public Licence (LGPL) v. 2.1, v. 3

— Creative Commons Attribution-ShareAlike v. 3.0 Unported (CC BY-SA 3.0) for
works other than software

— European Union Public Licence (EUPL) v. 1.1, v. 1.2

— Québec Free and Open-Source Licence — Reciprocity (LiLiQ-R) or Strong Reciprocity
(LiLiQ-R+).

The European Commission may update this Appendix to later versions of the
above licences without producing a new version of the EUPL, as long as they
provide the rights granted in Article 2 of this Licence and protect the covered
Source Code from exclusive appropriation.

All other changes or additions to this Appendix require the production of
a new EUPL version.

README.md (new file, 150 lines)
@@ -0,0 +1,150 @@
# Codeberg Pages

[](https://opensource.org/license/eupl-1-2/)
[](https://ci.codeberg.org/Codeberg/pages-server)
<a href="https://matrix.to/#/#gitea-pages-server:matrix.org" title="Join the Matrix room at https://matrix.to/#/#gitea-pages-server:matrix.org">
<img src="https://img.shields.io/matrix/gitea-pages-server:matrix.org?label=matrix">
</a>

Gitea lacks the ability to host static pages from Git.
The Codeberg Pages Server fills this gap by implementing a standalone service
that connects to Gitea via API.
It is suitable to be deployed by other Gitea instances, too, to offer static pages hosting to their users.

**End user documentation** can mainly be found in the [Wiki](https://codeberg.org/Codeberg/pages-server/wiki/Overview)
and the [Codeberg Documentation](https://docs.codeberg.org/codeberg-pages/).

<a href="https://codeberg.org/Codeberg/pages-server"> <img src="https://codeberg.org/Codeberg/GetItOnCodeberg/raw/branch/main/get-it-on-blue-on-white.svg" alt="Get It On Codeberg" width="250"/> </a>

## Quickstart

This is the new Codeberg Pages server, a solution for serving static pages from Gitea repositories.
Mapping custom domains is no longer static; it can be done with DNS:

1. Add a `.domains` text file to your repository, containing the allowed domains, separated by new lines. The
   first line will be the canonical domain/URL; all other occurrences will be redirected to it.

2. Add a CNAME entry to your domain, pointing to `[[{branch}.]{repo}.]{owner}.codeberg.page` (repo defaults to
   "pages"; "branch" defaults to the default branch if "repo" is "pages", or to "pages" if "repo" is something else.
   If the branch name contains slash characters, replace "/" in the branch name with "~"; see the illustrative
   sketch at the end of this section):
   `www.example.org. IN CNAME main.pages.example.codeberg.page.`

3. If a CNAME is set for "www.example.org", you can redirect there from the naked domain by adding an ALIAS record
   for "example.org" (if your provider allows ALIAS or similar records, otherwise use A/AAAA), together with a TXT
   record that points to your repo (just like the CNAME record):
   `example.org IN ALIAS codeberg.page.`
   `example.org IN TXT main.pages.example.codeberg.page.`

Certificates are generated, updated and cleaned up automatically via Let's Encrypt through a TLS challenge.

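The following Go sketch restates the host-to-repository mapping from step 2 of the Quickstart. It is an illustration of the rules quoted above, not the server's actual implementation; `parsePagesHost` is a hypothetical helper name.

```go
// Hypothetical sketch: split a host such as "main.pages.example.codeberg.page"
// into owner, repo and branch according to the Quickstart rules.
package main

import (
	"fmt"
	"strings"
)

func parsePagesHost(host, pagesDomain string) (owner, repo, branch string) {
	labels := strings.Split(strings.TrimSuffix(host, "."+pagesDomain), ".")
	owner = labels[len(labels)-1] // the last remaining label is the owner
	repo, branch = "pages", ""    // empty branch means "default branch"
	if len(labels) >= 2 {
		repo = labels[len(labels)-2]
	}
	if len(labels) >= 3 {
		// "~" stands in for "/" in branch names.
		branch = strings.ReplaceAll(labels[len(labels)-3], "~", "/")
	}
	if repo != "pages" && branch == "" {
		branch = "pages" // other repos default to the "pages" branch
	}
	return owner, repo, branch
}

func main() {
	fmt.Println(parsePagesHost("main.pages.example.codeberg.page", "codeberg.page"))
	// Output: example pages main
}
```
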
## Chat for admins & devs

[matrix: #gitea-pages-server:matrix.org](https://matrix.to/#/#gitea-pages-server:matrix.org)

## Deployment

**Warning: Some Caveats Apply**

> Currently, the deployment requires you to have some knowledge of system administration as well as understanding and building code,
> so you can eventually edit non-configurable and Codeberg-specific settings.
> In the future, we'll try to reduce these and make hosting Codeberg Pages as easy as setting up Gitea.
> If you consider using Pages in practice, please consider contacting us first;
> we'll then try to share some basic steps and document the current usage for admins
> (this might change given the current state).

Deploying the software itself is very easy. You can grab a current release binary or build it yourself,
configure the environment as described below, and you are done.

The hard part is adding **custom domain support** if you intend to use it.
SSL certificates (request + renewal) are handled automatically by the Pages Server,
but if you want to run it on a shared IP address (and not standalone),
you'll need to configure your reverse proxy not to terminate the TLS connections,
but to forward the requests at the IP level to the Pages Server.

You can check out a proof of concept in the `examples/haproxy-sni` folder,
and especially have a look at [this section of the haproxy.cfg](https://codeberg.org/Codeberg/pages-server/src/branch/main/examples/haproxy-sni/haproxy.cfg#L38).

If you want to test a change, you can open a PR and ask for the label `build_pr_image` to be added.
This triggers a CI run that builds a Docker image of the PR for testing.

### Environment Variables

- `ACME_ACCEPT_TERMS` (default: use self-signed certificate): Set this to "true" to accept the Terms of Service of your ACME provider.
- `ACME_API` (default: <https://acme-v02.api.letsencrypt.org/directory>): set this to <https://acme.mock.directory> to use invalid certificates without any verification (great for debugging). ZeroSSL might be better in the future as it doesn't have rate limits and doesn't clash with the official Codeberg certificates (which use Let's Encrypt), but I couldn't get it to work yet.
- `ACME_EAB_KID` & `ACME_EAB_HMAC` (default: don't use EAB): EAB credentials, for example for ZeroSSL.
- `ACME_EMAIL` (default: `noreply@example.email`): Set the email sent to the ACME API server to receive, for example, renewal reminders.
- `ACME_USE_RATE_LIMITS` (default: true): Set this to false to disable rate limits, e.g. with ZeroSSL.
- `DNS_PROVIDER` (default: use self-signed certificate): Code of the ACME DNS provider for the main domain wildcard. See <https://go-acme.github.io/lego/dns/> for available values & additional environment variables.
- `ENABLE_HTTP_SERVER` (default: false): Set this to true to enable the HTTP-01 challenge and redirect all other HTTP requests to HTTPS. Currently only works with port 80.
- `GITEA_API_TOKEN` (default: empty): API token for the Gitea instance to access non-public (e.g. limited) repos.
- `GITEA_ROOT` (default: `https://codeberg.org`): root of the upstream Gitea instance.
- `HOST` & `PORT` (default: `[::]` & `443`): listen address.
- `LOG_LEVEL` (default: warn): Set this to specify the level of logging.
- `NO_DNS_01` (default: `false`): Disable the use of ACME DNS. This means that the wildcard certificate is self-signed and all domains and subdomains will have a distinct certificate. Because this may lead to a rate limit from the ACME provider, this option is not recommended for Gitea/Forgejo instances with open registrations or a great number of users/orgs.
- `PAGES_DOMAIN` (default: `codeberg.page`): main domain for pages.
- `RAW_DOMAIN` (default: `raw.codeberg.page`): domain for raw resources (must be a subdomain of `PAGES_DOMAIN`).

### Custom Error Page

A custom error page template can be served by creating `custom/error.html`.
Data available to the template includes the following fields (a rendering sketch follows the list):

- `{{ .StatusCode }}`: The HTTP status code (e.g. 404)
- `{{ .StatusText }}`: The textual name associated with the status code (e.g. Not Found)
- `{{ .Message }}`: The reason for the error

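As a minimal sketch (not part of this repository's files), such a template could be rendered with Go's `html/template`; the `ErrorData` struct and `serveError` handler below are hypothetical, and only their three fields come from the list above.

```go
// Hypothetical sketch of serving custom/error.html with the documented fields.
package main

import (
	"html/template"
	"net/http"
)

type ErrorData struct {
	StatusCode int    // e.g. 404
	StatusText string // e.g. "Not Found"
	Message    string // the reason for the error
}

var errorTmpl = template.Must(template.ParseFiles("custom/error.html"))

func serveError(w http.ResponseWriter, status int, message string) {
	w.WriteHeader(status)
	_ = errorTmpl.Execute(w, ErrorData{
		StatusCode: status,
		StatusText: http.StatusText(status),
		Message:    message,
	})
}

func main() {
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		serveError(w, http.StatusNotFound, "no such page in this repository")
	})
	_ = http.ListenAndServe("localhost:8080", nil)
}
```
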
## Contributing to the development

The Codeberg team is very open to your contribution.
Since we are working nicely as a team, it might be hard at times to get started
(still, check out the issues; we always aim to have some things to get you started).

If you have any questions, want to work on a feature or could imagine collaborating with us for some time,
feel free to ping us in an issue or in a general [Matrix chat room](#chat-for-admins--devs).

You can also contact the maintainer(s) of this project:

- [crapStone](https://codeberg.org/crapStone) [(Matrix)](https://matrix.to/#/@crapstone:obermui.de)

Previous maintainers:

- [momar](https://codeberg.org/momar) [(Matrix)](https://matrix.to/#/@moritz:wuks.space)
- [6543](https://codeberg.org/6543) [(Matrix)](https://matrix.to/#/@marddl:obermui.de)

### First steps

The code of this repository is split into several modules.
The [architecture is explained](https://codeberg.org/Codeberg/pages-server/wiki/Architecture) in the wiki.

The `cmd` folder holds the data necessary for interacting with the service via the CLI.
The heart of the software lives in the `server` folder and is split into several modules.

Again: feel free to get in touch with us for any questions that might arise.
Thank you very much.

### Test Server

Make sure you have [golang](https://go.dev) v1.21 or newer and [just](https://just.systems/man/en/) installed.

Run `just dev`.
Now these pages should work:

- <https://cb_pages_tests.localhost.mock.directory:4430/images/827679288a.jpg>
- <https://momar.localhost.mock.directory:4430/ci-testing/>
- <https://momar.localhost.mock.directory:4430/pag/@master/>
- <https://mock-pages.codeberg-test.org:4430/README.md>

### Profiling

> This section is just a collection of commands for quick reference. If you want to learn more about profiling, read [this article](https://go.dev/doc/diagnostics) or search for `golang profiling`.

First enable profiling by supplying the CLI argument `--enable-profiling` or setting the environment variable `ENABLE_PROFILING`.

Get CPU and memory stats:

```bash
go tool pprof -raw -output=cpu.txt 'http://localhost:9999/debug/pprof/profile?seconds=60' &
curl -so mem.txt 'http://localhost:9999/debug/pprof/heap?seconds=60'
```

More endpoints are documented here: <https://pkg.go.dev/net/http/pprof>

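For context, profiling endpoints like `/debug/pprof/profile` and `/debug/pprof/heap` are conventionally exposed in Go by importing `net/http/pprof` and starting a small HTTP listener. A minimal sketch, assuming the default `localhost:9999` profiling address that also appears as the flag default in cli/flags.go below:

```go
// Minimal sketch of exposing the standard Go pprof endpoints.
// Importing net/http/pprof registers its handlers on http.DefaultServeMux.
package main

import (
	"log"
	"net/http"
	_ "net/http/pprof" // registers /debug/pprof/... handlers
)

func main() {
	// "localhost:9999" matches the default of the profiling-address flag.
	log.Println(http.ListenAndServe("localhost:9999", nil))
}
```
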
cli/certs.go (new file, 69 lines)
@@ -0,0 +1,69 @@
package cli

import (
	"fmt"
	"time"

	"github.com/urfave/cli/v2"
)

var Certs = &cli.Command{
	Name:  "certs",
	Usage: "manage certs manually",
	Subcommands: []*cli.Command{
		{
			Name:   "list",
			Usage:  "list all certificates in the database",
			Action: listCerts,
		},
		{
			Name:   "remove",
			Usage:  "remove a certificate from the database",
			Action: removeCert,
		},
	},
	Flags: CertStorageFlags,
}

func listCerts(ctx *cli.Context) error {
	certDB, closeFn, err := OpenCertDB(ctx)
	if err != nil {
		return err
	}
	defer closeFn()

	items, err := certDB.Items(0, 0)
	if err != nil {
		return err
	}

	fmt.Printf("Domain\tValidTill\n\n")
	for _, cert := range items {
		fmt.Printf("%s\t%s\n",
			cert.Domain,
			time.Unix(cert.ValidTill, 0).Format(time.RFC3339))
	}
	return nil
}

func removeCert(ctx *cli.Context) error {
	if ctx.Args().Len() < 1 {
		return fmt.Errorf("'certs remove' requires at least one domain as an argument")
	}

	domains := ctx.Args().Slice()

	certDB, closeFn, err := OpenCertDB(ctx)
	if err != nil {
		return err
	}
	defer closeFn()

	for _, domain := range domains {
		fmt.Printf("Removing domain %s from the database...\n", domain)
		if err := certDB.Delete(domain); err != nil {
			return err
		}
	}
	return nil
}

cli/flags.go (new file, 214 lines)
@@ -0,0 +1,214 @@
package cli

import (
	"github.com/urfave/cli/v2"
)

var (
	CertStorageFlags = []cli.Flag{
		&cli.StringFlag{
			Name:    "db-type",
			Usage:   "Specify the database driver. Valid options are \"sqlite3\", \"mysql\" and \"postgres\". Read more at https://xorm.io",
			Value:   "sqlite3",
			EnvVars: []string{"DB_TYPE"},
		},
		&cli.StringFlag{
			Name:    "db-conn",
			Usage:   "Specify the database connection. For \"sqlite3\" it's the filepath. Read more at https://go.dev/doc/tutorial/database-access",
			Value:   "certs.sqlite",
			EnvVars: []string{"DB_CONN"},
		},
	}

	ServerFlags = append(CertStorageFlags, []cli.Flag{
		// #############
		// ### Forge ###
		// #############
		// ForgeRoot specifies the root URL of the Forge instance, without a trailing slash.
		&cli.StringFlag{
			Name:    "forge-root",
			Aliases: []string{"gitea-root"},
			Usage:   "specifies the root URL of the Forgejo/Gitea instance, without a trailing slash.",
			EnvVars: []string{"FORGE_ROOT", "GITEA_ROOT"},
		},
		// ForgeApiToken specifies an api token for the Forge instance
		&cli.StringFlag{
			Name:    "forge-api-token",
			Aliases: []string{"gitea-api-token"},
			Usage:   "specifies an api token for the Forgejo/Gitea instance",
			EnvVars: []string{"FORGE_API_TOKEN", "GITEA_API_TOKEN"},
		},
		&cli.BoolFlag{
			Name:    "enable-lfs-support",
			Usage:   "enable lfs support, gitea must be version v1.17.0 or higher",
			EnvVars: []string{"ENABLE_LFS_SUPPORT"},
			Value:   false,
		},
		&cli.BoolFlag{
			Name:    "enable-symlink-support",
			Usage:   "follow symlinks if enabled, gitea must be version v1.18.0 or higher",
			EnvVars: []string{"ENABLE_SYMLINK_SUPPORT"},
			Value:   false,
		},
		&cli.StringFlag{
			Name:    "default-mime-type",
			Usage:   "specifies the default mime type for files that don't have a specific mime type.",
			EnvVars: []string{"DEFAULT_MIME_TYPE"},
			Value:   "application/octet-stream",
		},
		&cli.StringSliceFlag{
			Name:    "forbidden-mime-types",
			Usage:   "specifies the forbidden mime types. Use this flag multiple times for multiple mime types.",
			EnvVars: []string{"FORBIDDEN_MIME_TYPES"},
		},

		// ###########################
		// ### Page Server Domains ###
		// ###########################
		// MainDomainSuffix specifies the main domain (starting with a dot) for which subdomains shall be served as static
		// pages, or used for comparison in CNAME lookups. Static pages can be accessed through
		// https://{owner}.{MainDomain}[/{repo}], with repo defaulting to "pages".
		&cli.StringFlag{
			Name:    "pages-domain",
			Usage:   "specifies the main domain (starting with a dot) for which subdomains shall be served as static pages",
			EnvVars: []string{"PAGES_DOMAIN"},
		},
		// RawDomain specifies the domain from which raw repository content shall be served in the following format:
		// https://{RawDomain}/{owner}/{repo}[/{branch|tag|commit}/{version}]/{filepath...}
		// (set to []byte(nil) to disable raw content hosting)
		&cli.StringFlag{
			Name:    "raw-domain",
			Usage:   "specifies the domain from which raw repository content shall be served; if not set, raw content hosting is disabled",
			EnvVars: []string{"RAW_DOMAIN"},
		},

		// #########################
		// ### Page Server Setup ###
		// #########################
		&cli.StringFlag{
			Name:    "host",
			Usage:   "specifies host of listening address",
			EnvVars: []string{"HOST"},
			Value:   "[::]",
		},
		&cli.UintFlag{
			Name:    "port",
			Usage:   "specifies the https port to listen to ssl requests",
			EnvVars: []string{"PORT", "HTTPS_PORT"},
			Value:   443,
		},
		&cli.UintFlag{
			Name:    "http-port",
			Usage:   "specifies the http port, you also have to enable the http server via ENABLE_HTTP_SERVER=true",
			EnvVars: []string{"HTTP_PORT"},
			Value:   80,
		},
		&cli.BoolFlag{
			Name:    "enable-http-server",
			Usage:   "start a http server to redirect to https and respond to http acme challenges",
			EnvVars: []string{"ENABLE_HTTP_SERVER"},
			Value:   false,
		},
		&cli.BoolFlag{
			Name:    "use-proxy-protocol",
			Usage:   "use the proxy protocol",
			EnvVars: []string{"USE_PROXY_PROTOCOL"},
			Value:   false,
		},

		// Default branches to fetch assets from
		&cli.StringSliceFlag{
			Name:    "pages-branch",
			Usage:   "define a branch to fetch assets from. Use this flag multiple times for multiple branches.",
			EnvVars: []string{"PAGES_BRANCHES"},
			Value:   cli.NewStringSlice("pages"),
		},

		&cli.StringSliceFlag{
			Name:    "allowed-cors-domains",
			Usage:   "specify allowed CORS domains. Use this flag multiple times for multiple domains.",
			EnvVars: []string{"ALLOWED_CORS_DOMAINS"},
		},
		&cli.StringSliceFlag{
			Name:    "blacklisted-paths",
			Usage:   "return an error on these url paths. Use this flag multiple times for multiple paths.",
			EnvVars: []string{"BLACKLISTED_PATHS"},
		},

		&cli.StringFlag{
			Name:    "log-level",
			Value:   "warn",
			Usage:   "specify at which log level should be logged. Possible options: info, warn, error, fatal",
			EnvVars: []string{"LOG_LEVEL"},
		},
		&cli.StringFlag{
			Name:    "config-file",
			Usage:   "specify the location of the config file",
			Aliases: []string{"config"},
			EnvVars: []string{"CONFIG_FILE"},
		},

		&cli.BoolFlag{
			Name:    "enable-profiling",
			Usage:   "enables the go http profiling endpoints",
			EnvVars: []string{"ENABLE_PROFILING"},
		},
		&cli.StringFlag{
			Name:    "profiling-address",
			Usage:   "specify ip address and port the profiling server should listen on",
			EnvVars: []string{"PROFILING_ADDRESS"},
			Value:   "localhost:9999",
		},

		// ############################
		// ### ACME Client Settings ###
		// ############################
		&cli.StringFlag{
			Name:    "acme-api-endpoint",
			EnvVars: []string{"ACME_API"},
			Value:   "https://acme-v02.api.letsencrypt.org/directory",
		},
		&cli.StringFlag{
			Name:    "acme-email",
			EnvVars: []string{"ACME_EMAIL"},
			Value:   "noreply@example.email",
		},
		&cli.BoolFlag{
			Name: "acme-use-rate-limits",
			// TODO: Usage
			EnvVars: []string{"ACME_USE_RATE_LIMITS"},
			Value:   true,
		},
		&cli.BoolFlag{
			Name:    "acme-accept-terms",
			Usage:   "To accept the ACME ToS",
			EnvVars: []string{"ACME_ACCEPT_TERMS"},
		},
		&cli.StringFlag{
			Name:    "acme-eab-kid",
			Usage:   "Register the current account to the ACME server with external binding.",
			EnvVars: []string{"ACME_EAB_KID"},
		},
		&cli.StringFlag{
			Name:    "acme-eab-hmac",
			Usage:   "Register the current account to the ACME server with external binding.",
			EnvVars: []string{"ACME_EAB_HMAC"},
		},
		&cli.StringFlag{
			Name:    "dns-provider",
			Usage:   "Use DNS-Challenge for main domain. Read more at: https://go-acme.github.io/lego/dns/",
			EnvVars: []string{"DNS_PROVIDER"},
		},
		&cli.BoolFlag{
			Name:    "no-dns-01",
			Usage:   "Always use individual certificates instead of a DNS-01 wild card certificate",
			EnvVars: []string{"NO_DNS_01"},
		},
		&cli.StringFlag{
			Name:    "acme-account-config",
			Usage:   "json file of acme account",
			Value:   "acme-account.json",
			EnvVars: []string{"ACME_ACCOUNT_CONFIG"},
		},
	}...)
)

cli/setup.go (new file, 39 lines)
@@ -0,0 +1,39 @@
package cli

import (
	"fmt"

	"github.com/rs/zerolog/log"
	"github.com/urfave/cli/v2"

	"codeberg.org/codeberg/pages/server/database"
	"codeberg.org/codeberg/pages/server/version"
)

func CreatePagesApp() *cli.App {
	app := cli.NewApp()
	app.Name = "pages-server"
	app.Version = version.Version
	app.Usage = "pages server"
	app.Flags = ServerFlags
	app.Commands = []*cli.Command{
		Certs,
	}

	return app
}

func OpenCertDB(ctx *cli.Context) (certDB database.CertDB, closeFn func(), err error) {
	certDB, err = database.NewXormDB(ctx.String("db-type"), ctx.String("db-conn"))
	if err != nil {
		return nil, nil, fmt.Errorf("could not connect to database: %w", err)
	}

	closeFn = func() {
		// zerolog events are only emitted once Msg/Send is called
		if err := certDB.Close(); err != nil {
			log.Error().Err(err).Msg("failed to close certificate database")
		}
	}

	return certDB, closeFn, nil
}

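The repository's actual entry point is not part of this excerpt. As a hedged usage sketch, a `main` package could wire `CreatePagesApp` up roughly like this; registering the server's run `Action` is omitted here.

```go
// Hypothetical entry point showing how CreatePagesApp might be used.
// The real main.go is not shown in this diff excerpt.
package main

import (
	"os"

	"github.com/rs/zerolog/log"

	"codeberg.org/codeberg/pages/cli"
)

func main() {
	app := cli.CreatePagesApp()
	// A real entry point would also set app.Action to the server's run function.
	if err := app.Run(os.Args); err != nil {
		log.Fatal().Err(err).Msg("pages-server failed")
	}
}
```
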
config/assets/test_config.toml (new file, 33 lines)
@@ -0,0 +1,33 @@
logLevel = 'trace'

[server]
host = '127.0.0.1'
port = 443
httpPort = 80
httpServerEnabled = true
mainDomain = 'codeberg.page'
rawDomain = 'raw.codeberg.page'
allowedCorsDomains = ['fonts.codeberg.org', 'design.codeberg.org']
blacklistedPaths = ['do/not/use']

[forge]
root = 'https://codeberg.org'
token = 'XXXXXXXX'
lfsEnabled = true
followSymlinks = true
defaultMimeType = "application/wasm"
forbiddenMimeTypes = ["text/html"]

[database]
type = 'sqlite'
conn = 'certs.sqlite'

[ACME]
email = 'a@b.c'
apiEndpoint = 'https://example.com'
acceptTerms = false
useRateLimits = true
eab_hmac = 'asdf'
eab_kid = 'qwer'
dnsProvider = 'cloudflare.com'
accountConfigFile = 'nope'

48
config/config.go
Normal file
@@ -0,0 +1,48 @@
package config

type Config struct {
	LogLevel string `default:"warn"`
	Server   ServerConfig
	Forge    ForgeConfig
	Database DatabaseConfig
	ACME     ACMEConfig
}

type ServerConfig struct {
	Host               string `default:"[::]"`
	Port               uint16 `default:"443"`
	HttpPort           uint16 `default:"80"`
	HttpServerEnabled  bool   `default:"true"`
	UseProxyProtocol   bool   `default:"false"`
	MainDomain         string
	RawDomain          string
	PagesBranches      []string
	AllowedCorsDomains []string
	BlacklistedPaths   []string
}

type ForgeConfig struct {
	Root               string
	Token              string
	LFSEnabled         bool   `default:"false"`
	FollowSymlinks     bool   `default:"false"`
	DefaultMimeType    string `default:"application/octet-stream"`
	ForbiddenMimeTypes []string
}

type DatabaseConfig struct {
	Type string `default:"sqlite3"`
	Conn string `default:"certs.sqlite"`
}

type ACMEConfig struct {
	Email             string
	APIEndpoint       string `default:"https://acme-v02.api.letsencrypt.org/directory"`
	AcceptTerms       bool   `default:"false"`
	UseRateLimits     bool   `default:"true"`
	EAB_HMAC          string
	EAB_KID           string
	DNSProvider       string
	NoDNS01           bool   `default:"false"`
	AccountConfigFile string `default:"acme-account.json"`
}
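As a small, hypothetical illustration (not part of the diff), the `default:` struct tags above are resolved by `NewDefaultConfig` from `config/setup.go`, shown next. An example-style function in the same package (it would live in a `_test.go` file to be checked by `go test`) would observe these values:

```go
package config

import "fmt"

// ExampleNewDefaultConfig is an illustrative snippet showing what the
// `default:` tags above resolve to once NewDefaultConfig (defined in
// config/setup.go, the next file in this diff) has been applied.
func ExampleNewDefaultConfig() {
	cfg := NewDefaultConfig()
	fmt.Println(cfg.LogLevel)
	fmt.Println(cfg.Server.Port)
	fmt.Println(cfg.Database.Type)
	fmt.Println(cfg.Server.PagesBranches)
	// Output:
	// warn
	// 443
	// sqlite3
	// [main master pages]
}
```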
154
config/setup.go
Normal file
@@ -0,0 +1,154 @@
package config

import (
	"os"
	"path"

	"github.com/creasty/defaults"
	"github.com/pelletier/go-toml/v2"
	"github.com/rs/zerolog/log"
	"github.com/urfave/cli/v2"
)

var ALWAYS_BLACKLISTED_PATHS = []string{
	"/.well-known/acme-challenge/",
}

func NewDefaultConfig() Config {
	config := Config{}
	if err := defaults.Set(&config); err != nil {
		panic(err)
	}

	// defaults does not support setting arrays from strings
	config.Server.PagesBranches = []string{"main", "master", "pages"}

	return config
}

func ReadConfig(ctx *cli.Context) (*Config, error) {
	config := NewDefaultConfig()
	// if no config file is given as argument, return the default config
	if !ctx.IsSet("config-file") {
		return &config, nil
	}

	configFile := path.Clean(ctx.String("config-file"))

	log.Debug().Str("config-file", configFile).Msg("reading config file")
	content, err := os.ReadFile(configFile)
	if err != nil {
		return nil, err
	}

	err = toml.Unmarshal(content, &config)
	return &config, err
}

func MergeConfig(ctx *cli.Context, config *Config) {
	if ctx.IsSet("log-level") {
		config.LogLevel = ctx.String("log-level")
	}

	mergeServerConfig(ctx, &config.Server)
	mergeForgeConfig(ctx, &config.Forge)
	mergeDatabaseConfig(ctx, &config.Database)
	mergeACMEConfig(ctx, &config.ACME)
}

func mergeServerConfig(ctx *cli.Context, config *ServerConfig) {
	if ctx.IsSet("host") {
		config.Host = ctx.String("host")
	}
	if ctx.IsSet("port") {
		config.Port = uint16(ctx.Uint("port"))
	}
	if ctx.IsSet("http-port") {
		config.HttpPort = uint16(ctx.Uint("http-port"))
	}
	if ctx.IsSet("enable-http-server") {
		config.HttpServerEnabled = ctx.Bool("enable-http-server")
	}
	if ctx.IsSet("use-proxy-protocol") {
		config.UseProxyProtocol = ctx.Bool("use-proxy-protocol")
	}

	if ctx.IsSet("pages-domain") {
		config.MainDomain = ctx.String("pages-domain")
	}
	if ctx.IsSet("raw-domain") {
		config.RawDomain = ctx.String("raw-domain")
	}
	if ctx.IsSet("pages-branch") {
		config.PagesBranches = ctx.StringSlice("pages-branch")
	}
	if ctx.IsSet("allowed-cors-domains") {
		config.AllowedCorsDomains = ctx.StringSlice("allowed-cors-domains")
	}
	if ctx.IsSet("blacklisted-paths") {
		config.BlacklistedPaths = ctx.StringSlice("blacklisted-paths")
	}

	// add the paths that should always be blacklisted
	config.BlacklistedPaths = append(config.BlacklistedPaths, ALWAYS_BLACKLISTED_PATHS...)
}

func mergeForgeConfig(ctx *cli.Context, config *ForgeConfig) {
	if ctx.IsSet("forge-root") {
		config.Root = ctx.String("forge-root")
	}
	if ctx.IsSet("forge-api-token") {
		config.Token = ctx.String("forge-api-token")
	}
	if ctx.IsSet("enable-lfs-support") {
		config.LFSEnabled = ctx.Bool("enable-lfs-support")
	}
	if ctx.IsSet("enable-symlink-support") {
		config.FollowSymlinks = ctx.Bool("enable-symlink-support")
	}
	if ctx.IsSet("default-mime-type") {
		config.DefaultMimeType = ctx.String("default-mime-type")
	}
	if ctx.IsSet("forbidden-mime-types") {
		config.ForbiddenMimeTypes = ctx.StringSlice("forbidden-mime-types")
	}
}

func mergeDatabaseConfig(ctx *cli.Context, config *DatabaseConfig) {
	if ctx.IsSet("db-type") {
		config.Type = ctx.String("db-type")
	}
	if ctx.IsSet("db-conn") {
		config.Conn = ctx.String("db-conn")
	}
}

func mergeACMEConfig(ctx *cli.Context, config *ACMEConfig) {
	if ctx.IsSet("acme-email") {
		config.Email = ctx.String("acme-email")
	}
	if ctx.IsSet("acme-api-endpoint") {
		config.APIEndpoint = ctx.String("acme-api-endpoint")
	}
	if ctx.IsSet("acme-accept-terms") {
		config.AcceptTerms = ctx.Bool("acme-accept-terms")
	}
	if ctx.IsSet("acme-use-rate-limits") {
		config.UseRateLimits = ctx.Bool("acme-use-rate-limits")
	}
	if ctx.IsSet("acme-eab-hmac") {
		config.EAB_HMAC = ctx.String("acme-eab-hmac")
	}
	if ctx.IsSet("acme-eab-kid") {
		config.EAB_KID = ctx.String("acme-eab-kid")
	}
	if ctx.IsSet("dns-provider") {
		config.DNSProvider = ctx.String("dns-provider")
	}
	if ctx.IsSet("no-dns-01") {
		config.NoDNS01 = ctx.Bool("no-dns-01")
	}
	if ctx.IsSet("acme-account-config") {
		config.AccountConfigFile = ctx.String("acme-account-config")
	}
}
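Putting the layers together, here is a hedged sketch of the resulting precedence: struct-tag defaults, then the TOML file given via `--config-file`, then explicitly set CLI flags or their environment variables. The helper name and its placement are hypothetical; `ReadConfig` and `MergeConfig` are the functions defined above.

```go
package main

import (
	"github.com/urfave/cli/v2"

	"codeberg.org/codeberg/pages/config"
)

// loadConfig is an illustrative helper (not part of the diff) showing the
// precedence implemented above: struct-tag defaults < --config-file (TOML) < CLI flags.
func loadConfig(ctx *cli.Context) (*config.Config, error) {
	// ReadConfig starts from NewDefaultConfig and, if --config-file is set,
	// unmarshals the TOML file over the defaults.
	cfg, err := config.ReadConfig(ctx)
	if err != nil {
		return nil, err
	}

	// MergeConfig copies only the flags that were explicitly set on the command
	// line (or via their EnvVars) over the file values, then appends the
	// always-blacklisted ACME challenge path.
	config.MergeConfig(ctx, cfg)
	return cfg, nil
}
```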
630
config/setup_test.go
Normal file
630
config/setup_test.go
Normal file
|
@ -0,0 +1,630 @@
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/urfave/cli/v2"
|
||||||
|
|
||||||
|
cmd "codeberg.org/codeberg/pages/cli"
|
||||||
|
)
|
||||||
|
|
||||||
|
func runApp(t *testing.T, fn func(*cli.Context) error, args []string) {
|
||||||
|
app := cmd.CreatePagesApp()
|
||||||
|
app.Action = fn
|
||||||
|
|
||||||
|
appCtx, appCancel := context.WithCancel(context.Background())
|
||||||
|
defer appCancel()
|
||||||
|
|
||||||
|
// os.Args always contains the binary name
|
||||||
|
args = append([]string{"testing"}, args...)
|
||||||
|
|
||||||
|
err := app.RunContext(appCtx, args)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// fixArrayFromCtx fixes the number of "changed" strings in a string slice according to the number of values in the context.
|
||||||
|
// This is a workaround because the cli library has a bug where the number of values in the context gets bigger the more tests are run.
|
||||||
|
func fixArrayFromCtx(ctx *cli.Context, key string, expected []string) []string {
|
||||||
|
if ctx.IsSet(key) {
|
||||||
|
ctxSlice := ctx.StringSlice(key)
|
||||||
|
|
||||||
|
if len(ctxSlice) > 1 {
|
||||||
|
for i := 1; i < len(ctxSlice); i++ {
|
||||||
|
expected = append([]string{"changed"}, expected...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return expected
|
||||||
|
}
|
||||||
|
|
||||||
|
func readTestConfig() (*Config, error) {
|
||||||
|
content, err := os.ReadFile("assets/test_config.toml")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedConfig := NewDefaultConfig()
|
||||||
|
err = toml.Unmarshal(content, &expectedConfig)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &expectedConfig, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReadConfigShouldReturnEmptyConfigWhenConfigArgEmpty(t *testing.T) {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg, err := ReadConfig(ctx)
|
||||||
|
expected := NewDefaultConfig()
|
||||||
|
assert.Equal(t, &expected, cfg)
|
||||||
|
|
||||||
|
return err
|
||||||
|
},
|
||||||
|
[]string{},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReadConfigShouldReturnConfigFromFileWhenConfigArgPresent(t *testing.T) {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg, err := ReadConfig(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedConfig, err := readTestConfig()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
[]string{"--config-file", "assets/test_config.toml"},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValuesReadFromConfigFileShouldBeOverwrittenByArgs(t *testing.T) {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg, err := ReadConfig(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
MergeConfig(ctx, cfg)
|
||||||
|
|
||||||
|
expectedConfig, err := readTestConfig()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedConfig.LogLevel = "debug"
|
||||||
|
expectedConfig.Forge.Root = "not-codeberg.org"
|
||||||
|
expectedConfig.ACME.AcceptTerms = true
|
||||||
|
expectedConfig.Server.Host = "172.17.0.2"
|
||||||
|
expectedConfig.Server.BlacklistedPaths = append(expectedConfig.Server.BlacklistedPaths, ALWAYS_BLACKLISTED_PATHS...)
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
[]string{
|
||||||
|
"--config-file", "assets/test_config.toml",
|
||||||
|
"--log-level", "debug",
|
||||||
|
"--forge-root", "not-codeberg.org",
|
||||||
|
"--acme-accept-terms",
|
||||||
|
"--host", "172.17.0.2",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeConfigShouldReplaceAllExistingValuesGivenAllArgsExist(t *testing.T) {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := &Config{
|
||||||
|
LogLevel: "original",
|
||||||
|
Server: ServerConfig{
|
||||||
|
Host: "original",
|
||||||
|
Port: 8080,
|
||||||
|
HttpPort: 80,
|
||||||
|
HttpServerEnabled: false,
|
||||||
|
MainDomain: "original",
|
||||||
|
RawDomain: "original",
|
||||||
|
PagesBranches: []string{"original"},
|
||||||
|
AllowedCorsDomains: []string{"original"},
|
||||||
|
BlacklistedPaths: []string{"original"},
|
||||||
|
},
|
||||||
|
Forge: ForgeConfig{
|
||||||
|
Root: "original",
|
||||||
|
Token: "original",
|
||||||
|
LFSEnabled: false,
|
||||||
|
FollowSymlinks: false,
|
||||||
|
DefaultMimeType: "original",
|
||||||
|
ForbiddenMimeTypes: []string{"original"},
|
||||||
|
},
|
||||||
|
Database: DatabaseConfig{
|
||||||
|
Type: "original",
|
||||||
|
Conn: "original",
|
||||||
|
},
|
||||||
|
ACME: ACMEConfig{
|
||||||
|
Email: "original",
|
||||||
|
APIEndpoint: "original",
|
||||||
|
AcceptTerms: false,
|
||||||
|
UseRateLimits: false,
|
||||||
|
EAB_HMAC: "original",
|
||||||
|
EAB_KID: "original",
|
||||||
|
DNSProvider: "original",
|
||||||
|
NoDNS01: false,
|
||||||
|
AccountConfigFile: "original",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
MergeConfig(ctx, cfg)
|
||||||
|
|
||||||
|
expectedConfig := &Config{
|
||||||
|
LogLevel: "changed",
|
||||||
|
Server: ServerConfig{
|
||||||
|
Host: "changed",
|
||||||
|
Port: 8443,
|
||||||
|
HttpPort: 443,
|
||||||
|
HttpServerEnabled: true,
|
||||||
|
MainDomain: "changed",
|
||||||
|
RawDomain: "changed",
|
||||||
|
PagesBranches: []string{"changed"},
|
||||||
|
AllowedCorsDomains: []string{"changed"},
|
||||||
|
BlacklistedPaths: append([]string{"changed"}, ALWAYS_BLACKLISTED_PATHS...),
|
||||||
|
},
|
||||||
|
Forge: ForgeConfig{
|
||||||
|
Root: "changed",
|
||||||
|
Token: "changed",
|
||||||
|
LFSEnabled: true,
|
||||||
|
FollowSymlinks: true,
|
||||||
|
DefaultMimeType: "changed",
|
||||||
|
ForbiddenMimeTypes: []string{"changed"},
|
||||||
|
},
|
||||||
|
Database: DatabaseConfig{
|
||||||
|
Type: "changed",
|
||||||
|
Conn: "changed",
|
||||||
|
},
|
||||||
|
ACME: ACMEConfig{
|
||||||
|
Email: "changed",
|
||||||
|
APIEndpoint: "changed",
|
||||||
|
AcceptTerms: true,
|
||||||
|
UseRateLimits: true,
|
||||||
|
EAB_HMAC: "changed",
|
||||||
|
EAB_KID: "changed",
|
||||||
|
DNSProvider: "changed",
|
||||||
|
NoDNS01: true,
|
||||||
|
AccountConfigFile: "changed",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
[]string{
|
||||||
|
"--log-level", "changed",
|
||||||
|
// Server
|
||||||
|
"--pages-domain", "changed",
|
||||||
|
"--raw-domain", "changed",
|
||||||
|
"--allowed-cors-domains", "changed",
|
||||||
|
"--blacklisted-paths", "changed",
|
||||||
|
"--pages-branch", "changed",
|
||||||
|
"--host", "changed",
|
||||||
|
"--port", "8443",
|
||||||
|
"--http-port", "443",
|
||||||
|
"--enable-http-server",
|
||||||
|
// Forge
|
||||||
|
"--forge-root", "changed",
|
||||||
|
"--forge-api-token", "changed",
|
||||||
|
"--enable-lfs-support",
|
||||||
|
"--enable-symlink-support",
|
||||||
|
"--default-mime-type", "changed",
|
||||||
|
"--forbidden-mime-types", "changed",
|
||||||
|
// Database
|
||||||
|
"--db-type", "changed",
|
||||||
|
"--db-conn", "changed",
|
||||||
|
// ACME
|
||||||
|
"--acme-email", "changed",
|
||||||
|
"--acme-api-endpoint", "changed",
|
||||||
|
"--acme-accept-terms",
|
||||||
|
"--acme-use-rate-limits",
|
||||||
|
"--acme-eab-hmac", "changed",
|
||||||
|
"--acme-eab-kid", "changed",
|
||||||
|
"--dns-provider", "changed",
|
||||||
|
"--no-dns-01",
|
||||||
|
"--acme-account-config", "changed",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeServerConfigShouldAddDefaultBlacklistedPathsToBlacklistedPaths(t *testing.T) {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := &ServerConfig{}
|
||||||
|
mergeServerConfig(ctx, cfg)
|
||||||
|
|
||||||
|
expected := ALWAYS_BLACKLISTED_PATHS
|
||||||
|
assert.Equal(t, expected, cfg.BlacklistedPaths)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
[]string{},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeServerConfigShouldReplaceAllExistingValuesGivenAllArgsExist(t *testing.T) {
|
||||||
|
for range []uint8{0, 1} {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := &ServerConfig{
|
||||||
|
Host: "original",
|
||||||
|
Port: 8080,
|
||||||
|
HttpPort: 80,
|
||||||
|
HttpServerEnabled: false,
|
||||||
|
MainDomain: "original",
|
||||||
|
RawDomain: "original",
|
||||||
|
AllowedCorsDomains: []string{"original"},
|
||||||
|
BlacklistedPaths: []string{"original"},
|
||||||
|
}
|
||||||
|
|
||||||
|
mergeServerConfig(ctx, cfg)
|
||||||
|
|
||||||
|
expectedConfig := &ServerConfig{
|
||||||
|
Host: "changed",
|
||||||
|
Port: 8443,
|
||||||
|
HttpPort: 443,
|
||||||
|
HttpServerEnabled: true,
|
||||||
|
MainDomain: "changed",
|
||||||
|
RawDomain: "changed",
|
||||||
|
AllowedCorsDomains: fixArrayFromCtx(ctx, "allowed-cors-domains", []string{"changed"}),
|
||||||
|
BlacklistedPaths: fixArrayFromCtx(ctx, "blacklisted-paths", append([]string{"changed"}, ALWAYS_BLACKLISTED_PATHS...)),
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
[]string{
|
||||||
|
"--pages-domain", "changed",
|
||||||
|
"--raw-domain", "changed",
|
||||||
|
"--allowed-cors-domains", "changed",
|
||||||
|
"--blacklisted-paths", "changed",
|
||||||
|
"--host", "changed",
|
||||||
|
"--port", "8443",
|
||||||
|
"--http-port", "443",
|
||||||
|
"--enable-http-server",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeServerConfigShouldReplaceOnlyOneValueExistingValueGivenOnlyOneArgExists(t *testing.T) {
|
||||||
|
type testValuePair struct {
|
||||||
|
args []string
|
||||||
|
callback func(*ServerConfig)
|
||||||
|
}
|
||||||
|
testValuePairs := []testValuePair{
|
||||||
|
{args: []string{"--host", "changed"}, callback: func(sc *ServerConfig) { sc.Host = "changed" }},
|
||||||
|
{args: []string{"--port", "8443"}, callback: func(sc *ServerConfig) { sc.Port = 8443 }},
|
||||||
|
{args: []string{"--http-port", "443"}, callback: func(sc *ServerConfig) { sc.HttpPort = 443 }},
|
||||||
|
{args: []string{"--enable-http-server"}, callback: func(sc *ServerConfig) { sc.HttpServerEnabled = true }},
|
||||||
|
{args: []string{"--pages-domain", "changed"}, callback: func(sc *ServerConfig) { sc.MainDomain = "changed" }},
|
||||||
|
{args: []string{"--raw-domain", "changed"}, callback: func(sc *ServerConfig) { sc.RawDomain = "changed" }},
|
||||||
|
{args: []string{"--pages-branch", "changed"}, callback: func(sc *ServerConfig) { sc.PagesBranches = []string{"changed"} }},
|
||||||
|
{args: []string{"--allowed-cors-domains", "changed"}, callback: func(sc *ServerConfig) { sc.AllowedCorsDomains = []string{"changed"} }},
|
||||||
|
{args: []string{"--blacklisted-paths", "changed"}, callback: func(sc *ServerConfig) { sc.BlacklistedPaths = []string{"changed"} }},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, pair := range testValuePairs {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := ServerConfig{
|
||||||
|
Host: "original",
|
||||||
|
Port: 8080,
|
||||||
|
HttpPort: 80,
|
||||||
|
HttpServerEnabled: false,
|
||||||
|
MainDomain: "original",
|
||||||
|
RawDomain: "original",
|
||||||
|
PagesBranches: []string{"original"},
|
||||||
|
AllowedCorsDomains: []string{"original"},
|
||||||
|
BlacklistedPaths: []string{"original"},
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedConfig := cfg
|
||||||
|
pair.callback(&expectedConfig)
|
||||||
|
expectedConfig.BlacklistedPaths = append(expectedConfig.BlacklistedPaths, ALWAYS_BLACKLISTED_PATHS...)
|
||||||
|
|
||||||
|
expectedConfig.PagesBranches = fixArrayFromCtx(ctx, "pages-branch", expectedConfig.PagesBranches)
|
||||||
|
expectedConfig.AllowedCorsDomains = fixArrayFromCtx(ctx, "allowed-cors-domains", expectedConfig.AllowedCorsDomains)
|
||||||
|
expectedConfig.BlacklistedPaths = fixArrayFromCtx(ctx, "blacklisted-paths", expectedConfig.BlacklistedPaths)
|
||||||
|
|
||||||
|
mergeServerConfig(ctx, &cfg)
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
pair.args,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeForgeConfigShouldReplaceAllExistingValuesGivenAllArgsExist(t *testing.T) {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := &ForgeConfig{
|
||||||
|
Root: "original",
|
||||||
|
Token: "original",
|
||||||
|
LFSEnabled: false,
|
||||||
|
FollowSymlinks: false,
|
||||||
|
DefaultMimeType: "original",
|
||||||
|
ForbiddenMimeTypes: []string{"original"},
|
||||||
|
}
|
||||||
|
|
||||||
|
mergeForgeConfig(ctx, cfg)
|
||||||
|
|
||||||
|
expectedConfig := &ForgeConfig{
|
||||||
|
Root: "changed",
|
||||||
|
Token: "changed",
|
||||||
|
LFSEnabled: true,
|
||||||
|
FollowSymlinks: true,
|
||||||
|
DefaultMimeType: "changed",
|
||||||
|
ForbiddenMimeTypes: fixArrayFromCtx(ctx, "forbidden-mime-types", []string{"changed"}),
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
[]string{
|
||||||
|
"--forge-root", "changed",
|
||||||
|
"--forge-api-token", "changed",
|
||||||
|
"--enable-lfs-support",
|
||||||
|
"--enable-symlink-support",
|
||||||
|
"--default-mime-type", "changed",
|
||||||
|
"--forbidden-mime-types", "changed",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeForgeConfigShouldReplaceOnlyOneValueExistingValueGivenOnlyOneArgExists(t *testing.T) {
|
||||||
|
type testValuePair struct {
|
||||||
|
args []string
|
||||||
|
callback func(*ForgeConfig)
|
||||||
|
}
|
||||||
|
testValuePairs := []testValuePair{
|
||||||
|
{args: []string{"--forge-root", "changed"}, callback: func(gc *ForgeConfig) { gc.Root = "changed" }},
|
||||||
|
{args: []string{"--forge-api-token", "changed"}, callback: func(gc *ForgeConfig) { gc.Token = "changed" }},
|
||||||
|
{args: []string{"--enable-lfs-support"}, callback: func(gc *ForgeConfig) { gc.LFSEnabled = true }},
|
||||||
|
{args: []string{"--enable-symlink-support"}, callback: func(gc *ForgeConfig) { gc.FollowSymlinks = true }},
|
||||||
|
{args: []string{"--default-mime-type", "changed"}, callback: func(gc *ForgeConfig) { gc.DefaultMimeType = "changed" }},
|
||||||
|
{args: []string{"--forbidden-mime-types", "changed"}, callback: func(gc *ForgeConfig) { gc.ForbiddenMimeTypes = []string{"changed"} }},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, pair := range testValuePairs {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := ForgeConfig{
|
||||||
|
Root: "original",
|
||||||
|
Token: "original",
|
||||||
|
LFSEnabled: false,
|
||||||
|
FollowSymlinks: false,
|
||||||
|
DefaultMimeType: "original",
|
||||||
|
ForbiddenMimeTypes: []string{"original"},
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedConfig := cfg
|
||||||
|
pair.callback(&expectedConfig)
|
||||||
|
|
||||||
|
mergeForgeConfig(ctx, &cfg)
|
||||||
|
|
||||||
|
expectedConfig.ForbiddenMimeTypes = fixArrayFromCtx(ctx, "forbidden-mime-types", expectedConfig.ForbiddenMimeTypes)
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
pair.args,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeForgeConfigShouldReplaceValuesGivenGiteaOptionsExist(t *testing.T) {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := &ForgeConfig{
|
||||||
|
Root: "original",
|
||||||
|
Token: "original",
|
||||||
|
}
|
||||||
|
|
||||||
|
mergeForgeConfig(ctx, cfg)
|
||||||
|
|
||||||
|
expectedConfig := &ForgeConfig{
|
||||||
|
Root: "changed",
|
||||||
|
Token: "changed",
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
[]string{
|
||||||
|
"--gitea-root", "changed",
|
||||||
|
"--gitea-api-token", "changed",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeDatabaseConfigShouldReplaceAllExistingValuesGivenAllArgsExist(t *testing.T) {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := &DatabaseConfig{
|
||||||
|
Type: "original",
|
||||||
|
Conn: "original",
|
||||||
|
}
|
||||||
|
|
||||||
|
mergeDatabaseConfig(ctx, cfg)
|
||||||
|
|
||||||
|
expectedConfig := &DatabaseConfig{
|
||||||
|
Type: "changed",
|
||||||
|
Conn: "changed",
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
[]string{
|
||||||
|
"--db-type", "changed",
|
||||||
|
"--db-conn", "changed",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeDatabaseConfigShouldReplaceOnlyOneValueExistingValueGivenOnlyOneArgExists(t *testing.T) {
|
||||||
|
type testValuePair struct {
|
||||||
|
args []string
|
||||||
|
callback func(*DatabaseConfig)
|
||||||
|
}
|
||||||
|
testValuePairs := []testValuePair{
|
||||||
|
{args: []string{"--db-type", "changed"}, callback: func(gc *DatabaseConfig) { gc.Type = "changed" }},
|
||||||
|
{args: []string{"--db-conn", "changed"}, callback: func(gc *DatabaseConfig) { gc.Conn = "changed" }},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, pair := range testValuePairs {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := DatabaseConfig{
|
||||||
|
Type: "original",
|
||||||
|
Conn: "original",
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedConfig := cfg
|
||||||
|
pair.callback(&expectedConfig)
|
||||||
|
|
||||||
|
mergeDatabaseConfig(ctx, &cfg)
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
pair.args,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeACMEConfigShouldReplaceAllExistingValuesGivenAllArgsExist(t *testing.T) {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := &ACMEConfig{
|
||||||
|
Email: "original",
|
||||||
|
APIEndpoint: "original",
|
||||||
|
AcceptTerms: false,
|
||||||
|
UseRateLimits: false,
|
||||||
|
EAB_HMAC: "original",
|
||||||
|
EAB_KID: "original",
|
||||||
|
DNSProvider: "original",
|
||||||
|
NoDNS01: false,
|
||||||
|
AccountConfigFile: "original",
|
||||||
|
}
|
||||||
|
|
||||||
|
mergeACMEConfig(ctx, cfg)
|
||||||
|
|
||||||
|
expectedConfig := &ACMEConfig{
|
||||||
|
Email: "changed",
|
||||||
|
APIEndpoint: "changed",
|
||||||
|
AcceptTerms: true,
|
||||||
|
UseRateLimits: true,
|
||||||
|
EAB_HMAC: "changed",
|
||||||
|
EAB_KID: "changed",
|
||||||
|
DNSProvider: "changed",
|
||||||
|
NoDNS01: true,
|
||||||
|
AccountConfigFile: "changed",
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
[]string{
|
||||||
|
"--acme-email", "changed",
|
||||||
|
"--acme-api-endpoint", "changed",
|
||||||
|
"--acme-accept-terms",
|
||||||
|
"--acme-use-rate-limits",
|
||||||
|
"--acme-eab-hmac", "changed",
|
||||||
|
"--acme-eab-kid", "changed",
|
||||||
|
"--dns-provider", "changed",
|
||||||
|
"--no-dns-01",
|
||||||
|
"--acme-account-config", "changed",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMergeACMEConfigShouldReplaceOnlyOneValueExistingValueGivenOnlyOneArgExists(t *testing.T) {
|
||||||
|
type testValuePair struct {
|
||||||
|
args []string
|
||||||
|
callback func(*ACMEConfig)
|
||||||
|
}
|
||||||
|
testValuePairs := []testValuePair{
|
||||||
|
{args: []string{"--acme-email", "changed"}, callback: func(gc *ACMEConfig) { gc.Email = "changed" }},
|
||||||
|
{args: []string{"--acme-api-endpoint", "changed"}, callback: func(gc *ACMEConfig) { gc.APIEndpoint = "changed" }},
|
||||||
|
{args: []string{"--acme-accept-terms"}, callback: func(gc *ACMEConfig) { gc.AcceptTerms = true }},
|
||||||
|
{args: []string{"--acme-use-rate-limits"}, callback: func(gc *ACMEConfig) { gc.UseRateLimits = true }},
|
||||||
|
{args: []string{"--acme-eab-hmac", "changed"}, callback: func(gc *ACMEConfig) { gc.EAB_HMAC = "changed" }},
|
||||||
|
{args: []string{"--acme-eab-kid", "changed"}, callback: func(gc *ACMEConfig) { gc.EAB_KID = "changed" }},
|
||||||
|
{args: []string{"--dns-provider", "changed"}, callback: func(gc *ACMEConfig) { gc.DNSProvider = "changed" }},
|
||||||
|
{args: []string{"--no-dns-01"}, callback: func(gc *ACMEConfig) { gc.NoDNS01 = true }},
|
||||||
|
{args: []string{"--acme-account-config", "changed"}, callback: func(gc *ACMEConfig) { gc.AccountConfigFile = "changed" }},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, pair := range testValuePairs {
|
||||||
|
runApp(
|
||||||
|
t,
|
||||||
|
func(ctx *cli.Context) error {
|
||||||
|
cfg := ACMEConfig{
|
||||||
|
Email: "original",
|
||||||
|
APIEndpoint: "original",
|
||||||
|
AcceptTerms: false,
|
||||||
|
UseRateLimits: false,
|
||||||
|
EAB_HMAC: "original",
|
||||||
|
EAB_KID: "original",
|
||||||
|
DNSProvider: "original",
|
||||||
|
AccountConfigFile: "original",
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedConfig := cfg
|
||||||
|
pair.callback(&expectedConfig)
|
||||||
|
|
||||||
|
mergeACMEConfig(ctx, &cfg)
|
||||||
|
|
||||||
|
assert.Equal(t, expectedConfig, cfg)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
pair.args,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
32
example_config.toml
Normal file
@@ -0,0 +1,32 @@
logLevel = 'debug'

[server]
host = '[::]'
port = 443
httpPort = 80
httpServerEnabled = true
mainDomain = 'codeberg.page'
rawDomain = 'raw.codeberg.page'
pagesBranches = ["pages"]
allowedCorsDomains = []
blacklistedPaths = []

[forge]
root = 'https://codeberg.org'
token = 'ASDF1234'
lfsEnabled = true
followSymlinks = true

[database]
type = 'sqlite'
conn = 'certs.sqlite'

[ACME]
email = 'noreply@example.email'
apiEndpoint = 'https://acme-v02.api.letsencrypt.org/directory'
acceptTerms = false
useRateLimits = false
eab_hmac = ''
eab_kid = ''
dnsProvider = ''
accountConfigFile = 'acme-account.json'
1
examples/haproxy-sni/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
*.dump
25
examples/haproxy-sni/README.md
Normal file
@@ -0,0 +1,25 @@
# HAProxy with SNI & Host-based rules

This is a proof of concept that lets HAProxy _either_ use SNI to forward connections to backends that present their own HTTPS certificates (which are then fully exposed to the client; HAProxy only proxies at the TCP level in that case), _or_ terminate HTTPS itself and use the Host header to route to backends that speak HTTP (or a new HTTPS connection).

## How it works

1. The `http_redirect_frontend` is only there to listen on port 80 and redirect every request to HTTPS.
2. The `https_sni_frontend` listens on port 443 and chooses a backend based on the SNI hostname of the TLS connection.
3. The `https_termination_backend` passes all requests to a unix socket (using the plain TCP data).
4. The `https_termination_frontend` listens on said unix socket, terminates the HTTPS connections and then chooses a backend based on the Host header.

In the example (see [haproxy.cfg](haproxy.cfg)), the `pages_backend` listens via HTTPS and provides its own HTTPS certificates, while the `gitea_backend` only speaks HTTP.

## How to test

```bash
docker-compose up &
./test.sh
docker-compose down

# For manual testing: all HTTPS URLs connect to localhost:443 & certificates are not verified.
./test.sh [curl-options...] <url>
```


8
examples/haproxy-sni/dhparam.pem
Normal file
8
examples/haproxy-sni/dhparam.pem
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
-----BEGIN DH PARAMETERS-----
|
||||||
|
MIIBCAKCAQEA//////////+t+FRYortKmq/cViAnPTzx2LnFg84tNpWp4TZBFGQz
|
||||||
|
+8yTnc4kmz75fS/jY2MMddj2gbICrsRhetPfHtXV/WVhJDP1H18GbtCFY2VVPe0a
|
||||||
|
87VXE15/V8k1mE8McODmi3fipona8+/och3xWKE2rec1MKzKT0g6eXq8CrGCsyT7
|
||||||
|
YdEIqUuyyOP7uWrat2DX9GgdT0Kj3jlN9K5W7edjcrsZCwenyO4KbXCeAvzhzffi
|
||||||
|
7MA0BM0oNC9hkXL+nOmFg/+OTxIy7vKBg8P+OxtMb61zO7X8vC7CIAXFjvGDfRaD
|
||||||
|
ssbzSibBsu/6iGtCOGEoXJf//////////wIBAg==
|
||||||
|
-----END DH PARAMETERS-----
|
21
examples/haproxy-sni/docker-compose.yml
Normal file
@@ -0,0 +1,21 @@
version: '3'
services:
  haproxy:
    image: haproxy
    ports: ['443:443']
    volumes:
      - ./haproxy.cfg:/usr/local/etc/haproxy/haproxy.cfg:ro
      - ./dhparam.pem:/etc/ssl/dhparam.pem:ro
      - ./haproxy-certificates:/etc/ssl/private/haproxy:ro
    cap_add:
      - NET_ADMIN
  gitea:
    image: caddy
    volumes:
      - ./gitea-www:/srv:ro
      - ./gitea.Caddyfile:/etc/caddy/Caddyfile:ro
  pages:
    image: caddy
    volumes:
      - ./pages-www:/srv:ro
      - ./pages.Caddyfile:/etc/caddy/Caddyfile:ro
1
examples/haproxy-sni/gitea-www/index.html
Normal file
@@ -0,0 +1 @@
Hello to Gitea!
3
examples/haproxy-sni/gitea.Caddyfile
Normal file
@@ -0,0 +1,3 @@
http://codeberg.org

file_server
26
examples/haproxy-sni/haproxy-certificates/codeberg.org.pem
Normal file
26
examples/haproxy-sni/haproxy-certificates/codeberg.org.pem
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
-----BEGIN CERTIFICATE-----
|
||||||
|
MIIEUDCCArigAwIBAgIRAMq3iwF963VGkzXFpbrpAtkwDQYJKoZIhvcNAQELBQAw
|
||||||
|
gYkxHjAcBgNVBAoTFW1rY2VydCBkZXZlbG9wbWVudCBDQTEvMC0GA1UECwwmbW9t
|
||||||
|
YXJAbW9yaXR6LWxhcHRvcCAoTW9yaXR6IE1hcnF1YXJkdCkxNjA0BgNVBAMMLW1r
|
||||||
|
Y2VydCBtb21hckBtb3JpdHotbGFwdG9wIChNb3JpdHogTWFycXVhcmR0KTAeFw0y
|
||||||
|
MTA2MDYwOTQ4NDFaFw0yMzA5MDYwOTQ4NDFaMFoxJzAlBgNVBAoTHm1rY2VydCBk
|
||||||
|
ZXZlbG9wbWVudCBjZXJ0aWZpY2F0ZTEvMC0GA1UECwwmbW9tYXJAbW9yaXR6LWxh
|
||||||
|
cHRvcCAoTW9yaXR6IE1hcnF1YXJkdCkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
|
||||||
|
ggEKAoIBAQCrSPSPM6grNZMG4ZKFCVxuXu+qkHdzSR96QUxi00VkIrkGPmyMN7q7
|
||||||
|
rUQJto9C9guJio3n7y3Bvr5kBjICjyWQd7GfkVuYgiYiG/O2hy1u1dIMCAB/Zhx1
|
||||||
|
F1mvRfn/Q4eZk2GSOUM+kC0xaNsn2827VGLOGFywUhRmu7J9QSQ3x1Pi5BME7eNC
|
||||||
|
AKup0CbrMrZSzKAEuYujLY0UYRxUrguMnV60wxJDCYE14YDxn9t0g7wQmzyndupk
|
||||||
|
AMLNJZX5L83RA6vUEuTVYBFcyB0Fu3oBLQ31y5QOZ7WF/QiO5cPicQJI/oyXlHq4
|
||||||
|
97BWS/H28kj1H5ZM8+5yhCYDtgj7dERpAgMBAAGjYTBfMA4GA1UdDwEB/wQEAwIF
|
||||||
|
oDATBgNVHSUEDDAKBggrBgEFBQcDATAfBgNVHSMEGDAWgBSOSXQZqt2gjbTOkE9Q
|
||||||
|
ddI8SYPqrDAXBgNVHREEEDAOggxjb2RlYmVyZy5vcmcwDQYJKoZIhvcNAQELBQAD
|
||||||
|
ggGBAJ/57DGqfuOa3aS/nLeAzl8komvyHuoOZi9yDK2Jqr+COxP58zSu8xwhiZfc
|
||||||
|
TJvIyB9QR7imGiQ7fEKby40q8uxGGx13oY7gQy7PG8hHk2dkfDZuSQacnpPRC3W0
|
||||||
|
0dL2CQIog6rw6jJHjxneitkX9FUmOnHIKy7LHya0Sthg36Z0Qw5JA3SCy6OQNepR
|
||||||
|
R2XzwTZ0KFk6gAuKCto8ENUlU5lV9PM4X3U0cBOIc5LJAPM+cxEDUocFtFqKJPbe
|
||||||
|
YYlSeB200YhYOdi+x34n9xnQjFu/jVlWF+Y0tMBB1WWq6rZbnuylwWLYQZAo10Co
|
||||||
|
D3oWsYRlD/ZL7X20ztIy8vRXz33ugnxxf88Q7csWDYb4S325svLfI2EjciIxYmBo
|
||||||
|
dSJxXRQkadjIoI7gNvzeWBkYSJpQUbaD4nT2xRS8vfuv42/DrIehb8SbTivHmcB3
|
||||||
|
OibpWIvDtS1B8thIlzl0edb+8pb6mof7pOBxoZdcBsSAk2/48s+jfRHfD9XcuKnv
|
||||||
|
hGCdSQ==
|
||||||
|
-----END CERTIFICATE-----
|
|
@ -0,0 +1,28 @@
|
||||||
|
-----BEGIN PRIVATE KEY-----
|
||||||
|
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCrSPSPM6grNZMG
|
||||||
|
4ZKFCVxuXu+qkHdzSR96QUxi00VkIrkGPmyMN7q7rUQJto9C9guJio3n7y3Bvr5k
|
||||||
|
BjICjyWQd7GfkVuYgiYiG/O2hy1u1dIMCAB/Zhx1F1mvRfn/Q4eZk2GSOUM+kC0x
|
||||||
|
aNsn2827VGLOGFywUhRmu7J9QSQ3x1Pi5BME7eNCAKup0CbrMrZSzKAEuYujLY0U
|
||||||
|
YRxUrguMnV60wxJDCYE14YDxn9t0g7wQmzyndupkAMLNJZX5L83RA6vUEuTVYBFc
|
||||||
|
yB0Fu3oBLQ31y5QOZ7WF/QiO5cPicQJI/oyXlHq497BWS/H28kj1H5ZM8+5yhCYD
|
||||||
|
tgj7dERpAgMBAAECggEAAeW+/88cr83aIRtimiKuaXKXyRXsnNRUivAqPnYEsMVJ
|
||||||
|
s24BmdQMN4QF2u2wzJcZLZ7hT45wvVK1nToMV8bqLZ2F1DSyBRB8B6iznHQG5tFr
|
||||||
|
kEKObtrcuddWYQCvckp3OBZP4GTN/+Vs+r0koF5o+whGR+4xKKrgGvs9UPHlytBf
|
||||||
|
0DMzAzWzGPp6qBPw2sUx/fa9r5TqFW+p4SEOZJUqL2/zEZ6KBWbKw5T1e1y2kMEc
|
||||||
|
cquUQ4avqK/N1nwRNKUnTvW827v0k7HQ2cFdrjIATNlICslOWJQicG5GUOuSBkTC
|
||||||
|
0FFkSTtHP4qm0BqShjv6NDmzX+3WCVkGOKFOI+zuWQKBgQDBq8yEcvfMJY98KNlR
|
||||||
|
eKKdJAMJvKdoD65Yv6EG7ZzpeEWHaTGhu71RPgHYkHn8h1T/9WniroSk19+zb4lP
|
||||||
|
mMsBwxpg5HejWPzIiiJRkRCRA7aZZfvaXfIWryB4kI1tlGHBNN/+SYpG1zdNumtp
|
||||||
|
Xyb/sQWMMWRZdRgclF8V+NvduwKBgQDiaM59gBROleREduFZE1a0oXtt+CrwrPlz
|
||||||
|
hclrkYl1FbTA4TdL4JNbj5jCXCR8YakFhxWEmhwq+Dgl1NQY/YjHyG3w2imaeASX
|
||||||
|
QUsEvAIvNrv1mIELiYCLmUElyX4WL3UhqveOFcZUvR1Z4TTwruPQmXf6BJEBLbWI
|
||||||
|
f7odmG6yKwKBgQCzpuLjZiZY9+qe2OGmQopNzE8JJDgCPrGS38fGvnnU1N1iXAFP
|
||||||
|
LvDRwPxDYNnXl84QVR2wygR/SUTYlTlBXdHKw6nfgW89Vlm+yOxGz5MXgeNLbp/u
|
||||||
|
k0DzK+aqECUxJfh8GclCgANF7XP+pVPn/f0WKKalwld86DLCqBuALUX+6wKBgCUh
|
||||||
|
gxvZ8Xqh4nnH9VUicsnU4eU7Ge+2roJfopTdnWlyUd6AEQ2EmyYc+rSFYAZ2Db42
|
||||||
|
VTUWASCa7LpnmREwI0qAeGdToBcRL8+OibsRClqr409331IBDu/WBnUoAmGpDtCi
|
||||||
|
tU68C3bCPRoMcR430GzZfm+maBGFaYwlRmSsJxtZAoGADSA3uAZBuWNDPNKUas2k
|
||||||
|
Z2dXFEPNpViMjQzJ+Ko7lbOBpUUUQfZF2VMSK4lcnhhbmhcMrYzWWmh6uaw78aHY
|
||||||
|
e3M//BfcVMdxHw7EemGOViNNq3uDIwzvYteoe6fAOA7MaV+WjJaf+smceR4o38fk
|
||||||
|
U9RTkKpRJIcvEW5bvTI9h4o=
|
||||||
|
-----END PRIVATE KEY-----
|
99
examples/haproxy-sni/haproxy.cfg
Normal file
99
examples/haproxy-sni/haproxy.cfg
Normal file
|
@ -0,0 +1,99 @@
|
||||||
|
#####################################
|
||||||
|
## Global Configuration & Defaults ##
|
||||||
|
#####################################
|
||||||
|
|
||||||
|
global
|
||||||
|
log stderr format iso local7
|
||||||
|
|
||||||
|
# generated 2021-06-05, Mozilla Guideline v5.6, HAProxy 2.1, OpenSSL 1.1.1d, intermediate configuration
|
||||||
|
# https://ssl-config.mozilla.org/#server=haproxy&version=2.1&config=intermediate&openssl=1.1.1d&guideline=5.6
|
||||||
|
ssl-default-bind-ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
|
||||||
|
ssl-default-bind-ciphersuites TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256
|
||||||
|
ssl-default-bind-options prefer-client-ciphers no-sslv3 no-tlsv10 no-tlsv11 no-tls-tickets
|
||||||
|
|
||||||
|
ssl-default-server-ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
|
||||||
|
ssl-default-server-ciphersuites TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256
|
||||||
|
ssl-default-server-options no-sslv3 no-tlsv10 no-tlsv11 no-tls-tickets
|
||||||
|
|
||||||
|
# curl https://ssl-config.mozilla.org/ffdhe2048.txt > /path/to/dhparam
|
||||||
|
ssl-dh-param-file /etc/ssl/dhparam.pem
|
||||||
|
|
||||||
|
defaults
|
||||||
|
log global
|
||||||
|
timeout connect 30000
|
||||||
|
timeout check 300000
|
||||||
|
timeout client 300000
|
||||||
|
timeout server 300000
|
||||||
|
|
||||||
|
############################################################################
|
||||||
|
## Frontends: HTTP; HTTPS → HTTPS SNI-based; HTTPS → HTTP(S) header-based ##
|
||||||
|
############################################################################
|
||||||
|
|
||||||
|
frontend http_redirect_frontend
|
||||||
|
# HTTP backend to redirect everything to HTTPS
|
||||||
|
bind :::80 v4v6
|
||||||
|
mode http
|
||||||
|
http-request redirect scheme https
|
||||||
|
|
||||||
|
frontend https_sni_frontend
|
||||||
|
# TCP backend to forward to HTTPS backends based on SNI
|
||||||
|
bind :::443 v4v6
|
||||||
|
mode tcp
|
||||||
|
|
||||||
|
# Wait up to 5s for a SNI header & only accept TLS connections
|
||||||
|
tcp-request inspect-delay 5s
|
||||||
|
tcp-request content capture req.ssl_sni len 255
|
||||||
|
log-format "%ci:%cp -> %[capture.req.hdr(0)] @ %f (%fi:%fp) -> %b (%bi:%bp)"
|
||||||
|
tcp-request content accept if { req.ssl_hello_type 1 }
|
||||||
|
|
||||||
|
###################################################
|
||||||
|
## Rules: forward to HTTP(S) header-based rules  ##
|
||||||
|
###################################################
|
||||||
|
acl use_http_backend req.ssl_sni -i "codeberg.org"
|
||||||
|
acl use_http_backend req.ssl_sni -i "join.codeberg.org"
|
||||||
|
# TODO: use this if no SNI exists
|
||||||
|
use_backend https_termination_backend if use_http_backend
|
||||||
|
|
||||||
|
############################
|
||||||
|
## Rules: HTTPS SNI-based ##
|
||||||
|
############################
|
||||||
|
# use_backend xyz_backend if { req.ssl_sni -i "xyz" }
|
||||||
|
default_backend pages_backend
|
||||||
|
|
||||||
|
frontend https_termination_frontend
|
||||||
|
# Terminate TLS for HTTP backends
|
||||||
|
bind /tmp/haproxy-tls-termination.sock accept-proxy ssl strict-sni alpn h2,http/1.1 crt /etc/ssl/private/haproxy/
|
||||||
|
mode http
|
||||||
|
|
||||||
|
# HSTS (63072000 seconds)
|
||||||
|
http-response set-header Strict-Transport-Security max-age=63072000
|
||||||
|
|
||||||
|
http-request capture req.hdr(Host) len 255
|
||||||
|
log-format "%ci:%cp -> %[capture.req.hdr(0)] @ %f (%fi:%fp) -> %b (%bi:%bp)"
|
||||||
|
|
||||||
|
##################################
|
||||||
|
## Rules: HTTP(S) header-based  ##
|
||||||
|
##################################
|
||||||
|
use_backend gitea_backend if { hdr(host) -i codeberg.org }
|
||||||
|
|
||||||
|
backend https_termination_backend
|
||||||
|
# Redirect to the terminating HTTPS frontend for all HTTP backends
|
||||||
|
server https_termination_server /tmp/haproxy-tls-termination.sock send-proxy-v2-ssl-cn
|
||||||
|
mode tcp
|
||||||
|
|
||||||
|
###############################
|
||||||
|
## Backends: HTTPS SNI-based ##
|
||||||
|
###############################
|
||||||
|
|
||||||
|
backend pages_backend
|
||||||
|
# Pages server is an HTTPS backend that uses its own certificates for custom domains
|
||||||
|
server pages_server pages:443
|
||||||
|
mode tcp
|
||||||
|
|
||||||
|
####################################
|
||||||
|
## Backends: HTTP(S) header-based ##
|
||||||
|
####################################
|
||||||
|
|
||||||
|
backend gitea_backend
|
||||||
|
server gitea_server gitea:80
|
||||||
|
mode http
|
1
examples/haproxy-sni/pages-www/index.html
Normal file
@@ -0,0 +1 @@
Hello to Pages!
4
examples/haproxy-sni/pages.Caddyfile
Normal file
@@ -0,0 +1,4 @@
https://example-page.org

tls internal
file_server
22
examples/haproxy-sni/test.sh
Executable file
@@ -0,0 +1,22 @@
#!/bin/sh
if [ $# -gt 0 ]; then
    exec curl -k --resolve '*:443:127.0.0.1' "$@"
fi

fail() {
    echo "[FAIL] $@"
    exit 1
}

echo "Connecting to Gitea..."
res=$(curl https://codeberg.org -sk --resolve '*:443:127.0.0.1' --trace-ascii gitea.dump | tee /dev/stderr)
echo "$res" | grep -Fx 'Hello to Gitea!' >/dev/null || fail "Gitea didn't answer"
grep '^== Info: issuer: O=mkcert development CA;' gitea.dump || { grep '^== Info: issuer:' gitea.dump; fail "Gitea didn't use the correct certificate!"; }

echo "Connecting to Pages..."
res=$(curl https://example-page.org -sk --resolve '*:443:127.0.0.1' --trace-ascii pages.dump | tee /dev/stderr)
echo "$res" | grep -Fx 'Hello to Pages!' >/dev/null || fail "Pages didn't answer"
grep '^== Info: issuer: CN=Caddy Local Authority\b' pages.dump || { grep '^== Info: issuer:' pages.dump; fail "Pages didn't use the correct certificate!"; }

echo "All tests succeeded"
rm *.dump
71
flake.lock
generated
Normal file
71
flake.lock
generated
Normal file
|
@ -0,0 +1,71 @@
|
||||||
|
{
|
||||||
|
"nodes": {
|
||||||
|
"flake-utils": {
|
||||||
|
"inputs": {
|
||||||
|
"systems": "systems"
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1731533236,
|
||||||
|
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"id": "flake-utils",
|
||||||
|
"type": "indirect"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nixpkgs": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 0,
|
||||||
|
"narHash": "sha256-WFZDy4bG2RkkCQloIEG8BXEvzyKklFVJbAismOJsIp4=",
|
||||||
|
"path": "/nix/store/c77dsgfxjywplw8bk8s8jlkdsr7a1bi9-source",
|
||||||
|
"type": "path"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"id": "nixpkgs",
|
||||||
|
"type": "indirect"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-utils": "flake-utils",
|
||||||
|
"nixpkgs": "nixpkgs",
|
||||||
|
"systems": "systems_2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"systems": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1681028828,
|
||||||
|
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"systems_2": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1681028828,
|
||||||
|
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"id": "systems",
|
||||||
|
"type": "indirect"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": "root",
|
||||||
|
"version": 7
|
||||||
|
}
|
27
flake.nix
Normal file
@@ -0,0 +1,27 @@
{
  outputs = {
    self,
    nixpkgs,
    flake-utils,
    systems,
  }:
    flake-utils.lib.eachSystem (import systems)
    (system: let
      pkgs = import nixpkgs {
        inherit system;
      };
    in {
      devShells.default = pkgs.mkShell {
        buildInputs = with pkgs; [
          glibc.static
          go
          gofumpt
          golangci-lint
          gopls
          gotools
          go-tools
          sqlite-interactive
        ];
      };
    });
}
237
go.mod
Normal file
237
go.mod
Normal file
|
@ -0,0 +1,237 @@
|
||||||
|
module codeberg.org/codeberg/pages
|
||||||
|
|
||||||
|
go 1.24.0
|
||||||
|
|
||||||
|
require (
|
||||||
|
code.gitea.io/sdk/gitea v0.20.0
|
||||||
|
github.com/OrlovEvgeny/go-mcache v0.0.0-20200121124330-1a8195b34f3a
|
||||||
|
github.com/creasty/defaults v1.8.0
|
||||||
|
github.com/go-acme/lego/v4 v4.21.0
|
||||||
|
github.com/go-sql-driver/mysql v1.8.1
|
||||||
|
github.com/hashicorp/go-uuid v1.0.3
|
||||||
|
github.com/hashicorp/golang-lru/v2 v2.0.7
|
||||||
|
github.com/joho/godotenv v1.5.1
|
||||||
|
github.com/lib/pq v1.10.9
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.24
|
||||||
|
github.com/microcosm-cc/bluemonday v1.0.27
|
||||||
|
github.com/pelletier/go-toml/v2 v2.2.3
|
||||||
|
github.com/pires/go-proxyproto v0.8.0
|
||||||
|
github.com/reugn/equalizer v0.0.0-20210216135016-a959c509d7ad
|
||||||
|
github.com/rs/zerolog v1.33.0
|
||||||
|
github.com/stretchr/testify v1.10.0
|
||||||
|
github.com/urfave/cli/v2 v2.27.5
|
||||||
|
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394
|
||||||
|
xorm.io/xorm v1.3.9
|
||||||
|
)
|
||||||
|
|
||||||
|
require (
|
||||||
|
cloud.google.com/go/auth v0.14.0 // indirect
|
||||||
|
cloud.google.com/go/auth/oauth2adapt v0.2.7 // indirect
|
||||||
|
cloud.google.com/go/compute/metadata v0.6.0 // indirect
|
||||||
|
filippo.io/edwards25519 v1.1.0 // indirect
|
||||||
|
github.com/42wim/httpsig v1.2.2 // indirect
|
||||||
|
github.com/AdamSLevy/jsonrpc2/v14 v14.1.0 // indirect
|
||||||
|
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.0 // indirect
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.1 // indirect
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/dns/armdns v1.2.0 // indirect
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/privatedns/armprivatedns v1.3.0 // indirect
|
||||||
|
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resourcegraph/armresourcegraph v0.9.0 // indirect
|
||||||
|
github.com/Azure/go-autorest v14.2.0+incompatible // indirect
|
||||||
|
github.com/Azure/go-autorest/autorest v0.11.29 // indirect
|
||||||
|
github.com/Azure/go-autorest/autorest/adal v0.9.24 // indirect
|
||||||
|
github.com/Azure/go-autorest/autorest/azure/auth v0.5.13 // indirect
|
||||||
|
github.com/Azure/go-autorest/autorest/azure/cli v0.4.6 // indirect
|
||||||
|
github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
|
||||||
|
github.com/Azure/go-autorest/autorest/to v0.4.0 // indirect
|
||||||
|
github.com/Azure/go-autorest/logger v0.2.1 // indirect
|
||||||
|
github.com/Azure/go-autorest/tracing v0.6.0 // indirect
|
||||||
|
github.com/AzureAD/microsoft-authentication-library-for-go v1.3.2 // indirect
|
||||||
|
github.com/OpenDNS/vegadns2client v0.0.0-20180418235048-a3fa4a771d87 // indirect
|
||||||
|
github.com/akamai/AkamaiOPEN-edgegrid-golang v1.2.2 // indirect
|
||||||
|
github.com/aliyun/alibaba-cloud-sdk-go v1.63.83 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2 v1.33.0 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/config v1.29.0 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/credentials v1.17.53 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.24 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.28 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.28 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.1 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.9 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/lightsail v1.42.10 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/route53 v1.48.1 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/sso v1.24.10 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.9 // indirect
|
||||||
|
github.com/aws/aws-sdk-go-v2/service/sts v1.33.8 // indirect
|
||||||
|
github.com/aws/smithy-go v1.22.1 // indirect
|
||||||
|
github.com/aymerick/douceur v0.2.0 // indirect
|
||||||
|
github.com/benbjohnson/clock v1.3.5 // indirect
|
||||||
|
github.com/boombuler/barcode v1.0.2 // indirect
|
||||||
|
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
|
||||||
|
github.com/civo/civogo v0.3.92 // indirect
|
||||||
|
github.com/cloudflare/cloudflare-go v0.114.0 // indirect
|
||||||
|
github.com/cpu/goacmedns v0.1.1 // indirect
|
||||||
|
github.com/cpuguy83/go-md2man/v2 v2.0.6 // indirect
|
||||||
|
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||||
|
github.com/davidmz/go-pageant v1.0.2 // indirect
|
||||||
|
github.com/dimchansky/utfbom v1.1.1 // indirect
|
||||||
|
github.com/dnsimple/dnsimple-go v1.7.0 // indirect
|
||||||
|
github.com/exoscale/egoscale/v3 v3.1.8 // indirect
|
||||||
|
github.com/fatih/structs v1.1.0 // indirect
|
||||||
|
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||||
|
github.com/fsnotify/fsnotify v1.8.0 // indirect
|
||||||
|
github.com/fxamacker/cbor/v2 v2.7.0 // indirect
|
||||||
|
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
|
||||||
|
github.com/ghodss/yaml v1.0.0 // indirect
|
||||||
|
github.com/go-errors/errors v1.5.1 // indirect
|
||||||
|
github.com/go-fed/httpsig v1.1.0 // indirect
|
||||||
|
github.com/go-jose/go-jose/v4 v4.0.4 // indirect
|
||||||
|
github.com/go-logr/logr v1.4.2 // indirect
|
||||||
|
github.com/go-logr/stdr v1.2.2 // indirect
|
||||||
|
github.com/go-playground/locales v0.14.1 // indirect
|
||||||
|
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||||
|
github.com/go-playground/validator/v10 v10.24.0 // indirect
|
||||||
|
github.com/go-resty/resty/v2 v2.16.3 // indirect
|
||||||
|
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
|
||||||
|
github.com/goccy/go-json v0.10.4 // indirect
|
||||||
|
github.com/gofrs/flock v0.12.1 // indirect
|
||||||
|
github.com/gogo/protobuf v1.3.2 // indirect
|
||||||
|
github.com/golang-jwt/jwt/v4 v4.5.1 // indirect
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.2.1 // indirect
|
||||||
|
github.com/golang/snappy v0.0.4 // indirect
|
||||||
|
github.com/google/go-querystring v1.1.0 // indirect
|
||||||
|
github.com/google/gofuzz v1.2.0 // indirect
|
||||||
|
github.com/google/s2a-go v0.1.9 // indirect
|
||||||
|
github.com/google/uuid v1.6.0 // indirect
|
||||||
|
github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
|
||||||
|
github.com/googleapis/gax-go/v2 v2.14.1 // indirect
|
||||||
|
github.com/gophercloud/gophercloud v1.14.1 // indirect
|
||||||
|
github.com/gophercloud/utils v0.0.0-20231010081019-80377eca5d56 // indirect
|
||||||
|
github.com/gorilla/css v1.0.1 // indirect
|
||||||
|
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||||
|
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||||
|
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||||
|
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
|
||||||
|
github.com/hashicorp/go-version v1.7.0 // indirect
|
||||||
|
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||||
|
github.com/huaweicloud/huaweicloud-sdk-go-v3 v0.1.132 // indirect
|
||||||
|
github.com/iij/doapi v0.0.0-20190504054126-0bbf12d6d7df // indirect
|
||||||
|
github.com/infobloxopen/infoblox-go-client v1.1.1 // indirect
|
||||||
|
github.com/jmespath/go-jmespath v0.4.0 // indirect
|
||||||
|
github.com/json-iterator/go v1.1.12 // indirect
|
||||||
|
github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213 // indirect
|
||||||
|
github.com/kolo/xmlrpc v0.0.0-20220921171641-a4b6fa1dd06b // indirect
|
||||||
|
github.com/kylelemons/godebug v1.1.0 // indirect
|
||||||
|
github.com/labbsr0x/bindman-dns-webhook v1.0.2 // indirect
|
||||||
|
github.com/labbsr0x/goh v1.0.1 // indirect
|
||||||
|
github.com/leodido/go-urn v1.4.0 // indirect
|
||||||
|
github.com/linode/linodego v1.46.0 // indirect
|
||||||
|
github.com/liquidweb/liquidweb-cli v0.7.0 // indirect
|
||||||
|
github.com/liquidweb/liquidweb-go v1.6.4 // indirect
|
||||||
|
github.com/magiconair/properties v1.8.9 // indirect
|
||||||
|
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||||
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
|
github.com/miekg/dns v1.1.62 // indirect
|
||||||
|
github.com/mimuret/golang-iij-dpf v0.9.1 // indirect
|
||||||
|
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||||
|
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||||
|
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||||
|
github.com/namedotcom/go v0.0.0-20180403034216-08470befbe04 // indirect
|
||||||
|
github.com/nrdcg/auroradns v1.1.0 // indirect
|
||||||
|
github.com/nrdcg/bunny-go v0.0.0-20240207213615-dde5bf4577a3 // indirect
|
||||||
|
github.com/nrdcg/desec v0.10.0 // indirect
|
||||||
|
github.com/nrdcg/dnspod-go v0.4.0 // indirect
|
||||||
|
github.com/nrdcg/freemyip v0.3.0 // indirect
|
||||||
|
github.com/nrdcg/goinwx v0.10.0 // indirect
|
||||||
|
github.com/nrdcg/mailinabox v0.2.0 // indirect
|
||||||
|
github.com/nrdcg/namesilo v0.2.1 // indirect
|
||||||
|
github.com/nrdcg/nodion v0.1.0 // indirect
|
||||||
|
github.com/nrdcg/porkbun v0.4.0 // indirect
|
||||||
|
github.com/nzdjb/go-metaname v1.0.0 // indirect
|
||||||
|
github.com/opentracing/opentracing-go v1.2.1-0.20220228012449-10b1cf09e00b // indirect
|
||||||
|
github.com/oracle/oci-go-sdk/v65 v65.81.2 // indirect
|
||||||
|
github.com/ovh/go-ovh v1.6.0 // indirect
|
||||||
|
github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
|
||||||
|
github.com/peterhellberg/link v1.2.0 // indirect
|
||||||
|
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
|
||||||
|
github.com/pkg/errors v0.9.1 // indirect
|
||||||
|
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||||
|
github.com/pquerna/otp v1.4.0 // indirect
|
||||||
|
github.com/regfish/regfish-dnsapi-go v0.1.1 // indirect
|
||||||
|
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||||
|
github.com/sacloud/api-client-go v0.2.10 // indirect
|
||||||
|
github.com/sacloud/go-http v0.1.9 // indirect
|
||||||
|
github.com/sacloud/iaas-api-go v1.14.0 // indirect
|
||||||
|
github.com/sacloud/packages-go v0.0.11 // indirect
|
||||||
|
github.com/sagikazarmark/locafero v0.7.0 // indirect
|
||||||
|
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
|
||||||
|
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.30 // indirect
|
||||||
|
github.com/selectel/domains-go v1.1.0 // indirect
|
||||||
|
github.com/selectel/go-selvpcclient/v3 v3.2.1 // indirect
|
||||||
|
github.com/shopspring/decimal v1.4.0 // indirect
|
||||||
|
github.com/sirupsen/logrus v1.9.3 // indirect
|
||||||
|
github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9 // indirect
|
||||||
|
github.com/softlayer/softlayer-go v1.1.7 // indirect
|
||||||
|
github.com/softlayer/xmlrpc v0.0.0-20200409220501-5f089df7cb7e // indirect
|
||||||
|
github.com/sony/gobreaker v1.0.0 // indirect
|
||||||
|
github.com/sourcegraph/conc v0.3.0 // indirect
|
||||||
|
github.com/spf13/afero v1.12.0 // indirect
|
||||||
|
github.com/spf13/cast v1.7.1 // indirect
|
||||||
|
github.com/spf13/pflag v1.0.5 // indirect
|
||||||
|
github.com/spf13/viper v1.19.0 // indirect
|
||||||
|
github.com/stretchr/objx v0.5.2 // indirect
|
||||||
|
github.com/subosito/gotenv v1.6.0 // indirect
|
||||||
|
github.com/syndtr/goleveldb v1.0.0 // indirect
|
||||||
|
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common v1.0.1084 // indirect
|
||||||
|
github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/dnspod v1.0.1084 // indirect
|
||||||
|
github.com/tjfoc/gmsm v1.4.1 // indirect
|
||||||
|
github.com/transip/gotransip/v6 v6.26.0 // indirect
|
||||||
|
github.com/ultradns/ultradns-go-sdk v1.8.0-20241010134910-243eeec // indirect
|
||||||
|
github.com/vinyldns/go-vinyldns v0.9.16 // indirect
|
||||||
|
github.com/volcengine/volc-sdk-golang v1.0.193 // indirect
|
||||||
|
github.com/vultr/govultr/v3 v3.14.1 // indirect
|
||||||
|
github.com/x448/float16 v0.8.4 // indirect
|
||||||
|
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
|
||||||
|
github.com/yandex-cloud/go-genproto v0.0.0-20241220122821-aeb3b05efd1c // indirect
|
||||||
|
github.com/yandex-cloud/go-sdk v0.0.0-20241220131134-2393e243c134 // indirect
|
||||||
|
go.mongodb.org/mongo-driver v1.17.2 // indirect
|
||||||
|
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 // indirect
|
||||||
|
go.opentelemetry.io/otel v1.34.0 // indirect
|
||||||
|
go.opentelemetry.io/otel/metric v1.34.0 // indirect
|
||||||
|
go.opentelemetry.io/otel/trace v1.34.0 // indirect
|
||||||
|
go.uber.org/atomic v1.11.0 // indirect
|
||||||
|
go.uber.org/multierr v1.11.0 // indirect
|
||||||
|
go.uber.org/ratelimit v0.3.1 // indirect
|
||||||
|
golang.org/x/crypto v0.36.0 // indirect
|
||||||
|
golang.org/x/mod v0.24.0 // indirect
|
||||||
|
golang.org/x/net v0.37.0 // indirect
|
||||||
|
golang.org/x/oauth2 v0.25.0 // indirect
|
||||||
|
golang.org/x/sync v0.12.0 // indirect
|
||||||
|
golang.org/x/sys v0.31.0 // indirect
|
||||||
|
golang.org/x/text v0.23.0 // indirect
|
||||||
|
golang.org/x/time v0.9.0 // indirect
|
||||||
|
golang.org/x/tools v0.31.0 // indirect
|
||||||
|
google.golang.org/api v0.217.0 // indirect
|
||||||
|
google.golang.org/genproto v0.0.0-20250115164207-1a7da9e5054f // indirect
|
||||||
|
google.golang.org/genproto/googleapis/api v0.0.0-20250115164207-1a7da9e5054f // indirect
|
||||||
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f // indirect
|
||||||
|
google.golang.org/grpc v1.69.4 // indirect
|
||||||
|
google.golang.org/protobuf v1.36.3 // indirect
|
||||||
|
gopkg.in/inf.v0 v0.9.1 // indirect
|
||||||
|
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||||
|
gopkg.in/ns1/ns1-go.v2 v2.13.0 // indirect
|
||||||
|
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||||
|
k8s.io/api v0.32.1 // indirect
|
||||||
|
k8s.io/apimachinery v0.32.1 // indirect
|
||||||
|
k8s.io/klog/v2 v2.130.1 // indirect
|
||||||
|
k8s.io/utils v0.0.0-20241210054802-24370beab758 // indirect
|
||||||
|
sigs.k8s.io/json v0.0.0-20241014173422-cfa47c3a1cc8 // indirect
|
||||||
|
sigs.k8s.io/structured-merge-diff/v4 v4.5.0 // indirect
|
||||||
|
sigs.k8s.io/yaml v1.4.0 // indirect
|
||||||
|
xorm.io/builder v0.3.13 // indirect
|
||||||
|
)
|
71
html/html.go
Normal file
71
html/html.go
Normal file
|
@ -0,0 +1,71 @@
|
||||||
|
package html
|
||||||
|
|
||||||
|
import (
|
||||||
|
_ "embed"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"text/template" // do not use html/template here, we sanitize the message before passing it to the template
|
||||||
|
|
||||||
|
"codeberg.org/codeberg/pages/server/context"
|
||||||
|
"github.com/microcosm-cc/bluemonday"
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
//go:embed templates/error.html
|
||||||
|
var errorPage string
|
||||||
|
|
||||||
|
var (
|
||||||
|
errorTemplate = template.Must(template.New("error").Parse(loadCustomTemplateOrDefault()))
|
||||||
|
sanitizer = createBlueMondayPolicy()
|
||||||
|
)
|
||||||
|
|
||||||
|
type TemplateContext struct {
|
||||||
|
StatusCode int
|
||||||
|
StatusText string
|
||||||
|
Message string
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReturnErrorPage sets the response status code and writes the error page to the response body.
|
||||||
|
// The error page contains a sanitized version of the message and the statusCode both in text and numeric form.
|
||||||
|
//
|
||||||
|
// Currently, only the following html tags are supported: <code>
|
||||||
|
func ReturnErrorPage(ctx *context.Context, msg string, statusCode int) {
|
||||||
|
ctx.RespWriter.Header().Set("Content-Type", "text/html; charset=utf-8")
|
||||||
|
ctx.RespWriter.WriteHeader(statusCode)
|
||||||
|
|
||||||
|
templateContext := TemplateContext{
|
||||||
|
StatusCode: statusCode,
|
||||||
|
StatusText: http.StatusText(statusCode),
|
||||||
|
Message: sanitizer.Sanitize(msg),
|
||||||
|
}
|
||||||
|
|
||||||
|
err := errorTemplate.Execute(ctx.RespWriter, templateContext)
|
||||||
|
if err != nil {
|
||||||
|
log.Err(err).Str("message", msg).Int("status", statusCode).Msg("could not write response")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func createBlueMondayPolicy() *bluemonday.Policy {
|
||||||
|
p := bluemonday.NewPolicy()
|
||||||
|
|
||||||
|
p.AllowElements("code")
|
||||||
|
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadCustomTemplateOrDefault() string {
|
||||||
|
contents, err := os.ReadFile("custom/error.html")
|
||||||
|
if err != nil {
|
||||||
|
if !os.IsNotExist(err) {
|
||||||
|
wd, wdErr := os.Getwd()
|
||||||
|
if wdErr != nil {
|
||||||
|
log.Err(err).Msg("could not load custom error page 'custom/error.html'")
|
||||||
|
} else {
|
||||||
|
log.Err(err).Msgf("could not load custom error page '%v'", path.Join(wd, "custom/error.html"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return errorPage
|
||||||
|
}
|
||||||
|
return string(contents)
|
||||||
|
}
|
54
html/html_test.go
Normal file
54
html/html_test.go
Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
package html
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestSanitizerSimpleString(t *testing.T) {
|
||||||
|
str := "simple text message without any html elements"
|
||||||
|
|
||||||
|
assert.Equal(t, str, sanitizer.Sanitize(str))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSanitizerStringWithCodeTag(t *testing.T) {
|
||||||
|
str := "simple text message with <code>html</code> tag"
|
||||||
|
|
||||||
|
assert.Equal(t, str, sanitizer.Sanitize(str))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSanitizerStringWithCodeTagWithAttribute(t *testing.T) {
|
||||||
|
str := "simple text message with <code id=\"code\">html</code> tag"
|
||||||
|
expected := "simple text message with <code>html</code> tag"
|
||||||
|
|
||||||
|
assert.Equal(t, expected, sanitizer.Sanitize(str))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSanitizerStringWithATag(t *testing.T) {
|
||||||
|
str := "simple text message with <a>a link to another page</a>"
|
||||||
|
expected := "simple text message with a link to another page"
|
||||||
|
|
||||||
|
assert.Equal(t, expected, sanitizer.Sanitize(str))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSanitizerStringWithATagAndHref(t *testing.T) {
|
||||||
|
str := "simple text message with <a href=\"http://evil.site\">a link to another page</a>"
|
||||||
|
expected := "simple text message with a link to another page"
|
||||||
|
|
||||||
|
assert.Equal(t, expected, sanitizer.Sanitize(str))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSanitizerStringWithImgTag(t *testing.T) {
|
||||||
|
str := "simple text message with a <img alt=\"not found\" src=\"http://evil.site\">"
|
||||||
|
expected := "simple text message with a "
|
||||||
|
|
||||||
|
assert.Equal(t, expected, sanitizer.Sanitize(str))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSanitizerStringWithImgTagAndOnerrorAttribute(t *testing.T) {
|
||||||
|
str := "simple text message with a <img alt=\"not found\" src=\"http://evil.site\" onerror=\"alert(secret)\">"
|
||||||
|
expected := "simple text message with a "
|
||||||
|
|
||||||
|
assert.Equal(t, expected, sanitizer.Sanitize(str))
|
||||||
|
}
|
58
html/templates/error.html
Normal file
58
html/templates/error.html
Normal file
|
@ -0,0 +1,58 @@
|
||||||
|
<!doctype html>
|
||||||
|
<html class="codeberg-design">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<meta name="viewport" content="width=device-width" />
|
||||||
|
<title>{{.StatusText}}</title>
|
||||||
|
|
||||||
|
<link rel="stylesheet" href="https://design.codeberg.org/design-kit/codeberg.css" />
|
||||||
|
<link rel="stylesheet" href="https://fonts.codeberg.org/dist/inter/Inter%20Web/inter.css" />
|
||||||
|
|
||||||
|
<style>
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
padding: 1rem;
|
||||||
|
box-sizing: border-box;
|
||||||
|
width: 100%;
|
||||||
|
min-height: 100vh;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
code {
|
||||||
|
border-radius: 0.25rem;
|
||||||
|
padding: 0.25rem;
|
||||||
|
background-color: silver;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" height="10em" viewBox="0 0 24 24" fill="var(--blue-color)">
|
||||||
|
<path
|
||||||
|
d="M 9 2 C 5.1458514 2 2 5.1458514 2 9 C 2 12.854149 5.1458514 16 9 16 C 10.747998 16 12.345009 15.348024 13.574219 14.28125 L 14 14.707031 L 14 16 L 19.585938 21.585938 C 20.137937 22.137937 21.033938 22.137938 21.585938 21.585938 C 22.137938 21.033938 22.137938 20.137938 21.585938 19.585938 L 16 14 L 14.707031 14 L 14.28125 13.574219 C 15.348024 12.345009 16 10.747998 16 9 C 16 5.1458514 12.854149 2 9 2 z M 9 4 C 11.773268 4 14 6.2267316 14 9 C 14 11.773268 11.773268 14 9 14 C 6.2267316 14 4 11.773268 4 9 C 4 6.2267316 6.2267316 4 9 4 z"
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
<h1 class="mb-0 text-primary">{{.StatusText}} (Error {{.StatusCode}})!</h1>
|
||||||
|
<h5 class="text-center" style="max-width: 25em">
|
||||||
|
<p>Sorry, but this page couldn't be served:</p>
|
||||||
|
<p><b>"{{.Message}}"</b></p>
|
||||||
|
<p>
|
||||||
|
The page you tried to reach is hosted on Codeberg Pages, which might currently be experiencing technical
|
||||||
|
difficulties. If that is the case, it could take a little while until this page is available again.
|
||||||
|
</p>
|
||||||
|
<p>
|
||||||
|
Otherwise, this page might also be unavailable due to a configuration error. If you are the owner of this
|
||||||
|
website, please make sure to check the
|
||||||
|
<a href="https://docs.codeberg.org/codeberg-pages/troubleshooting/" target="_blank"
|
||||||
|
>troubleshooting section in the Docs</a
|
||||||
|
>!
|
||||||
|
</p>
|
||||||
|
</h5>
|
||||||
|
<small class="text-muted">
|
||||||
|
<img src="https://design.codeberg.org/logo-kit/icon.svg" class="align-top" />
|
||||||
|
Static pages made easy -
|
||||||
|
<a href="https://codeberg.page">Codeberg Pages</a>
|
||||||
|
</small>
|
||||||
|
</body>
|
||||||
|
</html>
|
282
integration/get_test.go
Normal file
282
integration/get_test.go
Normal file
|
@ -0,0 +1,282 @@
|
||||||
|
//go:build integration
|
||||||
|
// +build integration
|
||||||
|
|
||||||
|
package integration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/tls"
|
||||||
|
"io"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"net/http/cookiejar"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestGetRedirect(t *testing.T) {
|
||||||
|
log.Println("=== TestGetRedirect ===")
|
||||||
|
// test custom domain redirect
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://calciumdibromid.localhost.mock.directory:4430")
|
||||||
|
if !assert.NoError(t, err) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
if !assert.EqualValues(t, http.StatusTemporaryRedirect, resp.StatusCode) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, "https://www.cabr2.de/", resp.Header.Get("Location"))
|
||||||
|
assert.EqualValues(t, `<a href="https://www.cabr2.de/">Temporary Redirect</a>.`, strings.TrimSpace(string(getBytes(resp.Body))))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetContent(t *testing.T) {
|
||||||
|
log.Println("=== TestGetContent ===")
|
||||||
|
// test get image
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://cb_pages_tests.localhost.mock.directory:4430/images/827679288a.jpg")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.EqualValues(t, http.StatusOK, resp.StatusCode) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, "image/jpeg", resp.Header.Get("Content-Type"))
|
||||||
|
assert.EqualValues(t, "124635", resp.Header.Get("Content-Length"))
|
||||||
|
assert.EqualValues(t, 124635, getSize(resp.Body))
|
||||||
|
assert.Len(t, resp.Header.Get("ETag"), 42)
|
||||||
|
|
||||||
|
// specify branch
|
||||||
|
resp, err = getTestHTTPSClient().Get("https://cb_pages_tests.localhost.mock.directory:4430/pag/@master/")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusOK, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "text/html; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
assert.True(t, getSize(resp.Body) > 1000)
|
||||||
|
assert.Len(t, resp.Header.Get("ETag"), 44)
|
||||||
|
|
||||||
|
// access branch name contains '/'
|
||||||
|
resp, err = getTestHTTPSClient().Get("https://cb_pages_tests.localhost.mock.directory:4430/blumia/@docs~main/")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.EqualValues(t, http.StatusOK, resp.StatusCode) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, "text/html; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
assert.True(t, getSize(resp.Body) > 100)
|
||||||
|
assert.Len(t, resp.Header.Get("ETag"), 44)
|
||||||
|
|
||||||
|
// TODO: test get of non cacheable content (content size > fileCacheSizeLimit)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCustomDomain(t *testing.T) {
|
||||||
|
log.Println("=== TestCustomDomain ===")
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://mock-pages.codeberg-test.org:4430/README.md")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusOK, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "text/markdown; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
assert.EqualValues(t, "106", resp.Header.Get("Content-Length"))
|
||||||
|
assert.EqualValues(t, 106, getSize(resp.Body))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCustomDomainRedirects(t *testing.T) {
|
||||||
|
log.Println("=== TestCustomDomainRedirects ===")
|
||||||
|
// test redirect from default pages domain to custom domain
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://6543.localhost.mock.directory:4430/test_pages-server_custom-mock-domain/@main/README.md")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusTemporaryRedirect, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "text/html; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
// TODO: custom port is not evaluated (witch does hurt tests & dev env only)
|
||||||
|
// assert.EqualValues(t, "https://mock-pages.codeberg-test.org:4430/@main/README.md", resp.Header.Get("Location"))
|
||||||
|
assert.EqualValues(t, "https://mock-pages.codeberg-test.org/@main/README.md", resp.Header.Get("Location"))
|
||||||
|
assert.EqualValues(t, `https:/codeberg.org/6543/test_pages-server_custom-mock-domain/src/branch/main/README.md; rel="canonical"; rel="canonical"`, resp.Header.Get("Link"))
|
||||||
|
|
||||||
|
// test redirect from an custom domain to the primary custom domain (www.example.com -> example.com)
|
||||||
|
// regression test to https://codeberg.org/Codeberg/pages-server/issues/153
|
||||||
|
resp, err = getTestHTTPSClient().Get("https://mock-pages-redirect.codeberg-test.org:4430/README.md")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusTemporaryRedirect, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "text/html; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
// TODO: custom port is not evaluated (witch does hurt tests & dev env only)
|
||||||
|
// assert.EqualValues(t, "https://mock-pages.codeberg-test.org:4430/README.md", resp.Header.Get("Location"))
|
||||||
|
assert.EqualValues(t, "https://mock-pages.codeberg-test.org/README.md", resp.Header.Get("Location"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRawCustomDomain(t *testing.T) {
|
||||||
|
log.Println("=== TestRawCustomDomain ===")
|
||||||
|
// test raw domain response for custom domain branch
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://raw.localhost.mock.directory:4430/cb_pages_tests/raw-test/example") // need cb_pages_tests fork
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusOK, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "text/plain; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
assert.EqualValues(t, "76", resp.Header.Get("Content-Length"))
|
||||||
|
assert.EqualValues(t, 76, getSize(resp.Body))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRawIndex(t *testing.T) {
|
||||||
|
log.Println("=== TestRawIndex ===")
|
||||||
|
// test raw domain response for index.html
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://raw.localhost.mock.directory:4430/cb_pages_tests/raw-test/@branch-test/index.html") // need cb_pages_tests fork
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusOK, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "text/plain; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
assert.EqualValues(t, "597", resp.Header.Get("Content-Length"))
|
||||||
|
assert.EqualValues(t, 597, getSize(resp.Body))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetNotFound(t *testing.T) {
|
||||||
|
log.Println("=== TestGetNotFound ===")
|
||||||
|
// test custom not found pages
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://cb_pages_tests.localhost.mock.directory:4430/pages-404-demo/blah")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusNotFound, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "text/html; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
assert.EqualValues(t, "37", resp.Header.Get("Content-Length"))
|
||||||
|
assert.EqualValues(t, 37, getSize(resp.Body))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRedirect(t *testing.T) {
|
||||||
|
log.Println("=== TestRedirect ===")
|
||||||
|
// test redirects
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://cb_pages_tests.localhost.mock.directory:4430/some_redirects/redirect")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusMovedPermanently, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "https://example.com/", resp.Header.Get("Location"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSPARedirect(t *testing.T) {
|
||||||
|
log.Println("=== TestSPARedirect ===")
|
||||||
|
// test SPA redirects
|
||||||
|
url := "https://cb_pages_tests.localhost.mock.directory:4430/some_redirects/app/aqdjw"
|
||||||
|
resp, err := getTestHTTPSClient().Get(url)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusOK, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, url, resp.Request.URL.String())
|
||||||
|
assert.EqualValues(t, "text/html; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
assert.EqualValues(t, "258", resp.Header.Get("Content-Length"))
|
||||||
|
assert.EqualValues(t, 258, getSize(resp.Body))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSplatRedirect(t *testing.T) {
|
||||||
|
log.Println("=== TestSplatRedirect ===")
|
||||||
|
// test splat redirects
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://cb_pages_tests.localhost.mock.directory:4430/some_redirects/articles/qfopefe")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusMovedPermanently, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "/posts/qfopefe", resp.Header.Get("Location"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFollowSymlink(t *testing.T) {
|
||||||
|
log.Printf("=== TestFollowSymlink ===\n")
|
||||||
|
|
||||||
|
// file symlink
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://cb_pages_tests.localhost.mock.directory:4430/tests_for_pages-server/@main/link")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusOK, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "application/octet-stream", resp.Header.Get("Content-Type"))
|
||||||
|
assert.EqualValues(t, "4", resp.Header.Get("Content-Length"))
|
||||||
|
body := getBytes(resp.Body)
|
||||||
|
assert.EqualValues(t, 4, len(body))
|
||||||
|
assert.EqualValues(t, "abc\n", string(body))
|
||||||
|
|
||||||
|
// relative file links (../index.html file in this case)
|
||||||
|
resp, err = getTestHTTPSClient().Get("https://cb_pages_tests.localhost.mock.directory:4430/tests_for_pages-server/@main/dir_aim/some/")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusOK, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "text/html; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
assert.EqualValues(t, "an index\n", string(getBytes(resp.Body)))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLFSSupport(t *testing.T) {
|
||||||
|
log.Printf("=== TestLFSSupport ===\n")
|
||||||
|
|
||||||
|
resp, err := getTestHTTPSClient().Get("https://cb_pages_tests.localhost.mock.directory:4430/tests_for_pages-server/@main/lfs.txt")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusOK, resp.StatusCode)
|
||||||
|
body := strings.TrimSpace(string(getBytes(resp.Body)))
|
||||||
|
assert.EqualValues(t, 12, len(body))
|
||||||
|
assert.EqualValues(t, "actual value", body)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetOptions(t *testing.T) {
|
||||||
|
log.Println("=== TestGetOptions ===")
|
||||||
|
req, _ := http.NewRequest(http.MethodOptions, "https://mock-pages.codeberg-test.org:4430/README.md", http.NoBody)
|
||||||
|
resp, err := getTestHTTPSClient().Do(req)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusNoContent, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "GET, HEAD, OPTIONS", resp.Header.Get("Allow"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHttpRedirect(t *testing.T) {
|
||||||
|
log.Println("=== TestHttpRedirect ===")
|
||||||
|
resp, err := getTestHTTPSClient().Get("http://mock-pages.codeberg-test.org:8880/README.md")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
if !assert.NotNil(t, resp) {
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
assert.EqualValues(t, http.StatusMovedPermanently, resp.StatusCode)
|
||||||
|
assert.EqualValues(t, "text/html; charset=utf-8", resp.Header.Get("Content-Type"))
|
||||||
|
assert.EqualValues(t, "https://mock-pages.codeberg-test.org:4430/README.md", resp.Header.Get("Location"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func getTestHTTPSClient() *http.Client {
|
||||||
|
cookieJar, _ := cookiejar.New(nil)
|
||||||
|
return &http.Client{
|
||||||
|
Jar: cookieJar,
|
||||||
|
CheckRedirect: func(_ *http.Request, _ []*http.Request) error {
|
||||||
|
return http.ErrUseLastResponse
|
||||||
|
},
|
||||||
|
Transport: &http.Transport{
|
||||||
|
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func getBytes(stream io.Reader) []byte {
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
_, _ = buf.ReadFrom(stream)
|
||||||
|
return buf.Bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
func getSize(stream io.Reader) int {
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
_, _ = buf.ReadFrom(stream)
|
||||||
|
return buf.Len()
|
||||||
|
}
|
69
integration/main_test.go
Normal file
69
integration/main_test.go
Normal file
|
@ -0,0 +1,69 @@
|
||||||
|
//go:build integration
|
||||||
|
// +build integration
|
||||||
|
|
||||||
|
package integration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/urfave/cli/v2"
|
||||||
|
|
||||||
|
cmd "codeberg.org/codeberg/pages/cli"
|
||||||
|
"codeberg.org/codeberg/pages/server"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestMain(m *testing.M) {
|
||||||
|
log.Println("=== TestMain: START Server ===")
|
||||||
|
serverCtx, serverCancel := context.WithCancel(context.Background())
|
||||||
|
if err := startServer(serverCtx); err != nil {
|
||||||
|
log.Fatalf("could not start server: %v", err)
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
serverCancel()
|
||||||
|
log.Println("=== TestMain: Server STOPPED ===")
|
||||||
|
}()
|
||||||
|
|
||||||
|
time.Sleep(10 * time.Second)
|
||||||
|
|
||||||
|
m.Run()
|
||||||
|
}
|
||||||
|
|
||||||
|
func startServer(ctx context.Context) error {
|
||||||
|
args := []string{"integration"}
|
||||||
|
setEnvIfNotSet("ACME_API", "https://acme.mock.directory")
|
||||||
|
setEnvIfNotSet("PAGES_DOMAIN", "localhost.mock.directory")
|
||||||
|
setEnvIfNotSet("RAW_DOMAIN", "raw.localhost.mock.directory")
|
||||||
|
setEnvIfNotSet("PAGES_BRANCHES", "pages,main,master")
|
||||||
|
setEnvIfNotSet("PORT", "4430")
|
||||||
|
setEnvIfNotSet("HTTP_PORT", "8880")
|
||||||
|
setEnvIfNotSet("ENABLE_HTTP_SERVER", "true")
|
||||||
|
setEnvIfNotSet("DB_TYPE", "sqlite3")
|
||||||
|
setEnvIfNotSet("GITEA_ROOT", "https://codeberg.org")
|
||||||
|
setEnvIfNotSet("LOG_LEVEL", "trace")
|
||||||
|
setEnvIfNotSet("ENABLE_LFS_SUPPORT", "true")
|
||||||
|
setEnvIfNotSet("ENABLE_SYMLINK_SUPPORT", "true")
|
||||||
|
setEnvIfNotSet("ACME_ACCOUNT_CONFIG", "integration/acme-account.json")
|
||||||
|
|
||||||
|
app := cli.NewApp()
|
||||||
|
app.Name = "pages-server"
|
||||||
|
app.Action = server.Serve
|
||||||
|
app.Flags = cmd.ServerFlags
|
||||||
|
|
||||||
|
go func() {
|
||||||
|
if err := app.RunContext(ctx, args); err != nil {
|
||||||
|
log.Fatalf("run server error: %v", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func setEnvIfNotSet(key, value string) {
|
||||||
|
if _, set := os.LookupEnv(key); !set {
|
||||||
|
os.Setenv(key, value)
|
||||||
|
}
|
||||||
|
}
|
21
main.go
Normal file
21
main.go
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
|
||||||
|
_ "github.com/joho/godotenv/autoload"
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
|
||||||
|
"codeberg.org/codeberg/pages/cli"
|
||||||
|
"codeberg.org/codeberg/pages/server"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
app := cli.CreatePagesApp()
|
||||||
|
app.Action = server.Serve
|
||||||
|
|
||||||
|
if err := app.Run(os.Args); err != nil {
|
||||||
|
log.Error().Err(err).Msg("A fatal error occurred")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
27
renovate.json
Normal file
27
renovate.json
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
{
|
||||||
|
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||||
|
"extends": [
|
||||||
|
"config:recommended",
|
||||||
|
":maintainLockFilesWeekly",
|
||||||
|
":enablePreCommit",
|
||||||
|
"schedule:automergeDaily",
|
||||||
|
"schedule:weekends"
|
||||||
|
],
|
||||||
|
"automergeType": "branch",
|
||||||
|
"automergeMajor": false,
|
||||||
|
"automerge": true,
|
||||||
|
"prConcurrentLimit": 5,
|
||||||
|
"labels": ["dependencies"],
|
||||||
|
"packageRules": [
|
||||||
|
{
|
||||||
|
"matchManagers": ["gomod", "dockerfile"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"groupName": "golang deps non-major",
|
||||||
|
"matchManagers": ["gomod"],
|
||||||
|
"matchUpdateTypes": ["minor", "patch"],
|
||||||
|
"extends": ["schedule:daily"]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"postUpdateOptions": ["gomodTidy", "gomodUpdateImportPaths"]
|
||||||
|
}
|
26
server/acme/client.go
Normal file
26
server/acme/client.go
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
package acme
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"codeberg.org/codeberg/pages/config"
|
||||||
|
"codeberg.org/codeberg/pages/server/cache"
|
||||||
|
"codeberg.org/codeberg/pages/server/certificates"
|
||||||
|
)
|
||||||
|
|
||||||
|
var ErrAcmeMissConfig = errors.New("ACME client has wrong config")
|
||||||
|
|
||||||
|
func CreateAcmeClient(cfg config.ACMEConfig, enableHTTPServer bool, challengeCache cache.ICache) (*certificates.AcmeClient, error) {
|
||||||
|
// check config
|
||||||
|
if (!cfg.AcceptTerms || (cfg.DNSProvider == "" && !cfg.NoDNS01)) && cfg.APIEndpoint != "https://acme.mock.directory" {
|
||||||
|
return nil, fmt.Errorf("%w: you must set $ACME_ACCEPT_TERMS and $DNS_PROVIDER or $NO_DNS_01, unless $ACME_API is set to https://acme.mock.directory", ErrAcmeMissConfig)
|
||||||
|
}
|
||||||
|
if cfg.EAB_HMAC != "" && cfg.EAB_KID == "" {
|
||||||
|
return nil, fmt.Errorf("%w: ACME_EAB_HMAC also needs ACME_EAB_KID to be set", ErrAcmeMissConfig)
|
||||||
|
} else if cfg.EAB_HMAC == "" && cfg.EAB_KID != "" {
|
||||||
|
return nil, fmt.Errorf("%w: ACME_EAB_KID also needs ACME_EAB_HMAC to be set", ErrAcmeMissConfig)
|
||||||
|
}
|
||||||
|
|
||||||
|
return certificates.NewAcmeClient(cfg, enableHTTPServer, challengeCache)
|
||||||
|
}
|
10
server/cache/interface.go
vendored
Normal file
10
server/cache/interface.go
vendored
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
package cache
|
||||||
|
|
||||||
|
import "time"
|
||||||
|
|
||||||
|
// ICache is an interface that defines how the pages server interacts with the cache.
|
||||||
|
type ICache interface {
|
||||||
|
Set(key string, value interface{}, ttl time.Duration) error
|
||||||
|
Get(key string) (interface{}, bool)
|
||||||
|
Remove(key string)
|
||||||
|
}
|
7
server/cache/memory.go
vendored
Normal file
7
server/cache/memory.go
vendored
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
package cache
|
||||||
|
|
||||||
|
import "github.com/OrlovEvgeny/go-mcache"
|
||||||
|
|
||||||
|
func NewInMemoryCache() ICache {
|
||||||
|
return mcache.New()
|
||||||
|
}
|
29
server/certificates/acme_account.go
Normal file
29
server/certificates/acme_account.go
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
package certificates
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto"
|
||||||
|
|
||||||
|
"github.com/go-acme/lego/v4/registration"
|
||||||
|
)
|
||||||
|
|
||||||
|
type AcmeAccount struct {
|
||||||
|
Email string
|
||||||
|
Registration *registration.Resource
|
||||||
|
Key crypto.PrivateKey `json:"-"`
|
||||||
|
KeyPEM string `json:"Key"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// make sure AcmeAccount match User interface
|
||||||
|
var _ registration.User = &AcmeAccount{}
|
||||||
|
|
||||||
|
func (u *AcmeAccount) GetEmail() string {
|
||||||
|
return u.Email
|
||||||
|
}
|
||||||
|
|
||||||
|
func (u AcmeAccount) GetRegistration() *registration.Resource {
|
||||||
|
return u.Registration
|
||||||
|
}
|
||||||
|
|
||||||
|
func (u *AcmeAccount) GetPrivateKey() crypto.PrivateKey {
|
||||||
|
return u.Key
|
||||||
|
}
|
93
server/certificates/acme_client.go
Normal file
93
server/certificates/acme_client.go
Normal file
|
@ -0,0 +1,93 @@
|
||||||
|
package certificates
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-acme/lego/v4/lego"
|
||||||
|
"github.com/go-acme/lego/v4/providers/dns"
|
||||||
|
"github.com/reugn/equalizer"
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
|
||||||
|
"codeberg.org/codeberg/pages/config"
|
||||||
|
"codeberg.org/codeberg/pages/server/cache"
|
||||||
|
)
|
||||||
|
|
||||||
|
type AcmeClient struct {
|
||||||
|
legoClient *lego.Client
|
||||||
|
dnsChallengerLegoClient *lego.Client
|
||||||
|
|
||||||
|
obtainLocks sync.Map
|
||||||
|
|
||||||
|
acmeUseRateLimits bool
|
||||||
|
|
||||||
|
// limiter
|
||||||
|
acmeClientOrderLimit *equalizer.TokenBucket
|
||||||
|
acmeClientRequestLimit *equalizer.TokenBucket
|
||||||
|
acmeClientFailLimit *equalizer.TokenBucket
|
||||||
|
acmeClientCertificateLimitPerUser map[string]*equalizer.TokenBucket
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewAcmeClient(cfg config.ACMEConfig, enableHTTPServer bool, challengeCache cache.ICache) (*AcmeClient, error) {
|
||||||
|
acmeConfig, err := setupAcmeConfig(cfg)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
acmeClient, err := lego.NewClient(acmeConfig)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal().Err(err).Msg("Can't create ACME client, continuing with mock certs only")
|
||||||
|
} else {
|
||||||
|
err = acmeClient.Challenge.SetTLSALPN01Provider(AcmeTLSChallengeProvider{challengeCache})
|
||||||
|
if err != nil {
|
||||||
|
log.Error().Err(err).Msg("Can't create TLS-ALPN-01 provider")
|
||||||
|
}
|
||||||
|
if enableHTTPServer {
|
||||||
|
err = acmeClient.Challenge.SetHTTP01Provider(AcmeHTTPChallengeProvider{challengeCache})
|
||||||
|
if err != nil {
|
||||||
|
log.Error().Err(err).Msg("Can't create HTTP-01 provider")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
mainDomainAcmeClient, err := lego.NewClient(acmeConfig)
|
||||||
|
if err != nil {
|
||||||
|
log.Error().Err(err).Msg("Can't create ACME client, continuing with mock certs only")
|
||||||
|
} else {
|
||||||
|
if cfg.DNSProvider == "" {
|
||||||
|
// using mock wildcard certs
|
||||||
|
mainDomainAcmeClient = nil
|
||||||
|
} else {
|
||||||
|
// use DNS-Challenge https://go-acme.github.io/lego/dns/
|
||||||
|
provider, err := dns.NewDNSChallengeProviderByName(cfg.DNSProvider)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("can not create DNS Challenge provider: %w", err)
|
||||||
|
}
|
||||||
|
if err := mainDomainAcmeClient.Challenge.SetDNS01Provider(provider); err != nil {
|
||||||
|
return nil, fmt.Errorf("can not create DNS-01 provider: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &AcmeClient{
|
||||||
|
legoClient: acmeClient,
|
||||||
|
dnsChallengerLegoClient: mainDomainAcmeClient,
|
||||||
|
|
||||||
|
acmeUseRateLimits: cfg.UseRateLimits,
|
||||||
|
|
||||||
|
obtainLocks: sync.Map{},
|
||||||
|
|
||||||
|
// limiter
|
||||||
|
|
||||||
|
// rate limit is 300 / 3 hours, we want 200 / 2 hours but to refill more often, so that's 25 new domains every 15 minutes
|
||||||
|
// TODO: when this is used a lot, we probably have to think of a somewhat better solution?
|
||||||
|
acmeClientOrderLimit: equalizer.NewTokenBucket(25, 15*time.Minute),
|
||||||
|
// rate limit is 20 / second, we want 5 / second (especially as one cert takes at least two requests)
|
||||||
|
acmeClientRequestLimit: equalizer.NewTokenBucket(5, 1*time.Second),
|
||||||
|
// rate limit is 5 / hour https://letsencrypt.org/docs/failed-validation-limit/
|
||||||
|
acmeClientFailLimit: equalizer.NewTokenBucket(5, 1*time.Hour),
|
||||||
|
// checkUserLimit() use this to rate also per user
|
||||||
|
acmeClientCertificateLimitPerUser: map[string]*equalizer.TokenBucket{},
|
||||||
|
}, nil
|
||||||
|
}
|
110
server/certificates/acme_config.go
Normal file
110
server/certificates/acme_config.go
Normal file
|
@ -0,0 +1,110 @@
|
||||||
|
package certificates
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/ecdsa"
|
||||||
|
"crypto/elliptic"
|
||||||
|
"crypto/rand"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"codeberg.org/codeberg/pages/config"
|
||||||
|
"github.com/go-acme/lego/v4/certcrypto"
|
||||||
|
"github.com/go-acme/lego/v4/lego"
|
||||||
|
"github.com/go-acme/lego/v4/registration"
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
const challengePath = "/.well-known/acme-challenge/"
|
||||||
|
|
||||||
|
func setupAcmeConfig(cfg config.ACMEConfig) (*lego.Config, error) {
|
||||||
|
var myAcmeAccount AcmeAccount
|
||||||
|
var myAcmeConfig *lego.Config
|
||||||
|
|
||||||
|
if cfg.AccountConfigFile == "" {
|
||||||
|
return nil, fmt.Errorf("invalid acme config file: '%s'", cfg.AccountConfigFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
if account, err := os.ReadFile(cfg.AccountConfigFile); err == nil {
|
||||||
|
log.Info().Msgf("found existing acme account config file '%s'", cfg.AccountConfigFile)
|
||||||
|
if err := json.Unmarshal(account, &myAcmeAccount); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
myAcmeAccount.Key, err = certcrypto.ParsePEMPrivateKey([]byte(myAcmeAccount.KeyPEM))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
myAcmeConfig = lego.NewConfig(&myAcmeAccount)
|
||||||
|
myAcmeConfig.CADirURL = cfg.APIEndpoint
|
||||||
|
myAcmeConfig.Certificate.KeyType = certcrypto.RSA2048
|
||||||
|
|
||||||
|
// Validate Config
|
||||||
|
_, err := lego.NewClient(myAcmeConfig)
|
||||||
|
if err != nil {
|
||||||
|
log.Info().Err(err).Msg("config validation failed, you might just delete the config file and let it recreate")
|
||||||
|
return nil, fmt.Errorf("acme config validation failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return myAcmeConfig, nil
|
||||||
|
} else if !os.IsNotExist(err) {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Info().Msgf("no existing acme account config found, try to create a new one")
|
||||||
|
|
||||||
|
privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
myAcmeAccount = AcmeAccount{
|
||||||
|
Email: cfg.Email,
|
||||||
|
Key: privateKey,
|
||||||
|
KeyPEM: string(certcrypto.PEMEncode(privateKey)),
|
||||||
|
}
|
||||||
|
myAcmeConfig = lego.NewConfig(&myAcmeAccount)
|
||||||
|
myAcmeConfig.CADirURL = cfg.APIEndpoint
|
||||||
|
myAcmeConfig.Certificate.KeyType = certcrypto.RSA2048
|
||||||
|
tempClient, err := lego.NewClient(myAcmeConfig)
|
||||||
|
if err != nil {
|
||||||
|
log.Error().Err(err).Msg("Can't create ACME client, continuing with mock certs only")
|
||||||
|
} else {
|
||||||
|
// accept terms & log in to EAB
|
||||||
|
if cfg.EAB_KID == "" || cfg.EAB_HMAC == "" {
|
||||||
|
reg, err := tempClient.Registration.Register(registration.RegisterOptions{TermsOfServiceAgreed: cfg.AcceptTerms})
|
||||||
|
if err != nil {
|
||||||
|
log.Error().Err(err).Msg("Can't register ACME account, continuing with mock certs only")
|
||||||
|
} else {
|
||||||
|
myAcmeAccount.Registration = reg
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
reg, err := tempClient.Registration.RegisterWithExternalAccountBinding(registration.RegisterEABOptions{
|
||||||
|
TermsOfServiceAgreed: cfg.AcceptTerms,
|
||||||
|
Kid: cfg.EAB_KID,
|
||||||
|
HmacEncoded: cfg.EAB_HMAC,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
log.Error().Err(err).Msg("Can't register ACME account, continuing with mock certs only")
|
||||||
|
} else {
|
||||||
|
myAcmeAccount.Registration = reg
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if myAcmeAccount.Registration != nil {
|
||||||
|
acmeAccountJSON, err := json.Marshal(myAcmeAccount)
|
||||||
|
if err != nil {
|
||||||
|
log.Error().Err(err).Msg("json.Marshalfailed, waiting for manual restart to avoid rate limits")
|
||||||
|
select {}
|
||||||
|
}
|
||||||
|
log.Info().Msgf("new acme account created. write to config file '%s'", cfg.AccountConfigFile)
|
||||||
|
err = os.WriteFile(cfg.AccountConfigFile, acmeAccountJSON, 0o600)
|
||||||
|
if err != nil {
|
||||||
|
log.Error().Err(err).Msg("os.WriteFile failed, waiting for manual restart to avoid rate limits")
|
||||||
|
select {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return myAcmeConfig, nil
|
||||||
|
}
|
83
server/certificates/cached_challengers.go
Normal file
83
server/certificates/cached_challengers.go
Normal file
|
@ -0,0 +1,83 @@
|
||||||
|
package certificates
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-acme/lego/v4/challenge"
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
|
||||||
|
"codeberg.org/codeberg/pages/server/cache"
|
||||||
|
"codeberg.org/codeberg/pages/server/context"
|
||||||
|
)
|
||||||
|
|
||||||
|
type AcmeTLSChallengeProvider struct {
|
||||||
|
challengeCache cache.ICache
|
||||||
|
}
|
||||||
|
|
||||||
|
// make sure AcmeTLSChallengeProvider match Provider interface
|
||||||
|
var _ challenge.Provider = AcmeTLSChallengeProvider{}
|
||||||
|
|
||||||
|
func (a AcmeTLSChallengeProvider) Present(domain, _, keyAuth string) error {
|
||||||
|
return a.challengeCache.Set(domain, keyAuth, 1*time.Hour)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a AcmeTLSChallengeProvider) CleanUp(domain, _, _ string) error {
|
||||||
|
a.challengeCache.Remove(domain)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type AcmeHTTPChallengeProvider struct {
|
||||||
|
challengeCache cache.ICache
|
||||||
|
}
|
||||||
|
|
||||||
|
// make sure AcmeHTTPChallengeProvider match Provider interface
|
||||||
|
var _ challenge.Provider = AcmeHTTPChallengeProvider{}
|
||||||
|
|
||||||
|
func (a AcmeHTTPChallengeProvider) Present(domain, token, keyAuth string) error {
|
||||||
|
return a.challengeCache.Set(domain+"/"+token, keyAuth, 1*time.Hour)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a AcmeHTTPChallengeProvider) CleanUp(domain, token, _ string) error {
|
||||||
|
a.challengeCache.Remove(domain + "/" + token)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func SetupHTTPACMEChallengeServer(challengeCache cache.ICache, sslPort uint) http.HandlerFunc {
|
||||||
|
// handle custom-ssl-ports to be added on https redirects
|
||||||
|
portPart := ""
|
||||||
|
if sslPort != 443 {
|
||||||
|
portPart = fmt.Sprintf(":%d", sslPort)
|
||||||
|
}
|
||||||
|
|
||||||
|
return func(w http.ResponseWriter, req *http.Request) {
|
||||||
|
ctx := context.New(w, req)
|
||||||
|
domain := ctx.TrimHostPort()
|
||||||
|
|
||||||
|
// it's an acme request
|
||||||
|
if strings.HasPrefix(ctx.Path(), challengePath) {
|
||||||
|
challenge, ok := challengeCache.Get(domain + "/" + strings.TrimPrefix(ctx.Path(), challengePath))
|
||||||
|
if !ok || challenge == nil {
|
||||||
|
log.Info().Msgf("HTTP-ACME challenge for '%s' failed: token not found", domain)
|
||||||
|
ctx.String("no challenge for this token", http.StatusNotFound)
|
||||||
|
}
|
||||||
|
log.Info().Msgf("HTTP-ACME challenge for '%s' succeeded", domain)
|
||||||
|
ctx.String(challenge.(string))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// it's a normal http request that needs to be redirected
|
||||||
|
u, err := url.Parse(fmt.Sprintf("https://%s%s%s", domain, portPart, ctx.Path()))
|
||||||
|
if err != nil {
|
||||||
|
log.Error().Err(err).Msg("could not craft http to https redirect")
|
||||||
|
ctx.String("", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
|
||||||
|
newURL := u.String()
|
||||||
|
log.Debug().Msgf("redirect http to https: %s", newURL)
|
||||||
|
ctx.Redirect(newURL, http.StatusMovedPermanently)
|
||||||
|
}
|
||||||
|
}
|
416
server/certificates/certificates.go
Normal file
416
server/certificates/certificates.go
Normal file
|
@ -0,0 +1,416 @@
|
||||||
|
package certificates
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/tls"
|
||||||
|
"crypto/x509"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-acme/lego/v4/certcrypto"
|
||||||
|
"github.com/go-acme/lego/v4/certificate"
|
||||||
|
"github.com/go-acme/lego/v4/challenge/tlsalpn01"
|
||||||
|
"github.com/go-acme/lego/v4/lego"
|
||||||
|
"github.com/hashicorp/golang-lru/v2/expirable"
|
||||||
|
"github.com/reugn/equalizer"
|
||||||
|
"github.com/rs/zerolog"
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
|
||||||
|
"codeberg.org/codeberg/pages/server/cache"
|
||||||
|
psContext "codeberg.org/codeberg/pages/server/context"
|
||||||
|
"codeberg.org/codeberg/pages/server/database"
|
||||||
|
dnsutils "codeberg.org/codeberg/pages/server/dns"
|
||||||
|
"codeberg.org/codeberg/pages/server/gitea"
|
||||||
|
"codeberg.org/codeberg/pages/server/upstream"
|
||||||
|
)
|
||||||
|
|
||||||
|
var ErrUserRateLimitExceeded = errors.New("rate limit exceeded: 10 certificates per user per 24 hours")
|
||||||
|
|
||||||
|
// TLSConfig returns the configuration for generating, serving and cleaning up Let's Encrypt certificates.
|
||||||
|
func TLSConfig(mainDomainSuffix string,
|
||||||
|
giteaClient *gitea.Client,
|
||||||
|
acmeClient *AcmeClient,
|
||||||
|
firstDefaultBranch string,
|
||||||
|
challengeCache, canonicalDomainCache cache.ICache,
|
||||||
|
certDB database.CertDB,
|
||||||
|
noDNS01 bool,
|
||||||
|
rawDomain string,
|
||||||
|
) *tls.Config {
|
||||||
|
// every cert is at most 24h in the cache and 7 days before expiry the cert is renewed
|
||||||
|
keyCache := expirable.NewLRU[string, *tls.Certificate](32, nil, 24*time.Hour)
|
||||||
|
|
||||||
|
return &tls.Config{
|
||||||
|
// check DNS name & get certificate from Let's Encrypt
|
||||||
|
GetCertificate: func(info *tls.ClientHelloInfo) (*tls.Certificate, error) {
|
||||||
|
ctx := psContext.New(nil, nil)
|
||||||
|
log := log.With().Str("ReqId", ctx.ReqId).Logger()
|
||||||
|
|
||||||
|
domain := strings.ToLower(strings.TrimSpace(info.ServerName))
|
||||||
|
log.Debug().Str("domain", domain).Msg("start: get tls certificate")
|
||||||
|
if len(domain) < 1 {
|
||||||
|
return nil, errors.New("missing domain info via SNI (RFC 4366, Section 3.1)")
|
||||||
|
}
|
||||||
|
|
||||||
|
// https request init is actually a acme challenge
|
||||||
|
if info.SupportedProtos != nil {
|
||||||
|
for _, proto := range info.SupportedProtos {
|
||||||
|
if proto != tlsalpn01.ACMETLS1Protocol {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
log.Info().Msgf("Detect ACME-TLS1 challenge for '%s'", domain)
|
||||||
|
|
||||||
|
challenge, ok := challengeCache.Get(domain)
|
||||||
|
if !ok {
|
||||||
|
return nil, errors.New("no challenge for this domain")
|
||||||
|
}
|
||||||
|
cert, err := tlsalpn01.ChallengeCert(domain, challenge.(string))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return cert, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
targetOwner := ""
|
||||||
|
mayObtainCert := true
|
||||||
|
|
||||||
|
if strings.HasSuffix(domain, mainDomainSuffix) || strings.EqualFold(domain, mainDomainSuffix[1:]) {
|
||||||
|
if noDNS01 {
|
||||||
|
// Limit the domains allowed to request a certificate to pages-server domains
|
||||||
|
// and domains for an existing user of org
|
||||||
|
if !strings.EqualFold(domain, mainDomainSuffix[1:]) && !strings.EqualFold(domain, rawDomain) {
|
||||||
|
targetOwner := strings.TrimSuffix(domain, mainDomainSuffix)
|
||||||
|
owner_exist, err := giteaClient.GiteaCheckIfOwnerExists(targetOwner)
|
||||||
|
mayObtainCert = owner_exist
|
||||||
|
if err != nil {
|
||||||
|
log.Error().Err(err).Msgf("Failed to check '%s' existence on the forge: %s", targetOwner, err)
|
||||||
|
mayObtainCert = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// deliver default certificate for the main domain (*.codeberg.page)
|
||||||
|
domain = mainDomainSuffix
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
var targetRepo, targetBranch string
|
||||||
|
targetOwner, targetRepo, targetBranch = dnsutils.GetTargetFromDNS(domain, mainDomainSuffix, firstDefaultBranch)
|
||||||
|
if targetOwner == "" {
|
||||||
|
// DNS not set up, return main certificate to redirect to the docs
|
||||||
|
domain = mainDomainSuffix
|
||||||
|
} else {
|
||||||
|
targetOpt := &upstream.Options{
|
||||||
|
TargetOwner: targetOwner,
|
||||||
|
TargetRepo: targetRepo,
|
||||||
|
TargetBranch: targetBranch,
|
||||||
|
}
|
||||||
|
_, valid := targetOpt.CheckCanonicalDomain(ctx, giteaClient, domain, mainDomainSuffix, canonicalDomainCache)
|
||||||
|
if !valid {
|
||||||
|
// We shouldn't obtain a certificate when we cannot check if the
|
||||||
|
// repository has specified this domain in the `.domains` file.
|
||||||
|
mayObtainCert = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if tlsCertificate, ok := keyCache.Get(domain); ok {
|
||||||
|
// we can use an existing certificate object
|
||||||
|
return tlsCertificate, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var tlsCertificate *tls.Certificate
|
||||||
|
var err error
|
||||||
|
if tlsCertificate, err = acmeClient.retrieveCertFromDB(log, domain, mainDomainSuffix, false, certDB); err != nil {
|
||||||
|
if !errors.Is(err, database.ErrNotFound) {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// we could not find a cert in db, request a new certificate
|
||||||
|
|
||||||
|
// first check if we are allowed to obtain a cert for this domain
|
||||||
|
if strings.EqualFold(domain, mainDomainSuffix) {
|
||||||
|
return nil, errors.New("won't request certificate for main domain, something really bad has happened")
|
||||||
|
}
|
||||||
|
if !mayObtainCert {
|
||||||
|
return nil, fmt.Errorf("won't request certificate for %q", domain)
|
||||||
|
}
|
||||||
|
|
||||||
|
tlsCertificate, err = acmeClient.obtainCert(log, acmeClient.legoClient, []string{domain}, nil, targetOwner, false, mainDomainSuffix, certDB)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
keyCache.Add(domain, tlsCertificate)
|
||||||
|
|
||||||
|
return tlsCertificate, nil
|
||||||
|
},
|
||||||
|
NextProtos: []string{
|
||||||
|
"h2",
|
||||||
|
"http/1.1",
|
||||||
|
tlsalpn01.ACMETLS1Protocol,
|
||||||
|
},
|
||||||
|
|
||||||
|
// generated 2021-07-13, Mozilla Guideline v5.6, Go 1.14.4, intermediate configuration
|
||||||
|
// https://ssl-config.mozilla.org/#server=go&version=1.14.4&config=intermediate&guideline=5.6
|
||||||
|
MinVersion: tls.VersionTLS12,
|
||||||
|
CipherSuites: []uint16{
|
||||||
|
tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
|
||||||
|
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
|
||||||
|
tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
|
||||||
|
tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
|
||||||
|
tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,
|
||||||
|
tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *AcmeClient) checkUserLimit(user string) error {
|
||||||
|
userLimit, ok := c.acmeClientCertificateLimitPerUser[user]
|
||||||
|
if !ok {
|
||||||
|
// Each user can only add 10 new domains per day.
|
||||||
|
userLimit = equalizer.NewTokenBucket(10, time.Hour*24)
|
||||||
|
c.acmeClientCertificateLimitPerUser[user] = userLimit
|
||||||
|
}
|
||||||
|
if !userLimit.Ask() {
|
||||||
|
return fmt.Errorf("user '%s' error: %w", user, ErrUserRateLimitExceeded)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *AcmeClient) retrieveCertFromDB(log zerolog.Logger, sni, mainDomainSuffix string, useDnsProvider bool, certDB database.CertDB) (*tls.Certificate, error) {
|
||||||
|
// parse certificate from database
|
||||||
|
res, err := certDB.Get(sni)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
} else if res == nil {
|
||||||
|
return nil, database.ErrNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
tlsCertificate, err := tls.X509KeyPair(res.Certificate, res.PrivateKey)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: document & put into own function
|
||||||
|
if !strings.EqualFold(sni, mainDomainSuffix) {
|
||||||
|
tlsCertificate.Leaf, err = leaf(&tlsCertificate)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// renew certificates 7 days before they expire
|
||||||
|
if tlsCertificate.Leaf.NotAfter.Before(time.Now().Add(7 * 24 * time.Hour)) {
|
||||||
|
// TODO: use ValidTill of custom cert struct
|
||||||
|
if len(res.CSR) > 0 {
|
||||||
|
// CSR stores the time when the renewal shall be tried again
|
||||||
|
nextTryUnix, err := strconv.ParseInt(string(res.CSR), 10, 64)
|
||||||
|
if err == nil && time.Now().Before(time.Unix(nextTryUnix, 0)) {
|
||||||
|
return &tlsCertificate, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// TODO: make a queue ?
|
||||||
|
go (func() {
|
||||||
|
res.CSR = nil // acme client doesn't like CSR to be set
|
||||||
|
if _, err := c.obtainCert(log, c.legoClient, []string{sni}, res, "", useDnsProvider, mainDomainSuffix, certDB); err != nil {
|
||||||
|
log.Error().Msgf("Couldn't renew certificate for %s: %v", sni, err)
|
||||||
|
}
|
||||||
|
})()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &tlsCertificate, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *AcmeClient) obtainCert(log zerolog.Logger, acmeClient *lego.Client, domains []string, renew *certificate.Resource, user string, useDnsProvider bool, mainDomainSuffix string, keyDatabase database.CertDB) (*tls.Certificate, error) {
	name := strings.TrimPrefix(domains[0], "*")

	// lock to avoid simultaneous requests
	_, working := c.obtainLocks.LoadOrStore(name, struct{}{})
	if working {
		for working {
			time.Sleep(100 * time.Millisecond)
			_, working = c.obtainLocks.Load(name)
		}
		cert, err := c.retrieveCertFromDB(log, name, mainDomainSuffix, useDnsProvider, keyDatabase)
		if err != nil {
			return nil, fmt.Errorf("certificate failed in synchronous request: %w", err)
		}
		return cert, nil
	}
	defer c.obtainLocks.Delete(name)

	if acmeClient == nil {
		if useDnsProvider {
			return mockCert(domains[0], "DNS ACME client is not defined", mainDomainSuffix, keyDatabase)
		} else {
			return mockCert(domains[0], "ACME client uninitialized. This is a server error, please report!", mainDomainSuffix, keyDatabase)
		}
	}

	// request actual cert
	var res *certificate.Resource
	var err error
	if renew != nil && renew.CertURL != "" {
		if c.acmeUseRateLimits {
			c.acmeClientRequestLimit.Take()
		}
		log.Debug().Msgf("Renewing certificate for: %v", domains)
		res, err = acmeClient.Certificate.Renew(*renew, true, false, "")
		if err != nil {
			log.Error().Err(err).Msgf("Couldn't renew certificate for %v, trying to request a new one", domains)
			if c.acmeUseRateLimits {
				c.acmeClientFailLimit.Take()
			}
			res = nil
		}
	}
	if res == nil {
		if user != "" {
			if err := c.checkUserLimit(user); err != nil {
				return nil, err
			}
		}

		if c.acmeUseRateLimits {
			c.acmeClientOrderLimit.Take()
			c.acmeClientRequestLimit.Take()
		}
		log.Debug().Msgf("Re-requesting new certificate for %v", domains)
		res, err = acmeClient.Certificate.Obtain(certificate.ObtainRequest{
			Domains:    domains,
			Bundle:     true,
			MustStaple: false,
		})
		if c.acmeUseRateLimits && err != nil {
			c.acmeClientFailLimit.Take()
		}
	}
	if err != nil {
		log.Error().Err(err).Msgf("Couldn't obtain a new certificate for %v", domains)
		if renew != nil && renew.CertURL != "" {
			tlsCertificate, err := tls.X509KeyPair(renew.Certificate, renew.PrivateKey)
			if err != nil {
				mockC, err2 := mockCert(domains[0], err.Error(), mainDomainSuffix, keyDatabase)
				if err2 != nil {
					return nil, errors.Join(err, err2)
				}
				return mockC, err
			}
			leaf, err := leaf(&tlsCertificate)
			if err == nil && leaf.NotAfter.After(time.Now()) {
				tlsCertificate.Leaf = leaf
				// avoid sending a mock cert instead of a still-valid one; abuse the CSR field to store the time to try again at
				renew.CSR = []byte(strconv.FormatInt(time.Now().Add(6*time.Hour).Unix(), 10))
				if err := keyDatabase.Put(name, renew); err != nil {
					mockC, err2 := mockCert(domains[0], err.Error(), mainDomainSuffix, keyDatabase)
					if err2 != nil {
						return nil, errors.Join(err, err2)
					}
					return mockC, err
				}
				return &tlsCertificate, nil
			}
		}
		return mockCert(domains[0], err.Error(), mainDomainSuffix, keyDatabase)
	}
	log.Debug().Msgf("Obtained certificate for %v", domains)

	if err := keyDatabase.Put(name, res); err != nil {
		return nil, err
	}
	tlsCertificate, err := tls.X509KeyPair(res.Certificate, res.PrivateKey)
	if err != nil {
		return nil, err
	}
	return &tlsCertificate, nil
}

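obtainCert serializes concurrent requests for the same name with a sync.Map used as a lock table: the first caller wins the slot and does the ACME round trip, later callers poll until the slot is freed and then read the stored result. A self-contained sketch of just that pattern (the certificate logic and error handling are left out):

package main

import (
	"fmt"
	"sync"
	"time"
)

var (
	obtainLocks sync.Map // name -> struct{}{} while someone is working on it
	results     sync.Map // name -> result, written before the lock is released
)

func obtain(name string) string {
	if _, working := obtainLocks.LoadOrStore(name, struct{}{}); working {
		// someone else is already obtaining this cert: wait, then reuse their result
		for working {
			time.Sleep(100 * time.Millisecond)
			_, working = obtainLocks.Load(name)
		}
		v, _ := results.Load(name)
		return v.(string)
	}
	defer obtainLocks.Delete(name)

	time.Sleep(200 * time.Millisecond) // stands in for the ACME round trip
	results.Store(name, "cert for "+name)
	v, _ := results.Load(name)
	return v.(string)
}

func main() {
	var wg sync.WaitGroup
	for i := 0; i < 3; i++ {
		wg.Add(1)
		go func() { defer wg.Done(); fmt.Println(obtain("example.org")) }()
	}
	wg.Wait()
}
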
func SetupMainDomainCertificates(log zerolog.Logger, mainDomainSuffix string, acmeClient *AcmeClient, certDB database.CertDB) error {
	// getting main cert before ACME account so that we can fail here without hitting rate limits
	mainCertBytes, err := certDB.Get(mainDomainSuffix)
	if err != nil && !errors.Is(err, database.ErrNotFound) {
		return fmt.Errorf("cert database is not working: %w", err)
	}

	if mainCertBytes == nil {
		_, err = acmeClient.obtainCert(log, acmeClient.dnsChallengerLegoClient, []string{"*" + mainDomainSuffix, mainDomainSuffix[1:]}, nil, "", true, mainDomainSuffix, certDB)
		if err != nil {
			log.Error().Err(err).Msg("Couldn't renew main domain certificate, continuing with mock certs only")
		}
	}

	return nil
}

func MaintainCertDB(log zerolog.Logger, ctx context.Context, interval time.Duration, acmeClient *AcmeClient, mainDomainSuffix string, certDB database.CertDB) {
	for {
		// delete certs that will already have expired before the next clean-up
		threshold := time.Now().Add(interval)
		expiredCertCount := 0

		certs, err := certDB.Items(0, 0)
		if err != nil {
			log.Error().Err(err).Msg("could not get certs from list")
		} else {
			for _, cert := range certs {
				if !strings.EqualFold(cert.Domain, strings.TrimPrefix(mainDomainSuffix, ".")) {
					if time.Unix(cert.ValidTill, 0).Before(threshold) {
						err := certDB.Delete(cert.Domain)
						if err != nil {
							log.Error().Err(err).Msgf("Deleting expired certificate for %q failed", cert.Domain)
						} else {
							expiredCertCount++
						}
					}
				}
			}
			log.Debug().Msgf("Removed %d expired certificates from the database", expiredCertCount)
		}

		// update main cert
		res, err := certDB.Get(mainDomainSuffix)
		if err != nil {
			log.Error().Msgf("Couldn't get cert for domain %q", mainDomainSuffix)
		} else if res == nil {
			log.Error().Msgf("Couldn't renew certificate for main domain %q: expected the main domain cert to exist, but it's missing - seems like the database is corrupted", mainDomainSuffix)
		} else {
			tlsCertificates, err := certcrypto.ParsePEMBundle(res.Certificate)
			if err != nil {
				log.Error().Err(fmt.Errorf("could not parse cert for mainDomainSuffix: %w", err))
			} else if tlsCertificates[0].NotAfter.Before(time.Now().Add(30 * 24 * time.Hour)) {
				// renew main certificate 30 days before it expires
				go (func() {
					_, err = acmeClient.obtainCert(log, acmeClient.dnsChallengerLegoClient, []string{"*" + mainDomainSuffix, mainDomainSuffix[1:]}, res, "", true, mainDomainSuffix, certDB)
					if err != nil {
						log.Error().Err(err).Msg("Couldn't renew certificate for main domain")
					}
				})()
			}
		}

		select {
		case <-ctx.Done():
			return
		case <-time.After(interval):
		}
	}
}

// leaf returns the parsed leaf certificate, either from c.Leaf or by parsing
// the corresponding c.Certificate[0].
// After successfully parsing the cert, c.Leaf gets set to the parsed cert.
func leaf(c *tls.Certificate) (*x509.Certificate, error) {
	if c.Leaf != nil {
		return c.Leaf, nil
	}

	leaf, err := x509.ParseCertificate(c.Certificate[0])
	if err != nil {
		return nil, fmt.Errorf("tlsCert - failed to parse leaf: %w", err)
	}

	c.Leaf = leaf

	return leaf, err
}
87
server/certificates/mock.go
Normal file
@@ -0,0 +1,87 @@
package certificates

import (
	"bytes"
	"crypto/rand"
	"crypto/rsa"
	"crypto/tls"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"math/big"
	"time"

	"github.com/go-acme/lego/v4/certcrypto"
	"github.com/go-acme/lego/v4/certificate"
	"github.com/rs/zerolog/log"

	"codeberg.org/codeberg/pages/server/database"
)

func mockCert(domain, msg, mainDomainSuffix string, keyDatabase database.CertDB) (*tls.Certificate, error) {
	key, err := certcrypto.GeneratePrivateKey(certcrypto.RSA2048)
	if err != nil {
		return nil, err
	}

	template := x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject: pkix.Name{
			CommonName:   domain,
			Organization: []string{"Codeberg Pages Error Certificate (couldn't obtain ACME certificate)"},
			OrganizationalUnit: []string{
				"Will not try again for 6 hours to avoid hitting rate limits for your domain.",
				"Check https://docs.codeberg.org/codeberg-pages/troubleshooting/ for troubleshooting tips, and feel " +
					"free to create an issue at https://codeberg.org/Codeberg/pages-server if you can't solve it.\n",
				"Error message: " + msg,
			},
		},

		// certificates younger than 7 days are renewed, so this ensures the cert will not be renewed for 6 hours
		NotAfter:  time.Now().Add(time.Hour*24*7 + time.Hour*6),
		NotBefore: time.Now(),

		KeyUsage:              x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
		ExtKeyUsage:           []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		BasicConstraintsValid: true,
	}
	certBytes, err := x509.CreateCertificate(
		rand.Reader,
		&template,
		&template,
		&key.(*rsa.PrivateKey).PublicKey,
		key,
	)
	if err != nil {
		return nil, err
	}

	out := &bytes.Buffer{}
	err = pem.Encode(out, &pem.Block{
		Bytes: certBytes,
		Type:  "CERTIFICATE",
	})
	if err != nil {
		return nil, err
	}
	outBytes := out.Bytes()
	res := &certificate.Resource{
		PrivateKey:        certcrypto.PEMEncode(key),
		Certificate:       outBytes,
		IssuerCertificate: outBytes,
		Domain:            domain,
	}
	databaseName := domain
	if domain == "*"+mainDomainSuffix || domain == mainDomainSuffix[1:] {
		databaseName = mainDomainSuffix
	}
	if err := keyDatabase.Put(databaseName, res); err != nil {
		log.Error().Err(err).Msg("could not save mock cert in database")
	}

	tlsCertificate, err := tls.X509KeyPair(res.Certificate, res.PrivateKey)
	if err != nil {
		return nil, err
	}
	return &tlsCertificate, nil
}
21
server/certificates/mock_test.go
Normal file
@@ -0,0 +1,21 @@
package certificates

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"

	"codeberg.org/codeberg/pages/server/database"
)

func TestMockCert(t *testing.T) {
	db := database.NewMockCertDB(t)
	db.Mock.On("Put", mock.Anything, mock.Anything).Return(nil)

	cert, err := mockCert("example.com", "some error msg", "codeberg.page", db)
	assert.NoError(t, err)
	if assert.NotEmpty(t, cert) {
		assert.NotEmpty(t, cert.Certificate)
	}
}
72
server/context/context.go
Normal file
@@ -0,0 +1,72 @@
package context

import (
	stdContext "context"
	"net/http"

	"codeberg.org/codeberg/pages/server/utils"
	"github.com/hashicorp/go-uuid"
	"github.com/rs/zerolog/log"
)

type Context struct {
	RespWriter http.ResponseWriter
	Req        *http.Request
	StatusCode int
	ReqId      string
}

func New(w http.ResponseWriter, r *http.Request) *Context {
	req_uuid, err := uuid.GenerateUUID()
	if err != nil {
		log.Error().Err(err).Msg("Failed to generate request id, assigning error value")
		req_uuid = "ERROR"
	}

	return &Context{
		RespWriter: w,
		Req:        r,
		StatusCode: http.StatusOK,
		ReqId:      req_uuid,
	}
}

func (c *Context) Context() stdContext.Context {
	if c.Req != nil {
		return c.Req.Context()
	}
	return stdContext.Background()
}

func (c *Context) Response() *http.Response {
	if c.Req != nil && c.Req.Response != nil {
		return c.Req.Response
	}
	return nil
}

func (c *Context) String(raw string, status ...int) {
	code := http.StatusOK
	if len(status) != 0 {
		code = status[0]
	}
	c.RespWriter.WriteHeader(code)
	_, _ = c.RespWriter.Write([]byte(raw))
}

func (c *Context) Redirect(uri string, statusCode int) {
	http.Redirect(c.RespWriter, c.Req, uri, statusCode)
}

// Path returns the cleaned requested path.
func (c *Context) Path() string {
	return utils.CleanPath(c.Req.URL.Path)
}

func (c *Context) Host() string {
	return c.Req.URL.Host
}

func (c *Context) TrimHostPort() string {
	return utils.TrimHostPort(c.Req.Host)
}
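
The Context type bundles the response writer, the request, and a per-request id so handlers and log lines can refer to one object. A minimal usage sketch; it assumes the pages-server module is available as an import and the handler body is made up for illustration:

package main

import (
	"net/http"

	"codeberg.org/codeberg/pages/server/context"
)

func main() {
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		ctx := context.New(w, r)            // assigns a request id (or "ERROR")
		ctx.String("requested " + ctx.Path()) // 200 OK with the cleaned path
	})
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
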
78
server/database/interface.go
Normal file
@@ -0,0 +1,78 @@
package database

import (
	"fmt"

	"github.com/go-acme/lego/v4/certcrypto"
	"github.com/go-acme/lego/v4/certificate"
	"github.com/rs/zerolog/log"
)

//go:generate go install github.com/vektra/mockery/v2@latest
//go:generate mockery --name CertDB --output . --filename mock.go --inpackage --case underscore

type CertDB interface {
	Close() error
	Put(name string, cert *certificate.Resource) error
	Get(name string) (*certificate.Resource, error)
	Delete(key string) error
	Items(page, pageSize int) ([]*Cert, error)
}

type Cert struct {
	Domain    string `xorm:"pk NOT NULL UNIQUE 'domain'"`
	Created   int64  `xorm:"created NOT NULL DEFAULT 0 'created'"`
	Updated   int64  `xorm:"updated NOT NULL DEFAULT 0 'updated'"`
	ValidTill int64  `xorm:" NOT NULL DEFAULT 0 'valid_till'"`
	// certificate.Resource
	CertURL           string `xorm:"'cert_url'"`
	CertStableURL     string `xorm:"'cert_stable_url'"`
	PrivateKey        []byte `xorm:"'private_key'"`
	Certificate       []byte `xorm:"'certificate'"`
	IssuerCertificate []byte `xorm:"'issuer_certificate'"`
}

func (c Cert) Raw() *certificate.Resource {
	return &certificate.Resource{
		Domain:            c.Domain,
		CertURL:           c.CertURL,
		CertStableURL:     c.CertStableURL,
		PrivateKey:        c.PrivateKey,
		Certificate:       c.Certificate,
		IssuerCertificate: c.IssuerCertificate,
	}
}

func toCert(name string, c *certificate.Resource) (*Cert, error) {
	tlsCertificates, err := certcrypto.ParsePEMBundle(c.Certificate)
	if err != nil {
		return nil, err
	}
	if len(tlsCertificates) == 0 || tlsCertificates[0] == nil {
		err := fmt.Errorf("parsed cert resource has no cert")
		log.Error().Err(err).Str("domain", c.Domain).Msgf("cert: %v", c)
		return nil, err
	}
	validTill := tlsCertificates[0].NotAfter.Unix()

	// handle wildcard certs
	if name[:1] == "." {
		name = "*" + name
	}
	if name != c.Domain {
		err := fmt.Errorf("domain key '%s' and cert domain '%s' not equal", name, c.Domain)
		log.Error().Err(err).Msg("toCert conversion discovered a mismatch")
		// TODO: fail hard: return nil, err
	}

	return &Cert{
		Domain:    c.Domain,
		ValidTill: validTill,

		CertURL:           c.CertURL,
		CertStableURL:     c.CertStableURL,
		PrivateKey:        c.PrivateKey,
		Certificate:       c.Certificate,
		IssuerCertificate: c.IssuerCertificate,
	}, nil
}
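
The cert database stores the wildcard certificate under the main domain suffix (a key starting with "."), while the certificate itself carries the "*." name; toCert above and xDB.Get/Delete below normalize between the two. A tiny sketch of that convention on its own:

package main

import "fmt"

// normalizeCertKey mirrors the "handle wildcard certs" branch used by toCert and xDB.Get.
func normalizeCertKey(name string) string {
	if name[:1] == "." {
		return "*" + name
	}
	return name
}

func main() {
	fmt.Println(normalizeCertKey(".codeberg.page")) // "*.codeberg.page"
	fmt.Println(normalizeCertKey("example.org"))    // unchanged
}
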
122
server/database/mock.go
Normal file
@@ -0,0 +1,122 @@
// Code generated by mockery v2.20.0. DO NOT EDIT.

package database

import (
	certificate "github.com/go-acme/lego/v4/certificate"
	mock "github.com/stretchr/testify/mock"
)

// MockCertDB is an autogenerated mock type for the CertDB type
type MockCertDB struct {
	mock.Mock
}

// Close provides a mock function with given fields:
func (_m *MockCertDB) Close() error {
	ret := _m.Called()

	var r0 error
	if rf, ok := ret.Get(0).(func() error); ok {
		r0 = rf()
	} else {
		r0 = ret.Error(0)
	}

	return r0
}

// Delete provides a mock function with given fields: key
func (_m *MockCertDB) Delete(key string) error {
	ret := _m.Called(key)

	var r0 error
	if rf, ok := ret.Get(0).(func(string) error); ok {
		r0 = rf(key)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}

// Get provides a mock function with given fields: name
func (_m *MockCertDB) Get(name string) (*certificate.Resource, error) {
	ret := _m.Called(name)

	var r0 *certificate.Resource
	var r1 error
	if rf, ok := ret.Get(0).(func(string) (*certificate.Resource, error)); ok {
		return rf(name)
	}
	if rf, ok := ret.Get(0).(func(string) *certificate.Resource); ok {
		r0 = rf(name)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*certificate.Resource)
		}
	}

	if rf, ok := ret.Get(1).(func(string) error); ok {
		r1 = rf(name)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}

// Items provides a mock function with given fields: page, pageSize
func (_m *MockCertDB) Items(page int, pageSize int) ([]*Cert, error) {
	ret := _m.Called(page, pageSize)

	var r0 []*Cert
	var r1 error
	if rf, ok := ret.Get(0).(func(int, int) ([]*Cert, error)); ok {
		return rf(page, pageSize)
	}
	if rf, ok := ret.Get(0).(func(int, int) []*Cert); ok {
		r0 = rf(page, pageSize)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]*Cert)
		}
	}

	if rf, ok := ret.Get(1).(func(int, int) error); ok {
		r1 = rf(page, pageSize)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}

// Put provides a mock function with given fields: name, cert
func (_m *MockCertDB) Put(name string, cert *certificate.Resource) error {
	ret := _m.Called(name, cert)

	var r0 error
	if rf, ok := ret.Get(0).(func(string, *certificate.Resource) error); ok {
		r0 = rf(name, cert)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}

type mockConstructorTestingTNewMockCertDB interface {
	mock.TestingT
	Cleanup(func())
}

// NewMockCertDB creates a new instance of MockCertDB. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewMockCertDB(t mockConstructorTestingTNewMockCertDB) *MockCertDB {
	mock := &MockCertDB{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}
138
server/database/xorm.go
Normal file
@@ -0,0 +1,138 @@
package database

import (
	"errors"
	"fmt"

	"github.com/rs/zerolog/log"

	"github.com/go-acme/lego/v4/certificate"
	"xorm.io/xorm"

	// register sql driver
	_ "github.com/go-sql-driver/mysql"
	_ "github.com/lib/pq"
	_ "github.com/mattn/go-sqlite3"
)

var _ CertDB = xDB{}

var ErrNotFound = errors.New("entry not found")

type xDB struct {
	engine *xorm.Engine
}

func NewXormDB(dbType, dbConn string) (CertDB, error) {
	if !supportedDriver(dbType) {
		return nil, fmt.Errorf("not supported db type '%s'", dbType)
	}
	if dbConn == "" {
		return nil, fmt.Errorf("no db connection provided")
	}

	e, err := xorm.NewEngine(dbType, dbConn)
	if err != nil {
		return nil, err
	}

	if err := e.Sync2(new(Cert)); err != nil {
		return nil, fmt.Errorf("could not sync db model: %w", err)
	}

	return &xDB{
		engine: e,
	}, nil
}

func (x xDB) Close() error {
	return x.engine.Close()
}

func (x xDB) Put(domain string, cert *certificate.Resource) error {
	log.Trace().Str("domain", cert.Domain).Msg("inserting cert to db")

	c, err := toCert(domain, cert)
	if err != nil {
		return err
	}

	sess := x.engine.NewSession()
	if err := sess.Begin(); err != nil {
		return err
	}
	defer sess.Close()

	if exist, _ := sess.ID(c.Domain).Exist(new(Cert)); exist {
		if _, err := sess.ID(c.Domain).Update(c); err != nil {
			return err
		}
	} else {
		if _, err = sess.Insert(c); err != nil {
			return err
		}
	}

	return sess.Commit()
}

func (x xDB) Get(domain string) (*certificate.Resource, error) {
	// handle wildcard certs
	if domain[:1] == "." {
		domain = "*" + domain
	}

	cert := new(Cert)
	log.Trace().Str("domain", domain).Msg("get cert from db")
	if found, err := x.engine.ID(domain).Get(cert); err != nil {
		return nil, err
	} else if !found {
		return nil, fmt.Errorf("%w: name='%s'", ErrNotFound, domain)
	}
	return cert.Raw(), nil
}

func (x xDB) Delete(domain string) error {
	// handle wildcard certs
	if domain[:1] == "." {
		domain = "*" + domain
	}

	log.Trace().Str("domain", domain).Msg("delete cert from db")
	_, err := x.engine.ID(domain).Delete(new(Cert))
	return err
}

// Items returns all certs from the db; if pageSize is 0 it does not use a limit
func (x xDB) Items(page, pageSize int) ([]*Cert, error) {
	// paginated return
	if pageSize > 0 {
		certs := make([]*Cert, 0, pageSize)
		if page < 1 {
			page = 1
		}
		err := x.engine.Limit(pageSize, (page-1)*pageSize).Find(&certs)
		return certs, err
	}

	// return all
	certs := make([]*Cert, 0, 64)
	err := x.engine.Find(&certs)
	return certs, err
}

// Supported database drivers
const (
	DriverSqlite   = "sqlite3"
	DriverMysql    = "mysql"
	DriverPostgres = "postgres"
)

func supportedDriver(driver string) bool {
	switch driver {
	case DriverMysql, DriverPostgres, DriverSqlite:
		return true
	default:
		return false
	}
}
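
A sketch of wiring the xorm-backed store from Go code: an in-memory SQLite database, one Put and one Get, using only the constructors and methods shown above. The PEM files are assumptions; Put only succeeds if cert.pem contains a parseable certificate bundle whose subject matches the key (see toCert above).

package main

import (
	"fmt"
	"os"

	"github.com/go-acme/lego/v4/certificate"

	"codeberg.org/codeberg/pages/server/database"
)

func main() {
	certDB, err := database.NewXormDB("sqlite3", ":memory:")
	if err != nil {
		panic(err)
	}
	defer certDB.Close()

	certPEM, _ := os.ReadFile("cert.pem") // assumed to hold a cert for *.example.org
	keyPEM, _ := os.ReadFile("key.pem")

	if err := certDB.Put(".example.org", &certificate.Resource{
		Domain:      "*.example.org",
		Certificate: certPEM,
		PrivateKey:  keyPEM,
	}); err != nil {
		panic(err)
	}

	res, err := certDB.Get(".example.org") // the key is resolved to the wildcard entry
	fmt.Println(res.Domain, err)
}
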
92
server/database/xorm_test.go
Normal file
@@ -0,0 +1,92 @@
package database

import (
	"errors"
	"testing"

	"github.com/go-acme/lego/v4/certificate"
	"github.com/stretchr/testify/assert"
	"xorm.io/xorm"
)

func newTestDB(t *testing.T) *xDB {
	e, err := xorm.NewEngine("sqlite3", ":memory:")
	assert.NoError(t, err)
	assert.NoError(t, e.Sync2(new(Cert)))
	return &xDB{engine: e}
}

func TestSanitizeWildcardCerts(t *testing.T) {
	certDB := newTestDB(t)

	_, err := certDB.Get(".not.found")
	assert.True(t, errors.Is(err, ErrNotFound))

	// TODO: a cert key and domain mismatch does not fail hard yet
	// https://codeberg.org/Codeberg/pages-server/src/commit/d8595cee882e53d7f44f1ddc4ef8a1f7b8f31d8d/server/database/interface.go#L64
	//
	// assert.Error(t, certDB.Put(".wildcard.de", &certificate.Resource{
	// 	Domain:      "*.localhost.mock.directory",
	// 	Certificate: localhost_mock_directory_certificate,
	// }))

	// insert new wildcard cert
	assert.NoError(t, certDB.Put(".wildcard.de", &certificate.Resource{
		Domain:      "*.wildcard.de",
		Certificate: localhost_mock_directory_certificate,
	}))

	// update existing cert
	assert.NoError(t, certDB.Put(".wildcard.de", &certificate.Resource{
		Domain:      "*.wildcard.de",
		Certificate: localhost_mock_directory_certificate,
	}))

	c1, err := certDB.Get(".wildcard.de")
	assert.NoError(t, err)
	c2, err := certDB.Get("*.wildcard.de")
	assert.NoError(t, err)
	assert.EqualValues(t, c1, c2)
}

var localhost_mock_directory_certificate = []byte(`-----BEGIN CERTIFICATE-----
MIIDczCCAlugAwIBAgIIJyBaXHmLk6gwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSA0OWE0ZmIwHhcNMjMwMjEwMDEwOTA2
WhcNMjgwMjEwMDEwOTA2WjAjMSEwHwYDVQQDExhsb2NhbGhvc3QubW9jay5kaXJl
Y3RvcnkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIU/CjzS7t62Gj
neEMqvP7sn99ULT7AEUzEfWL05fWG2z714qcUg1hXkZLgdVDgmsCpplyddip7+2t
ZH/9rLPLMqJphzvOL4CF6jDLbeifETtKyjnt9vUZFnnNWcP3tu8lo8iYSl08qsUI
Pp/hiEriAQzCDjTbR5m9xUPNPYqxzcS4ALzmmCX9Qfc4CuuhMkdv2G4TT7rylWrA
SCSRPnGjeA7pCByfNrO/uXbxmzl3sMO3k5sqgMkx1QIHEN412V8+vtx88mt2sM6k
xjzGZWWKXlRq+oufIKX9KPplhsCjMH6E3VNAzgOPYDqXagtUcGmLWghURltO8Mt2
zwM6OgjjAgMBAAGjgaUwgaIwDgYDVR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsG
AQUFBwMBBggrBgEFBQcDAjAMBgNVHRMBAf8EAjAAMB0GA1UdDgQWBBSMQvlJ1755
sarf8i1KNqj7s5o/aDAfBgNVHSMEGDAWgBTcZcxJMhWdP7MecHCCpNkFURC/YzAj
BgNVHREEHDAaghhsb2NhbGhvc3QubW9jay5kaXJlY3RvcnkwDQYJKoZIhvcNAQEL
BQADggEBACcd7TT28OWwzQN2PcH0aG38JX5Wp2iOS/unDCfWjNAztXHW7nBDMxza
VtyebkJfccexpuVuOsjOX+bww0vtEYIvKX3/GbkhogksBrNkE0sJZtMnZWMR33wa
YxAy/kJBTmLi02r8fX9ZhwjldStHKBav4USuP7DXZjrgX7LFQhR4LIDrPaYqQRZ8
ltC3mM9LDQ9rQyIFP5cSBMO3RUAm4I8JyLoOdb/9G2uxjHr7r6eG1g8DmLYSKBsQ
mWGQDOYgR3cGltDe2yMxM++yHY+b1uhxGOWMrDA1+1k7yI19LL8Ifi2FMovDfu/X
JxYk1NNNtdctwaYJFenmGQvDaIq1KgE=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIDUDCCAjigAwIBAgIIKBJ7IIA6W1swDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE
AxMVUGViYmxlIFJvb3QgQ0EgNTdmZjE2MCAXDTIzMDIwOTA1MzMxMloYDzIwNTMw
MjA5MDUzMzEyWjAoMSYwJAYDVQQDEx1QZWJibGUgSW50ZXJtZWRpYXRlIENBIDQ5
YTRmYjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANOvlqRx8SXQFWo2
gFCiXxls53eENcyr8+meFyjgnS853eEvplaPxoa2MREKd+ZYxM8EMMfj2XGvR3UI
aqR5QyLQ9ihuRqvQo4fG91usBHgH+vDbGPdMX8gDmm9HgnmtOVhSKJU+M2jfE1SW
UuWB9xOa3LMreTXbTNfZEMoXf+GcWZMbx5WPgEga3DvfmV+RsfNvB55eD7YAyZgF
ZnQ3Dskmnxxlkz0EGgd7rqhFHHNB9jARlL22gITADwoWZidlr3ciM9DISymRKQ0c
mRN15fQjNWdtuREgJlpXecbYQMGhdTOmFrqdHkveD1o63rGSC4z+s/APV6xIbcRp
aNpO7L8CAwEAAaOBgzCBgDAOBgNVHQ8BAf8EBAMCAoQwHQYDVR0lBBYwFAYIKwYB
BQUHAwEGCCsGAQUFBwMCMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNxlzEky
FZ0/sx5wcIKk2QVREL9jMB8GA1UdIwQYMBaAFOqfkm9rebIz4z0SDIKW5edLg5JM
MA0GCSqGSIb3DQEBCwUAA4IBAQBRG9AHEnyj2fKzVDDbQaKHjAF5jh0gwyHoIeRK
FkP9mQNSWxhvPWI0tK/E49LopzmVuzSbDd5kZsaii73rAs6f6Rf9W5veo3AFSEad
stM+Zv0f2vWB38nuvkoCRLXMX+QUeuL65rKxdEpyArBju4L3/PqAZRgMLcrH+ak8
nvw5RdAq+Km/ZWyJgGikK6cfMmh91YALCDFnoWUWrCjkBaBFKrG59ONV9f0IQX07
aNfFXFCF5l466xw9dHjw5iaFib10cpY3iq4kyPYIMs6uaewkCtxWKKjiozM4g4w3
HqwyUyZ52WUJOJ/6G9DJLDtN3fgGR+IAp8BhYd5CqOscnt3h
-----END CERTIFICATE-----`)
66
server/dns/dns.go
Normal file
@@ -0,0 +1,66 @@
package dns

import (
	"net"
	"strings"
	"time"

	"github.com/hashicorp/golang-lru/v2/expirable"
)

const (
	lookupCacheValidity = 30 * time.Second
	defaultPagesRepo    = "pages"
)

// TODO(#316): refactor to not use global variables
var lookupCache *expirable.LRU[string, string] = expirable.NewLRU[string, string](4096, nil, lookupCacheValidity)

// GetTargetFromDNS searches for CNAME or TXT entries on the request domain ending with MainDomainSuffix.
// If everything is fine, it returns the target data.
func GetTargetFromDNS(domain, mainDomainSuffix, firstDefaultBranch string) (targetOwner, targetRepo, targetBranch string) {
	// Get CNAME or TXT
	var cname string
	var err error

	if entry, ok := lookupCache.Get(domain); ok {
		cname = entry
	} else {
		cname, err = net.LookupCNAME(domain)
		cname = strings.TrimSuffix(cname, ".")
		if err != nil || !strings.HasSuffix(cname, mainDomainSuffix) {
			cname = ""
			// TODO: check if the A record matches!
			names, err := net.LookupTXT(domain)
			if err == nil {
				for _, name := range names {
					name = strings.TrimSuffix(strings.TrimSpace(name), ".")
					if strings.HasSuffix(name, mainDomainSuffix) {
						cname = name
						break
					}
				}
			}
		}
		_ = lookupCache.Add(domain, cname)
	}
	if cname == "" {
		return
	}
	cnameParts := strings.Split(strings.TrimSuffix(cname, mainDomainSuffix), ".")
	targetOwner = cnameParts[len(cnameParts)-1]
	if len(cnameParts) > 1 {
		targetRepo = cnameParts[len(cnameParts)-2]
	}
	if len(cnameParts) > 2 {
		targetBranch = cnameParts[len(cnameParts)-3]
	}
	if targetRepo == "" {
		targetRepo = defaultPagesRepo
	}
	if targetBranch == "" && targetRepo != defaultPagesRepo {
		targetBranch = firstDefaultBranch
	}
	// if targetBranch is still empty, the caller must find the default branch
	return
}
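
GetTargetFromDNS reads the part of the CNAME/TXT target in front of the main domain suffix right-to-left as owner, repo, branch. A worked example of just that split, with ".codeberg.page" used as an example suffix value:

package main

import (
	"fmt"
	"strings"
)

// parseTarget mirrors the cnameParts handling in GetTargetFromDNS, without the DNS lookups.
func parseTarget(cname, mainDomainSuffix string) (owner, repo, branch string) {
	parts := strings.Split(strings.TrimSuffix(cname, mainDomainSuffix), ".")
	owner = parts[len(parts)-1]
	if len(parts) > 1 {
		repo = parts[len(parts)-2]
	}
	if len(parts) > 2 {
		branch = parts[len(parts)-3]
	}
	if repo == "" {
		repo = "pages" // defaultPagesRepo
	}
	return owner, repo, branch
}

func main() {
	fmt.Println(parseTarget("alice.codeberg.page", ".codeberg.page"))           // alice pages
	fmt.Println(parseTarget("blog.alice.codeberg.page", ".codeberg.page"))      // alice blog
	fmt.Println(parseTarget("main.blog.alice.codeberg.page", ".codeberg.page")) // alice blog main
}
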
154
server/gitea/cache.go
Normal file
@@ -0,0 +1,154 @@
package gitea

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"

	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"

	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/context"
)

const (
	// defaultBranchCacheTimeout specifies the timeout for the default branch cache. It can be quite long.
	defaultBranchCacheTimeout = 15 * time.Minute

	// branchExistenceCacheTimeout specifies the timeout for the branch timestamp & existence cache. It should be shorter
	// than fileCacheTimeout, as that gets invalidated if the branch timestamp has changed. That way, repo changes will be
	// picked up faster, while still allowing the content to be cached longer if nothing changes.
	branchExistenceCacheTimeout = 5 * time.Minute

	// fileCacheTimeout specifies the timeout for the file content cache - you might want to make this quite long, depending
	// on your available memory.
	// TODO: move as option into cache interface
	fileCacheTimeout = 5 * time.Minute

	// ownerExistenceCacheTimeout specifies the timeout for the existence of a repo/org
	ownerExistenceCacheTimeout = 5 * time.Minute

	// fileCacheSizeLimit limits the maximum file size that will be cached, and is set to 1 MB by default.
	fileCacheSizeLimit = int64(1000 * 1000)
)

type FileResponse struct {
	Exists    bool   `json:"exists"`
	IsSymlink bool   `json:"isSymlink"`
	ETag      string `json:"eTag"`
	MimeType  string `json:"mimeType"` // uncompressed MIME type
	RawMime   string `json:"rawMime"`  // raw MIME type (if compressed, type of compression)
	Body      []byte `json:"-"`        // saved separately
}

func (f FileResponse) IsEmpty() bool {
	return len(f.Body) == 0
}

func (f FileResponse) createHttpResponse(cacheKey string, decompress bool) (header http.Header, statusCode int) {
	header = make(http.Header)

	if f.Exists {
		statusCode = http.StatusOK
	} else {
		statusCode = http.StatusNotFound
	}

	if f.IsSymlink {
		header.Set(giteaObjectTypeHeader, objTypeSymlink)
	}
	header.Set(ETagHeader, f.ETag)

	if decompress {
		header.Set(ContentTypeHeader, f.MimeType)
	} else {
		header.Set(ContentTypeHeader, f.RawMime)
	}

	header.Set(ContentLengthHeader, fmt.Sprintf("%d", len(f.Body)))
	header.Set(PagesCacheIndicatorHeader, "true")

	log.Trace().Msgf("fileCache for %q used", cacheKey)
	return header, statusCode
}

type BranchTimestamp struct {
	NotFound  bool      `json:"notFound"`
	Branch    string    `json:"branch,omitempty"`
	Timestamp time.Time `json:"timestamp,omitempty"`
}

type writeCacheReader struct {
	originalReader io.ReadCloser
	buffer         *bytes.Buffer
	fileResponse   *FileResponse
	cacheKey       string
	cache          cache.ICache
	hasError       bool
	doNotCache     bool
	complete       bool
	log            zerolog.Logger
}

func (t *writeCacheReader) Read(p []byte) (n int, err error) {
	t.log.Trace().Msgf("[cache] read %q", t.cacheKey)
	n, err = t.originalReader.Read(p)
	if err == io.EOF {
		t.complete = true
	}
	if err != nil && err != io.EOF {
		t.log.Trace().Err(err).Msgf("[cache] original reader for %q has returned an error", t.cacheKey)
		t.hasError = true
	} else if n > 0 {
		if t.buffer.Len()+n > int(fileCacheSizeLimit) {
			t.doNotCache = true
			t.buffer.Reset()
		} else {
			_, _ = t.buffer.Write(p[:n])
		}
	}
	return
}

func (t *writeCacheReader) Close() error {
	doWrite := !t.hasError && !t.doNotCache && t.complete
	fc := *t.fileResponse
	fc.Body = t.buffer.Bytes()
	if doWrite {
		jsonToCache, err := json.Marshal(fc)
		if err != nil {
			t.log.Trace().Err(err).Msgf("[cache] marshaling json for %q has returned an error", t.cacheKey+"|Metadata")
		}
		err = t.cache.Set(t.cacheKey+"|Metadata", jsonToCache, fileCacheTimeout)
		if err != nil {
			t.log.Trace().Err(err).Msgf("[cache] writer for %q has returned an error", t.cacheKey+"|Metadata")
		}
		err = t.cache.Set(t.cacheKey+"|Body", fc.Body, fileCacheTimeout)
		if err != nil {
			t.log.Trace().Err(err).Msgf("[cache] writer for %q has returned an error", t.cacheKey+"|Body")
		}
	}
	t.log.Trace().Msgf("cacheReader for %q saved=%t closed", t.cacheKey, doWrite)
	return t.originalReader.Close()
}

func (f FileResponse) CreateCacheReader(ctx *context.Context, r io.ReadCloser, cache cache.ICache, cacheKey string) io.ReadCloser {
	log := log.With().Str("ReqId", ctx.ReqId).Logger()
	if r == nil || cache == nil || cacheKey == "" {
		log.Error().Msg("could not create CacheReader")
		return nil
	}

	return &writeCacheReader{
		originalReader: r,
		buffer:         bytes.NewBuffer(make([]byte, 0)),
		fileResponse:   &f,
		cache:          cache,
		cacheKey:       cacheKey,
		log:            log,
	}
}
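
writeCacheReader tees everything that streams to the client into a buffer and stores the copy once the body has been read to EOF, unless it grew past fileCacheSizeLimit. A standard-library-only sketch of the same tee idea, independent of the cache interface above:

package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
)

const cacheSizeLimit = 1000 * 1000 // mirrors fileCacheSizeLimit above

// readAndMaybeCache streams the body to the caller and keeps a copy for caching
// only when the read completed and stayed under the size limit.
func readAndMaybeCache(body io.Reader) (served, cached []byte) {
	buf := &bytes.Buffer{}
	served, err := io.ReadAll(io.TeeReader(body, buf)) // stands in for the client-side read
	if err != nil || buf.Len() > cacheSizeLimit {
		return served, nil // incomplete or too large: do not cache
	}
	return served, buf.Bytes()
}

func main() {
	served, cached := readAndMaybeCache(strings.NewReader("<html>hello</html>"))
	fmt.Println(len(served), len(cached))
}
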
386
server/gitea/client.go
Normal file
@@ -0,0 +1,386 @@
package gitea

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"mime"
	"net/http"
	"net/url"
	"path"
	"strconv"
	"strings"
	"time"

	"code.gitea.io/sdk/gitea"
	"github.com/rs/zerolog/log"

	"codeberg.org/codeberg/pages/config"
	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/context"
	"codeberg.org/codeberg/pages/server/version"
)

var ErrorNotFound = errors.New("not found")

const (
	// cache key prefixes
	branchTimestampCacheKeyPrefix = "branchTime"
	defaultBranchCacheKeyPrefix   = "defaultBranch"
	rawContentCacheKeyPrefix      = "rawContent"
	ownerExistenceKeyPrefix       = "ownerExist"

	// pages server
	PagesCacheIndicatorHeader = "X-Pages-Cache"
	symlinkReadLimit          = 10000

	// gitea
	giteaObjectTypeHeader = "X-Gitea-Object-Type"
	objTypeSymlink        = "symlink"

	// std
	ETagHeader            = "ETag"
	ContentTypeHeader     = "Content-Type"
	ContentLengthHeader   = "Content-Length"
	ContentEncodingHeader = "Content-Encoding"
)

type Client struct {
	sdkClient     *gitea.Client
	sdkFileClient *gitea.Client
	responseCache cache.ICache

	giteaRoot string

	followSymlinks bool
	supportLFS     bool

	forbiddenMimeTypes map[string]bool
	defaultMimeType    string
}

func NewClient(cfg config.ForgeConfig, respCache cache.ICache) (*Client, error) {
	// url.Parse returns valid on almost anything...
	rootURL, err := url.ParseRequestURI(cfg.Root)
	if err != nil {
		return nil, fmt.Errorf("invalid forgejo/gitea root url: %w", err)
	}
	giteaRoot := strings.TrimSuffix(rootURL.String(), "/")

	forbiddenMimeTypes := make(map[string]bool, len(cfg.ForbiddenMimeTypes))
	for _, mimeType := range cfg.ForbiddenMimeTypes {
		forbiddenMimeTypes[mimeType] = true
	}

	defaultMimeType := cfg.DefaultMimeType
	if defaultMimeType == "" {
		defaultMimeType = "application/octet-stream"
	}

	sdkClient, err := gitea.NewClient(
		giteaRoot,
		gitea.SetHTTPClient(&http.Client{Timeout: 10 * time.Second}),
		gitea.SetToken(cfg.Token),
		gitea.SetUserAgent("pages-server/"+version.Version),
	)
	if err != nil {
		return nil, err
	}

	sdkFileClient, err := gitea.NewClient(
		giteaRoot,
		gitea.SetHTTPClient(&http.Client{Timeout: 1 * time.Hour}),
		gitea.SetToken(cfg.Token),
		gitea.SetUserAgent("pages-server/"+version.Version),
	)

	return &Client{
		sdkClient:     sdkClient,
		sdkFileClient: sdkFileClient,
		responseCache: respCache,

		giteaRoot: giteaRoot,

		followSymlinks: cfg.FollowSymlinks,
		supportLFS:     cfg.LFSEnabled,

		forbiddenMimeTypes: forbiddenMimeTypes,
		defaultMimeType:    defaultMimeType,
	}, err
}

func (client *Client) ContentWebLink(targetOwner, targetRepo, branch, resource string) string {
	return path.Join(client.giteaRoot, targetOwner, targetRepo, "src/branch", branch, resource)
}

func (client *Client) GiteaRawContent(ctx *context.Context, targetOwner, targetRepo, ref, resource string) ([]byte, error) {
	reader, _, _, err := client.ServeRawContent(ctx, targetOwner, targetRepo, ref, resource, false)
	if err != nil {
		return nil, err
	}
	defer reader.Close()
	return io.ReadAll(reader)
}

func (client *Client) ServeRawContent(ctx *context.Context, targetOwner, targetRepo, ref, resource string, decompress bool) (io.ReadCloser, http.Header, int, error) {
	cacheKey := fmt.Sprintf("%s/%s/%s|%s|%s", rawContentCacheKeyPrefix, targetOwner, targetRepo, ref, resource)
	log := log.With().Str("ReqId", ctx.ReqId).Str("cache_key", cacheKey).Logger()
	log.Trace().Msg("try file in cache")
	// handle the case that a cache entry exists
	if cacheMetadata, ok := client.responseCache.Get(cacheKey + "|Metadata"); ok {
		var cache FileResponse
		err := json.Unmarshal(cacheMetadata.([]byte), &cache)
		if err != nil {
			log.Error().Err(err).Msgf("[cache] failed to unmarshal metadata for: %s", cacheKey)
			return nil, nil, http.StatusNotFound, err
		}

		if !cache.Exists {
			return nil, nil, http.StatusNotFound, ErrorNotFound
		}

		body, ok := client.responseCache.Get(cacheKey + "|Body")
		if !ok {
			log.Error().Msgf("[cache] failed to get body for: %s", cacheKey)
			return nil, nil, http.StatusNotFound, ErrorNotFound
		}
		cache.Body = body.([]byte)

		cachedHeader, cachedStatusCode := cache.createHttpResponse(cacheKey, decompress)
		if cache.Exists {
			if cache.IsSymlink {
				linkDest := string(cache.Body)
				log.Debug().Msgf("[cache] follow symlink from %q to %q", resource, linkDest)
				return client.ServeRawContent(ctx, targetOwner, targetRepo, ref, linkDest, decompress)
			} else {
				log.Debug().Msgf("[cache] return %d bytes", len(cache.Body))
				return io.NopCloser(bytes.NewReader(cache.Body)), cachedHeader, cachedStatusCode, nil
			}
		} else {
			return nil, nil, http.StatusNotFound, ErrorNotFound
		}
	}
	log.Trace().Msg("file not in cache")
	// not in cache, open reader via gitea api
	reader, resp, err := client.sdkFileClient.GetFileReader(targetOwner, targetRepo, ref, resource, client.supportLFS)
	if resp != nil {
		switch resp.StatusCode {
		case http.StatusOK:
			// first handle symlinks
			{
				objType := resp.Header.Get(giteaObjectTypeHeader)
				log.Trace().Msgf("server raw content object %q", objType)
				if client.followSymlinks && objType == objTypeSymlink {
					defer reader.Close()
					// read limited chars for symlink
					linkDestBytes, err := io.ReadAll(io.LimitReader(reader, symlinkReadLimit))
					if err != nil {
						return nil, nil, http.StatusInternalServerError, err
					}
					linkDest := strings.TrimSpace(string(linkDestBytes))

					// handle relative links
					// we first remove the link from the path, and make a relative join (resolve parent paths like "/../" too)
					linkDest = path.Join(path.Dir(resource), linkDest)

					// we store symlink not content to reduce duplicates in cache
					fileResponse := FileResponse{
						Exists:    true,
						IsSymlink: true,
						Body:      []byte(linkDest),
						ETag:      resp.Header.Get(ETagHeader),
					}
					log.Trace().Msgf("file response has %d bytes", len(fileResponse.Body))
					jsonToCache, err := json.Marshal(fileResponse)
					if err != nil {
						log.Error().Err(err).Msgf("[cache] marshaling json metadata for %q has returned an error", cacheKey)
					}
					if err := client.responseCache.Set(cacheKey+"|Metadata", jsonToCache, fileCacheTimeout); err != nil {
						log.Error().Err(err).Msg("[cache] error on cache write")
					}
					if err := client.responseCache.Set(cacheKey+"|Body", fileResponse.Body, fileCacheTimeout); err != nil {
						log.Error().Err(err).Msg("[cache] error on cache write")
					}

					log.Debug().Msgf("follow symlink from %q to %q", resource, linkDest)
					return client.ServeRawContent(ctx, targetOwner, targetRepo, ref, linkDest, decompress)
				}
			}

			// now we are sure it's content so set the MIME type
			mimeType, rawType := client.getMimeTypeByExtension(resource)
			resp.Response.Header.Set(ContentTypeHeader, mimeType)
			if decompress {
				resp.Response.Header.Set(ContentTypeHeader, mimeType)
			} else {
				resp.Response.Header.Set(ContentTypeHeader, rawType)
			}

			// now we write to cache and respond at the same time
			fileResp := FileResponse{
				Exists:   true,
				ETag:     resp.Header.Get(ETagHeader),
				MimeType: mimeType,
				RawMime:  rawType,
			}
			return fileResp.CreateCacheReader(ctx, reader, client.responseCache, cacheKey), resp.Response.Header, resp.StatusCode, nil

		case http.StatusNotFound:
			jsonToCache, err := json.Marshal(FileResponse{ETag: resp.Header.Get(ETagHeader)})
			if err != nil {
				log.Error().Err(err).Msgf("[cache] marshaling json metadata for %q has returned an error", cacheKey)
			}
			if err := client.responseCache.Set(cacheKey+"|Metadata", jsonToCache, fileCacheTimeout); err != nil {
				log.Error().Err(err).Msg("[cache] error on cache write")
			}

			return nil, resp.Response.Header, http.StatusNotFound, ErrorNotFound
		default:
			return nil, resp.Response.Header, resp.StatusCode, fmt.Errorf("unexpected status code '%d'", resp.StatusCode)
		}
	}
	return nil, nil, http.StatusInternalServerError, err
}

func (client *Client) GiteaGetRepoBranchTimestamp(repoOwner, repoName, branchName string) (*BranchTimestamp, error) {
	cacheKey := fmt.Sprintf("%s/%s/%s/%s", branchTimestampCacheKeyPrefix, repoOwner, repoName, branchName)

	if stampRaw, ok := client.responseCache.Get(cacheKey); ok {
		var stamp BranchTimestamp
		err := json.Unmarshal(stampRaw.([]byte), &stamp)
		if err != nil {
			log.Error().Err(err).Bytes("stamp", stampRaw.([]byte)).Msgf("[cache] failed to unmarshal timestamp for: %s", cacheKey)
			return &BranchTimestamp{}, ErrorNotFound
		}

		if stamp.NotFound {
			log.Trace().Msgf("[cache] branch %q does not exist", branchName)

			return &BranchTimestamp{}, ErrorNotFound
		} else {
			log.Trace().Msgf("[cache] branch %q exists", branchName)
			// This comes from the refactoring of the caching library.
			// The branch as reported by the API was stored in the cache, and I'm not sure if there are
			// situations where it differs from the name in the request, hence this is left here.
			return &stamp, nil
		}
	}

	branch, resp, err := client.sdkClient.GetRepoBranch(repoOwner, repoName, branchName)
	if err != nil {
		if resp != nil && resp.StatusCode == http.StatusNotFound {
			log.Trace().Msgf("[cache] set cache branch %q not found", branchName)
			jsonToCache, err := json.Marshal(BranchTimestamp{NotFound: true})
			if err != nil {
				log.Error().Err(err).Msgf("[cache] marshaling empty timestamp for '%s' has returned an error", cacheKey)
			}
			if err := client.responseCache.Set(cacheKey, jsonToCache, branchExistenceCacheTimeout); err != nil {
				log.Error().Err(err).Msg("[cache] error on cache write")
			}
			return &BranchTimestamp{}, ErrorNotFound
		}
		return &BranchTimestamp{}, err
	}
	if resp.StatusCode != http.StatusOK {
		return &BranchTimestamp{}, fmt.Errorf("unexpected status code '%d'", resp.StatusCode)
	}

	stamp := &BranchTimestamp{
		Branch:    branch.Name,
		Timestamp: branch.Commit.Timestamp,
	}

	log.Trace().Msgf("set cache: branch %q exists", branchName)
	jsonToCache, err := json.Marshal(stamp)
	if err != nil {
		log.Error().Err(err).Msgf("[cache] marshaling timestamp for %q has returned an error", cacheKey)
	}
	if err := client.responseCache.Set(cacheKey, jsonToCache, branchExistenceCacheTimeout); err != nil {
		log.Error().Err(err).Msg("[cache] error on cache write")
	}
	return stamp, nil
}

func (client *Client) GiteaGetRepoDefaultBranch(repoOwner, repoName string) (string, error) {
	cacheKey := fmt.Sprintf("%s/%s/%s", defaultBranchCacheKeyPrefix, repoOwner, repoName)

	if branch, ok := client.responseCache.Get(cacheKey); ok {
		return string(branch.([]byte)), nil
	}

	repo, resp, err := client.sdkClient.GetRepo(repoOwner, repoName)
	if err != nil {
		return "", err
	}
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("unexpected status code '%d'", resp.StatusCode)
	}

	branch := repo.DefaultBranch
	if err := client.responseCache.Set(cacheKey, []byte(branch), defaultBranchCacheTimeout); err != nil {
		log.Error().Err(err).Msg("[cache] error on cache write")
	}
	return branch, nil
}

func (client *Client) GiteaCheckIfOwnerExists(owner string) (bool, error) {
	cacheKey := fmt.Sprintf("%s/%s", ownerExistenceKeyPrefix, owner)

	if existRaw, ok := client.responseCache.Get(cacheKey); ok && existRaw != nil {
		exist, err := strconv.ParseBool(existRaw.(string))
		return exist, err
	}

	_, resp, err := client.sdkClient.GetUserInfo(owner)
	if resp.StatusCode == http.StatusOK && err == nil {
		if err := client.responseCache.Set(cacheKey, []byte("true"), ownerExistenceCacheTimeout); err != nil {
			log.Error().Err(err).Msg("[cache] error on cache write")
		}
		return true, nil
	} else if resp.StatusCode != http.StatusNotFound {
		return false, err
	}

	_, resp, err = client.sdkClient.GetOrg(owner)
	if resp.StatusCode == http.StatusOK && err == nil {
		if err := client.responseCache.Set(cacheKey, []byte("true"), ownerExistenceCacheTimeout); err != nil {
			log.Error().Err(err).Msg("[cache] error on cache write")
		}
		return true, nil
	} else if resp.StatusCode != http.StatusNotFound {
		return false, err
	}
	if err := client.responseCache.Set(cacheKey, []byte("false"), ownerExistenceCacheTimeout); err != nil {
		log.Error().Err(err).Msg("[cache] error on cache write")
	}
	return false, nil
}

func (client *Client) extToMime(ext string) string {
	mimeType := mime.TypeByExtension(path.Ext(ext))
	mimeTypeSplit := strings.SplitN(mimeType, ";", 2)
	if client.forbiddenMimeTypes[mimeTypeSplit[0]] || mimeType == "" {
		mimeType = client.defaultMimeType
	}
	log.Trace().Msgf("probe mime of extension '%q' is '%q'", ext, mimeType)

	return mimeType
}

func (client *Client) getMimeTypeByExtension(resource string) (mimeType, rawType string) {
	rawExt := path.Ext(resource)
	innerExt := rawExt
	switch rawExt {
	case ".gz", ".br", ".zst":
		innerExt = path.Ext(resource[:len(resource)-len(rawExt)])
	}
	rawType = client.extToMime(rawExt)
	mimeType = rawType
	if innerExt != rawExt {
		mimeType = client.extToMime(innerExt)
	}
	log.Trace().Msgf("probe mime of %q is (%q / raw %q)", resource, mimeType, rawType)
	return mimeType, rawType
}
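
getMimeTypeByExtension serves pre-compressed files (.gz/.br/.zst) with the inner MIME type when decompression is requested and with the raw type otherwise. A worked example of the double-extension probing with the standard mime package; the AddExtensionType call is an assumption needed because the built-in table has no entry for .gz:

package main

import (
	"fmt"
	"mime"
	"path"
)

// probe mirrors the extension handling of getMimeTypeByExtension, without the
// forbidden-type and default-type handling of extToMime.
func probe(resource string) (mimeType, rawType string) {
	rawExt := path.Ext(resource)
	innerExt := rawExt
	switch rawExt {
	case ".gz", ".br", ".zst":
		innerExt = path.Ext(resource[:len(resource)-len(rawExt)])
	}
	rawType = mime.TypeByExtension(rawExt)
	mimeType = rawType
	if innerExt != rawExt {
		mimeType = mime.TypeByExtension(innerExt)
	}
	return mimeType, rawType
}

func main() {
	_ = mime.AddExtensionType(".gz", "application/gzip")
	mimeType, rawType := probe("assets/style.css.gz")
	fmt.Println(mimeType, rawType) // "text/css; charset=utf-8" "application/gzip"
}
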
114
server/handler/handler.go
Normal file
@@ -0,0 +1,114 @@
package handler

import (
	"net/http"
	"strings"

	"github.com/rs/zerolog/log"

	"codeberg.org/codeberg/pages/config"
	"codeberg.org/codeberg/pages/html"
	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/context"
	"codeberg.org/codeberg/pages/server/gitea"
)

const (
	headerAccessControlAllowOrigin  = "Access-Control-Allow-Origin"
	headerAccessControlAllowMethods = "Access-Control-Allow-Methods"
	defaultPagesRepo                = "pages"
)

// Handler handles a single HTTP request to the web server.
func Handler(
	cfg config.ServerConfig,
	giteaClient *gitea.Client,
	canonicalDomainCache, redirectsCache cache.ICache,
) http.HandlerFunc {
	return func(w http.ResponseWriter, req *http.Request) {
		ctx := context.New(w, req)
		log := log.With().Str("ReqId", ctx.ReqId).Strs("Handler", []string{req.Host, req.RequestURI}).Logger()
		log.Debug().Msg("\n----------------------------------------------------------")

		ctx.RespWriter.Header().Set("Server", "pages-server")

		// Force new default from specification (since November 2020) - see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#strict-origin-when-cross-origin
		ctx.RespWriter.Header().Set("Referrer-Policy", "strict-origin-when-cross-origin")

		// Enable browser caching for up to 10 minutes
		ctx.RespWriter.Header().Set("Cache-Control", "public, max-age=600")

		trimmedHost := ctx.TrimHostPort()

		// Add HSTS for RawDomain and MainDomain
		if hsts := getHSTSHeader(trimmedHost, cfg.MainDomain, cfg.RawDomain); hsts != "" {
			ctx.RespWriter.Header().Set("Strict-Transport-Security", hsts)
		}

		// Handle all http methods
		ctx.RespWriter.Header().Set("Allow", http.MethodGet+", "+http.MethodHead+", "+http.MethodOptions)
		switch ctx.Req.Method {
		case http.MethodOptions:
			// return Allow header
			ctx.RespWriter.WriteHeader(http.StatusNoContent)
			return
		case http.MethodGet,
			http.MethodHead:
			// end switch case and handle allowed requests
			break
		default:
			// Block all methods not required for static pages
			ctx.String("Method not allowed", http.StatusMethodNotAllowed)
			return
		}

		// Block blacklisted paths (like ACME challenges)
		for _, blacklistedPath := range cfg.BlacklistedPaths {
			if strings.HasPrefix(ctx.Path(), blacklistedPath) {
				html.ReturnErrorPage(ctx, "requested path is blacklisted", http.StatusForbidden)
				return
			}
		}

		// Allow CORS for specified domains
		allowCors := false
		for _, allowedCorsDomain := range cfg.AllowedCorsDomains {
			if strings.EqualFold(trimmedHost, allowedCorsDomain) {
				allowCors = true
				break
			}
		}
		if allowCors {
			ctx.RespWriter.Header().Set(headerAccessControlAllowOrigin, "*")
			ctx.RespWriter.Header().Set(headerAccessControlAllowMethods, http.MethodGet+", "+http.MethodHead)
		}

		// Prepare request information for Gitea
		pathElements := strings.Split(strings.Trim(ctx.Path(), "/"), "/")

		if cfg.RawDomain != "" && strings.EqualFold(trimmedHost, cfg.RawDomain) {
			log.Debug().Msg("raw domain request detected")
			handleRaw(log, ctx, giteaClient,
				cfg.MainDomain,
				trimmedHost,
				pathElements,
				canonicalDomainCache, redirectsCache)
		} else if strings.HasSuffix(trimmedHost, cfg.MainDomain) {
			log.Debug().Msg("subdomain request detected")
			handleSubDomain(log, ctx, giteaClient,
				cfg.MainDomain,
				cfg.PagesBranches,
				trimmedHost,
				pathElements,
				canonicalDomainCache, redirectsCache)
		} else {
			log.Debug().Msg("custom domain request detected")
			handleCustomDomain(log, ctx, giteaClient,
				cfg.MainDomain,
				trimmedHost,
				pathElements,
				cfg.PagesBranches[0],
				canonicalDomainCache, redirectsCache)
		}
	}
}
72
server/handler/handler_custom_domain.go
Normal file
@@ -0,0 +1,72 @@
package handler

import (
	"net/http"
	"path"
	"strings"

	"codeberg.org/codeberg/pages/html"
	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/context"
	"codeberg.org/codeberg/pages/server/dns"
	"codeberg.org/codeberg/pages/server/gitea"
	"codeberg.org/codeberg/pages/server/upstream"
	"github.com/rs/zerolog"
)

func handleCustomDomain(log zerolog.Logger, ctx *context.Context, giteaClient *gitea.Client,
	mainDomainSuffix string,
	trimmedHost string,
	pathElements []string,
	firstDefaultBranch string,
	canonicalDomainCache, redirectsCache cache.ICache,
) {
	// Serve pages from custom domains
	targetOwner, targetRepo, targetBranch := dns.GetTargetFromDNS(trimmedHost, mainDomainSuffix, firstDefaultBranch)
	if targetOwner == "" {
		html.ReturnErrorPage(ctx,
			"could not obtain repo owner from custom domain",
			http.StatusFailedDependency)
		return
	}

	pathParts := pathElements
	canonicalLink := false
	if strings.HasPrefix(pathElements[0], "@") {
		targetBranch = pathElements[0][1:]
		pathParts = pathElements[1:]
		canonicalLink = true
	}

	// Try to use the given repo on the given branch or the default branch
	log.Debug().Msg("custom domain preparations, now trying with details from DNS")
	if targetOpt, works := tryBranch(log, ctx, giteaClient, &upstream.Options{
		TryIndexPages: true,
		TargetOwner:   targetOwner,
		TargetRepo:    targetRepo,
		TargetBranch:  targetBranch,
		TargetPath:    path.Join(pathParts...),
	}, canonicalLink); works {
		canonicalDomain, valid := targetOpt.CheckCanonicalDomain(ctx, giteaClient, trimmedHost, mainDomainSuffix, canonicalDomainCache)
		if !valid {
			html.ReturnErrorPage(ctx, "domain not specified in <code>.domains</code> file", http.StatusMisdirectedRequest)
			return
		} else if canonicalDomain != trimmedHost {
			// only redirect if the target is also a codeberg page!
			targetOwner, _, _ = dns.GetTargetFromDNS(strings.SplitN(canonicalDomain, "/", 2)[0], mainDomainSuffix, firstDefaultBranch)
			if targetOwner != "" {
				ctx.Redirect("https://"+canonicalDomain+"/"+targetOpt.TargetPath, http.StatusTemporaryRedirect)
				return
			}

			html.ReturnErrorPage(ctx, "target is no codeberg page", http.StatusFailedDependency)
			return
		}

		log.Debug().Str("url", trimmedHost).Msg("tryBranch, now trying upstream")
		tryUpstream(log, ctx, giteaClient, mainDomainSuffix, trimmedHost, targetOpt, canonicalDomainCache, redirectsCache)
		return
	}

	html.ReturnErrorPage(ctx, "could not find target for custom domain", http.StatusFailedDependency)
}
71
server/handler/handler_raw_domain.go
Normal file
@@ -0,0 +1,71 @@
package handler

import (
	"fmt"
	"net/http"
	"path"
	"strings"

	"github.com/rs/zerolog"

	"codeberg.org/codeberg/pages/html"
	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/context"
	"codeberg.org/codeberg/pages/server/gitea"
	"codeberg.org/codeberg/pages/server/upstream"
)

func handleRaw(log zerolog.Logger, ctx *context.Context, giteaClient *gitea.Client,
	mainDomainSuffix string,
	trimmedHost string,
	pathElements []string,
	canonicalDomainCache, redirectsCache cache.ICache,
) {
	// Serve raw content from RawDomain
	log.Debug().Msg("raw domain")

	if len(pathElements) < 2 {
		html.ReturnErrorPage(
			ctx,
			"a url in the form of <code>https://{domain}/{owner}/{repo}[/@{branch}]/{path}</code> is required",
			http.StatusBadRequest,
		)

		return
	}

	// raw.codeberg.org/example/myrepo/@main/index.html
	if len(pathElements) > 2 && strings.HasPrefix(pathElements[2], "@") {
		log.Debug().Msg("raw domain preparations, now trying with specified branch")
		if targetOpt, works := tryBranch(log, ctx, giteaClient, &upstream.Options{
			ServeRaw:     true,
			TargetOwner:  pathElements[0],
			TargetRepo:   pathElements[1],
			TargetBranch: pathElements[2][1:],
			TargetPath:   path.Join(pathElements[3:]...),
		}, true); works {
			log.Trace().Msg("tryUpstream: serve raw domain with specified branch")
			tryUpstream(log, ctx, giteaClient, mainDomainSuffix, trimmedHost, targetOpt, canonicalDomainCache, redirectsCache)
			return
		}
		log.Debug().Msg("missing branch info")
		html.ReturnErrorPage(ctx, "missing branch info", http.StatusFailedDependency)
		return
	}

	log.Debug().Msg("raw domain preparations, now trying with default branch")
	if targetOpt, works := tryBranch(log, ctx, giteaClient, &upstream.Options{
		TryIndexPages: false,
		ServeRaw:      true,
		TargetOwner:   pathElements[0],
		TargetRepo:    pathElements[1],
		TargetPath:    path.Join(pathElements[2:]...),
	}, true); works {
		log.Trace().Msg("tryUpstream: serve raw domain with default branch")
		tryUpstream(log, ctx, giteaClient, mainDomainSuffix, trimmedHost, targetOpt, canonicalDomainCache, redirectsCache)
	} else {
		html.ReturnErrorPage(ctx,
			fmt.Sprintf("raw domain could not find repo <code>%s/%s</code> or repo is empty", targetOpt.TargetOwner, targetOpt.TargetRepo),
			http.StatusNotFound)
	}
}
156
server/handler/handler_sub_domain.go
Normal file
@@ -0,0 +1,156 @@
package handler

import (
	"fmt"
	"net/http"
	"path"
	"strings"

	"github.com/rs/zerolog"
	"golang.org/x/exp/slices"

	"codeberg.org/codeberg/pages/html"
	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/context"
	"codeberg.org/codeberg/pages/server/gitea"
	"codeberg.org/codeberg/pages/server/upstream"
)

func handleSubDomain(log zerolog.Logger, ctx *context.Context, giteaClient *gitea.Client,
	mainDomainSuffix string,
	defaultPagesBranches []string,
	trimmedHost string,
	pathElements []string,
	canonicalDomainCache, redirectsCache cache.ICache,
) {
	// Serve pages from subdomains of MainDomainSuffix
	log.Debug().Msg("main domain suffix")

	targetOwner := strings.TrimSuffix(trimmedHost, mainDomainSuffix)
	targetRepo := pathElements[0]

	if targetOwner == "www" {
		// www.codeberg.page redirects to codeberg.page // TODO: rm hardcoded - use cname?
		ctx.Redirect("https://"+mainDomainSuffix[1:]+ctx.Path(), http.StatusPermanentRedirect)
		return
	}

	// Check if the first directory is a repo with the second directory as a branch
	// example.codeberg.page/myrepo/@main/index.html
	if len(pathElements) > 1 && strings.HasPrefix(pathElements[1], "@") {
		if targetRepo == defaultPagesRepo {
			// example.codeberg.org/pages/@... redirects to example.codeberg.org/@...
			ctx.Redirect("/"+strings.Join(pathElements[1:], "/"), http.StatusTemporaryRedirect)
			return
		}

		log.Debug().Msg("main domain preparations, now trying with specified repo & branch")
		if targetOpt, works := tryBranch(log, ctx, giteaClient, &upstream.Options{
			TryIndexPages: true,
			TargetOwner:   targetOwner,
			TargetRepo:    pathElements[0],
			TargetBranch:  pathElements[1][1:],
			TargetPath:    path.Join(pathElements[2:]...),
		}, true); works {
			log.Trace().Msg("tryUpstream: serve with specified repo and branch")
			tryUpstream(log, ctx, giteaClient, mainDomainSuffix, trimmedHost, targetOpt, canonicalDomainCache, redirectsCache)
		} else {
			html.ReturnErrorPage(
				ctx,
				formatSetBranchNotFoundMessage(pathElements[1][1:], targetOwner, pathElements[0]),
				http.StatusFailedDependency,
			)
		}
		return
	}

	// Check if the first directory is a branch for the defaultPagesRepo
	// example.codeberg.page/@main/index.html
	if strings.HasPrefix(pathElements[0], "@") {
		targetBranch := pathElements[0][1:]

		// if the default pages branch can be determined exactly, it does not need to be set
		if len(defaultPagesBranches) == 1 && slices.Contains(defaultPagesBranches, targetBranch) {
			// example.codeberg.org/@pages/... redirects to example.codeberg.org/...
			ctx.Redirect("/"+strings.Join(pathElements[1:], "/"), http.StatusTemporaryRedirect)
			return
		}

		log.Debug().Msg("main domain preparations, now trying with specified branch")
		if targetOpt, works := tryBranch(log, ctx, giteaClient, &upstream.Options{
			TryIndexPages: true,
			TargetOwner:   targetOwner,
			TargetRepo:    defaultPagesRepo,
			TargetBranch:  targetBranch,
			TargetPath:    path.Join(pathElements[1:]...),
		}, true); works {
			log.Trace().Msg("tryUpstream: serve default pages repo with specified branch")
			tryUpstream(log, ctx, giteaClient, mainDomainSuffix, trimmedHost, targetOpt, canonicalDomainCache, redirectsCache)
		} else {
			html.ReturnErrorPage(
				ctx,
				formatSetBranchNotFoundMessage(targetBranch, targetOwner, defaultPagesRepo),
				http.StatusFailedDependency,
			)
		}
		return
	}

	for _, defaultPagesBranch := range defaultPagesBranches {
		// Check if the first directory is a repo with a default pages branch
		// example.codeberg.page/myrepo/index.html
		// example.codeberg.page/{PAGES_BRANCHE}/... is not allowed here.
		log.Debug().Msg("main domain preparations, now trying with specified repo")
		if pathElements[0] != defaultPagesBranch {
			if targetOpt, works := tryBranch(log, ctx, giteaClient, &upstream.Options{
				TryIndexPages: true,
				TargetOwner:   targetOwner,
				TargetRepo:    pathElements[0],
				TargetBranch:  defaultPagesBranch,
				TargetPath:    path.Join(pathElements[1:]...),
			}, false); works {
				log.Debug().Msg("tryBranch, now trying upstream 5")
				tryUpstream(log, ctx, giteaClient, mainDomainSuffix, trimmedHost, targetOpt, canonicalDomainCache, redirectsCache)
				return
			}
		}

		// Try to use the defaultPagesRepo on an default pages branch
		// example.codeberg.page/index.html
		log.Debug().Msg("main domain preparations, now trying with default repo")
		if targetOpt, works := tryBranch(log, ctx, giteaClient, &upstream.Options{
			TryIndexPages: true,
			TargetOwner:   targetOwner,
			TargetRepo:    defaultPagesRepo,
			TargetBranch:  defaultPagesBranch,
			TargetPath:    path.Join(pathElements...),
		}, false); works {
			log.Debug().Msg("tryBranch, now trying upstream 6")
			tryUpstream(log, ctx, giteaClient, mainDomainSuffix, trimmedHost, targetOpt, canonicalDomainCache, redirectsCache)
			return
		}
	}

	// Try to use the defaultPagesRepo on its default branch
	// example.codeberg.page/index.html
	log.Debug().Msg("main domain preparations, now trying with default repo/branch")
	if targetOpt, works := tryBranch(log, ctx, giteaClient, &upstream.Options{
		TryIndexPages: true,
		TargetOwner:   targetOwner,
		TargetRepo:    defaultPagesRepo,
		TargetPath:    path.Join(pathElements...),
	}, false); works {
		log.Debug().Msg("tryBranch, now trying upstream 6")
		tryUpstream(log, ctx, giteaClient, mainDomainSuffix, trimmedHost, targetOpt, canonicalDomainCache, redirectsCache)
		return
	}

	// Couldn't find a valid repo/branch
	html.ReturnErrorPage(ctx,
		fmt.Sprintf("could not find a valid repository or branch for repository: <code>%s</code>", targetRepo),
		http.StatusNotFound)
}

func formatSetBranchNotFoundMessage(branch, owner, repo string) string {
	return fmt.Sprintf("explicitly set branch <code>%q</code> does not exist at <code>%s/%s</code>", branch, owner, repo)
}
58
server/handler/handler_test.go
Normal file
@@ -0,0 +1,58 @@
package handler

import (
	"net/http"
	"net/http/httptest"
	"testing"
	"time"

	"codeberg.org/codeberg/pages/config"
	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/gitea"
	"github.com/rs/zerolog/log"
)

func TestHandlerPerformance(t *testing.T) {
	cfg := config.ForgeConfig{
		Root:           "https://codeberg.org",
		Token:          "",
		LFSEnabled:     false,
		FollowSymlinks: false,
	}
	giteaClient, _ := gitea.NewClient(cfg, cache.NewInMemoryCache())
	serverCfg := config.ServerConfig{
		MainDomain: "codeberg.page",
		RawDomain:  "raw.codeberg.page",
		BlacklistedPaths: []string{
			"/.well-known/acme-challenge/",
		},
		AllowedCorsDomains: []string{"raw.codeberg.org", "fonts.codeberg.org", "design.codeberg.org"},
		PagesBranches:      []string{"pages"},
	}
	testHandler := Handler(serverCfg, giteaClient, cache.NewInMemoryCache(), cache.NewInMemoryCache())

	testCase := func(uri string, status int) {
		t.Run(uri, func(t *testing.T) {
			req := httptest.NewRequest("GET", uri, http.NoBody)
			w := httptest.NewRecorder()

			log.Printf("Start: %v\n", time.Now())
			start := time.Now()
			testHandler(w, req)
			end := time.Now()
			log.Printf("Done: %v\n", time.Now())

			resp := w.Result()

			if resp.StatusCode != status {
				t.Errorf("request failed with status code %d", resp.StatusCode)
			} else {
				t.Logf("request took %d milliseconds", end.Sub(start).Milliseconds())
			}
		})
	}

	testCase("https://mondstern.codeberg.page/", 404) // TODO: expect 200
	testCase("https://codeberg.page/", 404)           // TODO: expect 200
	testCase("https://example.momar.xyz/", 424)
}
15
server/handler/hsts.go
Normal file
@@ -0,0 +1,15 @@
package handler

import (
	"strings"
)

// getHSTSHeader returns a HSTS header with includeSubdomains & preload for MainDomainSuffix and RawDomain, or an empty
// string for custom domains.
func getHSTSHeader(host, mainDomainSuffix, rawDomain string) string {
	if strings.HasSuffix(host, mainDomainSuffix) || strings.EqualFold(host, rawDomain) {
		return "max-age=63072000; includeSubdomains; preload"
	} else {
		return ""
	}
}
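The doc comment above describes the only decision this helper makes: HSTS is sent for hosts under the pages domain and for the raw domain, never for custom domains. A minimal test-style sketch of that behaviour, written for package handler; the test name and the concrete domain values are illustrative assumptions, not part of the repository:

// A sketch of getHSTSHeader's behaviour; test name and domains are hypothetical.
package handler

import "testing"

func TestGetHSTSHeaderSketch(t *testing.T) {
	// a subdomain of the pages domain gets an HSTS header
	if got := getHSTSHeader("example.codeberg.page", ".codeberg.page", "raw.codeberg.page"); got == "" {
		t.Error("expected an HSTS header for a pages subdomain")
	}
	// a custom domain gets no HSTS header
	if got := getHSTSHeader("example.com", ".codeberg.page", "raw.codeberg.page"); got != "" {
		t.Error("expected no HSTS header for a custom domain")
	}
}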
84
server/handler/try.go
Normal file
@@ -0,0 +1,84 @@
package handler

import (
	"fmt"
	"net/http"
	"strings"

	"github.com/rs/zerolog"

	"codeberg.org/codeberg/pages/html"
	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/context"
	"codeberg.org/codeberg/pages/server/gitea"
	"codeberg.org/codeberg/pages/server/upstream"
)

// tryUpstream forwards the target request to the Gitea API, and shows an error page on failure.
func tryUpstream(log zerolog.Logger, ctx *context.Context, giteaClient *gitea.Client,
	mainDomainSuffix, trimmedHost string,
	options *upstream.Options,
	canonicalDomainCache cache.ICache,
	redirectsCache cache.ICache,
) {
	// check if a canonical domain exists on a request on MainDomain
	if strings.HasSuffix(trimmedHost, mainDomainSuffix) && !options.ServeRaw {
		canonicalDomain, _ := options.CheckCanonicalDomain(ctx, giteaClient, "", mainDomainSuffix, canonicalDomainCache)
		if !strings.HasSuffix(strings.SplitN(canonicalDomain, "/", 2)[0], mainDomainSuffix) {
			canonicalPath := ctx.Req.RequestURI
			if options.TargetRepo != defaultPagesRepo {
				path := strings.SplitN(canonicalPath, "/", 3)
				if len(path) >= 3 {
					canonicalPath = "/" + path[2]
				}
			}

			redirect_to := "https://" + canonicalDomain + canonicalPath

			log.Debug().Str("to", redirect_to).Msg("redirecting")

			ctx.Redirect(redirect_to, http.StatusTemporaryRedirect)
			return
		}
	}

	// Add host for debugging.
	options.Host = trimmedHost

	// Try to request the file from the Gitea API
	log.Debug().Msg("requesting from upstream")
	if !options.Upstream(ctx, giteaClient, redirectsCache) {
		html.ReturnErrorPage(ctx, fmt.Sprintf("Forge returned %d %s", ctx.StatusCode, http.StatusText(ctx.StatusCode)), ctx.StatusCode)
	}
}

// tryBranch checks if a branch exists and populates the target variables. If canonicalLink is non-empty,
// it will also disallow search indexing and add a Link header to the canonical URL.
func tryBranch(log zerolog.Logger, ctx *context.Context, giteaClient *gitea.Client,
	targetOptions *upstream.Options, canonicalLink bool,
) (*upstream.Options, bool) {
	if targetOptions.TargetOwner == "" || targetOptions.TargetRepo == "" {
		log.Debug().Msg("tryBranch: owner or repo is empty")
		return nil, false
	}

	// Replace "~" to "/" so we can access branch that contains slash character
	// Branch name cannot contain "~" so doing this is okay
	targetOptions.TargetBranch = strings.ReplaceAll(targetOptions.TargetBranch, "~", "/")

	// Check if the branch exists, otherwise treat it as a file path
	branchExist, _ := targetOptions.GetBranchTimestamp(giteaClient)
	if !branchExist {
		log.Debug().Msg("tryBranch: branch doesn't exist")
		return nil, false
	}

	if canonicalLink {
		// Hide from search machines & add canonical link
		ctx.RespWriter.Header().Set("X-Robots-Tag", "noarchive, noindex")
		ctx.RespWriter.Header().Set("Link", targetOptions.ContentWebLink(giteaClient)+"; rel=\"canonical\"")
	}

	log.Debug().Msg("tryBranch: true")
	return targetOptions, true
}
21
server/profiling.go
Normal file
@@ -0,0 +1,21 @@
package server

import (
	"net/http"
	_ "net/http/pprof"

	"github.com/rs/zerolog/log"
)

func StartProfilingServer(listeningAddress string) {
	server := &http.Server{
		Addr:    listeningAddress,
		Handler: http.DefaultServeMux,
	}

	log.Info().Msgf("Starting debug server on %s", listeningAddress)

	go func() {
		log.Fatal().Err(server.ListenAndServe()).Msg("Failed to start debug server")
	}()
}
145
server/startup.go
Normal file
@@ -0,0 +1,145 @@
package server

import (
	"context"
	"crypto/tls"
	"fmt"
	"net"
	"net/http"
	"os"
	"strings"
	"time"

	"github.com/pires/go-proxyproto"
	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
	"github.com/urfave/cli/v2"

	cmd "codeberg.org/codeberg/pages/cli"
	"codeberg.org/codeberg/pages/config"
	"codeberg.org/codeberg/pages/server/acme"
	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/certificates"
	"codeberg.org/codeberg/pages/server/gitea"
	"codeberg.org/codeberg/pages/server/handler"
)

// Serve sets up and starts the web server.
func Serve(ctx *cli.Context) error {
	// initialize logger with Trace, overridden later with actual level
	log.Logger = zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr}).With().Timestamp().Caller().Logger().Level(zerolog.TraceLevel)

	cfg, err := config.ReadConfig(ctx)
	if err != nil {
		log.Error().Err(err).Msg("could not read config")
	}

	config.MergeConfig(ctx, cfg)

	// Initialize the logger.
	logLevel, err := zerolog.ParseLevel(cfg.LogLevel)
	if err != nil {
		return err
	}
	fmt.Printf("Setting log level to: %s\n", logLevel)
	log.Logger = zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr}).With().Timestamp().Caller().Logger().Level(logLevel)

	listeningSSLAddress := fmt.Sprintf("%s:%d", cfg.Server.Host, cfg.Server.Port)
	listeningHTTPAddress := fmt.Sprintf("%s:%d", cfg.Server.Host, cfg.Server.HttpPort)

	if cfg.Server.RawDomain != "" {
		cfg.Server.AllowedCorsDomains = append(cfg.Server.AllowedCorsDomains, cfg.Server.RawDomain)
	}

	// Make sure MainDomain has a leading dot
	if !strings.HasPrefix(cfg.Server.MainDomain, ".") {
		// TODO make this better
		cfg.Server.MainDomain = "." + cfg.Server.MainDomain
	}

	if len(cfg.Server.PagesBranches) == 0 {
		return fmt.Errorf("no default branches set (PAGES_BRANCHES)")
	}

	// Init ssl cert database
	certDB, closeFn, err := cmd.OpenCertDB(ctx)
	if err != nil {
		return err
	}
	defer closeFn()

	challengeCache := cache.NewInMemoryCache()
	// canonicalDomainCache stores canonical domains
	canonicalDomainCache := cache.NewInMemoryCache()
	// redirectsCache stores redirects in _redirects files
	redirectsCache := cache.NewInMemoryCache()
	// clientResponseCache stores responses from the Gitea server
	clientResponseCache := cache.NewInMemoryCache()

	giteaClient, err := gitea.NewClient(cfg.Forge, clientResponseCache)
	if err != nil {
		return fmt.Errorf("could not create new gitea client: %v", err)
	}

	acmeClient, err := acme.CreateAcmeClient(cfg.ACME, cfg.Server.HttpServerEnabled, challengeCache)
	if err != nil {
		return err
	}

	if err := certificates.SetupMainDomainCertificates(log.Logger, cfg.Server.MainDomain, acmeClient, certDB); err != nil {
		return err
	}

	// Create listener for SSL connections
	log.Info().Msgf("Create TCP listener for SSL on %s", listeningSSLAddress)
	listener, err := net.Listen("tcp", listeningSSLAddress)
	if err != nil {
		return fmt.Errorf("couldn't create listener: %v", err)
	}

	if cfg.Server.UseProxyProtocol {
		listener = &proxyproto.Listener{Listener: listener}
	}
	// Setup listener for SSL connections
	listener = tls.NewListener(listener, certificates.TLSConfig(
		cfg.Server.MainDomain,
		giteaClient,
		acmeClient,
		cfg.Server.PagesBranches[0],
		challengeCache, canonicalDomainCache,
		certDB,
		cfg.ACME.NoDNS01,
		cfg.Server.RawDomain,
	))

	interval := 12 * time.Hour
	certMaintainCtx, cancelCertMaintain := context.WithCancel(context.Background())
	defer cancelCertMaintain()
	go certificates.MaintainCertDB(log.Logger, certMaintainCtx, interval, acmeClient, cfg.Server.MainDomain, certDB)

	if cfg.Server.HttpServerEnabled {
		// Create handler for http->https redirect and http acme challenges
		httpHandler := certificates.SetupHTTPACMEChallengeServer(challengeCache, uint(cfg.Server.Port))

		// Create listener for http and start listening
		go func() {
			log.Info().Msgf("Start HTTP server listening on %s", listeningHTTPAddress)
			err := http.ListenAndServe(listeningHTTPAddress, httpHandler)
			if err != nil {
				log.Error().Err(err).Msg("Couldn't start HTTP server")
			}
		}()
	}

	if ctx.IsSet("enable-profiling") {
		StartProfilingServer(ctx.String("profiling-address"))
	}

	// Create ssl handler based on settings
	sslHandler := handler.Handler(cfg.Server, giteaClient, canonicalDomainCache, redirectsCache)

	// Start the ssl listener
	log.Info().Msgf("Start SSL server using TCP listener on %s", listener.Addr())

	return http.Serve(listener, sslHandler)
}
71
server/upstream/domains.go
Normal file
@@ -0,0 +1,71 @@
package upstream

import (
	"errors"
	"strings"
	"time"

	"github.com/rs/zerolog/log"

	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/context"
	"codeberg.org/codeberg/pages/server/gitea"
)

// canonicalDomainCacheTimeout specifies the timeout for the canonical domain cache.
var canonicalDomainCacheTimeout = 15 * time.Minute

const canonicalDomainConfig = ".domains"

// CheckCanonicalDomain returns the canonical domain specified in the repo (using the `.domains` file).
func (o *Options) CheckCanonicalDomain(ctx *context.Context, giteaClient *gitea.Client, actualDomain, mainDomainSuffix string, canonicalDomainCache cache.ICache) (domain string, valid bool) {
	// Check if this request is cached.
	if cachedValue, ok := canonicalDomainCache.Get(o.TargetOwner + "/" + o.TargetRepo + "/" + o.TargetBranch); ok {
		domains := cachedValue.([]string)
		for _, domain := range domains {
			if domain == actualDomain {
				valid = true
				break
			}
		}
		return domains[0], valid
	}

	body, err := giteaClient.GiteaRawContent(ctx, o.TargetOwner, o.TargetRepo, o.TargetBranch, canonicalDomainConfig)
	if err != nil && !errors.Is(err, gitea.ErrorNotFound) {
		log.Error().Err(err).Msgf("could not read %s of %s/%s", canonicalDomainConfig, o.TargetOwner, o.TargetRepo)
	}

	var domains []string
	for _, domain := range strings.Split(string(body), "\n") {
		domain = strings.ToLower(domain)
		domain = strings.TrimSpace(domain)
		domain = strings.TrimPrefix(domain, "http://")
		domain = strings.TrimPrefix(domain, "https://")
		if domain != "" && !strings.HasPrefix(domain, "#") && !strings.ContainsAny(domain, "\t /") && strings.ContainsRune(domain, '.') {
			domains = append(domains, domain)
		}
		if domain == actualDomain {
			valid = true
		}
	}

	// Add [owner].[pages-domain] as valid domain.
	domains = append(domains, o.TargetOwner+mainDomainSuffix)
	if domains[len(domains)-1] == actualDomain {
		valid = true
	}

	// If the target repository isn't called pages, add `/[repository]` to the
	// previous valid domain.
	if o.TargetRepo != "" && o.TargetRepo != "pages" {
		domains[len(domains)-1] += "/" + o.TargetRepo
	}

	// Add result to cache.
	_ = canonicalDomainCache.Set(o.TargetOwner+"/"+o.TargetRepo+"/"+o.TargetBranch, domains, canonicalDomainCacheTimeout)

	// Return the first domain from the list and return if any of the domains
	// matched the requested domain.
	return domains[0], valid
}
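CheckCanonicalDomain above normalises each line of the repository's `.domains` file before treating it as a candidate domain: lines are lower-cased, trimmed, stripped of an http:// or https:// prefix, and then kept only if they are non-empty, not a comment, contain no spaces, tabs or slashes, and contain at least one dot. A minimal, standalone sketch of just those normalisation rules; the sample file content is hypothetical:

// Standalone sketch of the .domains line-normalisation rules shown above.
package main

import (
	"fmt"
	"strings"
)

func main() {
	// hypothetical .domains file content of a pages repository
	body := "# comments are ignored\nhttps://Example.COM \nwww.example.com\nnot a domain\nlocalhost\n"

	var domains []string
	for _, domain := range strings.Split(body, "\n") {
		domain = strings.ToLower(domain)   // case-insensitive
		domain = strings.TrimSpace(domain) // drop surrounding whitespace
		domain = strings.TrimPrefix(domain, "http://")
		domain = strings.TrimPrefix(domain, "https://")
		// keep only plausible domains: non-empty, not a comment,
		// no tab/space/slash, and containing at least one dot
		if domain != "" && !strings.HasPrefix(domain, "#") && !strings.ContainsAny(domain, "\t /") && strings.ContainsRune(domain, '.') {
			domains = append(domains, domain)
		}
	}

	fmt.Println(domains) // [example.com www.example.com]
}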
31
server/upstream/header.go
Normal file
@@ -0,0 +1,31 @@
package upstream

import (
	"net/http"
	"time"

	"codeberg.org/codeberg/pages/server/context"
	"codeberg.org/codeberg/pages/server/gitea"
)

// setHeader set values to response header
func (o *Options) setHeader(ctx *context.Context, header http.Header) {
	if eTag := header.Get(gitea.ETagHeader); eTag != "" {
		ctx.RespWriter.Header().Set(gitea.ETagHeader, eTag)
	}
	if cacheIndicator := header.Get(gitea.PagesCacheIndicatorHeader); cacheIndicator != "" {
		ctx.RespWriter.Header().Set(gitea.PagesCacheIndicatorHeader, cacheIndicator)
	}
	if length := header.Get(gitea.ContentLengthHeader); length != "" {
		ctx.RespWriter.Header().Set(gitea.ContentLengthHeader, length)
	}
	if mime := header.Get(gitea.ContentTypeHeader); mime == "" || o.ServeRaw {
		ctx.RespWriter.Header().Set(gitea.ContentTypeHeader, rawMime)
	} else {
		ctx.RespWriter.Header().Set(gitea.ContentTypeHeader, mime)
	}
	if encoding := header.Get(gitea.ContentEncodingHeader); encoding != "" && encoding != "identity" {
		ctx.RespWriter.Header().Set(gitea.ContentEncodingHeader, encoding)
	}
	ctx.RespWriter.Header().Set(headerLastModified, o.BranchTimestamp.In(time.UTC).Format(http.TimeFormat))
}
47
server/upstream/helper.go
Normal file
@@ -0,0 +1,47 @@
package upstream

import (
	"errors"
	"fmt"

	"github.com/rs/zerolog/log"

	"codeberg.org/codeberg/pages/server/gitea"
)

// GetBranchTimestamp finds the default branch (if branch is "") and save branch and it's last modification time to Options
func (o *Options) GetBranchTimestamp(giteaClient *gitea.Client) (bool, error) {
	log := log.With().Strs("BranchInfo", []string{o.TargetOwner, o.TargetRepo, o.TargetBranch}).Logger()

	if o.TargetBranch == "" {
		// Get default branch
		defaultBranch, err := giteaClient.GiteaGetRepoDefaultBranch(o.TargetOwner, o.TargetRepo)
		if err != nil {
			log.Err(err).Msg("Couldn't fetch default branch from repository")
			return false, err
		}
		log.Debug().Msgf("Successfully fetched default branch %q from Gitea", defaultBranch)
		o.TargetBranch = defaultBranch
	}

	timestamp, err := giteaClient.GiteaGetRepoBranchTimestamp(o.TargetOwner, o.TargetRepo, o.TargetBranch)
	if err != nil {
		if !errors.Is(err, gitea.ErrorNotFound) {
			log.Error().Err(err).Msg("Could not get latest commit timestamp from branch")
		}
		return false, err
	}

	if timestamp == nil || timestamp.Branch == "" {
		return false, fmt.Errorf("empty response")
	}

	log.Debug().Msgf("Successfully fetched latest commit timestamp from branch: %#v", timestamp)
	o.BranchTimestamp = timestamp.Timestamp
	o.TargetBranch = timestamp.Branch
	return true, nil
}

func (o *Options) ContentWebLink(giteaClient *gitea.Client) string {
	return giteaClient.ContentWebLink(o.TargetOwner, o.TargetRepo, o.TargetBranch, o.TargetPath) + "; rel=\"canonical\""
}
108
server/upstream/redirects.go
Normal file
@@ -0,0 +1,108 @@
package upstream

import (
	"strconv"
	"strings"
	"time"

	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/context"
	"codeberg.org/codeberg/pages/server/gitea"
	"github.com/rs/zerolog/log"
)

type Redirect struct {
	From       string
	To         string
	StatusCode int
}

// rewriteURL returns the destination URL and true if r matches reqURL.
func (r *Redirect) rewriteURL(reqURL string) (dstURL string, ok bool) {
	// check if from url matches request url
	if strings.TrimSuffix(r.From, "/") == strings.TrimSuffix(reqURL, "/") {
		return r.To, true
	}
	// handle wildcard redirects
	if strings.HasSuffix(r.From, "/*") {
		trimmedFromURL := strings.TrimSuffix(r.From, "/*")
		if reqURL == trimmedFromURL || strings.HasPrefix(reqURL, trimmedFromURL+"/") {
			if strings.Contains(r.To, ":splat") {
				matched := strings.TrimPrefix(reqURL, trimmedFromURL)
				matched = strings.TrimPrefix(matched, "/")
				return strings.ReplaceAll(r.To, ":splat", matched), true
			}
			return r.To, true
		}
	}
	return "", false
}

// redirectsCacheTimeout specifies the timeout for the redirects cache.
var redirectsCacheTimeout = 10 * time.Minute

const redirectsConfig = "_redirects"

// getRedirects returns redirects specified in the _redirects file.
func (o *Options) getRedirects(ctx *context.Context, giteaClient *gitea.Client, redirectsCache cache.ICache) []Redirect {
	var redirects []Redirect
	cacheKey := o.TargetOwner + "/" + o.TargetRepo + "/" + o.TargetBranch

	// Check for cached redirects
	if cachedValue, ok := redirectsCache.Get(cacheKey); ok {
		redirects = cachedValue.([]Redirect)
	} else {
		// Get _redirects file and parse
		body, err := giteaClient.GiteaRawContent(ctx, o.TargetOwner, o.TargetRepo, o.TargetBranch, redirectsConfig)
		if err == nil {
			for _, line := range strings.Split(string(body), "\n") {
				redirectArr := strings.Fields(line)

				// Ignore comments and invalid lines
				if strings.HasPrefix(line, "#") || len(redirectArr) < 2 {
					continue
				}

				// Get redirect status code
				statusCode := 301
				if len(redirectArr) == 3 {
					statusCode, err = strconv.Atoi(redirectArr[2])
					if err != nil {
						log.Info().Err(err).Msgf("could not read %s of %s/%s", redirectsConfig, o.TargetOwner, o.TargetRepo)
					}
				}

				redirects = append(redirects, Redirect{
					From:       redirectArr[0],
					To:         redirectArr[1],
					StatusCode: statusCode,
				})
			}
		}
		_ = redirectsCache.Set(cacheKey, redirects, redirectsCacheTimeout)
	}
	return redirects
}

func (o *Options) matchRedirects(ctx *context.Context, giteaClient *gitea.Client, redirects []Redirect, redirectsCache cache.ICache) (final bool) {
	reqURL := ctx.Req.RequestURI
	// remove repo and branch from request url
	reqURL = strings.TrimPrefix(reqURL, "/"+o.TargetRepo)
	reqURL = strings.TrimPrefix(reqURL, "/@"+o.TargetBranch)

	for _, redirect := range redirects {
		if dstURL, ok := redirect.rewriteURL(reqURL); ok {
			if o.TargetPath == dstURL { // recursion base case, rewrite directly when paths are the same
				return true
			} else if redirect.StatusCode == 200 { // do rewrite if status code is 200
				o.TargetPath = dstURL
				o.Upstream(ctx, giteaClient, redirectsCache)
			} else {
				ctx.Redirect(dstURL, redirect.StatusCode)
			}
			return true
		}
	}

	return false
}
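getRedirects above parses the repository's `_redirects` file line by line: comment lines and lines with fewer than two whitespace-separated fields are skipped, the first two fields become the source and destination paths, and an optional third field overrides the default 301 status code. A simplified, standalone sketch of that parsing; the file content is a hypothetical example and the error handling is condensed compared to the code above:

// Standalone sketch of how a _redirects file maps to (from, to, status) triples.
package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	// hypothetical _redirects file content
	body := "# old blog URLs\n/articles/*  /posts/:splat  302\n/home        /\n/app/*       /app/index.html  200\n"

	for _, line := range strings.Split(body, "\n") {
		fields := strings.Fields(line)
		// comments and lines with fewer than two fields are ignored
		if strings.HasPrefix(line, "#") || len(fields) < 2 {
			continue
		}
		statusCode := 301 // default when no third column is given
		if len(fields) == 3 {
			if code, err := strconv.Atoi(fields[2]); err == nil {
				statusCode = code
			}
		}
		fmt.Printf("from=%s to=%s status=%d\n", fields[0], fields[1], statusCode)
	}
	// Output:
	// from=/articles/* to=/posts/:splat status=302
	// from=/home to=/ status=301
	// from=/app/* to=/app/index.html status=200
}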
36
server/upstream/redirects_test.go
Normal file
@@ -0,0 +1,36 @@
package upstream

import (
	"testing"
)

func TestRedirect_rewriteURL(t *testing.T) {
	for _, tc := range []struct {
		redirect   Redirect
		reqURL     string
		wantDstURL string
		wantOk     bool
	}{
		{Redirect{"/", "/dst", 200}, "/", "/dst", true},
		{Redirect{"/", "/dst", 200}, "/foo", "", false},
		{Redirect{"/src", "/dst", 200}, "/src", "/dst", true},
		{Redirect{"/src", "/dst", 200}, "/foo", "", false},
		{Redirect{"/src", "/dst", 200}, "/src/foo", "", false},
		{Redirect{"/*", "/dst", 200}, "/", "/dst", true},
		{Redirect{"/*", "/dst", 200}, "/src", "/dst", true},
		{Redirect{"/src/*", "/dst/:splat", 200}, "/src", "/dst/", true},
		{Redirect{"/src/*", "/dst/:splat", 200}, "/src/", "/dst/", true},
		{Redirect{"/src/*", "/dst/:splat", 200}, "/src/foo", "/dst/foo", true},
		{Redirect{"/src/*", "/dst/:splat", 200}, "/src/foo/bar", "/dst/foo/bar", true},
		{Redirect{"/src/*", "/dst/:splatsuffix", 200}, "/src/foo", "/dst/foosuffix", true},
		{Redirect{"/src/*", "/dst:splat", 200}, "/src/foo", "/dstfoo", true},
		{Redirect{"/src/*", "/dst", 200}, "/srcfoo", "", false},
		// This is the example from FEATURES.md:
		{Redirect{"/articles/*", "/posts/:splat", 302}, "/articles/2022/10/12/post-1/", "/posts/2022/10/12/post-1/", true},
	} {
		if dstURL, ok := tc.redirect.rewriteURL(tc.reqURL); dstURL != tc.wantDstURL || ok != tc.wantOk {
			t.Errorf("%#v.rewriteURL(%q) = %q, %v; want %q, %v",
				tc.redirect, tc.reqURL, dstURL, ok, tc.wantDstURL, tc.wantOk)
		}
	}
}
318
server/upstream/upstream.go
Normal file
@@ -0,0 +1,318 @@
package upstream

import (
	"cmp"
	"errors"
	"fmt"
	"io"
	"net/http"
	"slices"
	"strconv"
	"strings"
	"time"

	"github.com/rs/zerolog/log"

	"codeberg.org/codeberg/pages/html"
	"codeberg.org/codeberg/pages/server/cache"
	"codeberg.org/codeberg/pages/server/context"
	"codeberg.org/codeberg/pages/server/gitea"
)

const (
	headerLastModified    = "Last-Modified"
	headerIfModifiedSince = "If-Modified-Since"
	headerAcceptEncoding  = "Accept-Encoding"
	headerContentEncoding = "Content-Encoding"

	rawMime = "text/plain; charset=utf-8"
)

// upstreamIndexPages lists pages that may be considered as index pages for directories.
var upstreamIndexPages = []string{
	"index.html",
}

// upstreamNotFoundPages lists pages that may be considered as custom 404 Not Found pages.
var upstreamNotFoundPages = []string{
	"404.html",
}

// Options provides various options for the upstream request.
type Options struct {
	TargetOwner  string
	TargetRepo   string
	TargetBranch string
	TargetPath   string

	// Used for debugging purposes.
	Host string

	TryIndexPages   bool
	BranchTimestamp time.Time
	// internal
	appendTrailingSlash bool
	redirectIfExists    string

	ServeRaw bool
}

// allowed encodings
var allowedEncodings = map[string]string{
	"gzip":     ".gz",
	"br":       ".br",
	"zstd":     ".zst",
	"identity": "",
}

// parses Accept-Encoding header into a list of acceptable encodings
func AcceptEncodings(header string) []string {
	log.Trace().Msgf("got accept-encoding: %s", header)
	encodings := []string{}
	globQuality := 0.0
	qualities := make(map[string]float64)

	for _, encoding := range strings.Split(header, ",") {
		name, quality_str, has_quality := strings.Cut(encoding, ";q=")
		quality := 1.0

		if has_quality {
			var err error
			quality, err = strconv.ParseFloat(quality_str, 64)
			if err != nil || quality < 0 {
				continue
			}
		}

		name = strings.TrimSpace(name)

		if name == "*" {
			globQuality = quality
		} else {
			_, allowed := allowedEncodings[name]
			if allowed {
				qualities[name] = quality
				if quality > 0 {
					encodings = append(encodings, name)
				}
			}
		}
	}

	if globQuality > 0 {
		for encoding := range allowedEncodings {
			_, exists := qualities[encoding]
			if !exists {
				encodings = append(encodings, encoding)
				qualities[encoding] = globQuality
			}
		}
	} else {
		_, exists := qualities["identity"]
		if !exists {
			encodings = append(encodings, "identity")
			qualities["identity"] = -1
		}
	}

	slices.SortStableFunc(encodings, func(x, y string) int {
		// sort in reverse order; big quality comes first
		return cmp.Compare(qualities[y], qualities[x])
	})
	log.Trace().Msgf("decided encoding order: %v", encodings)
	return encodings
}

// Upstream requests a file from the Gitea API at GiteaRoot and writes it to the request context.
func (o *Options) Upstream(ctx *context.Context, giteaClient *gitea.Client, redirectsCache cache.ICache) bool {
	log := log.With().Str("ReqId", ctx.ReqId).Strs("upstream", []string{o.TargetOwner, o.TargetRepo, o.TargetBranch, o.TargetPath}).Logger()

	log.Debug().Msg("Start")

	if o.TargetOwner == "" || o.TargetRepo == "" {
		html.ReturnErrorPage(ctx, "forge client: either repo owner or name info is missing", http.StatusBadRequest)
		return true
	}

	// Check if the branch exists and when it was modified
	if o.BranchTimestamp.IsZero() {
		branchExist, err := o.GetBranchTimestamp(giteaClient)
		// handle 404
		if err != nil && errors.Is(err, gitea.ErrorNotFound) || !branchExist {
			html.ReturnErrorPage(ctx,
				fmt.Sprintf("branch <code>%q</code> for <code>%s/%s</code> not found", o.TargetBranch, o.TargetOwner, o.TargetRepo),
				http.StatusNotFound)
			return true
		}

		// handle unexpected errors
		if err != nil {
			html.ReturnErrorPage(ctx,
				fmt.Sprintf("could not get timestamp of branch <code>%q</code>: '%v'", o.TargetBranch, err),
				http.StatusFailedDependency)
			return true
		}
	}

	// Check if the browser has a cached version
	if ctx.Response() != nil {
		if ifModifiedSince, err := time.Parse(time.RFC1123, ctx.Response().Header.Get(headerIfModifiedSince)); err == nil {
			if ifModifiedSince.After(o.BranchTimestamp) {
				ctx.RespWriter.WriteHeader(http.StatusNotModified)
				log.Trace().Msg("check response against last modified: valid")
				return true
			}
		}
		log.Trace().Msg("check response against last modified: outdated")
	}

	log.Debug().Msg("Preparing")

	var reader io.ReadCloser
	var header http.Header
	var statusCode int
	var err error

	// pick first non-404 response for encoding, *only* if not root
	if o.TargetPath == "" || strings.HasSuffix(o.TargetPath, "/") {
		err = gitea.ErrorNotFound
	} else {
		for _, encoding := range AcceptEncodings(ctx.Req.Header.Get(headerAcceptEncoding)) {
			log.Trace().Msgf("try %s encoding", encoding)

			// add extension for encoding
			path := o.TargetPath + allowedEncodings[encoding]
			reader, header, statusCode, err = giteaClient.ServeRawContent(ctx, o.TargetOwner, o.TargetRepo, o.TargetBranch, path, true)
			if statusCode == http.StatusNotFound {
				continue
			}
			if err != nil {
				break
			}
			log.Debug().Msgf("using %s encoding", encoding)
			if encoding != "identity" {
				header.Set(headerContentEncoding, encoding)
			}
			break
		}
		if reader != nil {
			defer reader.Close()
		}
	}

	log.Debug().Msg("Aquisting")

	// Handle not found error
	if err != nil && errors.Is(err, gitea.ErrorNotFound) {
		log.Debug().Msg("Handling not found error")
		// Get and match redirects
		redirects := o.getRedirects(ctx, giteaClient, redirectsCache)
		if o.matchRedirects(ctx, giteaClient, redirects, redirectsCache) {
			log.Trace().Msg("redirect")
			return true
		}

		if o.TryIndexPages {
			log.Trace().Msg("try index page")
			// copy the o struct & try if an index page exists
			optionsForIndexPages := *o
			optionsForIndexPages.TryIndexPages = false
			optionsForIndexPages.appendTrailingSlash = true
			for _, indexPage := range upstreamIndexPages {
				optionsForIndexPages.TargetPath = strings.TrimSuffix(o.TargetPath, "/") + "/" + indexPage
				if optionsForIndexPages.Upstream(ctx, giteaClient, redirectsCache) {
					return true
				}
			}
			log.Trace().Msg("try html file with path name")
			// compatibility fix for GitHub Pages (/example → /example.html)
			optionsForIndexPages.appendTrailingSlash = false
			optionsForIndexPages.redirectIfExists = strings.TrimSuffix(ctx.Path(), "/") + ".html"
			optionsForIndexPages.TargetPath = o.TargetPath + ".html"
			if optionsForIndexPages.Upstream(ctx, giteaClient, redirectsCache) {
				return true
			}
		}

		log.Debug().Msg("not found")

		ctx.StatusCode = http.StatusNotFound
		if o.TryIndexPages {
			log.Trace().Msg("try not found page")
			// copy the o struct & try if a not found page exists
			optionsForNotFoundPages := *o
			optionsForNotFoundPages.TryIndexPages = false
			optionsForNotFoundPages.appendTrailingSlash = false
			for _, notFoundPage := range upstreamNotFoundPages {
				optionsForNotFoundPages.TargetPath = "/" + notFoundPage
				if optionsForNotFoundPages.Upstream(ctx, giteaClient, redirectsCache) {
					return true
				}
			}
			log.Trace().Msg("not found page missing")
		}

		return false
	}

	// handle unexpected client errors
	if err != nil || reader == nil || statusCode != http.StatusOK {
		log.Debug().Msg("Handling error")
		var msg string

		if err != nil {
			msg = "forge client: returned unexpected error"
			log.Error().Err(err).Msg(msg)
			msg = fmt.Sprintf("%s: '%v'", msg, err)
		}
		if reader == nil {
			msg = "forge client: returned no reader"
			log.Error().Msg(msg)
		}
		if statusCode != http.StatusOK {
			msg = fmt.Sprintf("forge client: couldn't fetch contents: <code>%d - %s</code>", statusCode, http.StatusText(statusCode))
			log.Error().Msg(msg)
		}

		html.ReturnErrorPage(ctx, msg, http.StatusInternalServerError)
		return true
	}

	// Append trailing slash if missing (for index files), and redirect to fix filenames in general
	// o.appendTrailingSlash is only true when looking for index pages
	if o.appendTrailingSlash && !strings.HasSuffix(ctx.Path(), "/") {
		log.Trace().Msg("append trailing slash and redirect")
		ctx.Redirect(ctx.Path()+"/", http.StatusTemporaryRedirect)
		return true
	}
	if strings.HasSuffix(ctx.Path(), "/index.html") && !o.ServeRaw {
		log.Trace().Msg("remove index.html from path and redirect")
		ctx.Redirect(strings.TrimSuffix(ctx.Path(), "index.html"), http.StatusTemporaryRedirect)
		return true
	}
	if o.redirectIfExists != "" {
		ctx.Redirect(o.redirectIfExists, http.StatusTemporaryRedirect)
		return true
	}

	// Set ETag & MIME
	o.setHeader(ctx, header)

	log.Debug().Msg("Prepare response")

	ctx.RespWriter.WriteHeader(ctx.StatusCode)

	// Write the response body to the original request
	if reader != nil {
		_, err := io.Copy(ctx.RespWriter, reader)
		if err != nil {
			log.Error().Err(err).Msgf("Couldn't write body for %q", o.TargetPath)
			html.ReturnErrorPage(ctx, "", http.StatusInternalServerError)
			return true
		}
	}

	log.Debug().Msg("Sending response")

	return true
}
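AcceptEncodings above turns an Accept-Encoding header into an ordered preference list over the encodings the server can serve from pre-compressed files (gzip, br, zstd, identity), sorting by the client's quality values and appending identity as a low-priority fallback when no "*" entry raises the glob quality. A small usage sketch of that exported helper, assuming the module import path shown in the diff's other imports; the header value is only an example:

// Usage sketch for AcceptEncodings; the header value is illustrative.
package main

import (
	"fmt"

	"codeberg.org/codeberg/pages/server/upstream"
)

func main() {
	// "br" has the implicit quality 1.0, "gzip" is 0.9; since there is no "*"
	// entry with a positive quality, "identity" is appended as a fallback.
	order := upstream.AcceptEncodings("gzip;q=0.9, br")
	fmt.Println(order) // [br gzip identity]
}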
27
server/utils/utils.go
Normal file
@ -0,0 +1,27 @@
package utils

import (
	"net/url"
	"path"
	"strings"
)

func TrimHostPort(host string) string {
	i := strings.IndexByte(host, ':')
	if i >= 0 {
		return host[:i]
	}
	return host
}

func CleanPath(uriPath string) string {
	unescapedPath, _ := url.PathUnescape(uriPath)
	cleanedPath := path.Join("/", unescapedPath)

	// If the path refers to a directory, add a trailing slash.
	if !strings.HasSuffix(cleanedPath, "/") && (strings.HasSuffix(unescapedPath, "/") || strings.HasSuffix(unescapedPath, "/.") || strings.HasSuffix(unescapedPath, "/..")) {
		cleanedPath += "/"
	}

	return cleanedPath
}
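Both helpers are pure string functions, so they can be exercised directly. A small usage sketch (the import path is assumed from the repository layout; expected outputs are taken from the tests that follow):

package main

import (
	"fmt"

	// assumed module path; adjust to the actual go.mod module name
	"codeberg.org/codeberg/pages/server/utils"
)

func main() {
	// Drop an optional :port from a Host header value.
	fmt.Println(utils.TrimHostPort("example.com:80")) // example.com

	// Unescape, collapse duplicate slashes and resolve . / .. segments.
	fmt.Println(utils.CleanPath("/xxx/yyy/../"))         // /xxx/
	fmt.Println(utils.CleanPath("/xxxx%2fyyy%2f%2F%2F")) // /xxxx/yyy/
}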
69
server/utils/utils_test.go
Normal file
@ -0,0 +1,69 @@
package utils

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestTrimHostPort(t *testing.T) {
	assert.EqualValues(t, "aa", TrimHostPort("aa"))
	assert.EqualValues(t, "", TrimHostPort(":"))
	assert.EqualValues(t, "example.com", TrimHostPort("example.com:80"))
}

// TestCleanPath is mostly copied from fasthttp, to keep the behaviour we had before migrating away from it.
// Source (MIT licensed): https://github.com/valyala/fasthttp/blob/v1.48.0/uri_test.go#L154
// Copyright (c) 2015-present Aliaksandr Valialkin, VertaMedia, Kirill Danshin, Erik Dubbelboer, FastHTTP Authors
func TestCleanPath(t *testing.T) {
	// double slash
	testURIPathNormalize(t, "/aa//bb", "/aa/bb")

	// triple slash
	testURIPathNormalize(t, "/x///y/", "/x/y/")

	// multi slashes
	testURIPathNormalize(t, "/abc//de///fg////", "/abc/de/fg/")

	// encoded slashes
	testURIPathNormalize(t, "/xxxx%2fyyy%2f%2F%2F", "/xxxx/yyy/")

	// dotdot
	testURIPathNormalize(t, "/aaa/..", "/")

	// dotdot with trailing slash
	testURIPathNormalize(t, "/xxx/yyy/../", "/xxx/")

	// multi dotdots
	testURIPathNormalize(t, "/aaa/bbb/ccc/../../ddd", "/aaa/ddd")

	// dotdots separated by other data
	testURIPathNormalize(t, "/a/b/../c/d/../e/..", "/a/c/")

	// too many dotdots
	testURIPathNormalize(t, "/aaa/../../../../xxx", "/xxx")
	testURIPathNormalize(t, "/../../../../../..", "/")
	testURIPathNormalize(t, "/../../../../../../", "/")

	// encoded dotdots
	testURIPathNormalize(t, "/aaa%2Fbbb%2F%2E.%2Fxxx", "/aaa/xxx")

	// double slash with dotdots
	testURIPathNormalize(t, "/aaa////..//b", "/b")

	// fake dotdot
	testURIPathNormalize(t, "/aaa/..bbb/ccc/..", "/aaa/..bbb/")

	// single dot
	testURIPathNormalize(t, "/a/./b/././c/./d.html", "/a/b/c/d.html")
	testURIPathNormalize(t, "./foo/", "/foo/")
	testURIPathNormalize(t, "./../.././../../aaa/bbb/../../../././../", "/")
	testURIPathNormalize(t, "./a/./.././../b/./foo.html", "/b/foo.html")
}

func testURIPathNormalize(t *testing.T, requestURI, expectedPath string) {
	cleanedPath := CleanPath(requestURI)
	if cleanedPath != expectedPath {
		t.Fatalf("Unexpected path %q. Expected %q. requestURI=%q", cleanedPath, expectedPath, requestURI)
	}
}
3
server/version/version.go
Normal file
@ -0,0 +1,3 @@
package version

var Version string = "dev"
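Version stays at "dev" unless it is overridden at link time, which is the usual Go pattern for a variable like this. A sketch of how a build could stamp it (the module path is assumed; the project's actual build setup may differ):

package main

import (
	"fmt"

	// assumed module path; the real import depends on go.mod
	"codeberg.org/codeberg/pages/server/version"
)

func main() {
	// Built plainly this prints "dev"; a release build can override it, e.g.:
	//   go build -ldflags "-X codeberg.org/codeberg/pages/server/version.Version=v4.0.0"
	fmt.Println("pages-server", version.Version)
}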
@ -1,152 +0,0 @@
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <meta name="description" content="Create your own free website for you and your projects using Codeberg Pages.">
  <meta name="keywords" content="Codeberg, Website, Pages, Free, Hosting, Git, Repository">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">

  <link rel="shortcut icon" href="https://${HOSTNAME_FQDN}/assets/img/logo-medium.svg" />
  <link rel="icon" href="https://design.${HOSTNAME_FQDN}/logo-kit/favicon.ico" type="image/x-icon" />
  <link rel="icon" href="https://design.${HOSTNAME_FQDN}/logo-kit/favicon.svg" type="image/svg+xml" />
  <link rel="apple-touch-icon" href="https://design.${HOSTNAME_FQDN}/logo-kit/apple-touch-icon.png" />

  <!-- Details and License: https://fonts.${HOSTNAME_FQDN}/fonts/inter/ -->
  <link rel="stylesheet" type="text/css" href="https://fonts.${HOSTNAME_FQDN}/dist/inter/Inter%20Web/inter.css">

  <!-- License: http://www.apache.org/licenses/LICENSE-2.0.txt -->
  <!-- Material Design icons by Google -->

  <title>Codeberg Pages - Static pages for your projects.</title>

  <style>
    body {
      margin: 0px;
      padding: 0px;
      background: #fcfcfc;
    }

    code {
      display: inline-block;
      margin: 5px;
      padding: 2px 6px 2px 6px;
      font-size: 14px;
      border-radius: 3px;
      background: #393e46;
      color: white;
      box-shadow: 0px 4px 10px -2px rgb(0, 0, 0, 0.3);
    }

    code:hover {
      box-shadow: 0px 4px 10px 1px rgb(0, 0, 0, 0.3);
      transition: box-shadow 0.2s ease-in-out;
    }

    .container {
      margin: 50px 0 50px 0;
      font-family: "Inter", sans-serif;
      font-weight: 300;
      color: #404040;
    }

    .container .logo {
      width: 220px;
      margin-bottom: 40px;
      mix-blend-mode: multiply;
    }

    .container .header .name {
      font-size: 50px;
      font-weight: 900;
      line-height: 1;
      color: #2185d0;
    }

    .container .header .description {
      font-size: 23px;
      margin-top: 1em;
    }

    .cards {
      display: flex;
      align-items: top;
      justify-content: center;
    }

    .card {
      display: inline-block;
      margin: 0 10px 0 10px;
      max-width: 200px;
    }

    .card .card-icon {
      display: inline-block;
      padding: 12px;
      border-radius: 30px;
      background: black;
      box-shadow: 0px 4px 16px -10px rgba(0,0,0,0.75);
      width: 18px;
      height: 18px;
    }

    .card .card-header {
      margin-top: 20px;
      font-size: 18px;
      font-weight: 700;
    }

    .card .card-description {
      margin-top: 2px;
      font-size: 16px;
    }

    .elevated {
      display: block;
      margin-top: 60px;
      padding: 50px 5px 50px 5px;
      background: #fafafa;
      box-shadow: inset 0px 0px 10px 0px rgba(0,0,0,0.10);
    }
  </style>
</head>
<body>
  <center>
    <div class="container">
      <a href="https://${HOSTNAME_FQDN}"><img class="logo" src="https://design.${HOSTNAME_FQDN}/logo-kit/icon.svg" alt="Go to ${HOSTNAME_FQDN}"></a><br>
      <div class="header">
        <div class="name">Codeberg Pages.</div>
        <div class="description">Static pages for your projects.</div>
        <code style="margin-top: 15px;">https://<your_username>.${HOSTNAME_PAGES}/</code>
      </div>

      <div class="elevated">
        <div class="cards">
          <div class="card">
            <div style="background: #efb960;" class="card-icon">
              <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="white" width="18px" height="18px"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M19 13h-6v6h-2v-6H5v-2h6V5h2v6h6v2z"/></svg>
            </div>
            <div class="card-header">Create a repository.</div>
            <div class="card-description">Create a repo named 'pages' in your user account or organization.</div>
          </div>

          <div class="card">
            <div style="background: #0a97b0;" class="card-icon">
              <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="white" width="18px" height="18px"><path d="M0 0h24v24H0z" fill="none"/><path d="M19.35 10.04C18.67 6.59 15.64 4 12 4 9.11 4 6.6 5.64 5.35 8.04 2.34 8.36 0 10.91 0 14c0 3.31 2.69 6 6 6h13c2.76 0 5-2.24 5-5 0-2.64-2.05-4.78-4.65-4.96zM14 13v4h-4v-4H7l5-5 5 5h-3z"/></svg>
            </div>
            <div class="card-header">Push your content.</div>
            <div class="card-description">Push your static content, HTML, style, fonts or images.</div>
          </div>

          <div class="card">
            <div style="background: #6c5b7b;" class="card-icon">
              <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="white" width="18px" height="18px"><path d="M0 0h24v24H0z" fill="none"/><path d="M14.4 6L14 4H5v17h2v-7h5.6l.4 2h7V6z"/></svg>
            </div>
            <div class="card-header">You're done!</div>
            <div class="card-description">You should now be able to access your content using the link shown above.</div>
          </div>
        </div>
      </div>
    </div>
  </center>
</body>
</html>
@ -1,276 +0,0 @@
<?php

function send_response($code, $message = "") {
    http_response_code($code);
    echo $message;
    exit();
}

$hostname_fqdn = "${HOSTNAME_FQDN}";
$hostname_pages = "${HOSTNAME_PAGES}";

$domain_parts = explode('.', $_SERVER['HTTP_HOST']);
$subdomain = implode(".", array_slice($domain_parts, 0, -2));
$tld = end($domain_parts);

$request_uri = explode("?", $_SERVER["REQUEST_URI"])[0];
$request_url = filter_var($request_uri, FILTER_SANITIZE_URL);
$request_url = str_replace("%20", " ", $request_url);
$request_url_parts = explode("/", $request_url);
$request_url_parts = array_diff($request_url_parts, array("")); # Remove empty parts in URL

$repo = "pages";

if ($tld === "org") {
    $subdomain_repo = array(
        // subdomain => array(owner, repo, allowCORS),
        "docs" => array("docs", "pages", false),
        "fonts" => array("codeberg-fonts", "pages", true),
        "get-it-on" => array("get-it-on", "pages", false),
        "design" => array("codeberg", "design", true)
    );
    if (array_key_exists($subdomain, $subdomain_repo)) {
        $owner = $subdomain_repo[$subdomain][0];
        $repo = $subdomain_repo[$subdomain][1];
        if ($subdomain_repo[$subdomain][2]) {
            // Allow CORS requests to specified pages, for web fonts etc.
            header("Access-Control-Allow-Origin: *");
        }
    } else {
        $owner = strtolower(array_shift($request_url_parts));
        if (!$owner) {
            header("Location: https://" . $hostname_pages);
            exit;
        }
        if (strpos($owner, ".") === false) {
            $h = "Location: https://" . $owner . "." . $hostname_pages . "/" . implode("/", $request_url_parts);
            if ($_SERVER['QUERY_STRING'] !== "")
                $h .= "?" . $_SERVER['QUERY_STRING'];
            header($h);
            exit;
        }
    }
} else {
    $owner = strtolower($subdomain);
    if (strpos($owner, ".") !== false)
        send_response(200, "Pages not supported for user names with dots. Please rename your username to use Codeberg pages.");
    if ($owner === "raw") {
        // Make URL safe
        $url = "/" . explode("?", $_SERVER["REQUEST_URI"])[0];
        $url = preg_replace('/\/\/+/', "/", $url); // clean duplicate slashes
        if (strpos($url, "/../") !== false || strpos($url, "/./") !== false || substr($url, -3) === "/.." || substr($url, -2) === "/.") {
            // contains .. or . path elements (which should be filtered by web browsers anyways)
            http_response_code(403);
            die("Forbidden");
        }
        $url_parts = explode("/", substr($url, 1), 3);
        if (strpos($url_parts[2], "@") === 0) {
            $url_parts[2] = substr($url_parts[2], 1);
        }
        if (count($url_parts) < 3 || strpos($url_parts[2], "blob/") === 0) {
            // misses /owner/repo/path or path begins with "blob/" (e.g. issue attachments etc.)
            http_response_code(403);
            die("Forbidden");
        }
        if (strpos(" admin api assets attachments avatars captcha commits debug error explore ghost help install issues less login metrics milestones new notifications org plugins pulls raw repo search stars template user ", " " . $url_parts[0] . " ") !== false) {
            // username is forbidden by Gitea
            http_response_code(403);
            die("Forbidden");
        }
        $url = "/api/v1/repos/" . $url_parts[0] . "/" . $url_parts[1] . "/raw/" . $url_parts[2];

        // Send request to Gitea
        $ch = curl_init("http://localhost:3000" . $url);
        curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "GET");
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($ch, CURLOPT_HEADER, true);
        $response = curl_exec($ch);
        $status = curl_getinfo($ch, CURLINFO_HTTP_CODE);
        $header_size = curl_getinfo($ch, CURLINFO_HEADER_SIZE);
        $header = substr($response, 0, $header_size);
        $header = explode("\r\n", $header);
        $body = substr($response, $header_size);
        foreach($header as $h) {
            if ($h && substr($h, 0, 11) != "Set-Cookie:")
                if (substr($h, 0, 13) == "Content-Type:" && strpos($h, "text/html") !== false)
                    // text/html shouldn't be rendered on raw.codeberg.org, as it might confuse both users (with it being a legit codeberg.org subdomain) and developers (with it having a really strict CSP)
                    header(str_replace("text/html", "text/plain", $h));
                else
                    header($h);
        }
        // Allow CORS
        header("Access-Control-Allow-Origin: *");
        // Even though text/html isn't allowed, SVG files might still invoke JavaScript, which is blocked here
        header("Content-Security-Policy: default-src 'none'; style-src 'unsafe-inline'; sandbox");
        send_response($status, $body);
        die();
    }
}

$reservedUsernames = array(
    "abuse", "admin", "api", "app", "apt", "apps", "appserver", "archive", "archives", "assets", "attachments", "auth", "avatar", "avatars",
    "bbs", "bin", "blog",
    "cache", "cd", "cdn", "ci", "cloud", "cluster", "commits", "connect", "contact",
    "dashboard", "debug", "deploy", "deployment", "dev", "dns", "dns0", "dns1", "dns2", "dns3", "dns4", "doc", "download", "downloads",
    "email", "error", "explore",
    "fonts", "forum", "ftp", "fuck",
    "gist", "gists", "ghost",
    "hello", "help", "helpdesk", "host",
    "i", "imap", "info", "install", "internal", "issues",
    "less", "login",
    "m", "me", "mail", "mailserver", "manifest", "merch", "merchandise", "metrics", "milestones", "mx",
    "new", "news", "notifications",
    "official", "org", "ota", "owa",
    "page", "pages", "packages", "pastebin", "plugins", "poll", "polls", "pop", "pop3", "portal", "postmaster", "project", "projects", "pulls",
    "raw", "remote", "repo", "robot", "robots",
    "search", "secure", "server", "shop", "shopping", "signin", "signon", "smtp", "ssl", "sso", "stars", "store", "support", "swag", "swagshop", "suck",
    "takeout", "template", "test", "testing",
    "user",
    "vote", "voting",
    "web", "webmail", "webmaster", "webshop", "webstore", "welcome", "www", "www0", "www1", "www2", "www3", "www4", "www5", "www6", "www7", "www8", "www9",
    "ns", "ns0", "ns1", "ns2", "ns3", "ns4",
    "vpn",
);

if (in_array($owner, $reservedUsernames))
    send_response(404, "Reserved user name '" . $owner . "' cannot have pages");

if (!$owner) {
    send_response(200, file_get_contents("./default-page.html"));
}

# Restrict allowed characters in request URI:
if (preg_match("/^\/[a-zA-Z0-9_ +\-\/\.]*\$/", $request_url) != 1)
    send_response(404, "invalid request URL");

$git_prefix = "/data/git/gitea-repositories";
$git_root = realpath("$git_prefix/$owner/$repo.git");
$file_url = implode("/", $request_url_parts);

# Ensure that only files within $git_root are accessed:
if (substr($git_root, 0, strlen($git_prefix)) !== $git_prefix)
    send_response(404, "this user/organization does not have codeberg pages");

# Setup file descriptors
$null_fd = array(
    1 => array('file','/dev/null','w'),
    2 => array('file','/dev/null','w'),
);

$pipe_fd = array(
    1 => array('pipe','w'),
    2 => array('pipe','w'),
);

/**
 * Execute git commands
 * @param array $cmd_array git command and parameters as an array
 * @param string &$stdout reference to $stdout variable, to receive stdout value
 * @param string &$stderr reference to $stderr variable, to receive stderr value
 * @param string &$retval reference to $retval variable, to receive return value
 */
function git_exec($cmd_array, &$stdout = false, &$retval = false, &$stderr = false) {
    global $git_root, $pipe_fd;
    $git_bin = '/usr/bin/git';
    array_unshift($cmd_array, $git_bin);

    $process = proc_open($cmd_array, $pipe_fd, $pipes, $git_root);
    if($stdout !== false)
        $stdout = stream_get_contents($pipes[1]);
    if($stderr !== false)
        $stderr = stream_get_contents($pipes[2]);
    fclose($pipes[1]);
    fclose($pipes[2]);
    $tmpret = proc_close($process);
    if($retval !== false)
        $retval = $tmpret;
}

/**
 * Check whether git command succeeds
 * @param array $cmd_array git command and parameters as an array
 * @return bool true if return value is 0, false otherwise
 */
function git_check($cmd_array) {
    global $git_root, $null_fd;
    $git_bin = '/usr/bin/git';
    array_unshift($cmd_array, $git_bin);
    return ( proc_close(proc_open($cmd_array,$null_fd,$pipes,$git_root)) === 0 );
}

# If this is a folder, we explicitly redirect to folder URL, otherwise browsers will construct invalid relative links:
$command = ["ls-tree", "HEAD:$file_url"];
if (git_check($command)) {
    if (substr($request_url, -1) !== "/") {
        $h = "Location: " . $request_url . "/";
        if ($_SERVER['QUERY_STRING'] !== "")
            $h .= "?" . $_SERVER['QUERY_STRING'];
        header($h);
        exit();
    }
    if ($file_url !== "")
        $file_url .= "/";
    $file_url .= "index.html";
}

$ext = pathinfo($file_url, PATHINFO_EXTENSION);
$ext = strtolower($ext);

$mime_types = array(
    "css" => "text/css",
    "csv" => "text/csv",
    "gif" => "image/gif",
    "html" => "text/html",
    "ico" => "image/x-icon",
    "ics" => "text/calendar",
    "jpg" => "image/jpeg",
    "jpeg" => "image/jpeg",
    "js" => "application/javascript",
    "json" => "application/json",
    "pdf" => "application/pdf",
    "png" => "image/png",
    "svg" => "image/svg+xml",
    "ttf" => "font/ttf",
    "txt" => "text/plain",
    "woff" => "font/woff",
    "woff2" => "font/woff2",
    "xml" => "text/xml"
);

$mime_type = "application/octet-stream";
if (array_key_exists($ext, $mime_types))
    $mime_type = $mime_types[$ext];

header("Content-Type: " . $mime_type);

#header("Cache-Control: public, max-age=10, immutable");

$command = ["log", "--format=%H", "-1"];
git_exec($command, $output, $retval);
if ($retval === 0 && !empty($output)) {
    $revision=trim($output);
    header('ETag: "' . $revision . '"');
    if (isset($_SERVER["HTTP_IF_NONE_MATCH"])) {
        $req_revision = str_replace('"', '', str_replace('W/"', '', $_SERVER["HTTP_IF_NONE_MATCH"]));
        if ($req_revision === $revision) {
            send_response(304);
        }
    }
}
$command = ["show", "HEAD:$file_url"];
git_exec($command, $output, $retval);
if ($retval !== 0) {
    # Try adding '.html' suffix, if this does not work either, report error
    $command = ["show", "HEAD:$file_url.html"];
    git_exec($command, $output, $retval);
    header("Content-Type: text/html");
    if ($retval !== 0) {
        # Render user-provided 404.html if exists, generic 404 message if not:
        http_response_code(404);
        $command = ["show", "HEAD:404.html"];
        git_exec($command, $output, $retval);
        if ($retval !== 0)
            send_response(404, "no such file in repo: '" . htmlspecialchars($file_url) . "'");
    }
}
echo $output;
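One idea from the removed PHP that carries over to the new Go code (which also sets an ETag before writing the response, per the "Set ETag & MIME" step above) is conditional serving: the PHP used the latest commit hash as the ETag and answered a matching If-None-Match with 304 and no body. A minimal net/http sketch of that technique, with hypothetical names and a placeholder revision, independent of the project's actual setHeader implementation:

package main

import (
	"fmt"
	"log"
	"net/http"
	"strings"
)

// serveWithETag answers 304 Not Modified when the client already holds the
// revision identified by etag, and sends the body otherwise.
func serveWithETag(w http.ResponseWriter, r *http.Request, etag, body string) {
	quoted := `"` + etag + `"`
	w.Header().Set("ETag", quoted)

	// Accept both strong and weak validators, as the PHP version did.
	ifNoneMatch := strings.TrimPrefix(r.Header.Get("If-None-Match"), "W/")
	if ifNoneMatch == quoted {
		w.WriteHeader(http.StatusNotModified)
		return
	}

	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	fmt.Fprint(w, body)
}

func main() {
	// In the removed PHP the etag came from the latest commit hash of the
	// pages repo; here it is just a fixed placeholder.
	const revision = "0123456789abcdef"
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		serveWithETag(w, r, revision, "hello from pages\n")
	})
	log.Fatal(http.ListenAndServe("127.0.0.1:8880", nil))
}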