Compare commits

..

5 Commits

Author SHA1 Message Date
Xe Iaso
3bf1acd548 ci: use go stable
Signed-off-by: Xe Iaso <me@xeiaso.net>
2026-03-16 10:26:38 +00:00
Xe Iaso
780c1d8d6a ci(go): use go stable
Signed-off-by: Xe Iaso <me@xeiaso.net>
2026-03-16 10:18:10 +00:00
Xe Iaso
ea08ba2f61 ci: purge govulncheck, it's less signal than i hoped
Signed-off-by: Xe Iaso <me@xeiaso.net>
2026-03-16 10:12:14 +00:00
Mozi
fa518e1b8c docs: fix mixed tab/space indentation in Caddy config example (#1506)
Assisted-by: Claude Opus 4.6 via Copilot

Signed-off-by: Mozi <29089388+pzhlkj6612@users.noreply.github.com>
2026-03-12 16:35:53 +00:00
Xe Iaso
f38210fd84 docs(admin/policy): document ReadWritePaths for logging to files (#1469)
The default Anubis systemd configuration is very restrictive in
order to prevent any possible compromise of Anubis to be useful
by threat actors. As such, it assumes all logs will be pushed to
the system journal. Some administrators do not want Anubis' logs
to be pushed to the system journal and want Anubis to log to a
file instead.

This change documents how to set up ReadWritePaths in the Anubis
systemd configuration such that Anubis can log to a file as
administrators expect.

Closes: #1468

Signed-off-by: Xe Iaso <me@xeiaso.net>
2026-02-19 12:24:34 +00:00
34 changed files with 119 additions and 73 deletions

View File

@@ -31,7 +31,7 @@ jobs:
node-version: "24.11.0" node-version: "24.11.0"
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
with: with:
go-version: "1.25.4" go-version: "stable"
- uses: ko-build/setup-ko@d006021bd0c28d1ce33a07e7943d48b079944c8d # v0.9 - uses: ko-build/setup-ko@d006021bd0c28d1ce33a07e7943d48b079944c8d # v0.9

View File

@@ -41,7 +41,7 @@ jobs:
node-version: "24.11.0" node-version: "24.11.0"
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
with: with:
go-version: "1.25.4" go-version: "stable"
- uses: ko-build/setup-ko@d006021bd0c28d1ce33a07e7943d48b079944c8d # v0.9 - uses: ko-build/setup-ko@d006021bd0c28d1ce33a07e7943d48b079944c8d # v0.9

View File

@@ -19,7 +19,7 @@ jobs:
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
with: with:
go-version: "1.25.4" go-version: "stable"
- name: Check go.mod and go.sum in main directory - name: Check go.mod and go.sum in main directory
run: | run: |

View File

@@ -29,7 +29,7 @@ jobs:
node-version: "24.11.0" node-version: "24.11.0"
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
with: with:
go-version: "1.25.4" go-version: "stable"
- name: Cache playwright binaries - name: Cache playwright binaries
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
@@ -61,4 +61,4 @@ jobs:
- name: Govulncheck - name: Govulncheck
run: | run: |
go tool govulncheck ./... go tool govulncheck ./... ||:

View File

@@ -30,7 +30,7 @@ jobs:
node-version: "24.11.0" node-version: "24.11.0"
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
with: with:
go-version: "1.25.4" go-version: "stable"
- name: install node deps - name: install node deps
run: | run: |

View File

@@ -31,7 +31,7 @@ jobs:
node-version: "24.11.0" node-version: "24.11.0"
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
with: with:
go-version: "1.25.4" go-version: "stable"
- name: install node deps - name: install node deps
run: | run: |

View File

@@ -39,7 +39,7 @@ jobs:
node-version: "24.11.0" node-version: "24.11.0"
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
with: with:
go-version: "1.25.4" go-version: "stable"
- uses: ko-build/setup-ko@d006021bd0c28d1ce33a07e7943d48b079944c8d # v0.9 - uses: ko-build/setup-ko@d006021bd0c28d1ce33a07e7943d48b079944c8d # v0.9

View File

@@ -37,7 +37,7 @@ jobs:
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0 - uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
with: with:
go-version: "1.25.4" go-version: "stable"
- name: Run CI - name: Run CI
run: go run ./utils/cmd/backoff-retry bash test/ssh-ci/rigging.sh ${{ matrix.host }} run: go run ./utils/cmd/backoff-retry bash test/ssh-ci/rigging.sh ${{ matrix.host }}

View File

@@ -24,8 +24,7 @@ build: assets
lint: assets lint: assets
$(GO) vet ./... $(GO) vet ./...
$(GO) tool staticcheck ./... $(GO) tool staticcheck ./...
$(GO) tool govulncheck ./...
prebaked-build: prebaked-build:
$(GO) build -o ./var/anubis -ldflags "-X 'github.com/TecharoHQ/anubis.Version=$(VERSION)'" ./cmd/anubis $(GO) build -o ./var/anubis -ldflags "-X 'github.com/TecharoHQ/anubis.Version=$(VERSION)'" ./cmd/anubis
$(GO) build -o ./var/robots2policy -ldflags "-X 'github.com/TecharoHQ/anubis.Version=$(VERSION)'" ./cmd/robots2policy $(GO) build -o ./var/robots2policy -ldflags "-X 'github.com/TecharoHQ/anubis.Version=$(VERSION)'" ./cmd/robots2policy

View File

@@ -418,8 +418,8 @@ func main() {
var redirectDomainsList []string var redirectDomainsList []string
if *redirectDomains != "" { if *redirectDomains != "" {
domains := strings.SplitSeq(*redirectDomains, ",") domains := strings.Split(*redirectDomains, ",")
for domain := range domains { for _, domain := range domains {
_, err = url.Parse(domain) _, err = url.Parse(domain)
if err != nil { if err != nil {
log.Fatalf("cannot parse redirect-domain %q: %s", domain, err.Error()) log.Fatalf("cannot parse redirect-domain %q: %s", domain, err.Error())

View File

@@ -10,7 +10,6 @@ import (
"net/http" "net/http"
"os" "os"
"regexp" "regexp"
"slices"
"strings" "strings"
"github.com/TecharoHQ/anubis/lib/config" "github.com/TecharoHQ/anubis/lib/config"
@@ -211,8 +210,11 @@ func parseRobotsTxt(input io.Reader) ([]RobotsRule, error) {
// Mark blacklisted user agents (those with "Disallow: /") // Mark blacklisted user agents (those with "Disallow: /")
for i := range rules { for i := range rules {
if slices.Contains(rules[i].Disallows, "/") { for _, disallow := range rules[i].Disallows {
rules[i].IsBlacklist = true if disallow == "/" {
rules[i].IsBlacklist = true
break
}
} }
} }

View File

@@ -158,8 +158,8 @@ func TestDataFileConversion(t *testing.T) {
} }
if strings.ToLower(*outputFormat) == "yaml" { if strings.ToLower(*outputFormat) == "yaml" {
var actualData []any var actualData []interface{}
var expectedData []any var expectedData []interface{}
err = yaml.Unmarshal(actualOutput, &actualData) err = yaml.Unmarshal(actualOutput, &actualData)
if err != nil { if err != nil {
@@ -178,8 +178,8 @@ func TestDataFileConversion(t *testing.T) {
t.Errorf("Output mismatch for %s\nExpected:\n%s\n\nActual:\n%s", tc.name, expectedStr, actualStr) t.Errorf("Output mismatch for %s\nExpected:\n%s\n\nActual:\n%s", tc.name, expectedStr, actualStr)
} }
} else { } else {
var actualData []any var actualData []interface{}
var expectedData []any var expectedData []interface{}
err = json.Unmarshal(actualOutput, &actualData) err = json.Unmarshal(actualOutput, &actualData)
if err != nil { if err != nil {
@@ -419,6 +419,6 @@ Disallow: /`
// compareData performs a deep comparison of two data structures, // compareData performs a deep comparison of two data structures,
// ignoring differences that are semantically equivalent in YAML/JSON // ignoring differences that are semantically equivalent in YAML/JSON
func compareData(actual, expected any) bool { func compareData(actual, expected interface{}) bool {
return reflect.DeepEqual(actual, expected) return reflect.DeepEqual(actual, expected)
} }

View File

@@ -11,6 +11,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased] ## [Unreleased]
- Fixed mixed tab/space indentation in Caddy documentation code block
<!-- This changes the project to: --> <!-- This changes the project to: -->
## v1.25.0: Necron ## v1.25.0: Necron

View File

@@ -62,9 +62,9 @@ yourdomain.example.com {
tls your@email.address tls your@email.address
reverse_proxy http://anubis:3000 { reverse_proxy http://anubis:3000 {
header_up X-Real-Ip {remote_host} header_up X-Real-Ip {remote_host}
header_up X-Http-Version {http.request.proto} header_up X-Http-Version {http.request.proto}
} }
} }
``` ```

View File

@@ -393,6 +393,32 @@ logging:
When files are rotated out, the old files will be named after the rotation timestamp in [RFC 3339 format](https://www.rfc-editor.org/rfc/rfc3339). When files are rotated out, the old files will be named after the rotation timestamp in [RFC 3339 format](https://www.rfc-editor.org/rfc/rfc3339).
:::note
If you are running Anubis in systemd via a native package, the default systemd unit settings are very restrictive and will forbid writing to folders in `/var/log`. In order to fix this, please make a [drop-in unit](https://www.flatcar.org/docs/latest/setup/systemd/drop-in-units/) like the following:
```text
# /etc/systemd/system/anubis@instance-name.service.d/50-var-log-readwrite.conf
[Service]
ReadWritePaths=/run /var/log/anubis
```
Once you write this to the correct place, reload the systemd configuration:
```text
sudo systemctl daemon-reload
```
And then restart Anubis:
```text
sudo systemctl restart anubis@instance-name
```
You may be required to make drop-ins for each Anubis instance depending on the facts and circumstances of your deployment.
:::
### `stdio` sink ### `stdio` sink
By default, Anubis logs everything to the standard error stream of its process. This requires no configuration: By default, Anubis logs everything to the standard error stream of its process. This requires no configuration:

View File

@@ -36,7 +36,7 @@ func Glob(pattern, subj string) bool {
end := len(parts) - 1 end := len(parts) - 1
// Go over the leading parts and ensure they match. // Go over the leading parts and ensure they match.
for i := range end { for i := 0; i < end; i++ {
idx := strings.Index(subj, parts[i]) idx := strings.Index(subj, parts[i])
switch i { switch i {

View File

@@ -184,7 +184,7 @@ func TestHashCollisions(t *testing.T) {
for _, prefix := range prefixes { for _, prefix := range prefixes {
for _, suffix := range suffixes { for _, suffix := range suffixes {
for _, variation := range variations { for _, variation := range variations {
for i := range 100 { for i := 0; i < 100; i++ {
input := fmt.Sprintf("%s%s%s-%d", prefix, suffix, variation, i) input := fmt.Sprintf("%s%s%s-%d", prefix, suffix, variation, i)
hash := XXHash64sum(input) hash := XXHash64sum(input)
if existing, exists := xxhashHashes[hash]; exists { if existing, exists := xxhashHashes[hash]; exists {
@@ -211,7 +211,7 @@ func TestHashCollisions(t *testing.T) {
seqCount := 0 seqCount := 0
for _, pattern := range patterns { for _, pattern := range patterns {
for i := range 10000 { for i := 0; i < 10000; i++ {
input := fmt.Sprintf(pattern, i) input := fmt.Sprintf(pattern, i)
hash := XXHash64sum(input) hash := XXHash64sum(input)
if existing, exists := xxhashHashes[hash]; exists { if existing, exists := xxhashHashes[hash]; exists {

View File

@@ -120,7 +120,7 @@ func (i *Impl) makeAffirmations() []string {
count := rand.IntN(5) + 1 count := rand.IntN(5) + 1
var result []string var result []string
for range count { for j := 0; j < count; j++ {
result = append(result, i.affirmation.Spin()) result = append(result, i.affirmation.Spin())
} }
@@ -131,7 +131,7 @@ func (i *Impl) makeSpins() []string {
count := rand.IntN(5) + 1 count := rand.IntN(5) + 1
var result []string var result []string
for range count { for j := 0; j < count; j++ {
result = append(result, i.body.Spin()) result = append(result, i.body.Spin())
} }

View File

@@ -16,7 +16,7 @@ func (lo *ListOr[T]) UnmarshalJSON(data []byte) error {
// Check if first non-whitespace character is '[' // Check if first non-whitespace character is '['
firstChar := data[0] firstChar := data[0]
for i := range data { for i := 0; i < len(data); i++ {
if data[i] != ' ' && data[i] != '\t' && data[i] != '\n' && data[i] != '\r' { if data[i] != ' ' && data[i] != '\t' && data[i] != '\n' && data[i] != '\r' {
firstChar = data[i] firstChar = data[i]
break break
@@ -36,4 +36,4 @@ func (lo *ListOr[T]) UnmarshalJSON(data []byte) error {
} }
return nil return nil
} }

View File

@@ -95,7 +95,7 @@ func TestMemoryUsage(t *testing.T) {
// Run getTarget many times // Run getTarget many times
u, _ := url.Parse("/path/to/resource?query=1&foo=bar&baz=qux") u, _ := url.Parse("/path/to/resource?query=1&foo=bar&baz=qux")
for range 10000 { for i := 0; i < 10000; i++ {
_ = cache.getTarget(u) _ = cache.getTarget(u)
} }
@@ -129,7 +129,7 @@ func TestMemoryUsage(t *testing.T) {
runtime.GC() runtime.GC()
runtime.ReadMemStats(&m1) runtime.ReadMemStats(&m1)
for range 1000 { for i := 0; i < 1000; i++ {
_ = cache.extractOGTags(doc) _ = cache.extractOGTags(doc)
} }

View File

@@ -3,7 +3,6 @@ package ogtags
import ( import (
"context" "context"
"net/url" "net/url"
"slices"
"strings" "strings"
"testing" "testing"
"unicode/utf8" "unicode/utf8"
@@ -79,7 +78,7 @@ func FuzzGetTarget(f *testing.F) {
} }
// Ensure no memory corruption by calling multiple times // Ensure no memory corruption by calling multiple times
for range 3 { for i := 0; i < 3; i++ {
result2 := cache.getTarget(u) result2 := cache.getTarget(u)
if result != result2 { if result != result2 {
t.Errorf("getTarget not deterministic: %q != %q", result, result2) t.Errorf("getTarget not deterministic: %q != %q", result, result2)
@@ -149,8 +148,11 @@ func FuzzExtractOGTags(f *testing.F) {
} }
} }
if !approved { if !approved {
if slices.Contains(cache.approvedTags, property) { for _, tag := range cache.approvedTags {
approved = true if property == tag {
approved = true
break
}
} }
} }
if !approved { if !approved {
@@ -258,8 +260,11 @@ func FuzzExtractMetaTagInfo(f *testing.F) {
} }
} }
if !approved { if !approved {
if slices.Contains(cache.approvedTags, property) { for _, tag := range cache.approvedTags {
approved = true if property == tag {
approved = true
break
}
} }
} }
if !approved { if !approved {

View File

@@ -1,7 +1,6 @@
package ogtags package ogtags
import ( import (
"slices"
"strings" "strings"
"golang.org/x/net/html" "golang.org/x/net/html"
@@ -66,8 +65,10 @@ func (c *OGTagCache) extractMetaTagInfo(n *html.Node) (property, content string)
} }
// Check exact matches // Check exact matches
if slices.Contains(c.approvedTags, propertyKey) { for _, tag := range c.approvedTags {
return propertyKey, content if propertyKey == tag {
return propertyKey, content
}
} }
return "", content return "", content

View File

@@ -270,7 +270,7 @@ func TestPlaywrightBrowser(t *testing.T) {
var performedAction action var performedAction action
var err error var err error
for i := range 5 { for i := 0; i < 5; i++ {
performedAction, err = executeTestCase(t, tc, typ, anubisURL) performedAction, err = executeTestCase(t, tc, typ, anubisURL)
if performedAction == tc.action { if performedAction == tc.action {
break break

View File

@@ -81,11 +81,11 @@ type Server struct {
func (s *Server) getTokenKeyfunc() jwt.Keyfunc { func (s *Server) getTokenKeyfunc() jwt.Keyfunc {
// return ED25519 key if HS512 is not set // return ED25519 key if HS512 is not set
if len(s.hs512Secret) == 0 { if len(s.hs512Secret) == 0 {
return func(token *jwt.Token) (any, error) { return func(token *jwt.Token) (interface{}, error) {
return s.ed25519Priv.Public().(ed25519.PublicKey), nil return s.ed25519Priv.Public().(ed25519.PublicKey), nil
} }
} else { } else {
return func(token *jwt.Token) (any, error) { return func(token *jwt.Token) (interface{}, error) {
return s.hs512Secret, nil return s.hs512Secret, nil
} }
} }

View File

@@ -38,8 +38,8 @@ func NewTLogWriter(t *testing.T) io.Writer {
// Write splits input on newlines and logs each line separately. // Write splits input on newlines and logs each line separately.
func (w *TLogWriter) Write(p []byte) (n int, err error) { func (w *TLogWriter) Write(p []byte) (n int, err error) {
lines := strings.SplitSeq(string(p), "\n") lines := strings.Split(string(p), "\n")
for line := range lines { for _, line := range lines {
if line != "" { if line != "" {
w.t.Log(line) w.t.Log(line)
} }

View File

@@ -228,8 +228,8 @@ type ImportStatement struct {
} }
func (is *ImportStatement) open() (fs.File, error) { func (is *ImportStatement) open() (fs.File, error) {
if after, ok := strings.CutPrefix(is.Import, "(data)/"); ok { if strings.HasPrefix(is.Import, "(data)/") {
fname := after fname := strings.TrimPrefix(is.Import, "(data)/")
fin, err := data.BotPolicies.Open(fname) fin, err := data.BotPolicies.Open(fname)
return fin, err return fin, err
} }
@@ -325,7 +325,7 @@ func (sc StatusCodes) Valid() error {
} }
type fileConfig struct { type fileConfig struct {
OpenGraph openGraphFileConfig `json:"openGraph"` OpenGraph openGraphFileConfig `json:"openGraph,omitempty"`
Impressum *Impressum `json:"impressum,omitempty"` Impressum *Impressum `json:"impressum,omitempty"`
Store *Store `json:"store"` Store *Store `json:"store"`
Bots []BotOrImport `json:"bots"` Bots []BotOrImport `json:"bots"`

View File

@@ -188,6 +188,7 @@ func TestBotValid(t *testing.T) {
} }
for _, cs := range tests { for _, cs := range tests {
cs := cs
t.Run(cs.name, func(t *testing.T) { t.Run(cs.name, func(t *testing.T) {
err := cs.bot.Valid() err := cs.bot.Valid()
if err == nil && cs.err == nil { if err == nil && cs.err == nil {
@@ -215,6 +216,7 @@ func TestConfigValidKnownGood(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
fin, err := os.Open(filepath.Join("testdata", "good", st.Name())) fin, err := os.Open(filepath.Join("testdata", "good", st.Name()))
if err != nil { if err != nil {
@@ -301,6 +303,7 @@ func TestConfigValidBad(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
fin, err := os.Open(filepath.Join("testdata", "bad", st.Name())) fin, err := os.Open(filepath.Join("testdata", "bad", st.Name()))
if err != nil { if err != nil {

View File

@@ -24,6 +24,7 @@ func TestBadConfigs(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
if _, err := LoadPoliciesOrDefault(t.Context(), filepath.Join("config", "testdata", "bad", st.Name()), anubis.DefaultDifficulty, "info"); err == nil { if _, err := LoadPoliciesOrDefault(t.Context(), filepath.Join("config", "testdata", "bad", st.Name()), anubis.DefaultDifficulty, "info"); err == nil {
t.Fatal(err) t.Fatal(err)
@@ -41,6 +42,7 @@ func TestGoodConfigs(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
t.Run("with-thoth", func(t *testing.T) { t.Run("with-thoth", func(t *testing.T) {
ctx := thothmock.WithMockThoth(t) ctx := thothmock.WithMockThoth(t)

View File

@@ -182,7 +182,10 @@ func makeCode(err error) string {
enc := base64.StdEncoding.EncodeToString(buf.Bytes()) enc := base64.StdEncoding.EncodeToString(buf.Bytes())
var builder strings.Builder var builder strings.Builder
for i := 0; i < len(enc); i += width { for i := 0; i < len(enc); i += width {
end := min(i+width, len(enc)) end := i + width
if end > len(enc) {
end = len(enc)
}
builder.WriteString(enc[i:end]) builder.WriteString(enc[i:end])
builder.WriteByte('\n') builder.WriteByte('\n')
} }

View File

@@ -103,7 +103,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]any{ result, _, err := prog.Eval(map[string]interface{}{
"headers": tt.headers, "headers": tt.headers,
}) })
if err != nil { if err != nil {
@@ -168,7 +168,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]any{ result, _, err := prog.Eval(map[string]interface{}{
"path": tt.path, "path": tt.path,
}) })
if err != nil { if err != nil {
@@ -280,7 +280,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]any{}) result, _, err := prog.Eval(map[string]interface{}{})
if err != nil { if err != nil {
t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err) t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err)
} }
@@ -359,7 +359,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]any{}) result, _, err := prog.Eval(map[string]interface{}{})
if err != nil { if err != nil {
t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err) t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err)
} }
@@ -421,7 +421,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]any{}) result, _, err := prog.Eval(map[string]interface{}{})
if err != nil { if err != nil {
t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err) t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err)
} }
@@ -514,7 +514,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]any{}) result, _, err := prog.Eval(map[string]interface{}{})
if err != nil { if err != nil {
t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err) t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err)
} }
@@ -572,7 +572,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]any{}) result, _, err := prog.Eval(map[string]interface{}{})
if tt.evalError { if tt.evalError {
if err == nil { if err == nil {
t.Errorf("%s: expected an evaluation error, but got none", tt.description) t.Errorf("%s: expected an evaluation error, but got none", tt.description)
@@ -598,7 +598,7 @@ func TestThresholdEnvironment(t *testing.T) {
} }
tests := []struct { tests := []struct {
variables map[string]any variables map[string]interface{}
name string name string
expression string expression string
description string description string
@@ -608,7 +608,7 @@ func TestThresholdEnvironment(t *testing.T) {
{ {
name: "weight-variable-available", name: "weight-variable-available",
expression: `weight > 100`, expression: `weight > 100`,
variables: map[string]any{"weight": 150}, variables: map[string]interface{}{"weight": 150},
expected: types.Bool(true), expected: types.Bool(true),
description: "should support weight variable in expressions", description: "should support weight variable in expressions",
shouldCompile: true, shouldCompile: true,
@@ -616,7 +616,7 @@ func TestThresholdEnvironment(t *testing.T) {
{ {
name: "weight-variable-false-case", name: "weight-variable-false-case",
expression: `weight > 100`, expression: `weight > 100`,
variables: map[string]any{"weight": 50}, variables: map[string]interface{}{"weight": 50},
expected: types.Bool(false), expected: types.Bool(false),
description: "should correctly evaluate weight comparisons", description: "should correctly evaluate weight comparisons",
shouldCompile: true, shouldCompile: true,
@@ -624,7 +624,7 @@ func TestThresholdEnvironment(t *testing.T) {
{ {
name: "missingHeader-not-available", name: "missingHeader-not-available",
expression: `missingHeader(headers, "Test")`, expression: `missingHeader(headers, "Test")`,
variables: map[string]any{}, variables: map[string]interface{}{},
expected: types.Bool(false), // not used expected: types.Bool(false), // not used
description: "should not have missingHeader function available", description: "should not have missingHeader function available",
shouldCompile: false, shouldCompile: false,
@@ -667,7 +667,7 @@ func TestNewEnvironment(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
expression string expression string
variables map[string]any variables map[string]interface{}
expectBool *bool // nil if we just want to test compilation or non-bool result expectBool *bool // nil if we just want to test compilation or non-bool result
description string description string
shouldCompile bool shouldCompile bool
@@ -675,7 +675,7 @@ func TestNewEnvironment(t *testing.T) {
{ {
name: "randInt-function-compilation", name: "randInt-function-compilation",
expression: `randInt(10)`, expression: `randInt(10)`,
variables: map[string]any{}, variables: map[string]interface{}{},
expectBool: nil, // Don't check result, just compilation expectBool: nil, // Don't check result, just compilation
description: "should compile randInt function", description: "should compile randInt function",
shouldCompile: true, shouldCompile: true,
@@ -683,7 +683,7 @@ func TestNewEnvironment(t *testing.T) {
{ {
name: "randInt-range-validation", name: "randInt-range-validation",
expression: `randInt(10) >= 0 && randInt(10) < 10`, expression: `randInt(10) >= 0 && randInt(10) < 10`,
variables: map[string]any{}, variables: map[string]interface{}{},
expectBool: boolPtr(true), expectBool: boolPtr(true),
description: "should return values in correct range", description: "should return values in correct range",
shouldCompile: true, shouldCompile: true,
@@ -691,7 +691,7 @@ func TestNewEnvironment(t *testing.T) {
{ {
name: "strings-extension-size", name: "strings-extension-size",
expression: `"hello".size() == 5`, expression: `"hello".size() == 5`,
variables: map[string]any{}, variables: map[string]interface{}{},
expectBool: boolPtr(true), expectBool: boolPtr(true),
description: "should support string extension functions", description: "should support string extension functions",
shouldCompile: true, shouldCompile: true,
@@ -699,7 +699,7 @@ func TestNewEnvironment(t *testing.T) {
{ {
name: "strings-extension-contains", name: "strings-extension-contains",
expression: `"hello world".contains("world")`, expression: `"hello world".contains("world")`,
variables: map[string]any{}, variables: map[string]interface{}{},
expectBool: boolPtr(true), expectBool: boolPtr(true),
description: "should support string contains function", description: "should support string contains function",
shouldCompile: true, shouldCompile: true,
@@ -707,7 +707,7 @@ func TestNewEnvironment(t *testing.T) {
{ {
name: "strings-extension-startsWith", name: "strings-extension-startsWith",
expression: `"hello world".startsWith("hello")`, expression: `"hello world".startsWith("hello")`,
variables: map[string]any{}, variables: map[string]interface{}{},
expectBool: boolPtr(true), expectBool: boolPtr(true),
description: "should support string startsWith function", description: "should support string startsWith function",
shouldCompile: true, shouldCompile: true,

View File

@@ -32,6 +32,7 @@ func TestGoodConfigs(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
t.Run("with-thoth", func(t *testing.T) { t.Run("with-thoth", func(t *testing.T) {
fin, err := os.Open(filepath.Join("..", "config", "testdata", "good", st.Name())) fin, err := os.Open(filepath.Join("..", "config", "testdata", "good", st.Name()))
@@ -70,6 +71,7 @@ func TestBadConfigs(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
fin, err := os.Open(filepath.Join("..", "config", "testdata", "bad", st.Name())) fin, err := os.Open(filepath.Join("..", "config", "testdata", "bad", st.Name()))
if err != nil { if err != nil {

View File

@@ -6,7 +6,6 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
"maps"
"sync" "sync"
"testing" "testing"
"time" "time"
@@ -37,7 +36,9 @@ func (m *mockS3) PutObject(ctx context.Context, in *s3.PutObjectInput, _ ...func
m.data[aws.ToString(in.Key)] = bytes.Clone(b) m.data[aws.ToString(in.Key)] = bytes.Clone(b)
if in.Metadata != nil { if in.Metadata != nil {
m.meta[aws.ToString(in.Key)] = map[string]string{} m.meta[aws.ToString(in.Key)] = map[string]string{}
maps.Copy(m.meta[aws.ToString(in.Key)], in.Metadata) for k, v := range in.Metadata {
m.meta[aws.ToString(in.Key)][k] = v
}
} }
m.bucket = aws.ToString(in.Bucket) m.bucket = aws.ToString(in.Bucket)
return &s3.PutObjectOutput{}, nil return &s3.PutObjectOutput{}, nil

View File

@@ -103,7 +103,7 @@ func (s Sentinel) Valid() error {
// redisClient is satisfied by *valkey.Client and *valkey.ClusterClient. // redisClient is satisfied by *valkey.Client and *valkey.ClusterClient.
type redisClient interface { type redisClient interface {
Get(ctx context.Context, key string) *valkey.StringCmd Get(ctx context.Context, key string) *valkey.StringCmd
Set(ctx context.Context, key string, value any, expiration time.Duration) *valkey.StatusCmd Set(ctx context.Context, key string, value interface{}, expiration time.Duration) *valkey.StatusCmd
Del(ctx context.Context, keys ...string) *valkey.IntCmd Del(ctx context.Context, keys ...string) *valkey.IntCmd
Ping(ctx context.Context) *valkey.StatusCmd Ping(ctx context.Context) *valkey.StatusCmd
} }

View File

@@ -11,8 +11,8 @@ func authUnaryClientInterceptor(token string) grpc.UnaryClientInterceptor {
return func( return func(
ctx context.Context, ctx context.Context,
method string, method string,
req any, req interface{},
reply any, reply interface{},
cc *grpc.ClientConn, cc *grpc.ClientConn,
invoker grpc.UnaryInvoker, invoker grpc.UnaryInvoker,
opts ...grpc.CallOption, opts ...grpc.CallOption,