Compare commits

...

2 Commits

Author SHA1 Message Date
Jason Cameron
3510abb3bd chore: gofix
Signed-off-by: Jason Cameron <jason.cameron@stanwith.me>
2026-02-18 12:49:58 -05:00
Xe Iaso
35b5e78a0d chore: tag v1.25.0
Signed-off-by: Xe Iaso <me@xeiaso.net>
2026-02-18 15:56:28 +00:00
26 changed files with 90 additions and 85 deletions

View File

@@ -1 +1 @@
1.24.0 1.25.0

View File

@@ -418,8 +418,8 @@ func main() {
var redirectDomainsList []string var redirectDomainsList []string
if *redirectDomains != "" { if *redirectDomains != "" {
domains := strings.Split(*redirectDomains, ",") domains := strings.SplitSeq(*redirectDomains, ",")
for _, domain := range domains { for domain := range domains {
_, err = url.Parse(domain) _, err = url.Parse(domain)
if err != nil { if err != nil {
log.Fatalf("cannot parse redirect-domain %q: %s", domain, err.Error()) log.Fatalf("cannot parse redirect-domain %q: %s", domain, err.Error())

View File

@@ -10,6 +10,7 @@ import (
"net/http" "net/http"
"os" "os"
"regexp" "regexp"
"slices"
"strings" "strings"
"github.com/TecharoHQ/anubis/lib/config" "github.com/TecharoHQ/anubis/lib/config"
@@ -210,11 +211,8 @@ func parseRobotsTxt(input io.Reader) ([]RobotsRule, error) {
// Mark blacklisted user agents (those with "Disallow: /") // Mark blacklisted user agents (those with "Disallow: /")
for i := range rules { for i := range rules {
for _, disallow := range rules[i].Disallows { if slices.Contains(rules[i].Disallows, "/") {
if disallow == "/" { rules[i].IsBlacklist = true
rules[i].IsBlacklist = true
break
}
} }
} }

View File

@@ -158,8 +158,8 @@ func TestDataFileConversion(t *testing.T) {
} }
if strings.ToLower(*outputFormat) == "yaml" { if strings.ToLower(*outputFormat) == "yaml" {
var actualData []interface{} var actualData []any
var expectedData []interface{} var expectedData []any
err = yaml.Unmarshal(actualOutput, &actualData) err = yaml.Unmarshal(actualOutput, &actualData)
if err != nil { if err != nil {
@@ -178,8 +178,8 @@ func TestDataFileConversion(t *testing.T) {
t.Errorf("Output mismatch for %s\nExpected:\n%s\n\nActual:\n%s", tc.name, expectedStr, actualStr) t.Errorf("Output mismatch for %s\nExpected:\n%s\n\nActual:\n%s", tc.name, expectedStr, actualStr)
} }
} else { } else {
var actualData []interface{} var actualData []any
var expectedData []interface{} var expectedData []any
err = json.Unmarshal(actualOutput, &actualData) err = json.Unmarshal(actualOutput, &actualData)
if err != nil { if err != nil {
@@ -419,6 +419,6 @@ Disallow: /`
// compareData performs a deep comparison of two data structures, // compareData performs a deep comparison of two data structures,
// ignoring differences that are semantically equivalent in YAML/JSON // ignoring differences that are semantically equivalent in YAML/JSON
func compareData(actual, expected interface{}) bool { func compareData(actual, expected any) bool {
return reflect.DeepEqual(actual, expected) return reflect.DeepEqual(actual, expected)
} }

View File

@@ -11,6 +11,32 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased] ## [Unreleased]
<!-- This changes the project to: -->
## v1.25.0: Necron
Hey all,
I'm sure you've all been aware that things have been slowing down a little with Anubis development, and I want to apologize for that. A lot has been going on in my life lately (my blog will have a post out on Friday with more information), and as a result I haven't really had the energy to work on Anubis in publicly visible ways. There are things going on behind the scenes, but nothing is really shippable yet, sorry!
I've also been feeling some burnout in the wake of perennial waves of anger directed towards me. I'm handling it, I'll be fine, I've just had a lot going on in my life and it's been rough.
I've been missing the sense of wanderlust and discovery that comes with the artistic way I playfully develop software. I suspect that some of the stresses I've been through (setting up a complicated surgery in a country whose language you aren't fluent in is kind of an experience) have been sapping my energy. I'm gonna try to mess with things on my break, but realistically I'm probably just gonna be either watching Stargate SG-1 or doing unreasonable amounts of ocean fishing in Final Fantasy 14. Normally I'd love to keep the details about my medical state fairly private, but I'm more of a public figure now than I was this time last year so I don't really get the invisibility I'm used to for this.
I've also had a fair amount of negativity directed at me for simply being much more visible than the anonymous threat actors running the scrapers that are ruining everything, which though understandable has not helped.
Anyways, it all worked out and I'm about to be in the hospital for a week, so if things go really badly with this release please downgrade to the last version and/or upgrade to the main branch when the fix PR is inevitably merged. I hoped to have time to tame GPG and set up full release automation in the Anubis repo, but that didn't work out this time and that's okay.
If I can challenge you all to do something, go out there and try to actually create something new somehow. Combine ideas you've never mixed before. Be creative, be human, make something purely for yourself to scratch an itch that you've always had yet never gotten around to actually mending.
At the very least, try to be an example of how you want other people to act, even when you're in a situation where software written by someone else is configured to require a user agent to execute javascript to access a webpage.
Be well,
Xe
PS: if you're well-versed in FFXIV lore, the release title should give you an idea of the kind of stuff I've been going through mentally.
- Add iplist2rule tool that lets admins turn an IP address blocklist into an Anubis ruleset. - Add iplist2rule tool that lets admins turn an IP address blocklist into an Anubis ruleset.
- Add Polish locale ([#1292](https://github.com/TecharoHQ/anubis/pull/1309)) - Add Polish locale ([#1292](https://github.com/TecharoHQ/anubis/pull/1309))
- Fix honeypot and imprint links missing `BASE_PREFIX` when deployed behind a path prefix ([#1402](https://github.com/TecharoHQ/anubis/issues/1402)) - Fix honeypot and imprint links missing `BASE_PREFIX` when deployed behind a path prefix ([#1402](https://github.com/TecharoHQ/anubis/issues/1402))
@@ -18,8 +44,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Improve idle performance in memory storage - Improve idle performance in memory storage
- Add HAProxy Configurations to Docs ([#1424](https://github.com/TecharoHQ/anubis/pull/1424)) - Add HAProxy Configurations to Docs ([#1424](https://github.com/TecharoHQ/anubis/pull/1424))
<!-- This changes the project to: -->
## v1.24.0: Y'shtola Rhul ## v1.24.0: Y'shtola Rhul
Anubis is back and better than ever! Lots of minor fixes with some big ones interspersed. Anubis is back and better than ever! Lots of minor fixes with some big ones interspersed.

View File

@@ -36,7 +36,7 @@ func Glob(pattern, subj string) bool {
end := len(parts) - 1 end := len(parts) - 1
// Go over the leading parts and ensure they match. // Go over the leading parts and ensure they match.
for i := 0; i < end; i++ { for i := range end {
idx := strings.Index(subj, parts[i]) idx := strings.Index(subj, parts[i])
switch i { switch i {

View File

@@ -184,7 +184,7 @@ func TestHashCollisions(t *testing.T) {
for _, prefix := range prefixes { for _, prefix := range prefixes {
for _, suffix := range suffixes { for _, suffix := range suffixes {
for _, variation := range variations { for _, variation := range variations {
for i := 0; i < 100; i++ { for i := range 100 {
input := fmt.Sprintf("%s%s%s-%d", prefix, suffix, variation, i) input := fmt.Sprintf("%s%s%s-%d", prefix, suffix, variation, i)
hash := XXHash64sum(input) hash := XXHash64sum(input)
if existing, exists := xxhashHashes[hash]; exists { if existing, exists := xxhashHashes[hash]; exists {
@@ -211,7 +211,7 @@ func TestHashCollisions(t *testing.T) {
seqCount := 0 seqCount := 0
for _, pattern := range patterns { for _, pattern := range patterns {
for i := 0; i < 10000; i++ { for i := range 10000 {
input := fmt.Sprintf(pattern, i) input := fmt.Sprintf(pattern, i)
hash := XXHash64sum(input) hash := XXHash64sum(input)
if existing, exists := xxhashHashes[hash]; exists { if existing, exists := xxhashHashes[hash]; exists {

View File

@@ -120,7 +120,7 @@ func (i *Impl) makeAffirmations() []string {
count := rand.IntN(5) + 1 count := rand.IntN(5) + 1
var result []string var result []string
for j := 0; j < count; j++ { for range count {
result = append(result, i.affirmation.Spin()) result = append(result, i.affirmation.Spin())
} }
@@ -131,7 +131,7 @@ func (i *Impl) makeSpins() []string {
count := rand.IntN(5) + 1 count := rand.IntN(5) + 1
var result []string var result []string
for j := 0; j < count; j++ { for range count {
result = append(result, i.body.Spin()) result = append(result, i.body.Spin())
} }

View File

@@ -16,7 +16,7 @@ func (lo *ListOr[T]) UnmarshalJSON(data []byte) error {
// Check if first non-whitespace character is '[' // Check if first non-whitespace character is '['
firstChar := data[0] firstChar := data[0]
for i := 0; i < len(data); i++ { for i := range data {
if data[i] != ' ' && data[i] != '\t' && data[i] != '\n' && data[i] != '\r' { if data[i] != ' ' && data[i] != '\t' && data[i] != '\n' && data[i] != '\r' {
firstChar = data[i] firstChar = data[i]
break break
@@ -36,4 +36,4 @@ func (lo *ListOr[T]) UnmarshalJSON(data []byte) error {
} }
return nil return nil
} }

View File

@@ -95,7 +95,7 @@ func TestMemoryUsage(t *testing.T) {
// Run getTarget many times // Run getTarget many times
u, _ := url.Parse("/path/to/resource?query=1&foo=bar&baz=qux") u, _ := url.Parse("/path/to/resource?query=1&foo=bar&baz=qux")
for i := 0; i < 10000; i++ { for range 10000 {
_ = cache.getTarget(u) _ = cache.getTarget(u)
} }
@@ -129,7 +129,7 @@ func TestMemoryUsage(t *testing.T) {
runtime.GC() runtime.GC()
runtime.ReadMemStats(&m1) runtime.ReadMemStats(&m1)
for i := 0; i < 1000; i++ { for range 1000 {
_ = cache.extractOGTags(doc) _ = cache.extractOGTags(doc)
} }

View File

@@ -3,6 +3,7 @@ package ogtags
import ( import (
"context" "context"
"net/url" "net/url"
"slices"
"strings" "strings"
"testing" "testing"
"unicode/utf8" "unicode/utf8"
@@ -78,7 +79,7 @@ func FuzzGetTarget(f *testing.F) {
} }
// Ensure no memory corruption by calling multiple times // Ensure no memory corruption by calling multiple times
for i := 0; i < 3; i++ { for range 3 {
result2 := cache.getTarget(u) result2 := cache.getTarget(u)
if result != result2 { if result != result2 {
t.Errorf("getTarget not deterministic: %q != %q", result, result2) t.Errorf("getTarget not deterministic: %q != %q", result, result2)
@@ -148,11 +149,8 @@ func FuzzExtractOGTags(f *testing.F) {
} }
} }
if !approved { if !approved {
for _, tag := range cache.approvedTags { if slices.Contains(cache.approvedTags, property) {
if property == tag { approved = true
approved = true
break
}
} }
} }
if !approved { if !approved {
@@ -260,11 +258,8 @@ func FuzzExtractMetaTagInfo(f *testing.F) {
} }
} }
if !approved { if !approved {
for _, tag := range cache.approvedTags { if slices.Contains(cache.approvedTags, property) {
if property == tag { approved = true
approved = true
break
}
} }
} }
if !approved { if !approved {

View File

@@ -1,6 +1,7 @@
package ogtags package ogtags
import ( import (
"slices"
"strings" "strings"
"golang.org/x/net/html" "golang.org/x/net/html"
@@ -65,10 +66,8 @@ func (c *OGTagCache) extractMetaTagInfo(n *html.Node) (property, content string)
} }
// Check exact matches // Check exact matches
for _, tag := range c.approvedTags { if slices.Contains(c.approvedTags, propertyKey) {
if propertyKey == tag { return propertyKey, content
return propertyKey, content
}
} }
return "", content return "", content

View File

@@ -270,7 +270,7 @@ func TestPlaywrightBrowser(t *testing.T) {
var performedAction action var performedAction action
var err error var err error
for i := 0; i < 5; i++ { for i := range 5 {
performedAction, err = executeTestCase(t, tc, typ, anubisURL) performedAction, err = executeTestCase(t, tc, typ, anubisURL)
if performedAction == tc.action { if performedAction == tc.action {
break break

View File

@@ -81,11 +81,11 @@ type Server struct {
func (s *Server) getTokenKeyfunc() jwt.Keyfunc { func (s *Server) getTokenKeyfunc() jwt.Keyfunc {
// return ED25519 key if HS512 is not set // return ED25519 key if HS512 is not set
if len(s.hs512Secret) == 0 { if len(s.hs512Secret) == 0 {
return func(token *jwt.Token) (interface{}, error) { return func(token *jwt.Token) (any, error) {
return s.ed25519Priv.Public().(ed25519.PublicKey), nil return s.ed25519Priv.Public().(ed25519.PublicKey), nil
} }
} else { } else {
return func(token *jwt.Token) (interface{}, error) { return func(token *jwt.Token) (any, error) {
return s.hs512Secret, nil return s.hs512Secret, nil
} }
} }

View File

@@ -38,8 +38,8 @@ func NewTLogWriter(t *testing.T) io.Writer {
// Write splits input on newlines and logs each line separately. // Write splits input on newlines and logs each line separately.
func (w *TLogWriter) Write(p []byte) (n int, err error) { func (w *TLogWriter) Write(p []byte) (n int, err error) {
lines := strings.Split(string(p), "\n") lines := strings.SplitSeq(string(p), "\n")
for _, line := range lines { for line := range lines {
if line != "" { if line != "" {
w.t.Log(line) w.t.Log(line)
} }

View File

@@ -228,8 +228,8 @@ type ImportStatement struct {
} }
func (is *ImportStatement) open() (fs.File, error) { func (is *ImportStatement) open() (fs.File, error) {
if strings.HasPrefix(is.Import, "(data)/") { if after, ok := strings.CutPrefix(is.Import, "(data)/"); ok {
fname := strings.TrimPrefix(is.Import, "(data)/") fname := after
fin, err := data.BotPolicies.Open(fname) fin, err := data.BotPolicies.Open(fname)
return fin, err return fin, err
} }
@@ -325,7 +325,7 @@ func (sc StatusCodes) Valid() error {
} }
type fileConfig struct { type fileConfig struct {
OpenGraph openGraphFileConfig `json:"openGraph,omitempty"` OpenGraph openGraphFileConfig `json:"openGraph"`
Impressum *Impressum `json:"impressum,omitempty"` Impressum *Impressum `json:"impressum,omitempty"`
Store *Store `json:"store"` Store *Store `json:"store"`
Bots []BotOrImport `json:"bots"` Bots []BotOrImport `json:"bots"`

View File

@@ -188,7 +188,6 @@ func TestBotValid(t *testing.T) {
} }
for _, cs := range tests { for _, cs := range tests {
cs := cs
t.Run(cs.name, func(t *testing.T) { t.Run(cs.name, func(t *testing.T) {
err := cs.bot.Valid() err := cs.bot.Valid()
if err == nil && cs.err == nil { if err == nil && cs.err == nil {
@@ -216,7 +215,6 @@ func TestConfigValidKnownGood(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
fin, err := os.Open(filepath.Join("testdata", "good", st.Name())) fin, err := os.Open(filepath.Join("testdata", "good", st.Name()))
if err != nil { if err != nil {
@@ -303,7 +301,6 @@ func TestConfigValidBad(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
fin, err := os.Open(filepath.Join("testdata", "bad", st.Name())) fin, err := os.Open(filepath.Join("testdata", "bad", st.Name()))
if err != nil { if err != nil {

View File

@@ -24,7 +24,6 @@ func TestBadConfigs(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
if _, err := LoadPoliciesOrDefault(t.Context(), filepath.Join("config", "testdata", "bad", st.Name()), anubis.DefaultDifficulty, "info"); err == nil { if _, err := LoadPoliciesOrDefault(t.Context(), filepath.Join("config", "testdata", "bad", st.Name()), anubis.DefaultDifficulty, "info"); err == nil {
t.Fatal(err) t.Fatal(err)
@@ -42,7 +41,6 @@ func TestGoodConfigs(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
t.Run("with-thoth", func(t *testing.T) { t.Run("with-thoth", func(t *testing.T) {
ctx := thothmock.WithMockThoth(t) ctx := thothmock.WithMockThoth(t)

View File

@@ -182,10 +182,7 @@ func makeCode(err error) string {
enc := base64.StdEncoding.EncodeToString(buf.Bytes()) enc := base64.StdEncoding.EncodeToString(buf.Bytes())
var builder strings.Builder var builder strings.Builder
for i := 0; i < len(enc); i += width { for i := 0; i < len(enc); i += width {
end := i + width end := min(i+width, len(enc))
if end > len(enc) {
end = len(enc)
}
builder.WriteString(enc[i:end]) builder.WriteString(enc[i:end])
builder.WriteByte('\n') builder.WriteByte('\n')
} }

View File

@@ -103,7 +103,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]interface{}{ result, _, err := prog.Eval(map[string]any{
"headers": tt.headers, "headers": tt.headers,
}) })
if err != nil { if err != nil {
@@ -168,7 +168,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]interface{}{ result, _, err := prog.Eval(map[string]any{
"path": tt.path, "path": tt.path,
}) })
if err != nil { if err != nil {
@@ -280,7 +280,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]interface{}{}) result, _, err := prog.Eval(map[string]any{})
if err != nil { if err != nil {
t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err) t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err)
} }
@@ -359,7 +359,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]interface{}{}) result, _, err := prog.Eval(map[string]any{})
if err != nil { if err != nil {
t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err) t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err)
} }
@@ -421,7 +421,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]interface{}{}) result, _, err := prog.Eval(map[string]any{})
if err != nil { if err != nil {
t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err) t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err)
} }
@@ -514,7 +514,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]interface{}{}) result, _, err := prog.Eval(map[string]any{})
if err != nil { if err != nil {
t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err) t.Fatalf("failed to evaluate expression %q: %v", tt.expression, err)
} }
@@ -572,7 +572,7 @@ func TestBotEnvironment(t *testing.T) {
t.Fatalf("failed to compile expression %q: %v", tt.expression, err) t.Fatalf("failed to compile expression %q: %v", tt.expression, err)
} }
result, _, err := prog.Eval(map[string]interface{}{}) result, _, err := prog.Eval(map[string]any{})
if tt.evalError { if tt.evalError {
if err == nil { if err == nil {
t.Errorf("%s: expected an evaluation error, but got none", tt.description) t.Errorf("%s: expected an evaluation error, but got none", tt.description)
@@ -598,7 +598,7 @@ func TestThresholdEnvironment(t *testing.T) {
} }
tests := []struct { tests := []struct {
variables map[string]interface{} variables map[string]any
name string name string
expression string expression string
description string description string
@@ -608,7 +608,7 @@ func TestThresholdEnvironment(t *testing.T) {
{ {
name: "weight-variable-available", name: "weight-variable-available",
expression: `weight > 100`, expression: `weight > 100`,
variables: map[string]interface{}{"weight": 150}, variables: map[string]any{"weight": 150},
expected: types.Bool(true), expected: types.Bool(true),
description: "should support weight variable in expressions", description: "should support weight variable in expressions",
shouldCompile: true, shouldCompile: true,
@@ -616,7 +616,7 @@ func TestThresholdEnvironment(t *testing.T) {
{ {
name: "weight-variable-false-case", name: "weight-variable-false-case",
expression: `weight > 100`, expression: `weight > 100`,
variables: map[string]interface{}{"weight": 50}, variables: map[string]any{"weight": 50},
expected: types.Bool(false), expected: types.Bool(false),
description: "should correctly evaluate weight comparisons", description: "should correctly evaluate weight comparisons",
shouldCompile: true, shouldCompile: true,
@@ -624,7 +624,7 @@ func TestThresholdEnvironment(t *testing.T) {
{ {
name: "missingHeader-not-available", name: "missingHeader-not-available",
expression: `missingHeader(headers, "Test")`, expression: `missingHeader(headers, "Test")`,
variables: map[string]interface{}{}, variables: map[string]any{},
expected: types.Bool(false), // not used expected: types.Bool(false), // not used
description: "should not have missingHeader function available", description: "should not have missingHeader function available",
shouldCompile: false, shouldCompile: false,
@@ -667,7 +667,7 @@ func TestNewEnvironment(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
expression string expression string
variables map[string]interface{} variables map[string]any
expectBool *bool // nil if we just want to test compilation or non-bool result expectBool *bool // nil if we just want to test compilation or non-bool result
description string description string
shouldCompile bool shouldCompile bool
@@ -675,7 +675,7 @@ func TestNewEnvironment(t *testing.T) {
{ {
name: "randInt-function-compilation", name: "randInt-function-compilation",
expression: `randInt(10)`, expression: `randInt(10)`,
variables: map[string]interface{}{}, variables: map[string]any{},
expectBool: nil, // Don't check result, just compilation expectBool: nil, // Don't check result, just compilation
description: "should compile randInt function", description: "should compile randInt function",
shouldCompile: true, shouldCompile: true,
@@ -683,7 +683,7 @@ func TestNewEnvironment(t *testing.T) {
{ {
name: "randInt-range-validation", name: "randInt-range-validation",
expression: `randInt(10) >= 0 && randInt(10) < 10`, expression: `randInt(10) >= 0 && randInt(10) < 10`,
variables: map[string]interface{}{}, variables: map[string]any{},
expectBool: boolPtr(true), expectBool: boolPtr(true),
description: "should return values in correct range", description: "should return values in correct range",
shouldCompile: true, shouldCompile: true,
@@ -691,7 +691,7 @@ func TestNewEnvironment(t *testing.T) {
{ {
name: "strings-extension-size", name: "strings-extension-size",
expression: `"hello".size() == 5`, expression: `"hello".size() == 5`,
variables: map[string]interface{}{}, variables: map[string]any{},
expectBool: boolPtr(true), expectBool: boolPtr(true),
description: "should support string extension functions", description: "should support string extension functions",
shouldCompile: true, shouldCompile: true,
@@ -699,7 +699,7 @@ func TestNewEnvironment(t *testing.T) {
{ {
name: "strings-extension-contains", name: "strings-extension-contains",
expression: `"hello world".contains("world")`, expression: `"hello world".contains("world")`,
variables: map[string]interface{}{}, variables: map[string]any{},
expectBool: boolPtr(true), expectBool: boolPtr(true),
description: "should support string contains function", description: "should support string contains function",
shouldCompile: true, shouldCompile: true,
@@ -707,7 +707,7 @@ func TestNewEnvironment(t *testing.T) {
{ {
name: "strings-extension-startsWith", name: "strings-extension-startsWith",
expression: `"hello world".startsWith("hello")`, expression: `"hello world".startsWith("hello")`,
variables: map[string]interface{}{}, variables: map[string]any{},
expectBool: boolPtr(true), expectBool: boolPtr(true),
description: "should support string startsWith function", description: "should support string startsWith function",
shouldCompile: true, shouldCompile: true,

View File

@@ -32,7 +32,6 @@ func TestGoodConfigs(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
t.Run("with-thoth", func(t *testing.T) { t.Run("with-thoth", func(t *testing.T) {
fin, err := os.Open(filepath.Join("..", "config", "testdata", "good", st.Name())) fin, err := os.Open(filepath.Join("..", "config", "testdata", "good", st.Name()))
@@ -71,7 +70,6 @@ func TestBadConfigs(t *testing.T) {
} }
for _, st := range finfos { for _, st := range finfos {
st := st
t.Run(st.Name(), func(t *testing.T) { t.Run(st.Name(), func(t *testing.T) {
fin, err := os.Open(filepath.Join("..", "config", "testdata", "bad", st.Name())) fin, err := os.Open(filepath.Join("..", "config", "testdata", "bad", st.Name()))
if err != nil { if err != nil {

View File

@@ -6,6 +6,7 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
"maps"
"sync" "sync"
"testing" "testing"
"time" "time"
@@ -36,9 +37,7 @@ func (m *mockS3) PutObject(ctx context.Context, in *s3.PutObjectInput, _ ...func
m.data[aws.ToString(in.Key)] = bytes.Clone(b) m.data[aws.ToString(in.Key)] = bytes.Clone(b)
if in.Metadata != nil { if in.Metadata != nil {
m.meta[aws.ToString(in.Key)] = map[string]string{} m.meta[aws.ToString(in.Key)] = map[string]string{}
for k, v := range in.Metadata { maps.Copy(m.meta[aws.ToString(in.Key)], in.Metadata)
m.meta[aws.ToString(in.Key)][k] = v
}
} }
m.bucket = aws.ToString(in.Bucket) m.bucket = aws.ToString(in.Bucket)
return &s3.PutObjectOutput{}, nil return &s3.PutObjectOutput{}, nil

View File

@@ -103,7 +103,7 @@ func (s Sentinel) Valid() error {
// redisClient is satisfied by *valkey.Client and *valkey.ClusterClient. // redisClient is satisfied by *valkey.Client and *valkey.ClusterClient.
type redisClient interface { type redisClient interface {
Get(ctx context.Context, key string) *valkey.StringCmd Get(ctx context.Context, key string) *valkey.StringCmd
Set(ctx context.Context, key string, value interface{}, expiration time.Duration) *valkey.StatusCmd Set(ctx context.Context, key string, value any, expiration time.Duration) *valkey.StatusCmd
Del(ctx context.Context, keys ...string) *valkey.IntCmd Del(ctx context.Context, keys ...string) *valkey.IntCmd
Ping(ctx context.Context) *valkey.StatusCmd Ping(ctx context.Context) *valkey.StatusCmd
} }

View File

@@ -11,8 +11,8 @@ func authUnaryClientInterceptor(token string) grpc.UnaryClientInterceptor {
return func( return func(
ctx context.Context, ctx context.Context,
method string, method string,
req interface{}, req any,
reply interface{}, reply any,
cc *grpc.ClientConn, cc *grpc.ClientConn,
invoker grpc.UnaryInvoker, invoker grpc.UnaryInvoker,
opts ...grpc.CallOption, opts ...grpc.CallOption,

4
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{ {
"name": "@techaro/anubis", "name": "@techaro/anubis",
"version": "1.24.0", "version": "1.25.0",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "@techaro/anubis", "name": "@techaro/anubis",
"version": "1.24.0", "version": "1.25.0",
"license": "ISC", "license": "ISC",
"dependencies": { "dependencies": {
"@aws-crypto/sha256-js": "^5.2.0", "@aws-crypto/sha256-js": "^5.2.0",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@techaro/anubis", "name": "@techaro/anubis",
"version": "1.24.0", "version": "1.25.0",
"description": "", "description": "",
"main": "index.js", "main": "index.js",
"scripts": { "scripts": {
@@ -66,4 +66,4 @@
"trailingComma": "all", "trailingComma": "all",
"printWidth": 80 "printWidth": 80
} }
} }