mirror of https://github.com/TecharoHQ/anubis.git (synced 2026-04-09 10:08:45 +00:00)

Compare commits: fix/multib ... json/docs (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | a8393df522 | |
| | 74c85bb971 | |
| | 4a527a304b | |
| | c2ead79823 | |
```diff
@@ -29,7 +29,7 @@ var (
 )
 
 type RobotsRule struct {
-	UserAgents []string
+	UserAgent  string
 	Disallows  []string
 	Allows     []string
 	CrawlDelay int
```
```diff
@@ -130,26 +130,10 @@ func main() {
 	}
 }
 
-func createRuleFromAccumulated(userAgents, disallows, allows []string, crawlDelay int) RobotsRule {
-	rule := RobotsRule{
-		UserAgents: make([]string, len(userAgents)),
-		Disallows:  make([]string, len(disallows)),
-		Allows:     make([]string, len(allows)),
-		CrawlDelay: crawlDelay,
-	}
-	copy(rule.UserAgents, userAgents)
-	copy(rule.Disallows, disallows)
-	copy(rule.Allows, allows)
-	return rule
-}
-
 func parseRobotsTxt(input io.Reader) ([]RobotsRule, error) {
 	scanner := bufio.NewScanner(input)
 	var rules []RobotsRule
-	var currentUserAgents []string
-	var currentDisallows []string
-	var currentAllows []string
-	var currentCrawlDelay int
+	var currentRule *RobotsRule
 
 	for scanner.Scan() {
 		line := strings.TrimSpace(scanner.Text())
```
```diff
@@ -170,42 +154,38 @@ func parseRobotsTxt(input io.Reader) ([]RobotsRule, error) {
 
 		switch directive {
 		case "user-agent":
-			// If we have accumulated rules with directives and encounter a new user-agent,
-			// flush the current rules
-			if len(currentUserAgents) > 0 && (len(currentDisallows) > 0 || len(currentAllows) > 0 || currentCrawlDelay > 0) {
-				rule := createRuleFromAccumulated(currentUserAgents, currentDisallows, currentAllows, currentCrawlDelay)
-				rules = append(rules, rule)
-				// Reset for next group
-				currentUserAgents = nil
-				currentDisallows = nil
-				currentAllows = nil
-				currentCrawlDelay = 0
+			// Start a new rule section
+			if currentRule != nil {
+				rules = append(rules, *currentRule)
+			}
+			currentRule = &RobotsRule{
+				UserAgent: value,
+				Disallows: make([]string, 0),
+				Allows:    make([]string, 0),
 			}
-			currentUserAgents = append(currentUserAgents, value)
 
 		case "disallow":
-			if len(currentUserAgents) > 0 && value != "" {
-				currentDisallows = append(currentDisallows, value)
+			if currentRule != nil && value != "" {
+				currentRule.Disallows = append(currentRule.Disallows, value)
 			}
 
 		case "allow":
-			if len(currentUserAgents) > 0 && value != "" {
-				currentAllows = append(currentAllows, value)
+			if currentRule != nil && value != "" {
+				currentRule.Allows = append(currentRule.Allows, value)
 			}
 
 		case "crawl-delay":
-			if len(currentUserAgents) > 0 {
+			if currentRule != nil {
 				if delay, err := parseIntSafe(value); err == nil {
-					currentCrawlDelay = delay
+					currentRule.CrawlDelay = delay
 				}
 			}
 		}
 	}
 
-	// Don't forget the last group of rules
-	if len(currentUserAgents) > 0 {
-		rule := createRuleFromAccumulated(currentUserAgents, currentDisallows, currentAllows, currentCrawlDelay)
-		rules = append(rules, rule)
+	// Don't forget the last rule
+	if currentRule != nil {
+		rules = append(rules, *currentRule)
 	}
 
 	// Mark blacklisted user agents (those with "Disallow: /")
```
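The practical effect of this revert: consecutive `User-agent` lines no longer share their directives. Each `User-agent` line starts a fresh rule, and the previous rule is flushed as soon as the next one appears. A minimal test-style sketch of that behavior (same package as the parser; needs `strings` and `testing`; assumes the directive/value splitting shown in the context above):

```go
func TestConsecutiveUserAgentsAreSplit(t *testing.T) {
	input := "User-agent: BadBot\nUser-agent: SpamBot\nDisallow: /\n"
	rules, err := parseRobotsTxt(strings.NewReader(input))
	if err != nil {
		t.Fatal(err)
	}
	// BadBot's rule is flushed with no directives when the SpamBot
	// line is seen, so only SpamBot carries the Disallow.
	if len(rules) != 2 {
		t.Fatalf("want 2 rules, got %d", len(rules))
	}
	if rules[0].UserAgent != "BadBot" || len(rules[0].Disallows) != 0 {
		t.Fatalf("unexpected first rule: %+v", rules[0])
	}
	if rules[1].UserAgent != "SpamBot" || len(rules[1].Disallows) != 1 {
		t.Fatalf("unexpected second rule: %+v", rules[1])
	}
}
```

Under the grouped `UserAgents []string` behavior removed here, the same input yielded a single rule covering both bots.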
```diff
@@ -231,11 +211,10 @@ func convertToAnubisRules(robotsRules []RobotsRule) []AnubisRule {
 	var anubisRules []AnubisRule
 	ruleCounter := 0
 
-	// Process each robots rule individually
 	for _, robotsRule := range robotsRules {
-		userAgents := robotsRule.UserAgents
+		userAgent := robotsRule.UserAgent
 
-		// Handle crawl delay
+		// Handle crawl delay as weight adjustment (do this first before any continues)
 		if robotsRule.CrawlDelay > 0 && *crawlDelay > 0 {
 			ruleCounter++
 			rule := AnubisRule{
```
```diff
@@ -244,32 +223,20 @@ func convertToAnubisRules(robotsRules []RobotsRule) []AnubisRule {
 				Weight: &config.Weight{Adjust: *crawlDelay},
 			}
 
-			if len(userAgents) == 1 && userAgents[0] == "*" {
+			if userAgent == "*" {
 				rule.Expression = &config.ExpressionOrList{
 					All: []string{"true"}, // Always applies
 				}
-			} else if len(userAgents) == 1 {
-				rule.Expression = &config.ExpressionOrList{
-					All: []string{fmt.Sprintf("userAgent.contains(%q)", userAgents[0])},
-				}
 			} else {
-				// Multiple user agents - use any block
-				var expressions []string
-				for _, ua := range userAgents {
-					if ua == "*" {
-						expressions = append(expressions, "true")
-					} else {
-						expressions = append(expressions, fmt.Sprintf("userAgent.contains(%q)", ua))
-					}
-				}
 				rule.Expression = &config.ExpressionOrList{
-					Any: expressions,
+					All: []string{fmt.Sprintf("userAgent.contains(%q)", userAgent)},
 				}
 			}
 
 			anubisRules = append(anubisRules, rule)
 		}
 
-		// Handle blacklisted user agents
+		// Handle blacklisted user agents (complete deny/challenge)
 		if robotsRule.IsBlacklist {
 			ruleCounter++
 			rule := AnubisRule{
```
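For the crawl-delay branch just above: with a wildcard agent the emitted rule is `WEIGH` with a catch-all expression, and the adjustment comes straight from the converter's crawl-delay weight flag. A rough sketch of that expectation (the flag is set explicitly because the branch is gated on it being non-zero; field types are as implied by the diff):

```go
func TestCrawlDelayBecomesWeigh(t *testing.T) {
	*crawlDelay = 3 // branch only fires when the weight flag is non-zero
	rules := convertToAnubisRules([]RobotsRule{{
		UserAgent:  "*",
		CrawlDelay: 10,
	}})
	if len(rules) != 1 || rules[0].Action != "WEIGH" {
		t.Fatalf("unexpected rules: %+v", rules)
	}
	if rules[0].Weight == nil || rules[0].Weight.Adjust != 3 {
		t.Fatalf("want +3 adjust, got %+v", rules[0].Weight)
	}
	if rules[0].Expression == nil || len(rules[0].Expression.All) != 1 || rules[0].Expression.All[0] != "true" {
		t.Fatalf("want catch-all expression, got %+v", rules[0].Expression)
	}
}
```

This matches the first entry of the deleted `consecutive.yaml` fixture below (`action: WEIGH`, `expression: "true"`, `adjust: 3`).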
```diff
@@ -277,36 +244,21 @@ func convertToAnubisRules(robotsRules []RobotsRule) []AnubisRule {
 				Action: *userAgentDeny,
 			}
 
-			if len(userAgents) == 1 {
-				userAgent := userAgents[0]
-				if userAgent == "*" {
-					// This would block everything - convert to a weight adjustment instead
-					rule.Name = fmt.Sprintf("%s-global-restriction-%d", *policyName, ruleCounter)
-					rule.Action = "WEIGH"
-					rule.Weight = &config.Weight{Adjust: 20} // Increase difficulty significantly
-					rule.Expression = &config.ExpressionOrList{
-						All: []string{"true"}, // Always applies
-					}
-				} else {
-					rule.Expression = &config.ExpressionOrList{
-						All: []string{fmt.Sprintf("userAgent.contains(%q)", userAgent)},
-					}
-				}
+			if userAgent == "*" {
+				// This would block everything - convert to a weight adjustment instead
+				rule.Name = fmt.Sprintf("%s-global-restriction-%d", *policyName, ruleCounter)
+				rule.Action = "WEIGH"
+				rule.Weight = &config.Weight{Adjust: 20} // Increase difficulty significantly
+				rule.Expression = &config.ExpressionOrList{
+					All: []string{"true"}, // Always applies
+				}
 			} else {
-				// Multiple user agents - use any block
-				var expressions []string
-				for _, ua := range userAgents {
-					if ua == "*" {
-						expressions = append(expressions, "true")
-					} else {
-						expressions = append(expressions, fmt.Sprintf("userAgent.contains(%q)", ua))
-					}
-				}
 				rule.Expression = &config.ExpressionOrList{
-					Any: expressions,
+					All: []string{fmt.Sprintf("userAgent.contains(%q)", userAgent)},
 				}
 			}
 			anubisRules = append(anubisRules, rule)
+			continue
 		}
 
 		// Handle specific disallow rules
```
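One edge case in the blacklist branch above: `User-agent: *` with `Disallow: /` would deny every client, so the converter downgrades it to a `WEIGH` rule with a +20 adjustment instead of a global deny. A sketch of that expectation (assumes the package's flag variables hold their defaults):

```go
func TestWildcardBlacklistBecomesWeigh(t *testing.T) {
	rules := convertToAnubisRules([]RobotsRule{{
		UserAgent:   "*",
		Disallows:   []string{"/"},
		IsBlacklist: true,
	}})
	// With the added `continue`, the blacklist rule is the only output.
	if len(rules) != 1 {
		t.Fatalf("want 1 rule, got %d", len(rules))
	}
	if rules[0].Action != "WEIGH" || rules[0].Weight == nil || rules[0].Weight.Adjust != 20 {
		t.Fatalf("unexpected rule: %+v", rules[0])
	}
}
```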
```diff
@@ -324,33 +276,9 @@ func convertToAnubisRules(robotsRules []RobotsRule) []AnubisRule {
 			// Build CEL expression
 			var conditions []string
 
-			// Add user agent conditions
-			if len(userAgents) == 1 && userAgents[0] == "*" {
-				// Wildcard user agent - no user agent condition needed
-			} else if len(userAgents) == 1 {
-				conditions = append(conditions, fmt.Sprintf("userAgent.contains(%q)", userAgents[0]))
-			} else {
-				// For multiple user agents, we need to use a more complex expression
-				// This is a limitation - we can't easily combine any for user agents with all for path
-				// So we'll create separate rules for each user agent
-				for _, ua := range userAgents {
-					if ua == "*" {
-						continue // Skip wildcard as it's handled separately
-					}
-					ruleCounter++
-					subRule := AnubisRule{
-						Name:   fmt.Sprintf("%s-disallow-%d", *policyName, ruleCounter),
-						Action: *baseAction,
-						Expression: &config.ExpressionOrList{
-							All: []string{
-								fmt.Sprintf("userAgent.contains(%q)", ua),
-								buildPathCondition(disallow),
-							},
-						},
-					}
-					anubisRules = append(anubisRules, subRule)
-				}
-				continue
+			// Add user agent condition if not wildcard
+			if userAgent != "*" {
+				conditions = append(conditions, fmt.Sprintf("userAgent.contains(%q)", userAgent))
 			}
 
 			// Add path condition
```
```diff
@@ -363,6 +291,7 @@ func convertToAnubisRules(robotsRules []RobotsRule) []AnubisRule {
 
 			anubisRules = append(anubisRules, rule)
 		}
+
 	}
 
 	return anubisRules
```
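`buildPathCondition` (called in the disallow handling above) never appears in these hunks, but the fixtures below pin its behavior down: a literal path becomes `path.startsWith`, while robots.txt wildcards become an anchored `path.matches` regex. A hedged reconstruction that reproduces the fixture expressions, not the verbatim source:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Inferred mapping: plain prefixes use path.startsWith; patterns with
// the robots.txt wildcards * and ? compile to an anchored CEL regex.
func buildPathCondition(pattern string) string {
	if !strings.ContainsAny(pattern, "*?") {
		return fmt.Sprintf("path.startsWith(%q)", pattern)
	}
	var re strings.Builder
	re.WriteString("^")
	for _, r := range pattern {
		switch r {
		case '*':
			re.WriteString(".*")
		case '?':
			re.WriteString(".")
		default:
			re.WriteString(regexp.QuoteMeta(string(r)))
		}
	}
	return fmt.Sprintf("path.matches(%q)", re.String())
}

func main() {
	for _, p := range []string{"/admin/", "/*/admin", "/temp*.html", "/file?.log"} {
		fmt.Println(buildPathCondition(p))
	}
	// Output:
	// path.startsWith("/admin/")
	// path.matches("^/.*/admin")
	// path.matches("^/temp.*\\.html")
	// path.matches("^/file.\\.log")
}
```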
```diff
@@ -78,12 +78,6 @@ func TestDataFileConversion(t *testing.T) {
 			expectedFile: "complex.yaml",
 			options:      TestOptions{format: "yaml", crawlDelayWeight: 5},
 		},
-		{
-			name:         "consecutive_user_agents",
-			robotsFile:   "consecutive.robots.txt",
-			expectedFile: "consecutive.yaml",
-			options:      TestOptions{format: "yaml", crawlDelayWeight: 3},
-		},
 	}
 
 	for _, tc := range testCases {
```
6 cmd/robots2policy/testdata/blacklist.yaml vendored

```diff
@@ -25,6 +25,6 @@
 - action: CHALLENGE
   expression:
     all:
     - userAgent.contains("Googlebot")
     - path.startsWith("/search")
   name: robots-txt-policy-disallow-7
```
24 cmd/robots2policy/testdata/complex.yaml vendored

```diff
@@ -20,8 +20,8 @@
 - action: CHALLENGE
   expression:
     all:
     - userAgent.contains("Googlebot")
     - path.startsWith("/search/")
   name: robots-txt-policy-disallow-6
 - action: WEIGH
   expression: userAgent.contains("Bingbot")
@@ -31,14 +31,14 @@
 - action: CHALLENGE
   expression:
     all:
     - userAgent.contains("Bingbot")
     - path.startsWith("/search/")
   name: robots-txt-policy-disallow-8
 - action: CHALLENGE
   expression:
     all:
     - userAgent.contains("Bingbot")
     - path.startsWith("/admin/")
   name: robots-txt-policy-disallow-9
 - action: DENY
   expression: userAgent.contains("BadBot")
@@ -54,18 +54,18 @@
 - action: CHALLENGE
   expression:
     all:
     - userAgent.contains("TestBot")
     - path.matches("^/.*/admin")
   name: robots-txt-policy-disallow-13
 - action: CHALLENGE
   expression:
     all:
     - userAgent.contains("TestBot")
     - path.matches("^/temp.*\\.html")
   name: robots-txt-policy-disallow-14
 - action: CHALLENGE
   expression:
     all:
     - userAgent.contains("TestBot")
     - path.matches("^/file.\\.log")
   name: robots-txt-policy-disallow-15
```
25 cmd/robots2policy/testdata/consecutive.robots.txt vendored (deleted)

```diff
@@ -1,25 +0,0 @@
-# Test consecutive user agents that should be grouped into any: blocks
-User-agent: *
-Disallow: /admin
-Crawl-delay: 10
-
-# Multiple consecutive user agents - should be grouped
-User-agent: BadBot
-User-agent: SpamBot
-User-agent: EvilBot
-Disallow: /
-
-# Single user agent - should be separate
-User-agent: GoodBot
-Disallow: /private
-
-# Multiple consecutive user agents with crawl delay
-User-agent: SlowBot1
-User-agent: SlowBot2
-Crawl-delay: 5
-
-# Multiple consecutive user agents with specific path
-User-agent: SearchBot1
-User-agent: SearchBot2
-User-agent: SearchBot3
-Disallow: /search
```
47 cmd/robots2policy/testdata/consecutive.yaml vendored (deleted)

```diff
@@ -1,47 +0,0 @@
-- action: WEIGH
-  expression: "true"
-  name: robots-txt-policy-crawl-delay-1
-  weight:
-    adjust: 3
-- action: CHALLENGE
-  expression: path.startsWith("/admin")
-  name: robots-txt-policy-disallow-2
-- action: DENY
-  expression:
-    any:
-    - userAgent.contains("BadBot")
-    - userAgent.contains("SpamBot")
-    - userAgent.contains("EvilBot")
-  name: robots-txt-policy-blacklist-3
-- action: CHALLENGE
-  expression:
-    all:
-    - userAgent.contains("GoodBot")
-    - path.startsWith("/private")
-  name: robots-txt-policy-disallow-4
-- action: WEIGH
-  expression:
-    any:
-    - userAgent.contains("SlowBot1")
-    - userAgent.contains("SlowBot2")
-  name: robots-txt-policy-crawl-delay-5
-  weight:
-    adjust: 3
-- action: CHALLENGE
-  expression:
-    all:
-    - userAgent.contains("SearchBot1")
-    - path.startsWith("/search")
-  name: robots-txt-policy-disallow-7
-- action: CHALLENGE
-  expression:
-    all:
-    - userAgent.contains("SearchBot2")
-    - path.startsWith("/search")
-  name: robots-txt-policy-disallow-8
-- action: CHALLENGE
-  expression:
-    all:
-    - userAgent.contains("SearchBot3")
-    - path.startsWith("/search")
-  name: robots-txt-policy-disallow-9
```
8 cmd/robots2policy/testdata/simple.json vendored

```diff
@@ -1,12 +1,12 @@
 [
   {
+    "action": "CHALLENGE",
     "expression": "path.startsWith(\"/admin/\")",
-    "name": "robots-txt-policy-disallow-1",
-    "action": "CHALLENGE"
+    "name": "robots-txt-policy-disallow-1"
   },
   {
+    "action": "CHALLENGE",
     "expression": "path.startsWith(\"/private\")",
-    "name": "robots-txt-policy-disallow-2",
-    "action": "CHALLENGE"
+    "name": "robots-txt-policy-disallow-2"
   }
 ]
```
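The `simple.json` reorder is consistent with `encoding/json` semantics: struct fields marshal in declaration order, so a rule type declaring `Action` before `Expression` and `Name` produces exactly the updated fixture. Illustrative sketch only; the real `AnubisRule` field set and tags may differ:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical mirror of the rule's JSON shape; declaration order
// controls the key order in the output.
type rule struct {
	Action     string `json:"action"`
	Expression string `json:"expression,omitempty"`
	Name       string `json:"name"`
}

func main() {
	out, _ := json.MarshalIndent([]rule{{
		Action:     "CHALLENGE",
		Expression: `path.startsWith("/admin/")`,
		Name:       "robots-txt-policy-disallow-1",
	}}, "", "  ")
	fmt.Println(string(out))
	// [
	//   {
	//     "action": "CHALLENGE",
	//     "expression": "path.startsWith(\"/admin/\")",
	//     "name": "robots-txt-policy-disallow-1"
	//   }
	// ]
}
```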
```diff
@@ -12,7 +12,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
 
 - Document missing environment variables in installation guide: `SLOG_LEVEL`, `COOKIE_PREFIX`, `FORCED_LANGUAGE`, and `TARGET_DISABLE_KEEPALIVE` ([#1086](https://github.com/TecharoHQ/anubis/pull/1086))
-- Fixed `robots2policy` to properly group consecutive user agents into `any:` instead of only processing the last one ([#925](https://github.com/TecharoHQ/anubis/pull/925))
 
 <!-- This changes the project to: -->
 
```