1 file changed, 21 insertions
block_ai_crawlers.conf (file created)
(blockAiCrawlers) {
	@blockAiCrawlers {
		header_regexp User-Agent "(?i)(^|\W)(Bytespider|CCBot|Diffbot|FacebookBot|Google-Extended|GPTBot|omgili|anthropic-ai|Claude-Web|ClaudeBot|cohere-ai)($|\W)"
	}
	handle @blockAiCrawlers {
		abort
	}
}

# Usage:
# 1. Place this file next to your Caddyfile
# 2. Edit your Caddyfile as in the example below
#
# ```
# import block_ai_crawlers.conf
#
# www.mywebsite.com {
#	import blockAiCrawlers
#	reverse_proxy * localhost:3000
# }
# ```
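To verify the block end to end, send a request with a matching User-Agent and confirm that no HTTP response comes back: `abort` closes the connection, so a blocked crawler sees a transport error rather than a status code. Below is a minimal Go sketch of that check, assuming a Caddy instance that imports the snippet is reachable at `http://localhost/` (the URL and the sample User-Agent string are assumptions; adjust them to your site).

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Assumed target: a site block that imports blockAiCrawlers.
	req, err := http.NewRequest("GET", "http://localhost/", nil)
	if err != nil {
		panic(err)
	}
	// Any User-Agent matched by the header_regexp above, e.g. GPTBot.
	req.Header.Set("User-Agent", "GPTBot/1.0 (+https://openai.com/gptbot)")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		// `abort` drops the connection without sending a response, so an
		// error here is the expected outcome for a blocked crawler.
		fmt.Println("blocked as expected:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("not blocked, got:", resp.Status)
}
```

If a dropped connection is too blunt for your setup, `respond 403` can be used in place of `abort` inside the `handle` block to return a normal HTTP error instead.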