Add the greylisting feature

TheophileDiot 2022-09-28 14:56:12 +02:00
parent 94ce249d74
commit 27b4ff330c
4 changed files with 544 additions and 4 deletions

core/greylist/greylist.lua (Normal file, 247 lines)

@@ -0,0 +1,247 @@
local _M = {}
_M.__index = _M

local utils = require "utils"
local datastore = require "datastore"
local logger = require "logger"
local cjson = require "cjson"
local ipmatcher = require "resty.ipmatcher"

function _M.new()
	local self = setmetatable({}, _M)
	return self, nil
end

function _M:init()
	-- Check if init is needed
	local init_needed, err = utils.has_variable("USE_GREYLIST", "yes")
	if init_needed == nil then
		return false, err
	end
	if not init_needed then
		return true, "no service uses Greylist, skipping init"
	end
	-- Read greylists
	local greylists = {
		["IP"] = {},
		["RDNS"] = {},
		["ASN"] = {},
		["USER_AGENT"] = {},
		["URI"] = {}
	}
	local i = 0
	for kind, _ in pairs(greylists) do
		local f, err = io.open("/opt/bunkerweb/cache/greylist/" .. kind .. ".list", "r")
		if f then
			for line in f:lines() do
				table.insert(greylists[kind], line)
				i = i + 1
			end
			f:close()
		end
	end
	-- Load them into datastore
	local ok, err = datastore:set("plugin_greylist_list", cjson.encode(greylists))
	if not ok then
		return false, "can't store Greylist list into datastore : " .. err
	end
	return true, "successfully loaded " .. tostring(i) .. " greylisted IP/network/rDNS/ASN/User-Agent/URI"
end

function _M:access()
	-- Check if access is needed
	local access_needed, err = utils.get_variable("USE_GREYLIST")
	if access_needed == nil then
		return false, err, false, nil
	end
	if access_needed ~= "yes" then
		return true, "Greylist not activated", false, nil
	end
	-- Check the cache
	local cached_ip, err = self:is_in_cache("ip" .. ngx.var.remote_addr)
	if cached_ip and cached_ip ~= "ok" then
		return true, "IP is in greylist cache (info = " .. cached_ip .. ")", false, ngx.OK
	end
	local cached_uri, err = self:is_in_cache("uri" .. ngx.var.uri)
	if cached_uri and cached_uri ~= "ok" then
		return true, "URI is in greylist cache (info = " .. cached_uri .. ")", false, ngx.OK
	end
	local cached_ua = true
	if ngx.var.http_user_agent then
		cached_ua, err = self:is_in_cache("ua" .. ngx.var.http_user_agent)
		if cached_ua and cached_ua ~= "ok" then
			return true, "User-Agent is in greylist cache (info = " .. cached_ua .. ")", false, ngx.OK
		end
	end
	if cached_ip and cached_uri and cached_ua then
		return true, "full request is in greylist cache (not greylisted)", false, nil
	end
	-- Get list
	local data, err = datastore:get("plugin_greylist_list")
	if not data then
		return false, "can't get Greylist list : " .. err, false, nil
	end
	local ok, greylists = pcall(cjson.decode, data)
	if not ok then
		return false, "error while decoding greylists : " .. greylists, false, nil
	end
	-- Return value
	local ret, ret_err = true, "success"
	-- Check if IP is in IP/net greylist
	local ip_net, err = utils.get_variable("GREYLIST_IP")
	if ip_net and ip_net ~= "" then
		for element in ip_net:gmatch("%S+") do
			table.insert(greylists["IP"], element)
		end
	end
	if not cached_ip then
		local ipm, err = ipmatcher.new(greylists["IP"])
		if not ipm then
			ret = false
			ret_err = "can't instantiate ipmatcher " .. err
		else
			if ipm:match(ngx.var.remote_addr) then
				self:add_to_cache("ip" .. ngx.var.remote_addr, "ip/net")
				return ret, "client IP " .. ngx.var.remote_addr .. " is in greylist", false, ngx.OK
			end
		end
	end
	-- Check if rDNS is in greylist
	local rdns_global, err = utils.get_variable("GREYLIST_RDNS_GLOBAL")
	local check = true
	if not rdns_global then
		logger.log(ngx.ERR, "GREYLIST", "Error while getting GREYLIST_RDNS_GLOBAL variable : " .. err)
	elseif rdns_global == "yes" then
		check, err = utils.ip_is_global(ngx.var.remote_addr)
		if check == nil then
			logger.log(ngx.ERR, "GREYLIST", "Error while checking if IP is global : " .. err)
		end
	end
	if not cached_ip and check then
		local rdns, err = utils.get_rdns(ngx.var.remote_addr)
		if not rdns then
			ret = false
			ret_err = "error while trying to get reverse dns : " .. err
		else
			local rdns_list, err = utils.get_variable("GREYLIST_RDNS")
			if rdns_list and rdns_list ~= "" then
				for element in rdns_list:gmatch("%S+") do
					table.insert(greylists["RDNS"], element)
				end
			end
			for i, suffix in ipairs(greylists["RDNS"]) do
				if rdns:sub(- #suffix) == suffix then
					self:add_to_cache("ip" .. ngx.var.remote_addr, "rDNS " .. suffix)
					return ret, "client IP " .. ngx.var.remote_addr .. " is in greylist (info = rDNS " .. suffix .. ")", false, ngx.OK
				end
			end
		end
	end
	-- Check if ASN is in greylist
	if not cached_ip then
		if utils.ip_is_global(ngx.var.remote_addr) then
			local asn, err = utils.get_asn(ngx.var.remote_addr)
			if not asn then
				ret = false
				ret_err = "error while trying to get asn number : " .. err
			else
				local asn_list, err = utils.get_variable("GREYLIST_ASN")
				if asn_list and asn_list ~= "" then
					for element in asn_list:gmatch("%S+") do
						table.insert(greylists["ASN"], element)
					end
				end
				for i, asn_bl in ipairs(greylists["ASN"]) do
					if tostring(asn) == asn_bl then
						self:add_to_cache("ip" .. ngx.var.remote_addr, "ASN " .. tostring(asn))
						return ret, "client IP " .. ngx.var.remote_addr .. " is in greylist (kind = ASN " .. tostring(asn) .. ")", false, ngx.OK
					end
				end
			end
		end
	end
	-- IP is not greylisted
	local ok, err = self:add_to_cache("ip" .. ngx.var.remote_addr, "ok")
	if not ok then
		ret = false
		ret_err = err
	end
	-- Check if User-Agent is in greylist
	if not cached_ua and ngx.var.http_user_agent then
		local ua_list, err = utils.get_variable("GREYLIST_USER_AGENT")
		if ua_list and ua_list ~= "" then
			for element in ua_list:gmatch("%S+") do
				table.insert(greylists["USER_AGENT"], element)
			end
		end
		for i, ua_bl in ipairs(greylists["USER_AGENT"]) do
			if ngx.var.http_user_agent:match(ua_bl) then
				self:add_to_cache("ua" .. ngx.var.http_user_agent, "UA " .. ua_bl)
				return ret, "client User-Agent " .. ngx.var.http_user_agent .. " is in greylist (matched " .. ua_bl .. ")", false, ngx.OK
			end
		end
		-- UA is not greylisted
		local ok, err = self:add_to_cache("ua" .. ngx.var.http_user_agent, "ok")
		if not ok then
			ret = false
			ret_err = err
		end
	end
	-- Check if URI is in greylist
	if not cached_uri then
		local uri_list, err = utils.get_variable("GREYLIST_URI")
		if uri_list and uri_list ~= "" then
			for element in uri_list:gmatch("%S+") do
				table.insert(greylists["URI"], element)
			end
		end
		for i, uri_bl in ipairs(greylists["URI"]) do
			if ngx.var.uri:match(uri_bl) then
				self:add_to_cache("uri" .. ngx.var.uri, "URI " .. uri_bl)
				return ret, "client URI " .. ngx.var.uri .. " is in greylist (matched " .. uri_bl .. ")", false, ngx.OK
			end
		end
	end
	-- URI is not greylisted
	local ok, err = self:add_to_cache("uri" .. ngx.var.uri, "ok")
	if not ok then
		ret = false
		ret_err = err
	end
	return ret, "IP is not in list (error = " .. ret_err .. ")", true, utils.get_deny_status()
end

-- Look up a previous greylist decision for the current vhost in the datastore cache
function _M:is_in_cache(ele)
	local kind, err = datastore:get("plugin_greylist_cache_" .. ngx.var.server_name .. ele)
	if not kind then
		if err ~= "not found" then
			logger.log(ngx.ERR, "GREYLIST", "Error while accessing cache : " .. err)
		end
		return false, err
	end
	return kind, "success"
end

-- Cache a greylist decision for one hour, keyed by vhost and element
function _M:add_to_cache(ele, kind)
	local ok, err = datastore:set("plugin_greylist_cache_" .. ngx.var.server_name .. ele, kind, 3600)
	if not ok then
		logger.log(ngx.ERR, "GREYLIST", "Error while adding element to cache : " .. err)
		return false, err
	end
	return true, "success"
end

return _M
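
A note on the rDNS check above : it relies on a plain suffix comparison (`rdns:sub(-#suffix) == suffix`) rather than pattern matching. A minimal standalone Lua sketch of that comparison, with made-up hostnames :

```lua
-- Suffix comparison as used by the rDNS greylist check (illustrative values)
local function has_suffix(rdns, suffix)
	return rdns:sub(-#suffix) == suffix
end

print(has_suffix("crawl-66-249-66-1.googlebot.com", ".googlebot.com")) -- true
print(has_suffix("googlebot.com.evil.example", ".googlebot.com"))      -- false, the suffix must end the name
```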

greylist-download.py (Normal file, 152 lines)

@@ -0,0 +1,152 @@
#!/usr/bin/python3

import sys, os, re, traceback

sys.path.append("/opt/bunkerweb/deps/python")
sys.path.append("/opt/bunkerweb/utils")

import logger, jobs, requests, ipaddress

def check_line(kind, line) :
    if kind == "IP" :
        if "/" in line :
            try :
                ipaddress.ip_network(line)
                return True, line
            except :
                pass
        else :
            try :
                ipaddress.ip_address(line)
                return True, line
            except :
                pass
        return False, ""
    elif kind == "RDNS" :
        if re.match(r"^(\.?[A-Za-z0-9\-]+)*\.[A-Za-z]{2,}$", line) :
            return True, line.lower()
        return False, ""
    elif kind == "ASN" :
        real_line = line.replace("AS", "")
        if re.match(r"^\d+$", real_line) :
            return True, real_line
    elif kind == "USER_AGENT" :
        # Convert escaped entries into Lua patterns, as consumed by greylist.lua
        return True, line.replace("\\ ", " ").replace("\\.", "%.").replace("\\\\", "\\").replace("-", "%-")
    elif kind == "URI" :
        if re.match(r"^/", line) :
            return True, line
    return False, ""

status = 0

try :

    # Check if at least a server has Greylist activated
    greylist_activated = False
    # Multisite case
    if os.getenv("MULTISITE") == "yes" :
        for first_server in os.getenv("SERVER_NAME").split(" ") :
            if os.getenv(first_server + "_USE_GREYLIST", os.getenv("USE_GREYLIST")) == "yes" :
                greylist_activated = True
                break
    # Singlesite case
    elif os.getenv("USE_GREYLIST") == "yes" :
        greylist_activated = True
    if not greylist_activated :
        logger.log("GREYLIST", "", "Greylist is not activated, skipping downloads...")
        os._exit(0)

    # Create directories if they don't exist
    os.makedirs("/opt/bunkerweb/cache/greylist", exist_ok=True)
    os.makedirs("/opt/bunkerweb/tmp/greylist", exist_ok=True)

    # Don't go further if the cache is fresh
    kinds_fresh = {
        "IP": True,
        "RDNS": True,
        "ASN" : True,
        "USER_AGENT": True,
        "URI": True
    }
    all_fresh = True
    for kind in kinds_fresh :
        if not jobs.is_cached_file("/opt/bunkerweb/cache/greylist/" + kind + ".list", "hour") :
            kinds_fresh[kind] = False
            all_fresh = False
            logger.log("GREYLIST", "", "Greylist for " + kind + " is not cached, processing downloads...")
        else :
            logger.log("GREYLIST", "", "Greylist for " + kind + " is already in cache, skipping downloads...")
    if all_fresh :
        os._exit(0)

    # Get URLs
    urls = {
        "IP": [],
        "RDNS": [],
        "ASN" : [],
        "USER_AGENT": [],
        "URI": []
    }
    for kind in urls :
        for url in os.getenv("GREYLIST_" + kind + "_URLS", "").split(" ") :
            if url != "" and url not in urls[kind] :
                urls[kind].append(url)

    # Loop on kinds
    for kind, urls_list in urls.items() :
        if kinds_fresh[kind] :
            continue
        # Write combined data of the kind to a single temp file
        for url in urls_list :
            try :
                logger.log("GREYLIST", "", "Downloading greylist data from " + url + " ...")
                resp = requests.get(url, stream=True)
                if resp.status_code != 200 :
                    continue
                i = 0
                with open("/opt/bunkerweb/tmp/greylist/" + kind + ".list", "w") as f :
                    for line in resp.iter_lines(decode_unicode=True) :
                        line = line.strip()
                        if kind != "USER_AGENT" :
                            line = line.strip().split(" ")[0]
                        if line == "" or line.startswith("#") or line.startswith(";") :
                            continue
                        ok, data = check_line(kind, line)
                        if ok :
                            f.write(data + "\n")
                            i += 1
                logger.log("GREYLIST", "", "Downloaded " + str(i) + " greylisted " + kind)
                # Check if file has changed
                file_hash = jobs.file_hash("/opt/bunkerweb/tmp/greylist/" + kind + ".list")
                cache_hash = jobs.cache_hash("/opt/bunkerweb/cache/greylist/" + kind + ".list")
                if file_hash == cache_hash :
                    logger.log("GREYLIST", "", "New file " + kind + ".list is identical to cache file, reload is not needed")
                else :
                    logger.log("GREYLIST", "", "New file " + kind + ".list is different than cache file, reload is needed")
                    # Put file in cache
                    cached, err = jobs.cache_file("/opt/bunkerweb/tmp/greylist/" + kind + ".list", "/opt/bunkerweb/cache/greylist/" + kind + ".list", file_hash)
                    if not cached :
                        logger.log("GREYLIST", "", "Error while caching greylist : " + err)
                        status = 2
                    if status != 2 :
                        status = 1
            except :
                status = 2
                logger.log("GREYLIST", "", "Exception while getting greylist from " + url + " :")
                print(traceback.format_exc())

except :
    status = 2
    logger.log("GREYLIST", "", "Exception while running greylist-download.py :")
    print(traceback.format_exc())

sys.exit(status)
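
A note on the USER_AGENT branch of check_line() above : it rewrites escaped entries (`\.`, `-`, ...) into Lua patterns because greylist.lua compares User-Agents with `string.match`, where `.` and `-` are magic characters. A small standalone Lua sketch, using a made-up User-Agent and list entry :

```lua
local ua = "python-requests/2.28.1"

-- Unescaped entry : "-" acts as a lazy quantifier in Lua patterns, so this does not match
print(ua:match("python-requests/2.28") ~= nil)   -- false

-- Escaped entry as produced by check_line() : "%-" and "%." match literal characters
print(ua:match("python%-requests/2%.28") ~= nil) -- true
```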

core/greylist/plugin.json (Normal file, 123 lines)

@@ -0,0 +1,123 @@
{
"id": "greylist",
"order": 2,
"name": "Greylist",
"description": "Allow access while keeping security features based on internal and external IP/network/rDNS/ASN greylists.",
"version": "0.1",
"settings": {
"USE_GREYLIST": {
"context": "multisite",
"default": "no",
"help": "Activate greylist feature.",
"id": "use-greylist",
"label": "Activate greylisting",
"regex": "^(yes|no)$",
"type": "check"
},
"GREYLIST_IP_URLS": {
"context": "global",
"default": "",
"help": "List of URLs, separated with spaces, containing good IP/network to put into the greylist.",
"id": "greylist-ip-urls",
"label": "Greylist IP/network URLs",
"regex": "^.*$",
"type": "text"
},
"GREYLIST_IP": {
"context": "multisite",
"default": "",
"help": "List of IP/network, separated with spaces, to put into the greylist.",
"id": "greylist-ip",
"label": "Greylist IP/network",
"regex": "^.*$",
"type": "text"
},
"GREYLIST_RDNS": {
"context": "multisite",
"default": "",
"help": "List of reverse DNS suffixes, separated with spaces, to put into the greylist.",
"id": "greylist-rdns",
"label": "Greylist reverse DNS",
"regex": "^.*$",
"type": "text"
},
"GREYLIST_RDNS_URLS": {
"context": "global",
"default": "",
"help": "List of URLs, separated with spaces, containing reverse DNS suffixes to put into the greylist.",
"id": "greylist-rdns-urls",
"label": "Greylist reverse DNS URLs",
"regex": "^.*$",
"type": "text"
},
"GREYLIST_RDNS_GLOBAL": {
"context": "multisite",
"default": "yes",
"help": "Only perform RDNS greylist checks on global IP addresses.",
"id": "greylist-rdns-global",
"label": "Greylist reverse DNS global IPs",
"regex": "^.*$",
"type": "text"
},
"GREYLIST_ASN": {
"context": "multisite",
"default": "",
"help": "List of ASN numbers, separated with spaces, to put into the greylist.",
"id": "greylist-asn",
"label": "Greylist ASN",
"regex": "^.*$",
"type": "text"
},
"GREYLIST_ASN_URLS": {
"context": "global",
"default": "",
"help": "List of URLs, separated with spaces, containing ASN to put into the greylist.",
"id": "greylist-rdns-urls",
"label": "Greylist ASN URLs",
"regex": "^.*$",
"type": "text"
},
"GREYLIST_USER_AGENT": {
"context": "multisite",
"default": "",
"help": "List of User-Agent, separated with spaces, to put into the greylist.",
"id": "greylist-user-agent",
"label": "Greylist User-Agent",
"regex": "^.*$",
"type": "text"
},
"GREYLIST_USER_AGENT_URLS": {
"context": "global",
"default": "",
"help": "List of URLs, separated with spaces, containing good User-Agent to put into the greylist.",
"id": "greylist-user-agent-urls",
"label": "Greylist User-Agent URLs",
"regex": "^.*$",
"type": "text"
},
"GREYLIST_URI": {
"context": "multisite",
"default": "",
"help": "List of URI, separated with spaces, to put into the greylist.",
"id": "greylist-uri",
"label": "Greylist URI",
"regex": "^.*$",
"type": "text"
},
"GREYLIST_URI_URLS": {
"context": "global",
"default": "",
"help": "List of URLs, separated with spaces, containing bad URI to put into the greylist.",
"id": "greylist-uri-urls",
"label": "Greylist URI URLs",
"regex": "^.*$",
"type": "text"
}
},
"jobs": [{
"name": "greylist-download",
"file": "greylist-download.py",
"every": "hour",
"reload": true
}]
}

Documentation (modified file)

@@ -204,11 +204,11 @@ Here is the list of related settings :
| `ANTIBOT_HCAPTCHA_SECRET` and `ANTIBOT_RECAPTCHA_SECRET` | | The Secret value to use when `USE_ANTIBOT` is set to `hcaptcha` or `recaptcha`. |
| `ANTIBOT_RECAPTCHA_SCORE` | `0.7` | The minimum score that clients must have when `USE_ANTIBOT` is set to `recaptcha`. |
## Blacklisting and whitelisting
## Blacklisting, whitelisting and greylisting
The blacklisting security feature is very easy to understand : if a specific criteria is met, the client will be banned. As for the whitelisting, it's the exact opposite : if a specific criteria is met, the client will be allowed and no additional security check will be done.
The blacklisting security feature is very easy to understand : if a specific criterion is met, the client will be banned. As for the whitelisting, it's the exact opposite : if a specific criterion is met, the client will be allowed and no additional security check will be done. As for greylisting : if a specific criterion is met, the client will be allowed but additional security checks will still be done.
You can configure blacklisting and whitelisting at the same time. If that's the case, note that whitelisting is executed before blacklisting : if a criteria is true for both, the client will be whitelisted.
You can configure blacklisting, whitelisting and greylisting at the same time. If that's the case, note that whitelisting is executed before blacklisting and greylisting : even if a criterion is true for all of them, the client will be whitelisted.
### Blacklisting
@@ -228,13 +228,31 @@ You can use the following settings to setup blacklisting :
| `BLACKLIST_URI` | | List of requests URI to blacklist. |
| `BLACKLIST_URI_URLS` | | List of URLs containing request URI to blacklist. |
### Greylisting
You can use the following settings to setup greylisting (see the example after the table) :
| Setting | Default | Description |
| :-------------------------: | :----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------- |
| `USE_GREYLIST` | `no` | When set to `yes`, will enable greylisting based on various criteria. |
| `GREYLIST_IP` | | List of IPs and networks to greylist. |
| `GREYLIST_IP_URLS` | | List of URLs containing IPs and networks to greylist. |
| `GREYLIST_RDNS` | | List of reverse DNS suffixes to greylist. |
| `GREYLIST_RDNS_URLS` | | List of URLs containing reverse DNS suffixes to greylist. |
| `GREYLIST_ASN` | | List of ASNs to greylist. |
| `GREYLIST_ASN_URLS` | | List of URLs containing ASNs to greylist. |
| `GREYLIST_USER_AGENT` | | List of User-Agents to greylist. |
| `GREYLIST_USER_AGENT_URLS` | | List of URLs containing User-Agents to greylist. |
| `GREYLIST_URI` | | List of request URIs to greylist. |
| `GREYLIST_URI_URLS` | | List of URLs containing request URIs to greylist. |
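
For example, here is a minimal illustrative configuration (the network `1.2.3.0/24` and ASN `1234` are placeholder values) : clients matching one of the criteria are let through but still go through the other security checks and, based on the plugin code, clients matching none of them receive the deny status.

```
USE_GREYLIST=yes
GREYLIST_IP=1.2.3.0/24
GREYLIST_ASN=1234
```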
### Whitelisting
You can use the following settings to setup whitelisting :
| Setting | Default | Description |
| :-------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------- |
| `USE_WHITELIST` | `yes` | When set to `yes`, will enable blacklisting based on various criteria. |
| `USE_WHITELIST` | `yes` | When set to `yes`, will enable whitelisting based on various criteria. |
| `WHITELIST_IP` | `20.191.45.212 40.88.21.235 40.76.173.151 40.76.163.7 20.185.79.47 52.142.26.175 20.185.79.15 52.142.24.149 40.76.162.208 40.76.163.23 40.76.162.191 40.76.162.247 54.208.102.37 107.21.1.8` | List of IP and network to whitelist. The default list contains IP from DuckDuckGo crawler. |
| `WHITELIST_IP_URLS` | `` | List of URLs containing IP and network to whitelist. |
| `WHITELIST_RDNS` | `.google.com .googlebot.com .yandex.ru .yandex.net .yandex.com .search.msn.com .baidu.com .baidu.jp .crawl.yahoo.net .fwd.linkedin.com .twitter.com .twttr.com .discord.com` | List of reverse DNS to whitelist. Default list contains various reverse DNS of search engines and social media crawlers. |