Merge branch 'refactor' into staging
This commit is contained in:
commit
b5fa473ae3
|
@ -0,0 +1,6 @@
|
|||
- utils refactoring
|
||||
- load inline values for white/black/grey list core
|
||||
- check if correct setting is set to yes in new() before loading stuff in self
|
||||
- store object in ngx.ctx
|
||||
- bwcli with redis
|
||||
- move bans to cachestore
|
|
@ -1,4 +1,4 @@
|
|||
FROM nginx:1.22.1-alpine AS builder
|
||||
FROM nginx:1.24.0-alpine AS builder
|
||||
|
||||
# Copy dependencies sources folder
|
||||
COPY src/deps /tmp/bunkerweb/deps
|
||||
|
@ -21,7 +21,7 @@ RUN apk add --no-cache --virtual .build-deps py3-pip && \
|
|||
pip install --no-cache-dir --require-hashes --target /usr/share/bunkerweb/deps/python -r /usr/share/bunkerweb/deps/requirements.txt && \
|
||||
apk del .build-deps
|
||||
|
||||
FROM nginx:1.22.1-alpine
|
||||
FROM nginx:1.24.0-alpine
|
||||
|
||||
# Copy dependencies
|
||||
COPY --from=builder /usr/share/bunkerweb /usr/share/bunkerweb
|
||||
|
|
|
@ -1,40 +1,46 @@
|
|||
local datastore = require "datastore"
|
||||
local utils = require "utils"
|
||||
local cjson = require "cjson"
|
||||
local plugins = require "plugins"
|
||||
local upload = require "resty.upload"
|
||||
local logger = require "logger"
|
||||
|
||||
local api = { global = { GET = {}, POST = {}, PUT = {}, DELETE = {} } }
|
||||
local class = require "middleclass"
|
||||
local datastore = require "bunkerweb.datastore"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local cjson = require "cjson"
|
||||
local upload = require "resty.upload"
|
||||
|
||||
api.response = function(self, http_status, api_status, msg)
|
||||
local api = class("api")
|
||||
|
||||
api.global = { GET = {}, POST = {}, PUT = {}, DELETE = {} }
|
||||
|
||||
function api:initialize()
|
||||
self.datastore = datastore:new()
|
||||
end
|
||||
|
||||
function api:response(http_status, api_status, msg)
|
||||
local resp = {}
|
||||
resp["status"] = api_status
|
||||
resp["msg"] = msg
|
||||
return http_status, resp
|
||||
end
|
||||
|
||||
api.global.GET["^/ping$"] = function(api)
|
||||
return api:response(ngx.HTTP_OK, "success", "pong")
|
||||
api.global.GET["^/ping$"] = function(self)
|
||||
return self:response(ngx.HTTP_OK, "success", "pong")
|
||||
end
|
||||
|
||||
api.global.POST["^/reload$"] = function(api)
|
||||
api.global.POST["^/reload$"] = function(self)
|
||||
local status = os.execute("nginx -s reload")
|
||||
if status == 0 then
|
||||
return api:response(ngx.HTTP_OK, "success", "reload successful")
|
||||
return self:response(ngx.HTTP_OK, "success", "reload successful")
|
||||
end
|
||||
return api:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error", "exit status = " .. tostring(status))
|
||||
return self:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error", "exit status = " .. tostring(status))
|
||||
end
|
||||
|
||||
api.global.POST["^/stop$"] = function(api)
|
||||
api.global.POST["^/stop$"] = function(self)
|
||||
local status = os.execute("nginx -s quit")
|
||||
if status == 0 then
|
||||
return api:response(ngx.HTTP_OK, "success", "stop successful")
|
||||
return self:response(ngx.HTTP_OK, "success", "stop successful")
|
||||
end
|
||||
return api:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error", "exit status = " .. tostring(status))
|
||||
return self:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error", "exit status = " .. tostring(status))
|
||||
end
|
||||
|
||||
api.global.POST["^/confs$"] = function(api)
|
||||
api.global.POST["^/confs$"] = function(self)
|
||||
local tmp = "/var/tmp/bunkerweb/api_" .. ngx.var.uri:sub(2) .. ".tar.gz"
|
||||
local destination = "/usr/share/bunkerweb/" .. ngx.var.uri:sub(2)
|
||||
if ngx.var.uri == "/confs" then
|
||||
|
@ -50,7 +56,7 @@ api.global.POST["^/confs$"] = function(api)
|
|||
end
|
||||
local form, err = upload:new(4096)
|
||||
if not form then
|
||||
return api:response(ngx.HTTP_BAD_REQUEST, "error", err)
|
||||
return self:response(ngx.HTTP_BAD_REQUEST, "error", err)
|
||||
end
|
||||
form:set_timeout(1000)
|
||||
local file = io.open(tmp, "w+")
|
||||
|
@ -58,7 +64,7 @@ api.global.POST["^/confs$"] = function(api)
|
|||
local typ, res, err = form:read()
|
||||
if not typ then
|
||||
file:close()
|
||||
return api:response(ngx.HTTP_BAD_REQUEST, "error", err)
|
||||
return self:response(ngx.HTTP_BAD_REQUEST, "error", err)
|
||||
end
|
||||
if typ == "eof" then
|
||||
break
|
||||
|
@ -71,24 +77,24 @@ api.global.POST["^/confs$"] = function(api)
|
|||
file:close()
|
||||
local status = os.execute("rm -rf " .. destination .. "/*")
|
||||
if status ~= 0 then
|
||||
return api:response(ngx.HTTP_BAD_REQUEST, "error", "can't remove old files")
|
||||
return self:response(ngx.HTTP_BAD_REQUEST, "error", "can't remove old files")
|
||||
end
|
||||
status = os.execute("tar xzf " .. tmp .. " -C " .. destination)
|
||||
if status ~= 0 then
|
||||
return api:response(ngx.HTTP_BAD_REQUEST, "error", "can't extract archive")
|
||||
return self:response(ngx.HTTP_BAD_REQUEST, "error", "can't extract archive")
|
||||
end
|
||||
return api:response(ngx.HTTP_OK, "success", "saved data at " .. destination)
|
||||
return self:response(ngx.HTTP_OK, "success", "saved data at " .. destination)
|
||||
end
|
||||
|
||||
api.global.POST["^/data$"] = api.global.POST["^/confs$"]
|
||||
api.global.POST["^/data$"] = api.global.POST["^/confs$"]
|
||||
|
||||
api.global.POST["^/cache$"] = api.global.POST["^/confs$"]
|
||||
api.global.POST["^/cache$"] = api.global.POST["^/confs$"]
|
||||
|
||||
api.global.POST["^/custom_configs$"] = api.global.POST["^/confs$"]
|
||||
|
||||
api.global.POST["^/plugins$"] = api.global.POST["^/confs$"]
|
||||
api.global.POST["^/plugins$"] = api.global.POST["^/confs$"]
|
||||
|
||||
api.global.POST["^/unban$"] = function(api)
|
||||
api.global.POST["^/unban$"] = function(self)
|
||||
ngx.req.read_body()
|
||||
local data = ngx.req.get_body_data()
|
||||
if not data then
|
||||
|
@ -101,13 +107,13 @@ api.global.POST["^/unban$"] = function(api)
|
|||
end
|
||||
local ok, ip = pcall(cjson.decode, data)
|
||||
if not ok then
|
||||
return api:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error", "can't decode JSON : " .. env)
|
||||
return self:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error", "can't decode JSON : " .. env)
|
||||
end
|
||||
datastore:delete("bans_ip_" .. ip["ip"])
|
||||
return api:response(ngx.HTTP_OK, "success", "ip " .. ip["ip"] .. " unbanned")
|
||||
self.datastore:delete("bans_ip_" .. ip["ip"])
|
||||
return self:response(ngx.HTTP_OK, "success", "ip " .. ip["ip"] .. " unbanned")
|
||||
end
|
||||
|
||||
api.global.POST["^/ban$"] = function(api)
|
||||
api.global.POST["^/ban$"] = function(self)
|
||||
ngx.req.read_body()
|
||||
local data = ngx.req.get_body_data()
|
||||
if not data then
|
||||
|
@ -120,45 +126,45 @@ api.global.POST["^/ban$"] = function(api)
|
|||
end
|
||||
local ok, ip = pcall(cjson.decode, data)
|
||||
if not ok then
|
||||
return api:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error", "can't decode JSON : " .. env)
|
||||
return self:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error", "can't decode JSON : " .. env)
|
||||
end
|
||||
datastore:set("bans_ip_" .. ip["ip"], "manual", ip["exp"])
|
||||
return api:response(ngx.HTTP_OK, "success", "ip " .. ip["ip"] .. " banned")
|
||||
self.datastore:set("bans_ip_" .. ip["ip"], "manual", ip["exp"])
|
||||
return self:response(ngx.HTTP_OK, "success", "ip " .. ip["ip"] .. " banned")
|
||||
end
|
||||
|
||||
api.global.GET["^/bans$"] = function(api)
|
||||
api.global.GET["^/bans$"] = function(self)
|
||||
local data = {}
|
||||
for i, k in ipairs(datastore:keys()) do
|
||||
for i, k in ipairs(self.datastore:keys()) do
|
||||
if k:find("^bans_ip_") then
|
||||
local ret, reason = datastore:get(k)
|
||||
if not ret then
|
||||
return api:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error",
|
||||
return self:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error",
|
||||
"can't access " .. k .. " from datastore : " + reason)
|
||||
end
|
||||
local ret, exp = datastore:exp(k)
|
||||
local ret, exp = self.datastore:exp(k)
|
||||
if not ret then
|
||||
return api:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error",
|
||||
return self:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error",
|
||||
"can't access exp " .. k .. " from datastore : " + exp)
|
||||
end
|
||||
local ban = { ip = k:sub(9, #k), reason = reason, exp = exp }
|
||||
table.insert(data, ban)
|
||||
end
|
||||
end
|
||||
return api:response(ngx.HTTP_OK, "success", data)
|
||||
return self:response(ngx.HTTP_OK, "success", data)
|
||||
end
|
||||
|
||||
api.is_allowed_ip = function(self)
|
||||
local data, err = datastore:get("api_whitelist_ip")
|
||||
function api:is_allowed_ip()
|
||||
local data, err = self.datastore:get("api_whitelist_ip")
|
||||
if not data then
|
||||
return false, "can't access api_allowed_ips in datastore"
|
||||
end
|
||||
if utils.is_ip_in_networks(ngx.var.remote_addr, cjson.decode(data).data) then
|
||||
if utils.is_ip_in_networks(ngx.var.remote_addr, cjson.decode(data)) then
|
||||
return true, "ok"
|
||||
end
|
||||
return false, "IP is not in API_WHITELIST_IP"
|
||||
end
|
||||
|
||||
api.do_api_call = function(self)
|
||||
function api:do_api_call()
|
||||
if self.global[ngx.var.request_method] ~= nil then
|
||||
for uri, api_fun in pairs(self.global[ngx.var.request_method]) do
|
||||
if string.match(ngx.var.uri, uri) then
|
||||
|
@ -177,7 +183,7 @@ api.do_api_call = function(self)
|
|||
end
|
||||
end
|
||||
end
|
||||
local list, err = plugins:list()
|
||||
local list, err = self.datastore:get("plugins")
|
||||
if not list then
|
||||
local status, resp = self:response(ngx.HTTP_INTERNAL_SERVER_ERROR, "error", "can't list loaded plugins : " .. err)
|
||||
return false, resp["msg"], ngx.HTTP_INTERNAL_SERVER_ERROR, resp
|
|
@ -0,0 +1,167 @@
|
|||
local mlcache = require "resty.mlcache"
|
||||
local logger = require "bunkerweb.logger"
|
||||
local class = require "middleclass"
|
||||
local cachestore = class("cachestore")
|
||||
|
||||
-- Instantiate mlcache object at module level (which will be cached when running init phase)
|
||||
-- TODO : custom settings
|
||||
local shm = "cachestore"
|
||||
local ipc_shm = "cachestore_ipc"
|
||||
local shm_miss = "cachestore_miss"
|
||||
local shm_locks = "cachestore_locks"
|
||||
if not ngx.shared.cachestore then
|
||||
shm = "cachestore_stream"
|
||||
ipc_shm = "cachestore_ipc_stream"
|
||||
shm_miss = "cachestore_miss_stream"
|
||||
shm_locks = "cachestore_locks_stream"
|
||||
end
|
||||
local cache, err = mlcache.new(
|
||||
"cachestore",
|
||||
shm,
|
||||
{
|
||||
lru_size = 100,
|
||||
ttl = 30,
|
||||
neg_ttl = 0.1,
|
||||
shm_set_tries = 3,
|
||||
shm_miss = shm_miss,
|
||||
shm_locks = shm_locks,
|
||||
resty_lock_opts = {
|
||||
exptime = 30,
|
||||
timeout = 5,
|
||||
step = 0.001,
|
||||
ratio = 2,
|
||||
max_step = 0.5
|
||||
},
|
||||
ipc_shm = ipc_shm
|
||||
}
|
||||
)
|
||||
local module_logger = logger:new("CACHESTORE")
|
||||
if not cache then
|
||||
module_logger:log(ngx.ERR, "can't instantiate mlcache : " .. err)
|
||||
end
|
||||
|
||||
function cachestore:initialize(use_redis)
	-- Component logger shared by every instance
	self.logger = module_logger
	-- Shared mlcache instance created once at module load time
	self.cache = cache
	-- When true, a local miss triggers a redis lookup via the get() callback
	self.use_redis = use_redis or false
end
|
||||
|
||||
-- Fetch a value from the local mlcache, optionally falling back to redis.
-- @param key cache key
-- @return ok (boolean), value on success or error message on failure
function cachestore:get(key)
	-- mlcache miss callback : fetch value + remaining TTL from redis
	local function callback(key)
		-- Connect to redis through a clusterstore instance
		-- Fix : the original treated clusterstore:new() as returning (ok, err)
		-- and then called connect()/multi()/close() on the class itself
		local clusterstore = require "bunkerweb.clusterstore"
		local store = clusterstore:new()
		local ok, err = store:connect()
		if not ok then
			return nil, "can't connect to redis : " .. err, nil
		end
		-- Atomically read the value and its TTL in one transaction
		local calls = {
			{"get", {key}},
			{"ttl", {key}}
		}
		-- Fix : clusterstore:multi() returns (ok, err, exec) ; the original
		-- read it as (exec, err) so `if err` was truthy even on success
		local ok, err, exec = store:multi(calls)
		if not ok then
			store:close()
			return nil, "exec() failed : " .. err, nil
		end
		-- A table result from redis is an error reply {false, message}
		local value = exec[1]
		if type(value) == "table" then
			store:close()
			return nil, "GET error : " .. value[2], nil
		end
		local ttl = exec[2]
		if type(ttl) == "table" then
			store:close()
			return nil, "TTL error : " .. ttl[2], nil
		end
		store:close()
		-- redis encodes "no value" as ngx.null
		if value == ngx.null then
			value = nil
		end
		-- TTL of -1/-2 (no expiry / missing key) : shift so mlcache gets <= 0
		if ttl < 0 then
			ttl = ttl + 1
		end
		return value, nil, ttl
	end
	local value, err, hit_level
	if self.use_redis then
		value, err, hit_level = self.cache:get(key, nil, callback, key)
	else
		value, err, hit_level = self.cache:get(key)
	end
	-- No hit level at all means the lookup itself failed
	if value == nil and hit_level == nil then
		return false, err
	end
	self.logger:log(ngx.INFO, "hit level for " .. key .. " = " .. tostring(hit_level))
	return true, value
end
|
||||
|
||||
-- Write-through set : push to redis (best-effort) then to the local mlcache.
-- @param key cache key
-- @param value value to store
-- @param ex expiry in seconds forwarded to redis
-- @return true on success, or false + error message
function cachestore:set(key, value, ex)
	if self.use_redis then
		-- Fix : was `self.set_redis(key, value, ex)` (dot call) which passed
		-- `key` as self ; method calls need the colon syntax
		local ok, err = self:set_redis(key, value, ex)
		if not ok then
			-- redis failure is logged but not fatal : the local cache still works
			self.logger:log(ngx.ERR, err)
		end
	end
	-- mlcache signature is set(key, opts, value)
	local ok, err = self.cache:set(key, nil, value)
	if not ok then
		return false, err
	end
	return true
end
|
||||
|
||||
-- Store a value in redis with an expiry.
-- @param key redis key
-- @param value value to store
-- @param ex expiry in seconds (defaults to 30)
-- @return true on success, or false + error message
function cachestore:set_redis(key, value, ex)
	-- Fix : `clusterstore` was not in scope here ; connect through an
	-- instance, since clusterstore:connect() returns (ok, err), not a client
	local clusterstore = require "bunkerweb.clusterstore"
	local store = clusterstore:new()
	local ok, err = store:connect()
	if not ok then
		return false, "can't connect to redis : " .. err
	end
	-- Fix : the original read an undefined `ttl` for the default expiry
	local ok, err = store:call("set", key, value, "EX", ex or 30)
	if err then
		store:close()
		-- Fix : message said "GET failed" for a SET
		return false, "SET failed : " .. err
	end
	store:close()
	return true
end
|
||||
|
||||
-- Delete a key from redis (best-effort) and from the local mlcache.
-- @param key cache key
-- @return true on success, or false + error message
function cachestore:delete(key, value, ex)
	if self.use_redis then
		-- Fix : was `self.del_redis(key)` (dot call) which passed `key` as self
		local ok, err = self:del_redis(key)
		if not ok then
			-- redis failure is logged but not fatal
			self.logger:log(ngx.ERR, err)
		end
	end
	local ok, err = self.cache:delete(key)
	if not ok then
		return false, err
	end
	return true
end
|
||||
|
||||
-- Delete a key from redis.
-- @param key redis key
-- @return true on success, or false + error message
function cachestore:del_redis(key)
	-- Fix : `clusterstore` was not in scope here ; connect through an
	-- instance, since clusterstore:connect() returns (ok, err), not a client
	local clusterstore = require "bunkerweb.clusterstore"
	local store = clusterstore:new()
	local ok, err = store:connect()
	if not ok then
		return false, "can't connect to redis : " .. err
	end
	local ok, err = store:call("del", key)
	if err then
		store:close()
		return false, "DEL failed : " .. err
	end
	store:close()
	return true
end
|
||||
|
||||
return cachestore
|
|
@ -0,0 +1,122 @@
|
|||
local class = require "middleclass"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local logger = require "bunkerweb.logger"
|
||||
local redis = require "resty.redis"
|
||||
|
||||
local clusterstore = class("clusterstore")
|
||||
|
||||
-- Read the redis-related settings and prepare a lazily-connected client.
function clusterstore:initialize()
	-- Dedicated logger for this component
	self.logger = logger:new("CLUSTERSTORE")
	-- Settings resolved from the configuration
	local setting_names = {
		"REDIS_HOST",
		"REDIS_PORT",
		"REDIS_DATABASE",
		"REDIS_SSL",
		"REDIS_TIMEOUT",
		"REDIS_KEEPALIVE_IDLE",
		"REDIS_KEEPALIVE_POOL"
	}
	-- Keep resolved values for later use ; a lookup failure is logged but
	-- does not abort construction
	self.variables = {}
	for _, name in ipairs(setting_names) do
		local value, err = utils.get_variable(name, false)
		if value == nil then
			self.logger:log(ngx.ERR, err)
		end
		self.variables[name] = value
	end
	-- Connection is established on the first connect() call
	self.redis_client = nil
end
|
||||
|
||||
-- Open (or reuse) a connection to redis using the configured settings.
-- @return true + "success"/"already connected", or false + error message
function clusterstore:connect()
	-- Reuse the existing connection if any
	if self.redis_client ~= nil then
		return true, "already connected"
	end
	-- Instantiate the client
	local redis_client, err = redis:new()
	if redis_client == nil then
		return false, err
	end
	-- Same timeout (ms) for connect, send and read
	local timeout = tonumber(self.variables["REDIS_TIMEOUT"])
	redis_client:set_timeouts(timeout, timeout, timeout)
	-- Connect (with keepalive pooling options)
	local options = {
		ssl = self.variables["REDIS_SSL"] == "yes",
		pool = "bw",
		pool_size = tonumber(self.variables["REDIS_KEEPALIVE_POOL"])
	}
	local ok, err = redis_client:connect(self.variables["REDIS_HOST"], tonumber(self.variables["REDIS_PORT"]), options)
	if not ok then
		return false, err
	end
	-- Save client
	self.redis_client = redis_client
	-- Select database only on a brand-new connection (reused pooled
	-- connections already have the right database selected)
	local times, err = redis_client:get_reused_times()
	if err then
		self:close()
		return false, err
	end
	if times == 0 then
		-- Fix : was the undefined global `variables` instead of self.variables
		local select, err = redis_client:select(tonumber(self.variables["REDIS_DATABASE"]))
		if err then
			self:close()
			return false, err
		end
	end
	return true, "success"
end
|
||||
|
||||
-- Release the connection back into the keepalive pool.
-- @return set_keepalive() results, or false + "not connected"
function clusterstore:close()
	if self.redis_client then
		-- Fix : the original nil-ed self.redis_client and then called
		-- set_keepalive() on it (nil dereference) ; keep a local reference
		local redis_client = self.redis_client
		self.redis_client = nil
		-- Equivalent to close but keeps a pool of connections
		return redis_client:set_keepalive(tonumber(self.variables["REDIS_KEEPALIVE_IDLE"]), tonumber(self.variables["REDIS_KEEPALIVE_POOL"]))
	end
	return false, "not connected"
end
|
||||
|
||||
-- Run a single redis command by name on the current connection.
-- @param method redis command name (e.g. "get", "set")
-- @param ... command arguments
-- @return the client's results, or false + "not connected"
function clusterstore:call(method, ...)
	local client = self.redis_client
	-- A connection must be opened first via connect()
	if client == nil then
		return false, "not connected"
	end
	-- Dispatch to the redis client method of the same name
	return client[method](client, ...)
end
|
||||
|
||||
-- Run several redis commands inside a MULTI/EXEC transaction.
-- @param calls list of {command_name, {args...}} pairs
-- @return true, "success", exec results — or false + error message
function clusterstore:multi(calls)
	-- Check if we are connected
	if not self.redis_client then
		return false, "not connected"
	end
	-- Start transaction
	local ok, err = self.redis_client:multi()
	if not ok then
		return false, "multi() failed : " .. err
	end
	-- Queue every command of the transaction
	-- table.unpack is 5.2+ ; fall back to the 5.1/LuaJIT global
	local unpack = table.unpack or unpack
	for i, call in ipairs(calls) do
		local method = call[1]
		-- Fix : the original assigned table.unpack() to one variable, which
		-- truncated the argument list to its first element
		local ok, err = self.redis_client[method](self.redis_client, unpack(call[2]))
		if not ok then
			-- Fix : was `method + "..."` ; Lua concatenates with `..`
			return false, method .. "() failed : " .. err
		end
	end
	-- Exec transaction
	local exec, err = self.redis_client:exec()
	if not exec then
		return false, "exec() failed : " .. err
	end
	if type(exec) ~= "table" then
		return false, "exec() result is not a table"
	end
	return true, "success", exec
end
|
||||
|
||||
return clusterstore
|
|
@ -1,11 +1,14 @@
|
|||
local class = require "middleclass"
|
||||
local datastore = class("datastore")
|
||||
|
||||
local datastore = { dict = ngx.shared.datastore }
|
||||
|
||||
if not datastore.dict then
|
||||
datastore.dict = ngx.shared.datastore_stream
|
||||
function datastore:initialize()
|
||||
self.dict = ngx.shared.datastore
|
||||
if not self.dict then
|
||||
self.dict = ngx.shared.datastore_stream
|
||||
end
|
||||
end
|
||||
|
||||
datastore.get = function(self, key)
|
||||
function datastore:get(key)
|
||||
local value, err = self.dict:get(key)
|
||||
if not value and not err then
|
||||
err = "not found"
|
||||
|
@ -13,21 +16,21 @@ datastore.get = function(self, key)
|
|||
return value, err
|
||||
end
|
||||
|
||||
datastore.set = function(self, key, value, exptime)
|
||||
function datastore:set(key, value, exptime)
|
||||
exptime = exptime or 0
|
||||
return self.dict:safe_set(key, value, exptime)
|
||||
end
|
||||
|
||||
datastore.keys = function(self)
|
||||
return self.dict:get_keys(0)
|
||||
end
|
||||
|
||||
datastore.delete = function(self, key)
|
||||
function datastore:delete(key)
|
||||
self.dict:delete(key)
|
||||
return true, "success"
|
||||
end
|
||||
|
||||
datastore.exp = function(self, key)
|
||||
function datastore:keys()
|
||||
return self.dict:get_keys(0)
|
||||
end
|
||||
|
||||
function datastore:exp(key)
|
||||
local ttl, err = self.dict:ttl(key)
|
||||
if not ttl then
|
||||
return false, err
|
||||
|
@ -35,7 +38,7 @@ datastore.exp = function(self, key)
|
|||
return true, ttl
|
||||
end
|
||||
|
||||
datastore.delete_all = function(self, pattern)
|
||||
function datastore:delete_all(pattern)
|
||||
local keys = self.dict:get_keys(0)
|
||||
for i, key in ipairs(keys) do
|
||||
if key:match(pattern) then
|
||||
|
@ -45,4 +48,4 @@ datastore.delete_all = function(self, pattern)
|
|||
return true, "success"
|
||||
end
|
||||
|
||||
return datastore
|
||||
return datastore
|
|
@ -0,0 +1,87 @@
|
|||
local cjson = require "cjson"
|
||||
|
||||
local helpers = {}
|
||||
|
||||
-- Load and validate a plugin.json file.
-- @param json path to the plugin.json file
-- @return true + decoded plugin table, or false + error message
helpers.load_plugin = function(json)
	-- Open file
	local file, err, nb = io.open(json, "r")
	if not file then
		return false, "can't load JSON at " .. json .. " : " .. err .. "(nb = " .. tostring(nb) .. ")"
	end
	-- Decode JSON
	local ok, plugin = pcall(cjson.decode, file:read("*a"))
	file:close()
	if not ok then
		-- Fix : was `.. err` — the io.open error, nil after a successful open ;
		-- on pcall failure the second return value is the decode error message
		return false, "invalid JSON at " .. json .. " : " .. plugin
	end
	-- Check required fields
	local missing_fields = {}
	local required_fields = {"id", "order", "name", "description", "version", "settings"}
	for i, field in ipairs(required_fields) do
		if plugin[field] == nil then
			-- Fix : removed the accidental (and unused) global `valid_json`
			table.insert(missing_fields, field)
		end
	end
	if #missing_fields > 0 then
		return false, "missing field(s) " .. cjson.encode(missing_fields) .. " for JSON at " .. json
	end
	-- Return plugin
	return true, plugin
end
|
||||
|
||||
-- Require a plugin's Lua module (plugins expose <id>/<id>.lua).
-- @param id plugin identifier
-- @return the module (or nil when the plugin ships no code), plus a message ;
--         false + error message on a real require failure
helpers.require_plugin = function(id)
	local ok, plugin_lua = pcall(require, id .. "/" .. id)
	if not ok then
		-- A "not found" require error just means the plugin has no Lua side
		if plugin_lua:match("not found") then
			return nil, "plugin " .. id .. " doesn't have LUA code"
		end
		return false, "require error for plugin " .. id .. " : " .. plugin_lua
	end
	-- The module must expose a new() constructor
	if plugin_lua.new == nil then
		return false, "missing new() method for plugin " .. id
	end
	return plugin_lua, "new() call successful for plugin " .. id
end
|
||||
|
||||
-- Instantiate a plugin module, catching any constructor error.
-- @param plugin_lua the required plugin module (must expose new())
-- @return true + instance, or false + error message
helpers.new_plugin = function(plugin_lua)
	local ok, plugin_obj = pcall(plugin_lua.new, plugin_lua)
	if ok then
		return true, plugin_obj
	end
	-- On failure the second pcall result holds the error message
	return false, "new error for plugin " .. plugin_lua.name .. " : " .. plugin_obj
end
|
||||
|
||||
-- Call one lifecycle method of a plugin instance in protected mode and
-- validate the shape of its return value.
-- @param plugin plugin instance (must expose get_id())
-- @param method method name to invoke
-- @return true + result table, false + error message, or nil when the
--         plugin does not implement the method
helpers.call_plugin = function(plugin, method)
	-- The plugin must implement the requested method
	local fn = plugin[method]
	if fn == nil then
		return nil, "missing " .. method .. "() method for plugin " .. plugin:get_id()
	end
	-- Invoke it in protected mode
	local ok, ret = pcall(fn, plugin)
	if not ok then
		return false, plugin:get_id() .. ":" .. method .. "() failed : " .. ret
	end
	if ret == nil then
		return false, plugin:get_id() .. ":" .. method .. "() returned nil value"
	end
	-- The returned table must carry the mandatory keys
	local missing_values = {}
	for _, value in ipairs({"ret", "msg"}) do
		if ret[value] == nil then
			table.insert(missing_values, value)
		end
	end
	if #missing_values > 0 then
		return false, "missing required return value(s) : " .. cjson.encode(missing_values)
	end
	return true, ret
end
|
||||
|
||||
return helpers
|
|
@ -0,0 +1,13 @@
|
|||
local errlog = require "ngx.errlog"
|
||||
local class = require "middleclass"
|
||||
local logger = class("logger")
|
||||
|
||||
-- Build a logger whose messages are tagged with an uppercase prefix.
function logger:initialize(prefix)
	self.prefix = prefix:upper()
end
|
||||
|
||||
-- Write a prefixed message to the nginx error log at the given level.
function logger:log(level, msg)
	errlog.raw_log(level, string.format("[%s] %s", self.prefix, msg))
end
|
||||
|
||||
return logger
|
|
@ -0,0 +1,40 @@
|
|||
local class = require "middleclass"
|
||||
local logger = require "bunkerweb.logger"
|
||||
local datastore = require "bunkerweb.datastore"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local cjson = require "cjson"
|
||||
local plugin = class("plugin")
|
||||
|
||||
-- Base constructor for every plugin : loads the plugin's metadata from the
-- datastore and resolves all of its declared settings.
function plugin:initialize(id)
	-- Default state
	self.id = id
	self.variables = {}
	-- Per-plugin logger and datastore accessor
	self.logger = logger:new(id)
	self.datastore = datastore:new()
	-- Metadata is cached in the datastore under "plugin_<id>"
	local encoded_metadata, err = self.datastore:get("plugin_" .. id)
	if not encoded_metadata then
		self.logger:log(ngx.ERR, err)
		return
	end
	-- Resolve every declared setting ; multisite settings are looked up
	-- per-site except during the init phase
	local metadata = cjson.decode(encoded_metadata)
	for setting, meta in pairs(metadata.settings) do
		local site_ctx = meta.context == "multisite" and ngx.get_phase() ~= "init"
		local value, err = utils.get_variable(setting, site_ctx)
		if value == nil then
			self.logger:log(ngx.ERR, "can't get " .. setting .. " variable : " .. err)
		end
		self.variables[setting] = value
	end
end
|
||||
|
||||
-- Accessor for the plugin identifier given at construction time.
function plugin:get_id()
	return self.id
end
|
||||
|
||||
-- Build the standard result table returned by plugin lifecycle methods.
-- @param ret boolean outcome  @param msg message  @param status optional HTTP
-- status  @param redirect optional redirect target
function plugin:ret(ret, msg, status, redirect)
	return {
		ret = ret,
		msg = msg,
		status = status,
		redirect = redirect
	}
end
|
||||
|
||||
return plugin
|
|
@ -1,9 +1,13 @@
|
|||
local datastore = require "datastore"
|
||||
local cdatastore = require "bunkerweb.datastore"
|
||||
local ipmatcher = require "resty.ipmatcher"
|
||||
local cjson = require "cjson"
|
||||
local resolver = require "resty.dns.resolver"
|
||||
local mmdb = require "mmdb"
|
||||
local logger = require "logger"
|
||||
local mmdb = require "bunkerweb.mmdb"
|
||||
local clogger = require "bunkerweb.logger"
|
||||
local session = require "resty.session"
|
||||
|
||||
local logger = clogger:new("UTILS")
|
||||
local datastore = cdatastore:new()
|
||||
|
||||
local utils = {}
|
||||
|
||||
|
@ -173,7 +177,7 @@ utils.ip_is_global = function(ip)
|
|||
if not ok then
|
||||
return nil, "can't decode json : " .. reserved_ips
|
||||
end
|
||||
local ipm, err = ipmatcher.new(reserved_ips.data)
|
||||
local ipm, err = ipmatcher.new(reserved_ips)
|
||||
if not ipm then
|
||||
return nil, "can't instantiate ipmatcher : " .. err
|
||||
end
|
||||
|
@ -213,7 +217,7 @@ utils.get_integration = function()
|
|||
end
|
||||
local ok, err = datastore:set("misc_integration", integration)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "UTILS", "Can't cache integration to datastore : " .. err)
|
||||
logger:log(ngx.ERR, "can't cache integration to datastore : " .. err)
|
||||
end
|
||||
return integration
|
||||
end
|
||||
|
@ -225,14 +229,14 @@ utils.get_version = function()
|
|||
end
|
||||
local f, err = io.open("/usr/share/bunkerweb/VERSION", "r")
|
||||
if not f then
|
||||
logger.log(ngx.ERR, "UTILS", "Can't read VERSION file : " .. err)
|
||||
logger:log(ngx.ERR, "can't read VERSION file : " .. err)
|
||||
return "unknown"
|
||||
end
|
||||
version = f:read("*a")
|
||||
f:close()
|
||||
local ok, err = datastore:set("misc_version", version)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "UTILS", "Can't cache version to datastore : " .. err)
|
||||
logger:log(ngx.ERR, "can't cache version to datastore : " .. err)
|
||||
end
|
||||
return version
|
||||
end
|
||||
|
@ -359,10 +363,60 @@ utils.get_deny_status = function()
|
|||
end
|
||||
local status, err = datastore:get("variable_DENY_HTTP_STATUS")
|
||||
if not status then
|
||||
logger.log(ngx.ERR, "UTILS", "Can't get DENY_HTTP_STATUS variable " .. err)
|
||||
logger:log(ngx.ERR, "can't get DENY_HTTP_STATUS variable " .. err)
|
||||
return 403
|
||||
end
|
||||
return tonumber(status)
|
||||
end
|
||||
|
||||
return utils
|
||||
-- Start (or reuse) the per-request session, caching it in ngx.ctx.
-- @return session object, error (if any), exists flag
utils.get_session = function()
	-- Reuse the session already started during this request
	if ngx.ctx.session then
		return ngx.ctx.session, ngx.ctx.session_err, ngx.ctx.session_exists
	end
	local _session, err, exists = session.start()
	if err then
		-- Fix : logger:log() takes (level, msg) ; the extra "UTILS" argument
		-- made "UTILS" be logged as the message and dropped the real error
		logger:log(ngx.ERR, "can't start session : " .. err)
	end
	-- Cache everything in ngx.ctx so later calls reuse the same session
	ngx.ctx.session = _session
	ngx.ctx.session_err = err
	ngx.ctx.session_exists = exists
	ngx.ctx.session_saved = false
	-- NOTE(review): dot-call on the session object — confirm the resty.session
	-- version in use expects get_data() without self
	ngx.ctx.session_data = _session.get_data()
	if not ngx.ctx.session_data then
		ngx.ctx.session_data = {}
	end
	return _session, err, exists
end
|
||||
|
||||
-- Persist the per-request session data, at most once per request.
-- @return ok (boolean), status message
utils.save_session = function()
	-- Already persisted during this request : nothing to do
	if ngx.ctx.session_saved then
		return true, "session already saved"
	end
	-- Only save a session that was successfully started
	if not ngx.ctx.session or ngx.ctx.session_err then
		return true, "no session"
	end
	ngx.ctx.session:set_data(ngx.ctx.session_data)
	local ok, err = ngx.ctx.session:save()
	if err then
		logger:log(ngx.ERR, "can't save session : " .. err)
		return false, "can't save session : " .. err
	end
	ngx.ctx.session_saved = true
	return true, "session saved"
end
|
||||
|
||||
-- Store a key/value pair in the per-request session data kept in ngx.ctx.
-- @return true + status message (writes are silently skipped without a session)
utils.set_session = function(key, value)
	if not ngx.ctx.session or ngx.ctx.session_err then
		return true, "no session"
	end
	ngx.ctx.session_data[key] = value
	return true, "value set"
end
|
||||
|
||||
-- NOTE(review): this redefines utils.get_session declared earlier in this
-- file (the session starter, which takes no argument) with a different
-- signature ; the earlier definition becomes unreachable. Confirm whether
-- this getter should be named differently (e.g. utils.get_session_var).
-- Read a value from the per-request session data cached in ngx.ctx.
utils.get_session = function(key)
	if ngx.ctx.session and not ngx.ctx.session_err then
		return true, "value get", ngx.ctx.session_data[key]
	end
	return false, "no session"
end
|
||||
|
||||
return utils
|
|
@ -1,71 +0,0 @@
|
|||
local M = {}
|
||||
local redis = require "resty.redis"
|
||||
local utils = require "utils"
|
||||
|
||||
function M:connect()
|
||||
-- Instantiate object
|
||||
local redis_client, err = redis:new()
|
||||
if redis_client == nil then
|
||||
return false, err
|
||||
end
|
||||
-- Get variables
|
||||
local variables = {
|
||||
["REDIS_HOST"] = "",
|
||||
["REDIS_PORT"] = "",
|
||||
["REDIS_DATABASE"] = "",
|
||||
["REDIS_SSL"] = "",
|
||||
["REDIS_TIMEOUT"] = "",
|
||||
["REDIS_KEEPALIVE_IDLE"] = "",
|
||||
["REDIS_KEEPALIVE_POOL"] = ""
|
||||
}
|
||||
for k, v in pairs(variables) do
|
||||
local value, err = utils.get_variable(k, false)
|
||||
if value == nil then
|
||||
return false, err
|
||||
end
|
||||
variables[k] = value
|
||||
end
|
||||
-- Set timeouts
|
||||
redis_client:set_timeouts(tonumber(variables["REDIS_TIMEOUT"]), tonumber(variables["REDIS_TIMEOUT"]), tonumber(variables["REDIS_TIMEOUT"]))
|
||||
-- Connect
|
||||
local options = {
|
||||
ssl = variables["REDIS_SSL"] == "yes",
|
||||
pool = "bw",
|
||||
pool_size = tonumber(variables["REDIS_KEEPALIVE_POOL"])
|
||||
}
|
||||
local ok, err = redis_client:connect(variables["REDIS_HOST"], tonumber(variables["REDIS_PORT"]), options)
|
||||
if not ok then
|
||||
return false, err
|
||||
end
|
||||
-- Select database if needed
|
||||
local times, err = redis_client:get_reused_times()
|
||||
if err then
|
||||
return false, err
|
||||
end
|
||||
if times == 0 then
|
||||
local select, err = redis_client:select(tonumber(variables["REDIS_DATABASE"]))
|
||||
if err then
|
||||
return false, err
|
||||
end
|
||||
end
|
||||
return redis_client
|
||||
end
|
||||
|
||||
function M:close(redis_client)
|
||||
-- Get variables
|
||||
local variables = {
|
||||
["REDIS_KEEPALIVE_IDLE"] = "",
|
||||
["REDIS_KEEPALIVE_POOL"] = ""
|
||||
}
|
||||
for k, v in pairs(variables) do
|
||||
local value, err = utils.get_variable(k, false)
|
||||
if value == nil then
|
||||
return false, err
|
||||
end
|
||||
variables[k] = value
|
||||
end
|
||||
-- Equivalent to close but keep a pool of connections
|
||||
return redis_client:set_keepalive(tonumber(variables["REDIS_KEEPALIVE_IDLE"]), tonumber(variables["REDIS_KEEPALIVE_POOL"]))
|
||||
end
|
||||
|
||||
return M
|
|
@ -1,9 +0,0 @@
|
|||
local M = {}
|
||||
local errlog = require "ngx.errlog"
|
||||
|
||||
function M.log (level, prefix, msg)
|
||||
errlog.raw_log(level, "[" .. prefix .. "] " .. msg)
|
||||
end
|
||||
|
||||
return M
|
||||
|
|
@ -0,0 +1,194 @@
|
|||
--- middleclass v4.1.1 (kikito/middleclass), vendored.
-- A small object-orientation helper built on plain tables and metatables:
-- class creation, single inheritance, mixins and instance allocation.
-- NOTE(review): kept semantically identical to upstream; only comments,
-- layout and internal local names differ.
local middleclass = {
  _VERSION     = 'middleclass v4.1.1',
  _DESCRIPTION = 'Object Orientation for Lua',
  _URL         = 'https://github.com/kikito/middleclass',
  _LICENSE     = [[
MIT LICENSE

Copyright (c) 2011 Enrique García Cota

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
]]
}

-- Build the value stored under "__index" in a class' instance dictionary.
-- When the user declares a custom __index (function or table), method
-- lookup must still hit the class' own dictionary first and only then
-- fall back to the user-supplied handler.
local function _createIndexWrapper(klass, fallback)
  if fallback == nil then
    -- No custom __index: the instance dictionary itself is the index.
    return klass.__instanceDict
  elseif type(fallback) == "function" then
    return function(self, name)
      local value = klass.__instanceDict[name]
      if value ~= nil then
        return value
      else
        -- Extra parentheses truncate the call to a single return value.
        return (fallback(self, name))
      end
    end
  else -- fallback is a table
    return function(self, name)
      local value = klass.__instanceDict[name]
      if value ~= nil then
        return value
      else
        return fallback[name]
      end
    end
  end
end

-- Store a method in a class' instance dictionary and push it down to
-- every subclass that has not declared its own override.
local function _propagateInstanceMethod(klass, name, fn)
  fn = name == "__index" and _createIndexWrapper(klass, fn) or fn
  klass.__instanceDict[name] = fn

  for subclass in pairs(klass.subclasses) do
    if rawget(subclass.__declaredMethods, name) == nil then
      _propagateInstanceMethod(subclass, name, fn)
    end
  end
end

-- Record a method declaration on a class.  Declaring nil removes the
-- override and re-inherits the superclass implementation, if any.
local function _declareInstanceMethod(klass, name, fn)
  klass.__declaredMethods[name] = fn

  if fn == nil and klass.super then
    fn = klass.super.__instanceDict[name]
  end

  _propagateInstanceMethod(klass, name, fn)
end

-- Class-level metamethods: printable name and Class(...) as Class:new(...).
local function _tostring(self) return "class " .. self.name end
local function _call(self, ...) return self:new(...) end

-- Allocate the raw class table, wiring up static lookup so that
-- Class.someStatic falls back to the superclass when unset.
local function _createClass(name, super)
  local dict = {}
  dict.__index = dict

  local klass = { name = name, super = super, static = {},
                  __instanceDict = dict, __declaredMethods = {},
                  -- weak keys so dead subclasses can be collected
                  subclasses = setmetatable({}, {__mode='k'}) }

  if super then
    setmetatable(klass.static, {
      __index = function(_,k)
        local result = rawget(dict,k)
        if result == nil then
          return super.static[k]
        end
        return result
      end
    })
  else
    setmetatable(klass.static, { __index = function(_,k) return rawget(dict,k) end })
  end

  -- Assigning Class.foo = fn routes through _declareInstanceMethod.
  setmetatable(klass, { __index = klass.static, __tostring = _tostring,
                        __call = _call, __newindex = _declareInstanceMethod })

  return klass
end

-- Copy a mixin's methods (and statics) onto a class, then run its
-- optional included() hook.
local function _includeMixin(klass, mixin)
  assert(type(mixin) == 'table', "mixin must be a table")

  for name,method in pairs(mixin) do
    if name ~= "included" and name ~= "static" then klass[name] = method end
  end

  for name,method in pairs(mixin.static or {}) do
    klass.static[name] = method
  end

  if type(mixin.included)=="function" then mixin:included(klass) end
  return klass
end

-- Behaviour shared by every root class: allocation, construction,
-- subclassing, type checks and mixin inclusion.
local DefaultMixin = {
  __tostring = function(self) return "instance of " .. tostring(self.class) end,

  -- Default constructor: does nothing; classes override it.
  initialize = function(self, ...) end,

  isInstanceOf = function(self, aClass)
    return type(aClass) == 'table'
       and type(self) == 'table'
       and (self.class == aClass
            or type(self.class) == 'table'
            and type(self.class.isSubclassOf) == 'function'
            and self.class:isSubclassOf(aClass))
  end,

  static = {
    -- Create an uninitialized instance bound to the class' dictionary.
    allocate = function(self)
      assert(type(self) == 'table', "Make sure that you are using 'Class:allocate' instead of 'Class.allocate'")
      return setmetatable({ class = self }, self.__instanceDict)
    end,

    -- allocate + initialize(...): the normal construction path.
    new = function(self, ...)
      assert(type(self) == 'table', "Make sure that you are using 'Class:new' instead of 'Class.new'")
      local instance = self:allocate()
      instance:initialize(...)
      return instance
    end,

    -- Derive a new class, copying down all inherited instance methods.
    subclass = function(self, name)
      assert(type(self) == 'table', "Make sure that you are using 'Class:subclass' instead of 'Class.subclass'")
      assert(type(name) == "string", "You must provide a name(string) for your class")

      local subclass = _createClass(name, self)

      for methodName, f in pairs(self.__instanceDict) do
        if not (methodName == "__index" and type(f) == "table") then
          _propagateInstanceMethod(subclass, methodName, f)
        end
      end
      -- Delegate construction to the parent until overridden.
      subclass.initialize = function(instance, ...) return self.initialize(instance, ...) end

      self.subclasses[subclass] = true
      self:subclassed(subclass)

      return subclass
    end,

    -- Hook invoked when a subclass is created; no-op by default.
    subclassed = function(self, other) end,

    isSubclassOf = function(self, other)
      return type(other)      == 'table' and
             type(self.super) == 'table' and
             ( self.super == other or self.super:isSubclassOf(other) )
    end,

    include = function(self, ...)
      assert(type(self) == 'table', "Make sure you that you are using 'Class:include' instead of 'Class.include'")
      for _,mixin in ipairs({...}) do _includeMixin(self, mixin) end
      return self
    end
  }
}

--- Create a new class (root or subclass of `super`).
-- @param name class name (string, required)
-- @param super optional parent class
function middleclass.class(name, super)
  assert(type(name) == 'string', "A name (string) is needed for the new class")
  return super and super:subclass(name) or _includeMixin(_createClass(name), DefaultMixin)
end

-- middleclass(...) is shorthand for middleclass.class(...)
setmetatable(middleclass, { __call = function(_, ...) return middleclass.class(...) end })

return middleclass
|
||||
|
|
@ -15,23 +15,25 @@ server {
|
|||
|
||||
# check IP and do the API call
|
||||
access_by_lua_block {
|
||||
local api = require "api"
|
||||
local logger = require "logger"
|
||||
local capi = require "bunkerweb.api"
|
||||
local clogger = require "bunkerweb.logger"
|
||||
local logger = clogger:new("API")
|
||||
local api = capi:new()
|
||||
if not ngx.var.http_host or ngx.var.http_host ~= "{{ API_SERVER_NAME }}" then
|
||||
logger.log(ngx.WARN, "API", "Wrong Host header from IP " .. ngx.var.remote_addr)
|
||||
logger:log(ngx.WARN, "wrong Host header from IP " .. ngx.var.remote_addr)
|
||||
return ngx.exit(ngx.HTTP_CLOSE)
|
||||
end
|
||||
local ok, err = api:is_allowed_ip()
|
||||
if not ok then
|
||||
logger.log(ngx.WARN, "API", "Can't validate access from IP " .. ngx.var.remote_addr .. " : " .. err)
|
||||
logger:log(ngx.WARN, "can't validate access from IP " .. ngx.var.remote_addr .. " : " .. err)
|
||||
return ngx.exit(ngx.HTTP_CLOSE)
|
||||
end
|
||||
logger.log(ngx.NOTICE, "API", "Validated access from IP " .. ngx.var.remote_addr)
|
||||
logger:log(ngx.NOTICE, "validated access from IP " .. ngx.var.remote_addr)
|
||||
local ok, err, status, resp = api:do_api_call()
|
||||
if not ok then
|
||||
logger.log(ngx.WARN, "API", "Call from " .. ngx.var.remote_addr .. " on " .. ngx.var.uri .. " failed : " .. err)
|
||||
logger:log(ngx.WARN, "call from " .. ngx.var.remote_addr .. " on " .. ngx.var.uri .. " failed : " .. err)
|
||||
else
|
||||
logger.log(ngx.NOTICE, "API", "Successful call from " .. ngx.var.remote_addr .. " on " .. ngx.var.uri .. " : " .. err)
|
||||
logger:log(ngx.NOTICE, "successful call from " .. ngx.var.remote_addr .. " on " .. ngx.var.uri .. " : " .. err)
|
||||
end
|
||||
ngx.status = status
|
||||
ngx.say(resp)
|
||||
|
|
|
@ -33,46 +33,54 @@ server {
|
|||
|
||||
log_by_lua_block {
|
||||
|
||||
local utils = require "utils"
|
||||
local logger = require "logger"
|
||||
local datastore = require "datastore"
|
||||
local plugins = require "plugins"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local logger = require "bunkerweb.logger"
|
||||
local datastore = require "bunkerweb.datastore"
|
||||
|
||||
logger.log(ngx.INFO, "LOG", "Log phase started")
|
||||
-- Start log phase
|
||||
logger:new("LOG-DEFAULT")
|
||||
datastore:new()
|
||||
logger:log(ngx.INFO, "log_default phase started")
|
||||
|
||||
-- List all plugins
|
||||
local list, err = plugins:list()
|
||||
if not list then
|
||||
logger.log(ngx.ERR, "LOG", "Can't list loaded plugins : " .. err)
|
||||
list = {}
|
||||
-- Get plugins
|
||||
local plugins, err = datastore:get("plugins")
|
||||
if not plugins then
|
||||
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
|
||||
-- Call log_default method of plugins
|
||||
for i, plugin in ipairs(list) do
|
||||
local ret, plugin_lua = pcall(require, plugin.id .. "/" .. plugin.id)
|
||||
if ret then
|
||||
local plugin_obj = plugin_lua.new()
|
||||
if plugin_obj.log_default ~= nil then
|
||||
logger.log(ngx.INFO, "LOG", "Executing log_default() of " .. plugin.id)
|
||||
local ok, err = plugin_obj:log_default()
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "LOG", "Error while calling log_default() on plugin " .. plugin.id .. " : " .. err)
|
||||
else
|
||||
logger.log(ngx.INFO, "LOG", "Return value from " .. plugin.id .. ".log_default() is : " .. err)
|
||||
end
|
||||
-- Call log() methods
|
||||
logger:log(ngx.INFO, "calling log_default() methods of plugins ...")
|
||||
for i, plugin in ipairs(plugins) do
|
||||
local plugin_lua, err = helpers.new_plugin(plugin.id)
|
||||
if plugin_lua == false then
|
||||
logger:log(ngx.ERR, err)
|
||||
else
|
||||
logger:log(ngx.INFO, err)
|
||||
end
|
||||
if plugin_lua ~= nil then
|
||||
local ok, ret = helpers.call_plugin(plugin_lua, "log_default")
|
||||
if ok == false then
|
||||
logger:log(ngx.ERR, ret)
|
||||
elseif ok == nil then
|
||||
logger:log(ngx.INFO, ret)
|
||||
else
|
||||
logger.log(ngx.INFO, "LOG", "log_default() method not found in " .. plugin.id .. ", skipped execution")
|
||||
if ret.ret then
|
||||
logger:log(ngx.INFO, plugin.id .. ":log_default() call successful : " .. ret.msg)
|
||||
else
|
||||
logger:log(ngx.ERR, plugin.id .. ":log_default() call failed : " .. ret.msg)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
logger:log(ngx.INFO, "called log_default() methods of plugins")
|
||||
|
||||
-- Display reason at info level
|
||||
local reason = utils.get_reason()
|
||||
if reason then
|
||||
logger.log(ngx.INFO, "LOG", "Client was denied with reason : " .. reason)
|
||||
if ngx.ctx.reason then
|
||||
logger:log(ngx.INFO, "client was denied with reason : " .. reason)
|
||||
end
|
||||
|
||||
logger.log(ngx.INFO, "LOG", "Log phase ended")
|
||||
logger:log(ngx.INFO, "log_default phase ended")
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -45,6 +45,10 @@ lua_package_cpath "/usr/share/bunkerweb/deps/lib/?.so;/usr/share/bunkerweb/deps/
|
|||
lua_ssl_trusted_certificate "/usr/share/bunkerweb/misc/root-ca.pem";
|
||||
lua_ssl_verify_depth 2;
|
||||
lua_shared_dict datastore {{ DATASTORE_MEMORY_SIZE }};
|
||||
lua_shared_dict cachestore {{ CACHESTORE_MEMORY_SIZE }};
|
||||
lua_shared_dict cachestore_ipc {{ CACHESTORE_IPC_MEMORY_SIZE }};
|
||||
lua_shared_dict cachestore_miss {{ CACHESTORE_MISS_MEMORY_SIZE }};
|
||||
lua_shared_dict cachestore_locks {{ CACHESTORE_LOCKS_MEMORY_SIZE }};
|
||||
|
||||
# LUA init block
|
||||
include /etc/nginx/init-lua.conf;
|
||||
|
|
|
@ -1,118 +1,177 @@
|
|||
init_by_lua_block {
|
||||
|
||||
local logger = require "logger"
|
||||
local datastore = require "datastore"
|
||||
local plugins = require "plugins"
|
||||
local utils = require "utils"
|
||||
local class = require "middleclass"
|
||||
local logger = require "bunkerweb.logger"
|
||||
local helpers = require "bunkerweb.helpers"
|
||||
local datastore = require "bunkerweb.datastore"
|
||||
local cjson = require "cjson"
|
||||
|
||||
logger.log(ngx.NOTICE, "INIT", "Init phase started")
|
||||
-- Start init phase
|
||||
local init_logger = logger:new("INIT")
|
||||
local ds = datastore:new()
|
||||
init_logger:log(ngx.NOTICE, "init phase started")
|
||||
|
||||
-- Remove previous data from the datastore
|
||||
init_logger:log(ngx.NOTICE, "deleting old keys from datastore ...")
|
||||
local data_keys = {"^plugin_", "^variable_", "^plugins$", "^api_", "^misc_"}
|
||||
for i, key in pairs(data_keys) do
|
||||
local ok, err = datastore:delete_all(key)
|
||||
local ok, err = ds:delete_all(key)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "INIT", "Can't delete " .. key .. " from datastore : " .. err)
|
||||
init_logger:log(ngx.ERR, "can't delete " .. key .. " from datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
logger.log(ngx.INFO, "INIT", "Deleted " .. key .. " from datastore")
|
||||
init_logger:log(ngx.INFO, "deleted " .. key .. " from datastore")
|
||||
end
|
||||
init_logger:log(ngx.NOTICE, "deleted old keys from datastore")
|
||||
|
||||
-- Load variables into the datastore
|
||||
init_logger:log(ngx.NOTICE, "saving variables into datastore ...")
|
||||
local file = io.open("/etc/nginx/variables.env")
|
||||
if not file then
|
||||
logger.log(ngx.ERR, "INIT", "Can't open /etc/nginx/variables.env file")
|
||||
init_logger:log(ngx.ERR, "can't open /etc/nginx/variables.env file")
|
||||
return false
|
||||
end
|
||||
file:close()
|
||||
for line in io.lines("/etc/nginx/variables.env") do
|
||||
local variable, value = line:match("(.+)=(.*)")
|
||||
ok, err = datastore:set("variable_" .. variable, value)
|
||||
local ok, err = ds:set("variable_" .. variable, value)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "INIT", "Can't save variable " .. variable .. " into datastore")
|
||||
init_logger:log(ngx.ERR, "can't save variable " .. variable .. " into datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
init_logger:log(ngx.INFO, "saved variable " .. variable .. "=" .. value .. " into datastore")
|
||||
end
|
||||
init_logger:log(ngx.NOTICE, "saved variables into datastore")
|
||||
|
||||
-- Set default values into the datastore
|
||||
ok, err = datastore:set("plugins", cjson.encode({}))
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "INIT", "Can't set default value for plugins into the datastore : " .. err)
|
||||
-- Set misc values into the datastore
|
||||
init_logger:log(ngx.NOTICE, "saving misc values into datastore ...")
|
||||
local miscs = {
|
||||
reserved_ips = {
|
||||
"0.0.0.0/8",
|
||||
"10.0.0.0/8",
|
||||
"100.64.0.0/10",
|
||||
"127.0.0.0/8",
|
||||
"169.254.0.0/16",
|
||||
"172.16.0.0/12",
|
||||
"192.0.0.0/24",
|
||||
"192.88.99.0/24",
|
||||
"192.168.0.0/16",
|
||||
"198.18.0.0/15",
|
||||
"198.51.100.0/24",
|
||||
"203.0.113.0/24",
|
||||
"224.0.0.0/4",
|
||||
"233.252.0.0/24",
|
||||
"240.0.0.0/4",
|
||||
"255.255.255.255/32"
|
||||
},
|
||||
resolvers = {}
|
||||
}
|
||||
local var_resolvers, err = ds:get("variable_DNS_RESOLVERS")
|
||||
if not var_resolvers then
|
||||
init_logger:log(ngx.ERR, "can't get variable DNS_RESOLVERS from datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
ok, err = utils.set_values()
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "INIT", "Error while setting default values : " .. err)
|
||||
return false
|
||||
for str_resolver in var_resolvers:gmatch("%S+") do
|
||||
table.insert(miscs.resolvers, str_resolver)
|
||||
end
|
||||
for k, v in pairs(miscs) do
|
||||
local ok, err = ds:set("misc_" .. k, cjson.encode(v))
|
||||
if not ok then
|
||||
init_logger:log(ngx.ERR, "can't save misc " .. k .. " into datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
init_logger:log(ngx.INFO, "saved misc " .. k .. " into datastore")
|
||||
end
|
||||
init_logger:log(ngx.NOTICE, "saved misc values into datastore")
|
||||
|
||||
-- API setup
|
||||
local value, err = datastore:get("variable_USE_API")
|
||||
-- Set API values into the datastore
|
||||
init_logger:log(ngx.NOTICE, "saving API values into datastore ...")
|
||||
local value, err = ds:get("variable_USE_API")
|
||||
if not value then
|
||||
logger.log(ngx.ERR, "INIT", "Can't get variable USE_API from the datastore")
|
||||
init_logger:log(ngx.ERR, "can't get variable USE_API from the datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
if value == "yes" then
|
||||
value, err = datastore:get("variable_API_WHITELIST_IP")
|
||||
local value, err = ds:get("variable_API_WHITELIST_IP")
|
||||
if not value then
|
||||
logger.log(ngx.ERR, "INIT", "Can't get variable API_WHITELIST_IP from the datastore")
|
||||
init_logger:log(ngx.ERR, "can't get variable API_WHITELIST_IP from the datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
local whitelists = { data = {}}
|
||||
local whitelists = {}
|
||||
for whitelist in value:gmatch("%S+") do
|
||||
table.insert(whitelists.data, whitelist)
|
||||
table.insert(whitelists, whitelist)
|
||||
end
|
||||
ok, err = datastore:set("api_whitelist_ip", cjson.encode(whitelists))
|
||||
local ok, err = ds:set("api_whitelist_ip", cjson.encode(whitelists))
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "INIT", "Can't save api_whitelist_ip to datastore : " .. err)
|
||||
init_logger:log(ngx.ERR, "can't save API whitelist_ip to datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
init_logger:log(ngx.INFO, "saved API whitelist_ip into datastore")
|
||||
end
|
||||
init_logger:log(ngx.NOTICE, "saved API values into datastore")
|
||||
|
||||
-- Load plugins into the datastore
|
||||
init_logger:log(ngx.NOTICE, "saving plugins into datastore ...")
|
||||
local plugins = {}
|
||||
local plugin_paths = {"/usr/share/bunkerweb/core", "/etc/bunkerweb/plugins"}
|
||||
for i, plugin_path in ipairs(plugin_paths) do
|
||||
local paths = io.popen("find -L " .. plugin_path .. " -maxdepth 1 -type d ! -path " .. plugin_path)
|
||||
for path in paths:lines() do
|
||||
plugin, err = plugins:load(path)
|
||||
if not plugin then
|
||||
logger.log(ngx.ERR, "INIT", "Error while loading plugin from " .. path .. " : " .. err)
|
||||
return false
|
||||
local ok, plugin = helpers.load_plugin(path .. "/plugin.json")
|
||||
if not ok then
|
||||
init_logger:log(ngx.ERR, plugin)
|
||||
else
|
||||
local ok, err = ds:set("plugin_" .. plugin.id, cjson.encode(plugin))
|
||||
if not ok then
|
||||
init_logger:log(ngx.ERR, "can't save " .. plugin.id .. " into datastore : " .. err)
|
||||
else
|
||||
table.insert(plugins, plugin)
|
||||
table.sort(plugins, function (a, b)
|
||||
return a.order < b.order
|
||||
end)
|
||||
init_logger:log(ngx.NOTICE, "loaded plugin " .. plugin.id .. " v" .. plugin.version)
|
||||
end
|
||||
end
|
||||
logger.log(ngx.NOTICE, "INIT", "Loaded plugin " .. plugin.id .. " v" .. plugin.version)
|
||||
end
|
||||
end
|
||||
|
||||
-- Call init method of plugins
|
||||
local list, err = plugins:list()
|
||||
if not list then
|
||||
logger.log(ngx.ERR, "INIT", "Can't list loaded plugins : " .. err)
|
||||
list = {}
|
||||
local ok, err = ds:set("plugins", cjson.encode(plugins))
|
||||
if not ok then
|
||||
init_logger:log(ngx.ERR, "can't save plugins into datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
for i, plugin in ipairs(list) do
|
||||
local ret, plugin_lua = pcall(require, plugin.id .. "/" .. plugin.id)
|
||||
if ret then
|
||||
local plugin_obj = plugin_lua.new()
|
||||
if plugin_obj.init ~= nil then
|
||||
ok, err = plugin_obj:init()
|
||||
init_logger:log(ngx.NOTICE, "saved plugins into datastore")
|
||||
|
||||
-- Call init() methods
|
||||
init_logger:log(ngx.NOTICE, "calling init() methods of plugins ...")
|
||||
for i, plugin in ipairs(plugins) do
|
||||
-- Require call
|
||||
local plugin_lua, err = helpers.require_plugin(plugin.id)
|
||||
if plugin_lua == false then
|
||||
init_logger:log(ngx.ERR, err)
|
||||
elseif plugin_lua == nil then
|
||||
init_logger:log(ngx.NOTICE, err)
|
||||
else
|
||||
-- Check if plugin has init method
|
||||
if plugin_lua.init ~= nil then
|
||||
-- New call
|
||||
local ok, plugin_obj = helpers.new_plugin(plugin_lua)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "INIT", "Plugin " .. plugin.id .. " failed on init() : " .. err)
|
||||
init_logger:log(ngx.ERR, plugin_obj)
|
||||
else
|
||||
logger.log(ngx.INFO, "INIT", "Successfull init() call for plugin " .. plugin.id .. " : " .. err)
|
||||
local ok, ret = helpers.call_plugin(plugin_obj, "init")
|
||||
if not ok then
|
||||
init_logger:log(ngx.ERR, ret)
|
||||
else
|
||||
init_logger:log(ngx.NOTICE, plugin.id .. ":init() call successful : " .. ret.msg)
|
||||
end
|
||||
end
|
||||
else
|
||||
logger.log(ngx.INFO, "INIT", "init() method not found in " .. plugin.id .. ", skipped execution")
|
||||
end
|
||||
else
|
||||
if plugin_lua:match("not found") then
|
||||
logger.log(ngx.INFO, "INIT", "can't require " .. plugin.id .. " : not found")
|
||||
else
|
||||
logger.log(ngx.ERR, "INIT", "can't require " .. plugin.id .. " : " .. plugin_lua)
|
||||
init_logger:log(ngx.NOTICE, "skipped execution of " .. plugin.id .. " because method init() is not defined")
|
||||
end
|
||||
end
|
||||
end
|
||||
init_logger:log(ngx.NOTICE, "called init() methods of plugins")
|
||||
|
||||
logger.log(ngx.NOTICE, "INIT", "Init phase ended")
|
||||
init_logger:log(ngx.NOTICE, "init phase ended")
|
||||
|
||||
}
|
||||
|
|
|
@ -1,87 +1,116 @@
|
|||
access_by_lua_block {
|
||||
|
||||
local logger = require "logger"
|
||||
local datastore = require "datastore"
|
||||
local plugins = require "plugins"
|
||||
local utils = require "utils"
|
||||
local redisutils = require "redisutils"
|
||||
local class = require "middleclass"
|
||||
local clogger = require "bunkerweb.logger"
|
||||
local helpers = require "bunkerweb.helpers"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local cdatastore = require "bunkerweb.datastore"
|
||||
local ccachestore = require "bunkerweb.cachestore"
|
||||
local cjson = require "cjson"
|
||||
|
||||
-- Don't process internal requests
|
||||
local logger = clogger:new("ACCESS")
|
||||
if ngx.req.is_internal() then
|
||||
logger.log(ngx.INFO, "ACCESS", "Skipped access phase because request is internal")
|
||||
return
|
||||
logger:log(ngx.INFO, "skipped access phase because request is internal")
|
||||
return true
|
||||
end
|
||||
|
||||
logger.log(ngx.INFO, "ACCESS", "Access phase started")
|
||||
-- Start access phase
|
||||
local datastore = cdatastore:new()
|
||||
local use_redis, err = utils.get_variable("USE_REDIS", false)
|
||||
if not use_redis then
|
||||
logger:log(ngx.ERR, err)
|
||||
end
|
||||
local cachestore = ccachestore:new(use_redis == "yes")
|
||||
logger:log(ngx.INFO, "access phase started")
|
||||
|
||||
-- Update cachestore only once and before any other code
|
||||
local ok, err = cachestore.cache:update()
|
||||
if not ok then
|
||||
logger:log(ngx.ERR, "can't update cachestore : " .. err)
|
||||
end
|
||||
|
||||
-- Process bans as soon as possible
|
||||
local banned = nil
|
||||
-- Redis case
|
||||
local use_redis = utils.get_variable("USE_REDIS")
|
||||
if use_redis == "yes" then
|
||||
local redis_banned, reason = redisutils.ban(ngx.var.remote_addr)
|
||||
if redis_banned == nil then
|
||||
logger.log(ngx.ERR, "ACCESS", "Error while checking ban from redis, falling back to local : " .. reason)
|
||||
elseif not redis_banned then
|
||||
banned = false
|
||||
else
|
||||
banned = reason
|
||||
end
|
||||
end
|
||||
-- Local case
|
||||
if banned == nil then
|
||||
local reason, err = datastore:get("bans_ip_" .. ngx.var.remote_addr)
|
||||
if reason then
|
||||
banned = reason
|
||||
else
|
||||
banned = false
|
||||
end
|
||||
end
|
||||
-- Deny request
|
||||
if banned then
|
||||
logger.log(ngx.WARN, "ACCESS", "IP " .. ngx.var.remote_addr .. " is banned with reason : " .. banned)
|
||||
ngx.exit(utils.get_deny_status())
|
||||
local ok, reason = datastore:get("bans_ip_" .. ngx.var.remote_addr)
|
||||
if not ok and reason then
|
||||
logger:log(ngx.INFO, "error while checking if client is banned : " .. reason)
|
||||
return false
|
||||
elseif reason then
|
||||
logger:log(ngx.WARN, "IP " .. ngx.var.remote_addr .. " is banned with reason : " .. reason)
|
||||
return ngx.exit(utils.get_deny_status())
|
||||
end
|
||||
|
||||
-- List all plugins
|
||||
local list, err = plugins:list()
|
||||
if not list then
|
||||
logger.log(ngx.ERR, "ACCESS", "Can't list loaded plugins : " .. err)
|
||||
list = {}
|
||||
-- Get plugins
|
||||
local plugins, err = datastore:get("plugins")
|
||||
if not plugins then
|
||||
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
plugins = cjson.decode(plugins)
|
||||
|
||||
-- Call access method of plugins
|
||||
for i, plugin in ipairs(list) do
|
||||
local ret, plugin_lua = pcall(require, plugin.id .. "/" .. plugin.id)
|
||||
if ret then
|
||||
local plugin_obj = plugin_lua.new()
|
||||
if plugin_obj.access ~= nil then
|
||||
logger.log(ngx.INFO, "ACCESS", "Executing access() of " .. plugin.id)
|
||||
local ok, err, ret, value = plugin_obj:access()
|
||||
-- Call access() methods
|
||||
logger:log(ngx.INFO, "calling access() methods of plugins ...")
|
||||
for i, plugin in ipairs(plugins) do
|
||||
-- Require call
|
||||
local plugin_lua, err = helpers.require_plugin(plugin.id)
|
||||
if plugin_lua == false then
|
||||
logger:log(ngx.ERR, err)
|
||||
elseif plugin_lua == nil then
|
||||
logger:log(ngx.INFO, err)
|
||||
else
|
||||
-- Check if plugin has access method
|
||||
if plugin_lua.access ~= nil then
|
||||
-- New call
|
||||
local ok, plugin_obj = helpers.new_plugin(plugin_lua)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "ACCESS", "Error while calling access() on plugin " .. plugin.id .. " : " .. err)
|
||||
logger:log(ngx.ERR, plugin_obj)
|
||||
else
|
||||
logger.log(ngx.INFO, "ACCESS", "Return value from " .. plugin.id .. ".access() is : " .. err)
|
||||
end
|
||||
if ret then
|
||||
if type(value) == "number" then
|
||||
if value == utils.get_deny_status() then
|
||||
logger.log(ngx.WARN, "ACCESS", "Denied access from " .. plugin.id .. " : " .. err)
|
||||
ngx.var.reason = plugin.id
|
||||
else
|
||||
logger.log(ngx.NOTICE, "ACCESS", plugin.id .. " returned status " .. tostring(value) .. " : " .. err)
|
||||
end
|
||||
return ngx.exit(value)
|
||||
local ok, ret = helpers.call_plugin(plugin_obj, "access")
|
||||
if not ok then
|
||||
logger:log(ngx.ERR, ret)
|
||||
else
|
||||
return value
|
||||
logger:log(ngx.INFO, plugin.id .. ":access() call successful : " .. ret.msg)
|
||||
end
|
||||
if ret.status then
|
||||
if ret.status == utils.get_deny_status() then
|
||||
ngx.ctx.reason = plugin.id
|
||||
logger:log(ngx.WARN, "denied access from " .. plugin.id .. " : " .. err)
|
||||
else
|
||||
logger:log(ngx.NOTICE, plugin.id .. " returned status " .. tostring(ret.status) .. " : " .. err)
|
||||
end
|
||||
ngx.ctx.status = ret.status
|
||||
break
|
||||
elseif ret.redirect then
|
||||
logger:log(ngx.NOTICE, plugin.id .. " redirect to " .. ret.redirect .. " : " .. err)
|
||||
ngx.ctx.redirect = ret.redirect
|
||||
break
|
||||
end
|
||||
end
|
||||
else
|
||||
logger.log(ngx.INFO, "ACCESS", "access() method not found in " .. plugin.id .. ", skipped execution")
|
||||
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method access() is not defined")
|
||||
end
|
||||
end
|
||||
end
|
||||
logger:log(ngx.INFO, "called access() methods of plugins")
|
||||
|
||||
logger.log(ngx.INFO, "ACCESS", "Access phase ended")
|
||||
-- Save session if needed
|
||||
local ok, err = utils.save_session()
|
||||
if not ok then
|
||||
logger:log(ngx.ERR, "can't save session : " .. err)
|
||||
end
|
||||
|
||||
logger:log(ngx.INFO, "access phase ended")
|
||||
|
||||
-- Return status if needed
|
||||
if ngx.ctx.status then
|
||||
return ngx.exit(ngx.ctx.status)
|
||||
end
|
||||
|
||||
-- Redirect if needed
|
||||
if ngx.ctx.redirect then
|
||||
return ngx.redirect(ngx.ctx.redirect)
|
||||
end
|
||||
|
||||
return true
|
||||
|
||||
}
|
|
@ -0,0 +1,61 @@
|
|||
header_filter_by_lua_block {
|
||||
|
||||
local class = require "middleclass"
|
||||
local clogger = require "bunkerweb.logger"
|
||||
local helpers = require "bunkerweb.helpers"
|
||||
local cdatastore = require "bunkerweb.datastore"
|
||||
local cjson = require "cjson"
|
||||
|
||||
-- Don't process internal requests
|
||||
local logger = clogger:new("HEADER")
|
||||
if ngx.req.is_internal() then
|
||||
logger:log(ngx.INFO, "skipped header phase because request is internal")
|
||||
return true
|
||||
end
|
||||
|
||||
-- Start set phase
|
||||
local datastore = cdatastore:new()
|
||||
logger:log(ngx.INFO, "header phase started")
|
||||
|
||||
-- Get plugins
|
||||
local plugins, err = datastore:get("plugins")
|
||||
if not plugins then
|
||||
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
plugins = cjson.decode(plugins)
|
||||
|
||||
-- Call header() methods
|
||||
logger:log(ngx.INFO, "calling header() methods of plugins ...")
|
||||
for i, plugin in ipairs(plugins) do
|
||||
-- Require call
|
||||
local plugin_lua, err = helpers.require_plugin(plugin.id)
|
||||
if plugin_lua == false then
|
||||
logger:log(ngx.ERR, err)
|
||||
elseif plugin_lua == nil then
|
||||
logger:log(ngx.INFO, err)
|
||||
else
|
||||
-- Check if plugin has header method
|
||||
if plugin_lua.header ~= nil then
|
||||
-- New call
|
||||
local ok, plugin_obj = helpers.new_plugin(plugin_lua)
|
||||
if not ok then
|
||||
logger:log(ngx.ERR, plugin_obj)
|
||||
else
|
||||
local ok, ret = helpers.call_plugin(plugin_obj, "header")
|
||||
if not ok then
|
||||
logger:log(ngx.ERR, ret)
|
||||
else
|
||||
logger:log(ngx.INFO, plugin.id .. ":header() call successful : " .. ret.msg)
|
||||
end
|
||||
end
|
||||
else
|
||||
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method header() is not defined")
|
||||
end
|
||||
end
|
||||
end
|
||||
logger:log(ngx.INFO, "called header() methods of plugins")
|
||||
|
||||
return true
|
||||
|
||||
}
|
|
@ -1,44 +1,60 @@
|
|||
log_by_lua_block {
|
||||
|
||||
local utils = require "utils"
|
||||
local logger = require "logger"
|
||||
local datastore = require "datastore"
|
||||
local plugins = require "plugins"
|
||||
local class = require "middleclass"
|
||||
local clogger = require "bunkerweb.logger"
|
||||
local helpers = require "bunkerweb.helpers"
|
||||
local cdatastore = require "bunkerweb.datastore"
|
||||
local cjson = require "cjson"
|
||||
|
||||
logger.log(ngx.INFO, "LOG", "Log phase started")
|
||||
-- Start log phase
|
||||
local logger = clogger:new("LOG")
|
||||
local datastore = cdatastore:new()
|
||||
logger:log(ngx.INFO, "log phase started")
|
||||
|
||||
-- List all plugins
|
||||
local list, err = plugins:list()
|
||||
if not list then
|
||||
logger.log(ngx.ERR, "LOG", "Can't list loaded plugins : " .. err)
|
||||
list = {}
|
||||
-- Get plugins
|
||||
local plugins, err = datastore:get("plugins")
|
||||
if not plugins then
|
||||
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
plugins = cjson.decode(plugins)
|
||||
|
||||
-- Call log method of plugins
|
||||
for i, plugin in ipairs(list) do
|
||||
local ret, plugin_lua = pcall(require, plugin.id .. "/" .. plugin.id)
|
||||
if ret then
|
||||
local plugin_obj = plugin_lua.new()
|
||||
if plugin_obj.log ~= nil then
|
||||
logger.log(ngx.INFO, "LOG", "Executing log() of " .. plugin.id)
|
||||
local ok, err = plugin_obj:log()
|
||||
-- Call log() methods
|
||||
logger:log(ngx.INFO, "calling log() methods of plugins ...")
|
||||
for i, plugin in ipairs(plugins) do
|
||||
-- Require call
|
||||
local plugin_lua, err = helpers.require_plugin(plugin.id)
|
||||
if plugin_lua == false then
|
||||
logger:log(ngx.ERR, err)
|
||||
elseif plugin_lua == nil then
|
||||
logger:log(ngx.INFO, err)
|
||||
else
|
||||
-- Check if plugin has log method
|
||||
if plugin_lua.log ~= nil then
|
||||
-- New call
|
||||
local ok, plugin_obj = helpers.new_plugin(plugin_lua)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "LOG", "Error while calling log() on plugin " .. plugin.id .. " : " .. err)
|
||||
logger:log(ngx.ERR, plugin_obj)
|
||||
else
|
||||
logger.log(ngx.INFO, "LOG", "Return value from " .. plugin.id .. ".log() is : " .. err)
|
||||
local ok, ret = helpers.call_plugin(plugin_obj, "log")
|
||||
if not ok then
|
||||
logger:log(ngx.ERR, ret)
|
||||
else
|
||||
logger:log(ngx.INFO, plugin.id .. ":log() call successful : " .. ret.msg)
|
||||
end
|
||||
end
|
||||
else
|
||||
logger.log(ngx.INFO, "LOG", "log() method not found in " .. plugin.id .. ", skipped execution")
|
||||
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method log() is not defined")
|
||||
end
|
||||
end
|
||||
end
|
||||
logger:log(ngx.INFO, "called log() methods of plugins")
|
||||
|
||||
-- Display reason at info level
|
||||
local reason = utils.get_reason()
|
||||
if reason then
|
||||
logger.log(ngx.INFO, "LOG", "Client was denied with reason : " .. reason)
|
||||
if ngx.ctx.reason then
|
||||
logger:log(ngx.INFO, "client was denied with reason : " .. reason)
|
||||
end
|
||||
|
||||
logger.log(ngx.INFO, "LOG", "Log phase ended")
|
||||
logger:log(ngx.INFO, "log phase ended")
|
||||
|
||||
}
|
|
@ -15,9 +15,6 @@ server {
|
|||
include /etc/bunkerweb/configs/server-http/{{ SERVER_NAME.split(" ")[0] }}/*.conf;
|
||||
{% endif %}
|
||||
|
||||
# reason variable
|
||||
set $reason '';
|
||||
|
||||
# include LUA files
|
||||
include {{ NGINX_PREFIX }}set-lua.conf;
|
||||
include {{ NGINX_PREFIX }}access-lua.conf;
|
||||
|
|
|
@ -1,39 +1,62 @@
|
|||
set $dummy_set "";
|
||||
set_by_lua_block $dummy_set {
|
||||
|
||||
local utils = require "utils"
|
||||
local logger = require "logger"
|
||||
local datastore = require "datastore"
|
||||
local plugins = require "plugins"
|
||||
local class = require "middleclass"
|
||||
local clogger = require "bunkerweb.logger"
|
||||
local helpers = require "bunkerweb.helpers"
|
||||
local cdatastore = require "bunkerweb.datastore"
|
||||
local cjson = require "cjson"
|
||||
|
||||
logger.log(ngx.INFO, "SET", "Set phase started")
|
||||
|
||||
-- List all plugins
|
||||
local list, err = plugins:list()
|
||||
if not list then
|
||||
logger.log(ngx.ERR, "SET", "Can't list loaded plugins : " .. err)
|
||||
list = {}
|
||||
-- Don't process internal requests
|
||||
local logger = clogger:new("SET")
|
||||
if ngx.req.is_internal() then
|
||||
logger:log(ngx.INFO, "skipped set phase because request is internal")
|
||||
return true
|
||||
end
|
||||
|
||||
-- Call set method of plugins
|
||||
for i, plugin in ipairs(list) do
|
||||
local ret, plugin_lua = pcall(require, plugin.id .. "/" .. plugin.id)
|
||||
if ret then
|
||||
local plugin_obj = plugin_lua.new()
|
||||
if plugin_obj.set ~= nil then
|
||||
logger.log(ngx.INFO, "SET", "Executing set() of " .. plugin.id)
|
||||
local ok, err = plugin_obj:set()
|
||||
-- Start set phase
|
||||
local datastore = cdatastore:new()
|
||||
logger:log(ngx.INFO, "set phase started")
|
||||
|
||||
-- Get plugins
|
||||
local plugins, err = datastore:get("plugins")
|
||||
if not plugins then
|
||||
logger:log(ngx.ERR, "can't get plugins from datastore : " .. err)
|
||||
return false
|
||||
end
|
||||
plugins = cjson.decode(plugins)
|
||||
|
||||
-- Call set() methods
|
||||
logger:log(ngx.INFO, "calling set() methods of plugins ...")
|
||||
for i, plugin in ipairs(plugins) do
|
||||
-- Require call
|
||||
local plugin_lua, err = helpers.require_plugin(plugin.id)
|
||||
if plugin_lua == false then
|
||||
logger:log(ngx.ERR, err)
|
||||
elseif plugin_lua == nil then
|
||||
logger:log(ngx.INFO, err)
|
||||
else
|
||||
-- Check if plugin has set method
|
||||
if plugin_lua.set ~= nil then
|
||||
-- New call
|
||||
local ok, plugin_obj = helpers.new_plugin(plugin_lua)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "SET", "Error while calling set() on plugin " .. plugin.id .. " : " .. err)
|
||||
logger:log(ngx.ERR, plugin_obj)
|
||||
else
|
||||
logger.log(ngx.INFO, "SET", "Return value from " .. plugin.id .. ".set() is : " .. err)
|
||||
local ok, ret = helpers.call_plugin(plugin_obj, "set")
|
||||
if not ok then
|
||||
logger:log(ngx.ERR, ret)
|
||||
else
|
||||
logger:log(ngx.INFO, plugin.id .. ":set() call successful : " .. ret.msg)
|
||||
end
|
||||
end
|
||||
else
|
||||
logger.log(ngx.INFO, "SET", "set() method not found in " .. plugin.id .. ", skipped execution")
|
||||
logger:log(ngx.INFO, "skipped execution of " .. plugin.id .. " because method set() is not defined")
|
||||
end
|
||||
end
|
||||
end
|
||||
logger:log(ngx.INFO, "called set() methods of plugins")
|
||||
|
||||
logger.log(ngx.INFO, "SET", "Set phase ended")
|
||||
return true
|
||||
|
||||
}
|
|
@ -1,247 +1,220 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local datastore = require "bunkerweb.datastore"
|
||||
local cjson = require "cjson"
|
||||
local session = require "resty.session"
|
||||
local captcha = require "antibot.captcha"
|
||||
local base64 = require "base64"
|
||||
local sha256 = require "resty.sha256"
|
||||
local str = require "resty.string"
|
||||
local http = require "resty.http"
|
||||
local template = require "resty.template"
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
local antibot = class("antibot", plugin)
|
||||
|
||||
function antibot:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "antibot")
|
||||
end
|
||||
|
||||
function _M:init()
|
||||
-- Check if init is needed
|
||||
local init_needed, err = utils.has_not_variable("USE_ANTIBOT", "no")
|
||||
if init_needed == nil then
|
||||
return false, err
|
||||
end
|
||||
if not init_needed then
|
||||
return true, "no service uses Antibot, skipping init"
|
||||
end
|
||||
-- Load templates
|
||||
local templates = {}
|
||||
for i, template in ipairs({ "javascript", "captcha", "recaptcha", "hcaptcha" }) do
|
||||
local f, err = io.open("/usr/share/bunkerweb/core/antibot/files/" .. template .. ".html")
|
||||
if not f then
|
||||
return false, "error while loading " .. template .. ".html : " .. err
|
||||
end
|
||||
templates[template] = f:read("*all")
|
||||
f:close()
|
||||
end
|
||||
local ok, err = datastore:set("plugin_antibot_templates", cjson.encode(templates))
|
||||
if not ok then
|
||||
return false, "can't save templates to datastore : " .. err
|
||||
end
|
||||
return true, "success"
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
function antibot:access()
|
||||
-- Check if access is needed
|
||||
local antibot, err = utils.get_variable("USE_ANTIBOT")
|
||||
if antibot == nil then
|
||||
return false, err, nil, nil
|
||||
end
|
||||
if antibot == "no" then
|
||||
return true, "Antibot not activated", nil, nil
|
||||
if self.variables["USE_ANTIBOT"] == "no" then
|
||||
return self:ret(true, "antibot not activated")
|
||||
end
|
||||
|
||||
-- Get challenge URI
|
||||
local challenge_uri, err = utils.get_variable("ANTIBOT_URI")
|
||||
if not challenge_uri then
|
||||
return false, "can't get Antibot URI from datastore : " .. err, nil, nil
|
||||
-- Prepare challenge
|
||||
local ok, err = self:prepare_challenge(antibot, challenge_uri)
|
||||
if not ok then
|
||||
return self:ret(false, "can't prepare challenge : " .. err, ngx.HTTP_INTERNAL_SERVER_ERROR)
|
||||
end
|
||||
|
||||
-- Don't go further if client resolved the challenge
|
||||
local resolved, err, original_uri = self:challenge_resolved(antibot)
|
||||
if resolved == nil then
|
||||
return false, "can't check if challenge is resolved : " .. err, nil, nil
|
||||
return self:ret(false, "can't check if challenge is resolved : " .. err)
|
||||
end
|
||||
if resolved then
|
||||
if ngx.var.uri == challenge_uri then
|
||||
return true, "client already resolved the challenge", true, ngx.redirect(original_uri)
|
||||
return self:ret(true, "client already resolved the challenge", nil, original_uri)
|
||||
end
|
||||
return true, "client already resolved the challenge", nil, nil
|
||||
return self:ret(true, "client already resolved the challenge")
|
||||
end
|
||||
|
||||
-- Redirect to challenge page
|
||||
if ngx.var.uri ~= challenge_uri then
|
||||
local ok, err = self:prepare_challenge(antibot, challenge_uri)
|
||||
if not ok then
|
||||
return false, "can't prepare challenge : " .. err, true, ngx.HTTP_INTERNAL_SERVER_ERROR
|
||||
end
|
||||
return true, "redirecting client to the challenge uri", true, ngx.redirect(challenge_uri)
|
||||
return self:ret(true, "redirecting client to the challenge uri", nil, challenge_uri)
|
||||
end
|
||||
|
||||
-- Display challenge
|
||||
-- Display challenge needed
|
||||
if ngx.var.request_method == "GET" then
|
||||
local ok, err = self:display_challenge(antibot, challenge_uri)
|
||||
if not ok then
|
||||
if err == "can't open session" then
|
||||
local ok, err = self:prepare_challenge(antibot, challenge_uri)
|
||||
if not ok then
|
||||
return false, "can't prepare challenge : " .. err, true, ngx.HTTP_INTERNAL_SERVER_ERROR
|
||||
end
|
||||
return true, "redirecting client to the challenge uri", true, ngx.redirect(challenge_uri)
|
||||
end
|
||||
return false, "display challenge error : " .. err, true, ngx.HTTP_INTERNAL_SERVER_ERROR
|
||||
end
|
||||
return true, "displaying challenge to client", true, ngx.HTTP_OK
|
||||
ngx.ctx.antibot_display_content = true
|
||||
return self:ret(true, "displaying challenge to client", ngx.HTTP_OK)
|
||||
end
|
||||
|
||||
-- Check challenge
|
||||
if ngx.var.request_method == "POST" then
|
||||
local ok, err, redirect = self:check_challenge(antibot)
|
||||
if ok == nil then
|
||||
if err == "can't open session" then
|
||||
local ok, err = self:prepare_challenge(antibot, challenge_uri)
|
||||
if not ok then
|
||||
return false, "can't prepare challenge : " .. err, true, ngx.HTTP_INTERNAL_SERVER_ERROR
|
||||
end
|
||||
return true, "redirecting client to the challenge uri", true, ngx.redirect(challenge_uri)
|
||||
end
|
||||
return false, "check challenge error : " .. err, true, ngx.HTTP_INTERNAL_SERVER_ERROR
|
||||
return self:ret(false, "check challenge error : " .. err, ngx.HTTP_INTERNAL_SERVER_ERROR)
|
||||
end
|
||||
if redirect then
|
||||
return true, "check challenge redirect : " .. redirect, true, ngx.redirect(redirect)
|
||||
return self:ret(true, "check challenge redirect : " .. redirect, nil, redirect)
|
||||
end
|
||||
local ok, err = self:display_challenge(antibot)
|
||||
if not ok then
|
||||
if err == "can't open session" then
|
||||
local ok, err = self:prepare_challenge(antibot, challenge_uri)
|
||||
if not ok then
|
||||
return false, "can't prepare challenge : " .. err, true, ngx.HTTP_INTERNAL_SERVER_ERROR
|
||||
end
|
||||
return true, "redirecting client to the challenge uri", true, ngx.redirect(challenge_uri)
|
||||
end
|
||||
return false, "display challenge error : " .. err, true, ngx.HTTP_INTERNAL_SERVER_ERROR
|
||||
end
|
||||
return true, "displaying challenge to client", true, ngx.HTTP_OK
|
||||
ngx.ctx.antibot_display_content = true
|
||||
return self:ret(true, "displaying challenge to client", ngx.HTTP_OK)
|
||||
end
|
||||
|
||||
-- Method is suspicious, let's deny the request
|
||||
return true, "unsupported HTTP method for Antibot", true, utils.get_deny_status()
|
||||
return self:ret(true, "unsupported HTTP method for antibot", utils.get_deny_status())
|
||||
end
|
||||
|
||||
function _M:challenge_resolved(antibot)
|
||||
local chall_session, present, reason = session.open()
|
||||
if present and chall_session.data.resolved and chall_session.data.type == antibot then
|
||||
return true, "challenge " .. antibot .. " resolved", chall_session.data.original_uri
|
||||
function antibot:content()
|
||||
-- Check if access is needed
|
||||
local antibot, err = utils.get_variable("USE_ANTIBOT")
|
||||
if antibot == nil then
|
||||
return self:ret(false, err)
|
||||
end
|
||||
return false, "challenge " .. antibot .. " not resolved", nil
|
||||
if antibot == "no" then
|
||||
return self:ret(true, "antibot not activated")
|
||||
end
|
||||
-- Check if display content is needed
|
||||
if not ngx.ctx.antibot_display_content then
|
||||
return self:ret(true, "display content not needed")
|
||||
end
|
||||
-- Display content
|
||||
local ok, err = self:display_challenge(antibot)
|
||||
if not ok then
|
||||
return self:ret(false, "display challenge error : " .. err)
|
||||
end
|
||||
return self:ret(true, "content displayed")
|
||||
end
|
||||
|
||||
function _M:prepare_challenge(antibot, challenge_uri)
|
||||
local chall_session, present, reason = session.open()
|
||||
if not present then
|
||||
local chall_session, present, reason = chall_session:start()
|
||||
if not chall_session then
|
||||
return false, "can't start session", nil
|
||||
end
|
||||
chall_session.data.type = antibot
|
||||
chall_session.data.resolved = false
|
||||
if ngx.var.request_uri == challenge_uri then
|
||||
chall_session.data.original_uri = "/"
|
||||
else
|
||||
chall_session.data.original_uri = ngx.var.request_uri
|
||||
end
|
||||
if antibot == "cookie" then
|
||||
chall_session.data.resolved = true
|
||||
end
|
||||
local saved, err = chall_session:save()
|
||||
if not saved then
|
||||
return false, "error while saving session : " .. err
|
||||
function antibot:challenge_resolved()
|
||||
local session, err, exists = utils.get_session()
|
||||
if err then
|
||||
return false, "session error : " .. err
|
||||
end
|
||||
local raw_data = get_session("antibot")
|
||||
if not raw_data then
|
||||
return false, "session is set but no antibot data", nil
|
||||
end
|
||||
local data = cjson.decode(raw_data)
|
||||
if data.resolved and self.variables["USE_ANTIBOT"] == data.antibot then
|
||||
return true, "challenge resolved", data.original_uri
|
||||
end
|
||||
return false, "challenge not resolved", data.original_uri
|
||||
end
|
||||
|
||||
function antibot:prepare_challenge()
|
||||
local session, err, exists = utils.get_session()
|
||||
if err then
|
||||
return false, "session error : " .. err
|
||||
end
|
||||
local set_needed = false
|
||||
local data = nil
|
||||
if exists then
|
||||
local raw_data = get_session("antibot")
|
||||
if raw_data then
|
||||
data = cjson.decode(raw_data)
|
||||
end
|
||||
end
|
||||
return true, antibot .. " challenge prepared"
|
||||
if not data or current_data.antibot ~= self.variables["USE_ANTIBOT"] then
|
||||
data = {
|
||||
type = self.variables["USE_ANTIBOT"],
|
||||
resolved = self.variables["USE_ANTIBOT"] == "cookie",
|
||||
original_uri = ngx.var.request_uri
|
||||
}
|
||||
if ngx.var.original_uri == challenge_uri then
|
||||
data.original_uri = "/"
|
||||
end
|
||||
set_needed = true
|
||||
end
|
||||
if not data.resolved then
|
||||
if self.variables["USE_ANTIBOT"] == "javascript" then
|
||||
data.random = utils.rand(20)
|
||||
set_needed = true
|
||||
elseif self.variables["USE_ANTIBOT"] == "captcha" then
|
||||
local chall_captcha = captcha.new()
|
||||
chall_captcha:font("/usr/share/bunkerweb/core/antibot/files/font.ttf")
|
||||
chall_captcha:generate()
|
||||
data.image = base64.encode(chall_captcha:jpegStr(70))
|
||||
data.text = chall_captcha:getStr()
|
||||
set_needed = true
|
||||
end
|
||||
end
|
||||
if set_needed then
|
||||
utils.set_session("antibot", cjson.encode(data))
|
||||
end
|
||||
return true, "prepared"
|
||||
end
|
||||
|
||||
function _M:display_challenge(antibot, challenge_uri)
|
||||
function antibot:display_challenge(challenge_uri)
|
||||
-- Open session
|
||||
local chall_session, present, reason = session.open()
|
||||
if not present then
|
||||
return false, "can't open session"
|
||||
local session, err, exists = utils.get_session()
|
||||
if err then
|
||||
return false, "can't open session : " .. err
|
||||
end
|
||||
|
||||
-- Get data
|
||||
local raw_data = get_session("antibot")
|
||||
if not raw_data then
|
||||
return false, "session is set but no data"
|
||||
end
|
||||
local data = cjson.decode(raw_data)
|
||||
|
||||
-- Check if session type is equal to antibot type
|
||||
if antibot ~= chall_session.data.type then
|
||||
if self.variables["USE_ANTIBOT"] ~= data.type then
|
||||
return false, "session type is different from antibot type"
|
||||
end
|
||||
|
||||
-- Compute challenges
|
||||
if antibot == "javascript" then
|
||||
chall_session:start()
|
||||
chall_session.data.random = utils.rand(20)
|
||||
chall_session:save()
|
||||
elseif antibot == "captcha" then
|
||||
chall_session:start()
|
||||
local chall_captcha = captcha.new()
|
||||
chall_captcha:font("/usr/share/bunkerweb/core/antibot/files/font.ttf")
|
||||
chall_captcha:generate()
|
||||
chall_session.data.image = base64.encode(chall_captcha:jpegStr(70))
|
||||
chall_session.data.text = chall_captcha:getStr()
|
||||
chall_session:save()
|
||||
end
|
||||
|
||||
-- Load HTML templates
|
||||
local str_templates, err = datastore:get("plugin_antibot_templates")
|
||||
if not str_templates then
|
||||
return false, "can't get templates from datastore : " .. err
|
||||
end
|
||||
local templates = cjson.decode(str_templates)
|
||||
|
||||
local html = ""
|
||||
-- Common variables for templates
|
||||
local template_vars = {
|
||||
antibot_uri = self.variables["ANTIBOT_URI"]
|
||||
}
|
||||
|
||||
-- Javascript case
|
||||
if antibot == "javascript" then
|
||||
html = templates.javascript:format(challenge_uri, chall_session.data.random)
|
||||
if self.variables["USE_ANTIBOT"] == "javascript" then
|
||||
template_vars.random = data.random
|
||||
end
|
||||
|
||||
-- Captcha case
|
||||
if antibot == "captcha" then
|
||||
html = templates.captcha:format(challenge_uri, chall_session.data.image)
|
||||
if self.variables["USE_ANTIBOT"] == "captcha" then
|
||||
template_vars.captcha = data.image
|
||||
end
|
||||
|
||||
-- reCAPTCHA case
|
||||
if antibot == "recaptcha" then
|
||||
local recaptcha_sitekey, err = utils.get_variable("ANTIBOT_RECAPTCHA_SITEKEY")
|
||||
if not recaptcha_sitekey then
|
||||
return false, "can't get reCAPTCHA sitekey variable : " .. err
|
||||
end
|
||||
html = templates.recaptcha:format(recaptcha_sitekey, challenge_uri, recaptcha_sitekey)
|
||||
if self.variables["USE_ANTIBOT"] == "recaptcha" then
|
||||
template_vars.recaptcha_sitekey = self.variables["ANTIBOT_RECAPTCHA_SITEKEY"]
|
||||
end
|
||||
|
||||
-- hCaptcha case
|
||||
if antibot == "hcaptcha" then
|
||||
local hcaptcha_sitekey, err = utils.get_variable("ANTIBOT_HCAPTCHA_SITEKEY")
|
||||
if not hcaptcha_sitekey then
|
||||
return false, "can't get hCaptcha sitekey variable : " .. err
|
||||
end
|
||||
html = templates.hcaptcha:format(challenge_uri, hcaptcha_sitekey)
|
||||
if self.variables["USE_ANTIBOT"] == "hcaptcha" then
|
||||
template_vars.hcaptcha_sitekey = self.variables["ANTIBOT_HCAPTCHA_SITEKEY"]
|
||||
end
|
||||
|
||||
ngx.header["Content-Type"] = "text/html"
|
||||
ngx.say(html)
|
||||
-- Render content
|
||||
template.render(self.variables["USE_ANTIBOT"] .. ".html", template_vars)
|
||||
|
||||
return true, "displayed challenge"
|
||||
end
|
||||
|
||||
function _M:check_challenge(antibot)
|
||||
function antibot:check_challenge()
|
||||
-- Open session
|
||||
local chall_session, present, reason = session.open()
|
||||
if not present then
|
||||
return nil, "can't open session", nil
|
||||
local session, err, exists = utils.get_session()
|
||||
if err then
|
||||
return nil, "can't open session : " .. err, nil
|
||||
end
|
||||
|
||||
-- Get data
|
||||
local raw_data = get_session("antibot")
|
||||
if not raw_data then
|
||||
return false, "session is set but no data", nil
|
||||
end
|
||||
local data = cjson.decode(raw_data)
|
||||
|
||||
-- Check if session type is equal to antibot type
|
||||
if antibot ~= chall_session.data.type then
|
||||
if elf.variables["USE_ANTIBOT"] ~= data.type then
|
||||
return nil, "session type is different from antibot type", nil
|
||||
end
|
||||
|
||||
|
@ -250,59 +223,53 @@ function _M:check_challenge(antibot)
|
|||
local redirect = nil
|
||||
|
||||
-- Javascript case
|
||||
if antibot == "javascript" then
|
||||
if self.variables["USE_ANTIBOT"] == "javascript" then
|
||||
ngx.req.read_body()
|
||||
local args, err = ngx.req.get_post_args(1)
|
||||
if err == "truncated" or not args or not args["challenge"] then
|
||||
return false, "missing challenge arg", nil
|
||||
end
|
||||
local hash = sha256:new()
|
||||
hash:update(chall_session.data.random .. args["challenge"])
|
||||
hash:update(data.random .. args["challenge"])
|
||||
local digest = hash:final()
|
||||
resolved = str.to_hex(digest):find("^0000") ~= nil
|
||||
if not resolved then
|
||||
return false, "wrong value", nil
|
||||
end
|
||||
chall_session:start()
|
||||
chall_session.data.resolved = true
|
||||
chall_session:save()
|
||||
return true, "resolved", chall_session.data.original_uri
|
||||
data.resolved = true
|
||||
utils.set_session("antibot", cjson.encode(data))
|
||||
return true, "resolved", data.original_uri
|
||||
end
|
||||
|
||||
-- Captcha case
|
||||
if antibot == "captcha" then
|
||||
if self.variables["USE_ANTIBOT"] == "captcha" then
|
||||
ngx.req.read_body()
|
||||
local args, err = ngx.req.get_post_args(1)
|
||||
if err == "truncated" or not args or not args["captcha"] then
|
||||
return false, "missing challenge arg", nil
|
||||
end
|
||||
if chall_session.data.text ~= args["captcha"] then
|
||||
if data.text ~= args["captcha"] then
|
||||
return false, "wrong value", nil
|
||||
end
|
||||
chall_session:start()
|
||||
chall_session.data.resolved = true
|
||||
chall_session:save()
|
||||
return true, "resolved", chall_session.data.original_uri
|
||||
data.resolved = true
|
||||
utils.set_session("antibot", cjson.encode(data))
|
||||
return true, "resolved", data.original_uri
|
||||
end
|
||||
|
||||
-- reCAPTCHA case
|
||||
if antibot == "recaptcha" then
|
||||
if self.variables["USE_ANTIBOT"] == "recaptcha" then
|
||||
ngx.req.read_body()
|
||||
local args, err = ngx.req.get_post_args(1)
|
||||
if err == "truncated" or not args or not args["token"] then
|
||||
return false, "missing challenge arg", nil
|
||||
end
|
||||
local recaptcha_secret, err = utils.get_variable("ANTIBOT_RECAPTCHA_SECRET")
|
||||
if not recaptcha_secret then
|
||||
return nil, "can't get reCAPTCHA secret variable : " .. err, nil
|
||||
end
|
||||
local httpc, err = http.new()
|
||||
if not httpc then
|
||||
return false, "can't instantiate http object : " .. err, nil, nil
|
||||
end
|
||||
local res, err = httpc:request_uri("https://www.google.com/recaptcha/api/siteverify", {
|
||||
method = "POST",
|
||||
body = "secret=" .. recaptcha_secret .. "&response=" .. args["token"] .. "&remoteip=" .. ngx.var.remote_addr,
|
||||
body = "secret=" .. self.variables["ANTIBOT_RECAPTCHA_SECRET"] .. "&response=" .. args["token"] .. "&remoteip=" .. ngx.var.remote_addr,
|
||||
headers = {
|
||||
["Content-Type"] = "application/x-www-form-urlencoded"
|
||||
}
|
||||
|
@ -311,41 +278,32 @@ function _M:check_challenge(antibot)
|
|||
if not res then
|
||||
return nil, "can't send request to reCAPTCHA API : " .. err, nil
|
||||
end
|
||||
local ok, data = pcall(cjson.decode, res.body)
|
||||
local ok, rdata = pcall(cjson.decode, res.body)
|
||||
if not ok then
|
||||
return nil, "error while decoding JSON from reCAPTCHA API : " .. data, nil
|
||||
return nil, "error while decoding JSON from reCAPTCHA API : " .. rdata, nil
|
||||
end
|
||||
local recaptcha_score, err = utils.get_variable("ANTIBOT_RECAPTCHA_SCORE")
|
||||
if not recaptcha_score then
|
||||
return nil, "can't get reCAPTCHA score variable : " .. err, nil
|
||||
if not rdata.success or rdata.score < tonumber(self.variables["ANTIBOT_RECAPTCHA_SCORE"]) then
|
||||
return false, "client failed challenge with score " .. tostring(rdata.score), nil
|
||||
end
|
||||
if not data.success or data.score < tonumber(recaptcha_score) then
|
||||
return false, "client failed challenge with score " .. tostring(data.score), nil
|
||||
end
|
||||
chall_session:start()
|
||||
chall_session.data.resolved = true
|
||||
chall_session:save()
|
||||
return true, "resolved", chall_session.data.original_uri
|
||||
data.resolved = true
|
||||
utils.set_session("antibot", cjson.encode(data))
|
||||
return true, "resolved", data.original_uri
|
||||
end
|
||||
|
||||
-- hCaptcha case
|
||||
if antibot == "hcaptcha" then
|
||||
if self.variables["USE_ANTIBOT"] == "hcaptcha" then
|
||||
ngx.req.read_body()
|
||||
local args, err = ngx.req.get_post_args(1)
|
||||
if err == "truncated" or not args or not args["token"] then
|
||||
return false, "missing challenge arg", nil
|
||||
end
|
||||
local hcaptcha_secret, err = utils.get_variable("ANTIBOT_HCAPTCHA_SECRET")
|
||||
if not hcaptcha_secret then
|
||||
return nil, "can't get hCaptcha secret variable : " .. err, nil
|
||||
end
|
||||
local httpc, err = http.new()
|
||||
if not httpc then
|
||||
return false, "can't instantiate http object : " .. err, nil, nil
|
||||
end
|
||||
local res, err = httpc:request_uri("https://hcaptcha.com/siteverify", {
|
||||
method = "POST",
|
||||
body = "secret=" .. hcaptcha_secret .. "&response=" .. args["token"] .. "&remoteip=" .. ngx.var.remote_addr,
|
||||
body = "secret=" .. self.variables["ANTIBOT_HCAPTCHA_SECRET"] .. "&response=" .. args["token"] .. "&remoteip=" .. ngx.var.remote_addr,
|
||||
headers = {
|
||||
["Content-Type"] = "application/x-www-form-urlencoded"
|
||||
}
|
||||
|
@ -354,20 +312,19 @@ function _M:check_challenge(antibot)
|
|||
if not res then
|
||||
return nil, "can't send request to hCaptcha API : " .. err, nil
|
||||
end
|
||||
local ok, data = pcall(cjson.decode, res.body)
|
||||
local ok, hdata = pcall(cjson.decode, res.body)
|
||||
if not ok then
|
||||
return nil, "error while decoding JSON from hCaptcha API : " .. data, nil
|
||||
end
|
||||
if not data.success then
|
||||
if not hdata.success then
|
||||
return false, "client failed challenge", nil
|
||||
end
|
||||
chall_session:start()
|
||||
chall_session.data.resolved = true
|
||||
chall_session:save()
|
||||
return true, "resolved", chall_session.data.original_uri
|
||||
data.resolved = true
|
||||
utils.set_session("antibot", cjson.encode(data))
|
||||
return true, "resolved", data.original_uri
|
||||
end
|
||||
|
||||
return nil, "unknown", nil
|
||||
end
|
||||
|
||||
return _M
|
||||
return antibot
|
||||
|
|
|
@ -5,7 +5,6 @@
|
|||
local _M = {}
|
||||
|
||||
local gd = require 'gd'
|
||||
local logger = require "logger"
|
||||
|
||||
local mt = { __index = {} }
|
||||
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
{% if USE_ANTIBOT == "yes" +%}
|
||||
location /{{ ANTIBOT_URI }} {
|
||||
root /usr/share/bunkerweb/core/antibot/files;
|
||||
content_by_lua_block {
|
||||
local cantibot = require "antibot.antibot"
|
||||
local clogger = require "bunkerweb.logger"
|
||||
local antibot = cantibot:new()
|
||||
local logger = clogger:new("ANTIBOT")
|
||||
local ok, err = antibot:content()
|
||||
if not ok then
|
||||
logger:log(ngx.ERR, "antibot:content() failed : " .. err)
|
||||
else
|
||||
logger:log(ngx.INFO, "antibot:content() success : " .. err)
|
||||
end
|
||||
}
|
||||
}
|
||||
{% endif %}
|
|
@ -6,26 +6,26 @@
|
|||
<title>Bot Detection</title>
|
||||
<link
|
||||
rel="icon"
|
||||
href="data:image/svg+xml, %%3Csvg version='1.0' xmlns='http://www.w3.org/2000/svg' width='96.000000pt' height='96.000000pt' viewBox='0 0 96.000000 96.000000' preserveAspectRatio='xMidYMid meet'%%3E%%3Cg transform='translate(0.000000,96.000000) scale(0.100000,-0.100000)'%%0Afill='%%23085577' stroke='none'%%3E%%3Cpath d='M535 863 c-22 -2 -139 -17 -260 -34 -228 -31 -267 -43 -272 -85 -2%%0A-10 23 -181 55 -379 l57 -360 400 0 400 0 20 40 c16 31 20 59 19 125 -1 100%%0A-24 165 -73 199 -41 29 -46 57 -22 111 30 67 29 188 -3 256 -13 28 -37 60 -53%%0A72 -55 39 -169 62 -268 55z m-15 -348 c30 -16 60 -61 60 -90 0 -10 -8 -33 -17%%0A-52 -16 -34 -16 -41 0 -116 9 -44 15 -82 12 -85 -6 -7 -92 -21 -131 -21 l-31%%0A-1 -6 85 c-4 75 -8 89 -31 112 -20 20 -26 36 -26 70 0 38 5 50 34 79 39 39 86%%0A45 136 19z'/%%3E%%3C/g%%3E%%3C/svg%%3E"
|
||||
href="data:image/svg+xml, %3Csvg version='1.0' xmlns='http://www.w3.org/2000/svg' width='96.000000pt' height='96.000000pt' viewBox='0 0 96.000000 96.000000' preserveAspectRatio='xMidYMid meet'%3E%3Cg transform='translate(0.000000,96.000000) scale(0.100000,-0.100000)'%0Afill='%23085577' stroke='none'%3E%3Cpath d='M535 863 c-22 -2 -139 -17 -260 -34 -228 -31 -267 -43 -272 -85 -2%0A-10 23 -181 55 -379 l57 -360 400 0 400 0 20 40 c16 31 20 59 19 125 -1 100%0A-24 165 -73 199 -41 29 -46 57 -22 111 30 67 29 188 -3 256 -13 28 -37 60 -53%0A72 -55 39 -169 62 -268 55z m-15 -348 c30 -16 60 -61 60 -90 0 -10 -8 -33 -17%0A-52 -16 -34 -16 -41 0 -116 9 -44 15 -82 12 -85 -6 -7 -92 -21 -131 -21 l-31%0A-1 -6 85 c-4 75 -8 89 -31 112 -20 20 -26 36 -26 70 0 38 5 50 34 79 39 39 86%0A45 136 19z'/%3E%3C/g%3E%3C/svg%3E"
|
||||
type="image/svg+xml"
|
||||
/>
|
||||
<style type="text/css">
|
||||
body,
|
||||
html {
|
||||
width: 100%%;
|
||||
height: 100%%;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #125678;
|
||||
}
|
||||
body {
|
||||
color: #fff;
|
||||
text-align: center;
|
||||
padding: 0;
|
||||
min-height: 100%%;
|
||||
min-height: 100%;
|
||||
display: table;
|
||||
font-family: "Open Sans", Arial, sans-serif;
|
||||
margin: 0;
|
||||
-ms-text-size-adjust: 100%%;
|
||||
-webkit-text-size-adjust: 100%%;
|
||||
-ms-text-size-adjust: 100%;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
}
|
||||
h1 {
|
||||
display: flex;
|
||||
|
@ -54,7 +54,7 @@
|
|||
}
|
||||
footer {
|
||||
position: fixed;
|
||||
width: 100%%;
|
||||
width: 100%;
|
||||
letter-spacing: 1px;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
|
@ -141,7 +141,7 @@
|
|||
position: absolute;
|
||||
width: 7px;
|
||||
height: 7px;
|
||||
border-radius: 50%%;
|
||||
border-radius: 50%;
|
||||
background: #fff;
|
||||
margin: -4px 0 0 -4px;
|
||||
}
|
||||
|
@ -202,10 +202,10 @@
|
|||
left: 12px;
|
||||
}
|
||||
@keyframes lds-roller {
|
||||
0%% {
|
||||
0% {
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
100%% {
|
||||
100% {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
|
@ -220,13 +220,13 @@
|
|||
JavaScript needs to be enabled in order to visit this website.
|
||||
</p>
|
||||
</noscript>
|
||||
<form method="POST" action="%s" id="form">
|
||||
<form method="POST" action="{{antibot_uri}}" id="form">
|
||||
<input type="hidden" name="token" id="token" />
|
||||
</form>
|
||||
<div>
|
||||
<div
|
||||
class="h-captcha"
|
||||
data-sitekey="%s"
|
||||
data-sitekey="{{hcaptcha_sitekey}}"
|
||||
data-callback="send_challenge"
|
||||
></div>
|
||||
<br />
|
||||
|
|
|
@ -4,24 +4,24 @@
|
|||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<title>Bot Detection</title>
|
||||
<link rel="icon" href="data:image/svg+xml, %%3Csvg version='1.0' xmlns='http://www.w3.org/2000/svg' width='96.000000pt' height='96.000000pt' viewBox='0 0 96.000000 96.000000' preserveAspectRatio='xMidYMid meet'%%3E%%3Cg transform='translate(0.000000,96.000000) scale(0.100000,-0.100000)'%%0Afill='%%23085577' stroke='none'%%3E%%3Cpath d='M535 863 c-22 -2 -139 -17 -260 -34 -228 -31 -267 -43 -272 -85 -2%%0A-10 23 -181 55 -379 l57 -360 400 0 400 0 20 40 c16 31 20 59 19 125 -1 100%%0A-24 165 -73 199 -41 29 -46 57 -22 111 30 67 29 188 -3 256 -13 28 -37 60 -53%%0A72 -55 39 -169 62 -268 55z m-15 -348 c30 -16 60 -61 60 -90 0 -10 -8 -33 -17%%0A-52 -16 -34 -16 -41 0 -116 9 -44 15 -82 12 -85 -6 -7 -92 -21 -131 -21 l-31%%0A-1 -6 85 c-4 75 -8 89 -31 112 -20 20 -26 36 -26 70 0 38 5 50 34 79 39 39 86%%0A45 136 19z'/%%3E%%3C/g%%3E%%3C/svg%%3E" type="image/svg+xml"/>
|
||||
<link rel="icon" href="data:image/svg+xml, %3Csvg version='1.0' xmlns='http://www.w3.org/2000/svg' width='96.000000pt' height='96.000000pt' viewBox='0 0 96.000000 96.000000' preserveAspectRatio='xMidYMid meet'%3E%3Cg transform='translate(0.000000,96.000000) scale(0.100000,-0.100000)'%0Afill='%23085577' stroke='none'%3E%3Cpath d='M535 863 c-22 -2 -139 -17 -260 -34 -228 -31 -267 -43 -272 -85 -2%0A-10 23 -181 55 -379 l57 -360 400 0 400 0 20 40 c16 31 20 59 19 125 -1 100%0A-24 165 -73 199 -41 29 -46 57 -22 111 30 67 29 188 -3 256 -13 28 -37 60 -53%0A72 -55 39 -169 62 -268 55z m-15 -348 c30 -16 60 -61 60 -90 0 -10 -8 -33 -17%0A-52 -16 -34 -16 -41 0 -116 9 -44 15 -82 12 -85 -6 -7 -92 -21 -131 -21 l-31%0A-1 -6 85 c-4 75 -8 89 -31 112 -20 20 -26 36 -26 70 0 38 5 50 34 79 39 39 86%0A45 136 19z'/%3E%3C/g%3E%3C/svg%3E" type="image/svg+xml"/>
|
||||
<style type="text/css">
|
||||
body,
|
||||
html {
|
||||
width: 100%%;
|
||||
height: 100%%;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #125678;
|
||||
}
|
||||
body {
|
||||
color: #fff;
|
||||
text-align: center;
|
||||
padding: 0;
|
||||
min-height: 100%%;
|
||||
min-height: 100%;
|
||||
display: table;
|
||||
font-family: "Open Sans", Arial, sans-serif;
|
||||
margin: 0;
|
||||
-ms-text-size-adjust: 100%%;
|
||||
-webkit-text-size-adjust: 100%%;
|
||||
-ms-text-size-adjust: 100%;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
}
|
||||
h1 {
|
||||
display: flex;
|
||||
|
@ -49,7 +49,7 @@
|
|||
}
|
||||
footer {
|
||||
position: fixed;
|
||||
width: 100%%;
|
||||
width: 100%;
|
||||
letter-spacing: 1px;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
|
@ -123,7 +123,7 @@
|
|||
font-size: 16px;
|
||||
}
|
||||
}
|
||||
.lds-roller{display: inline-block;position: relative;width: 80px;height: 80px}.lds-roller div{animation: lds-roller 1.2s cubic-bezier(0.5, 0, 0.5, 1) infinite;transform-origin: 40px 40px}.lds-roller div:after{content: " ";display: block;position: absolute;width: 7px;height: 7px;border-radius: 50%%;background: #fff;margin: -4px 0 0 -4px}.lds-roller div:nth-child(1){animation-delay: -0.036s}.lds-roller div:nth-child(1):after{top: 63px;left: 63px}.lds-roller div:nth-child(2){animation-delay: -0.072s}.lds-roller div:nth-child(2):after{top: 68px;left: 56px}.lds-roller div:nth-child(3){animation-delay: -0.108s}.lds-roller div:nth-child(3):after{top: 71px;left: 48px}.lds-roller div:nth-child(4){animation-delay: -0.144s}.lds-roller div:nth-child(4):after{top: 72px;left: 40px}.lds-roller div:nth-child(5){animation-delay: -0.18s}.lds-roller div:nth-child(5):after{top: 71px;left: 32px}.lds-roller div:nth-child(6){animation-delay: -0.216s}.lds-roller div:nth-child(6):after{top: 68px;left: 24px}.lds-roller div:nth-child(7){animation-delay: -0.252s}.lds-roller div:nth-child(7):after{top: 63px;left: 17px}.lds-roller div:nth-child(8){animation-delay: -0.288s}.lds-roller div:nth-child(8):after{top: 56px;left: 12px}@keyframes lds-roller{0%%{transform: rotate(0deg)}100%%{transform: rotate(360deg)}}#showjs{display:none}#nojs{display:table-cell}
|
||||
.lds-roller{display: inline-block;position: relative;width: 80px;height: 80px}.lds-roller div{animation: lds-roller 1.2s cubic-bezier(0.5, 0, 0.5, 1) infinite;transform-origin: 40px 40px}.lds-roller div:after{content: " ";display: block;position: absolute;width: 7px;height: 7px;border-radius: 50%;background: #fff;margin: -4px 0 0 -4px}.lds-roller div:nth-child(1){animation-delay: -0.036s}.lds-roller div:nth-child(1):after{top: 63px;left: 63px}.lds-roller div:nth-child(2){animation-delay: -0.072s}.lds-roller div:nth-child(2):after{top: 68px;left: 56px}.lds-roller div:nth-child(3){animation-delay: -0.108s}.lds-roller div:nth-child(3):after{top: 71px;left: 48px}.lds-roller div:nth-child(4){animation-delay: -0.144s}.lds-roller div:nth-child(4):after{top: 72px;left: 40px}.lds-roller div:nth-child(5){animation-delay: -0.18s}.lds-roller div:nth-child(5):after{top: 71px;left: 32px}.lds-roller div:nth-child(6){animation-delay: -0.216s}.lds-roller div:nth-child(6):after{top: 68px;left: 24px}.lds-roller div:nth-child(7){animation-delay: -0.252s}.lds-roller div:nth-child(7):after{top: 63px;left: 17px}.lds-roller div:nth-child(8){animation-delay: -0.288s}.lds-roller div:nth-child(8):after{top: 56px;left: 12px}@keyframes lds-roller{0%{transform: rotate(0deg)}100%{transform: rotate(360deg)}}#showjs{display:none}#nojs{display:table-cell}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
@ -136,7 +136,7 @@
|
|||
<div class="lds-roller"><div></div><div></div><div></div><div></div><div></div><div></div><div></div><div></div></div>
|
||||
</p>
|
||||
</div>
|
||||
<form method="POST" action="%s" id="form">
|
||||
<form method="POST" action="{{antibot_uri}}" id="form">
|
||||
<input type="hidden" name="challenge" id="challenge">
|
||||
</form>
|
||||
<footer>
|
||||
|
@ -233,7 +233,7 @@
|
|||
var i, l = input.length * 32,
|
||||
output = '';
|
||||
for (i = 0; i < l; i += 8) {
|
||||
output += String.fromCharCode((input[i >> 5] >>> (24 - i %% 32)) & 0xFF);
|
||||
output += String.fromCharCode((input[i >> 5] >>> (24 - i % 32)) & 0xFF);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
@ -250,7 +250,7 @@
|
|||
output[i] = 0;
|
||||
}
|
||||
for (i = 0; i < l; i += 8) {
|
||||
output[i >> 5] |= (input.charCodeAt(i / 8) & 0xFF) << (24 - i %% 32);
|
||||
output[i >> 5] |= (input.charCodeAt(i / 8) & 0xFF) << (24 - i % 32);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
@ -299,7 +299,7 @@
|
|||
var W = [64];
|
||||
var a, b, c, d, e, f, g, h;
|
||||
var i, j, T1, T2;
|
||||
m[l >> 5] |= 0x80 << (24 - (l %% 32));
|
||||
m[l >> 5] |= 0x80 << (24 - (l % 32));
|
||||
m[(((l + 64) >> 9) << 4) + 15] = l;
|
||||
for (i = 0; i < m.length; i += 16) {
|
||||
a = HASH[0];
|
||||
|
@ -362,7 +362,7 @@
|
|||
return hashBuffer;
|
||||
}
|
||||
(async () => {
|
||||
const nonce = '%s';
|
||||
const nonce = '{{random}}';
|
||||
var i = 0;
|
||||
while (true) {
|
||||
var digestHex = await digestMessage(nonce + i.toString());
|
||||
|
|
|
@ -6,26 +6,26 @@
|
|||
<title>Bot Detection</title>
|
||||
<link
|
||||
rel="icon"
|
||||
href="data:image/svg+xml, %%3Csvg version='1.0' xmlns='http://www.w3.org/2000/svg' width='96.000000pt' height='96.000000pt' viewBox='0 0 96.000000 96.000000' preserveAspectRatio='xMidYMid meet'%%3E%%3Cg transform='translate(0.000000,96.000000) scale(0.100000,-0.100000)'%%0Afill='%%23085577' stroke='none'%%3E%%3Cpath d='M535 863 c-22 -2 -139 -17 -260 -34 -228 -31 -267 -43 -272 -85 -2%%0A-10 23 -181 55 -379 l57 -360 400 0 400 0 20 40 c16 31 20 59 19 125 -1 100%%0A-24 165 -73 199 -41 29 -46 57 -22 111 30 67 29 188 -3 256 -13 28 -37 60 -53%%0A72 -55 39 -169 62 -268 55z m-15 -348 c30 -16 60 -61 60 -90 0 -10 -8 -33 -17%%0A-52 -16 -34 -16 -41 0 -116 9 -44 15 -82 12 -85 -6 -7 -92 -21 -131 -21 l-31%%0A-1 -6 85 c-4 75 -8 89 -31 112 -20 20 -26 36 -26 70 0 38 5 50 34 79 39 39 86%%0A45 136 19z'/%%3E%%3C/g%%3E%%3C/svg%%3E"
|
||||
href="data:image/svg+xml, %3Csvg version='1.0' xmlns='http://www.w3.org/2000/svg' width='96.000000pt' height='96.000000pt' viewBox='0 0 96.000000 96.000000' preserveAspectRatio='xMidYMid meet'%3E%3Cg transform='translate(0.000000,96.000000) scale(0.100000,-0.100000)'%0Afill='%23085577' stroke='none'%3E%3Cpath d='M535 863 c-22 -2 -139 -17 -260 -34 -228 -31 -267 -43 -272 -85 -2%0A-10 23 -181 55 -379 l57 -360 400 0 400 0 20 40 c16 31 20 59 19 125 -1 100%0A-24 165 -73 199 -41 29 -46 57 -22 111 30 67 29 188 -3 256 -13 28 -37 60 -53%0A72 -55 39 -169 62 -268 55z m-15 -348 c30 -16 60 -61 60 -90 0 -10 -8 -33 -17%0A-52 -16 -34 -16 -41 0 -116 9 -44 15 -82 12 -85 -6 -7 -92 -21 -131 -21 l-31%0A-1 -6 85 c-4 75 -8 89 -31 112 -20 20 -26 36 -26 70 0 38 5 50 34 79 39 39 86%0A45 136 19z'/%3E%3C/g%3E%3C/svg%3E"
|
||||
type="image/svg+xml"
|
||||
/>
|
||||
<style type="text/css">
|
||||
body,
|
||||
html {
|
||||
width: 100%%;
|
||||
height: 100%%;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #125678;
|
||||
}
|
||||
body {
|
||||
color: #fff;
|
||||
text-align: center;
|
||||
padding: 0;
|
||||
min-height: 100%%;
|
||||
min-height: 100%;
|
||||
display: table;
|
||||
font-family: "Open Sans", Arial, sans-serif;
|
||||
margin: 0;
|
||||
-ms-text-size-adjust: 100%%;
|
||||
-webkit-text-size-adjust: 100%%;
|
||||
-ms-text-size-adjust: 100%;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
}
|
||||
h1 {
|
||||
display: flex;
|
||||
|
@ -53,7 +53,7 @@
|
|||
}
|
||||
footer {
|
||||
position: fixed;
|
||||
width: 100%%;
|
||||
width: 100%;
|
||||
letter-spacing: 1px;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
|
@ -120,7 +120,7 @@
|
|||
visibility: hidden;
|
||||
}
|
||||
</style>
|
||||
<script src="https://www.google.com/recaptcha/api.js?render=%s"></script>
|
||||
<script src="https://www.google.com/recaptcha/api.js?render={{recaptcha_sitekey}}"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="cover">
|
||||
|
@ -138,7 +138,7 @@
|
|||
<a href="https://policies.google.com/terms">Terms of Service</a> apply.
|
||||
</p>
|
||||
</div>
|
||||
<form method="POST" action="%s" id="form">
|
||||
<form method="POST" action="{{antibot_uri}}" id="form">
|
||||
<input type="hidden" name="token" id="token" />
|
||||
</form>
|
||||
<footer>
|
||||
|
@ -156,7 +156,7 @@
|
|||
function check_robot() {
|
||||
grecaptcha.ready(function () {
|
||||
grecaptcha
|
||||
.execute("%s", { action: "recaptcha" })
|
||||
.execute("{{recaptcha_sitekey}}", { action: "recaptcha" })
|
||||
.then(function (token) {
|
||||
document.getElementById("token").value = token;
|
||||
document.getElementById("form").submit();
|
||||
|
|
|
@ -6,26 +6,26 @@
|
|||
<title>%s</title>
|
||||
<link
|
||||
rel="icon"
|
||||
href="data:image/svg+xml, %%3Csvg version='1.0' xmlns='http://www.w3.org/2000/svg' width='96.000000pt' height='96.000000pt' viewBox='0 0 96.000000 96.000000' preserveAspectRatio='xMidYMid meet'%%3E%%3Cg transform='translate(0.000000,96.000000) scale(0.100000,-0.100000)'%%0Afill='%%23085577' stroke='none'%%3E%%3Cpath d='M535 863 c-22 -2 -139 -17 -260 -34 -228 -31 -267 -43 -272 -85 -2%%0A-10 23 -181 55 -379 l57 -360 400 0 400 0 20 40 c16 31 20 59 19 125 -1 100%%0A-24 165 -73 199 -41 29 -46 57 -22 111 30 67 29 188 -3 256 -13 28 -37 60 -53%%0A72 -55 39 -169 62 -268 55z m-15 -348 c30 -16 60 -61 60 -90 0 -10 -8 -33 -17%%0A-52 -16 -34 -16 -41 0 -116 9 -44 15 -82 12 -85 -6 -7 -92 -21 -131 -21 l-31%%0A-1 -6 85 c-4 75 -8 89 -31 112 -20 20 -26 36 -26 70 0 38 5 50 34 79 39 39 86%%0A45 136 19z'/%%3E%%3C/g%%3E%%3C/svg%%3E"
|
||||
href="data:image/svg+xml, %3Csvg version='1.0' xmlns='http://www.w3.org/2000/svg' width='96.000000pt' height='96.000000pt' viewBox='0 0 96.000000 96.000000' preserveAspectRatio='xMidYMid meet'%3E%3Cg transform='translate(0.000000,96.000000) scale(0.100000,-0.100000)'%0Afill='%23085577' stroke='none'%3E%3Cpath d='M535 863 c-22 -2 -139 -17 -260 -34 -228 -31 -267 -43 -272 -85 -2%0A-10 23 -181 55 -379 l57 -360 400 0 400 0 20 40 c16 31 20 59 19 125 -1 100%0A-24 165 -73 199 -41 29 -46 57 -22 111 30 67 29 188 -3 256 -13 28 -37 60 -53%0A72 -55 39 -169 62 -268 55z m-15 -348 c30 -16 60 -61 60 -90 0 -10 -8 -33 -17%0A-52 -16 -34 -16 -41 0 -116 9 -44 15 -82 12 -85 -6 -7 -92 -21 -131 -21 l-31%0A-1 -6 85 c-4 75 -8 89 -31 112 -20 20 -26 36 -26 70 0 38 5 50 34 79 39 39 86%0A45 136 19z'/%3E%3C/g%3E%3C/svg%3E"
|
||||
type="image/svg+xml"
|
||||
/>
|
||||
<style type="text/css">
|
||||
body,
|
||||
html {
|
||||
width: 100%%;
|
||||
height: 100%%;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #125678;
|
||||
}
|
||||
body {
|
||||
color: #fff;
|
||||
text-align: center;
|
||||
padding: 0;
|
||||
min-height: 100%%;
|
||||
min-height: 100%;
|
||||
display: table;
|
||||
font-family: "Open Sans", Arial, sans-serif;
|
||||
margin: 0;
|
||||
-ms-text-size-adjust: 100%%;
|
||||
-webkit-text-size-adjust: 100%%;
|
||||
-ms-text-size-adjust: 100%;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
}
|
||||
h1 {
|
||||
display: flex;
|
||||
|
@ -63,7 +63,7 @@
|
|||
}
|
||||
footer {
|
||||
position: fixed;
|
||||
width: 100%%;
|
||||
width: 100%;
|
||||
letter-spacing: 1px;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
|
|
|
@ -1,34 +1,73 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local datastore = require "bunkerweb.datastore"
|
||||
local clusterstore = require "bunkerweb.clusterstore"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local clusterstore = require "clusterstore"
|
||||
local badbehavior = class("badbehavior", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function badbehavior:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "badbehavior")
|
||||
-- Check if redis is enabled
|
||||
local use_redis, err = utils.get_variable("USE_REDIS", false)
|
||||
if not use_redis then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.use_redis = use_redis == "yes"
|
||||
end
|
||||
|
||||
function _M.increase(premature, use_redis, ip, count_time, ban_time, threshold)
|
||||
function badbehavior:log()
|
||||
-- Check if we are whitelisted
|
||||
if ngx.var.is_whitelisted == "yes" then
|
||||
return self:ret(true, "client is whitelisted")
|
||||
end
|
||||
-- Check if bad behavior is activated
|
||||
if self.variables["USE_BAD_BEHAVIOR"] ~= "yes" then
|
||||
return self:ret(true, "bad behavior not activated")
|
||||
end
|
||||
-- Check if we have a bad status code
|
||||
if not self.variables["BAD_BEHAVIOR_STATUS_CODES"]:match(tostring(ngx.status)) then
|
||||
return self:ret(true, "not increasing counter")
|
||||
end
|
||||
-- Check if we are already banned
|
||||
local banned, err = self.datastore:get("bans_ip_" .. ngx.var.remote_addr)
|
||||
if banned then
|
||||
return self:ret(true, "already banned")
|
||||
end
|
||||
-- Call increase function later and with cosocket enabled
|
||||
local ok, err = ngx.timer.at(0, badbehavior.increase, self, ngx.var.remote_addr)
|
||||
if not ok then
|
||||
return self:ret(false, "can't create increase timer : " .. err)
|
||||
end
|
||||
return self:ret(true, "success")
|
||||
end
|
||||
|
||||
function badbehavior:log_default()
|
||||
return self:log()
|
||||
end
|
||||
|
||||
function badbehavior.increase(premature, obj, ip)
|
||||
-- Our vars
|
||||
local count_time = tonumber(obj.variables["BAD_BEHAVIOR_COUNT_TIME"])
|
||||
local ban_time = tonumber(obj.variables["BAD_BEHAVIOR_BAN_TIME"])
|
||||
local threshold = tonumber(obj.variables["BAD_BEHAVIOR_THRESHOLD"])
|
||||
-- Declare counter
|
||||
local counter = false
|
||||
-- Redis case
|
||||
if use_redis then
|
||||
local redis_counter = _M.redis_increase(ip, count_time, ban_time, threshold)
|
||||
if obj.use_redis then
|
||||
local redis_counter, err = obj:redis_increase(ip)
|
||||
if not redis_counter then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) redis_increase failed, falling back to local")
|
||||
obj.logger:log(ngx.ERR, "(increase) redis_increase failed, falling back to local : " .. err)
|
||||
else
|
||||
counter = redis_counter
|
||||
end
|
||||
end
|
||||
-- Local case
|
||||
if not counter then
|
||||
local local_counter, err = datastore:get("plugin_badbehavior_count_" .. ip)
|
||||
local local_counter, err = obj.datastore:get("plugin_badbehavior_count_" .. ip)
|
||||
if not local_counter and err ~= "not found" then
|
||||
return false, "can't get counts from the datastore : " .. err
|
||||
obj.logger:log(ngx.ERR, "(increase) can't get counts from the datastore : " .. err)
|
||||
end
|
||||
if local_counter == nil then
|
||||
local_counter = 0
|
||||
|
@ -36,202 +75,145 @@ function _M.increase(premature, use_redis, ip, count_time, ban_time, threshold)
|
|||
counter = local_counter + 1
|
||||
end
|
||||
-- Call decrease later
|
||||
local ok, err = ngx.timer.at(count_time, _M.decrease, use_redis, ip)
|
||||
local ok, err = ngx.timer.at(count_time, badbehavior.decrease, obj, ip)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) can't create decrease timer : " .. err)
|
||||
obj.logger:log(ngx.ERR, "(increase) can't create decrease timer : " .. err)
|
||||
end
|
||||
-- Store local counter
|
||||
local ok, err = datastore:set("plugin_badbehavior_count_" .. ip, counter)
|
||||
local ok, err = obj.datastore:set("plugin_badbehavior_count_" .. ip, counter)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) can't save counts to the datastore : " .. err)
|
||||
obj.logger:log(ngx.ERR, "(increase) can't save counts to the datastore : " .. err)
|
||||
return
|
||||
end
|
||||
-- Store local ban
|
||||
if counter > threshold then
|
||||
local ok, err = datastore:set("bans_ip_" .. ip, "bad behavior", ban_time)
|
||||
local ok, err = obj.datastore:set("bans_ip_" .. ip, "bad behavior", ban_time)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) can't save ban to the datastore : " .. err)
|
||||
obj.logger:log(ngx.ERR, "(increase) can't save ban to the datastore : " .. err)
|
||||
return
|
||||
end
|
||||
logger.log(ngx.WARN, "BAD-BEHAVIOR", "IP " .. ip .. " is banned for " .. ban_time .. "s (" .. tostring(counter) .. "/" .. tostring(threshold) .. ")")
|
||||
obj.logger:log(ngx.WARN, "IP " .. ip .. " is banned for " .. ban_time .. "s (" .. tostring(counter) .. "/" .. tostring(threshold) .. ")")
|
||||
end
|
||||
end
|
||||
|
||||
function _M.decrease(premature, use_redis, ip)
|
||||
function badbehavior.decrease(premature, obj, ip)
|
||||
-- Our vars
|
||||
local count_time = tonumber(obj.variables["BAD_BEHAVIOR_COUNT_TIME"])
|
||||
local ban_time = tonumber(obj.variables["BAD_BEHAVIOR_BAN_TIME"])
|
||||
local threshold = tonumber(obj.variables["BAD_BEHAVIOR_THRESHOLD"])
|
||||
-- Declare counter
|
||||
local counter = false
|
||||
-- Redis case
|
||||
if use_redis then
|
||||
local redis_counter = _M.redis_decrease(ip)
|
||||
if obj.use_redis then
|
||||
local redis_counter, err = obj:redis_decrease(ip)
|
||||
if not redis_counter then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) redis_decrease failed, falling back to local")
|
||||
obj.logger:log(ngx.ERR, "(increase) redis_increase failed, falling back to local : " .. err)
|
||||
else
|
||||
counter = redis_counter
|
||||
end
|
||||
end
|
||||
-- Local case
|
||||
if not counter then
|
||||
local local_counter, err = datastore:get("plugin_badbehavior_count_" .. ip)
|
||||
if err then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(decrease) Can't get counts from the datastore : " .. err)
|
||||
return
|
||||
local local_counter, err = obj.datastore:get("plugin_badbehavior_count_" .. ip)
|
||||
if not local_counter and err ~= "not found" then
|
||||
obj.logger:log(ngx.ERR, "(increase) can't get counts from the datastore : " .. err)
|
||||
end
|
||||
if not local_counter then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(decrease) Count is null")
|
||||
return
|
||||
if local_counter == nil or local_counter <= 1 then
|
||||
counter = 0
|
||||
else
|
||||
counter = local_counter - 1
|
||||
end
|
||||
counter = local_counter - 1
|
||||
end
|
||||
-- Update local counter
|
||||
-- Store local counter
|
||||
if counter <= 0 then
|
||||
datastore:delete("plugin_badbehavior_count_" .. ip)
|
||||
return
|
||||
end
|
||||
local ok, err = datastore:set("plugin_badbehavior_count_" .. ip, new_count)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(decrease) Can't save counts to the datastore : " .. err)
|
||||
return
|
||||
local ok, err = obj.datastore:delete("plugin_badbehavior_count_" .. ip)
|
||||
else
|
||||
local ok, err = obj.datastore:delete("plugin_badbehavior_count_" .. ip, counter)
|
||||
if not ok then
|
||||
obj.logger:log(ngx.ERR, "(increase) can't save counts to the datastore : " .. err)
|
||||
return
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function _M:log()
|
||||
-- Get vars
|
||||
self.use = utils.get_variable("USE_BAD_BEHAVIOR")
|
||||
self.ban_time = utils.get_variable("BAD_BEHAVIOR_BAN_TIME")
|
||||
self.status_codes = utils.get_variable("BAD_BEHAVIOR_STATUS_CODES")
|
||||
self.threshold = utils.get_variable("BAD_BEHAVIOR_THRESHOLD")
|
||||
self.count_time = utils.get_variable("BAD_BEHAVIOR_COUNT_TIME")
|
||||
self.use_redis = utils.get_variable("USE_REDIS")
|
||||
-- Check if bad behavior is activated
|
||||
if self.use ~= "yes" then
|
||||
return true, "bad behavior not activated"
|
||||
end
|
||||
-- Check if we have a bad status code
|
||||
if not self.status_codes:match(tostring(ngx.status)) then
|
||||
return true, "not increasing counter"
|
||||
end
|
||||
-- Check if we are whitelisted
|
||||
if ngx.var.is_whitelisted == "yes" then
|
||||
return true, "client is whitelisted"
|
||||
end
|
||||
-- Check if we are already banned
|
||||
local banned, err = datastore:get("bans_ip_" .. ngx.var.remote_addr)
|
||||
if banned then
|
||||
return true, "already banned"
|
||||
end
|
||||
-- Call increase function later and with cosocket enabled
|
||||
local use_redis = false
|
||||
if self.use_redis == "yes" then
|
||||
use_redis = true
|
||||
end
|
||||
local ok, err = ngx.timer.at(0, _M.increase, use_redis, ngx.var.remote_addr, tonumber(self.count_time), tonumber(self.ban_time), tonumber(self.threshold))
|
||||
if not ok then
|
||||
return false, "can't create increase timer : " .. err
|
||||
end
|
||||
return true, "success"
|
||||
end
|
||||
|
||||
function _M:log_default()
|
||||
return _M:log()
|
||||
end
|
||||
|
||||
function _M.redis_increase(ip, count_time, ban_time, threshold)
|
||||
function badbehavior:redis_increase(ip)
|
||||
-- Our vars
|
||||
local count_time = tonumber(self.variables["BAD_BEHAVIOR_COUNT_TIME"])
|
||||
local ban_time = tonumber(self.variables["BAD_BEHAVIOR_BAN_TIME"])
|
||||
-- Connect to server
|
||||
local redis_client, err = clusterstore:connect()
|
||||
if not redis_client then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) Can't connect to redis server : " .. err)
|
||||
return false
|
||||
local cstore, err = clusterstore:new()
|
||||
if not cstore then
|
||||
return false, err
|
||||
end
|
||||
-- Start transaction
|
||||
local ok, err = redis_client:multi()
|
||||
local ok, err = clusterstore:connect()
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) Can't start transaction : " .. err)
|
||||
clusterstore:close(redis_client)
|
||||
return false
|
||||
end
|
||||
-- Increment counter
|
||||
ok, err = redis_client:incr("bad_behavior_" .. ip)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) INCR failed : " .. err)
|
||||
clusterstore:close(redis_client)
|
||||
return false
|
||||
end
|
||||
-- Expires counter
|
||||
ok, err = redis_client:expire("bad_behavior_" .. ip, count_time)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) EXPIRE failed : " .. err)
|
||||
clusterstore:close(redis_client)
|
||||
return false
|
||||
return false, err
|
||||
end
|
||||
-- Exec transaction
|
||||
local exec, err = redis_client:exec()
|
||||
if err then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) EXEC failed : " .. err)
|
||||
clusterstore:close(redis_client)
|
||||
return false
|
||||
end
|
||||
if type(exec) ~= "table" then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) EXEC result is not a table")
|
||||
clusterstore:close(redis_client)
|
||||
return false
|
||||
local calls = {
|
||||
{"incr", {"bad_behavior_" .. ip}},
|
||||
{"expire", {"bad_behavior_" .. ip, count_time}}
|
||||
}
|
||||
local ok, err, exec = clusterstore:multi(calls)
|
||||
if not ok then
|
||||
clusterstore:close()
|
||||
return false, err
|
||||
end
|
||||
-- Extract counter
|
||||
local counter = exec[1]
|
||||
if type(counter) == "table" then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) INCR error : " .. counter[2])
|
||||
clusterstore:close(redis_client)
|
||||
return false
|
||||
clusterstore:close()
|
||||
return false, counter[2]
|
||||
end
|
||||
-- Check expire result
|
||||
local expire = exec[2]
|
||||
if type(expire) == "table" then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) EXPIRE error : " .. expire[2])
|
||||
clusterstore:close(redis_client)
|
||||
return false
|
||||
clusterstore:close()
|
||||
return false, expire[2]
|
||||
end
|
||||
-- Add IP to redis bans if needed
|
||||
if counter > threshold then
|
||||
local ban, err = redis_client:set("ban_" .. ip, "bad behavior", "EX", ban_time)
|
||||
local ok, err = clusterstore:call("set", "ban_" .. ip, "bad behavior", "EX", ban_time)
|
||||
if err then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(increase) SET failed : " .. err)
|
||||
clusterstore:close(redis_client)
|
||||
return false
|
||||
clusterstore:close()
|
||||
return false, err
|
||||
end
|
||||
end
|
||||
-- End connection
|
||||
clusterstore:close(redis_client)
|
||||
clusterstore:close()
|
||||
return counter
|
||||
end
|
||||
|
||||
function _M.redis_decrease(ip)
|
||||
function badbehavior:redis_decrease(ip)
|
||||
-- Connect to server
|
||||
local redis_client, err = clusterstore:connect()
|
||||
if not redis_client then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(decrease) Can't connect to redis server : " .. err)
|
||||
return false
|
||||
local cstore, err = clusterstore:new()
|
||||
if not cstore then
|
||||
return false, err
|
||||
end
|
||||
local ok, err = clusterstore:connect()
|
||||
if not ok then
|
||||
return false, err
|
||||
end
|
||||
-- Decrement counter
|
||||
local counter, err = redis_client:decr("bad_behavior_" .. ip)
|
||||
local counter, err = clusterstore:call("decr", "bad_behavior_" .. ip)
|
||||
if err then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(decrease) DECR failed : " .. err)
|
||||
clusterstore:close(redis_client)
|
||||
return false
|
||||
clusterstore:close()
|
||||
return false, err
|
||||
end
|
||||
-- Delete counter
|
||||
if counter < 0 then
|
||||
counter = 0
|
||||
end
|
||||
if counter == 0 then
|
||||
local ok, err = redis_client:del("bad_behavior_" .. ip)
|
||||
local ok, err = clusterstore:call("del", "bad_behavior_" .. ip)
|
||||
if err then
|
||||
logger.log(ngx.ERR, "BAD-BEHAVIOR", "(decrease) DEL failed : " .. err)
|
||||
clusterstore:close(redis_client)
|
||||
return false
|
||||
clusterstore:close()
|
||||
return false, err
|
||||
end
|
||||
end
|
||||
-- End connection
|
||||
clusterstore:close(redis_client)
|
||||
clusterstore:close()
|
||||
return counter
|
||||
end
|
||||
|
||||
return _M
|
||||
return badbehavior
|
|
@ -1,25 +1,46 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local datastore = require "bunkerweb.datastore"
|
||||
local cachestore = require "bunkerweb.cachestore"
|
||||
local cjson = require "cjson"
|
||||
local ipmatcher = require "resty.ipmatcher"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local ipmatcher = require "resty.ipmatcher"
|
||||
local blacklist = class("blacklist", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function blacklist:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "blacklist")
|
||||
-- Check if redis is enabled
|
||||
local use_redis, err = utils.get_variable("USE_REDIS", false)
|
||||
if not use_redis then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.use_redis = use_redis == "yes"
|
||||
-- Check if init is needed
|
||||
if ngx.get_phase() == "init" then
|
||||
local init_needed, err = utils.has_variable("USE_BLACKLIST", "yes")
|
||||
if init_needed == nil then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.init_needed = init_needed
|
||||
-- Decode lists
|
||||
else
|
||||
local lists, err = self.datastore:get("plugin_blacklist_lists")
|
||||
if not lists then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
else
|
||||
self.lists = cjson.decode(lists)
|
||||
end
|
||||
end
|
||||
-- Instantiate cachestore
|
||||
self.cachestore = cachestore:new(self.use_redis)
|
||||
end
|
||||
|
||||
function _M:init()
|
||||
function blacklist:init()
|
||||
-- Check if init is needed
|
||||
local init_needed, err = utils.has_variable("USE_BLACKLIST", "yes")
|
||||
if init_needed == nil then
|
||||
return false, err
|
||||
end
|
||||
if not init_needed then
|
||||
return true, "no service uses Blacklist, skipping init"
|
||||
if not self.init_needed then
|
||||
return self:ret(true, "init not needed")
|
||||
end
|
||||
-- Read blacklists
|
||||
local blacklists = {
|
||||
|
@ -46,480 +67,247 @@ function _M:init()
|
|||
end
|
||||
end
|
||||
-- Load them into datastore
|
||||
local ok, err = datastore:set("plugin_blacklist_list", cjson.encode(blacklists))
|
||||
local ok, err = self.datastore:set("plugin_blacklist_lists", cjson.encode(blacklists))
|
||||
if not ok then
|
||||
return false, "can't store Blacklist list into datastore : " .. err
|
||||
return self:ret(false, "can't store blacklist list into datastore : " .. err)
|
||||
end
|
||||
return true, "successfully loaded " .. tostring(i) .. " bad IP/network/rDNS/ASN/User-Agent/URI"
|
||||
return self:ret(true, "successfully loaded " .. tostring(i) .. " IP/network/rDNS/ASN/User-Agent/URI")
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
function blacklist:access()
|
||||
-- Check if access is needed
|
||||
local access_needed, err = utils.get_variable("USE_BLACKLIST")
|
||||
if access_needed == nil then
|
||||
return false, err
|
||||
if self.variables["USE_BLACKLIST"] ~= "yes" then
|
||||
return self:ret(true, "blacklist not activated")
|
||||
end
|
||||
if access_needed ~= "yes" then
|
||||
return true, "Blacklist not activated"
|
||||
end
|
||||
|
||||
-- Check the cache
|
||||
local cached_ip, err = self:is_in_cache("ip" .. ngx.var.remote_addr)
|
||||
local cached_ignored_ip, err = self:is_in_cache("ignore_ip" .. ngx.var.remote_addr)
|
||||
if cached_ignored_ip then
|
||||
logger.log(ngx.NOTICE, "BLACKLIST", "IP is in cached ignore blacklist (info: " .. cached_ignored_ip .. ")")
|
||||
elseif cached_ip and cached_ip ~= "ok" then
|
||||
return true, "IP is in blacklist cache (info = " .. cached_ip .. ")", true, utils.get_deny_status()
|
||||
end
|
||||
local cached_uri, err = self:is_in_cache("uri" .. ngx.var.uri)
|
||||
local cached_ignored_uri, err = self:is_in_cache("ignore_uri" .. ngx.var.uri)
|
||||
if cached_ignored_uri then
|
||||
logger.log(ngx.NOTICE, "BLACKLIST", "URI is in cached ignore blacklist (info: " .. cached_ignored_uri .. ")")
|
||||
elseif cached_uri and cached_uri ~= "ok" then
|
||||
return true, "URI is in blacklist cache (info = " .. cached_uri .. ")", true, utils.get_deny_status()
|
||||
end
|
||||
local cached_ua = true
|
||||
local cached_ignored_ua = false
|
||||
-- Check the caches
|
||||
local checks = {
|
||||
["IP"] = "ip" .. ngx.var.remote_addr
|
||||
}
|
||||
if ngx.var.http_user_agent then
|
||||
cached_ua, err = self:is_in_cache("ua" .. ngx.var.http_user_agent)
|
||||
cached_ignored_ua, err = self:is_in_cache("ignore_ua" .. ngx.var.http_user_agent)
|
||||
if cached_ignored_ua then
|
||||
logger.log(ngx.NOTICE, "BLACKLIST", "User-Agent is in cached ignore blacklist (info: " .. cached_ignored_ua .. ")")
|
||||
elseif cached_ua and cached_ua ~= "ok" then
|
||||
return true, "User-Agent is in blacklist cache (info = " .. cached_ua .. ")", true, utils.get_deny_status()
|
||||
end
|
||||
checks["UA"] = "ua" .. ngx.var.http_user_agent
|
||||
end
|
||||
if cached_ignored_ip and cached_ignored_uri and cached_ignored_ua then
|
||||
logger.log(ngx.NOTICE, "BLACKLIST", "full request is in cached ignore blacklist")
|
||||
elseif cached_ip and cached_uri and cached_ua then
|
||||
return true, "full request is in blacklist cache (not blacklisted)", false, nil
|
||||
if ngx.var.uri then
|
||||
checks["URI"] = "uri" .. ngx.var.uri
|
||||
end
|
||||
|
||||
-- Get list
|
||||
local data, err = datastore:get("plugin_blacklist_list")
|
||||
if not data then
|
||||
return false, "can't get Blacklist list : " .. err, false, nil
|
||||
end
|
||||
local ok, blacklists = pcall(cjson.decode, data)
|
||||
if not ok then
|
||||
return false, "error while decoding blacklists : " .. blacklists, false, nil
|
||||
end
|
||||
|
||||
-- Return value
|
||||
local ret, ret_err = true, "success"
|
||||
|
||||
-- Check if IP is in IP/net blacklist
|
||||
local ip_net, err = utils.get_variable("BLACKLIST_IP")
|
||||
local ignored_ip_net, err = utils.get_variable("BLACKLIST_IGNORE_IP")
|
||||
if ip_net and ip_net ~= "" then
|
||||
for element in ip_net:gmatch("%S+") do
|
||||
table.insert(blacklists["IP"], element)
|
||||
end
|
||||
end
|
||||
if ignored_ip_net and ignored_ip_net ~= "" then
|
||||
for element in ignored_ip_net:gmatch("%S+") do
|
||||
table.insert(blacklists["IGNORE_IP"], element)
|
||||
end
|
||||
end
|
||||
if not cached_ip then
|
||||
local ipm, err = ipmatcher.new(blacklists["IP"])
|
||||
local ipm_ignore, err_ignore = ipmatcher.new(blacklists["IGNORE_IP"])
|
||||
if not ipm then
|
||||
ret = false
|
||||
ret_err = "can't instantiate ipmatcher " .. err
|
||||
elseif not ipm_ignore then
|
||||
ret = false
|
||||
ret_err = "can't instantiate ipmatcher " .. err_ignore
|
||||
else
|
||||
if ipm:match(ngx.var.remote_addr) then
|
||||
if ipm_ignore:match(ngx.var.remote_addr) then
|
||||
self:add_to_cache("ignore_ip" .. ngx.var.remote_addr, "ip/net")
|
||||
logger.log(ngx.NOTICE, "BLACKLIST", "client IP " .. ngx.var.remote_addr .. " is in blacklist but is ignored")
|
||||
else
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ip/net")
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in blacklist", true, utils.get_deny_status()
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Instantiate ignore variable
|
||||
local ignore = false
|
||||
|
||||
-- Check if rDNS is in blacklist
|
||||
local rdns_global, err = utils.get_variable("BLACKLIST_RDNS_GLOBAL")
|
||||
local check = true
|
||||
if not rdns_global then
|
||||
logger.log(ngx.ERR, "BLACKLIST", "Error while getting BLACKLIST_RDNS_GLOBAL variable : " .. err)
|
||||
elseif rdns_global == "yes" then
|
||||
check, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if check == nil then
|
||||
logger.log(ngx.ERR, "BLACKLIST", "Error while getting checking if IP is global : " .. err)
|
||||
end
|
||||
end
|
||||
if not cached_ip and check then
|
||||
local rdns, err = utils.get_rdns(ngx.var.remote_addr)
|
||||
if not rdns then
|
||||
ret = false
|
||||
ret_err = "error while trying to get reverse dns : " .. err
|
||||
else
|
||||
local rdns_list, err = utils.get_variable("BLACKLIST_RDNS")
|
||||
local ignored_rdns_list, err = utils.get_variable("BLACKLIST_IGNORE_RDNS")
|
||||
if rdns_list and rdns_list ~= "" then
|
||||
for element in rdns_list:gmatch("%S+") do
|
||||
table.insert(blacklists["RDNS"], element)
|
||||
end
|
||||
end
|
||||
if ignored_rdns_list and ignored_rdns_list ~= "" then
|
||||
for element in ignored_rdns_list:gmatch("%S+") do
|
||||
table.insert(blacklists["IGNORE_RDNS"], element)
|
||||
end
|
||||
end
|
||||
for i, suffix in ipairs(blacklists["RDNS"]) do
|
||||
if rdns:sub(- #suffix) == suffix then
|
||||
for j, ignore_suffix in ipairs(blacklists["IGNORE_RDNS"]) do
|
||||
if rdns:sub(- #ignore_suffix) == ignore_suffix then
|
||||
ignore = true
|
||||
self:add_to_cache("ignore_rdns" .. ngx.var.remote_addr, "rDNS" .. suffix)
|
||||
logger.log(ngx.NOTICE, "BLACKLIST",
|
||||
"client IP " .. ngx.var.remote_addr .. " is in blacklist (info = rDNS " .. suffix .. ") but is ignored")
|
||||
break
|
||||
end
|
||||
end
|
||||
if not ignore then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "rDNS" .. suffix)
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in blacklist (info = rDNS " .. suffix .. ")", true,
|
||||
utils.get_deny_status()
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if ASN is in blacklist
|
||||
if not cached_ip then
|
||||
if utils.ip_is_global(ngx.var.remote_addr) then
|
||||
local asn, err = utils.get_asn(ngx.var.remote_addr)
|
||||
if not asn then
|
||||
ret = false
|
||||
ret_err = "error while trying to get asn number : " .. err
|
||||
else
|
||||
local asn_list, err = utils.get_variable("BLACKLIST_ASN")
|
||||
local ignored_asn_list, err = utils.get_variable("BLACKLIST_IGNORE_ASN")
|
||||
if asn_list and asn_list ~= "" then
|
||||
for element in asn_list:gmatch("%S+") do
|
||||
table.insert(blacklists["ASN"], element)
|
||||
end
|
||||
end
|
||||
if ignored_asn_list and ignored_asn_list ~= "" then
|
||||
for element in ignored_asn_list:gmatch("%S+") do
|
||||
table.insert(blacklists["IGNORE_ASN"], element)
|
||||
end
|
||||
end
|
||||
for i, asn_bl in ipairs(blacklists["ASN"]) do
|
||||
if tostring(asn) == asn_bl then
|
||||
for j, ignore_asn_bl in ipairs(blacklists["IGNORE_ASN"]) do
|
||||
if tostring(asn) == ignore_asn_bl then
|
||||
ignore = true
|
||||
self:add_to_cache("ignore_asn" .. ngx.var.remote_addr, "ASN" .. tostring(asn))
|
||||
logger.log(ngx.NOTICE, "BLACKLIST",
|
||||
"client IP " .. ngx.var.remote_addr .. " is in blacklist (info = ASN " .. tostring(asn) .. ") but is ignored")
|
||||
break
|
||||
end
|
||||
end
|
||||
if not ignore then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ASN " .. tostring(asn))
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in blacklist (kind = ASN " .. tostring(asn) .. ")", true,
|
||||
utils.get_deny_status()
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- IP is not blacklisted
|
||||
local ok, err = self:add_to_cache("ip" .. ngx.var.remote_addr, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
end
|
||||
|
||||
-- Check if User-Agent is in blacklist
|
||||
if not cached_ua and ngx.var.http_user_agent then
|
||||
local ua_list, err = utils.get_variable("BLACKLIST_USER_AGENT")
|
||||
local ignored_ua_list, err = utils.get_variable("BLACKLIST_IGNORE_USER_AGENT")
|
||||
if ua_list and ua_list ~= "" then
|
||||
for element in ua_list:gmatch("%S+") do
|
||||
table.insert(blacklists["USER_AGENT"], element)
|
||||
end
|
||||
end
|
||||
if ignored_ua_list and ignored_ua_list ~= "" then
|
||||
for element in ignored_ua_list:gmatch("%S+") do
|
||||
table.insert(blacklists["IGNORE_USER_AGENT"], element)
|
||||
end
|
||||
end
|
||||
for i, ua_bl in ipairs(blacklists["USER_AGENT"]) do
|
||||
if ngx.var.http_user_agent:match(ua_bl) then
|
||||
for j, ignore_ua_bl in ipairs(blacklists["IGNORE_USER_AGENT"]) do
|
||||
if ngx.var.http_user_agent:match(ignore_ua_bl) then
|
||||
ignore = true
|
||||
self:add_to_cache("ignore_ua" .. ngx.var.remote_addr, "UA" .. ua_bl)
|
||||
logger.log(ngx.NOTICE, "BLACKLIST",
|
||||
"client User-Agent " .. ngx.var.http_user_agent .. " is in blacklist (matched " .. ua_bl .. ") but is ignored")
|
||||
break
|
||||
end
|
||||
end
|
||||
if not ignore then
|
||||
self:add_to_cache("ua" .. ngx.var.http_user_agent, "UA " .. ua_bl)
|
||||
return ret, "client User-Agent " .. ngx.var.http_user_agent .. " is in blacklist (matched " .. ua_bl .. ")", true,
|
||||
utils.get_deny_status()
|
||||
end
|
||||
end
|
||||
end
|
||||
-- UA is not blacklisted
|
||||
local ok, err = self:add_to_cache("ua" .. ngx.var.http_user_agent, "ok")
|
||||
local already_cached = {
|
||||
["IP"] = false,
|
||||
["URI"] = false,
|
||||
["UA"] = false
|
||||
}
|
||||
for k, v in pairs(checks) do
|
||||
local ok, cached = self:is_in_cache(v)
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
self.logger:log(ngx.ERR, "error while checking cache : " .. cached)
|
||||
elseif cached and cached ~= "ok" then
|
||||
return self:ret(true, k + " is in cached blacklist (info : " .. cached .. ")", utils.get_deny_status())
|
||||
end
|
||||
if cached then
|
||||
already_cached[k] = true
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if URI is in blacklist
|
||||
if not cached_uri then
|
||||
local uri_list, err = utils.get_variable("BLACKLIST_URI")
|
||||
local ignored_uri_list, err = utils.get_variable("BLACKLIST_IGNORE_URI")
|
||||
if uri_list and uri_list ~= "" then
|
||||
for element in uri_list:gmatch("%S+") do
|
||||
table.insert(blacklists["URI"], element)
|
||||
end
|
||||
end
|
||||
if ignored_uri_list and ignored_uri_list ~= "" then
|
||||
for element in ignored_uri_list:gmatch("%S+") do
|
||||
table.insert(blacklists["IGNORE_URI"], element)
|
||||
end
|
||||
end
|
||||
for i, uri_bl in ipairs(blacklists["URI"]) do
|
||||
if ngx.var.uri:match(uri_bl) then
|
||||
for j, ignore_uri_bl in ipairs(blacklists["IGNORE_URI"]) do
|
||||
if ngx.var.uri:match(ignore_uri_bl) then
|
||||
ignore = true
|
||||
self:add_to_cache("ignore_uri" .. ngx.var.remote_addr, "URI" .. uri_bl)
|
||||
logger.log(ngx.NOTICE, "BLACKLIST",
|
||||
"client URI " .. ngx.var.uri .. " is in blacklist (matched " .. uri_bl .. ") but is ignored")
|
||||
break
|
||||
end
|
||||
end
|
||||
if not ignore then
|
||||
self:add_to_cache("uri" .. ngx.var.uri, "URI " .. uri_bl)
|
||||
return ret, "client URI " .. ngx.var.uri .. " is in blacklist (matched " .. uri_bl .. ")", true,
|
||||
utils.get_deny_status()
|
||||
end
|
||||
end
|
||||
end
|
||||
-- Check lists
|
||||
if not self.lists then
|
||||
return self:ret(false, "lists is nil")
|
||||
end
|
||||
|
||||
-- URI is not blacklisted
|
||||
local ok, err = self:add_to_cache("uri" .. ngx.var.uri, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
end
|
||||
|
||||
return ret, "IP is not in list (error = " .. ret_err .. ")", false, nil
|
||||
end
|
||||
|
||||
function _M:preread()
|
||||
-- Check if preread is needed
|
||||
local preread_needed, err = utils.get_variable("USE_BLACKLIST")
|
||||
if preread_needed == nil then
|
||||
return false, err
|
||||
end
|
||||
if access_needed ~= "yes" then
|
||||
return true, "Blacklist not activated"
|
||||
end
|
||||
|
||||
-- Check the cache
|
||||
local cached_ip, err = self:is_in_cache("ip" .. ngx.var.remote_addr)
|
||||
local cached_ignored_ip, err = self:is_in_cache("ignore_ip" .. ngx.var.remote_addr)
|
||||
if cached_ignored_ip then
|
||||
logger.log(ngx.NOTICE, "BLACKLIST", "IP is in cached ignore blacklist (info: " .. cached_ignored_ip .. ")")
|
||||
elseif cached_ip and cached_ip ~= "ok" then
|
||||
return true, "IP is in blacklist cache (info = " .. cached_ip .. ")", true, utils.get_deny_status()
|
||||
elseif cached_ip then
|
||||
return true, "IP is in blacklist cache (not blacklisted)", false, nil
|
||||
end
|
||||
|
||||
-- Get list
|
||||
local data, err = datastore:get("plugin_blacklist_list")
|
||||
if not data then
|
||||
return false, "can't get Blacklist list : " .. err, false, nil
|
||||
end
|
||||
local ok, blacklists = pcall(cjson.decode, data)
|
||||
if not ok then
|
||||
return false, "error while decoding blacklists : " .. blacklists, false, nil
|
||||
end
|
||||
|
||||
-- Return value
|
||||
local ret, ret_err = true, "success"
|
||||
|
||||
-- Check if IP is in IP/net blacklist
|
||||
local ip_net, err = utils.get_variable("BLACKLIST_IP")
|
||||
local ignored_ip_net, err = utils.get_variable("BLACKLIST_IGNORE_IP")
|
||||
if ip_net and ip_net ~= "" then
|
||||
for element in ip_net:gmatch("%S+") do
|
||||
table.insert(blacklists["IP"], element)
|
||||
end
|
||||
end
|
||||
if ignored_ip_net and ignored_ip_net ~= "" then
|
||||
for element in ignored_ip_net:gmatch("%S+") do
|
||||
table.insert(blacklists["IGNORE_IP"], element)
|
||||
end
|
||||
end
|
||||
if not cached_ip then
|
||||
local ipm, err = ipmatcher.new(blacklists["IP"])
|
||||
local ipm_ignore, err_ignore = ipmatcher.new(blacklists["IGNORE_IP"])
|
||||
if not ipm then
|
||||
ret = false
|
||||
ret_err = "can't instantiate ipmatcher " .. err
|
||||
elseif not ipm_ignore then
|
||||
ret = false
|
||||
ret_err = "can't instantiate ipmatcher " .. err_ignore
|
||||
else
|
||||
if ipm:match(ngx.var.remote_addr) then
|
||||
if ipm_ignore:match(ngx.var.remote_addr) then
|
||||
self:add_to_cache("ignore_ip" .. ngx.var.remote_addr, "ip/net")
|
||||
logger.log(ngx.NOTICE, "BLACKLIST", "client IP " .. ngx.var.remote_addr .. " is in blacklist but is ignored")
|
||||
else
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ip/net")
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in blacklist", true, utils.get_deny_status()
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Instantiate ignore variable
|
||||
local ignore = false
|
||||
|
||||
-- Check if rDNS is in blacklist
|
||||
local rdns_global, err = utils.get_variable("BLACKLIST_RDNS_GLOBAL")
|
||||
local check = true
|
||||
if not rdns_global then
|
||||
logger.log(ngx.ERR, "BLACKLIST", "Error while getting BLACKLIST_RDNS_GLOBAL variable : " .. err)
|
||||
elseif rdns_global == "yes" then
|
||||
check, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if check == nil then
|
||||
logger.log(ngx.ERR, "BLACKLIST", "Error while getting checking if IP is global : " .. err)
|
||||
end
|
||||
end
|
||||
if not cached_ip and check then
|
||||
local rdns, err = utils.get_rdns(ngx.var.remote_addr)
|
||||
if not rdns then
|
||||
ret = false
|
||||
ret_err = "error while trying to get reverse dns : " .. err
|
||||
else
|
||||
local rdns_list, err = utils.get_variable("BLACKLIST_RDNS")
|
||||
local ignored_rdns_list, err = utils.get_variable("BLACKLIST_IGNORE_RDNS")
|
||||
if rdns_list and rdns_list ~= "" then
|
||||
for element in rdns_list:gmatch("%S+") do
|
||||
table.insert(blacklists["RDNS"], element)
|
||||
end
|
||||
end
|
||||
if ignored_rdns_list and ignored_rdns_list ~= "" then
|
||||
for element in ignored_rdns_list:gmatch("%S+") do
|
||||
table.insert(blacklists["IGNORE_RDNS"], element)
|
||||
end
|
||||
end
|
||||
for i, suffix in ipairs(blacklists["RDNS"]) do
|
||||
if rdns:sub(- #suffix) == suffix then
|
||||
for j, ignore_suffix in ipairs(blacklists["IGNORE_RDNS"]) do
|
||||
if rdns:sub(- #ignore_suffix) == ignore_suffix then
|
||||
ignore = true
|
||||
self:add_to_cache("ignore_rdns" .. ngx.var.remote_addr, "rDNS" .. suffix)
|
||||
logger.log(ngx.NOTICE, "BLACKLIST",
|
||||
"client IP " .. ngx.var.remote_addr .. " is in blacklist (info = rDNS " .. suffix .. ") but is ignored")
|
||||
break
|
||||
end
|
||||
end
|
||||
if not ignore then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "rDNS" .. suffix)
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in blacklist (info = rDNS " .. suffix .. ")", true,
|
||||
utils.get_deny_status()
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if ASN is in blacklist
|
||||
if not cached_ip then
|
||||
if utils.ip_is_global(ngx.var.remote_addr) then
|
||||
local asn, err = utils.get_asn(ngx.var.remote_addr)
|
||||
if not asn then
|
||||
ret = false
|
||||
ret_err = "error while trying to get asn number : " .. err
|
||||
-- Perform checks
|
||||
for k, v in pairs(checks) do
|
||||
if not already_cached[k] then
|
||||
local ok, blacklisted = self:is_blacklisted(k)
|
||||
if ok == nil then
|
||||
self.logger:log(ngx.ERR, "error while checking if " .. k .. " is blacklisted : " .. blacklisted)
|
||||
else
|
||||
local asn_list, err = utils.get_variable("BLACKLIST_ASN")
|
||||
local ignored_asn_list, err = utils.get_variable("BLACKLIST_IGNORE_ASN")
|
||||
if asn_list and asn_list ~= "" then
|
||||
for element in asn_list:gmatch("%S+") do
|
||||
table.insert(blacklists["ASN"], element)
|
||||
end
|
||||
local ok, err = self:add_to_cache(self:kind_to_ele(k), blacklisted)
|
||||
if not ok then
|
||||
self.logger:log(ngx.ERR, "error while adding element to cache : " .. err)
|
||||
end
|
||||
if ignored_asn_list and ignored_asn_list ~= "" then
|
||||
for element in ignored_asn_list:gmatch("%S+") do
|
||||
table.insert(blacklists["IGNORE_ASN"], element)
|
||||
end
|
||||
if blacklisted ~= "ok" then
|
||||
return self:ret(true, k + " is blacklisted (info : " .. blacklisted .. ")", utils.get_deny_status())
|
||||
end
|
||||
for i, asn_bl in ipairs(blacklists["ASN"]) do
|
||||
if tostring(asn) == asn_bl then
|
||||
for j, ignore_asn_bl in ipairs(blacklists["IGNORE_ASN"]) do
|
||||
if tostring(asn) == ignore_asn_bl then
|
||||
ignore = true
|
||||
self:add_to_cache("ignore_asn" .. ngx.var.remote_addr, "ASN" .. tostring(asn))
|
||||
logger.log(ngx.NOTICE, "BLACKLIST",
|
||||
"client IP " .. ngx.var.remote_addr .. " is in blacklist (info = ASN " .. tostring(asn) .. ") but is ignored")
|
||||
break
|
||||
end
|
||||
end
|
||||
if not ignore then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ASN " .. tostring(asn))
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in blacklist (kind = ASN " .. tostring(asn) .. ")", true,
|
||||
utils.get_deny_status()
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Return
|
||||
return self:ret(true, "not blacklisted")
|
||||
|
||||
end
|
||||
|
||||
function blacklist:preread()
|
||||
return self:access()
|
||||
end
|
||||
|
||||
function blacklist:kind_to_ele(kind)
|
||||
if kind == "IP" then
|
||||
return "ip" .. ngx.var.remote_addr
|
||||
elseif kind == "UA" then
|
||||
return "ua" .. ngx.var.http_user_agent
|
||||
elseif kind == "URI" then
|
||||
return "uri" .. ngx.var.uri
|
||||
end
|
||||
end
|
||||
|
||||
function blacklist:is_in_cache(ele)
|
||||
local ok, data = self.cachestore:get("plugin_blacklist_" .. ele)
|
||||
if not ok then
|
||||
return false, data
|
||||
end
|
||||
return true, data
|
||||
end
|
||||
|
||||
function blacklist:add_to_cache(ele, value)
|
||||
local ok, err = self.cachestore:set("plugin_blacklist_" .. ele, value)
|
||||
if not ok then
|
||||
return false, err
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
function blacklist:is_blacklisted(kind)
|
||||
if kind == "IP" then
|
||||
return self:is_blacklisted_ip()
|
||||
elseif kind == "URI" then
|
||||
return self:is_blacklisted_uri()
|
||||
elseif kind == "UA" then
|
||||
return self:is_blacklisted_ua()
|
||||
end
|
||||
return false, "unknown kind " .. kind
|
||||
end
|
||||
|
||||
function blacklist:is_blacklisted_ip()
|
||||
-- Check if IP is in ignore list
|
||||
local ipm, err = ipmatcher.new(self.lists["IGNORE_IP"])
|
||||
if not ipm then
|
||||
return nil, err
|
||||
end
|
||||
local match, err = ipm:match(ngx.var.remote_addr)
|
||||
if err then
|
||||
return nil, err
|
||||
end
|
||||
if not match then
|
||||
-- Check if IP is in blacklist
|
||||
local ipm, err = ipmatcher.new(self.lists["IP"])
|
||||
if not ipm then
|
||||
return nil, err
|
||||
end
|
||||
local match, err = ipm:match(ngx.var.remote_addr)
|
||||
if err then
|
||||
return nil, err
|
||||
end
|
||||
if match then
|
||||
return true, "ip"
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if rDNS is needed
|
||||
local check_rdns = true
|
||||
local is_global, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if self.variables["BLACKLIST_RDNS_GLOBAL"] == "yes" then
|
||||
if is_global == nil then
|
||||
return nil, err
|
||||
end
|
||||
if not is_global then
|
||||
check_rdns = false
|
||||
end
|
||||
end
|
||||
if check_rdns then
|
||||
-- Get rDNS
|
||||
local rdns_list, err = utils.get_rdns(ngx.var.remote_addr)
|
||||
if not rdns_list then
|
||||
return false, err
|
||||
end
|
||||
-- Check if rDNS is in ignore list
|
||||
local ignore = false
|
||||
for i, ignore_suffix in ipairs(self.lists["IGNORE_RDNS"]) do
|
||||
for j, rdns in ipairs(rdns_list) do
|
||||
if rdns:sub(-#ignore_suffix) == ignore_suffix then
|
||||
ignore = true
|
||||
break
|
||||
end
|
||||
end
|
||||
if ignore then
|
||||
break
|
||||
end
|
||||
end
|
||||
-- Check if rDNS is in blacklist
|
||||
if not ignore then
|
||||
for i, suffix in ipairs(self.lists["RDNS"]) do
|
||||
for j, rdns in ipairs(rdns_list) do
|
||||
if rdns:sub(-#suffix) == suffix then
|
||||
return true, "rDNS " .. suffix
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- IP is not blacklisted
|
||||
local ok, err = self:add_to_cache("ip" .. ngx.var.remote_addr, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
end
|
||||
return ret, "IP is not in list (error = " .. ret_err .. ")", false, nil
|
||||
end
|
||||
|
||||
function _M:is_in_cache(ele)
|
||||
local kind, err = datastore:get("plugin_blacklist_cache_" .. ngx.var.server_name .. ele)
|
||||
if not kind then
|
||||
if err ~= "not found" then
|
||||
logger.log(ngx.ERR, "BLACKLIST", "Error while accessing cache : " .. err)
|
||||
-- Check if ASN is in ignore list
|
||||
if is_global then
|
||||
local asn, err = utils.get_asn(ngx.var.remote_addr)
|
||||
if not asn then
|
||||
self.logger:log(ngx.ERR, "7")
|
||||
return nil, err
|
||||
end
|
||||
local ignore = false
|
||||
for i, ignore_asn in ipairs(self.lists["IGNORE_ASN"]) do
|
||||
if ignore_asn == tostring(asn) then
|
||||
ignore = true
|
||||
break
|
||||
end
|
||||
end
|
||||
-- Check if ASN is in blacklist
|
||||
if not ignore then
|
||||
for i, bl_asn in ipairs(self.lists["ASN"]) do
|
||||
if bl_asn == tostring(asn) then
|
||||
return true, "ASN " .. bl_asn
|
||||
end
|
||||
end
|
||||
end
|
||||
return false, err
|
||||
end
|
||||
return kind, "success"
|
||||
|
||||
-- Not blacklisted
|
||||
return false, "ok"
|
||||
end
|
||||
|
||||
function _M:add_to_cache(ele, kind)
|
||||
local ok, err = datastore:set("plugin_blacklist_cache_" .. ngx.var.server_name .. ele, kind, 3600)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "BLACKLIST", "Error while adding element to cache : " .. err)
|
||||
return false, err
|
||||
function blacklist:is_blacklisted_uri()
|
||||
-- Check if URI is in ignore list
|
||||
local ignore = false
|
||||
for i, ignore_uri in ipairs(self.lists["IGNORE_URI"]) do
|
||||
if ngx.var.uri:match(ignore_uri) then
|
||||
ignore = true
|
||||
break
|
||||
end
|
||||
end
|
||||
return true, "success"
|
||||
-- Check if URI is in blacklist
|
||||
if not ignore then
|
||||
for i, uri in ipairs(self.lists["URI"]) do
|
||||
if ngx.var.uri:match(uri) then
|
||||
return true, "URI " .. uri
|
||||
end
|
||||
end
|
||||
end
|
||||
-- URI is not blacklisted
|
||||
return false, "ok"
|
||||
end
|
||||
|
||||
return _M
|
||||
function blacklist:is_blacklisted_ua()
|
||||
-- Check if UA is in ignore list
|
||||
local ignore = false
|
||||
for i, ignore_ua in ipairs(self.lists["IGNORE_USER_AGENT"]) do
|
||||
if ngx.var.http_user_agent:match(ignore_ua) then
|
||||
ignore = true
|
||||
break
|
||||
end
|
||||
end
|
||||
-- Check if UA is in blacklist
|
||||
if not ignore then
|
||||
for i, ua in ipairs(self.lists["USER_AGENT"]) do
|
||||
if ngx.var.http_user_agent:match(ua) then
|
||||
return true, "UA " .. ua
|
||||
end
|
||||
end
|
||||
end
|
||||
-- UA is not blacklisted
|
||||
return false, "ok"
|
||||
end
|
||||
|
||||
return blacklist
|
|
@ -1,67 +1,50 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local datastore = require "bunkerweb.datastore"
|
||||
local json = require "cjson"
|
||||
local http = require "resty.http"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local http = require "resty.http"
|
||||
local bunkernet = class("bunkernet", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
local server, err = datastore:get("variable_BUNKERNET_SERVER")
|
||||
if not server then
|
||||
return nil, "can't get BUNKERNET_SERVER from datastore : " .. err
|
||||
end
|
||||
self.server = server
|
||||
local id, err = datastore:get("plugin_bunkernet_id")
|
||||
if not id then
|
||||
self.id = nil
|
||||
function bunkernet:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "bunkernet")
|
||||
-- Check if init is needed
|
||||
if ngx.get_phase() == "init" then
|
||||
local init_needed, err = utils.has_variable("USE_BUNKERNET", "yes")
|
||||
if init_needed == nil then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.init_needed = init_needed
|
||||
-- Get BunkerNet ID
|
||||
else
|
||||
self.id = id
|
||||
local id, err = self.datastore:get("plugin_bunkernet_id")
|
||||
if not id then
|
||||
self.bunkernet_id = nil
|
||||
else
|
||||
self.bunkernet_id = id
|
||||
end
|
||||
end
|
||||
return self, nil
|
||||
end
|
||||
|
||||
function _M:init()
|
||||
local init_needed, err = utils.has_variable("USE_BUNKERNET", "yes")
|
||||
if init_needed == nil then
|
||||
return false, err
|
||||
end
|
||||
if not init_needed then
|
||||
return true, "no service uses BunkerNet, skipping init"
|
||||
function bunkernet:init()
|
||||
-- Check if init is needed
|
||||
if not self.init_needed then
|
||||
return self:ret(true, "no service uses bunkernet, skipping init")
|
||||
end
|
||||
-- Check if instance ID is present
|
||||
local f, err = io.open("/var/cache/bunkerweb/bunkernet/instance.id", "r")
|
||||
if not f then
|
||||
return false, "can't read instance id : " .. err
|
||||
return self:ret(false, "can't read instance id : " .. err)
|
||||
end
|
||||
-- Retrieve instance ID
|
||||
id = f:read("*all"):gsub("[\r\n]", "")
|
||||
f:close()
|
||||
self.id = id
|
||||
-- TODO : regex check just in case
|
||||
-- Send a ping with the ID
|
||||
--local ok, err, status, response = self:ping()
|
||||
-- BunkerNet server is down or instance can't access it
|
||||
--if not ok then
|
||||
--return false, "can't send request to BunkerNet service : " .. err
|
||||
-- Local instance ID is unknown to the server, let's delete it
|
||||
--elseif status == 401 then
|
||||
--local ok, message = os.remove("/var/cache/bunkerweb/bunkernet/instance.id")
|
||||
--if not ok then
|
||||
--return false, "can't remove instance ID " .. message
|
||||
--end
|
||||
--return false, "instance ID is not valid"
|
||||
--elseif status == 429 then
|
||||
--return false, "sent too many requests to the BunkerNet service"
|
||||
--elseif status ~= 200 then
|
||||
--return false, "unknown error from BunkerNet service (HTTP status = " .. tostring(status) .. ")"
|
||||
--end
|
||||
-- Store ID in datastore
|
||||
local ok, err = datastore:set("plugin_bunkernet_id", id)
|
||||
local ok, err = self.datastore:set("plugin_bunkernet_id", id)
|
||||
if not ok then
|
||||
return false, "can't save instance ID to the datastore : " .. err
|
||||
return self:ret(false, "can't save instance ID to the datastore : " .. err)
|
||||
end
|
||||
-- Load databases
|
||||
local ret = true
|
||||
|
@ -81,19 +64,85 @@ function _M:init()
|
|||
end
|
||||
end
|
||||
if not ret then
|
||||
return false, "error while reading database : " .. err
|
||||
return self:ret(false, "error while reading database : " .. err)
|
||||
end
|
||||
f:close()
|
||||
local ok, err = datastore:set("plugin_bunkernet_db", cjson.encode(db))
|
||||
local ok, err = self.datastore:set("plugin_bunkernet_db", cjson.encode(db))
|
||||
if not ok then
|
||||
return false, "can't store BunkerNet database into datastore : " .. err
|
||||
return self:ret(false, "can't store bunkernet database into datastore : " .. err)
|
||||
end
|
||||
return true,
|
||||
"successfully connected to the BunkerNet service " ..
|
||||
self.server .. " with machine ID " .. id .. " and " .. tostring(i) .. " bad IPs in database"
|
||||
return self:ret(true, "successfully connected to the bunkernet service " .. self.server .. " with machine ID " .. id .. " and " .. tostring(i) .. " bad IPs in database")
|
||||
end
|
||||
|
||||
function _M:request(method, url, data)
|
||||
function bunkernet:log(bypass_use_bunkernet)
|
||||
if not bypass_use_bunkernet then
|
||||
-- Check if BunkerNet is enabled
|
||||
if self.variables["USE_BUNKERNET"] ~= "yes" then
|
||||
return self:ret(true, "bunkernet not activated")
|
||||
end
|
||||
end
|
||||
-- Check if BunkerNet ID is generated
|
||||
if not self.bunkernet_id then
|
||||
return self:ret(true, "bunkernet ID is not generated")
|
||||
end
|
||||
-- Check if IP has been blocked
|
||||
local reason = utils.get_reason()
|
||||
if not reason then
|
||||
return self:ret(true, "ip is not blocked")
|
||||
end
|
||||
if reason == "bunkernet" then
|
||||
return self:ret(true, "skipping report because the reason is bunkernet")
|
||||
end
|
||||
-- Check if IP is global
|
||||
local is_global, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if is_global == nil then
|
||||
return self:ret(false, "error while checking if IP is global " .. err)
|
||||
end
|
||||
if not is_global then
|
||||
return self:ret(true, "IP is not global")
|
||||
end
|
||||
-- TODO : check if IP has been reported recently
|
||||
local function report_callback(premature, obj, ip, reason, method, url, headers)
|
||||
local ok, err, status, data = obj:report(ip, reason, method, url, headers)
|
||||
if status == 429 then
|
||||
obj.logger:log(ngx.WARN, "bunkernet API is rate limiting us")
|
||||
elseif not ok then
|
||||
obj.logger:log(ngx.ERR, "can't report IP : " .. err)
|
||||
else
|
||||
obj.logger:log(ngx.NOTICE, "successfully reported IP " .. ip .. " (reason : " .. reason .. ")")
|
||||
end
|
||||
end
|
||||
|
||||
local hdr, err = ngx.timer.at(0, report_callback, self, ngx.var.remote_addr, reason, ngx.var.request_method,
|
||||
ngx.var.request_uri, ngx.req.get_headers())
|
||||
if not hdr then
|
||||
return self:ret(false, "can't create report timer : " .. err)
|
||||
end
|
||||
return self:ret(true, "created report timer")
|
||||
end
|
||||
|
||||
function bunkernet:log_default()
|
||||
-- Check if BunkerNet is activated
|
||||
local check, err = utils.has_variable("USE_BUNKERNET", "yes")
|
||||
if check == nil then
|
||||
return false, "error while checking variable USE_BUNKERNET (" .. err .. ")"
|
||||
end
|
||||
if not check then
|
||||
return true, "bunkernet not enabled"
|
||||
end
|
||||
-- Check if default server is disabled
|
||||
local check, err = utils.get_variable("DISABLE_DEFAULT_SERVER", false)
|
||||
if check == nil then
|
||||
return false, "error while getting variable DISABLE_DEFAULT_SERVER (" .. err .. ")"
|
||||
end
|
||||
if check ~= "yes" then
|
||||
return true, "default server not disabled"
|
||||
end
|
||||
-- Call log method
|
||||
return self:log(true)
|
||||
end
|
||||
|
||||
function bunkernet:request(method, url, data)
|
||||
local httpc, err = http.new()
|
||||
if not httpc then
|
||||
return false, "can't instantiate http object : " .. err, nil, nil
|
||||
|
@ -106,7 +155,7 @@ function _M:request(method, url, data)
|
|||
for k, v in pairs(data) do
|
||||
all_data[k] = v
|
||||
end
|
||||
local res, err = httpc:request_uri(self.server .. url, {
|
||||
local res, err = httpc:request_uri(self.variables["BUNKERNET_SERVER"] .. url, {
|
||||
method = method,
|
||||
body = cjson.encode(all_data),
|
||||
headers = {
|
||||
|
@ -128,11 +177,11 @@ function _M:request(method, url, data)
|
|||
return true, "success", res.status, ret
|
||||
end
|
||||
|
||||
function _M:ping()
|
||||
function bunkernet:ping()
|
||||
return self:request("GET", "/ping", {})
|
||||
end
|
||||
|
||||
function _M:report(ip, reason, method, url, headers)
|
||||
function bunkernet:report(ip, reason, method, url, headers)
|
||||
local data = {
|
||||
ip = ip,
|
||||
reason = reason,
|
||||
|
@ -143,107 +192,4 @@ function _M:report(ip, reason, method, url, headers)
|
|||
return self:request("POST", "/report", data)
|
||||
end
|
||||
|
||||
function _M:log(bypass_use_bunkernet)
|
||||
if not bypass_use_bunkernet then
|
||||
-- Check if BunkerNet is activated
|
||||
local use_bunkernet = utils.get_variable("USE_BUNKERNET")
|
||||
if use_bunkernet ~= "yes" then
|
||||
return true, "bunkernet not activated"
|
||||
end
|
||||
end
|
||||
-- Check if BunkerNet ID is generated
|
||||
if not self.id then
|
||||
return true, "bunkernet ID is not generated"
|
||||
end
|
||||
-- Check if IP has been blocked
|
||||
local reason = utils.get_reason()
|
||||
if not reason then
|
||||
return true, "ip is not blocked"
|
||||
end
|
||||
if reason == "bunkernet" then
|
||||
return true, "skipping report because the reason is bunkernet"
|
||||
end
|
||||
-- Check if IP is global
|
||||
local is_global, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if is_global == nil then
|
||||
return false, "error while checking if IP is global " .. err
|
||||
end
|
||||
if not is_global then
|
||||
return true, "IP is not global"
|
||||
end
|
||||
-- Only report if it hasn't been reported for the same reason recently
|
||||
--local reported = datastore:get("plugin_bunkernet_cache_" .. ngx.var.remote_addr .. reason)
|
||||
--if reported then
|
||||
--return true, "ip already reported recently"
|
||||
--end
|
||||
local function report_callback(premature, obj, ip, reason, method, url, headers)
|
||||
local ok, err, status, data = obj:report(ip, reason, method, url, headers)
|
||||
if status == 429 then
|
||||
logger.log(ngx.WARN, "BUNKERNET", "BunkerNet API is rate limiting us")
|
||||
elseif not ok then
|
||||
logger.log(ngx.ERR, "BUNKERNET", "Can't report IP : " .. err)
|
||||
else
|
||||
logger.log(ngx.NOTICE, "BUNKERNET", "Successfully reported IP " .. ip .. " (reason : " .. reason .. ")")
|
||||
--local ok, err = datastore:set("plugin_bunkernet_cache_" .. ip .. reason, true, 3600)
|
||||
--if not ok then
|
||||
--logger.log(ngx.ERR, "BUNKERNET", "Can't store cached report : " .. err)
|
||||
--end
|
||||
end
|
||||
end
|
||||
|
||||
local hdr, err = ngx.timer.at(0, report_callback, self, ngx.var.remote_addr, reason, ngx.var.request_method,
|
||||
ngx.var.request_uri, ngx.req.get_headers())
|
||||
if not hdr then
|
||||
return false, "can't create report timer : " .. err
|
||||
end
|
||||
return true, "created report timer"
|
||||
end
|
||||
|
||||
function _M:log_default()
|
||||
-- Check if bunkernet is activated
|
||||
local check, err = utils.has_variable("USE_BUNKERNET", "yes")
|
||||
if check == nil then
|
||||
return false, "error while checking variable USE_BUNKERNET (" .. err .. ")"
|
||||
end
|
||||
if not check then
|
||||
return true, "bunkernet not enabled"
|
||||
end
|
||||
-- Check if default server is disabled
|
||||
local check, err = utils.get_variable("DISABLE_DEFAULT_SERVER", false)
|
||||
if check == nil then
|
||||
return false, "error while getting variable DISABLE_DEFAULT_SERVER (" .. err .. ")"
|
||||
end
|
||||
if check ~= "yes" then
|
||||
return true, "default server not disabled"
|
||||
end
|
||||
-- Call log method
|
||||
return self:log(true)
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
local use_bunkernet = utils.get_variable("USE_BUNKERNET")
|
||||
if use_bunkernet ~= "yes" then
|
||||
return true, "bunkernet not activated", false, nil
|
||||
end
|
||||
-- Check if BunkerNet ID is generated
|
||||
if not self.id then
|
||||
return true, "bunkernet ID is not generated"
|
||||
end
|
||||
local data, err = datastore:get("plugin_bunkernet_db")
|
||||
if not data then
|
||||
return false, "can't get bunkernet db : " .. err, false, nil
|
||||
end
|
||||
local db = cjson.decode(data)
|
||||
for index, value in ipairs(db.ip) do
|
||||
if value == ngx.var.remote_addr then
|
||||
return true, "ip is in database", true, utils.get_deny_status()
|
||||
end
|
||||
end
|
||||
return true, "ip is not in database", false, nil
|
||||
end
|
||||
|
||||
function _M:api()
|
||||
return false, nil, nil
|
||||
end
|
||||
|
||||
return _M
|
||||
return bunkernet
|
|
@ -1,30 +1,22 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cors = class("cors", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function cors:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "cors")
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
-- Check if access is needed
|
||||
local cors, err = utils.get_variable("USE_CORS")
|
||||
if cors == nil then
|
||||
return false, err, nil, nil
|
||||
function cors:header()
|
||||
-- Check if header is needed
|
||||
if self.variables["USE_CORS"] ~= "yes" then
|
||||
return self:ret(true, "service doesn't use CORS")
|
||||
end
|
||||
if cors == "no" then
|
||||
return true, "CORS not activated", nil, nil
|
||||
end
|
||||
|
||||
-- Check if method is OPTIONS
|
||||
if ngx.var.request_method ~= "OPTIONS" then
|
||||
return true, "method is not OPTIONS", nil, nil
|
||||
return self:ret(true, "method is not OPTIONS")
|
||||
end
|
||||
|
||||
-- Add headers
|
||||
local cors_headers = {
|
||||
["CORS_MAX_AGE"] = "Access-Control-Max-Age",
|
||||
|
@ -32,10 +24,8 @@ function _M:access()
|
|||
["CORS_ALLOW_HEADERS"] = "Access-Control-Allow-Headers"
|
||||
}
|
||||
for variable, header in pairs(cors_headers) do
|
||||
local value, err = utils.get_variable(variable)
|
||||
if value == nil then
|
||||
logger.log(ngx.ERR, "CORS", "can't get " .. variable .. " from datastore : " .. err)
|
||||
elseif value ~= "" then
|
||||
local value = self.variables[variable]
|
||||
if value ~= "" then
|
||||
ngx.header[header] = value
|
||||
end
|
||||
end
|
||||
|
@ -43,8 +33,7 @@ function _M:access()
|
|||
ngx.header["Content-Length"] = "0"
|
||||
|
||||
-- Send CORS policy with a 204 (no content) status
|
||||
return true, "sent CORS policy", true, ngx.HTTP_NO_CONTENT
|
||||
|
||||
return self:ret(true, "sent CORS policy")
|
||||
end
|
||||
|
||||
return _M
|
||||
return cors
|
|
@ -1,74 +1,81 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local cachestore = require "bunkerweb.cachestore"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local country = class("country", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function country:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "country")
|
||||
-- Instantiate cachestore
|
||||
local use_redis, err = utils.get_variable("USE_REDIS", false)
|
||||
if not use_redis then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.use_redis = use_redis == "yes"
|
||||
self.cachestore = cachestore:new(self.use_redis)
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
-- Get variables
|
||||
local whitelist, err = utils.get_variable("WHITELIST_COUNTRY")
|
||||
if whitelist == nil then
|
||||
return false, err
|
||||
function country:access()
|
||||
-- Don't go further if nothing is enabled
|
||||
if self.variables["WHITELIST_COUNTRY"] == "" and self.variables["BLACKLIST_COUNTRY"] == "" then
|
||||
return self:ret(true, "country not activated")
|
||||
end
|
||||
local blacklist, err = utils.get_variable("BLACKLIST_COUNTRY")
|
||||
if blacklist == nil then
|
||||
return false, err
|
||||
end
|
||||
|
||||
-- Don't go further if nothing is enabled
|
||||
if whitelist == "" and blacklist == "" then
|
||||
return true, "country not activated"
|
||||
end
|
||||
|
||||
-- Check if IP is in cache
|
||||
local data, err = self:is_in_cache(ngx.var.remote_addr)
|
||||
if data then
|
||||
if data.result == "ok" then
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is in country cache (not blacklisted, country = " .. data.country .. ")", nil, nil
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is in country cache (not blacklisted, country = " .. data.country .. ")")
|
||||
end
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is in country cache (blacklisted, country = " .. data.country .. ")", true, utils.get_deny_status()
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is in country cache (blacklisted, country = " .. data.country .. ")", utils.get_deny_status())
|
||||
end
|
||||
|
||||
|
||||
-- Don't go further if IP is not global
|
||||
local is_global, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if is_global == nil then
|
||||
logger.log(ngx.ERR, "COUNTRY", "error while checking if ip is global : " .. err)
|
||||
return self:ret(false, "error while checking if ip is global : " .. err)
|
||||
elseif not is_global then
|
||||
self:add_to_cache(ngx.var.remote_addr, "unknown", "ok")
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is not global, skipping check", nil, nil
|
||||
local ok, err = self:add_to_cache(ngx.var.remote_addr, "unknown", "ok")
|
||||
if not ok then
|
||||
return self:ret(false, "error while adding ip to cache : " .. err)
|
||||
end
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is not global, skipping check")
|
||||
end
|
||||
|
||||
|
||||
-- Get the country of client
|
||||
local country, err = utils.get_country(ngx.var.remote_addr)
|
||||
if not country then
|
||||
return false, "can't get country of client IP " .. ngx.var.remote_addr .. " : " .. err, nil, nil
|
||||
return self:ret(false, "can't get country of client IP " .. ngx.var.remote_addr .. " : " .. err)
|
||||
end
|
||||
|
||||
-- Process whitelist first
|
||||
if whitelist ~= "" then
|
||||
for wh_country in whitelist:gmatch("%S+") do
|
||||
if self.variables["WHITELIST_COUNTRY"] ~= "" then
|
||||
for wh_country in self.variables["WHITELIST_COUNTRY"]:gmatch("%S+") do
|
||||
if wh_country == country then
|
||||
self:add_to_cache(ngx.var.remote_addr, country, "ok")
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is whitelisted (country = " .. country .. ")", nil, nil
|
||||
local ok, err = self:add_to_cache(ngx.var.remote_addr, country, "ok")
|
||||
if not ok then
|
||||
return self:ret(false, "error while adding item to cache : " .. err)
|
||||
end
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is whitelisted (country = " .. country .. ")")
|
||||
end
|
||||
end
|
||||
self:add_to_cache(ngx.var.remote_addr, country, "ko")
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is not whitelisted (country = " .. country .. ")", true, utils.get_deny_status()
|
||||
local ok, err = self:add_to_cache(ngx.var.remote_addr, country, "ko")
|
||||
if not ok then
|
||||
return self:ret(false, "error while adding item to cache : " .. err)
|
||||
end
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is not whitelisted (country = " .. country .. ")", utils.get_deny_status())
|
||||
end
|
||||
|
||||
-- And then blacklist
|
||||
if blacklist ~= "" then
|
||||
for bl_country in blacklist:gmatch("%S+") do
|
||||
if self.variables["BLACKLIST_COUNTRY"] ~= "" then
|
||||
for bl_country in self.variables["BLACKLIST_COUNTRY"]:gmatch("%S+") do
|
||||
if bl_country == country then
|
||||
self:add_to_cache(ngx.var.remote_addr, country, "ko")
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is blacklisted (country = " .. country .. ")", true, utils.get_deny_status()
|
||||
local ok, err = self:add_to_cache(ngx.var.remote_addr, country, "ko")
|
||||
if not ok then
|
||||
return self:ret(false, "error while adding item to cache : " .. err)
|
||||
end
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is blacklisted (country = " .. country .. ")", true, utils.get_deny_status())
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -76,37 +83,29 @@ function _M:access()
|
|||
-- Country IP is not in blacklist
|
||||
local ok, err = self:add_to_cache(ngx.var.remote_addr, country, "ok")
|
||||
if not ok then
|
||||
return false, "error while caching IP " .. ngx.var.remote_addr .. " : " .. err, false, nil
|
||||
return self:ret(false, "error while caching IP " .. ngx.var.remote_addr .. " : " .. err)
|
||||
end
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is not blacklisted (country = " .. country .. ")", nil, nil
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is not blacklisted (country = " .. country .. ")")
|
||||
end
|
||||
|
||||
function _M:preread()
|
||||
function country:preread()
|
||||
return self:access()
|
||||
end
|
||||
|
||||
function _M:is_in_cache(ip)
|
||||
local data, err = datastore:get("plugin_country_cache_" .. ip)
|
||||
if not data then
|
||||
if err ~= "not found" then
|
||||
logger.log(ngx.ERR, "COUNTRY", "Error while accessing cache : " .. err)
|
||||
end
|
||||
return false, err
|
||||
end
|
||||
return cjson.decode(data), "success"
|
||||
end
|
||||
|
||||
function _M:add_to_cache(ip, country, result)
|
||||
local data = {
|
||||
country = country,
|
||||
result = result
|
||||
}
|
||||
local ok, err = datastore:set("plugin_country_cache_" .. ip, cjson.encode(data), 3600)
|
||||
function country:is_in_cache(ip)
|
||||
local ok, data = self.cachestore:get("plugin_country_cache_" .. ip)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "COUNTRY", "Error while adding ip to cache : " .. err)
|
||||
return false, err
|
||||
end
|
||||
return true, "success"
|
||||
return false, data
|
||||
end
|
||||
return true, cjson.decode(data)
|
||||
end
|
||||
|
||||
return _M
|
||||
function country:add_to_cache(ip, country, result)
|
||||
local ok, err = self.cachestore:set("plugin_country_cache_" .. ip, cjson.encode({country = country, result = result}))
|
||||
if not ok then
|
||||
return false, err
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
return country
|
|
@ -1,112 +1,101 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local cachestore = require "bunkerweb.cachestore"
|
||||
local cjson = require "cjson"
|
||||
local resolver = require "resty.dns.resolver"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local resolver = require "resty.dns.resolver"
|
||||
local dnsbl = class("dnsbl", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function dnsbl:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "dnsbl")
|
||||
-- Instantiate cachestore
|
||||
local use_redis, err = utils.get_variable("USE_REDIS", false)
|
||||
if not use_redis then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.use_redis = use_redis == "yes"
|
||||
self.cachestore = cachestore:new(self.use_redis)
|
||||
end
|
||||
|
||||
function _M:init()
|
||||
-- Check if init is needed
|
||||
local init_needed, err = utils.has_variable("USE_DNSBL", "yes")
|
||||
if init_needed == nil then
|
||||
return false, "can't check USE_DNS variable : " .. err
|
||||
end
|
||||
if not init_needed then
|
||||
return true, "no service uses Blacklist, skipping init"
|
||||
end
|
||||
-- Read DNSBL list
|
||||
local str_dnsbls, err = utils.get_variable("DNSBL_LIST", false)
|
||||
if not str_dnsbls then
|
||||
return false, "can't get DNSBL_LIST variable : " .. err
|
||||
end
|
||||
local dnsbls = {}
|
||||
local i = 0
|
||||
for dnsbl in str_dnsbls:gmatch("%S+") do
|
||||
table.insert(dnsbls, dnsbl)
|
||||
i = i + 1
|
||||
end
|
||||
-- Load it into datastore
|
||||
local ok, err = datastore:set("plugin_dnsbl_list", cjson.encode(dnsbls))
|
||||
if not ok then
|
||||
return false, "can't store DNSBL list into datastore : " .. err
|
||||
end
|
||||
return true, "successfully loaded " .. tostring(i) .. " DNSBL server(s)"
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
function dnsbl:access()
|
||||
-- Check if access is needed
|
||||
local access_needed, err = utils.get_variable("USE_DNSBL")
|
||||
if access_needed == nil then
|
||||
return false, err
|
||||
if self.variables["USE_DNSBL"] ~= "yes" then
|
||||
return self:ret(true, "dnsbl not activated")
|
||||
end
|
||||
if access_needed ~= "yes" then
|
||||
return true, "DNSBL not activated"
|
||||
if self.variables["DNSBL_LIST"] == "" then
|
||||
return self:ret(true, "dnsbl list is empty")
|
||||
end
|
||||
|
||||
-- Check if IP is in cache
|
||||
local dnsbl, err = self:is_in_cache(ngx.var.remote_addr)
|
||||
if dnsbl then
|
||||
if dnsbl == "ok" then
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is in DNSBL cache (not blacklisted)", nil, nil
|
||||
local ok, cached = self:is_in_cache(ngx.var.remote_addr)
|
||||
if not ok then
|
||||
return self:ret(false, "error while checking cache : " .. err)
|
||||
elseif cached then
|
||||
if cached == "ok" then
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is in DNSBL cache (not blacklisted)")
|
||||
end
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is in DNSBL cache (server = " .. dnsbl .. ")", true, utils.get_deny_status()
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is in DNSBL cache (server = " .. cached .. ")", utils.get_deny_status())
|
||||
end
|
||||
|
||||
-- Don't go further if IP is not global
|
||||
local is_global, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if is_global == nil then
|
||||
return false, "can't check if client IP is global : " .. err, nil, nil
|
||||
return self:ret(false, "can't check if client IP is global : " .. err)
|
||||
end
|
||||
if not utils.ip_is_global(ngx.var.remote_addr) then
|
||||
self:add_to_cache(ngx.var.remote_addr, "ok")
|
||||
return true, "client IP is not global, skipping DNSBL check", nil, nil
|
||||
if not is_global then
|
||||
local ok, err self:add_to_cache(ngx.var.remote_addr, "ok")
|
||||
if not ok then
|
||||
return self:ret(false, "error while adding element to cache")
|
||||
end
|
||||
return self:ret(true, "client IP is not global, skipping DNSBL check")
|
||||
end
|
||||
|
||||
-- Get list
|
||||
local data, err = datastore:get("plugin_dnsbl_list")
|
||||
if not data then
|
||||
return false, "can't get DNSBL list : " .. err, false, nil
|
||||
end
|
||||
local ok, dnsbls = pcall(cjson.decode, data)
|
||||
if not ok then
|
||||
return false, "error while decoding DNSBL list : " .. dnsbls, false, nil
|
||||
end
|
||||
|
||||
-- Loop on dnsbl list
|
||||
for i, dnsbl in ipairs(dnsbls) do
|
||||
local result, err = self:is_in_dnsbl(dnsbl, ngx.var.remote_addr)
|
||||
-- Loop on DNSBL list
|
||||
for server in self.variables["DNSBL_LIST"]:gmatch("%S+") do
|
||||
local result, err = self:is_in_dnsbl(server)
|
||||
if result == nil then
|
||||
self.logger:log(ngx.ERR, "error while sending DNS request to " .. server .. " : " .. err)
|
||||
end
|
||||
if result then
|
||||
self:add_to_cache(ngx.var.remote_addr, dnsbl)
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in DNSBL (server = " .. dnsbl .. ")", true, utils.get_deny_status()
|
||||
local ok, err self:add_to_cache(ngx.var.remote_addr, server)
|
||||
if not ok then
|
||||
return self:ret(false, "error while adding element to cache : " .. err)
|
||||
end
|
||||
return self:ret(true, "IP is blacklisted by " .. server, utils.get_deny_status())
|
||||
end
|
||||
end
|
||||
|
||||
-- IP is not in DNSBL
|
||||
local ok, err = self:add_to_cache(ngx.var.remote_addr, "ok")
|
||||
if not ok then
|
||||
return false, "IP is not in DNSBL (error = " .. err .. ")", false, nil
|
||||
return self:ret(false, "IP is not in DNSBL (error = " .. err .. ")")
|
||||
end
|
||||
return true, "IP is not in DNSBL", false, nil
|
||||
|
||||
return self:ret(true, "IP is not in DNSBL", false, nil)
|
||||
end
|
||||
|
||||
function _M:preread()
|
||||
function dnsbl:preread()
|
||||
return self:access()
|
||||
end
|
||||
|
||||
function _M:is_in_dnsbl(dnsbl, ip)
|
||||
local request = resolver.arpa_str(ip) .. "." .. dnsbl
|
||||
function dnsbl:is_in_cache(ip)
|
||||
local ok, data = self.cachestore:get("plugin_dnsbl_" .. ip)
|
||||
if not ok then
|
||||
return false, data
|
||||
end
|
||||
return true, data
|
||||
end
|
||||
|
||||
function dnsbl:add_to_cache(ip, value)
|
||||
local ok, err = self.cachestore:set("plugin_dnsbl_" .. ip, value)
|
||||
if not ok then
|
||||
return false, err
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
function dnsbl:is_in_dnsbl(server)
|
||||
local request = resolver.arpa_str(ip) .. "." .. server
|
||||
local ips, err = utils.get_ips(request)
|
||||
if not ips then
|
||||
logger.log(ngx.ERR, "DNSBL", "Error while asking DNSBL server " .. dnsbl .. " : " .. err)
|
||||
return false, err
|
||||
return nil, err
|
||||
end
|
||||
for i, ip in ipairs(ips) do
|
||||
local a, b, c, d = ip:match("([%d]+).([%d]+).([%d]+).([%d]+)")
|
||||
|
@ -117,24 +106,4 @@ function _M:is_in_dnsbl(dnsbl, ip)
|
|||
return false, "success"
|
||||
end
|
||||
|
||||
function _M:is_in_cache(ip)
|
||||
local kind, err = datastore:get("plugin_dnsbl_cache_" .. ip)
|
||||
if not kind then
|
||||
if err ~= "not found" then
|
||||
logger.log(ngx.ERR, "DNSBL", "Error while accessing cache : " .. err)
|
||||
end
|
||||
return false, err
|
||||
end
|
||||
return kind, "success"
|
||||
end
|
||||
|
||||
function _M:add_to_cache(ip, kind)
|
||||
local ok, err = datastore:set("plugin_dnsbl_cache_" .. ip, kind, 3600)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "DNSBL", "Error while adding ip to cache : " .. err)
|
||||
return false, err
|
||||
end
|
||||
return true, "success"
|
||||
end
|
||||
|
||||
return _M
|
||||
return dnsbl
|
|
@ -12,7 +12,6 @@ location = {{ page }} {
|
|||
{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
|
||||
{% if INTERCEPTED_ERROR_CODES != "" %}
|
||||
{% for intercepted_error_code in INTERCEPTED_ERROR_CODES.split(" ") %}
|
||||
{% if not intercepted_error_code + "=" in ERRORS +%}
|
||||
|
@ -26,19 +25,15 @@ location = {{ page }} {
|
|||
internal;
|
||||
modsecurity off;
|
||||
default_type 'text/html';
|
||||
root /usr/share/bunkerweb/core/errors/files;
|
||||
content_by_lua_block {
|
||||
local logger = require "logger"
|
||||
local errors = require "errors.errors"
|
||||
local html, err
|
||||
local logger = require "bunkerweb.logger"
|
||||
local cerrors = require "errors.errors"
|
||||
local errors = cerrors:new()
|
||||
if ngx.status == 200 then
|
||||
html, err = errors.error_html(tostring(405))
|
||||
errors:render_template(tostring(405))
|
||||
else
|
||||
html, err = errors.error_html(tostring(ngx.status))
|
||||
end
|
||||
if not html then
|
||||
logger.log(ngx.ERR, "ERRORS", "Error while computing HTML error template for {{ intercepted_error_code }} : " .. err)
|
||||
else
|
||||
ngx.say(html)
|
||||
errors:render_template(tostring(ngx.status))
|
||||
end
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,101 +1,75 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local cjson = require "cjson"
|
||||
local template = require "resty.template"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local errors = class("errors", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
end
|
||||
|
||||
function _M:init()
|
||||
-- Save default errors into datastore
|
||||
local default_errors = {
|
||||
function errors:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "errors")
|
||||
-- Default error texts
|
||||
self.default_errors = {
|
||||
["400"] = {
|
||||
body1 = "Bad Request",
|
||||
body2 = "The server did not understand the request."
|
||||
title = "Bad Request",
|
||||
text = "The server did not understand the request."
|
||||
},
|
||||
["401"] = {
|
||||
body1 = "Not Authorized",
|
||||
body2 = "Valid authentication credentials needed for the target resource."
|
||||
title = "Not Authorized",
|
||||
text = "Valid authentication credentials needed for the target resource."
|
||||
},
|
||||
["403"] = {
|
||||
body1 = "Forbidden",
|
||||
body2 = "Access is forbidden to the requested page."
|
||||
title = "Forbidden",
|
||||
text = "Access is forbidden to the requested page."
|
||||
},
|
||||
["404"] = {
|
||||
body1 = "Not Found",
|
||||
body2 = "The server cannot find the requested page."
|
||||
title = "Not Found",
|
||||
text = "The server cannot find the requested page."
|
||||
},
|
||||
["405"] = {
|
||||
body1 = "Method Not Allowed",
|
||||
body2 = "The method specified in the request is not allowed."
|
||||
title = "Method Not Allowed",
|
||||
text = "The method specified in the request is not allowed."
|
||||
},
|
||||
["413"] = {
|
||||
body1 = "Request Entity Too Large",
|
||||
body2 = "The server will not accept the request, because the request entity is too large."
|
||||
title = "Request Entity Too Large",
|
||||
text = "The server will not accept the request, because the request entity is too large."
|
||||
},
|
||||
["429"] = {
|
||||
body1 = "Too Many Requests",
|
||||
body2 = "Too many requests sent in a given amount of time, try again later."
|
||||
title = "Too Many Requests",
|
||||
text = "Too many requests sent in a given amount of time, try again later."
|
||||
},
|
||||
["500"] = {
|
||||
body1 = "Internal Server Error",
|
||||
body2 = "The request was not completed. The server met an unexpected condition."
|
||||
title = "Internal Server Error",
|
||||
text = "The request was not completed. The server met an unexpected condition."
|
||||
},
|
||||
["501"] = {
|
||||
body1 = "Not Implemented",
|
||||
body2 = "The request was not completed. The server did not support the functionality required."
|
||||
title = "Not Implemented",
|
||||
text = "The request was not completed. The server did not support the functionality required."
|
||||
},
|
||||
["502"] = {
|
||||
body1 = "Bad Gateway",
|
||||
body2 = "The request was not completed. The server received an invalid response from the upstream server."
|
||||
title = "Bad Gateway",
|
||||
text = "The request was not completed. The server received an invalid response from the upstream server."
|
||||
},
|
||||
["503"] = {
|
||||
body1 = "Service Unavailable",
|
||||
body2 = "The request was not completed. The server is temporarily overloading or down."
|
||||
title = "Service Unavailable",
|
||||
text = "The request was not completed. The server is temporarily overloading or down."
|
||||
},
|
||||
["504"] = {
|
||||
body1 = "Gateway Timeout",
|
||||
body2 = "The gateway has timed out."
|
||||
title = "Gateway Timeout",
|
||||
text = "The gateway has timed out."
|
||||
}
|
||||
}
|
||||
local ok, err = datastore:set("plugin_errors_default_errors", cjson.encode(default_errors))
|
||||
if not ok then
|
||||
return false, "can't save default errors to datastore : " .. err
|
||||
end
|
||||
-- Save generic template into datastore
|
||||
local f, err = io.open("/usr/share/bunkerweb/core/errors/files/error.html", "r")
|
||||
if not f then
|
||||
return false, "can't open error.html : " .. err
|
||||
end
|
||||
local template = f:read("*all")
|
||||
f:close()
|
||||
local ok, err = datastore:set("plugin_errors_template", template)
|
||||
if not ok then
|
||||
return false, "can't save error.html to datastore : " .. err
|
||||
end
|
||||
return true, "success"
|
||||
end
|
||||
|
||||
function _M.error_html(code)
|
||||
-- Load default errors texts
|
||||
local default_errors, err = datastore:get("plugin_errors_default_errors")
|
||||
if not default_errors then
|
||||
return false, "can't get default errors from datastore : " .. err
|
||||
end
|
||||
default_errors = cjson.decode(default_errors)
|
||||
-- Load template
|
||||
local template, err = datastore:get("plugin_errors_template")
|
||||
if not template then
|
||||
return false, "can't get template from datastore : " .. err
|
||||
end
|
||||
-- Compute template
|
||||
return template:format(code .. " - " .. default_errors[code].body1, code, default_errors[code].body1,
|
||||
default_errors[code].body2), "success"
|
||||
function errors:render_template(code)
|
||||
-- Render template
|
||||
template.render("error.html", {
|
||||
title = code .. " - " .. self.default_errors[code].title,
|
||||
error_title = self.default_errors[code].title,
|
||||
error_code = code,
|
||||
error_text = self.default_errors[code].text
|
||||
})
|
||||
end
|
||||
|
||||
return _M
|
||||
return errors
|
|
@ -3,29 +3,29 @@
|
|||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<title>%s</title>
|
||||
<title>{{title}}</title>
|
||||
<link
|
||||
rel="icon"
|
||||
href="data:image/svg+xml, %%3Csvg version='1.0' xmlns='http://www.w3.org/2000/svg' width='96.000000pt' height='96.000000pt' viewBox='0 0 96.000000 96.000000' preserveAspectRatio='xMidYMid meet'%%3E%%3Cg transform='translate(0.000000,96.000000) scale(0.100000,-0.100000)'%%0Afill='%%23085577' stroke='none'%%3E%%3Cpath d='M535 863 c-22 -2 -139 -17 -260 -34 -228 -31 -267 -43 -272 -85 -2%%0A-10 23 -181 55 -379 l57 -360 400 0 400 0 20 40 c16 31 20 59 19 125 -1 100%%0A-24 165 -73 199 -41 29 -46 57 -22 111 30 67 29 188 -3 256 -13 28 -37 60 -53%%0A72 -55 39 -169 62 -268 55z m-15 -348 c30 -16 60 -61 60 -90 0 -10 -8 -33 -17%%0A-52 -16 -34 -16 -41 0 -116 9 -44 15 -82 12 -85 -6 -7 -92 -21 -131 -21 l-31%%0A-1 -6 85 c-4 75 -8 89 -31 112 -20 20 -26 36 -26 70 0 38 5 50 34 79 39 39 86%%0A45 136 19z'/%%3E%%3C/g%%3E%%3C/svg%%3E"
|
||||
href="data:image/svg+xml, %3Csvg version='1.0' xmlns='http://www.w3.org/2000/svg' width='96.000000pt' height='96.000000pt' viewBox='0 0 96.000000 96.000000' preserveAspectRatio='xMidYMid meet'%3E%3Cg transform='translate(0.000000,96.000000) scale(0.100000,-0.100000)'%0Afill='%23085577' stroke='none'%3E%3Cpath d='M535 863 c-22 -2 -139 -17 -260 -34 -228 -31 -267 -43 -272 -85 -2%0A-10 23 -181 55 -379 l57 -360 400 0 400 0 20 40 c16 31 20 59 19 125 -1 100%0A-24 165 -73 199 -41 29 -46 57 -22 111 30 67 29 188 -3 256 -13 28 -37 60 -53%0A72 -55 39 -169 62 -268 55z m-15 -348 c30 -16 60 -61 60 -90 0 -10 -8 -33 -17%0A-52 -16 -34 -16 -41 0 -116 9 -44 15 -82 12 -85 -6 -7 -92 -21 -131 -21 l-31%0A-1 -6 85 c-4 75 -8 89 -31 112 -20 20 -26 36 -26 70 0 38 5 50 34 79 39 39 86%0A45 136 19z'/%3E%3C/g%3E%3C/svg%3E"
|
||||
type="image/svg+xml"
|
||||
/>
|
||||
<style type="text/css">
|
||||
body,
|
||||
html {
|
||||
width: 100%%;
|
||||
height: 100%%;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #125678;
|
||||
}
|
||||
body {
|
||||
color: #fff;
|
||||
text-align: center;
|
||||
padding: 0;
|
||||
min-height: 100%%;
|
||||
min-height: 100%;
|
||||
display: table;
|
||||
font-family: "Open Sans", Arial, sans-serif;
|
||||
margin: 0;
|
||||
-ms-text-size-adjust: 100%%;
|
||||
-webkit-text-size-adjust: 100%%;
|
||||
-ms-text-size-adjust: 100%;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
}
|
||||
h1 {
|
||||
display: flex;
|
||||
|
@ -63,7 +63,7 @@
|
|||
}
|
||||
footer {
|
||||
position: fixed;
|
||||
width: 100%%;
|
||||
width: 100%;
|
||||
letter-spacing: 1px;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
|
@ -89,8 +89,8 @@
|
|||
<body>
|
||||
<div class="cover">
|
||||
<div class="message">
|
||||
<h1>%s<small>%s</small></h1>
|
||||
<p class="lead">%s</p>
|
||||
<h1>{{error_title}}<small>{{error_code}}</small></h1>
|
||||
<p class="lead">{{error_text}}</p>
|
||||
</div>
|
||||
</div>
|
||||
<footer>
|
||||
|
|
|
@ -1,33 +1,53 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local cachestore = require "bunkerweb.cachestore"
|
||||
local cjson = require "cjson"
|
||||
local ipmatcher = require "resty.ipmatcher"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local ipmatcher = require "resty.ipmatcher"
|
||||
local greylist = class("greylist", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function greylist:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "greylist")
|
||||
-- Check if redis is enabled
|
||||
local use_redis, err = utils.get_variable("USE_REDIS", false)
|
||||
if not use_redis then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.use_redis = use_redis == "yes"
|
||||
-- Check if init is needed
|
||||
if ngx.get_phase() == "init" then
|
||||
local init_needed, err = utils.has_variable("USE_GREYLIST", "yes")
|
||||
if init_needed == nil then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.init_needed = init_needed
|
||||
-- Decode lists
|
||||
elseif self.variables["USE_GREYLIST"] == "yes" then
|
||||
local lists, err = self.datastore:get("plugin_greylist_lists")
|
||||
if not lists then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
else
|
||||
self.lists = cjson.decode(lists)
|
||||
end
|
||||
end
|
||||
-- Instantiate cachestore
|
||||
self.cachestore = cachestore:new(self.use_redis)
|
||||
end
|
||||
|
||||
function _M:init()
|
||||
function greylist:init()
|
||||
-- Check if init is needed
|
||||
local init_needed, err = utils.has_variable("USE_GREYLIST", "yes")
|
||||
if init_needed == nil then
|
||||
return false, err
|
||||
if not self.init_needed then
|
||||
return self:ret(true, "init not needed")
|
||||
end
|
||||
if not init_needed then
|
||||
return true, "no service uses Greylist, skipping init"
|
||||
end
|
||||
-- Read greylists
|
||||
-- Read blacklists
|
||||
local greylists = {
|
||||
["IP"] = {},
|
||||
["RDNS"] = {},
|
||||
["ASN"] = {},
|
||||
["USER_AGENT"] = {},
|
||||
["URI"] = {}
|
||||
["URI"] = {},
|
||||
}
|
||||
local i = 0
|
||||
for kind, _ in pairs(greylists) do
|
||||
|
@ -41,325 +61,189 @@ function _M:init()
|
|||
end
|
||||
end
|
||||
-- Load them into datastore
|
||||
local ok, err = datastore:set("plugin_greylist_list", cjson.encode(greylists))
|
||||
local ok, err = self.datastore:set("plugin_greylist_lists", cjson.encode(greylists))
|
||||
if not ok then
|
||||
return false, "can't store Greylist list into datastore : " .. err
|
||||
return self:ret(false, "can't store greylist list into datastore : " .. err)
|
||||
end
|
||||
return true, "successfully loaded " .. tostring(i) .. " greylisted IP/network/rDNS/ASN/User-Agent/URI"
|
||||
return self:ret(true, "successfully loaded " .. tostring(i) .. " bad IP/network/rDNS/ASN/User-Agent/URI")
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
function greylist:access()
|
||||
-- Check if access is needed
|
||||
local access_needed, err = utils.get_variable("USE_GREYLIST")
|
||||
if access_needed == nil then
|
||||
return false, err, false, nil
|
||||
if self.variables["USE_GREYLIST"] ~= "yes" then
|
||||
return self:ret(true, "greylist not activated")
|
||||
end
|
||||
if access_needed ~= "yes" then
|
||||
return true, "Greylist not activated", false, nil
|
||||
end
|
||||
|
||||
-- Check the cache
|
||||
local cached_ip, err = self:is_in_cache("ip" .. ngx.var.remote_addr)
|
||||
if cached_ip and cached_ip ~= "ok" then
|
||||
return true, "IP is in greylist cache (info = " .. cached_ip .. ")", false, ngx.OK
|
||||
end
|
||||
local cached_uri, err = self:is_in_cache("uri" .. ngx.var.uri)
|
||||
if cached_uri and cached_uri ~= "ok" then
|
||||
return true, "URI is in greylist cache (info = " .. cached_uri .. ")", false, ngx.OK
|
||||
end
|
||||
local cached_ua = true
|
||||
-- Check the caches
|
||||
local checks = {
|
||||
["IP"] = "ip" .. ngx.var.remote_addr
|
||||
}
|
||||
if ngx.var.http_user_agent then
|
||||
cached_ua, err = self:is_in_cache("ua" .. ngx.var.http_user_agent)
|
||||
if cached_ua and cached_ua ~= "ok" then
|
||||
return true, "User-Agent is in greylist cache (info = " .. cached_ua .. ")", false, ngx.OK
|
||||
checks["UA"] = "ua" .. ngx.var.http_user_agent
|
||||
end
|
||||
if ngx.var.uri then
|
||||
checks["URI"] = "uri" .. ngx.var.uri
|
||||
end
|
||||
local already_cached = {
|
||||
["IP"] = false,
|
||||
["URI"] = false,
|
||||
["UA"] = false
|
||||
}
|
||||
for k, v in pairs(checks) do
|
||||
local cached, err = self:is_in_cache(v)
|
||||
if not cached and err ~= "success" then
|
||||
self.logger:log(ngx.ERR, "error while checking cache : " .. err)
|
||||
elseif cached and cached ~= "ok" then
|
||||
return self:ret(true, k + " is in cached greylist", utils.get_deny_status())
|
||||
end
|
||||
if cached then
|
||||
already_cached[k] = true
|
||||
end
|
||||
end
|
||||
if cached_ip and cached_uri and cached_ua then
|
||||
return true, "full request is in greylist cache (not greylisted)", false, nil
|
||||
-- Check lists
|
||||
if not self.lists then
|
||||
return self:ret(false, "lists is nil")
|
||||
end
|
||||
|
||||
-- Get list
|
||||
local data, err = datastore:get("plugin_greylist_list")
|
||||
if not data then
|
||||
return false, "can't get Greylist list : " .. err, false, nil
|
||||
end
|
||||
local ok, greylists = pcall(cjson.decode, data)
|
||||
if not ok then
|
||||
return false, "error while decoding greylists : " .. greylists, false, nil
|
||||
end
|
||||
|
||||
-- Return value
|
||||
local ret, ret_err = true, "success"
|
||||
|
||||
-- Check if IP is in IP/net greylist
|
||||
local ip_net, err = utils.get_variable("GREYLIST_IP")
|
||||
if ip_net and ip_net ~= "" then
|
||||
for element in ip_net:gmatch("%S+") do
|
||||
table.insert(greylists["IP"], element)
|
||||
end
|
||||
end
|
||||
if not cached_ip then
|
||||
local ipm, err = ipmatcher.new(greylists["IP"])
|
||||
if not ipm then
|
||||
ret = false
|
||||
ret_err = "can't instantiate ipmatcher " .. err
|
||||
else
|
||||
if ipm:match(ngx.var.remote_addr) then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ip/net")
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in greylist", false, ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if rDNS is in greylist
|
||||
local rdns_global, err = utils.get_variable("GREYLIST_RDNS_GLOBAL")
|
||||
local check = true
|
||||
if not rdns_global then
|
||||
logger.log(ngx.ERR, "GREYLIST", "Error while getting GREYLIST_RDNS_GLOBAL variable : " .. err)
|
||||
elseif rdns_global == "yes" then
|
||||
check, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if check == nil then
|
||||
logger.log(ngx.ERR, "GREYLIST", "Error while getting checking if IP is global : " .. err)
|
||||
end
|
||||
end
|
||||
if not cached_ip and check then
|
||||
local rdns, err = utils.get_rdns(ngx.var.remote_addr)
|
||||
if not rdns then
|
||||
ret = false
|
||||
ret_err = "error while trying to get reverse dns : " .. err
|
||||
else
|
||||
local rdns_list, err = utils.get_variable("GREYLIST_RDNS")
|
||||
if rdns_list and rdns_list ~= "" then
|
||||
for element in rdns_list:gmatch("%S+") do
|
||||
table.insert(greylists["RDNS"], element)
|
||||
-- Perform checks
|
||||
for k, v in pairs(checks) do
|
||||
if not already_cached[k] then
|
||||
local greylisted, err = self:is_greylisted(k)
|
||||
if greylisted == nil then
|
||||
self.logger:log(ngx.ERR, "error while checking if " .. k .. " is greylisted : " .. err)
|
||||
else
|
||||
local ok, err = self:add_to_cache(self:kind_to_ele(k), greylisted or "ok")
|
||||
if not ok then
|
||||
self.logger:log(ngx.ERR, "error while adding element to cache : " .. err)
|
||||
end
|
||||
if greylisted == "ko" then
|
||||
return self:ret(true, k + " is not in greylist", utils.get_deny_status())
|
||||
end
|
||||
end
|
||||
for i, suffix in ipairs(greylists["RDNS"]) do
|
||||
if rdns:sub(- #suffix) == suffix then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "rDNS " .. suffix)
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in greylist (info = rDNS " .. suffix .. ")", false, ngx.OK
|
||||
end
|
||||
end
|
||||
|
||||
-- Return
|
||||
return self:ret(true, "greylisted")
|
||||
end
|
||||
|
||||
function greylist:preread()
|
||||
return self:access()
|
||||
end
|
||||
|
||||
function greylist:kind_to_ele(kind)
|
||||
if kind == "IP" then
|
||||
return "ip" .. ngx.var.remote_addr
|
||||
elseif kind == "UA" then
|
||||
return "ua" .. ngx.var.http_user_agent
|
||||
elseif kind == "URI" then
|
||||
return "uri" .. ngx.var.uri
|
||||
end
|
||||
end
|
||||
|
||||
function greylist:is_greylisted(kind)
|
||||
if kind == "IP" then
|
||||
return self:is_greylisted_ip()
|
||||
elseif kind == "URI" then
|
||||
return self:is_greylisted_uri()
|
||||
elseif kind == "UA" then
|
||||
return self:is_greylisted_ua()
|
||||
end
|
||||
return false, "unknown kind " .. kind
|
||||
end
|
||||
|
||||
function greylist:is_greylisted_ip()
|
||||
-- Check if IP is in blacklist
|
||||
local ipm, err = ipmatcher.new(self.lists["IP"])
|
||||
if not ipm then
|
||||
return nil, err
|
||||
end
|
||||
local match, err = ipm:match(ngx.var.remote_addr)
|
||||
if err then
|
||||
return nil, err
|
||||
end
|
||||
if match then
|
||||
return true, "ip"
|
||||
end
|
||||
|
||||
-- Check if rDNS is needed
|
||||
local check_rdns = true
|
||||
local is_global, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if self.variables["BLACKLIST_RDNS_GLOBAL"] == "yes" then
|
||||
if is_global == nil then
|
||||
return nil, err
|
||||
end
|
||||
if not is_global then
|
||||
check_rdns = false
|
||||
end
|
||||
end
|
||||
if check_rdns then
|
||||
-- Get rDNS
|
||||
local rdns_list, err = utils.get_rdns(ngx.var.remote_addr)
|
||||
if not rdns_list then
|
||||
return nil, err
|
||||
end
|
||||
-- Check if rDNS is in greylist
|
||||
for i, suffix in ipairs(self.lists["RDNS"]) do
|
||||
for j, rdns in ipairs(rdns_list) do
|
||||
if rdns:sub(-#suffix) == suffix then
|
||||
return true, "rDNS " .. suffix
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if ASN is in greylist
|
||||
if not cached_ip then
|
||||
if utils.ip_is_global(ngx.var.remote_addr) then
|
||||
local asn, err = utils.get_asn(ngx.var.remote_addr)
|
||||
if not asn then
|
||||
ret = false
|
||||
ret_err = "error while trying to get asn number : " .. err
|
||||
else
|
||||
local asn_list, err = utils.get_variable("GREYLIST_ASN")
|
||||
if asn_list and asn_list ~= "" then
|
||||
for element in asn_list:gmatch("%S+") do
|
||||
table.insert(greylists["ASN"], element)
|
||||
end
|
||||
end
|
||||
for i, asn_bl in ipairs(greylists["ASN"]) do
|
||||
if tostring(asn) == asn_bl then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ASN " .. tostring(asn))
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in greylist (kind = ASN " .. tostring(asn) .. ")", false,
|
||||
ngx.OK
|
||||
end
|
||||
end
|
||||
if is_global then
|
||||
local asn, err = utils.get_asn(ngx.var.remote_addr)
|
||||
if not asn then
|
||||
return nil, err
|
||||
end
|
||||
for i, bl_asn in ipairs(self.lists["ASN"]) do
|
||||
if bl_asn == tostring(asn) then
|
||||
return true, "ASN " .. bl_asn
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- IP is not greylisted
|
||||
local ok, err = self:add_to_cache("ip" .. ngx.var.remote_addr, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
end
|
||||
-- Not greylisted
|
||||
return false, "ko"
|
||||
end
|
||||
|
||||
-- Check if User-Agent is in greylist
|
||||
if not cached_ua and ngx.var.http_user_agent then
|
||||
local ua_list, err = utils.get_variable("GREYLIST_USER_AGENT")
|
||||
if ua_list and ua_list ~= "" then
|
||||
for element in ua_list:gmatch("%S+") do
|
||||
table.insert(greylists["USER_AGENT"], element)
|
||||
end
|
||||
end
|
||||
for i, ua_bl in ipairs(greylists["USER_AGENT"]) do
|
||||
if ngx.var.http_user_agent:match(ua_bl) then
|
||||
self:add_to_cache("ua" .. ngx.var.http_user_agent, "UA " .. ua_bl)
|
||||
return ret, "client User-Agent " .. ngx.var.http_user_agent .. " is in greylist (matched " .. ua_bl .. ")", false,
|
||||
ngx.OK
|
||||
end
|
||||
end
|
||||
-- UA is not greylisted
|
||||
local ok, err = self:add_to_cache("ua" .. ngx.var.http_user_agent, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
function greylist:is_greylisted_uri()
|
||||
-- Check if URI is in blacklist
|
||||
for i, uri in ipairs(self.lists["URI"]) do
|
||||
if ngx.var.uri:match(uri) then
|
||||
return true, "URI " .. uri
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if URI is in greylist
|
||||
if not cached_uri then
|
||||
local uri_list, err = utils.get_variable("GREYLIST_URI")
|
||||
if uri_list and uri_list ~= "" then
|
||||
for element in uri_list:gmatch("%S+") do
|
||||
table.insert(greylists["URI"], element)
|
||||
end
|
||||
end
|
||||
for i, uri_bl in ipairs(greylists["URI"]) do
|
||||
if ngx.var.uri:match(uri_bl) then
|
||||
self:add_to_cache("uri" .. ngx.var.uri, "URI " .. uri_bl)
|
||||
return ret, "client URI " .. ngx.var.uri .. " is in greylist (matched " .. uri_bl .. ")", false, ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- URI is not greylisted
|
||||
local ok, err = self:add_to_cache("uri" .. ngx.var.uri, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
end
|
||||
|
||||
return ret, "IP is not in list (error = " .. ret_err .. ")", true, utils.get_deny_status()
|
||||
return false, "ko"
|
||||
end
|
||||
|
||||
function _M:preread()
|
||||
-- Check if preread is needed
|
||||
local preread_needed, err = utils.get_variable("USE_GREYLIST")
|
||||
if preread_needed == nil then
|
||||
return false, err, false, nil
|
||||
end
|
||||
if preread_needed ~= "yes" then
|
||||
return true, "Greylist not activated", false, nil
|
||||
end
|
||||
|
||||
-- Check the cache
|
||||
local cached_ip, err = self:is_in_cache("ip" .. ngx.var.remote_addr)
|
||||
if cached_ip and cached_ip ~= "ok" then
|
||||
return true, "IP is in greylist cache (info = " .. cached_ip .. ")", false, ngx.OK
|
||||
end
|
||||
if cached_ip then
|
||||
return true, "full request is in greylist cache (not greylisted)", false, nil
|
||||
end
|
||||
|
||||
-- Get list
|
||||
local data, err = datastore:get("plugin_greylist_list")
|
||||
if not data then
|
||||
return false, "can't get Greylist list : " .. err, false, nil
|
||||
end
|
||||
local ok, greylists = pcall(cjson.decode, data)
|
||||
if not ok then
|
||||
return false, "error while decoding greylists : " .. greylists, false, nil
|
||||
end
|
||||
|
||||
-- Return value
|
||||
local ret, ret_err = true, "success"
|
||||
|
||||
-- Check if IP is in IP/net greylist
|
||||
local ip_net, err = utils.get_variable("GREYLIST_IP")
|
||||
if ip_net and ip_net ~= "" then
|
||||
for element in ip_net:gmatch("%S+") do
|
||||
table.insert(greylists["IP"], element)
|
||||
function greylist:is_greylisted_ua()
|
||||
-- Check if UA is in greylist
|
||||
for i, ua in ipairs(self.lists["USER_AGENT"]) do
|
||||
if ngx.var.http_user_agent:match(ua) then
|
||||
return true, "UA " .. ua
|
||||
end
|
||||
end
|
||||
if not cached_ip then
|
||||
local ipm, err = ipmatcher.new(greylists["IP"])
|
||||
if not ipm then
|
||||
ret = false
|
||||
ret_err = "can't instantiate ipmatcher " .. err
|
||||
else
|
||||
if ipm:match(ngx.var.remote_addr) then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ip/net")
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in greylist", false, ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if rDNS is in greylist
|
||||
local rdns_global, err = utils.get_variable("GREYLIST_RDNS_GLOBAL")
|
||||
local check = true
|
||||
if not rdns_global then
|
||||
logger.log(ngx.ERR, "GREYLIST", "Error while getting GREYLIST_RDNS_GLOBAL variable : " .. err)
|
||||
elseif rdns_global == "yes" then
|
||||
check, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if check == nil then
|
||||
logger.log(ngx.ERR, "GREYLIST", "Error while getting checking if IP is global : " .. err)
|
||||
end
|
||||
end
|
||||
if not cached_ip and check then
|
||||
local rdns, err = utils.get_rdns(ngx.var.remote_addr)
|
||||
if not rdns then
|
||||
ret = false
|
||||
ret_err = "error while trying to get reverse dns : " .. err
|
||||
else
|
||||
local rdns_list, err = utils.get_variable("GREYLIST_RDNS")
|
||||
if rdns_list and rdns_list ~= "" then
|
||||
for element in rdns_list:gmatch("%S+") do
|
||||
table.insert(greylists["RDNS"], element)
|
||||
end
|
||||
end
|
||||
for i, suffix in ipairs(greylists["RDNS"]) do
|
||||
if rdns:sub(- #suffix) == suffix then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "rDNS " .. suffix)
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in greylist (info = rDNS " .. suffix .. ")", false, ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if ASN is in greylist
|
||||
if not cached_ip then
|
||||
if utils.ip_is_global(ngx.var.remote_addr) then
|
||||
local asn, err = utils.get_asn(ngx.var.remote_addr)
|
||||
if not asn then
|
||||
ret = false
|
||||
ret_err = "error while trying to get asn number : " .. err
|
||||
else
|
||||
local asn_list, err = utils.get_variable("GREYLIST_ASN")
|
||||
if asn_list and asn_list ~= "" then
|
||||
for element in asn_list:gmatch("%S+") do
|
||||
table.insert(greylists["ASN"], element)
|
||||
end
|
||||
end
|
||||
for i, asn_bl in ipairs(greylists["ASN"]) do
|
||||
if tostring(asn) == asn_bl then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ASN " .. tostring(asn))
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in greylist (kind = ASN " .. tostring(asn) .. ")", false,
|
||||
ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- IP is not greylisted
|
||||
local ok, err = self:add_to_cache("ip" .. ngx.var.remote_addr, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
end
|
||||
return ret, "IP is not in list (error = " .. ret_err .. ")", true, utils.get_deny_status()
|
||||
-- UA is not greylisted
|
||||
return false, "ko"
|
||||
end
|
||||
|
||||
function _M:is_in_cache(ele)
|
||||
local kind, err = datastore:get("plugin_greylist_cache_" .. ngx.var.server_name .. ele)
|
||||
if not kind then
|
||||
if err ~= "not found" then
|
||||
logger.log(ngx.ERR, "GREYLIST", "Error while accessing cache : " .. err)
|
||||
end
|
||||
function greylist:is_in_cache(ele)
|
||||
local ok, data = self.cachestore:get("plugin_greylist_" .. ele)
|
||||
if not ok then
|
||||
return false, data
|
||||
end
|
||||
return true, data
|
||||
end
|
||||
|
||||
function greylist:add_to_cache(ele, value)
|
||||
local ok, err = self.cachestore:set("plugin_greylist_" .. ele, value)
|
||||
if not ok then
|
||||
return false, err
|
||||
end
|
||||
return kind, "success"
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
function _M:add_to_cache(ele, kind)
|
||||
local ok, err = datastore:set("plugin_greylist_cache_" .. ngx.var.server_name .. ele, kind, 3600)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "GREYLIST", "Error while adding element to cache : " .. err)
|
||||
return false, err
|
||||
end
|
||||
return true, "success"
|
||||
end
|
||||
|
||||
return _M
|
||||
return greylist
|
|
@ -1,23 +1,24 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local cjson = require "cjson"
|
||||
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local letsencrypt = class("letsencrypt", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function letsencrypt:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "letsencrypt")
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
function letsencrypt:access()
|
||||
if string.sub(ngx.var.uri, 1, string.len("/.well-known/acme-challenge/")) == "/.well-known/acme-challenge/" then
|
||||
logger.log(ngx.NOTICE, "LETS-ENCRYPT", "Got a visit from Let's Encrypt, let's whitelist it.")
|
||||
return true, "success", true, ngx.exit(ngx.OK)
|
||||
self.logger:log(ngx.NOTICE, "got a visit from Let's Encrypt, let's whitelist it")
|
||||
return self:ret(true, "visit from LE", ngx.OK)
|
||||
end
|
||||
return true, "success", false, nil
|
||||
return self:ret(true, "success")
|
||||
end
|
||||
|
||||
function _M:api()
|
||||
function letsencrypt:api()
|
||||
if not string.match(ngx.var.uri, "^/lets%-encrypt/challenge$") or
|
||||
(ngx.var.request_method ~= "POST" and ngx.var.request_method ~= "DELETE") then
|
||||
return false, nil, nil
|
||||
|
@ -47,4 +48,4 @@ function _M:api()
|
|||
return true, ngx.HTTP_NOT_FOUND, { status = "error", msg = "unknown request" }
|
||||
end
|
||||
|
||||
return _M
|
||||
return letsencrypt
|
|
@ -1,29 +1,62 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local datastore = require "bunkerweb.datastore"
|
||||
local clusterstore = require "bunkerweb.clusterstore"
|
||||
local cjson = require "cjson"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local limit = class("limit", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function limit:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "limit")
|
||||
-- Check if redis is enabled
|
||||
local use_redis, err = utils.get_variable("USE_REDIS", false)
|
||||
if not use_redis then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.use_redis = use_redis == "yes"
|
||||
-- Load rules if needed
|
||||
if ngx.get_phase() == "access" then
|
||||
if self.variables["USE_LIMIT_REQ"] == "yes" then
|
||||
-- Get all rules from datastore
|
||||
local limited = false
|
||||
local all_rules, err = self.datastore:get("plugin_limit_rules")
|
||||
if not all_rules then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
return
|
||||
end
|
||||
all_rules = cjson.decode(all_rules)
|
||||
self.rules = {}
|
||||
-- Extract global rules
|
||||
if all_rules.global then
|
||||
for k, v in pairs(all_rules.global) do
|
||||
self.rules[k] = v
|
||||
end
|
||||
end
|
||||
-- Extract and overwrite if needed server rules
|
||||
if all_rules[ngx.var.server_name] then
|
||||
for k, v in pairs(all_rules[ngx.var.server_name]) do
|
||||
self.rules[k] = v
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function _M:init()
|
||||
function limit:init()
|
||||
-- Check if init is needed
|
||||
local init_needed, err = utils.has_variable("USE_LIMIT_REQ", "yes")
|
||||
if init_needed == nil then
|
||||
return false, err
|
||||
return self:ret(false, err)
|
||||
end
|
||||
if not init_needed then
|
||||
return true, "no service uses Limit for requests, skipping init"
|
||||
return self:ret(true, "no service uses Limit for requests, skipping init")
|
||||
end
|
||||
-- Get variables
|
||||
local variables, err = utils.get_multiple_variables({"LIMIT_REQ_URL", "LIMIT_REQ_RATE"})
|
||||
if variables == nil then
|
||||
return false, err
|
||||
return self:ret(false, err)
|
||||
end
|
||||
-- Store URLs and rates
|
||||
local data = {}
|
||||
|
@ -41,75 +74,143 @@ function _M:init()
|
|||
end
|
||||
end
|
||||
end
|
||||
local ok, err = datastore:set("plugin_limit_rules", cjson.encode(data))
|
||||
local ok, err = self.datastore:set("plugin_limit_rules", cjson.encode(data))
|
||||
if not ok then
|
||||
return false, err
|
||||
return self:ret(false, err)
|
||||
end
|
||||
return true, "successfully loaded " .. tostring(i) .. " limit rules for requests"
|
||||
return self:ret(true, "successfully loaded " .. tostring(i) .. " limit rules for requests")
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
function limit:access()
|
||||
-- Check if we are whitelisted
|
||||
if ngx.var.is_whitelisted == "yes" then
|
||||
return self:ret(true, "client is whitelisted")
|
||||
end
|
||||
-- Check if access is needed
|
||||
local access_needed, err = utils.get_variable("USE_LIMIT_REQ")
|
||||
if access_needed == nil then
|
||||
return false, err, nil, nil
|
||||
if self.variables["USE_LIMIT_REQ"] ~= "yes" then
|
||||
return self:ret(true, "limit req is disabled")
|
||||
end
|
||||
if access_needed ~= "yes" then
|
||||
return true, "Limit for request not activated", nil, nil
|
||||
end
|
||||
|
||||
-- Don't go further if URL is not limited
|
||||
local limited = false
|
||||
local all_rules, err = datastore:get("plugin_limit_rules")
|
||||
if not all_rules then
|
||||
return false, err, nil, nil
|
||||
end
|
||||
all_rules = cjson.decode(all_rules)
|
||||
local limited = false
|
||||
local rate = ""
|
||||
if not limited and all_rules[ngx.var.server_name] then
|
||||
for k, v in pairs(all_rules[ngx.var.server_name]) do
|
||||
if ngx.var.uri:match(k) and k ~= "/" then
|
||||
limited = true
|
||||
rate = all_rules[ngx.var.server_name][k]
|
||||
break
|
||||
end
|
||||
-- Check if URI is limited
|
||||
local rate = nil
|
||||
local uri = nil
|
||||
for k, v in pairs(self.rules) do
|
||||
if k ~= "/" and ngx.var.uri:match(k) then
|
||||
rate = v
|
||||
uri = k
|
||||
break
|
||||
end
|
||||
end
|
||||
if all_rules.global and not limited then
|
||||
for k, v in pairs(all_rules.global) do
|
||||
if ngx.var.uri:match(k) and k ~= "/" then
|
||||
limited = true
|
||||
rate = all_rules.global[k]
|
||||
break
|
||||
end
|
||||
if not rate then
|
||||
if self.rules["/"] then
|
||||
rate = self.rules["/"]
|
||||
uri = "/"
|
||||
else
|
||||
return self:ret(true, "no rule for " .. ngx.var.uri)
|
||||
end
|
||||
end
|
||||
if not limited then
|
||||
if all_rules[ngx.var.server_name] and all_rules[ngx.var.server_name]["/"] then
|
||||
limited = true
|
||||
rate = all_rules[ngx.var.server_name]["/"]
|
||||
elseif all_rules.global and all_rules.global["/"] then
|
||||
limited = true
|
||||
rate = all_rules.global["/"]
|
||||
end
|
||||
if not limited then
|
||||
return true, "URL " .. ngx.var.uri .. " is not limited by a rule, skipping check", nil, nil
|
||||
end
|
||||
end
|
||||
|
||||
-- Get the rate
|
||||
-- Check if limit is reached
|
||||
local _, _, rate_max, rate_time = rate:find("(%d+)r/(.)")
|
||||
|
||||
-- Get current requests timestamps
|
||||
local requests, err = datastore:get("plugin_limit_cache_" .. ngx.var.server_name .. ngx.var.remote_addr .. ngx.var.uri)
|
||||
if not requests and err ~= "not found" then
|
||||
return false, err, nil, nil
|
||||
elseif err == "not found" then
|
||||
requests = "{}"
|
||||
local limited, err, current_rate = self:limit_req(tonumber(rate_max), rate_time)
|
||||
if limited == nil then
|
||||
return self:ret(false, err)
|
||||
end
|
||||
|
||||
-- Limit reached
|
||||
if limited then
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is limited for URL " .. ngx.var.uri .. " (current rate = " .. current_rate .. "r/" .. rate_time .. " and max rate = " .. rate .. ")", ngx.HTTP_TOO_MANY_REQUESTS)
|
||||
end
|
||||
-- Limit not reached
|
||||
return self:ret(true, "client IP " .. ngx.var.remote_addr .. " is not limited for URL " .. ngx.var.uri .. " (current rate = " .. current_rate .. "r/" .. rate_time .. " and max rate = " .. rate .. ")")
|
||||
end
|
||||
|
||||
function limit:limit_req(rate_max, rate_time)
|
||||
local timestamps = nil
|
||||
-- Redis case
|
||||
if self.use_redis then
|
||||
local redis_timestamps, err = self:limit_req_redis(rate_max, rate_time)
|
||||
if redis_timestamps == nil then
|
||||
self.logger:log(ngx.ERR, "limit_req_redis failed, falling back to local : " .. err)
|
||||
else
|
||||
timestamps = redis_timestamps
|
||||
-- Save the new timestamps
|
||||
local ok, err = self.datastore:set("plugin_limit_cache_" .. ngx.var.server_name .. ngx.var.remote_addr .. ngx.var.uri, cjson.encode(timestamps), delay)
|
||||
if not ok then
|
||||
return nil, "can't update timestamps : " .. err
|
||||
end
|
||||
end
|
||||
end
|
||||
-- Local case (or fallback)
|
||||
if timestamps == nil then
|
||||
local local_timestamps, err = self:limit_req_local(rate_max, rate_time)
|
||||
if local_timestamps == nil then
|
||||
return nil, "limit_req_local failed : " .. err
|
||||
end
|
||||
timestamps = local_timestamps
|
||||
end
|
||||
if #timestamps > rate_max then
|
||||
return true, "success - limited", #timestamps
|
||||
end
|
||||
return false, "success - not limited", #timestamps
|
||||
end
|
||||
|
||||
function limit:limit_req_local(rate_max, rate_time)
|
||||
-- Get timestamps
|
||||
local timestamps, err = self.datastore:get("plugin_limit_cache_" .. ngx.var.server_name .. ngx.var.remote_addr .. ngx.var.uri)
|
||||
if not timestamps and err ~= "not found" then
|
||||
return nil, err
|
||||
elseif err == "not found" then
|
||||
timestamps = "{}"
|
||||
end
|
||||
timestamps = cjson.decode(timestamps)
|
||||
-- Compute new timestamps
|
||||
local updated, new_timestamps, delay = self:limit_req_timestamps(rate_max, rate_time, timestamps)
|
||||
-- Save new timestamps if needed
|
||||
if updated then
|
||||
local ok, err = self.datastore:set("plugin_limit_cache_" .. ngx.var.server_name .. ngx.var.remote_addr .. ngx.var.uri, cjson.encode(timestamps), delay)
|
||||
if not ok then
|
||||
return nil, err
|
||||
end
|
||||
end
|
||||
return new_timestamps, "success"
|
||||
end
|
||||
|
||||
function limit:limit_req_redis(rate_max, rate_time)
|
||||
-- Connect to server
|
||||
local cstore, err = clusterstore:new()
|
||||
if not cstore then
|
||||
return nil, err
|
||||
end
|
||||
local ok, err = clusterstore:connect()
|
||||
if not ok then
|
||||
return nil, err
|
||||
end
|
||||
-- Get timestamps
|
||||
local timestamps, err = clusterstore:call("get", "limit_" .. ngx.var.server_name .. ngx.var.remote_addr .. ngx.var.uri)
|
||||
if err then
|
||||
clusterstore:close()
|
||||
return nil, err
|
||||
end
|
||||
if timestamps then
|
||||
timestamps = cjson.decode(timestamps)
|
||||
else
|
||||
timestamps = {}
|
||||
end
|
||||
-- Compute new timestamps
|
||||
local updated, new_timestamps, delay = self:limit_req_timestamps(rate_max, rate_time, timestamps)
|
||||
-- Save new timestamps if needed
|
||||
if updated then
|
||||
local ok, err = clusterstore:call("set", "limit_" .. ngx.var.server_name .. ngx.var.remote_addr .. ngx.var.uri, cjson.encode(new_timestamps), "EX", delay)
|
||||
if not ok then
|
||||
clusterstore:close()
|
||||
return nil, err
|
||||
end
|
||||
end
|
||||
lusterstore:close()
|
||||
return new_timestamps, "success"
|
||||
end
|
||||
|
||||
function limit:limit_req_timestamps(rate_max, rate_time, timestamps)
|
||||
-- Compute new timestamps
|
||||
local updated = false
|
||||
local new_timestamps = {}
|
||||
local current_timestamp = os.time(os.date("!*t"))
|
||||
local delay = 0
|
||||
|
@ -122,29 +223,20 @@ function _M:access()
|
|||
elseif rate_time == "d" then
|
||||
delay = 86400
|
||||
end
|
||||
for i, timestamp in ipairs(cjson.decode(requests)) do
|
||||
-- Keep only timestamp within the delay
|
||||
for i, timestamp in ipairs(timestamps) do
|
||||
if current_timestamp - timestamp <= delay then
|
||||
table.insert(new_timestamps, timestamp)
|
||||
else
|
||||
updated = true
|
||||
end
|
||||
end
|
||||
-- Only insert the new timestamp if client is not limited already to avoid infinite insert
|
||||
if #new_timestamps <= tonumber(rate_max) then
|
||||
if #new_timestamps <= rate_max then
|
||||
table.insert(new_timestamps, current_timestamp)
|
||||
updated = true
|
||||
end
|
||||
|
||||
-- Save the new timestamps
|
||||
local ok, err = datastore:set("plugin_limit_cache_" .. ngx.var.server_name .. ngx.var.remote_addr .. ngx.var.uri, cjson.encode(new_timestamps), delay)
|
||||
if not ok then
|
||||
return false, "can't update timestamps : " .. err, nil, nil
|
||||
end
|
||||
|
||||
-- Deny if the rate is higher than the one defined in rule
|
||||
if #new_timestamps > tonumber(rate_max) then
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is limited for URL " .. ngx.var.uri .. " (current rate = " .. tostring(#new_timestamps) .. "r/" .. rate_time .. " and max rate = " .. rate .. ")", true, ngx.HTTP_TOO_MANY_REQUESTS
|
||||
end
|
||||
|
||||
-- Limit not reached
|
||||
return true, "client IP " .. ngx.var.remote_addr .. " is not limited for URL " .. ngx.var.uri .. " (current rate = " .. tostring(#new_timestamps) .. "r/" .. rate_time .. " and max rate = " .. rate .. ")", nil, nil
|
||||
return updated, new_timestamps, delay
|
||||
end
|
||||
|
||||
return _M
|
||||
return limit
|
|
@ -1,43 +1,35 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local logger = require "bunkerweb.logger"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local clusterstore = require "bunkerweb.clusterstore"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local resolver = require "resty.dns.resolver"
|
||||
local clusterstore = require "clusterstore"
|
||||
local redis = class("redis", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function redis:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "redis")
|
||||
end
|
||||
|
||||
function _M:init()
|
||||
function redis:init()
|
||||
-- Check if init is needed
|
||||
local use_redis, err = utils.get_variable("USE_REDIS", false)
|
||||
if use_redis == nil then
|
||||
return false, "can't check USE_REDIS variable : " .. err
|
||||
end
|
||||
if use_redis ~= "yes" then
|
||||
return true, "redis not used"
|
||||
if self.variables["USE_REDIS"] then
|
||||
return self:ret(true, "redis not used")
|
||||
end
|
||||
-- Check redis connection
|
||||
local redis_client, err = clusterstore:connect()
|
||||
if not redis_client then
|
||||
return false, "can't connect to redis server"
|
||||
local ok, err = clusterstore:connect()
|
||||
if not ok then
|
||||
return self:ret(false, "redis connect error : " .. err)
|
||||
end
|
||||
local ok, err = redis_client:ping()
|
||||
local ok, err = clusterstore:call("ping")
|
||||
clusterstore:close()
|
||||
if err then
|
||||
clusterstore:close(redis_client)
|
||||
return false, "error while sending ping command : " .. err
|
||||
return self:ret(false, "error while sending ping command : " .. err)
|
||||
end
|
||||
if not ok then
|
||||
clusterstore:close(redis_client)
|
||||
return false, "ping command failed"
|
||||
return self:ret(false, "ping command failed")
|
||||
end
|
||||
clusterstore:close(redis_client)
|
||||
return true, "redis ping successful"
|
||||
return self:ret(true, "redis ping successful")
|
||||
end
|
||||
|
||||
return _M
|
||||
return redis
|
|
@ -1,65 +1,55 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local cachestore = require "bunkerweb.cachestore"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local reversescan = class("reversescan", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function reversescan:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "reversescan")
|
||||
-- Instantiate cachestore
|
||||
local use_redis, err = utils.get_variable("USE_REDIS", false)
|
||||
if not use_redis then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.use_redis = use_redis == "yes"
|
||||
self.cachestore = cachestore:new(self.use_redis)
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
function reversescan:access()
|
||||
-- Check if access is needed
|
||||
local access_needed, err = utils.get_variable("USE_REVERSE_SCAN")
|
||||
if access_needed == nil then
|
||||
return false, "can't get USE_REVERSE_SCAN setting from datastore : " .. err, nil, nil
|
||||
end
|
||||
if access_needed ~= "yes" then
|
||||
return true, "reverse scan not activated", nil, nil
|
||||
end
|
||||
-- Get ports
|
||||
local ports, err = utils.get_variable("REVERSE_SCAN_PORTS")
|
||||
if ports == nil then
|
||||
return false, "can't get REVERSE_SCAN_PORTS setting from datastore : " .. err, nil, nil
|
||||
end
|
||||
if ports == "" then
|
||||
return true, "no port defined", nil, nil
|
||||
end
|
||||
-- Get timeout
|
||||
local timeout, err = utils.get_variable("REVERSE_SCAN_TIMEOUT")
|
||||
if timeout == nil then
|
||||
return false, "can't get REVERSE_SCAN_TIMEOUT setting from datastore : " .. err, nil, nil
|
||||
if self.variables["USE_REVERSE_SCAN"] ~= "yes" then
|
||||
return self:ret(true, "reverse scan not activated")
|
||||
end
|
||||
-- Loop on ports
|
||||
for port in ports:gmatch("%S+") do
|
||||
for port in self.variables["REVERSE_SCAN_PORTS"]:gmatch("%S+") do
|
||||
-- Check if the scan is already cached
|
||||
local cached, err = self:is_in_cache(ngx.var.remote_addr .. ":" .. port)
|
||||
if cached == nil then
|
||||
return false, "error getting cache from datastore : " .. err, nil, nil
|
||||
return self:ret(false, "error getting cache from datastore : " .. err)
|
||||
end
|
||||
if cached == "open" then
|
||||
return true, "port " .. port .. " is opened for IP " .. ngx.var.remote_addr, true, utils.get_deny_status()
|
||||
return self:ret(true, "port " .. port .. " is opened for IP " .. ngx.var.remote_addr, utils.get_deny_status())
|
||||
elseif not cached then
|
||||
-- Do the scan
|
||||
local res, err = self:scan(ngx.var.remote_addr, tonumber(port), tonumber(timeout))
|
||||
local res, err = self:scan(ngx.var.remote_addr, tonumber(port), tonumber(self.variables["REVERSE_SCAN_TIMEOUT"]))
|
||||
-- Cache the result
|
||||
local ok, err = self:add_to_cache(ngx.var.remote_addr .. ":" .. port, res)
|
||||
if not ok then
|
||||
return false, "error updating cache from datastore : " .. err, nil, nil
|
||||
return self:ret(false, "error updating cache from datastore : " .. err)
|
||||
end
|
||||
-- Deny request if port is open
|
||||
if res == "open" then
|
||||
return true, "port " .. port .. " is opened for IP " .. ngx.var.remote_addr, true, utils.get_deny_status()
|
||||
return self:ret(true, "port " .. port .. " is opened for IP " .. ngx.var.remote_addr, utils.get_deny_status())
|
||||
end
|
||||
end
|
||||
end
|
||||
return nil, "no port open for IP " .. ngx.var.remote_addr, nil, nil
|
||||
-- No port opened
|
||||
return self:ret(true, "no port open for IP " .. ngx.var.remote_addr)
|
||||
end
|
||||
|
||||
function _M:scan(ip, port, timeout)
|
||||
function reversescan:scan(ip, port, timeout)
|
||||
local tcpsock = ngx.socket.tcp()
|
||||
tcpsock:settimeout(timeout)
|
||||
local ok, err = tcpsock:connect(ip, port)
|
||||
|
@ -70,24 +60,20 @@ function _M:scan(ip, port, timeout)
|
|||
return "open", nil
|
||||
end
|
||||
|
||||
function _M:is_in_cache(ele)
|
||||
local res, err = datastore:get("plugin_reversescan_" .. ele)
|
||||
if not res then
|
||||
if err == "not found" then
|
||||
return false, nil
|
||||
end
|
||||
return nil, err
|
||||
end
|
||||
return true, res
|
||||
function reversescan:is_in_cache(ip_port)
|
||||
local ok, data = self.cachestore:get("plugin_reversescan_cache_" .. ip_port)
|
||||
if not ok then
|
||||
return false, data
|
||||
end
|
||||
return true, data
|
||||
end
|
||||
|
||||
function _M:add_to_cache(ele, value)
|
||||
local ok, err = datastore:set("plugin_reversescan_" .. ele, value, 86400)
|
||||
if not ok then
|
||||
return false, err
|
||||
end
|
||||
return true, nil
|
||||
function reversescan:add_to_cache(ip_port, value)
|
||||
local ok, err = self.cachestore:set("plugin_reversescan_cache_" .. ip_port, value)
|
||||
if not ok then
|
||||
return false, err
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
|
||||
return _M
|
||||
return reversescan
|
|
@ -10,7 +10,7 @@
|
|||
"default": "random",
|
||||
"help": "Secret used to encrypt sessions variables for storing data related to challenges.",
|
||||
"id": "session-secret",
|
||||
"label": "Session secret",
|
||||
"label": "Sessions secret",
|
||||
"regex": "^\\w+$",
|
||||
"type": "password"
|
||||
},
|
||||
|
@ -18,8 +18,8 @@
|
|||
"context": "global",
|
||||
"default": "random",
|
||||
"help": "Name of the cookie given to clients.",
|
||||
"id": "session-name",
|
||||
"label": "Session name",
|
||||
"id": "sessions-name",
|
||||
"label": "Sessions name",
|
||||
"regex": "^\\w+$",
|
||||
"type": "text"
|
||||
},
|
||||
|
@ -27,8 +27,8 @@
|
|||
"context": "global",
|
||||
"default": "1800",
|
||||
"help": "Maximum time (in seconds) of inactivity before the session is invalidated.",
|
||||
"id": "session-idling-timeout",
|
||||
"label": "Session idling timeout",
|
||||
"id": "sessions-idling-timeout",
|
||||
"label": "Sessions idling timeout",
|
||||
"regex": "^\\d+$",
|
||||
"type": "text"
|
||||
},
|
||||
|
@ -36,8 +36,8 @@
|
|||
"context": "global",
|
||||
"default": "3600",
|
||||
"help": "Maximum time (in seconds) before a session must be renewed.",
|
||||
"id": "session-rolling-timeout",
|
||||
"label": "Session rolling timeout",
|
||||
"id": "sessions-rolling-timeout",
|
||||
"label": "Sessions rolling timeout",
|
||||
"regex": "^\\d+$",
|
||||
"type": "text"
|
||||
},
|
||||
|
@ -45,8 +45,8 @@
|
|||
"context": "global",
|
||||
"default": "86400",
|
||||
"help": "Maximum time (in seconds) before a session is destroyed.",
|
||||
"id": "session-absolute-timeout",
|
||||
"label": "Session absolute timeout",
|
||||
"id": "sessions-absolute-timeout",
|
||||
"label": "SessionS absolute timeout",
|
||||
"regex": "^\\d+$",
|
||||
"type": "text"
|
||||
}
|
||||
|
|
|
@ -1,22 +1,18 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
|
||||
local utils = require "utils"
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local session = require "resty.session"
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
local sessions = class("sessions", plugin)
|
||||
|
||||
function sessions:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "sessions")
|
||||
end
|
||||
|
||||
function _M:init()
|
||||
-- Get vars
|
||||
local vars = {
|
||||
["SESSIONS_SECRET"] = "",
|
||||
["SESSIONS_NAME"] = "",
|
||||
["SESSIONS_IDLING_TIMEOUT"] = "",
|
||||
["SESSIONS_ROLLING_TIMEOUT"] = "",
|
||||
["SESSIONS_ABSOLUTE_TIMEOUT"] = "",
|
||||
function sessions:init()
|
||||
-- Get redis vars
|
||||
local redis_vars = {
|
||||
["USE_REDIS"] = "",
|
||||
["REDIS_HOST"] = "",
|
||||
["REDIS_PORT"] = "",
|
||||
|
@ -25,63 +21,46 @@ function _M:init()
|
|||
["REDIS_KEEPALIVE_IDLE"] = "",
|
||||
["REDIS_KEEPALIVE_POOL"] = ""
|
||||
}
|
||||
for k, v in pairs(vars) do
|
||||
for k, v in pairs(redis_vars) do
|
||||
local var, err = utils.get_variable(k, false)
|
||||
if var == nil then
|
||||
return false, "can't get " .. k .. " variable : " .. err
|
||||
return self:ret(false, "can't get " .. k .. " variable : " .. err)
|
||||
end
|
||||
end
|
||||
-- Init configuration
|
||||
local config = {
|
||||
secret = vars["SESSIONS_SECRET"],
|
||||
cookie_name = vars["SESSIONS_NAME"],
|
||||
idling_timeout = tonumber(vars["SESSIONS_IDLING_TIMEOUT"]),
|
||||
rolling_timeout = tonumber(vars["SESSIONS_ROLLING_TIMEOUT"]),
|
||||
absolute_timeout = tonumber(vars["SESSIONS_ABSOLUTE_TIMEOUT"])
|
||||
secret = self.variables["SESSIONS_SECRET"],
|
||||
cookie_name = self.variables["SESSIONS_NAME"],
|
||||
idling_timeout = tonumber(self.variables["SESSIONS_IDLING_TIMEOUT"]),
|
||||
rolling_timeout = tonumber(self.variables["SESSIONS_ROLLING_TIMEOUT"]),
|
||||
absolute_timeout = tonumber(self.variables["SESSIONS_ABSOLUTE_TIMEOUT"])
|
||||
}
|
||||
if vars["SESSIONS_SECRET"] == "random" then
|
||||
if self.variables["SESSIONS_SECRET"] == "random" then
|
||||
config.secret = utils.rand(16)
|
||||
end
|
||||
if vars["SESSIONS_NAME"] == "random" then
|
||||
if self.variables["SESSIONS_NAME"] == "random" then
|
||||
config.cookie_name = utils.rand(16)
|
||||
end
|
||||
if vars["USE_REDIS"] == "no" then
|
||||
if redis_vars["USE_REDIS"] ~= "yes" then
|
||||
config.storage = "cookie"
|
||||
else
|
||||
config.storage = "redis"
|
||||
config.redis = {
|
||||
prefix = "session_",
|
||||
connect_timeout = tonumber(vars["REDIS_TIMEOUT"]),
|
||||
send_timeout = tonumber(vars["REDIS_TIMEOUT"]),
|
||||
read_timeout = tonumber(vars["REDIS_TIMEOUT"]),
|
||||
keepalive_timeout = tonumber(vars["REDIS_KEEPALIVE_IDLE"]),
|
||||
prefix = "sessions_",
|
||||
connect_timeout = tonumber(redis_vars["REDIS_TIMEOUT"]),
|
||||
send_timeout = tonumber(redis_vars["REDIS_TIMEOUT"]),
|
||||
read_timeout = tonumber(redis_vars["REDIS_TIMEOUT"]),
|
||||
keepalive_timeout = tonumber(redis_vars["REDIS_KEEPALIVE_IDLE"]),
|
||||
pool = "bw",
|
||||
pool_size = tonumber(vars["REDIS_KEEPALIVE_POOL"]),
|
||||
ssl = vars["REDIS_SSL"] == "yes",
|
||||
host = vars["REDIS_HOST"],
|
||||
port = tonumber(vars["REDIS_HOST"]),
|
||||
database = tonumber(vars["REDIS_DATABASE"])
|
||||
pool_size = tonumber(redis_vars["REDIS_KEEPALIVE_POOL"]),
|
||||
ssl = redis_vars["REDIS_SSL"] == "yes",
|
||||
host = redis_vars["REDIS_HOST"],
|
||||
port = tonumber(redis_vars["REDIS_HOST"]),
|
||||
database = tonumber(redis_vars["REDIS_DATABASE"])
|
||||
}
|
||||
end
|
||||
session.init(config)
|
||||
return true, "session init successful"
|
||||
return self:ret(true, "sessions init successful")
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
-- Start session and refresh it if needed
|
||||
local client_session, err, exists, refreshed = session.start()
|
||||
if err then
|
||||
return false, "can't open session : " .. err, nil, nil
|
||||
end
|
||||
-- Refresh it
|
||||
if exists then
|
||||
local ok, err = client_session:refresh()
|
||||
if err then
|
||||
return false, "can't refresh session : " .. err, nil, nil
|
||||
end
|
||||
return true, "session exists", nil, nil
|
||||
end
|
||||
return true, "session doesn't exist", nil, nil
|
||||
end
|
||||
|
||||
return _M
|
||||
return sessions
|
|
@ -1,26 +1,47 @@
|
|||
local _M = {}
|
||||
_M.__index = _M
|
||||
local class = require "middleclass"
|
||||
local plugin = require "bunkerweb.plugin"
|
||||
local utils = require "bunkerweb.utils"
|
||||
local datastore = require "bunkerweb.datastore"
|
||||
local cachestore = require "bunkerweb.cachestore"
|
||||
local cjson = require "cjson"
|
||||
local ipmatcher = require "resty.ipmatcher"
|
||||
local env = require "resty.env"
|
||||
|
||||
local utils = require "utils"
|
||||
local datastore = require "datastore"
|
||||
local logger = require "logger"
|
||||
local cjson = require "cjson"
|
||||
local ipmatcher = require "resty.ipmatcher"
|
||||
local env = require "resty.env"
|
||||
local whitelist = class("whitelist", plugin)
|
||||
|
||||
function _M.new()
|
||||
local self = setmetatable({}, _M)
|
||||
return self, nil
|
||||
function whitelist:initialize()
|
||||
-- Call parent initialize
|
||||
plugin.initialize(self, "whitelist")
|
||||
-- Check if redis is enabled
|
||||
local use_redis, err = utils.get_variable("USE_REDIS", false)
|
||||
if not use_redis then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.use_redis = use_redis == "yes"
|
||||
-- Check if init is needed
|
||||
if ngx.get_phase() == "init" then
|
||||
local init_needed, err = utils.has_variable("USE_WHITELIST", "yes")
|
||||
if init_needed == nil then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
end
|
||||
self.init_needed = init_needed
|
||||
-- Decode lists
|
||||
else
|
||||
local lists, err = self.datastore:get("plugin_whitelist_lists")
|
||||
if not lists then
|
||||
self.logger:log(ngx.ERR, err)
|
||||
else
|
||||
self.lists = cjson.decode(lists)
|
||||
end
|
||||
end
|
||||
-- Instantiate cachestore
|
||||
self.cachestore = cachestore:new(self.use_redis)
|
||||
end
|
||||
|
||||
function _M:init()
|
||||
function whitelist:init()
|
||||
-- Check if init is needed
|
||||
local init_needed, err = utils.has_variable("USE_WHITELIST", "yes")
|
||||
if init_needed == nil then
|
||||
return false, err
|
||||
end
|
||||
if not init_needed then
|
||||
return true, "no service uses Whitelist, skipping init"
|
||||
if not self.init_needed then
|
||||
return self:ret(true, "init not needed")
|
||||
end
|
||||
-- Read whitelists
|
||||
local whitelists = {
|
||||
|
@ -42,379 +63,224 @@ function _M:init()
|
|||
end
|
||||
end
|
||||
-- Load them into datastore
|
||||
local ok, err = datastore:set("plugin_whitelist_list", cjson.encode(whitelists))
|
||||
local ok, err = self.datastore:set("plugin_whitelist_lists", cjson.encode(whitelists))
|
||||
if not ok then
|
||||
return false, "can't store Whitelist list into datastore : " .. err
|
||||
return self:ret(false, "can't store whitelist list into datastore : " .. err)
|
||||
end
|
||||
return true, "successfully loaded " .. tostring(i) .. " whitelisted IP/network/rDNS/ASN/User-Agent/URI"
|
||||
return self:ret(true, "successfully loaded " .. tostring(i) .. " IP/network/rDNS/ASN/User-Agent/URI")
|
||||
end
|
||||
|
||||
function _M:set()
|
||||
|
||||
function whitelist:set()
|
||||
-- Set default value
|
||||
ngx.var.is_whitelisted = "no"
|
||||
env.set("is_whitelisted", "no")
|
||||
-- Check if set is needed
|
||||
if self.variables["USE_WHITELIST"] ~= "yes" then
|
||||
return self:ret(true, "whitelist not activated")
|
||||
end
|
||||
-- Check cache
|
||||
local whitelisted, err = self:check_cache()
|
||||
if whitelisted == nil then
|
||||
return self:ret(false, err)
|
||||
elseif whitelisted then
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
env.set("is_whitelisted", "yes")
|
||||
return self:ret(true, err)
|
||||
end
|
||||
return self:ret(true, "not in whitelist cache")
|
||||
end
|
||||
|
||||
function whitelist:access()
|
||||
-- Check if access is needed
|
||||
local set_needed, err = utils.get_variable("USE_WHITELIST")
|
||||
if set_needed == nil then
|
||||
if self.variables["USE_WHITELIST"] ~= "yes" then
|
||||
return self:ret(true, "whitelist not activated")
|
||||
end
|
||||
-- Check cache
|
||||
local whitelisted, err, already_cached = self:check_cache()
|
||||
if whitelisted == nil then
|
||||
return self:ret(false, err)
|
||||
elseif whitelisted then
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
env.set("is_whitelisted", "yes")
|
||||
return self:ret(true, err, ngx.OK)
|
||||
end
|
||||
-- Perform checks
|
||||
for k, v in pairs(already_cached) do
|
||||
if not already_cached[k] then
|
||||
local ok, whitelisted = self:is_whitelisted(k)
|
||||
if ok == nil then
|
||||
self.logger:log(ngx.ERR, "error while checking if " .. k .. " is whitelisted : " .. err)
|
||||
else
|
||||
local ok, err = self:add_to_cache(self:kind_to_ele(k), whitelisted)
|
||||
if not ok then
|
||||
self.logger:log(ngx.ERR, "error while adding element to cache : " .. err)
|
||||
end
|
||||
if whitelisted ~= "ok" then
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
env.set("is_whitelisted", "yes")
|
||||
return self:ret(true, k + " is whitelisted (info : " .. whitelisted .. ")", ngx.OK)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
-- Not whitelisted
|
||||
return self:ret(true, "not whitelisted")
|
||||
end
|
||||
|
||||
function whitelist:preread()
|
||||
return self:access()
|
||||
end
|
||||
|
||||
function whitelist:kind_to_ele(kind)
|
||||
if kind == "IP" then
|
||||
return "ip" .. ngx.var.remote_addr
|
||||
elseif kind == "UA" then
|
||||
return "ua" .. ngx.var.http_user_agent
|
||||
elseif kind == "URI" then
|
||||
return "uri" .. ngx.var.uri
|
||||
end
|
||||
end
|
||||
|
||||
function whitelist:check_cache()
|
||||
-- Check the caches
|
||||
local checks = {
|
||||
["IP"] = "ip" .. ngx.var.remote_addr
|
||||
}
|
||||
if ngx.var.http_user_agent then
|
||||
checks["UA"] = "ua" .. ngx.var.http_user_agent
|
||||
end
|
||||
if ngx.var.uri then
|
||||
checks["URI"] = "uri" .. ngx.var.uri
|
||||
end
|
||||
local already_cached = {
|
||||
["IP"] = false,
|
||||
["URI"] = false,
|
||||
["UA"] = false
|
||||
}
|
||||
for k, v in pairs(checks) do
|
||||
local ok, cached = self:is_in_cache(v)
|
||||
if not ok then
|
||||
self.logger:log(ngx.ERR, "error while checking cache : " .. cached)
|
||||
elseif cached and cached ~= "ok" then
|
||||
return true, k + " is in cached whitelist (info : " .. cached .. ")"
|
||||
end
|
||||
if cached then
|
||||
already_cached[k] = true
|
||||
end
|
||||
end
|
||||
-- Check lists
|
||||
if not self.lists then
|
||||
return nil, "lists is nil"
|
||||
end
|
||||
-- Not cached/whitelisted
|
||||
return false, "not cached/whitelisted", already_cached
|
||||
end
|
||||
|
||||
function whitelist:is_in_cache(ele)
|
||||
local ok, data = self.cachestore:get("plugin_whitelist_" .. ele)
|
||||
if not ok then
|
||||
return false, data
|
||||
end
|
||||
return true, data
|
||||
end
|
||||
|
||||
function whitelist:add_to_cache(ele, value)
|
||||
local ok, err = self.cachestore:set("plugin_whitelist_" .. ele, value)
|
||||
if not ok then
|
||||
return false, err
|
||||
end
|
||||
if set_needed ~= "yes" then
|
||||
return true, "whitelist not enabled"
|
||||
return true
|
||||
end
|
||||
|
||||
function whitelist:is_whitelisted(kind)
|
||||
if kind == "IP" then
|
||||
return self:is_whitelisted_ip()
|
||||
elseif kind == "URI" then
|
||||
return self:is_whitelisted_uri()
|
||||
elseif kind == "UA" then
|
||||
return self:is_whitelisted_ua()
|
||||
end
|
||||
return false, "unknown kind " .. kind
|
||||
end
|
||||
|
||||
function whitelist:is_whitelisted_ip()
|
||||
-- Check if IP is in whitelist
|
||||
local ipm, err = ipmatcher.new(self.lists["IP"])
|
||||
if not ipm then
|
||||
return nil, err
|
||||
end
|
||||
local match, err = ipm:match(ngx.var.remote_addr)
|
||||
if err then
|
||||
return nil, err
|
||||
end
|
||||
if match then
|
||||
return true, "ip"
|
||||
end
|
||||
|
||||
-- Check the cache
|
||||
local cached_ip, err = self:is_in_cache("ip" .. ngx.var.remote_addr)
|
||||
if cached_ip and cached_ip ~= "ok" then
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
env.set("is_whitelisted", "yes")
|
||||
return true, "ip whitelisted"
|
||||
-- Check if rDNS is needed
|
||||
local check_rdns = true
|
||||
local is_global, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if self.variables["WHITELIST_RDNS_GLOBAL"] == "yes" then
|
||||
if is_global == nil then
|
||||
return nil, err
|
||||
end
|
||||
if not is_global then
|
||||
check_rdns = false
|
||||
end
|
||||
end
|
||||
local cached_uri, err = self:is_in_cache("uri" .. ngx.var.uri)
|
||||
if cached_uri and cached_uri ~= "ok" then
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
env.set("is_whitelisted", "yes")
|
||||
return true, "uri whitelisted"
|
||||
if check_rdns then
|
||||
-- Get rDNS
|
||||
local rdns_list, err = utils.get_rdns(ngx.var.remote_addr)
|
||||
if not rdns_list then
|
||||
return nil, err
|
||||
end
|
||||
-- Check if rDNS is in whitelist
|
||||
for i, suffix in ipairs(self.lists["RDNS"]) do
|
||||
for j, rdns in ipairs(rdns_list) do
|
||||
if rdns:sub(-#suffix) == suffix then
|
||||
return true, "rDNS " .. suffix
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
local cached_ua = true
|
||||
if ngx.var.http_user_agent then
|
||||
cached_ua, err = self:is_in_cache("ua" .. ngx.var.http_user_agent)
|
||||
if cached_ua and cached_ua ~= "ok" then
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
env.set("is_whitelisted", "yes")
|
||||
return true, "ua whitelisted"
|
||||
|
||||
-- Check if ASN is in whitelist
|
||||
if is_global then
|
||||
local asn, err = utils.get_asn(ngx.var.remote_addr)
|
||||
if not asn then
|
||||
return nil, err
|
||||
end
|
||||
for i, bl_asn in ipairs(self.lists["ASN"]) do
|
||||
if bl_asn == tostring(asn) then
|
||||
return true, "ASN " .. bl_asn
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Not whitelisted
|
||||
return true, "not whitelisted"
|
||||
return false, "ok"
|
||||
end
|
||||
|
||||
function _M:access()
|
||||
-- Check if access is needed
|
||||
local access_needed, err = utils.get_variable("USE_WHITELIST")
|
||||
if access_needed == nil then
|
||||
return false, err, nil, nil
|
||||
end
|
||||
if access_needed ~= "yes" then
|
||||
return true, "Whitelist not activated", nil, nil
|
||||
end
|
||||
|
||||
-- Check the cache
|
||||
local cached_ip, err = self:is_in_cache("ip" .. ngx.var.remote_addr)
|
||||
if cached_ip and cached_ip ~= "ok" then
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return true, "IP is in whitelist cache (info = " .. cached_ip .. ")", true, ngx.OK
|
||||
end
|
||||
local cached_uri, err = self:is_in_cache("uri" .. ngx.var.uri)
|
||||
if cached_uri and cached_uri ~= "ok" then
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return true, "URI is in whitelist cache (info = " .. cached_uri .. ")", true, ngx.OK
|
||||
end
|
||||
local cached_ua = true
|
||||
if ngx.var.http_user_agent then
|
||||
cached_ua, err = self:is_in_cache("ua" .. ngx.var.http_user_agent)
|
||||
if cached_ua and cached_ua ~= "ok" then
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return true, "User-Agent is in whitelist cache (info = " .. cached_ua .. ")", true, ngx.OK
|
||||
end
|
||||
end
|
||||
if cached_ip and cached_uri and cached_ua then
|
||||
return true, "full request is in whitelist cache (not whitelisted)", nil, nil
|
||||
end
|
||||
|
||||
-- Get list
|
||||
local data, err = datastore:get("plugin_whitelist_list")
|
||||
if not data then
|
||||
return false, "can't get Whitelist list : " .. err, false, nil
|
||||
end
|
||||
local ok, whitelists = pcall(cjson.decode, data)
|
||||
if not ok then
|
||||
return false, "error while decoding whitelists : " .. whitelists, false, nil
|
||||
end
|
||||
|
||||
-- Return value
|
||||
local ret, ret_err = true, "success"
|
||||
|
||||
-- Check if IP is in IP/net whitelist
|
||||
local ip_net, err = utils.get_variable("WHITELIST_IP")
|
||||
if ip_net and ip_net ~= "" then
|
||||
for element in ip_net:gmatch("%S+") do
|
||||
table.insert(whitelists["IP"], element)
|
||||
end
|
||||
end
|
||||
if not cached_ip then
|
||||
local ipm, err = ipmatcher.new(whitelists["IP"])
|
||||
if not ipm then
|
||||
ret = false
|
||||
ret_err = "can't instantiate ipmatcher " .. err
|
||||
else
|
||||
if ipm:match(ngx.var.remote_addr) then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ip/net")
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in whitelist", true, ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if rDNS is in whitelist
|
||||
local rdns_global, err = utils.get_variable("WHITELIST_RDNS_GLOBAL")
|
||||
local check = true
|
||||
if not rdns_global then
|
||||
logger.log(ngx.ERR, "WHITELIST", "Error while getting WHITELIST_RDNS_GLOBAL variable : " .. err)
|
||||
elseif rdns_global == "yes" then
|
||||
check, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if check == nil then
|
||||
logger.log(ngx.ERR, "WHITELIST", "Error while getting checking if IP is global : " .. err)
|
||||
end
|
||||
end
|
||||
if not cached_ip and check then
|
||||
local rdns, err = utils.get_rdns(ngx.var.remote_addr)
|
||||
if not rdns then
|
||||
ret = false
|
||||
ret_err = "error while trying to get reverse dns : " .. err
|
||||
else
|
||||
local rdns_list, err = utils.get_variable("WHITELIST_RDNS")
|
||||
if rdns_list and rdns_list ~= "" then
|
||||
for element in rdns_list:gmatch("%S+") do
|
||||
table.insert(whitelists["RDNS"], element)
|
||||
end
|
||||
end
|
||||
for i, suffix in ipairs(whitelists["RDNS"]) do
|
||||
if rdns:sub(- #suffix) == suffix then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "rDNS " .. suffix)
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in whitelist (info = rDNS " .. suffix .. ")", true, ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if ASN is in whitelist
|
||||
if not cached_ip then
|
||||
if utils.ip_is_global(ngx.var.remote_addr) then
|
||||
local asn, err = utils.get_asn(ngx.var.remote_addr)
|
||||
if not asn then
|
||||
ret = false
|
||||
ret_err = "error while trying to get asn number : " .. err
|
||||
else
|
||||
local asn_list, err = utils.get_variable("WHITELIST_ASN")
|
||||
if asn_list and asn_list ~= "" then
|
||||
for element in asn_list:gmatch("%S+") do
|
||||
table.insert(whitelists["ASN"], element)
|
||||
end
|
||||
end
|
||||
for i, asn_bl in ipairs(whitelists["ASN"]) do
|
||||
if tostring(asn) == asn_bl then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ASN " .. tostring(asn))
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in whitelist (kind = ASN " .. tostring(asn) .. ")", true,
|
||||
ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- IP is not whitelisted
|
||||
local ok, err = self:add_to_cache("ip" .. ngx.var.remote_addr, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
end
|
||||
|
||||
-- Check if User-Agent is in whitelist
|
||||
if not cached_ua and ngx.var.http_user_agent then
|
||||
local ua_list, err = utils.get_variable("WHITELIST_USER_AGENT")
|
||||
if ua_list and ua_list ~= "" then
|
||||
for element in ua_list:gmatch("%S+") do
|
||||
table.insert(whitelists["USER_AGENT"], element)
|
||||
end
|
||||
end
|
||||
for i, ua_bl in ipairs(whitelists["USER_AGENT"]) do
|
||||
if ngx.var.http_user_agent:match(ua_bl) then
|
||||
self:add_to_cache("ua" .. ngx.var.http_user_agent, "UA " .. ua_bl)
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return ret, "client User-Agent " .. ngx.var.http_user_agent .. " is in whitelist (matched " .. ua_bl .. ")", true,
|
||||
ngx.OK
|
||||
end
|
||||
end
|
||||
-- UA is not whitelisted
|
||||
local ok, err = self:add_to_cache("ua" .. ngx.var.http_user_agent, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
end
|
||||
end
|
||||
|
||||
function whitelist:is_whitelisted_uri()
|
||||
-- Check if URI is in whitelist
|
||||
if not cached_uri then
|
||||
local uri_list, err = utils.get_variable("WHITELIST_URI")
|
||||
if uri_list and uri_list ~= "" then
|
||||
for element in uri_list:gmatch("%S+") do
|
||||
table.insert(whitelists["URI"], element)
|
||||
end
|
||||
end
|
||||
for i, uri_bl in ipairs(whitelists["URI"]) do
|
||||
if ngx.var.uri:match(uri_bl) then
|
||||
self:add_to_cache("uri" .. ngx.var.uri, "URI " .. uri_bl)
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return ret, "client URI " .. ngx.var.uri .. " is in whitelist (matched " .. uri_bl .. ")", true, ngx.OK
|
||||
end
|
||||
for i, uri in ipairs(self.lists["URI"]) do
|
||||
if ngx.var.uri:match(uri) then
|
||||
return true, "URI " .. uri
|
||||
end
|
||||
end
|
||||
|
||||
-- URI is not whitelisted
|
||||
local ok, err = self:add_to_cache("uri" .. ngx.var.uri, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
end
|
||||
|
||||
return ret, "IP is not in list (error = " .. ret_err .. ")", false, nil
|
||||
return false, "ok"
|
||||
end
|
||||
|
||||
function _M:preread()
|
||||
-- Check if preread is needed
|
||||
local preread_needed, err = utils.get_variable("USE_WHITELIST")
|
||||
if preread_needed == nil then
|
||||
return false, err, nil, nil
|
||||
end
|
||||
if preread_needed ~= "yes" then
|
||||
return true, "Whitelist not activated", nil, nil
|
||||
end
|
||||
|
||||
-- Check the cache
|
||||
local cached_ip, err = self:is_in_cache("ip" .. ngx.var.remote_addr)
|
||||
if cached_ip and cached_ip ~= "ok" then
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return true, "IP is in whitelist cache (info = " .. cached_ip .. ")", true, ngx.OK
|
||||
end
|
||||
if cached_ip then
|
||||
return true, "full request is in whitelist cache (not whitelisted)", nil, nil
|
||||
end
|
||||
|
||||
-- Get list
|
||||
local data, err = datastore:get("plugin_whitelist_list")
|
||||
if not data then
|
||||
return false, "can't get Whitelist list : " .. err, false, nil
|
||||
end
|
||||
local ok, whitelists = pcall(cjson.decode, data)
|
||||
if not ok then
|
||||
return false, "error while decoding whitelists : " .. whitelists, false, nil
|
||||
end
|
||||
|
||||
-- Return value
|
||||
local ret, ret_err = true, "success"
|
||||
|
||||
-- Check if IP is in IP/net whitelist
|
||||
local ip_net, err = utils.get_variable("WHITELIST_IP")
|
||||
if ip_net and ip_net ~= "" then
|
||||
for element in ip_net:gmatch("%S+") do
|
||||
table.insert(whitelists["IP"], element)
|
||||
function whitelist:is_whitelisted_ua()
|
||||
-- Check if UA is in whitelist
|
||||
for i, ua in ipairs(self.lists["USER_AGENT"]) do
|
||||
if ngx.var.http_user_agent:match(ua) then
|
||||
return true, "UA " .. ua
|
||||
end
|
||||
end
|
||||
if not cached_ip then
|
||||
local ipm, err = ipmatcher.new(whitelists["IP"])
|
||||
if not ipm then
|
||||
ret = false
|
||||
ret_err = "can't instantiate ipmatcher " .. err
|
||||
else
|
||||
if ipm:match(ngx.var.remote_addr) then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ip/net")
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in whitelist", true, ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if rDNS is in whitelist
|
||||
local rdns_global, err = utils.get_variable("WHITELIST_RDNS_GLOBAL")
|
||||
local check = true
|
||||
if not rdns_global then
|
||||
logger.log(ngx.ERR, "WHITELIST", "Error while getting WHITELIST_RDNS_GLOBAL variable : " .. err)
|
||||
elseif rdns_global == "yes" then
|
||||
check, err = utils.ip_is_global(ngx.var.remote_addr)
|
||||
if check == nil then
|
||||
logger.log(ngx.ERR, "WHITELIST", "Error while getting checking if IP is global : " .. err)
|
||||
end
|
||||
end
|
||||
if not cached_ip and check then
|
||||
local rdns, err = utils.get_rdns(ngx.var.remote_addr)
|
||||
if not rdns then
|
||||
ret = false
|
||||
ret_err = "error while trying to get reverse dns : " .. err
|
||||
else
|
||||
local rdns_list, err = utils.get_variable("WHITELIST_RDNS")
|
||||
if rdns_list and rdns_list ~= "" then
|
||||
for element in rdns_list:gmatch("%S+") do
|
||||
table.insert(whitelists["RDNS"], element)
|
||||
end
|
||||
end
|
||||
for i, suffix in ipairs(whitelists["RDNS"]) do
|
||||
if rdns:sub(- #suffix) == suffix then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "rDNS " .. suffix)
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in whitelist (info = rDNS " .. suffix .. ")", true, ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Check if ASN is in whitelist
|
||||
if not cached_ip then
|
||||
if utils.ip_is_global(ngx.var.remote_addr) then
|
||||
local asn, err = utils.get_asn(ngx.var.remote_addr)
|
||||
if not asn then
|
||||
ret = false
|
||||
ret_err = "error while trying to get asn number : " .. err
|
||||
else
|
||||
local asn_list, err = utils.get_variable("WHITELIST_ASN")
|
||||
if asn_list and asn_list ~= "" then
|
||||
for element in asn_list:gmatch("%S+") do
|
||||
table.insert(whitelists["ASN"], element)
|
||||
end
|
||||
end
|
||||
for i, asn_bl in ipairs(whitelists["ASN"]) do
|
||||
if tostring(asn) == asn_bl then
|
||||
self:add_to_cache("ip" .. ngx.var.remote_addr, "ASN " .. tostring(asn))
|
||||
ngx.var.is_whitelisted = "yes"
|
||||
return ret, "client IP " .. ngx.var.remote_addr .. " is in whitelist (kind = ASN " .. tostring(asn) .. ")", true,
|
||||
ngx.OK
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- IP is not whitelisted
|
||||
local ok, err = self:add_to_cache("ip" .. ngx.var.remote_addr, "ok")
|
||||
if not ok then
|
||||
ret = false
|
||||
ret_err = err
|
||||
end
|
||||
return ret, "IP is not in list (error = " .. ret_err .. ")", false, nil
|
||||
-- UA is not whiteklisted
|
||||
return false, "ok"
|
||||
end
|
||||
|
||||
function _M:is_in_cache(ele)
|
||||
local kind, err = datastore:get("plugin_whitelist_cache_" .. ngx.var.server_name .. ele)
|
||||
if not kind then
|
||||
if err ~= "not found" then
|
||||
logger.log(ngx.ERR, "WHITELIST", "Error while accessing cache : " .. err)
|
||||
end
|
||||
return false, err
|
||||
end
|
||||
return kind, "success"
|
||||
end
|
||||
|
||||
function _M:add_to_cache(ele, kind)
|
||||
local ok, err = datastore:set("plugin_whitelist_cache_" .. ngx.var.server_name .. ele, kind, 3600)
|
||||
if not ok then
|
||||
logger.log(ngx.ERR, "WHITELIST", "Error while adding element to cache : " .. err)
|
||||
return false, err
|
||||
end
|
||||
return true, "success"
|
||||
end
|
||||
|
||||
return _M
|
||||
return whitelist
|
|
@ -119,13 +119,49 @@
|
|||
},
|
||||
"DATASTORE_MEMORY_SIZE": {
|
||||
"context": "global",
|
||||
"default": "256m",
|
||||
"default": "64m",
|
||||
"help": "Size of the internal datastore.",
|
||||
"id": "datastore-memory-size",
|
||||
"label": "Datastore memory size",
|
||||
"regex": "^\\d+[kKmMgG]?$",
|
||||
"type": "text"
|
||||
},
|
||||
"CACHESTORE_MEMORY_SIZE": {
|
||||
"context": "global",
|
||||
"default": "64m",
|
||||
"help": "Size of the internal cachestore.",
|
||||
"id": "cachestore-memory-size",
|
||||
"label": "Cachestore memory size",
|
||||
"regex": "^\\d+[kKmMgG]?$",
|
||||
"type": "text"
|
||||
},
|
||||
"CACHESTORE_IPC_MEMORY_SIZE": {
|
||||
"context": "global",
|
||||
"default": "16m",
|
||||
"help": "Size of the internal cachestore (ipc).",
|
||||
"id": "cachestore-ipc-memory-size",
|
||||
"label": "Cachestore ipc memory size",
|
||||
"regex": "^\\d+[kKmMgG]?$",
|
||||
"type": "text"
|
||||
},
|
||||
"CACHESTORE_MISS_MEMORY_SIZE": {
|
||||
"context": "global",
|
||||
"default": "16m",
|
||||
"help": "Size of the internal cachestore (miss).",
|
||||
"id": "cachestore-miss-memory-size",
|
||||
"label": "Cachestore miss memory size",
|
||||
"regex": "^\\d+[kKmMgG]?$",
|
||||
"type": "text"
|
||||
},
|
||||
"CACHESTORE_LOCKS_MEMORY_SIZE": {
|
||||
"context": "global",
|
||||
"default": "16m",
|
||||
"help": "Size of the internal cachestore (locks).",
|
||||
"id": "cachestore-locks-memory-size",
|
||||
"label": "Cachestore locks memory size",
|
||||
"regex": "^\\d+[kKmMgG]?$",
|
||||
"type": "text"
|
||||
},
|
||||
"USE_API": {
|
||||
"context": "global",
|
||||
"default": "yes",
|
||||
|
|
|
@ -128,10 +128,10 @@ function do_and_check_cmd() {
|
|||
return 0
|
||||
}
|
||||
|
||||
# nginx 1.22.1
|
||||
# nginx 1.24.0
|
||||
echo "ℹ️ Downloading nginx"
|
||||
NGINX_VERSION="1.22.1"
|
||||
secure_download "https://nginx.org/download/nginx-${NGINX_VERSION}.tar.gz" "nginx-${NGINX_VERSION}.tar.gz" "1d468dcfa9bbd348b8a5dc514ac1428a789e73a92384c039b73a51ce376785f74bf942872c5594a9fcda6bbf44758bd727ce15ac2395f1aa989c507014647dcc"
|
||||
NGINX_VERSION="1.24.0"
|
||||
secure_download "https://nginx.org/download/nginx-${NGINX_VERSION}.tar.gz" "nginx-${NGINX_VERSION}.tar.gz" "1114e37de5664a8109c99cfb2faa1f42ff8ac63c932bcf3780d645e5ed32c0b2ac446f80305b4465994c8f9430604968e176ae464fd80f632d1cb2c8f6007ff3"
|
||||
if [ -f "deps/src/nginx-${NGINX_VERSION}.tar.gz" ] ; then
|
||||
do_and_check_cmd tar -xvzf deps/src/nginx-${NGINX_VERSION}.tar.gz -C deps/src
|
||||
do_and_check_cmd rm -f deps/src/nginx-${NGINX_VERSION}.tar.gz
|
||||
|
@ -148,21 +148,21 @@ if [ -f "deps/src/lua-${LUA_VERSION}.tar.gz" ] ; then
|
|||
do_and_check_cmd patch deps/src/lua-${LUA_VERSION}/src/Makefile deps/misc/lua.patch2
|
||||
fi
|
||||
|
||||
# LuaJIT v2.1-20220915
|
||||
# LuaJIT v2.1-20230410
|
||||
echo "ℹ️ Downloading LuaJIT"
|
||||
git_secure_clone "https://github.com/openresty/luajit2.git" "8384278b14988390cf030b787537aa916a9709bb"
|
||||
git_secure_clone "https://github.com/openresty/luajit2.git" "04f33ff01da97905a1641985fb5c840d234f97f1"
|
||||
|
||||
# lua-nginx-module v0.10.23
|
||||
# lua-nginx-module v0.10.24
|
||||
echo "ℹ️ Downloading lua-nginx-module"
|
||||
git_secure_clone "https://github.com/openresty/lua-nginx-module.git" "5e05fa3adb0d2492ecaaf2cb76498e23765aa6ab"
|
||||
git_secure_clone "https://github.com/openresty/lua-nginx-module.git" "68acad14e4a8f42e31d4a4bb5ed44d6f5b55fc1c"
|
||||
|
||||
# lua-resty-core v0.1.25
|
||||
# lua-resty-core v0.1.26
|
||||
echo "ℹ️ Downloading lua-resty-core"
|
||||
git_secure_clone "https://github.com/openresty/lua-resty-core.git" "0173d96c9eb77b513b989b765716fd2498f09dd9"
|
||||
git_secure_clone "https://github.com/openresty/lua-resty-core.git" "407000a9856d3a5aab34e8c73f6ab0f049f8b8d7"
|
||||
|
||||
# lua-resty-lrucache v0.13
|
||||
echo "ℹ️ Downloading lua-resty-lrucache"
|
||||
git_secure_clone "https://github.com/openresty/lua-resty-lrucache.git" "2ab2624c841cbf04785cc6384c5e213933d3b5f2"
|
||||
git_secure_clone "https://github.com/openresty/lua-resty-lrucache.git" "a79615ec9dc547fdb4aaee59ef8f5a50648ce9fd"
|
||||
|
||||
# lua-resty-dns v0.22
|
||||
echo "ℹ️ Downloading lua-resty-dns"
|
||||
|
@ -180,29 +180,29 @@ git_secure_clone "https://github.com/bungle/lua-resty-random.git" "17b604f7f7dd2
|
|||
echo "ℹ️ Downloading lua-resty-string"
|
||||
git_secure_clone "https://github.com/openresty/lua-resty-string.git" "b192878f6ed31b0af237935bbc5a8110a3c2256c"
|
||||
|
||||
# lua-cjson v2.1.0.9
|
||||
# lua-cjson v2.1.0.12
|
||||
echo "ℹ️ Downloading lua-cjson"
|
||||
git_secure_clone "https://github.com/openresty/lua-cjson.git" "891962b11d6d3b1b7275550b5c109e16c73ac94f"
|
||||
git_secure_clone "https://github.com/openresty/lua-cjson.git" "881accc8fadca5ec02aa34d364df2a1aa25cd2f9"
|
||||
|
||||
# lua-gd v2.0.33r3+
|
||||
echo "ℹ️ Downloading lua-gd"
|
||||
git_secure_clone "https://github.com/ittner/lua-gd.git" "2ce8e478a8591afd71e607506bc8c64b161bbd30"
|
||||
|
||||
# lua-resty-http v0.16.1
|
||||
# lua-resty-http v0.17.1
|
||||
echo "ℹ️ Downloading lua-resty-http"
|
||||
git_secure_clone "https://github.com/ledgetech/lua-resty-http.git" "9bf951dfe162dd9710a0e1f4525738d4902e9d20"
|
||||
git_secure_clone "https://github.com/ledgetech/lua-resty-http.git" "4ab4269cf442ba52507aa2c718f606054452fcad"
|
||||
|
||||
# lualogging v1.8.0
|
||||
# lualogging v1.8.2
|
||||
echo "ℹ️ Downloading lualogging"
|
||||
git_secure_clone "https://github.com/lunarmodules/lualogging.git" "1c6fcf5f68e4d0324c5977f1a27083c06f4d1b8f"
|
||||
git_secure_clone "https://github.com/lunarmodules/lualogging.git" "465c994788f1bc18fca950934fa5ec9a909f496c"
|
||||
|
||||
# luasocket v3.1.0
|
||||
echo "ℹ️ Downloading luasocket"
|
||||
git_secure_clone "https://github.com/diegonehab/luasocket.git" "95b7efa9da506ef968c1347edf3fc56370f0deed"
|
||||
|
||||
# luasec v1.2.0
|
||||
# luasec v1.3.1
|
||||
echo "ℹ️ Downloading luasec"
|
||||
git_secure_clone "https://github.com/brunoos/luasec.git" "d9215ee00f6694a228daad50ee85827a4cd13583"
|
||||
git_secure_clone "https://github.com/brunoos/luasec.git" "fddde111f7fe9ad5417d75ebbd70429d13eaad97"
|
||||
|
||||
# lua-resty-ipmatcher v0.6.1 (3 commits after just in case)
|
||||
echo "ℹ️ Downloading lua-resty-ipmatcher"
|
||||
|
@ -215,13 +215,13 @@ if [ "$dopatch" = "yes" ] ; then
|
|||
do_and_check_cmd patch deps/src/lua-resty-ipmatcher/resty/ipmatcher.lua deps/misc/ipmatcher.patch
|
||||
fi
|
||||
|
||||
# lua-resty-redis v0.29
|
||||
# lua-resty-redis v0.30
|
||||
echo "ℹ️ Downloading lua-resty-redis"
|
||||
git_secure_clone "https://github.com/openresty/lua-resty-redis.git" "053f989c7f43d8edc79d5151e73b79249c6b5d94"
|
||||
git_secure_clone "https://github.com/openresty/lua-resty-redis.git" "d7c25f1b339d79196ff67f061c547a73a920b580"
|
||||
|
||||
# lua-resty-upload v0.10 (8 commits after just in case)
|
||||
# lua-resty-upload v0.11
|
||||
echo "ℹ️ Downloading lua-resty-upload"
|
||||
git_secure_clone "https://github.com/openresty/lua-resty-upload.git" "73c89846e866bf5d0660ffa881df37fd63f04391"
|
||||
git_secure_clone "https://github.com/openresty/lua-resty-upload.git" "03704aee42f7135e7782688d8a9af63a16015edc"
|
||||
|
||||
# luajit-geoip v2.1.0
|
||||
echo "ℹ️ Downloading luajit-geoip"
|
||||
|
@ -242,14 +242,42 @@ git_secure_clone "https://github.com/iskolbin/lbase64.git" "c261320edbdf82c16409
|
|||
echo "ℹ️ Downloading lua-resty-env"
|
||||
git_secure_clone "https://github.com/3scale/lua-resty-env.git" "adb294def823dd910ffa11972d2c61eab7cfce3e"
|
||||
|
||||
# ModSecurity v3.0.8 (19 commits after just in case)
|
||||
echo "ℹ️ Downloading ModSecurity"
|
||||
if [ ! -d "deps/src/ModSecurity" ] ; then
|
||||
dopatch="yes"
|
||||
# lua-resty-mlcache v2.6.0
|
||||
echo "ℹ️ Downloading lua-resty-mlcache"
|
||||
git_secure_clone "https://github.com/thibaultcha/lua-resty-mlcache.git" "f140f56663cbdb9cdd247d29f75c299c702ff6b4"
|
||||
|
||||
# lua-resty-template v2.0
|
||||
echo "ℹ️ Downloading lua-resty-template"
|
||||
git_secure_clone "https://github.com/bungle/lua-resty-template.git" "c08c6bc9e27710806990f2dec0f03b19406976ac"
|
||||
|
||||
# lua-resty-lock v0.09
|
||||
echo "ℹ️ Downloading lua-resty-lock"
|
||||
git_secure_clone "https://github.com/openresty/lua-resty-lock.git" "9dc550e56b6f3b1a2f1a31bb270a91813b5b6861"
|
||||
|
||||
# lua-pack v2.0.0
|
||||
echo "ℹ️ Downloading lua-pack"
|
||||
dopatch="no"
|
||||
if [ ! -d "deps/src/lua-pack" ] ; then
|
||||
dopatch="yes"
|
||||
fi
|
||||
git_secure_clone "https://github.com/SpiderLabs/ModSecurity.git" "40f7a5067c695b1770920b881f30abc09a4e02b3"
|
||||
git_secure_clone "https://github.com/Kong/lua-pack.git" "495bf30606b9744140258df349862981e3ee7820"
|
||||
if [ "$dopatch" = "yes" ] ; then
|
||||
do_and_check_cmd patch deps/src/ModSecurity/configure.ac deps/misc/modsecurity.patch
|
||||
do_and_check_cmd cp deps/misc/lua-pack.Makefile deps/src/lua-pack/Makefile
|
||||
fi
|
||||
|
||||
# lua-resty-openssl v0.8.21
|
||||
echo "ℹ️ Downloading lua-resty-openssl"
|
||||
git_secure_clone "https://github.com/fffonion/lua-resty-openssl.git" "15bc59b97feb5acf25fbdd9426cf73870cf7c838"
|
||||
|
||||
# ModSecurity v3.0.9
|
||||
echo "ℹ️ Downloading ModSecurity"
|
||||
dopatch="no"
|
||||
if [ ! -d "deps/src/ModSecurity" ] ; then
|
||||
dopatch="yes"
|
||||
fi
|
||||
git_secure_clone "https://github.com/SpiderLabs/ModSecurity.git" "205dac0e8c675182f96b5c2fb06be7d1cf7af2b2"
|
||||
if [ "$dopatch" = "yes" ] ; then
|
||||
do_and_check_cmd patch deps/src/ModSecurity/configure.ac deps/misc/modsecurity.patch
|
||||
fi
|
||||
|
||||
# libinjection v3.10.0+
|
||||
|
@ -285,10 +313,10 @@ git_secure_clone "https://github.com/AirisX/nginx_cookie_flag_module.git" "4e48a
|
|||
echo "ℹ️ Downloading ngx_brotli"
|
||||
git_secure_clone "https://github.com/google/ngx_brotli.git" "6e975bcb015f62e1f303054897783355e2a877dc"
|
||||
|
||||
# ngx_devel_kit
|
||||
# ngx_devel_kit v0.3.2
|
||||
echo "ℹ️ Downloading ngx_devel_kit"
|
||||
git_secure_clone "https://github.com/vision5/ngx_devel_kit.git" "b4642d6ca01011bd8cd30b253f5c3872b384fd21"
|
||||
|
||||
# stream-lua-nginx-module
|
||||
# stream-lua-nginx-module v0.0.13
|
||||
echo "ℹ️ Downloading stream-lua-nginx-module"
|
||||
git_secure_clone "https://github.com/openresty/stream-lua-nginx-module.git" "2ef14f373b991b911c4eb5d09aa333352be9a756"
|
||||
git_secure_clone "https://github.com/openresty/stream-lua-nginx-module.git" "309198abf26266f1a3e53c71388ed7bb9d1e5ea2"
|
|
@ -123,6 +123,26 @@ do_and_check_cmd cp -r /tmp/bunkerweb/deps/src/lbase64/base64.lua /usr/share/bun
|
|||
echo "ℹ️ Installing lua-resty-env"
|
||||
do_and_check_cmd cp -r /tmp/bunkerweb/deps/src/lua-resty-env/src/resty/env.lua /usr/share/bunkerweb/deps/lib/lua/resty
|
||||
|
||||
# Installing lua-resty-mlcache
|
||||
echo "ℹ️ Installing lua-resty-mlcache"
|
||||
do_and_check_cmd cp -r /tmp/bunkerweb/deps/src/lua-resty-mlcache/lib/resty/* /usr/share/bunkerweb/deps/lib/lua/resty
|
||||
|
||||
# Installing lua-resty-template
|
||||
echo "ℹ️ Installing lua-resty-template"
|
||||
do_and_check_cmd cp -r /tmp/bunkerweb/deps/src/lua-resty-template/lib/resty/* /usr/share/bunkerweb/deps/lib/lua/resty
|
||||
|
||||
# Installing lua-resty-lock
|
||||
echo "ℹ️ Installing lua-resty-lock"
|
||||
CHANGE_DIR="/tmp/bunkerweb/deps/src/lua-resty-lock" do_and_check_cmd make PREFIX=/usr/share/bunkerweb/deps LUA_LIB_DIR=/usr/share/bunkerweb/deps/lib/lua install
|
||||
|
||||
# Installing lua-pack
|
||||
echo "ℹ️ Installing lua-pack"
|
||||
CHANGE_DIR="/tmp/bunkerweb/deps/src/lua-pack" do_and_check_cmd make INST_LIBDIR=/usr/share/bunkerweb/deps/lib/lua LUA_LIBDIR=-L/usr/share/bunkerweb/deps/lib LUA_INCDIR=-I/usr/share/bunkerweb/deps/include install
|
||||
|
||||
# Installing lua-resty-openssl
|
||||
echo "ℹ️ Installing lua-resty-openssl"
|
||||
CHANGE_DIR="/tmp/bunkerweb/deps/src/lua-resty-openssl" do_and_check_cmd make LUA_LIB_DIR=/usr/share/bunkerweb/deps/lib/lua install
|
||||
|
||||
# Compile dynamic modules
|
||||
echo "ℹ️ Compiling and installing dynamic modules"
|
||||
CONFARGS="$(nginx -V 2>&1 | sed -n -e 's/^.*arguments: //p')"
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
LUA ?= lua5.1
|
||||
LUA_LIBDIR ?= $(shell pkg-config $(LUA) --libs)
|
||||
LUA_INCDIR ?= $(shell pkg-config $(LUA) --cflags)
|
||||
|
||||
LIBFLAG ?= -shared
|
||||
CFLAGS ?= -std=c99 -O2 -Wall
|
||||
|
||||
.PHONY: all clean install
|
||||
|
||||
all: lua_pack.so
|
||||
|
||||
lua_pack.so: lua_pack.o
|
||||
$(CC) $(LIBFLAG) $(LUA_LIBDIR) $< -o $@
|
||||
|
||||
%.o: %.c
|
||||
$(CC) -c $(CFLAGS) -fPIC $(LUA_INCDIR) $< -o $@
|
||||
|
||||
install: lua_pack.so
|
||||
cp lua_pack.so $(INST_LIBDIR)
|
||||
|
||||
clean:
|
||||
rm -f *.so *.o *.rock
|
||||
|
||||
# eof
|
|
@ -1,6 +1,9 @@
|
|||
---
|
||||
name: Bug report for version 2.x
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
|
|
|
@ -2,6 +2,9 @@
|
|||
name: Bug report for version 3.x
|
||||
about: Create a report to help us improve. If you don't know a specific detail or
|
||||
piece of information leave it blank, if necessary we will help you to figure out.
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
|
@ -17,7 +20,7 @@ Output of:
|
|||
3. Error logs
|
||||
4. If there is a crash, the core dump file.
|
||||
|
||||
_Notice:_ Be carefully to not leak any confidential information.
|
||||
_Notice:_ Be careful to not leak any confidential information.
|
||||
|
||||
**To Reproduce**
|
||||
|
||||
|
@ -33,8 +36,8 @@ A **curl** command line that mimics the original request and reproduces the prob
|
|||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Server (please complete the following information):**
|
||||
- ModSecurity version (and connector): [e.g. ModSecurity v3.0.1 with nginx-connector v1.0.0]
|
||||
- WebServer: [e.g. nginx-1.15.5]
|
||||
- ModSecurity version (and connector): [e.g. ModSecurity v3.0.8 with nginx-connector v1.0.3]
|
||||
- WebServer: [e.g. nginx-1.18.0]
|
||||
- OS (and distro): [e.g. Linux, archlinux]
|
||||
|
||||
|
||||
|
|
|
@ -1,6 +1,24 @@
|
|||
v3.x.y - YYYY-MMM-DD (to be released)
|
||||
-------------------------------------
|
||||
v3.0.9 - 2023-Apr-12
|
||||
--------------------
|
||||
|
||||
- Fix: possible segfault on reload if duplicate ip+CIDR in ip match list
|
||||
[Issue #2877, #2890 - @tomsommer, @martinhsv]
|
||||
- Add some member variable inits in Transaction class (possible segfault)
|
||||
[Issue #2886 - @GNU-Plus-Windows-User, @airween, @mdounin, @martinhsv]
|
||||
- Resolve memory leak on reload (bison-generated variable)
|
||||
[Issue #2876 - @martinhsv]
|
||||
- Support equals sign in XPath expressions
|
||||
[Issue #2328 - @dennus, @martinhsv]
|
||||
- Encode two special chars in error.log output
|
||||
[Issue #2854 - @airween, @martinhsv]
|
||||
- Add JIT support for PCRE2
|
||||
[Issue #2791 - @wfjsw, @airween, @FireBurn, @martinhsv]
|
||||
- Support comments in ipMatchFromFile file via '#' token
|
||||
[Issue #2554 - @tomsommer, @martinhsv]
|
||||
- Use name package name libmaxminddb with pkg-config
|
||||
[Issue #2595, #2596 - @frankvanbever, @ffontaine, @arnout]
|
||||
- Fix: FILES_TMP_CONTENT collection key should use part name
|
||||
[Issue #2831 - @airween]
|
||||
- Use AS_HELP_STRING instead of obsolete AC_HELP_STRING macro
|
||||
[Issue #2806 - @hughmcmaster]
|
||||
- During configure, do not check for pcre if pcre2 specified
|
||||
|
|
|
@ -279,6 +279,7 @@ TESTS+=test/test-cases/regression/variable-variation-count.json
|
|||
TESTS+=test/test-cases/regression/variable-variation-exclusion.json
|
||||
TESTS+=test/test-cases/regression/variable-WEBAPPID.json
|
||||
TESTS+=test/test-cases/regression/variable-WEBSERVER_ERROR_LOG.json
|
||||
TESTS+=test/test-cases/regression/variable-XML.json
|
||||
TESTS+=test/test-cases/secrules-language-tests/operators/beginsWith.json
|
||||
TESTS+=test/test-cases/secrules-language-tests/operators/contains.json
|
||||
TESTS+=test/test-cases/secrules-language-tests/operators/containsWord.json
|
||||
|
|
|
@ -60,12 +60,10 @@ else
|
|||
# Nothing about MaxMind was informed, using the pkg-config to figure things out.
|
||||
if test -n "${PKG_CONFIG}"; then
|
||||
MAXMIND_PKG_NAME=""
|
||||
for x in ${MAXMIND_POSSIBLE_LIB_NAMES}; do
|
||||
if ${PKG_CONFIG} --exists ${x}; then
|
||||
MAXMIND_PKG_NAME="$x"
|
||||
break
|
||||
fi
|
||||
done
|
||||
if ${PKG_CONFIG} --exists libmaxminddb; then
|
||||
MAXMIND_PKG_NAME="libmaxminddb"
|
||||
break
|
||||
fi
|
||||
fi
|
||||
AC_MSG_NOTICE([Nothing about MaxMind was informed during the configure phase. Trying to detect it on the platform...])
|
||||
if test -n "${MAXMIND_PKG_NAME}"; then
|
||||
|
|
|
@ -0,0 +1,19 @@
|
|||
--- /dev/null
|
||||
+++ /dev/null
|
||||
@@ -322,12 +322,12 @@
|
||||
|
||||
# Decide if we want to build the tests or not.
|
||||
-buildTestUtilities=false
|
||||
-if test "x$YAJL_FOUND" = "x1"; then
|
||||
+# buildTestUtilities=false
|
||||
+# if test "x$YAJL_FOUND" = "x1"; then
|
||||
# Regression tests will not be able to run without the logging support.
|
||||
# But we still have the unit tests.
|
||||
# if test "$debugLogs" = "true"; then
|
||||
- buildTestUtilities=true
|
||||
+# buildTestUtilities=true
|
||||
# fi
|
||||
-fi
|
||||
+# fi
|
||||
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
/*
|
||||
* ModSecurity, http://www.modsecurity.org/
|
||||
* Copyright (c) 2015 - 2021 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
* Copyright (c) 2015 - 2023 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
*
|
||||
* You may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
|
@ -190,7 +190,7 @@ namespace modsecurity {
|
|||
|
||||
#define MODSECURITY_MAJOR "3"
|
||||
#define MODSECURITY_MINOR "0"
|
||||
#define MODSECURITY_PATCHLEVEL "8"
|
||||
#define MODSECURITY_PATCHLEVEL "9"
|
||||
#define MODSECURITY_TAG ""
|
||||
#define MODSECURITY_TAG_NUM "100"
|
||||
|
||||
|
@ -198,7 +198,7 @@ namespace modsecurity {
|
|||
MODSECURITY_MINOR "." MODSECURITY_PATCHLEVEL \
|
||||
MODSECURITY_TAG
|
||||
|
||||
#define MODSECURITY_VERSION_NUM 3080100
|
||||
#define MODSECURITY_VERSION_NUM 3090100
|
||||
|
||||
#define MODSECURITY_CHECK_VERSION(a) (MODSECURITY_VERSION_NUM <= a)
|
||||
|
||||
|
|
|
@ -105,6 +105,7 @@ bool VerifyCC::init(const std::string ¶m2, std::string *error) {
|
|||
if (m_pc == NULL) {
|
||||
return false;
|
||||
}
|
||||
m_pcje = pcre2_jit_compile(m_pc, PCRE2_JIT_COMPLETE);
|
||||
#else
|
||||
const char *errptr = NULL;
|
||||
int erroffset = 0;
|
||||
|
@ -142,8 +143,16 @@ bool VerifyCC::evaluate(Transaction *t, RuleWithActions *rule,
|
|||
PCRE2_SPTR pcre2_i = reinterpret_cast<PCRE2_SPTR>(i.c_str());
|
||||
pcre2_match_data *match_data = pcre2_match_data_create_from_pattern(m_pc, NULL);
|
||||
|
||||
int ret;
|
||||
for (offset = 0; offset < target_length; offset++) {
|
||||
int ret = pcre2_match(m_pc, pcre2_i, target_length, offset, 0, match_data, NULL);
|
||||
|
||||
if (m_pcje == 0) {
|
||||
ret = pcre2_jit_match(m_pc, pcre2_i, target_length, offset, 0, match_data, NULL);
|
||||
}
|
||||
|
||||
if (m_pcje != 0 || ret == PCRE2_ERROR_JIT_STACKLIMIT) {
|
||||
ret = pcre2_match(m_pc, pcre2_i, target_length, offset, PCRE2_NO_JIT, match_data, NULL);
|
||||
}
|
||||
|
||||
/* If there was no match, then we are done. */
|
||||
if (ret < 0) {
|
||||
|
|
|
@ -39,7 +39,8 @@ class VerifyCC : public Operator {
|
|||
explicit VerifyCC(std::unique_ptr<RunTimeString> param)
|
||||
: Operator("VerifyCC", std::move(param)),
|
||||
#if WITH_PCRE2
|
||||
m_pc(NULL) { }
|
||||
m_pc(NULL),
|
||||
m_pcje(PCRE2_ERROR_JIT_BADOPTION) { }
|
||||
#else
|
||||
m_pc(NULL),
|
||||
m_pce(NULL) { }
|
||||
|
@ -53,6 +54,7 @@ class VerifyCC : public Operator {
|
|||
private:
|
||||
#if WITH_PCRE2
|
||||
pcre2_code *m_pc;
|
||||
int m_pcje;
|
||||
#else
|
||||
pcre *m_pc;
|
||||
pcre_extra *m_pce;
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
/*
|
||||
* ModSecurity, http://www.modsecurity.org/
|
||||
* Copyright (c) 2015 - 2021 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
* Copyright (c) 2015 - 2023 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
*
|
||||
* You may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
|
@ -34,6 +34,7 @@ Driver::Driver()
|
|||
|
||||
|
||||
Driver::~Driver() {
|
||||
|
||||
while (loc.empty() == false) {
|
||||
yy::location *a = loc.back();
|
||||
loc.pop_back();
|
||||
|
@ -129,9 +130,11 @@ int Driver::parse(const std::string &f, const std::string &ref) {
|
|||
m_lastRule = nullptr;
|
||||
loc.push_back(new yy::location());
|
||||
if (ref.empty()) {
|
||||
loc.back()->begin.filename = loc.back()->end.filename = new std::string("<<reference missing or not informed>>");
|
||||
m_filenames.push_back("<<reference missing or not informed>>");
|
||||
loc.back()->begin.filename = loc.back()->end.filename = &(m_filenames.back());
|
||||
} else {
|
||||
loc.back()->begin.filename = loc.back()->end.filename = new std::string(ref);
|
||||
m_filenames.push_back(ref);
|
||||
loc.back()->begin.filename = loc.back()->end.filename = &(m_filenames.back());
|
||||
}
|
||||
|
||||
if (f.empty()) {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
/*
|
||||
* ModSecurity, http://www.modsecurity.org/
|
||||
* Copyright (c) 2015 - 2021 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
* Copyright (c) 2015 - 2023 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
*
|
||||
* You may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
|
@ -53,14 +53,6 @@ typedef struct Driver_t Driver;
|
|||
#endif
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* FIXME: There is a memory leak in the filename at yy::location.
|
||||
* The filename should be converted into a shared string to
|
||||
* save memory or be associated with the life cycle of the
|
||||
* driver class.
|
||||
*
|
||||
**/
|
||||
class Driver : public RulesSetProperties {
|
||||
public:
|
||||
Driver();
|
||||
|
@ -92,6 +84,13 @@ class Driver : public RulesSetProperties {
|
|||
RuleWithActions *m_lastRule;
|
||||
|
||||
RulesSetPhases m_rulesSetPhases;
|
||||
|
||||
// Retain a list of new'd filenames so that they are available during the lifetime
|
||||
// of the Driver object, but so that they will get cleaned up by the Driver
|
||||
// destructor. This is to resolve a memory leak of yy.position.filename in location.hh.
|
||||
// Ordinarily other solutions would have been preferable, but location.hh is a
|
||||
// bison-generated file, which makes some alternative solutions impractical.
|
||||
std::list<std::string> m_filenames;
|
||||
};
|
||||
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -897,6 +897,7 @@ namespace yy {
|
|||
// "RUN_TIME_VAR_TIME_YEAR"
|
||||
// "VARIABLE"
|
||||
// "Dictionary element"
|
||||
// "Dictionary element, with equals"
|
||||
// "Dictionary element, selected by regexp"
|
||||
char dummy1[sizeof (std::string)];
|
||||
|
||||
|
@ -1314,7 +1315,8 @@ namespace yy {
|
|||
TOK_RUN_TIME_VAR_TIME_YEAR = 596, // "RUN_TIME_VAR_TIME_YEAR"
|
||||
TOK_VARIABLE = 597, // "VARIABLE"
|
||||
TOK_DICT_ELEMENT = 598, // "Dictionary element"
|
||||
TOK_DICT_ELEMENT_REGEXP = 599 // "Dictionary element, selected by regexp"
|
||||
TOK_DICT_ELEMENT_WITH_EQUALS = 599, // "Dictionary element, with equals"
|
||||
TOK_DICT_ELEMENT_REGEXP = 600 // "Dictionary element, selected by regexp"
|
||||
};
|
||||
/// Backward compatibility alias (Bison 3.6).
|
||||
typedef token_kind_type yytokentype;
|
||||
|
@ -1331,7 +1333,7 @@ namespace yy {
|
|||
{
|
||||
enum symbol_kind_type
|
||||
{
|
||||
YYNTOKENS = 345, ///< Number of tokens.
|
||||
YYNTOKENS = 346, ///< Number of tokens.
|
||||
S_YYEMPTY = -2,
|
||||
S_YYEOF = 0, // "end of file"
|
||||
S_YYerror = 1, // error
|
||||
|
@ -1677,23 +1679,24 @@ namespace yy {
|
|||
S_RUN_TIME_VAR_TIME_YEAR = 341, // "RUN_TIME_VAR_TIME_YEAR"
|
||||
S_VARIABLE = 342, // "VARIABLE"
|
||||
S_DICT_ELEMENT = 343, // "Dictionary element"
|
||||
S_DICT_ELEMENT_REGEXP = 344, // "Dictionary element, selected by regexp"
|
||||
S_YYACCEPT = 345, // $accept
|
||||
S_input = 346, // input
|
||||
S_line = 347, // line
|
||||
S_audit_log = 348, // audit_log
|
||||
S_actions = 349, // actions
|
||||
S_actions_may_quoted = 350, // actions_may_quoted
|
||||
S_op = 351, // op
|
||||
S_op_before_init = 352, // op_before_init
|
||||
S_expression = 353, // expression
|
||||
S_variables = 354, // variables
|
||||
S_variables_pre_process = 355, // variables_pre_process
|
||||
S_variables_may_be_quoted = 356, // variables_may_be_quoted
|
||||
S_var = 357, // var
|
||||
S_act = 358, // act
|
||||
S_setvar_action = 359, // setvar_action
|
||||
S_run_time_string = 360 // run_time_string
|
||||
S_DICT_ELEMENT_WITH_EQUALS = 344, // "Dictionary element, with equals"
|
||||
S_DICT_ELEMENT_REGEXP = 345, // "Dictionary element, selected by regexp"
|
||||
S_YYACCEPT = 346, // $accept
|
||||
S_input = 347, // input
|
||||
S_line = 348, // line
|
||||
S_audit_log = 349, // audit_log
|
||||
S_actions = 350, // actions
|
||||
S_actions_may_quoted = 351, // actions_may_quoted
|
||||
S_op = 352, // op
|
||||
S_op_before_init = 353, // op_before_init
|
||||
S_expression = 354, // expression
|
||||
S_variables = 355, // variables
|
||||
S_variables_pre_process = 356, // variables_pre_process
|
||||
S_variables_may_be_quoted = 357, // variables_may_be_quoted
|
||||
S_var = 358, // var
|
||||
S_act = 359, // act
|
||||
S_setvar_action = 360, // setvar_action
|
||||
S_run_time_string = 361 // run_time_string
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -1927,6 +1930,7 @@ namespace yy {
|
|||
case symbol_kind::S_RUN_TIME_VAR_TIME_YEAR: // "RUN_TIME_VAR_TIME_YEAR"
|
||||
case symbol_kind::S_VARIABLE: // "VARIABLE"
|
||||
case symbol_kind::S_DICT_ELEMENT: // "Dictionary element"
|
||||
case symbol_kind::S_DICT_ELEMENT_WITH_EQUALS: // "Dictionary element, with equals"
|
||||
case symbol_kind::S_DICT_ELEMENT_REGEXP: // "Dictionary element, selected by regexp"
|
||||
value.move< std::string > (std::move (that.value));
|
||||
break;
|
||||
|
@ -2300,6 +2304,7 @@ switch (yykind)
|
|||
case symbol_kind::S_RUN_TIME_VAR_TIME_YEAR: // "RUN_TIME_VAR_TIME_YEAR"
|
||||
case symbol_kind::S_VARIABLE: // "VARIABLE"
|
||||
case symbol_kind::S_DICT_ELEMENT: // "Dictionary element"
|
||||
case symbol_kind::S_DICT_ELEMENT_WITH_EQUALS: // "Dictionary element, with equals"
|
||||
case symbol_kind::S_DICT_ELEMENT_REGEXP: // "Dictionary element, selected by regexp"
|
||||
value.template destroy< std::string > ();
|
||||
break;
|
||||
|
@ -7648,6 +7653,21 @@ switch (yykind)
|
|||
return symbol_type (token::TOK_DICT_ELEMENT, v, l);
|
||||
}
|
||||
#endif
|
||||
#if 201103L <= YY_CPLUSPLUS
|
||||
static
|
||||
symbol_type
|
||||
make_DICT_ELEMENT_WITH_EQUALS (std::string v, location_type l)
|
||||
{
|
||||
return symbol_type (token::TOK_DICT_ELEMENT_WITH_EQUALS, std::move (v), std::move (l));
|
||||
}
|
||||
#else
|
||||
static
|
||||
symbol_type
|
||||
make_DICT_ELEMENT_WITH_EQUALS (const std::string& v, const location_type& l)
|
||||
{
|
||||
return symbol_type (token::TOK_DICT_ELEMENT_WITH_EQUALS, v, l);
|
||||
}
|
||||
#endif
|
||||
#if 201103L <= YY_CPLUSPLUS
|
||||
static
|
||||
symbol_type
|
||||
|
@ -7993,7 +8013,7 @@ switch (yykind)
|
|||
/// Constants.
|
||||
enum
|
||||
{
|
||||
yylast_ = 3344, ///< Last index in yytable_.
|
||||
yylast_ = 3346, ///< Last index in yytable_.
|
||||
yynnts_ = 16, ///< Number of nonterminal symbols.
|
||||
yyfinal_ = 339 ///< Termination state number.
|
||||
};
|
||||
|
@ -8073,10 +8093,11 @@ switch (yykind)
|
|||
305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
|
||||
315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
|
||||
325, 326, 327, 328, 329, 330, 331, 332, 333, 334,
|
||||
335, 336, 337, 338, 339, 340, 341, 342, 343, 344
|
||||
335, 336, 337, 338, 339, 340, 341, 342, 343, 344,
|
||||
345
|
||||
};
|
||||
// Last valid token kind.
|
||||
const int code_max = 599;
|
||||
const int code_max = 600;
|
||||
|
||||
if (t <= 0)
|
||||
return symbol_kind::S_YYEOF;
|
||||
|
@ -8292,6 +8313,7 @@ switch (yykind)
|
|||
case symbol_kind::S_RUN_TIME_VAR_TIME_YEAR: // "RUN_TIME_VAR_TIME_YEAR"
|
||||
case symbol_kind::S_VARIABLE: // "VARIABLE"
|
||||
case symbol_kind::S_DICT_ELEMENT: // "Dictionary element"
|
||||
case symbol_kind::S_DICT_ELEMENT_WITH_EQUALS: // "Dictionary element, with equals"
|
||||
case symbol_kind::S_DICT_ELEMENT_REGEXP: // "Dictionary element, selected by regexp"
|
||||
value.copy< std::string > (YY_MOVE (that.value));
|
||||
break;
|
||||
|
@ -8551,6 +8573,7 @@ switch (yykind)
|
|||
case symbol_kind::S_RUN_TIME_VAR_TIME_YEAR: // "RUN_TIME_VAR_TIME_YEAR"
|
||||
case symbol_kind::S_VARIABLE: // "VARIABLE"
|
||||
case symbol_kind::S_DICT_ELEMENT: // "Dictionary element"
|
||||
case symbol_kind::S_DICT_ELEMENT_WITH_EQUALS: // "Dictionary element, with equals"
|
||||
case symbol_kind::S_DICT_ELEMENT_REGEXP: // "Dictionary element, selected by regexp"
|
||||
value.move< std::string > (YY_MOVE (s.value));
|
||||
break;
|
||||
|
@ -8646,7 +8669,7 @@ switch (yykind)
|
|||
}
|
||||
|
||||
} // yy
|
||||
#line 8650 "seclang-parser.hh"
|
||||
#line 8673 "seclang-parser.hh"
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -319,7 +319,8 @@ using namespace modsecurity::operators;
|
|||
%initial-action
|
||||
{
|
||||
// Initialize the initial location.
|
||||
@$.begin.filename = @$.end.filename = new std::string(driver.file);
|
||||
driver.m_filenames.push_back(driver.file);
|
||||
@$.begin.filename = @$.end.filename = &(driver.m_filenames.back());
|
||||
};
|
||||
%define parse.trace
|
||||
%define parse.error verbose
|
||||
|
@ -680,6 +681,7 @@ using namespace modsecurity::operators;
|
|||
RUN_TIME_VAR_TIME_YEAR "RUN_TIME_VAR_TIME_YEAR"
|
||||
VARIABLE "VARIABLE"
|
||||
DICT_ELEMENT "Dictionary element"
|
||||
DICT_ELEMENT_WITH_EQUALS "Dictionary element, with equals"
|
||||
DICT_ELEMENT_REGEXP "Dictionary element, selected by regexp"
|
||||
;
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -420,10 +420,8 @@ DICT_ELEMENT ([^\"|,\n \t}=]|([^\\]\\\"))+
|
|||
DICT_ELEMENT_WITH_PIPE [^ =\t"]+
|
||||
DICT_ELEMENT_NO_PIPE [^ =\|\t"]+
|
||||
DICT_ELEMENT_NO_MACRO ([^\"|,%{\n \t}=]|([^\\]\\\"))+
|
||||
DICT_ELEMENT_WITH_EQUALS ([^\"|,\n \t}]|([^\\]\\\"))+
|
||||
|
||||
DICT_ELEMENT_TWO [^\"\=, \t\r\n\\]*
|
||||
DICT_ELEMENT_TWO_QUOTED [^\"\'\=\r\n\\]*
|
||||
DICT_ELEMENT_TWO2 [A-Za-z_ -\%\{\.\}\-\/]+
|
||||
DIRECTIVE (?i:SecRule)
|
||||
DIRECTIVE_SECRULESCRIPT (?i:SecRuleScript)
|
||||
FREE_TEXT_NEW_LINE [^\"|\n]+
|
||||
|
@ -1068,7 +1066,7 @@ EQUALS_MINUS (?i:=\-)
|
|||
[\/]{DICT_ELEMENT_NO_PIPE}[\/][|] { BEGIN_PREVIOUS(); yyless(yyleng - 1); return p::make_DICT_ELEMENT_REGEXP(std::string(yytext, 1, yyleng-2), *driver.loc.back()); }
|
||||
['][\/]{DICT_ELEMENT_WITH_PIPE}[\/]['] { BEGIN_PREVIOUS(); yyless(yyleng - 0); return p::make_DICT_ELEMENT_REGEXP(std::string(yytext, 2, yyleng-4), *driver.loc.back()); }
|
||||
['][\/]{DICT_ELEMENT_WITH_PIPE}[\/]['][|] { BEGIN_PREVIOUS(); yyless(yyleng - 1); return p::make_DICT_ELEMENT_REGEXP(std::string(yytext, 2, yyleng-4), *driver.loc.back()); }
|
||||
{DICT_ELEMENT} { BEGIN_PREVIOUS(); return p::make_DICT_ELEMENT(yytext, *driver.loc.back()); }
|
||||
{DICT_ELEMENT_WITH_EQUALS} { BEGIN_PREVIOUS(); return p::make_DICT_ELEMENT(yytext, *driver.loc.back()); }
|
||||
|
||||
[\/]{DICT_ELEMENT_NO_PIPE}[\/][,] { BEGIN_PREVIOUS(); yyless(yyleng - 1); return p::make_DICT_ELEMENT_REGEXP(std::string(yytext, 1, yyleng-2), *driver.loc.back()); }
|
||||
['][\/]{DICT_ELEMENT_NO_PIPE}[\/]['][,] { BEGIN_PREVIOUS(); yyless(yyleng - 1); return p::make_DICT_ELEMENT_REGEXP(std::string(yytext, 2, yyleng-4), *driver.loc.back()); }
|
||||
|
@ -1257,7 +1255,8 @@ EQUALS_MINUS (?i:=\-)
|
|||
std::string err;
|
||||
std::string f = modsecurity::utils::find_resource(s, *driver.loc.back()->end.filename, &err);
|
||||
driver.loc.push_back(new yy::location());
|
||||
driver.loc.back()->begin.filename = driver.loc.back()->end.filename = new std::string(f);
|
||||
driver.m_filenames.push_back(f);
|
||||
driver.loc.back()->begin.filename = driver.loc.back()->end.filename = &(driver.m_filenames.back());
|
||||
yyin = fopen(f.c_str(), "r" );
|
||||
if (!yyin) {
|
||||
BEGIN(INITIAL);
|
||||
|
@ -1285,7 +1284,8 @@ EQUALS_MINUS (?i:=\-)
|
|||
for (auto& s: files) {
|
||||
std::string f = modsecurity::utils::find_resource(s, *driver.loc.back()->end.filename, &err);
|
||||
driver.loc.push_back(new yy::location());
|
||||
driver.loc.back()->begin.filename = driver.loc.back()->end.filename = new std::string(f);
|
||||
driver.m_filenames.push_back(f);
|
||||
driver.loc.back()->begin.filename = driver.loc.back()->end.filename = &(driver.m_filenames.back());
|
||||
|
||||
yyin = fopen(f.c_str(), "r" );
|
||||
if (!yyin) {
|
||||
|
@ -1314,7 +1314,8 @@ EQUALS_MINUS (?i:=\-)
|
|||
c.setKey(key);
|
||||
|
||||
driver.loc.push_back(new yy::location());
|
||||
driver.loc.back()->begin.filename = driver.loc.back()->end.filename = new std::string(url);
|
||||
driver.m_filenames.push_back(url);
|
||||
driver.loc.back()->begin.filename = driver.loc.back()->end.filename = &(driver.m_filenames.back());
|
||||
YY_BUFFER_STATE temp = YY_CURRENT_BUFFER;
|
||||
yypush_buffer_state(temp);
|
||||
|
||||
|
|
|
@ -29,17 +29,17 @@ std::string RuleMessage::_details(const RuleMessage *rm) {
|
|||
msg.append(" [file \"" + std::string(*rm->m_ruleFile.get()) + "\"]");
|
||||
msg.append(" [line \"" + std::to_string(rm->m_ruleLine) + "\"]");
|
||||
msg.append(" [id \"" + std::to_string(rm->m_ruleId) + "\"]");
|
||||
msg.append(" [rev \"" + rm->m_rev + "\"]");
|
||||
msg.append(" [rev \"" + utils::string::toHexIfNeeded(rm->m_rev, true) + "\"]");
|
||||
msg.append(" [msg \"" + rm->m_message + "\"]");
|
||||
msg.append(" [data \"" + utils::string::limitTo(200, rm->m_data) + "\"]");
|
||||
msg.append(" [data \"" + utils::string::toHexIfNeeded(utils::string::limitTo(200, rm->m_data), true) + "\"]");
|
||||
msg.append(" [severity \"" +
|
||||
std::to_string(rm->m_severity) + "\"]");
|
||||
msg.append(" [ver \"" + rm->m_ver + "\"]");
|
||||
msg.append(" [ver \"" + utils::string::toHexIfNeeded(rm->m_ver, true) + "\"]");
|
||||
msg.append(" [maturity \"" + std::to_string(rm->m_maturity) + "\"]");
|
||||
msg.append(" [accuracy \"" + std::to_string(rm->m_accuracy) + "\"]");
|
||||
|
||||
for (auto &a : rm->m_tags) {
|
||||
msg.append(" [tag \"" + a + "\"]");
|
||||
msg.append(" [tag \"" + utils::string::toHexIfNeeded(a, true) + "\"]");
|
||||
}
|
||||
|
||||
msg.append(" [hostname \"" + *rm->m_serverIpAddress.get() \
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
/*
|
||||
* ModSecurity, http://www.modsecurity.org/
|
||||
* Copyright (c) 2015 - 2021 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
* Copyright (c) 2015 - 2023 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
*
|
||||
* You may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
|
@ -101,11 +101,11 @@ namespace modsecurity {
|
|||
*/
|
||||
Transaction::Transaction(ModSecurity *ms, RulesSet *rules, void *logCbData)
|
||||
: m_creationTimeStamp(utils::cpu_seconds()),
|
||||
/* m_clientIpAddress(nullptr), */
|
||||
m_clientIpAddress(std::make_shared<std::string>("")),
|
||||
m_httpVersion(""),
|
||||
/* m_serverIpAddress(""), */
|
||||
m_serverIpAddress(std::make_shared<std::string>("")),
|
||||
m_uri(""),
|
||||
/* m_uri_no_query_string_decoded(""), */
|
||||
m_uri_no_query_string_decoded(std::make_shared<std::string>("")),
|
||||
m_ARGScombinedSizeDouble(0),
|
||||
m_clientPort(0),
|
||||
m_highestSeverityAction(255),
|
||||
|
@ -175,11 +175,11 @@ Transaction::Transaction(ModSecurity *ms, RulesSet *rules, void *logCbData)
|
|||
|
||||
Transaction::Transaction(ModSecurity *ms, RulesSet *rules, char *id, void *logCbData)
|
||||
: m_creationTimeStamp(utils::cpu_seconds()),
|
||||
/* m_clientIpAddress(""), */
|
||||
m_clientIpAddress(std::make_shared<std::string>("")),
|
||||
m_httpVersion(""),
|
||||
/* m_serverIpAddress(""), */
|
||||
m_serverIpAddress(std::make_shared<std::string>("")),
|
||||
m_uri(""),
|
||||
/* m_uri_no_query_string_decoded(""), */
|
||||
m_uri_no_query_string_decoded(std::make_shared<std::string>("")),
|
||||
m_ARGScombinedSizeDouble(0),
|
||||
m_clientPort(0),
|
||||
m_highestSeverityAction(255),
|
||||
|
@ -814,7 +814,8 @@ int Transaction::processRequestBody() {
|
|||
m_variableReqbodyError.set("1", 0);
|
||||
m_variableReqbodyErrorMsg.set("Request body excluding files is bigger than the maximum expected.", 0);
|
||||
m_variableInboundDataError.set("1", m_variableOffset);
|
||||
ms_dbg(5, "Request body excluding files is bigger than the maximum expected.");
|
||||
ms_dbg(5, "Request body excluding files is bigger than the maximum expected. Limit: " \
|
||||
+ std::to_string(m_rules->m_requestBodyNoFilesLimit.m_value));
|
||||
requestBodyNoFilesLimitExceeded = true;
|
||||
}
|
||||
}
|
||||
|
@ -901,7 +902,8 @@ int Transaction::processRequestBody() {
|
|||
m_variableReqbodyError.set("1", 0);
|
||||
m_variableReqbodyErrorMsg.set("Request body excluding files is bigger than the maximum expected.", 0);
|
||||
m_variableInboundDataError.set("1", m_variableOffset);
|
||||
ms_dbg(5, "Request body excluding files is bigger than the maximum expected.");
|
||||
ms_dbg(5, "Request body excluding files is bigger than the maximum expected. Limit: " \
|
||||
+ std::to_string(m_rules->m_requestBodyNoFilesLimit.m_value));
|
||||
} else {
|
||||
m_variableReqbodyError.set("0", m_variableOffset);
|
||||
m_variableReqbodyProcessorError.set("0", m_variableOffset);
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
/*
|
||||
* ModSecurity, http://www.modsecurity.org/
|
||||
* Copyright (c) 2015 - 2021 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
* Copyright (c) 2015 - 2022 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
*
|
||||
* You may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
|
@ -88,6 +88,10 @@ IpTree::~IpTree() {
|
|||
bool IpTree::addFromBuffer(std::istream *ss, std::string *error) {
|
||||
char *error_msg = NULL;
|
||||
for (std::string line; std::getline(*ss, line); ) {
|
||||
size_t comment_start = line.find('#');
|
||||
if (comment_start != std::string::npos) {
|
||||
line = line.substr(0, comment_start);
|
||||
}
|
||||
int res = add_ip_from_param(line.c_str(), &m_tree, &error_msg);
|
||||
if (res != 0) {
|
||||
if (error_msg != NULL) {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
/*
|
||||
* ModSecurity for Apache 2.x, http://www.modsecurity.org/
|
||||
* Copyright (c) 2015 - 2021 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
* Copyright (c) 2015 - 2023 Trustwave Holdings, Inc. (http://www.trustwave.com/)
|
||||
*
|
||||
* You may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
|
@ -259,6 +259,7 @@ int InsertNetmask(TreeNode *node, TreeNode *parent, TreeNode *new_node,
|
|||
|
||||
node->count++;
|
||||
node->netmasks = reinterpret_cast<unsigned char *>(malloc(node->count * sizeof(unsigned char)));
|
||||
memset(node->netmasks, 0, (node->count * sizeof(unsigned char)));
|
||||
|
||||
if(node->netmasks == NULL)
|
||||
return 0;
|
||||
|
@ -410,6 +411,7 @@ TreeNode *CPTAddElement(unsigned char *ipdata, unsigned int ip_bitmask, CPTTree
|
|||
node->count++;
|
||||
new_node = node;
|
||||
node->netmasks = reinterpret_cast<unsigned char *>(malloc(node->count * sizeof(unsigned char)));
|
||||
memset(node->netmasks, 0, (node->count * sizeof(unsigned char)));
|
||||
|
||||
if ((node->count -1) == 0) {
|
||||
node->netmasks[0] = netmask;
|
||||
|
@ -418,16 +420,16 @@ TreeNode *CPTAddElement(unsigned char *ipdata, unsigned int ip_bitmask, CPTTree
|
|||
|
||||
node->netmasks[node->count - 1] = netmask;
|
||||
|
||||
i = node->count - 2;
|
||||
while (i >= 0) {
|
||||
if (netmask < node->netmasks[i]) {
|
||||
node->netmasks[i + 1] = netmask;
|
||||
int index = node->count - 2;
|
||||
while (index >= 0) {
|
||||
if (netmask < node->netmasks[index]) {
|
||||
node->netmasks[index + 1] = netmask;
|
||||
break;
|
||||
}
|
||||
|
||||
node->netmasks[i + 1] = node->netmasks[i];
|
||||
node->netmasks[i] = netmask;
|
||||
i--;
|
||||
node->netmasks[index + 1] = node->netmasks[index];
|
||||
node->netmasks[index] = netmask;
|
||||
index--;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -481,6 +483,7 @@ TreeNode *CPTAddElement(unsigned char *ipdata, unsigned int ip_bitmask, CPTTree
|
|||
}
|
||||
|
||||
i_node->netmasks = reinterpret_cast<unsigned char *>(malloc((node->count - i) * sizeof(unsigned char)));
|
||||
memset(i_node->netmasks, 0, ((node->count - i) * sizeof(unsigned char)));
|
||||
|
||||
if(i_node->netmasks == NULL) {
|
||||
free(new_node->prefix);
|
||||
|
|
|
@ -73,6 +73,7 @@ Regex::Regex(const std::string& pattern_, bool ignoreCase)
|
|||
PCRE2_SIZE erroroffset = 0;
|
||||
m_pc = pcre2_compile(pcre2_pattern, PCRE2_ZERO_TERMINATED,
|
||||
pcre2_options, &errornumber, &erroroffset, NULL);
|
||||
m_pcje = pcre2_jit_compile(m_pc, PCRE2_JIT_COMPLETE);
|
||||
#else
|
||||
const char *errptr = NULL;
|
||||
int erroffset;
|
||||
|
@ -111,15 +112,22 @@ Regex::~Regex() {
|
|||
|
||||
std::list<SMatch> Regex::searchAll(const std::string& s) const {
|
||||
std::list<SMatch> retList;
|
||||
int rc;
|
||||
int rc = 0;
|
||||
#ifdef WITH_PCRE2
|
||||
PCRE2_SPTR pcre2_s = reinterpret_cast<PCRE2_SPTR>(s.c_str());
|
||||
PCRE2_SIZE offset = 0;
|
||||
|
||||
pcre2_match_data *match_data = pcre2_match_data_create_from_pattern(m_pc, NULL);
|
||||
do {
|
||||
rc = pcre2_match(m_pc, pcre2_s, s.length(),
|
||||
offset, 0, match_data, NULL);
|
||||
if (m_pcje == 0) {
|
||||
rc = pcre2_jit_match(m_pc, pcre2_s, s.length(),
|
||||
offset, 0, match_data, NULL);
|
||||
}
|
||||
|
||||
if (m_pcje != 0 || rc == PCRE2_ERROR_JIT_STACKLIMIT) {
|
||||
rc = pcre2_match(m_pc, pcre2_s, s.length(),
|
||||
offset, PCRE2_NO_JIT, match_data, NULL);
|
||||
}
|
||||
PCRE2_SIZE *ovector = pcre2_get_ovector_pointer(match_data);
|
||||
#else
|
||||
const char *subject = s.c_str();
|
||||
|
@ -159,7 +167,14 @@ bool Regex::searchOneMatch(const std::string& s, std::vector<SMatchCapture>& cap
|
|||
#ifdef WITH_PCRE2
|
||||
PCRE2_SPTR pcre2_s = reinterpret_cast<PCRE2_SPTR>(s.c_str());
|
||||
pcre2_match_data *match_data = pcre2_match_data_create_from_pattern(m_pc, NULL);
|
||||
int rc = pcre2_match(m_pc, pcre2_s, s.length(), 0, 0, match_data, NULL);
|
||||
int rc = 0;
|
||||
if (m_pcje == 0) {
|
||||
rc = pcre2_jit_match(m_pc, pcre2_s, s.length(), 0, 0, match_data, NULL);
|
||||
}
|
||||
|
||||
if (m_pcje != 0 || rc == PCRE2_ERROR_JIT_STACKLIMIT) {
|
||||
rc = pcre2_match(m_pc, pcre2_s, s.length(), 0, PCRE2_NO_JIT, match_data, NULL);
|
||||
}
|
||||
PCRE2_SIZE *ovector = pcre2_get_ovector_pointer(match_data);
|
||||
#else
|
||||
const char *subject = s.c_str();
|
||||
|
@ -198,7 +213,7 @@ bool Regex::searchGlobal(const std::string& s, std::vector<SMatchCapture>& captu
|
|||
pcre2_options = PCRE2_NOTEMPTY_ATSTART | PCRE2_ANCHORED;
|
||||
}
|
||||
int rc = pcre2_match(m_pc, pcre2_s, s.length(),
|
||||
startOffset, pcre2_options, match_data, NULL);
|
||||
startOffset, pcre2_options, match_data, NULL);
|
||||
PCRE2_SIZE *ovector = pcre2_get_ovector_pointer(match_data);
|
||||
|
||||
#else
|
||||
|
@ -270,9 +285,16 @@ int Regex::search(const std::string& s, SMatch *match) const {
|
|||
#ifdef WITH_PCRE2
|
||||
PCRE2_SPTR pcre2_s = reinterpret_cast<PCRE2_SPTR>(s.c_str());
|
||||
pcre2_match_data *match_data = pcre2_match_data_create_from_pattern(m_pc, NULL);
|
||||
int ret = pcre2_match(m_pc, pcre2_s, s.length(),
|
||||
0, 0, match_data, NULL) > 0;
|
||||
|
||||
int ret = 0;
|
||||
if (m_pcje == 0) {
|
||||
ret = pcre2_match(m_pc, pcre2_s, s.length(),
|
||||
0, 0, match_data, NULL) > 0;
|
||||
}
|
||||
|
||||
if (m_pcje != 0 || ret == PCRE2_ERROR_JIT_STACKLIMIT) {
|
||||
ret = pcre2_match(m_pc, pcre2_s, s.length(),
|
||||
0, PCRE2_NO_JIT, match_data, NULL) > 0;
|
||||
}
|
||||
if (ret > 0) { // match
|
||||
PCRE2_SIZE *ovector = pcre2_get_ovector_pointer(match_data);
|
||||
#else
|
||||
|
@ -297,7 +319,14 @@ int Regex::search(const std::string& s) const {
|
|||
#ifdef WITH_PCRE2
|
||||
PCRE2_SPTR pcre2_s = reinterpret_cast<PCRE2_SPTR>(s.c_str());
|
||||
pcre2_match_data *match_data = pcre2_match_data_create_from_pattern(m_pc, NULL);
|
||||
int rc = pcre2_match(m_pc, pcre2_s, s.length(), 0, 0, match_data, NULL);
|
||||
int rc = 0;
|
||||
if (m_pcje == 0) {
|
||||
rc = pcre2_jit_match(m_pc, pcre2_s, s.length(), 0, 0, match_data, NULL);
|
||||
}
|
||||
|
||||
if (m_pcje != 0 || rc == PCRE2_ERROR_JIT_STACKLIMIT) {
|
||||
rc = pcre2_match(m_pc, pcre2_s, s.length(), 0, PCRE2_NO_JIT, match_data, NULL);
|
||||
}
|
||||
pcre2_match_data_free(match_data);
|
||||
if (rc > 0) {
|
||||
return 1; // match
|
||||
|
|
|
@ -85,6 +85,7 @@ class Regex {
|
|||
private:
|
||||
#if WITH_PCRE2
|
||||
pcre2_code *m_pc;
|
||||
int m_pcje;
|
||||
#else
|
||||
pcre *m_pc = NULL;
|
||||
pcre_extra *m_pce = NULL;
|
||||
|
|
|
@ -135,13 +135,14 @@ std::string string_to_hex(const std::string& input) {
|
|||
return output;
|
||||
}
|
||||
|
||||
|
||||
std::string toHexIfNeeded(const std::string &str) {
|
||||
std::string toHexIfNeeded(const std::string &str, bool escape_spec) {
|
||||
// escape_spec: escape special chars or not
|
||||
// spec chars: '"' (quotation mark, ascii 34), '\' (backslash, ascii 92)
|
||||
std::stringstream res;
|
||||
|
||||
for (int i = 0; i < str.size(); i++) {
|
||||
int c = (unsigned char)str.at(i);
|
||||
if (c < 32 || c > 126) {
|
||||
if (c < 32 || c > 126 || (escape_spec == true && (c == 34 || c == 92))) {
|
||||
res << "\\x" << std::setw(2) << std::setfill('0') << std::hex << c;
|
||||
} else {
|
||||
res << str.at(i);
|
||||
|
@ -267,7 +268,6 @@ void replaceAll(std::string *str, const std::string& from,
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
} // namespace string
|
||||
} // namespace utils
|
||||
} // namespace modsecurity
|
||||
|
|
|
@ -61,7 +61,7 @@ std::string dash_if_empty(const std::string *str);
|
|||
std::string limitTo(int amount, const std::string &str);
|
||||
std::string removeBracketsIfNeeded(std::string a);
|
||||
std::string string_to_hex(const std::string& input);
|
||||
std::string toHexIfNeeded(const std::string &str);
|
||||
std::string toHexIfNeeded(const std::string &str, bool escape_spec = false);
|
||||
std::string tolower(std::string str);
|
||||
std::string toupper(std::string str);
|
||||
std::vector<std::string> ssplit(std::string str, char delimiter);
|
||||
|
|
|
@ -60,7 +60,7 @@ ctunullpointer:src/rule_with_operator.cc:135
|
|||
ctunullpointer:src/rule_with_operator.cc:95
|
||||
passedByValue:src/variables/global.h:109
|
||||
passedByValue:src/variables/global.h:110
|
||||
passedByValue:src/parser/driver.cc:45
|
||||
passedByValue:src/parser/driver.cc:46
|
||||
passedByValue:test/common/modsecurity_test.cc:49
|
||||
passedByValue:test/common/modsecurity_test.cc:98
|
||||
unreadVariable:src/rule_with_operator.cc:219
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
127.0.0.1
|
||||
# Comment line
|
||||
10.10.10.1
|
||||
::1
|
||||
200.249.12.31
|
||||
|
|
|
@ -36,7 +36,7 @@
|
|||
},
|
||||
"rules":[
|
||||
"SecRuleEngine On",
|
||||
"SecRemoteRules key https://gist.githubusercontent.com/zimmerle/a4c1ec028999f7df71d0cc80f4f271ca/raw/4c74363bf4eae974180f1a82007196e58729dd16/modsecurity-regression-test-secremoterules.txt",
|
||||
"SecRemoteRules key https://gist.githubusercontent.com/martinhsv/20705a36b7cfa8ff6d0dee0d4efce7e7/raw/faa96c7838b1fe972c1f0881efacbb440f9a4a5e/modsecurity-regression-rules.txt",
|
||||
"SecRule ARGS \"@contains somethingelse\" \"id:9,pass,t:trim\""
|
||||
]
|
||||
},
|
||||
|
|
|
@ -129,7 +129,7 @@
|
|||
},
|
||||
"rules":[
|
||||
"SecRuleEngine On",
|
||||
"SecRule REMOTE_ADDR \"@ipMatchFromFile https://www.modsecurity.org/modsecurity-regression-test.txt\" \"id:1,phase:3,pass,t:trim\""
|
||||
"SecRule REMOTE_ADDR \"@ipMatchFromFile https://gist.githubusercontent.com/martinhsv/20705a36b7cfa8ff6d0dee0d4efce7e7/raw/b9321f190eb0e81b98cb65a56db3d7e0a4f59314/modsecurity-regression-ip-list.txt\" \"id:1,phase:3,pass,t:trim\""
|
||||
]
|
||||
}
|
||||
]
|
||||
|
|
|
@ -0,0 +1,46 @@
|
|||
[
|
||||
{
|
||||
"enabled":1,
|
||||
"version_min":300000,
|
||||
"title":"Testing XPath expression with equals sign",
|
||||
"expected":{
|
||||
"http_code": 403
|
||||
},
|
||||
"client":{
|
||||
"ip":"200.249.12.31",
|
||||
"port":123
|
||||
},
|
||||
"request":{
|
||||
"headers":{
|
||||
"Host":"localhost",
|
||||
"User-Agent":"curl/7.38.0",
|
||||
"Accept":"*/*",
|
||||
"Content-Type": "text/xml"
|
||||
},
|
||||
"uri":"/?key=value&key=other_value",
|
||||
"method":"POST",
|
||||
"body": [
|
||||
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
|
||||
"<!DOCTYPE author [",
|
||||
"<!ELEMENT book ANY>",
|
||||
"<!ENTITY js SYSTEM \"/etc/passwd\">",
|
||||
"]>",
|
||||
"<bookstore>",
|
||||
"<some-tag>aaa</some-tag><some-tag>bbb</some-tag>",
|
||||
"</bookstore>"
|
||||
]
|
||||
|
||||
},
|
||||
"server":{
|
||||
"ip":"200.249.12.31",
|
||||
"port":80
|
||||
},
|
||||
"rules":[
|
||||
"SecRuleEngine On",
|
||||
"SecRequestBodyAccess On",
|
||||
"SecRule REQUEST_HEADERS:Content-Type \"^text/xml$\" \"id:500011,phase:1,t:none,t:lowercase,nolog,pass,ctl:requestBodyProcessor=XML\"",
|
||||
"SecRule XML://bookstore/*[local-name()='some-tag'] \"bbb\" \"id:500012,phase:3,t:none,t:lowercase,log,deny,status:403\""
|
||||
]
|
||||
}
|
||||
]
|
||||
|
|
@ -8,7 +8,7 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
cc: ["gcc", "clang"]
|
||||
luaVersion: ["5.1", "5.2", "5.3", "luajit", "luajit-openresty"]
|
||||
luaVersion: ["5.1", "5.2", "5.3", "5.4", "luajit", "luajit-openresty"]
|
||||
include:
|
||||
- luaVersion: "luajit"
|
||||
runtestArgs: "LUA_INCLUDE_DIR=.lua/include/luajit-2.1"
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
sudo: required
|
||||
dist: trusty
|
||||
dist: Focal
|
||||
|
||||
os: linux
|
||||
|
||||
|
@ -7,7 +7,6 @@ language: c
|
|||
|
||||
compiler:
|
||||
- gcc
|
||||
- clang
|
||||
|
||||
addons:
|
||||
apt:
|
||||
|
@ -18,6 +17,7 @@ addons:
|
|||
- libipc-run3-perl
|
||||
- lua5.1
|
||||
- lua5.1-dev
|
||||
- cmake
|
||||
|
||||
cache:
|
||||
apt: true
|
||||
|
@ -31,6 +31,7 @@ env:
|
|||
- LUAJIT=1 LUA_DIR=/usr/local LUA_INCLUDE_DIR=$LUA_DIR/include/luajit-2.1 LUA_SUFFIX=--lua-suffix=jit
|
||||
|
||||
install:
|
||||
- sudo ln -s /usr/bin/cmake /usr/local/bin/cmake
|
||||
- if [ -n "$LUAJIT" ]; then git clone -b v2.1-agentzh https://github.com/openresty/luajit2.git; fi
|
||||
- if [ -n "$LUAJIT" ]; then cd ./luajit2; fi
|
||||
- if [ -n "$LUAJIT" ]; then make -j$JOBS CCDEBUG=-g Q= PREFIX=$LUAJIT_PREFIX CC=$CC XCFLAGS='-DLUA_USE_APICHECK -DLUA_USE_ASSERT' > build.log 2>&1 || (cat build.log && exit 1); fi
|
||||
|
@ -47,8 +48,14 @@ install:
|
|||
- cd ..
|
||||
|
||||
script:
|
||||
- cppcheck -i ./luajit2 -i --force --error-exitcode=1 --enable=warning . > build.log 2>&1 || (cat build.log && exit 1)
|
||||
- sh runtests.sh
|
||||
- cppcheck -i ./luajit2 --force --error-exitcode=1 --enable=warning . > build.log 2>&1 || (cat build.log && exit 1)
|
||||
- bash runtests.sh
|
||||
- make
|
||||
- prove -Itests tests
|
||||
- TEST_LUA_USE_VALGRIND=1 prove -Itests tests > build.log 2>&1; export e=$?
|
||||
- cat build.log
|
||||
- grep -E '^==[0-9]+==' build.log; if [ "$?" == 0 ]; then exit 1; else exit $e; fi
|
||||
- cmake -DUSE_INTERNAL_FPCONV=1 .
|
||||
- make
|
||||
- prove -Itests tests
|
||||
- TEST_LUA_USE_VALGRIND=1 prove -Itests tests > build.log 2>&1; export e=$?
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
# Windows: set LUA_DIR=c:\lua51
|
||||
|
||||
project(lua-cjson C)
|
||||
cmake_minimum_required(VERSION 2.6)
|
||||
cmake_minimum_required(VERSION 2.8.12)
|
||||
|
||||
option(USE_INTERNAL_FPCONV "Use internal strtod() / g_fmt() code for performance")
|
||||
option(MULTIPLE_THREADS "Support multi-threaded apps with internal fpconv - recommended" ON)
|
||||
|
|
|
@ -165,7 +165,7 @@ encode_escape_forward_slash
|
|||
|
||||
**default:** true
|
||||
|
||||
If enabled, forward slash '/' will be encoded as '\/'.
|
||||
If enabled, forward slash '/' will be encoded as '\\/'.
|
||||
|
||||
If disabled, forward slash '/' will be encoded as '/' (no escape is applied).
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -24,7 +24,7 @@
|
|||
#define IEEE_8087
|
||||
#endif
|
||||
|
||||
#define MALLOC(n) xmalloc(n)
|
||||
#define MALLOC xmalloc
|
||||
|
||||
static void *xmalloc(size_t size)
|
||||
{
|
||||
|
@ -50,6 +50,10 @@ static pthread_mutex_t private_dtoa_lock[2] = {
|
|||
PTHREAD_MUTEX_INITIALIZER
|
||||
};
|
||||
|
||||
#define dtoa_get_threadno pthread_self
|
||||
void
|
||||
set_max_dtoa_threads(unsigned int n);
|
||||
|
||||
#define ACQUIRE_DTOA_LOCK(n) do { \
|
||||
int r = pthread_mutex_lock(&private_dtoa_lock[n]); \
|
||||
if (r) { \
|
||||
|
|
|
@ -55,7 +55,7 @@ static char locale_decimal_point = '.';
|
|||
* localconv() may not be thread safe (=>crash), and nl_langinfo() is
|
||||
* not supported on some platforms. Use sprintf() instead - if the
|
||||
* locale does change, at least Lua CJSON won't crash. */
|
||||
static void fpconv_update_locale()
|
||||
static void fpconv_update_locale(void)
|
||||
{
|
||||
char buf[8];
|
||||
|
||||
|
@ -202,7 +202,7 @@ int fpconv_g_fmt(char *str, double num, int precision)
|
|||
return len;
|
||||
}
|
||||
|
||||
void fpconv_init()
|
||||
void fpconv_init(void)
|
||||
{
|
||||
fpconv_update_locale();
|
||||
}
|
||||
|
|
|
@ -7,12 +7,22 @@
|
|||
# define FPCONV_G_FMT_BUFSIZE 32
|
||||
|
||||
#ifdef USE_INTERNAL_FPCONV
|
||||
#ifdef MULTIPLE_THREADS
|
||||
#include "dtoa_config.h"
|
||||
#include <unistd.h>
|
||||
static inline void fpconv_init()
|
||||
{
|
||||
// Add one to try and avoid core id multiplier alignment
|
||||
set_max_dtoa_threads((sysconf(_SC_NPROCESSORS_CONF) + 1) * 3);
|
||||
}
|
||||
#else
|
||||
static inline void fpconv_init()
|
||||
{
|
||||
/* Do nothing - not required */
|
||||
}
|
||||
#endif
|
||||
#else
|
||||
extern void fpconv_init();
|
||||
extern void fpconv_init(void);
|
||||
#endif
|
||||
|
||||
extern int fpconv_g_fmt(char*, double, int);
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
package = "lua-cjson"
|
||||
version = "2.1.0.9-1"
|
||||
version = "2.1.0.11-1"
|
||||
|
||||
source = {
|
||||
url = "git+https://github.com/openresty/lua-cjson",
|
||||
tag = "2.1.0.9",
|
||||
tag = "2.1.0.11",
|
||||
}
|
||||
|
||||
description = {
|
||||
|
@ -34,6 +34,9 @@ build = {
|
|||
-- Uncomment the line below on Solaris platforms if required.
|
||||
-- "USE_INTERNAL_ISINF"
|
||||
}
|
||||
},
|
||||
["cjson.safe"] = {
|
||||
sources = { "lua_cjson.c", "strbuf.c", "fpconv.c" }
|
||||
}
|
||||
},
|
||||
install = {
|
|
@ -52,7 +52,7 @@
|
|||
#endif
|
||||
|
||||
#ifndef CJSON_VERSION
|
||||
#define CJSON_VERSION "2.1.0.9"
|
||||
#define CJSON_VERSION "2.1.0.11"
|
||||
#endif
|
||||
|
||||
#ifdef _MSC_VER
|
||||
|
@ -82,6 +82,7 @@
|
|||
#define DEFAULT_ENCODE_EMPTY_TABLE_AS_OBJECT 1
|
||||
#define DEFAULT_DECODE_ARRAY_WITH_ARRAY_MT 0
|
||||
#define DEFAULT_ENCODE_ESCAPE_FORWARD_SLASH 1
|
||||
#define DEFAULT_ENCODE_SKIP_UNSUPPORTED_VALUE_TYPES 0
|
||||
|
||||
#ifdef DISABLE_INVALID_NUMBERS
|
||||
#undef DEFAULT_DECODE_INVALID_NUMBERS
|
||||
|
@ -165,6 +166,7 @@ typedef struct {
|
|||
int decode_invalid_numbers;
|
||||
int decode_max_depth;
|
||||
int decode_array_with_array_mt;
|
||||
int encode_skip_unsupported_value_types;
|
||||
} json_config_t;
|
||||
|
||||
typedef struct {
|
||||
|
@ -356,6 +358,16 @@ static int json_cfg_decode_array_with_array_mt(lua_State *l)
|
|||
return 1;
|
||||
}
|
||||
|
||||
/* Configure how to treat invalid types */
|
||||
static int json_cfg_encode_skip_unsupported_value_types(lua_State *l)
|
||||
{
|
||||
json_config_t *cfg = json_arg_init(l, 1);
|
||||
|
||||
json_enum_option(l, 1, &cfg->encode_skip_unsupported_value_types, NULL, 1);
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
/* Configures JSON encoding buffer persistence */
|
||||
static int json_cfg_encode_keep_buffer(lua_State *l)
|
||||
{
|
||||
|
@ -463,6 +475,7 @@ static void json_create_config(lua_State *l)
|
|||
cfg->encode_empty_table_as_object = DEFAULT_ENCODE_EMPTY_TABLE_AS_OBJECT;
|
||||
cfg->decode_array_with_array_mt = DEFAULT_DECODE_ARRAY_WITH_ARRAY_MT;
|
||||
cfg->encode_escape_forward_slash = DEFAULT_ENCODE_ESCAPE_FORWARD_SLASH;
|
||||
cfg->encode_skip_unsupported_value_types = DEFAULT_ENCODE_SKIP_UNSUPPORTED_VALUE_TYPES;
|
||||
|
||||
#if DEFAULT_ENCODE_KEEP_BUFFER > 0
|
||||
strbuf_init(&cfg->encode_buf, 0);
|
||||
|
@ -627,7 +640,7 @@ static void json_check_encode_depth(lua_State *l, json_config_t *cfg,
|
|||
current_depth);
|
||||
}
|
||||
|
||||
static void json_append_data(lua_State *l, json_config_t *cfg,
|
||||
static int json_append_data(lua_State *l, json_config_t *cfg,
|
||||
int current_depth, strbuf_t *json);
|
||||
|
||||
/* json_append_array args:
|
||||
|
@ -637,19 +650,24 @@ static void json_append_data(lua_State *l, json_config_t *cfg,
|
|||
static void json_append_array(lua_State *l, json_config_t *cfg, int current_depth,
|
||||
strbuf_t *json, int array_length)
|
||||
{
|
||||
int comma, i;
|
||||
int comma, i, json_pos, err;
|
||||
|
||||
strbuf_append_char(json, '[');
|
||||
|
||||
comma = 0;
|
||||
for (i = 1; i <= array_length; i++) {
|
||||
if (comma)
|
||||
json_pos = strbuf_length(json);
|
||||
if (comma++ > 0)
|
||||
strbuf_append_char(json, ',');
|
||||
else
|
||||
comma = 1;
|
||||
|
||||
lua_rawgeti(l, -1, i);
|
||||
json_append_data(l, cfg, current_depth, json);
|
||||
err = json_append_data(l, cfg, current_depth, json);
|
||||
if (err) {
|
||||
strbuf_set_length(json, json_pos);
|
||||
if (comma == 1) {
|
||||
comma = 0;
|
||||
}
|
||||
}
|
||||
lua_pop(l, 1);
|
||||
}
|
||||
|
||||
|
@ -697,7 +715,7 @@ static void json_append_number(lua_State *l, json_config_t *cfg,
|
|||
static void json_append_object(lua_State *l, json_config_t *cfg,
|
||||
int current_depth, strbuf_t *json)
|
||||
{
|
||||
int comma, keytype;
|
||||
int comma, keytype, json_pos, err;
|
||||
|
||||
/* Object */
|
||||
strbuf_append_char(json, '{');
|
||||
|
@ -706,10 +724,9 @@ static void json_append_object(lua_State *l, json_config_t *cfg,
|
|||
/* table, startkey */
|
||||
comma = 0;
|
||||
while (lua_next(l, -2) != 0) {
|
||||
if (comma)
|
||||
json_pos = strbuf_length(json);
|
||||
if (comma++ > 0)
|
||||
strbuf_append_char(json, ',');
|
||||
else
|
||||
comma = 1;
|
||||
|
||||
/* table, key, value */
|
||||
keytype = lua_type(l, -2);
|
||||
|
@ -727,7 +744,14 @@ static void json_append_object(lua_State *l, json_config_t *cfg,
|
|||
}
|
||||
|
||||
/* table, key, value */
|
||||
json_append_data(l, cfg, current_depth, json);
|
||||
err = json_append_data(l, cfg, current_depth, json);
|
||||
if (err) {
|
||||
strbuf_set_length(json, json_pos);
|
||||
if (comma == 1) {
|
||||
comma = 0;
|
||||
}
|
||||
}
|
||||
|
||||
lua_pop(l, 1);
|
||||
/* table, key */
|
||||
}
|
||||
|
@ -735,8 +759,8 @@ static void json_append_object(lua_State *l, json_config_t *cfg,
|
|||
strbuf_append_char(json, '}');
|
||||
}
|
||||
|
||||
/* Serialise Lua data into JSON string. */
|
||||
static void json_append_data(lua_State *l, json_config_t *cfg,
|
||||
/* Serialise Lua data into JSON string. Return 1 if error an error happened, else 0 */
|
||||
static int json_append_data(lua_State *l, json_config_t *cfg,
|
||||
int current_depth, strbuf_t *json)
|
||||
{
|
||||
int len;
|
||||
|
@ -800,16 +824,22 @@ static void json_append_data(lua_State *l, json_config_t *cfg,
|
|||
case LUA_TLIGHTUSERDATA:
|
||||
if (lua_touserdata(l, -1) == NULL) {
|
||||
strbuf_append_mem(json, "null", 4);
|
||||
} else if (lua_touserdata(l, -1) == &json_array) {
|
||||
} else if (lua_touserdata(l, -1) == json_lightudata_mask(&json_array)) {
|
||||
json_append_array(l, cfg, current_depth, json, 0);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
/* Remaining types (LUA_TFUNCTION, LUA_TUSERDATA, LUA_TTHREAD,
|
||||
* and LUA_TLIGHTUSERDATA) cannot be serialised */
|
||||
json_encode_exception(l, cfg, json, -1, "type not supported");
|
||||
if (cfg->encode_skip_unsupported_value_types) {
|
||||
return 1;
|
||||
} else {
|
||||
json_encode_exception(l, cfg, json, -1, "type not supported");
|
||||
}
|
||||
|
||||
/* never returns */
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int json_encode(lua_State *l)
|
||||
|
@ -1479,6 +1509,7 @@ static int lua_cjson_new(lua_State *l)
|
|||
{ "encode_invalid_numbers", json_cfg_encode_invalid_numbers },
|
||||
{ "decode_invalid_numbers", json_cfg_decode_invalid_numbers },
|
||||
{ "encode_escape_forward_slash", json_cfg_encode_escape_forward_slash },
|
||||
{ "encode_skip_unsupported_value_types", json_cfg_encode_skip_unsupported_value_types },
|
||||
{ "new", lua_cjson_new },
|
||||
{ NULL, NULL }
|
||||
};
|
||||
|
|
|
@ -67,6 +67,16 @@ if [ -z "$SKIP_CMAKE" ]; then
|
|||
cp -r lua/cjson build/cjson.so tests
|
||||
do_tests
|
||||
rm -rf build tests/cjson{,.so}
|
||||
|
||||
echo "===== Testing Cmake fpconv build ====="
|
||||
mkdir build
|
||||
cd build
|
||||
cmake -DUSE_INTERNAL_FPCONV=1 ..
|
||||
make
|
||||
cd ..
|
||||
cp -r lua/cjson build/cjson.so tests
|
||||
do_tests
|
||||
rm -rf build tests/cjson{,.so}
|
||||
else
|
||||
echo "===== Skipping Cmake build ====="
|
||||
fi
|
||||
|
|
|
@ -150,7 +150,7 @@ static int calculate_new_size(strbuf_t *s, int len)
|
|||
/* Exponential sizing */
|
||||
while (newsize < reqsize)
|
||||
newsize *= -s->increment;
|
||||
} else {
|
||||
} else if (s->increment != 0) {
|
||||
/* Linear sizing */
|
||||
newsize = ((newsize + s->increment - 1) / s->increment) * s->increment;
|
||||
}
|
||||
|
|
|
@ -70,6 +70,7 @@ static char *strbuf_string(strbuf_t *s, int *len);
|
|||
static void strbuf_ensure_empty_length(strbuf_t *s, int len);
|
||||
static char *strbuf_empty_ptr(strbuf_t *s);
|
||||
static void strbuf_extend_length(strbuf_t *s, int len);
|
||||
static void strbuf_set_length(strbuf_t *s, int len);
|
||||
|
||||
/* Update */
|
||||
extern void strbuf_append_fmt(strbuf_t *s, int len, const char *fmt, ...);
|
||||
|
@ -108,6 +109,11 @@ static inline char *strbuf_empty_ptr(strbuf_t *s)
|
|||
return s->buf + s->length;
|
||||
}
|
||||
|
||||
static inline void strbuf_set_length(strbuf_t *s, int len)
|
||||
{
|
||||
s->length = len;
|
||||
}
|
||||
|
||||
static inline void strbuf_extend_length(strbuf_t *s, int len)
|
||||
{
|
||||
s->length += len;
|
||||
|
|
|
@ -306,3 +306,29 @@ print(b)
|
|||
{"test":"http:\/\/google.com\/google"}
|
||||
{"test":"http://google.com/google"}
|
||||
{"test":"http:\/\/google.com\/google"}
|
||||
|
||||
|
||||
|
||||
=== TEST 22: disable error on invalid type
|
||||
--- lua
|
||||
local cjson = require "cjson"
|
||||
local f = function (x) return 2*x end
|
||||
local res, err = pcall(cjson.encode, f)
|
||||
print(err)
|
||||
local t = {f = f, valid = "valid"}
|
||||
local res, err = pcall(cjson.encode, t)
|
||||
print(err)
|
||||
local arr = {"one", "two", f, "three"}
|
||||
local res, err = pcall(cjson.encode, arr)
|
||||
print(err)
|
||||
cjson.encode_skip_unsupported_value_types(true)
|
||||
print(cjson.encode(f))
|
||||
print(cjson.encode(t))
|
||||
print(cjson.encode(arr))
|
||||
--- out
|
||||
Cannot serialise function: type not supported
|
||||
Cannot serialise function: type not supported
|
||||
Cannot serialise function: type not supported
|
||||
|
||||
{"valid":"valid"}
|
||||
["one","two","three"]
|
||||
|
|
|
@ -93,7 +93,7 @@ local cjson_tests = {
|
|||
-- Test API variables
|
||||
{ "Check module name, version",
|
||||
function () return json._NAME, json._VERSION end, { },
|
||||
true, { "cjson", "2.1.0.9" } },
|
||||
true, { "cjson", "2.1.0.11" } },
|
||||
|
||||
-- Test decoding simple types
|
||||
{ "Decode string",
|
||||
|
|
|
@ -64,8 +64,8 @@ before_install:
|
|||
- sudo apt install --only-upgrade ca-certificates
|
||||
- '! grep -n -P ''(?<=.{80}).+'' --color `find src -name ''*.c''` `find . -name ''*.h''` || (echo "ERROR: Found C source lines exceeding 80 columns." > /dev/stderr; exit 1)'
|
||||
- '! grep -n -P ''\t+'' --color `find src -name ''*.c''` `find . -name ''*.h''` || (echo "ERROR: Cannot use tabs." > /dev/stderr; exit 1)'
|
||||
- sudo cpanm --notest Test::Nginx IPC::Run > build.log 2>&1 || (cat build.log && exit 1)
|
||||
|
||||
- /usr/bin/env perl $(command -v cpanm) --sudo --notest Test::Nginx IPC::Run > build.log 2>&1 || (cat build.log && exit 1)
|
||||
- pyenv global 2.7
|
||||
install:
|
||||
- if [ ! -f download-cache/drizzle7-$DRIZZLE_VER.tar.gz ]; then wget -P download-cache http://openresty.org/download/drizzle7-$DRIZZLE_VER.tar.gz; fi
|
||||
- if [ ! -f download-cache/pcre-$PCRE_VER.tar.gz ]; then wget -P download-cache https://downloads.sourceforge.net/project/pcre/pcre/${PCRE_VER}/pcre-${PCRE_VER}.tar.gz; fi
|
||||
|
@ -143,4 +143,4 @@ script:
|
|||
- dig +short myip.opendns.com @resolver1.opendns.com || exit 0
|
||||
- dig +short @$TEST_NGINX_RESOLVER openresty.org || exit 0
|
||||
- dig +short @$TEST_NGINX_RESOLVER agentzh.org || exit 0
|
||||
- prove -I. -Itest-nginx/lib -r t/
|
||||
- /usr/bin/env perl $(command -v prove) -I. -Itest-nginx/lib -r t/
|
||||
|
|
|
@ -8200,7 +8200,7 @@ tcpsock:setoption
|
|||
|
||||
**context:** *rewrite_by_lua*, access_by_lua*, content_by_lua*, ngx.timer.*, ssl_certificate_by_lua*, ssl_session_fetch_by_lua*, ssl_client_hello_by_lua**
|
||||
|
||||
This function is added for [LuaSocket](http://w3.impa.br/~diego/software/luasocket/tcp.html) API compatibility and does nothing for now. Its functionality is implemented `v0.10.18`.
|
||||
This function is added for [LuaSocket](http://w3.impa.br/~diego/software/luasocket/tcp.html) API compatibility, its functionality is implemented `v0.10.18`.
|
||||
|
||||
This feature was first introduced in the `v0.5.0rc1` release.
|
||||
|
||||
|
@ -9034,7 +9034,7 @@ ngx.worker.pids
|
|||
|
||||
**context:** *set_by_lua*, rewrite_by_lua*, access_by_lua*, content_by_lua*, header_filter_by_lua*, body_filter_by_lua*, log_by_lua*, ngx.timer.*, exit_worker_by_lua**
|
||||
|
||||
This function returns a Lua table for all Nginx worker process ID (PID). Nginx uses channel to send the current worker PID to another worker in the worker process start or restart. So this API can get all current worker PID.
|
||||
This function returns a Lua table for all Nginx worker process IDs (PIDs). Nginx uses channel to send the current worker PID to another worker in the worker process start or restart. So this API can get all current worker PIDs. Windows does not have this API.
|
||||
|
||||
This API was first introduced in the `0.10.23` release.
|
||||
|
||||
|
|
|
@ -94,7 +94,7 @@ END
|
|||
case "$NGX_PLATFORM" in
|
||||
Darwin:*)
|
||||
case "$NGX_MACHINE" in
|
||||
amd64 | arm64 | x86_64 | i386)
|
||||
amd64 | x86_64 | i386)
|
||||
echo "adding extra linking options needed by LuaJIT on $NGX_MACHINE"
|
||||
luajit_ld_opt="$luajit_ld_opt -pagezero_size 10000 -image_base 100000000"
|
||||
ngx_feature_libs="$ngx_feature_libs -pagezero_size 10000 -image_base 100000000"
|
||||
|
|
|
@ -19,7 +19,10 @@
|
|||
/* Public API for other Nginx modules */
|
||||
|
||||
|
||||
#define ngx_http_lua_version 10023
|
||||
#define ngx_http_lua_version 10024
|
||||
|
||||
|
||||
typedef struct ngx_http_lua_co_ctx_s ngx_http_lua_co_ctx_t;
|
||||
|
||||
|
||||
typedef struct {
|
||||
|
@ -56,6 +59,17 @@ ngx_shm_zone_t *ngx_http_lua_find_zone(u_char *name_data, size_t name_len);
|
|||
ngx_shm_zone_t *ngx_http_lua_shared_memory_add(ngx_conf_t *cf, ngx_str_t *name,
|
||||
size_t size, void *tag);
|
||||
|
||||
ngx_http_lua_co_ctx_t *ngx_http_lua_get_cur_co_ctx(ngx_http_request_t *r);
|
||||
|
||||
void ngx_http_lua_set_cur_co_ctx(ngx_http_request_t *r,
|
||||
ngx_http_lua_co_ctx_t *coctx);
|
||||
|
||||
lua_State *ngx_http_lua_get_co_ctx_vm(ngx_http_lua_co_ctx_t *coctx);
|
||||
|
||||
void ngx_http_lua_co_ctx_resume_helper(ngx_http_lua_co_ctx_t *coctx, int nrets);
|
||||
|
||||
int ngx_http_lua_get_lua_http10_buffering(ngx_http_request_t *r);
|
||||
|
||||
|
||||
#endif /* _NGX_HTTP_LUA_API_H_INCLUDED_ */
|
||||
|
||||
|
|
|
@ -213,4 +213,132 @@ ngx_http_lua_shared_memory_init(ngx_shm_zone_t *shm_zone, void *data)
|
|||
return NGX_OK;
|
||||
}
|
||||
|
||||
|
||||
ngx_http_lua_co_ctx_t *
|
||||
ngx_http_lua_get_cur_co_ctx(ngx_http_request_t *r)
|
||||
{
|
||||
ngx_http_lua_ctx_t *ctx;
|
||||
|
||||
ctx = ngx_http_get_module_ctx(r, ngx_http_lua_module);
|
||||
|
||||
return ctx->cur_co_ctx;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
ngx_http_lua_set_cur_co_ctx(ngx_http_request_t *r, ngx_http_lua_co_ctx_t *coctx)
|
||||
{
|
||||
ngx_http_lua_ctx_t *ctx;
|
||||
|
||||
ctx = ngx_http_get_module_ctx(r, ngx_http_lua_module);
|
||||
|
||||
coctx->data = r;
|
||||
|
||||
ctx->cur_co_ctx = coctx;
|
||||
}
|
||||
|
||||
|
||||
lua_State *
|
||||
ngx_http_lua_get_co_ctx_vm(ngx_http_lua_co_ctx_t *coctx)
|
||||
{
|
||||
return coctx->co;
|
||||
}
|
||||
|
||||
|
||||
static ngx_int_t
|
||||
ngx_http_lua_co_ctx_resume(ngx_http_request_t *r)
|
||||
{
|
||||
lua_State *vm;
|
||||
ngx_connection_t *c;
|
||||
ngx_int_t rc;
|
||||
ngx_uint_t nreqs;
|
||||
ngx_http_lua_ctx_t *ctx;
|
||||
|
||||
ctx = ngx_http_get_module_ctx(r, ngx_http_lua_module);
|
||||
if (ctx == NULL) {
|
||||
return NGX_ERROR;
|
||||
}
|
||||
|
||||
ctx->resume_handler = ngx_http_lua_wev_handler;
|
||||
|
||||
c = r->connection;
|
||||
vm = ngx_http_lua_get_lua_vm(r, ctx);
|
||||
nreqs = c->requests;
|
||||
|
||||
rc = ngx_http_lua_run_thread(vm, r, ctx, ctx->cur_co_ctx->nrets);
|
||||
|
||||
ngx_log_debug1(NGX_LOG_DEBUG_HTTP, r->connection->log, 0,
|
||||
"lua run thread returned %d", rc);
|
||||
|
||||
if (rc == NGX_AGAIN) {
|
||||
return ngx_http_lua_run_posted_threads(c, vm, r, ctx, nreqs);
|
||||
}
|
||||
|
||||
if (rc == NGX_DONE) {
|
||||
ngx_http_lua_finalize_request(r, NGX_DONE);
|
||||
return ngx_http_lua_run_posted_threads(c, vm, r, ctx, nreqs);
|
||||
}
|
||||
|
||||
if (ctx->entered_content_phase) {
|
||||
ngx_http_lua_finalize_request(r, rc);
|
||||
return NGX_DONE;
|
||||
}
|
||||
|
||||
return rc;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
ngx_http_lua_co_ctx_resume_helper(ngx_http_lua_co_ctx_t *coctx, int nrets)
|
||||
{
|
||||
ngx_connection_t *c;
|
||||
ngx_http_request_t *r;
|
||||
ngx_http_lua_ctx_t *ctx;
|
||||
ngx_http_log_ctx_t *log_ctx;
|
||||
|
||||
r = coctx->data;
|
||||
c = r->connection;
|
||||
|
||||
ctx = ngx_http_get_module_ctx(r, ngx_http_lua_module);
|
||||
|
||||
if (ctx == NULL) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (c->fd != (ngx_socket_t) -1) { /* not a fake connection */
|
||||
log_ctx = c->log->data;
|
||||
log_ctx->current_request = r;
|
||||
}
|
||||
|
||||
coctx->nrets = nrets;
|
||||
coctx->cleanup = NULL;
|
||||
|
||||
ngx_log_debug2(NGX_LOG_DEBUG_HTTP, c->log, 0,
|
||||
"lua coctx resume handler: \"%V?%V\"", &r->uri, &r->args);
|
||||
|
||||
ctx->cur_co_ctx = coctx;
|
||||
|
||||
if (ctx->entered_content_phase) {
|
||||
(void) ngx_http_lua_co_ctx_resume(r);
|
||||
|
||||
} else {
|
||||
ctx->resume_handler = ngx_http_lua_co_ctx_resume;
|
||||
ngx_http_core_run_phases(r);
|
||||
}
|
||||
|
||||
ngx_http_run_posted_requests(c);
|
||||
}
|
||||
|
||||
|
||||
int
|
||||
ngx_http_lua_get_lua_http10_buffering(ngx_http_request_t *r)
|
||||
{
|
||||
ngx_http_lua_loc_conf_t *llcf;
|
||||
|
||||
llcf = ngx_http_get_module_loc_conf(r, ngx_http_lua_module);
|
||||
|
||||
return llcf->http10_buffering;
|
||||
}
|
||||
|
||||
|
||||
/* vi:set ft=c ts=4 sw=4 et fdm=marker: */
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue