Merge remote-tracking branch 'origin/master' into chore_add_http2_testcase
starsz committed Oct 17, 2023
2 parents 9ea8f6a + 0b38ea2 commit 64d74ac
Showing 42 changed files with 879 additions and 166 deletions.
19 changes: 9 additions & 10 deletions CHANGELOG.md
@@ -77,30 +77,29 @@ title: Changelog

 ### Change

-- :warning: Remove the `etcd.use_grpc` and no longer support communication with ETCD using protocols with gRPC: [#10015](https://github.com/apache/apisix/pull/10015)
-- :warning: Removing conf server, the data plane no longer supports communication with the data plane, and needs to be adjusted from `config_provider: control_plane` to `config_provider: etcd`: [#10012](https://github.com/apache/apisix/pull/10012)
+- :warning: Remove gRPC support between APISIX and etcd and remove `etcd.use_grpc` configuration option: [#10015](https://github.com/apache/apisix/pull/10015)
+- :warning: Remove conf server. The data plane no longer supports direct communication with the control plane, and the configuration should be adjusted from `config_provider: control_plane` to `config_provider: etcd`: [#10012](https://github.com/apache/apisix/pull/10012)
+- :warning: Enforce strict schema validation on the properties of the core APISIX resources: [#10233](https://github.com/apache/apisix/pull/10233)

 ### Core

-- :sunrise: support configuring the buffer size of the access log: [#10225](https://github.com/apache/apisix/pull/10225)
-- :sunrise: Support for passing resolv.conf in dns discovery: [#9770](https://github.com/apache/apisix/pull/9770)
-- :sunrise: No longer relying on trust: [#10121](https://github.com/apache/apisix/pull/10065)
-- :sunrise: Strictly validate the input of core resources: [#10233](https://github.com/apache/apisix/pull/10233)
-- :sunrise: Add dubbo protocols Support in the xrpc [#9660](https://github.com/apache/apisix/pull/9660)
+- :sunrise: Support configuring the buffer size of the access log: [#10225](https://github.com/apache/apisix/pull/10225)
+- :sunrise: Support the use of local DNS resolvers in service discovery by configuring `resolv_conf`: [#9770](https://github.com/apache/apisix/pull/9770)
+- :sunrise: Remove Rust dependency for installation: [#10121](https://github.com/apache/apisix/pull/10121)
+- :sunrise: Support Dubbo protocol in xRPC [#9660](https://github.com/apache/apisix/pull/9660)

 ### Plugins

 - :sunrise: Support https in traffic-split plugin: [#9115](https://github.com/apache/apisix/pull/9115)
-- :sunrise: Support for passing resolv.conf in dns discovery: [#9770](https://github.com/apache/apisix/pull/9770)
 - :sunrise: Support rewrite request body in external plugin:[#9990](https://github.com/apache/apisix/pull/9990)
 - :sunrise: Support set nginx variables in opentelemetry plugin: [#8871](https://github.com/apache/apisix/pull/8871)
 - :sunrise: Support unix sock host pattern in the chaitin-waf plugin: [#10161](https://github.com/apache/apisix/pull/10161)

 ### Bugfixes

-- Fix graphql post request route matching exception: [#10198](https://github.com/apache/apisix/pull/10198)
+- Fix GraphQL POST request route matching exception: [#10198](https://github.com/apache/apisix/pull/10198)
 - Fix error on array of multiline string in `apisix.yaml`: [#10193](https://github.com/apache/apisix/pull/10193)
-- Fix provide error instead of nil panic when cache_zone is missing in proxy-cache plugin: [#10138](https://github.com/apache/apisix/pull/10138)
+- Add error handlers for invalid `cache_zone` configuration in the `proxy-cache` plugin: [#10138](https://github.com/apache/apisix/pull/10138)

 ## 3.5.0

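For reference, the conf-server removal recorded in the changelog above means a decoupled data plane now reads configuration from etcd directly. A minimal `config.yaml` sketch of the adjusted data plane, assuming the standard APISIX 3.x deployment layout (key names other than `config_provider` are recalled from the deployment docs, not taken from this diff):

```yaml
deployment:
  role: data_plane
  role_data_plane:
    config_provider: etcd        # previously control_plane (conf server), which is removed
  etcd:
    host:
      - "http://127.0.0.1:2379"  # the data plane now talks to etcd directly
    prefix: /apisix
    timeout: 30
```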
16 changes: 16 additions & 0 deletions apisix/admin/services.lua
@@ -16,6 +16,7 @@
 --
 local core = require("apisix.core")
 local get_routes = require("apisix.router").http_routes
+local get_stream_routes = require("apisix.router").stream_routes
 local apisix_upstream = require("apisix.upstream")
 local resource = require("apisix.admin.resource")
 local schema_plugin = require("apisix.admin.plugins").check_schema
@@ -99,6 +100,21 @@ local function delete_checker(id)
         end
     end

+    local stream_routes, stream_routes_ver = get_stream_routes()
+    core.log.info("stream_routes: ", core.json.delay_encode(stream_routes, true))
+    core.log.info("stream_routes_ver: ", stream_routes_ver)
+    if stream_routes_ver and stream_routes then
+        for _, route in ipairs(stream_routes) do
+            if type(route) == "table" and route.value
+               and route.value.service_id
+               and tostring(route.value.service_id) == id then
+                return 400, {error_msg = "can not delete this service directly,"
+                                         .. " stream_route [" .. route.value.id
+                                         .. "] is still using it now"}
+            end
+        end
+    end
+
     return nil, nil
 end

17 changes: 17 additions & 0 deletions apisix/admin/stream_routes.lua
@@ -42,6 +42,23 @@ local function check_conf(id, conf, need_id, schema)
         end
     end

+    local service_id = conf.service_id
+    if service_id then
+        local key = "/services/" .. service_id
+        local res, err = core.etcd.get(key)
+        if not res then
+            return nil, {error_msg = "failed to fetch service info by "
+                                     .. "service id [" .. service_id .. "]: "
+                                     .. err}
+        end
+
+        if res.status ~= 200 then
+            return nil, {error_msg = "failed to fetch service info by "
+                                     .. "service id [" .. service_id .. "], "
+                                     .. "response code: " .. res.status}
+        end
+    end
+
     local ok, err = stream_route_checker(conf, true)
     if not ok then
         return nil, {error_msg = err}
2 changes: 1 addition & 1 deletion apisix/balancer.lua
@@ -79,7 +79,7 @@ local function fetch_health_nodes(upstream, checker)
         if ok then
             up_nodes = transform_node(up_nodes, node)
         elseif err then
-            core.log.error("failed to get health check target status, addr: ",
+            core.log.warn("failed to get health check target status, addr: ",
                     node.host, ":", port or node.port, ", host: ", host, ", err: ", err)
         end
     end
1 change: 1 addition & 0 deletions apisix/constants.lua
@@ -37,6 +37,7 @@ return {
     },
     STREAM_ETCD_DIRECTORY = {
         ["/upstreams"] = true,
+        ["/services"] = true,
         ["/plugins"] = true,
         ["/ssls"] = true,
         ["/stream_routes"] = true,
2 changes: 1 addition & 1 deletion apisix/core/config_util.lua
@@ -114,7 +114,7 @@ function _M.fire_all_clean_handlers(item)
         clean_handler.f(item)
     end

-    item.clean_handlers = nil
+    item.clean_handlers = {}
 end


19 changes: 11 additions & 8 deletions apisix/discovery/consul/init.lua
@@ -197,21 +197,20 @@ local function get_opts(consul_server, is_catalog)
         port = consul_server.port,
         connect_timeout = consul_server.connect_timeout,
         read_timeout = consul_server.read_timeout,
+        default_args = {
+            token = consul_server.token,
+        }
     }
     if not consul_server.keepalive then
         return opts
     end

+    opts.default_args.wait = consul_server.wait_timeout --blocked wait!=0; unblocked by wait=0
+
     if is_catalog then
-        opts.default_args = {
-            wait = consul_server.wait_timeout, --blocked wait!=0; unblocked by wait=0
-            index = consul_server.catalog_index,
-        }
+        opts.default_args.index = consul_server.catalog_index
     else
-        opts.default_args = {
-            wait = consul_server.wait_timeout, --blocked wait!=0; unblocked by wait=0
-            index = consul_server.health_index,
-        }
+        opts.default_args.index = consul_server.health_index
     end

     return opts
@@ -396,6 +395,9 @@ function _M.connect(premature, consul_server, retry_delay)
         port = consul_server.port,
         connect_timeout = consul_server.connect_timeout,
         read_timeout = consul_server.read_timeout,
+        default_args = {
+            token = consul_server.token
+        }
     })
     local catalog_success, catalog_res, catalog_err = pcall(function()
         return consul_client:get(consul_server.consul_watch_catalog_url)
@@ -545,6 +547,7 @@ local function format_consul_params(consul_conf)
         core.table.insert(consul_server_list, {
             host = host,
             port = port,
+            token = consul_conf.token,
             connect_timeout = consul_conf.timeout.connect,
             read_timeout = consul_conf.timeout.read,
             wait_timeout = consul_conf.timeout.wait,
1 change: 1 addition & 0 deletions apisix/discovery/consul/schema.lua
@@ -24,6 +24,7 @@ return {
                 type = "string",
             }
         },
+        token = {type = "string", default = ""},
         fetch_interval = {type = "integer", minimum = 1, default = 3},
         keepalive = {
             type = "boolean",
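The `token` field added to the schema above is the Consul ACL token that the discovery client sends with its requests (see the `default_args` changes in `apisix/discovery/consul/init.lua`). A hedged sketch of the corresponding `config.yaml` section; the server address and token value are placeholders:

```yaml
discovery:
  consul:
    servers:
      - "http://127.0.0.1:8500"        # placeholder Consul address
    token: "REPLACE-WITH-ACL-TOKEN"    # new option in this commit, defaults to ""
    fetch_interval: 3                  # existing options shown for context
    keepalive: true
```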
18 changes: 7 additions & 11 deletions apisix/discovery/consul_kv/init.lua
@@ -320,18 +320,14 @@ end

 local function format_consul_params(consul_conf)
     local consul_server_list = core.table.new(0, #consul_conf.servers)
-    local args
+    local args = {
+        token = consul_conf.token,
+        recurse = true
+    }

-    if consul_conf.keepalive == false then
-        args = {
-            recurse = true,
-        }
-    elseif consul_conf.keepalive then
-        args = {
-            recurse = true,
-            wait = consul_conf.timeout.wait, --blocked wait!=0; unblocked by wait=0
-            index = 0,
-        }
+    if consul_conf.keepalive then
+        args.wait = consul_conf.timeout.wait --blocked wait!=0; unblocked by wait=0
+        args.index = 0
     end

     for _, v in pairs(consul_conf.servers) do
1 change: 1 addition & 0 deletions apisix/discovery/consul_kv/schema.lua
@@ -24,6 +24,7 @@ return {
                 type = "string",
             }
         },
+        token = {type = "string", default = ""},
        fetch_interval = {type = "integer", minimum = 1, default = 3},
         keepalive = {
             type = "boolean",
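`consul_kv` discovery gains the same `token` option; a sketch under the same assumptions (the `prefix` key is recalled from the consul_kv documentation and is not part of this diff):

```yaml
discovery:
  consul_kv:
    servers:
      - "http://127.0.0.1:8500"        # placeholder Consul address
    token: "REPLACE-WITH-ACL-TOKEN"    # passed as a request argument, defaults to ""
    prefix: "upstreams"                # assumed default key prefix
```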
2 changes: 1 addition & 1 deletion apisix/http/service.lua
@@ -61,7 +61,7 @@ function _M.init_worker()
         filter = filter,
     })
     if not services then
-        error("failed to create etcd instance for fetching upstream: " .. err)
+        error("failed to create etcd instance for fetching /services: " .. err)
         return
     end
 end
29 changes: 29 additions & 0 deletions apisix/init.lua
@@ -1021,6 +1021,7 @@ function _M.stream_init_worker()
     plugin.init_worker()
     xrpc.init_worker()
     router.stream_init_worker()
+    require("apisix.http.service").init_worker()
     apisix_upstream.init_worker()

     local we = require("resty.worker.events")
@@ -1078,6 +1079,34 @@ function _M.stream_preread_phase()

         api_ctx.matched_upstream = upstream

+    elseif matched_route.value.service_id then
+        local service = service_fetch(matched_route.value.service_id)
+        if not service then
+            core.log.error("failed to fetch service configuration by ",
+                           "id: ", matched_route.value.service_id)
+            return core.response.exit(404)
+        end
+
+        matched_route = plugin.merge_service_stream_route(service, matched_route)
+        api_ctx.matched_route = matched_route
+        api_ctx.conf_type = "stream_route&service"
+        api_ctx.conf_version = matched_route.modifiedIndex .. "&" .. service.modifiedIndex
+        api_ctx.conf_id = matched_route.value.id .. "&" .. service.value.id
+        api_ctx.service_id = service.value.id
+        api_ctx.service_name = service.value.name
+        api_ctx.matched_upstream = matched_route.value.upstream
+        if matched_route.value.upstream_id and not matched_route.value.upstream then
+            local upstream = apisix_upstream.get_by_id(matched_route.value.upstream_id)
+            if not upstream then
+                if is_http then
+                    return core.response.exit(502)
+                end
+
+                return ngx_exit(1)
+            end
+
+            api_ctx.matched_upstream = upstream
+        end
     else
         if matched_route.has_domain then
             local err
46 changes: 46 additions & 0 deletions apisix/plugin.lua
@@ -43,6 +43,9 @@ local stream_local_plugins_hash = core.table.new(0, 32)
 local merged_route = core.lrucache.new({
     ttl = 300, count = 512
 })
+local merged_stream_route = core.lrucache.new({
+    ttl = 300, count = 512
+})
 local expr_lrucache = core.lrucache.new({
     ttl = 300, count = 512
 })
@@ -637,6 +640,49 @@ function _M.merge_service_route(service_conf, route_conf)
 end


+local function merge_service_stream_route(service_conf, route_conf)
+    -- because many fields in Service are not supported by stream route,
+    -- so we copy the stream route as base object
+    local new_conf = core.table.deepcopy(route_conf)
+    if service_conf.value.plugins then
+        for name, conf in pairs(service_conf.value.plugins) do
+            if not new_conf.value.plugins then
+                new_conf.value.plugins = {}
+            end
+
+            if not new_conf.value.plugins[name] then
+                new_conf.value.plugins[name] = conf
+            end
+        end
+    end
+
+    new_conf.value.service_id = nil
+
+    if not new_conf.value.upstream and service_conf.value.upstream then
+        new_conf.value.upstream = service_conf.value.upstream
+    end
+
+    if not new_conf.value.upstream_id and service_conf.value.upstream_id then
+        new_conf.value.upstream_id = service_conf.value.upstream_id
+    end
+
+    return new_conf
+end
+
+
+function _M.merge_service_stream_route(service_conf, route_conf)
+    core.log.info("service conf: ", core.json.delay_encode(service_conf, true))
+    core.log.info(" stream route conf: ", core.json.delay_encode(route_conf, true))
+
+    local version = route_conf.modifiedIndex .. "#" .. service_conf.modifiedIndex
+    local route_service_key = route_conf.value.id .. "#"
+                              .. version
+    return merged_stream_route(route_service_key, version,
+                               merge_service_stream_route,
+                               service_conf, route_conf)
+end
+
+
 local function merge_consumer_route(route_conf, consumer_conf, consumer_group_conf)
     if not consumer_conf.plugins or
        core.table.nkeys(consumer_conf.plugins) == 0
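To illustrate what `merge_service_stream_route` above produces: plugins defined on the service are copied in only where the stream route does not already configure them, the service's `upstream`/`upstream_id` are used only when the route has neither, and `service_id` is cleared on the merged copy. A hypothetical example (IDs, port, and plugin values are invented for illustration):

```yaml
# Given this service and stream route ...
service:
  id: "1"
  plugins:
    limit-conn:
      conn: 100
      burst: 50
      default_conn_delay: 0.1
      key: remote_addr
  upstream:
    type: roundrobin
    nodes:
      "127.0.0.1:1995": 1

stream_route:
  id: "sr1"
  server_port: 9100
  service_id: "1"

# ... the merged route used at runtime looks roughly like:
merged_stream_route:
  id: "sr1"
  server_port: 9100
  plugins:                      # inherited: the route defines no plugins of its own
    limit-conn:
      conn: 100
      burst: 50
      default_conn_delay: 0.1
      key: remote_addr
  upstream:                     # inherited: the route has neither upstream nor upstream_id
    type: roundrobin
    nodes:
      "127.0.0.1:1995": 1
  # service_id is removed from the merged copy
```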
2 changes: 1 addition & 1 deletion apisix/plugins/cors.lua
@@ -98,7 +98,7 @@ local schema = {
             type = "array",
             description =
                 "you can use regex to allow specific origins when no credentials," ..
-                "for example use [.*\\.test.com] to allow a.test.com and b.test.com",
+                "for example use [.*\\.test.com$] to allow a.test.com and b.test.com",
             items = {
                 type = "string",
                 minLength = 1,
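The corrected description belongs to the plugin's regex-based origin allow-list. A hypothetical route-level configuration matching the example in the description (the option name `allow_origins_by_regex` comes from the cors plugin schema and is not visible in the lines above):

```yaml
plugins:
  cors:
    allow_origins_by_regex:
      - ".*\\.test.com$"   # anchored with "$", so a.test.com and b.test.com are allowed
```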
22 changes: 7 additions & 15 deletions apisix/schema_def.lua
@@ -402,16 +402,10 @@ local upstream_schema = {
             },
         },
         dependencies = {
-            client_cert = {
-                required = {"client_key"},
-                ["not"] = {required = {"client_cert_id"}}
-            },
-            client_key = {
-                required = {"client_cert"},
-                ["not"] = {required = {"client_cert_id"}}
-            },
+            client_cert = {required = {"client_key"}},
+            client_key = {required = {"client_cert"}},
             client_cert_id = {
-                ["not"] = {required = {"client_client", "client_key"}}
+                ["not"] = {required = {"client_cert", "client_key"}}
             }
         }
     },
@@ -784,10 +778,6 @@ _M.ssl = {
             },
             required = {"ca"},
         },
-        exptime = {
-            type = "integer",
-            minimum = 1588262400, -- 2020/5/1 0:0:0
-        },
         labels = labels_def,
         status = {
             description = "ssl status, 1 to enable, 0 to disable",
@@ -804,8 +794,6 @@
                 enum = {"TLSv1.1", "TLSv1.2", "TLSv1.3"}
             },
         },
-        validity_end = timestamp_def,
-        validity_start = timestamp_def,
         create_time = timestamp_def,
         update_time = timestamp_def
     },
@@ -920,6 +908,7 @@ _M.stream_route = {
         },
         upstream = upstream_schema,
         upstream_id = id_schema,
+        service_id = id_schema,
         plugins = plugins_schema,
         protocol = xrpc_protocol_schema,
     },
@@ -949,6 +938,9 @@ _M.plugins = {
 _M.plugin_config = {
     type = "object",
     properties = {
+        name = {
+            type = "string",
+        },
         id = id_schema,
         desc = desc_def,
         plugins = plugins_schema,
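With `service_id` now accepted by the stream route schema, a stream route can reference a service and inherit its upstream and plugins. A sketch in standalone `apisix.yaml` form; the IDs and port are illustrative, and `server_port` is an existing stream-route match field not shown in this hunk:

```yaml
services:
  - id: 1
    upstream:
      type: roundrobin
      nodes:
        "127.0.0.1:1995": 1

stream_routes:
  - id: 1
    server_port: 9100      # match TCP connections on this port
    service_id: 1          # new field: reuse the service's upstream and plugins
#END
```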