Copilot commented on code in PR #12883:
URL: https://github.com/apache/apisix/pull/12883#discussion_r2679325713
##########
docs/en/latest/plugins/ai-request-rewrite.md:
##########
@@ -36,7 +36,7 @@ The `ai-request-rewrite` plugin intercepts client requests before they are forwa
| **Field**                 | **Required** | **Type** | **Description** |
| ------------------------- | ------------ | -------- | ------------------------------------------------------------------------------------ |
| prompt                    | Yes          | String   | The prompt send to LLM service. |
-| provider                  | Yes          | String   | Name of the LLM service. Available options: openai, deekseek, azure-openai, aimlapi and openai-compatible. When `aimlapi` is selected, the plugin uses the OpenAI-compatible driver with a default endpoint of `https://api.aimlapi.com/v1/chat/completions`. |
+| provider                  | Yes          | String   | Name of the LLM service. Available options: openai, deekseek, azure-openai, aimlapi, gemini-openai, and openai-compatible. When `aimlapi` is selected, the plugin uses the OpenAI-compatible driver with a default endpoint of `https://api.aimlapi.com/v1/chat/completions`. |
Review Comment:
Spelling error: "deekseek" should be "deepseek" to match the correct
provider name used throughout the codebase.
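For reference, a minimal route sketch using the corrected provider name could look like the following. Only `prompt` and `provider` come from the quoted parameter table; the `auth` block and the `upstream` section are assumptions modeled on the ai-proxy-style configs in the test file below, so the full plugin schema should be checked before reusing this.

```json
{
  "uri": "/anything",
  "plugins": {
    "ai-request-rewrite": {
      "prompt": "Rewrite the request body into formal English",
      "provider": "deepseek",
      "auth": {
        "header": {
          "Authorization": "Bearer <DEEPSEEK_API_KEY>"
        }
      }
    }
  },
  "upstream": {
    "type": "roundrobin",
    "nodes": { "httpbin.org:80": 1 }
  }
}
```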
##########
docs/zh/latest/plugins/ai-request-rewrite.md:
##########
@@ -36,7 +36,7 @@ description: The ai-request-rewrite plugin, before client requests are forwarded to the upstream service
| **Field**                 | **Required** | **Type** | **Description** |
| ------------------------- | ------------ | -------- | ------------------------------------------------------------------------------------ |
| prompt                    | Yes          | String   | The prompt sent to the LLM service. |
-| provider                  | Yes          | String   | Name of the LLM service. Available options: openai, deekseek, azure-openai, aimlapi, and openai-compatible. When `aimlapi` is selected, the plugin uses the OpenAI-compatible driver with a default endpoint of `https://api.aimlapi.com/v1/chat/completions`. |
+| provider                  | Yes          | String   | Name of the LLM service. Available options: openai, deekseek, azure-openai, aimlapi, gemini-openai, and openai-compatible. When `aimlapi` is selected, the plugin uses the OpenAI-compatible driver with a default endpoint of `https://api.aimlapi.com/v1/chat/completions`. |
Review Comment:
Spelling error: "deekseek" should be "deepseek" to match the correct
provider name used throughout the codebase.
##########
t/plugin/ai-proxy-gemini-openai.t:
##########
@@ -0,0 +1,297 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+use t::APISIX 'no_plan';
+
+log_level("info");
+repeat_each(1);
+no_long_string();
+no_root_location();
+
+
+my $resp_file = 't/assets/ai-proxy-response.json';
+open(my $fh, '<', $resp_file) or die "Could not open file '$resp_file' $!";
+my $resp = do { local $/; <$fh> };
+close($fh);
+
+print "Hello, World!\n";
+print $resp;
+
+
+add_block_preprocessor(sub {
+ my ($block) = @_;
+
+ if (!defined $block->request) {
+ $block->set_value("request", "GET /t");
+ }
+
+ my $user_yaml_config = <<_EOC_;
+plugins:
+ - ai-proxy-multi
+ - prometheus
+_EOC_
+ $block->set_value("extra_yaml_config", $user_yaml_config);
+
+ my $http_config = $block->http_config // <<_EOC_;
+ server {
+ server_name openai;
+ listen 6724;
+
+ default_type 'application/json';
+
+ location /v1/chat/completions {
+ content_by_lua_block {
+ local json = require("cjson.safe")
+
+ if ngx.req.get_method() ~= "POST" then
+ ngx.status = 400
+                        ngx.say("Unsupported request method: ", ngx.req.get_method())
+ end
+ ngx.req.read_body()
+ local body, err = ngx.req.get_body_data()
+ body, err = json.decode(body)
+
+ local test_type = ngx.req.get_headers()["test-type"]
+ if test_type == "options" then
+ if body.foo == "bar" then
+ ngx.status = 200
+ ngx.say("options works")
+ else
+ ngx.status = 500
+ ngx.say("model options feature doesn't work")
+ end
+ return
+ end
+
+ local header_auth = ngx.req.get_headers()["authorization"]
+ local query_auth = ngx.req.get_uri_args()["apikey"]
+
+                    if header_auth ~= "Bearer token" and query_auth ~= "apikey" then
+ ngx.status = 401
+ ngx.say("Unauthorized")
+ return
+ end
+
+                    if header_auth == "Bearer token" or query_auth == "apikey" then
+ ngx.req.read_body()
+ local body, err = ngx.req.get_body_data()
+ body, err = json.decode(body)
+
+ if not body.messages or #body.messages < 1 then
+ ngx.status = 400
+ ngx.say([[{ "error": "bad request"}]])
+ return
+ end
+                        if body.messages[1].content == "write an SQL query to get all rows from student table" then
+ ngx.print("SELECT * FROM STUDENTS")
+ return
+ end
+
+ ngx.status = 200
+ ngx.say([[$resp]])
+ return
+ end
+
+
+ ngx.status = 503
+ ngx.say("reached the end of the test suite")
+ }
+ }
+
+ location /random {
+ content_by_lua_block {
+ ngx.say("path override works")
+ }
+ }
+ }
+_EOC_
+
+ $block->set_value("http_config", $http_config);
+});
+
+run_tests();
+
+__DATA__
+
+=== TEST 1: set route with right auth header
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "uri": "/anything",
+ "plugins": {
+ "ai-proxy-multi": {
+ "instances": [
+ {
+ "name": "gemini-openai",
+ "provider": "gemini-openai",
+ "weight": 1,
+ "auth": {
+ "header": {
+ "Authorization": "Bearer token"
+ }
+ },
+ "options": {
+ "model": "gemini-1.5-flash",
+ "max_tokens": 512,
+ "temperature": 1.0
+ },
+ "override": {
+                                "endpoint": "http://localhost:6724/v1/chat/completions"
+ }
+ }
+ ],
+ "ssl_verify": false
+ }
+ }
+ }]]
+ )
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 2: send request
+--- request
+POST /anything
+{ "messages": [ { "role": "system", "content": "You are a mathematician" }, {
"role": "user", "content": "What is 1+1?"} ] }
+--- more_headers
+Authorization: Bearer token
+--- error_code: 200
+--- response_body eval
+qr/\{ "content": "1 \+ 1 = 2\.", "role": "assistant" \}/
+
+
+
+=== TEST 3: set route with stream = true (SSE)
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1',
+ ngx.HTTP_PUT,
+ [[{
+ "uri": "/anything",
+ "plugins": {
+ "ai-proxy-multi": {
+ "instances": [
+ {
+ "name": "gemini-openai",
+ "provider": "gemini-openai",
+ "weight": 1,
+ "auth": {
+ "header": {
+ "Authorization": "Bearer token"
+ }
+ },
+ "options": {
+ "model": "gemini-1.5-flash",
+ "max_tokens": 512,
+ "temperature": 1.0,
+ "stream": true
+ },
+ "override": {
+                                "endpoint": "http://localhost:7737/v1/chat/completions"
Review Comment:
The endpoint in TEST 3 uses port 7737, which differs from port 6724 used elsewhere in this test
file, but the mock server is only configured to listen on port 6724 (line 52). TEST 4 will
therefore fail when it tries to connect to a non-existent server on port 7737.
```suggestion
"endpoint":
"http://localhost:6724/v1/chat/completions"
```
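If the different port in TEST 3 turns out to be intentional (for example, to exercise a second endpoint), an alternative to the suggestion above would be to let the mock server accept both ports. A sketch of that change inside the quoted `http_config` block; the extra `listen` directive is hypothetical and not part of this PR:

```nginx
    server {
        server_name openai;

        # existing mock port
        listen 6724;
        # hypothetical extra listener so TEST 3's 7737 endpoint reaches the same mock;
        # only needed if the port difference is intentional
        listen 7737;

        # ... the /v1/chat/completions mock location from the quoted diff stays unchanged
    }
```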
##########
t/plugin/ai-proxy-gemini-openai.t:
##########
@@ -0,0 +1,297 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+use t::APISIX 'no_plan';
+
+log_level("info");
+repeat_each(1);
+no_long_string();
+no_root_location();
+
+
+my $resp_file = 't/assets/ai-proxy-response.json';
+open(my $fh, '<', $resp_file) or die "Could not open file '$resp_file' $!";
+my $resp = do { local $/; <$fh> };
+close($fh);
+
+print "Hello, World!\n";
+print $resp;
+
+
Review Comment:
Debug print statements should be removed before merging. These lines appear
to be left over from development and testing.
```suggestion
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]