Copilot commented on code in PR #12936:
URL: https://github.com/apache/apisix/pull/12936#discussion_r2720026773


##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:transform_request(conf, request_table)
+    local anthropic_body = {
+        model = conf.model,
+        messages = {},
+        max_tokens = request_table.max_tokens or 1024,
+        stream = request_table.stream,
     }
-)
+
+    -- Protocol Translation: Extract system prompt
+    for _, msg in ipairs(request_table.messages) do
+        if msg.role == "system" then
+            anthropic_body.system = msg.content
+        else
+            core.table.insert(anthropic_body.messages, {
+                role = msg.role,
+                content = msg.content
+            })
+        end
+    end
+
+    local headers = {
+        ["Content-Type"] = "application/json",
+        ["x-api-key"] = conf.api_key,
+        ["anthropic-version"] = ANTHROPIC_VERSION,
+    }
+
+    return anthropic_body, headers

Review Comment:
   The transform_request method returns headers but doesn't handle the 
authentication scheme correctly for the ai-proxy plugin's architecture. The 
plugin expects drivers to use headers passed via extra_opts.headers (see 
openai-base.lua:244-245), but this driver creates its own headers. The api_key 
from conf.api_key needs to be properly integrated with the auth headers system 
used by the base plugin (see base.lua:64 where auth.header is passed).
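   A minimal sketch of one way the driver could consume the plugin-supplied 
auth headers instead of reading conf.api_key directly; the extra_opts 
parameter and its headers field are assumptions drawn from the 
openai-base.lua pattern cited above:
   ```lua
   -- Hypothetical: accept the auth headers that ai-proxy passes down
   -- (extra_opts.headers) and translate a bearer token to x-api-key.
   function _M:transform_request(conf, request_table, extra_opts)
       local anthropic_body = {
           model = conf.model,
           messages = request_table.messages,
           max_tokens = request_table.max_tokens or 1024,
       }

       local headers = {
           ["Content-Type"] = "application/json",
           ["anthropic-version"] = ANTHROPIC_VERSION,
       }

       if extra_opts and extra_opts.headers then
           for k, v in pairs(extra_opts.headers) do
               if k:lower() == "authorization" then
                   -- map OpenAI-style bearer auth to Anthropic's header
                   headers["x-api-key"] = v:gsub("^Bearer%s+", "")
               else
                   headers[k] = v
               end
           end
       end

       return anthropic_body, headers
   end
   ```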



##########
docs/en/latest/plugins/ai-proxy-anthropic.md:
##########
@@ -0,0 +1,84 @@
+---
+title: AI Proxy with Anthropic Provider
+keywords:
+  - Apache APISIX
+  - API Gateway
+  - Plugin
+  - ai-proxy
+  - Anthropic
+  - Claude
+description: This document provides a comprehensive guide on using the Anthropic provider within the ai-proxy plugin, detailing configuration, protocol translation, and usage examples.
+---
+
+## Description
+
+The `ai-proxy` plugin enables seamless integration with **Anthropic (Claude)** as a native provider. This plugin acts as a high-performance translation layer, allowing you to use standard OpenAI-style requests to interact with Claude models via Anthropic's native Messages API.
+
+## Attributes
+
+When the `provider` is set to `anthropic`, the following attributes are used to configure the connection:
+
+| Name | Type | Required | Default | Description |
+| :--- | :--- | :--- | :--- | :--- |
+| provider | string | Yes | | Must be set to `anthropic`. |
+| model | string | Yes | | The Anthropic model ID (e.g., `claude-3-5-sonnet-20240620`). |
+| api_key | string | Yes | | Your Anthropic API key for authentication. |
+| override.endpoint | string | No | `https://api.anthropic.com/v1/messages` | Custom endpoint for the Anthropic provider. |
+
+## Usage
+
+APISIX automatically performs "protocol translation" when using the Anthropic provider. This ensures that your existing OpenAI-compatible applications can switch to Claude models without any code modifications.
+
+### Protocol Translation Details
+
+1. **System Prompt Handling**: OpenAI embeds system instructions within the `messages` array. APISIX automatically extracts these and maps them to Anthropic's mandatory top-level `system` field.
+2. **Header Adaptation**:
+   - Translates `Authorization: Bearer <key>` to `x-api-key: <key>`.
+   - Automatically injects the `anthropic-version: 2023-06-01` header.
+3. **Response Conversion**: Anthropic's response format is converted back to the OpenAI-compatible structure, including token usage statistics.
+

Review Comment:
   The documentation states that APISIX performs "protocol translation" from 
OpenAI format to Anthropic format, but the actual implementation in 
anthropic.lua does not properly handle this conversion. The driver's 
transform_request method copies message content as-is (line 58) without 
converting from OpenAI's string content format to Anthropic's array-of-objects 
format. This documentation is inconsistent with the implementation.
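   For reference, the shape mismatch described above, sketched as Lua tables 
(the driver currently forwards the first form unchanged):
   ```lua
   -- OpenAI chat format: content is a plain string.
   local openai_msg = { role = "user", content = "Hello" }

   -- Anthropic Messages API format: content is an array of typed blocks.
   local anthropic_msg = {
       role = "user",
       content = {
           { type = "text", text = "Hello" },
       },
   }
   ```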



##########
t/plugin/ai-proxy-anthropic.t:
##########
@@ -1,298 +1,268 @@
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
 
 use t::APISIX 'no_plan';
+use Test::Nginx::Socket::Lua;
 
-log_level("info");
 repeat_each(1);
 no_long_string();
 no_root_location();
 
-
-my $resp_file = 't/assets/openai-compatible-api-response.json';
-open(my $fh, '<', $resp_file) or die "Could not open file '$resp_file' $!";
-my $resp = do { local $/; <$fh> };
-close($fh);
-
-print "Hello, World!\n";
-print $resp;
-
-
 add_block_preprocessor(sub {
     my ($block) = @_;
 
-    if (!defined $block->request) {
-        $block->set_value("request", "GET /t");
-    }
+    my $http_config = <<'EOF';
+    server {
+        listen 1999;
+
+        location /v1/messages {
+            content_by_lua_block {
+                local core = require("apisix.core")
+                ngx.req.read_body()
+                local body = core.json.decode(ngx.req.get_body_data())
+
+                -- 1. Required Header: x-api-key
+                if ngx.var.http_x_api_key ~= "test-key" then
+                    ngx.status = 401
+                    ngx.say([[{"type":"error","error":{"type":"authentication_error","message":"invalid api key"}}]])
+                    return
+                end
 
-    my $user_yaml_config = <<_EOC_;
-plugins:
-  - ai-proxy-multi
-  - prometheus
-_EOC_
-    $block->set_value("extra_yaml_config", $user_yaml_config);
-
-    my $http_config = $block->http_config // <<_EOC_;
-        server {
-            server_name anthropic;
-            listen 6725;
-
-            default_type 'application/json';
-
-            location /v1/chat/completions {
-                content_by_lua_block {
-                    local json = require("cjson.safe")
-
-                    if ngx.req.get_method() ~= "POST" then
-                        ngx.status = 400
-                        ngx.say("Unsupported request method: ", ngx.req.get_method())
-                    end
-                    ngx.req.read_body()
-                    local body, err = ngx.req.get_body_data()
-                    body, err = json.decode(body)
-
-                    local test_type = ngx.req.get_headers()["test-type"]
-                    if test_type == "options" then
-                        if body.foo == "bar" then
-                            ngx.status = 200
-                            ngx.say("options works")
-                        else
-                            ngx.status = 500
-                            ngx.say("model options feature doesn't work")
-                        end
-                        return
-                    end
-
-                    local header_auth = ngx.req.get_headers()["authorization"]
-                    local query_auth = ngx.req.get_uri_args()["apikey"]
-
-                    if header_auth ~= "Bearer token" and query_auth ~= "apikey" then
-                        ngx.status = 401
-                        ngx.say("Unauthorized")
-                        return
-                    end
-
-                    if header_auth == "Bearer token" or query_auth == "apikey" then
-                        ngx.req.read_body()
-                        local body, err = ngx.req.get_body_data()
-                        body, err = json.decode(body)
-
-                        if not body.messages or #body.messages < 1 then
-                            ngx.status = 400
-                            ngx.say([[{ "error": "bad request"}]])
-                            return
-                        end
-                        if body.messages[1].content == "write an SQL query to get all rows from student table" then
-                            ngx.print("SELECT * FROM STUDENTS")
-                            return
-                        end
-
-                        ngx.status = 200
-                        ngx.say([[$resp]])
-                        return
-                    end
-
-
-                    ngx.status = 503
-                    ngx.say("reached the end of the test suite")
-                }
-            }
+                -- 2. Required Header: anthropic-version
+                if ngx.var.http_anthropic_version ~= "2023-06-01" then
+                    ngx.status = 400
+                    ngx.say("missing anthropic-version")
+                    return
+                end
 
-            location /random {
-                content_by_lua_block {
-                    ngx.say("path override works")
+                -- 3. Required Parameter: max_tokens
+                if not body.max_tokens then
+                    ngx.status = 400
+                    ngx.say("missing max_tokens")
+                    return
+                end
+
+                -- 4. Validate Anthropic's native message structure
+                --    Messages must have content as array with type field
+                local msg = body.messages[1]
+                if type(msg.content) ~= "table"
+                   or msg.content[1].type ~= "text" then
+                    ngx.status = 400
+                    ngx.say("invalid anthropic message format")
+                    return
+                end

Review Comment:
   The test mock server validates the Anthropic native message format (lines 
61-67), expecting content to be a table with a type field. However, there's no 
code in the Anthropic driver that performs this transformation. The driver 
would need to convert OpenAI's string-based content to Anthropic's array-based 
content structure, but the transform_request method at line 58 just copies 
content as-is. This test would fail with an actual OpenAI-format input.



##########
t/plugin/ai-proxy-anthropic.t:
##########
@@ -1,298 +1,268 @@
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
 
 use t::APISIX 'no_plan';
+use Test::Nginx::Socket::Lua;
 
-log_level("info");
 repeat_each(1);
 no_long_string();
 no_root_location();
 
-
-my $resp_file = 't/assets/openai-compatible-api-response.json';
-open(my $fh, '<', $resp_file) or die "Could not open file '$resp_file' $!";
-my $resp = do { local $/; <$fh> };
-close($fh);
-
-print "Hello, World!\n";
-print $resp;
-
-
 add_block_preprocessor(sub {
     my ($block) = @_;
 
-    if (!defined $block->request) {
-        $block->set_value("request", "GET /t");
-    }
+    my $http_config = <<'EOF';
+    server {
+        listen 1999;
+
+        location /v1/messages {
+            content_by_lua_block {
+                local core = require("apisix.core")
+                ngx.req.read_body()
+                local body = core.json.decode(ngx.req.get_body_data())
+
+                -- 1. Required Header: x-api-key
+                if ngx.var.http_x_api_key ~= "test-key" then
+                    ngx.status = 401
+                    ngx.say([[{"type":"error","error":{"type":"authentication_error","message":"invalid api key"}}]])
+                    return
+                end
 
-    my $user_yaml_config = <<_EOC_;
-plugins:
-  - ai-proxy-multi
-  - prometheus
-_EOC_
-    $block->set_value("extra_yaml_config", $user_yaml_config);
-
-    my $http_config = $block->http_config // <<_EOC_;
-        server {
-            server_name anthropic;
-            listen 6725;
-
-            default_type 'application/json';
-
-            location /v1/chat/completions {
-                content_by_lua_block {
-                    local json = require("cjson.safe")
-
-                    if ngx.req.get_method() ~= "POST" then
-                        ngx.status = 400
-                        ngx.say("Unsupported request method: ", ngx.req.get_method())
-                    end
-                    ngx.req.read_body()
-                    local body, err = ngx.req.get_body_data()
-                    body, err = json.decode(body)
-
-                    local test_type = ngx.req.get_headers()["test-type"]
-                    if test_type == "options" then
-                        if body.foo == "bar" then
-                            ngx.status = 200
-                            ngx.say("options works")
-                        else
-                            ngx.status = 500
-                            ngx.say("model options feature doesn't work")
-                        end
-                        return
-                    end
-
-                    local header_auth = ngx.req.get_headers()["authorization"]
-                    local query_auth = ngx.req.get_uri_args()["apikey"]
-
-                    if header_auth ~= "Bearer token" and query_auth ~= "apikey" then
-                        ngx.status = 401
-                        ngx.say("Unauthorized")
-                        return
-                    end
-
-                    if header_auth == "Bearer token" or query_auth == "apikey" then
-                        ngx.req.read_body()
-                        local body, err = ngx.req.get_body_data()
-                        body, err = json.decode(body)
-
-                        if not body.messages or #body.messages < 1 then
-                            ngx.status = 400
-                            ngx.say([[{ "error": "bad request"}]])
-                            return
-                        end
-                        if body.messages[1].content == "write an SQL query to get all rows from student table" then
-                            ngx.print("SELECT * FROM STUDENTS")
-                            return
-                        end
-
-                        ngx.status = 200
-                        ngx.say([[$resp]])
-                        return
-                    end
-
-
-                    ngx.status = 503
-                    ngx.say("reached the end of the test suite")
-                }
-            }
+                -- 2. Required Header: anthropic-version
+                if ngx.var.http_anthropic_version ~= "2023-06-01" then
+                    ngx.status = 400
+                    ngx.say("missing anthropic-version")
+                    return
+                end
 
-            location /random {
-                content_by_lua_block {
-                    ngx.say("path override works")
+                -- 3. Required Parameter: max_tokens
+                if not body.max_tokens then
+                    ngx.status = 400
+                    ngx.say("missing max_tokens")
+                    return
+                end
+
+                -- 4. Validate Anthropic's native message structure
+                --    Messages must have content as array with type field
+                local msg = body.messages[1]
+                if type(msg.content) ~= "table"
+                   or msg.content[1].type ~= "text" then
+                    ngx.status = 400
+                    ngx.say("invalid anthropic message format")
+                    return
+                end
+
+                -- 5. Return mock Anthropic response
+                ngx.status = 200
+                ngx.say([[
+                {
+                  "id": "msg_123",
+                  "type": "message",
+                  "role": "assistant",
+                  "content": [
+                    { "type": "text", "text": "Hello from Claude" }
+                  ],
+                  "stop_reason": "end_turn"
                 }
+                ]])
             }
         }
-_EOC_
+    }
+EOF
 
     $block->set_value("http_config", $http_config);
 });
 
-run_tests();
-
 __DATA__
 
-=== TEST 1: set route with right auth header
+=== TEST 1: Create route with Anthropic provider
 --- config
     location /t {
         content_by_lua_block {
             local t = require("lib.test_admin").test
+
+            -- Create a route that directly exposes Anthropic's native endpoint
             local code, body = t('/apisix/admin/routes/1',
-                 ngx.HTTP_PUT,
-                 [[{
-                    "uri": "/anything",
+                ngx.HTTP_PUT,
+                [[{
+                    "uri": "/v1/messages",
                     "plugins": {
-                        "ai-proxy-multi": {
-                            "instances": [
-                                {
-                                    "name": "anthropic",
-                                    "provider": "anthropic",
-                                    "weight": 1,
-                                    "auth": {
-                                        "header": {
-                                            "Authorization": "Bearer token"
-                                        }
-                                    },
-                                    "options": {
-                                        "model": "claude-sonnet-4-5",
-                                        "max_tokens": 512,
-                                        "temperature": 1.0
-                                    },
-                                    "override": {
-                                        "endpoint": "http://localhost:6725/v1/chat/completions"
-                                    }
-                                }
-                            ],
-                            "ssl_verify": false
+                        "ai-proxy": {
+                            "provider": "anthropic",
+                            "api_key": "test-key",
+                            "override": {
+                                "endpoint": "http://127.0.0.1:1999/v1/messages"
+                            }
                         }
                     }
                 }]]
             )
 
             if code >= 300 then
                 ngx.status = code
+                ngx.say(body)
+                return
             end
-            ngx.say(body)
+
+            ngx.say("route created successfully")
         }
     }
 --- response_body
-passed
+route created successfully
 
 
 
-=== TEST 2: send request
+=== TEST 2: Send Anthropic native format request
 --- request
-POST /anything
-{ "messages": [ { "role": "system", "content": "You are a mathematician" }, { 
"role": "user", "content": "What is 1+1?"} ] }
+POST /v1/messages
+{
+  "model": "claude-3",
+  "max_tokens": 128,
+  "messages": [
+    {
+      "role": "user",
+      "content": [
+        { "type": "text", "text": "Hello" }
+      ]
+    }
+  ]
+}

Review Comment:
   TEST 2 sends a request already in Anthropic's native format (lines 135-142), 
not in OpenAI format. This doesn't test the protocol translation feature that 
the PR description and documentation claim to provide. A proper test should 
send an OpenAI-style request and verify it gets translated to Anthropic format.
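   A request along the following lines would exercise the claimed translation 
path (a sketch only; the body mirrors the OpenAI-format request used 
elsewhere in this suite):
   ```
   --- request
   POST /v1/messages
   { "messages": [ { "role": "system", "content": "You are a mathematician" }, { "role": "user", "content": "What is 1+1?" } ] }
   ```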



##########
docs/en/latest/plugins/ai-proxy-anthropic.md:
##########
@@ -0,0 +1,84 @@
+---
+title: AI Proxy with Anthropic Provider
+keywords:
+  - Apache APISIX
+  - API Gateway
+  - Plugin
+  - ai-proxy
+  - Anthropic
+  - Claude
+description: This document provides a comprehensive guide on using the Anthropic provider within the ai-proxy plugin, detailing configuration, protocol translation, and usage examples.
+---
+
+## Description
+
+The `ai-proxy` plugin enables seamless integration with **Anthropic (Claude)** as a native provider. This plugin acts as a high-performance translation layer, allowing you to use standard OpenAI-style requests to interact with Claude models via Anthropic's native Messages API.
+
+## Attributes
+
+When the `provider` is set to `anthropic`, the following attributes are used to configure the connection:
+
+| Name | Type | Required | Default | Description |
+| :--- | :--- | :--- | :--- | :--- |
+| provider | string | Yes | | Must be set to `anthropic`. |
+| model | string | Yes | | The Anthropic model ID (e.g., `claude-3-5-sonnet-20240620`). |
+| api_key | string | Yes | | Your Anthropic API key for authentication. |
+| override.endpoint | string | No | `https://api.anthropic.com/v1/messages` | Custom endpoint for the Anthropic provider. |
+
+## Usage
+
+APISIX automatically performs "protocol translation" when using the Anthropic provider. This ensures that your existing OpenAI-compatible applications can switch to Claude models without any code modifications.
+
+### Protocol Translation Details
+
+1. **System Prompt Handling**: OpenAI embeds system instructions within the `messages` array. APISIX automatically extracts these and maps them to Anthropic's mandatory top-level `system` field.
+2. **Header Adaptation**:
+   - Translates `Authorization: Bearer <key>` to `x-api-key: <key>`.
+   - Automatically injects the `anthropic-version: 2023-06-01` header.
+3. **Response Conversion**: Anthropic's response format is converted back to the OpenAI-compatible structure, including token usage statistics.
+
+## Example
+
+### Basic Configuration
+
+The following example shows how to configure the `ai-proxy` plugin with the Anthropic provider. Note that the `uri` is set to Anthropic's native endpoint:
+
+```json
+{
+    "uri": "/v1/messages",
+    "plugins": {
+        "ai-proxy": {
+            "provider": "anthropic",
+            "model": "claude-3-5-sonnet-20240620",
+            "api_key": "your-anthropic-api-key"
+        }
+    },
+    "upstream": {
+        "type": "roundrobin",
+        "nodes": {
+            "api.anthropic.com:443": 1
+        },
+        "pass_host": "host",
+        "scheme": "https"
+    }
+}
+```
+
+### Request Example
+
+Once configured, you can send a standard OpenAI-style request to the configured URI:
+
+```bash
+curl http://127.0.0.1:9080/v1/messages -X POST \
+-H "Content-Type: application/json" \
+-d '{
+    "model": "gpt-4",

Review Comment:
   The documentation example specifies model as "gpt-4" (line 75), which is an 
OpenAI model identifier, not an Anthropic model. According to the attributes 
table (line 24), the model should be an Anthropic model ID like 
"claude-3-5-sonnet-20240620". Using an OpenAI model name in the example is 
confusing and would likely fail with the actual Anthropic API.
   ```suggestion
       "model": "claude-3-5-sonnet-20240620",
   ```



##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:transform_request(conf, request_table)
+    local anthropic_body = {
+        model = conf.model,
+        messages = {},
+        max_tokens = request_table.max_tokens or 1024,
+        stream = request_table.stream,
     }
-)
+
+    -- Protocol Translation: Extract system prompt
+    for _, msg in ipairs(request_table.messages) do
+        if msg.role == "system" then
+            anthropic_body.system = msg.content
+        else
+            core.table.insert(anthropic_body.messages, {
+                role = msg.role,
+                content = msg.content
+            })
+        end
+    end
+
+    local headers = {
+        ["Content-Type"] = "application/json",
+        ["x-api-key"] = conf.api_key,
+        ["anthropic-version"] = ANTHROPIC_VERSION,
+    }
+
+    return anthropic_body, headers
+end
+
+function _M:transform_response(response_body)
+    local body = core.json.decode(response_body)
+    if not body or not body.content then
+        return nil, "invalid response from anthropic"
+    end
+
+    return {
+        id = body.id,
+        object = "chat.completion",
+        created = os.time(),
+        model = body.model,
+        choices = {
+            {
+                index = 0,
+                message = {
+                    role = "assistant",
+                    content = body.content[1].text,
+                },
+                finish_reason = FINISH_REASON_MAP[body.stop_reason] or "stop"
+            }
+        },
+        usage = {
+            prompt_tokens = body.usage.input_tokens,
+            completion_tokens = body.usage.output_tokens,
+            total_tokens = body.usage.input_tokens + body.usage.output_tokens
+        }
+    }
+end
+

Review Comment:
   The transform_response method is never called by the ai-proxy plugin 
framework. Looking at openai-base.lua, response processing happens within the 
`request` method via the `read_response` function (lines 79-193). The Anthropic 
driver would need to implement its own response reading logic within a 
`request` method, or the base plugin framework would need to be modified to 
support a transform_response callback.
   ```suggestion
   
   ```



##########
apisix/plugins/ai-drivers/ai-driver-base.lua:
##########
@@ -0,0 +1,47 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+local http = require("resty.http" )
+local core = require("apisix.core")
+
+local _M = {}
+local mt = { __index = _M }
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:request(url, body, headers, timeout)
+    local httpc = http.new( )
+    if timeout then
+        httpc:set_timeout(timeout )
+    end
+
+    local res, err = httpc:request_uri(url, {
+        method = "POST",
+        body = core.json.encode(body ),

Review Comment:
   The base driver has stray spaces before the closing parentheses of several 
calls (lines 17, 28, 30, 35). While minor, this is inconsistent with the 
codebase style seen in other driver files.
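   For comparison, the affected lines without the stray spaces:
   ```lua
   local http = require("resty.http")    -- was: require("resty.http" )
   local httpc = http.new()              -- was: http.new( )
   httpc:set_timeout(timeout)            -- was: set_timeout(timeout )
   body = core.json.encode(body),        -- was: core.json.encode(body ),
   ```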



##########
docs/en/latest/plugins/ai-proxy-anthropic.md:
##########
@@ -0,0 +1,84 @@
+---
+title: AI Proxy with Anthropic Provider
+keywords:
+  - Apache APISIX
+  - API Gateway
+  - Plugin
+  - ai-proxy
+  - Anthropic
+  - Claude
+description: This document provides a comprehensive guide on using the Anthropic provider within the ai-proxy plugin, detailing configuration, protocol translation, and usage examples.
+---
+
+## Description
+
+The `ai-proxy` plugin enables seamless integration with **Anthropic (Claude)** as a native provider. This plugin acts as a high-performance translation layer, allowing you to use standard OpenAI-style requests to interact with Claude models via Anthropic's native Messages API.
+
+## Attributes
+
+When the `provider` is set to `anthropic`, the following attributes are used to configure the connection:
+
+| Name | Type | Required | Default | Description |
+| :--- | :--- | :--- | :--- | :--- |
+| provider | string | Yes | | Must be set to `anthropic`. |
+| model | string | Yes | | The Anthropic model ID (e.g., `claude-3-5-sonnet-20240620`). |
+| api_key | string | Yes | | Your Anthropic API key for authentication. |
+| override.endpoint | string | No | `https://api.anthropic.com/v1/messages` | Custom endpoint for the Anthropic provider. |
+
+## Usage
+
+APISIX automatically performs "protocol translation" when using the Anthropic provider. This ensures that your existing OpenAI-compatible applications can switch to Claude models without any code modifications.
+
+### Protocol Translation Details
+
+1. **System Prompt Handling**: OpenAI embeds system instructions within the `messages` array. APISIX automatically extracts these and maps them to Anthropic's mandatory top-level `system` field.
+2. **Header Adaptation**:
+   - Translates `Authorization: Bearer <key>` to `x-api-key: <key>`.
+   - Automatically injects the `anthropic-version: 2023-06-01` header.
+3. **Response Conversion**: Anthropic's response format is converted back to the OpenAI-compatible structure, including token usage statistics.
+
+## Example
+
+### Basic Configuration
+
+The following example shows how to configure the `ai-proxy` plugin with the Anthropic provider. Note that the `uri` is set to Anthropic's native endpoint:
+
+```json
+{
+    "uri": "/v1/messages",
+    "plugins": {
+        "ai-proxy": {
+            "provider": "anthropic",
+            "model": "claude-3-5-sonnet-20240620",
+            "api_key": "your-anthropic-api-key"
+        }
+    },
+    "upstream": {
+        "type": "roundrobin",
+        "nodes": {
+            "api.anthropic.com:443": 1
+        },
+        "pass_host": "host",
+        "scheme": "https"
+    }
+}
+```
+
+### Request Example
+
+Once configured, you can send a standard OpenAI-style request to the configured URI:
+
+```bash
+curl http://127.0.0.1:9080/v1/messages -X POST \
+-H "Content-Type: application/json" \
+-d '{
+    "model": "gpt-4",
+    "messages": [
+        {"role": "system", "content": "You are a professional translator."},
+        {"role": "user", "content": "Translate the following to French: Hello, how are you?"}
+    ],
+    "max_tokens": 500
+}'
+```

Review Comment:
   The request example shows sending an OpenAI-style request with string 
content (lines 76-78), but claims it will be transformed to Anthropic-native 
format. However, the test file shows that Anthropic's native format requires 
content to be an array of objects (lines 138-140). The documentation should 
either show the actual Anthropic native format that must be sent, or clarify 
that the OpenAI-to-Anthropic translation feature is not yet implemented.
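   For illustration, a request in the native format that the mock server (and, 
absent translation, presumably the real API) would accept; the model and 
prompt values are carried over from the example above:
   ```bash
   curl http://127.0.0.1:9080/v1/messages -X POST \
   -H "Content-Type: application/json" \
   -d '{
       "model": "claude-3-5-sonnet-20240620",
       "max_tokens": 500,
       "system": "You are a professional translator.",
       "messages": [
           {
               "role": "user",
               "content": [
                   { "type": "text", "text": "Translate the following to French: Hello, how are you?" }
               ]
           }
       ]
   }'
   ```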



##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:transform_request(conf, request_table)
+    local anthropic_body = {
+        model = conf.model,
+        messages = {},
+        max_tokens = request_table.max_tokens or 1024,
+        stream = request_table.stream,
     }
-)
+
+    -- Protocol Translation: Extract system prompt
+    for _, msg in ipairs(request_table.messages) do
+        if msg.role == "system" then
+            anthropic_body.system = msg.content
+        else
+            core.table.insert(anthropic_body.messages, {
+                role = msg.role,
+                content = msg.content
+            })
+        end
+    end

Review Comment:
   The transform_request method doesn't validate that request_table.messages 
exists or is non-empty before iterating over it (line 52). If called with an 
invalid request, this would cause a runtime error. The validate_request method 
should catch this, but since that method is missing from this driver, there's 
no validation happening.
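   A minimal guard sketch that could sit at the top of transform_request; the 
error-return convention here (nil body, nil headers, message) is an 
assumption:
   ```lua
   -- Assumed error convention: callers check the third return value.
   if type(request_table) ~= "table"
      or type(request_table.messages) ~= "table"
      or #request_table.messages == 0 then
       return nil, nil, "request must contain a non-empty messages array"
   end
   ```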



##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:transform_request(conf, request_table)
+    local anthropic_body = {
+        model = conf.model,
+        messages = {},
+        max_tokens = request_table.max_tokens or 1024,
+        stream = request_table.stream,
     }
-)
+
+    -- Protocol Translation: Extract system prompt
+    for _, msg in ipairs(request_table.messages) do
+        if msg.role == "system" then
+            anthropic_body.system = msg.content
+        else
+            core.table.insert(anthropic_body.messages, {
+                role = msg.role,
+                content = msg.content

Review Comment:
   The transform_request method assumes messages content is always a string 
(line 58), but according to the test file (lines 138-140), Anthropic's native 
format uses content as an array of objects with type and text fields. This code 
would copy OpenAI-style string content directly, which would not match 
Anthropic's expected format and would fail the validation in the test mock 
server (lines 62-66).
   ```suggestion
               local content = msg.content
               if type(content) == "string" then
                   content = {
                       {
                           type = "text",
                           text = content,
                       }
                   }
               end
               core.table.insert(anthropic_body.messages, {
                   role = msg.role,
                   content = content,
   ```



##########
apisix/plugins/ai-drivers/ai-driver-base.lua:
##########
@@ -0,0 +1,47 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+local http = require("resty.http" )
+local core = require("apisix.core")
+
+local _M = {}
+local mt = { __index = _M }
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:request(url, body, headers, timeout)
+    local httpc = http.new( )
+    if timeout then
+        httpc:set_timeout(timeout )
+    end
+
+    local res, err = httpc:request_uri(url, {
+        method = "POST",
+        body = core.json.encode(body ),
+        headers = headers,
+        ssl_verify = false,

Review Comment:
   The `request` helper unconditionally sets `ssl_verify = false` when calling 
`httpc:request_uri`, which disables TLS certificate validation for all outbound 
AI provider requests. This allows any on-path attacker (e.g., via DNS poisoning 
or network MITM) to impersonate the Anthropic endpoint and capture prompts, 
responses, and API keys. Update this logic to validate TLS certificates by 
default (using `ssl_verify = true` or equivalent) and only allow disabling 
verification explicitly in tightly controlled test or debug configurations.
   ```suggestion
       -- Enable TLS certificate verification by default; allow explicit opt-out
       -- via self.ssl_verify == false for tightly controlled test/debug setups.
       local ssl_verify = true
       if self and self.ssl_verify == false then
           ssl_verify = false
       end
   
       local res, err = httpc:request_uri(url, {
           method = "POST",
           body = core.json.encode(body ),
           headers = headers,
           ssl_verify = ssl_verify,
   ```



##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:transform_request(conf, request_table)
+    local anthropic_body = {
+        model = conf.model,
+        messages = {},
+        max_tokens = request_table.max_tokens or 1024,
+        stream = request_table.stream,
     }
-)
+
+    -- Protocol Translation: Extract system prompt
+    for _, msg in ipairs(request_table.messages) do
+        if msg.role == "system" then
+            anthropic_body.system = msg.content
+        else
+            core.table.insert(anthropic_body.messages, {
+                role = msg.role,
+                content = msg.content
+            })
+        end
+    end
+
+    local headers = {
+        ["Content-Type"] = "application/json",
+        ["x-api-key"] = conf.api_key,
+        ["anthropic-version"] = ANTHROPIC_VERSION,
+    }
+
+    return anthropic_body, headers
+end
+
+function _M:transform_response(response_body)
+    local body = core.json.decode(response_body)
+    if not body or not body.content then
+        return nil, "invalid response from anthropic"
+    end
+
+    return {
+        id = body.id,
+        object = "chat.completion",
+        created = os.time(),
+        model = body.model,
+        choices = {
+            {
+                index = 0,
+                message = {
+                    role = "assistant",
+                    content = body.content[1].text,
+                },
+                finish_reason = FINISH_REASON_MAP[body.stop_reason] or "stop"
+            }
+        },
+        usage = {
+            prompt_tokens = body.usage.input_tokens,
+            completion_tokens = body.usage.output_tokens,
+            total_tokens = body.usage.input_tokens + body.usage.output_tokens
+        }
+    }
+end
+
+return _M

Review Comment:
   The Anthropic driver is missing the required `validate_request` method that 
is called by the ai-proxy base plugin (see 
apisix/plugins/ai-proxy/base.lua:56). This method must be implemented to 
validate incoming requests. Looking at the openai-base.lua driver (lines 
56-68), the method should validate Content-Type headers and decode the JSON 
request body.
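   A sketch of what the missing method might look like, loosely modeled on the 
openai-base.lua behavior cited above (check the Content-Type, decode the JSON 
body); the exact signature and return convention are assumptions:
   ```lua
   function _M.validate_request(ctx)
       local content_type = core.request.header(ctx, "Content-Type")
                            or "application/json"
       if not core.string.has_prefix(content_type, "application/json") then
           return nil, "unsupported content-type: " .. content_type
       end

       local body, err = core.request.get_body()
       if not body then
           return nil, "failed to get request body: " .. (err or "unknown")
       end

       local request_table, decode_err = core.json.decode(body)
       if not request_table then
           return nil, "request body is not valid JSON: " .. (decode_err or "unknown")
       end

       return request_table
   end
   ```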



##########
apisix/plugins/ai-drivers/ai-driver-base.lua:
##########
@@ -0,0 +1,47 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+local http = require("resty.http" )
+local core = require("apisix.core")
+
+local _M = {}
+local mt = { __index = _M }
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:request(url, body, headers, timeout)
+    local httpc = http.new( )
+    if timeout then
+        httpc:set_timeout(timeout )
+    end
+
+    local res, err = httpc:request_uri(url, {
+        method = "POST",
+        body = core.json.encode(body ),
+        headers = headers,
+        ssl_verify = false,
+    })
+
+    if not res then
+        return nil, "failed to request AI provider: " .. err
+    end
+

Review Comment:
   The ai-driver-base.lua provides a simple request method, but this is 
insufficient for AI driver implementations. The method lacks critical 
functionality that drivers need: 1) streaming response handling (SSE), 2) 
context variable updates for logging/metrics, 3) response filtering hooks, 4) 
token usage extraction, and 5) proper error handling. Looking at 
openai-base.lua:196-290, a proper AI driver request method requires 
significantly more complexity. This base implementation cannot serve as a 
functional parent for AI drivers.
   ```suggestion
   -- Generic request helper for AI drivers.
   -- Supports:
   --   * Optional streaming responses (SSE-style) via opts.stream / opts.on_chunk
   --   * Context updates for logging/metrics via opts.ctx or self.ctx
   --   * Response filtering hooks via opts.filter_response or self.filter_response
   --   * Optional token usage extraction via opts.extract_usage or self.extract_usage
   --   * Improved error handling and status code checking
   function _M:request(url, body, headers, timeout, opts)
       opts = opts or {}
       headers = headers or {}

       local ctx = opts.ctx or self.ctx
       local filter_response = opts.filter_response or self.filter_response
       local extract_usage = opts.extract_usage or self.extract_usage
       local on_chunk = opts.on_chunk or self.on_chunk
       local stream = opts.stream == true

       local httpc, err = http.new()
       if not httpc then
           return nil, "failed to create HTTP client for AI provider: " .. (err or "unknown error")
       end

       local effective_timeout = timeout or opts.timeout
       if effective_timeout then
           httpc:set_timeout(effective_timeout)
       end

       local encoded_body, encode_err = core.json.encode(body)
       if not encoded_body then
           return nil, "failed to encode request body as JSON: " .. (encode_err or "unknown error")
       end

       if ctx then
           ctx.bytes_sent = (ctx.bytes_sent or 0) + #encoded_body
       end

       local params = {
           method = "POST",
           body = encoded_body,
           headers = headers,
           ssl_verify = false,
       }

       if stream then
           params.stream = true
           params.keepalive = false
       end

       local res, req_err = httpc:request_uri(url, params)
       if not res then
           return nil, "failed to request AI provider: " .. (req_err or "unknown error")
       end

       if ctx then
           ctx.last_status = res.status
       end

       -- Treat HTTP 4xx/5xx as errors for callers, but still expose details.
       if res.status >= 400 then
           local msg = "AI provider returned HTTP status " .. tostring(res.status)
           if res.body and res.body ~= "" then
               msg = msg .. ": " .. res.body
           end
           return nil, msg
       end

       -- Streaming mode: read chunks and feed them to callbacks / filters.
       if stream then
           local reader = res.body_reader
           if not reader then
               return nil, "streaming requested but no body_reader available from AI provider response"
           end

           while true do
               local chunk, read_err = reader(8192)

               if not chunk then
                   if read_err and read_err ~= "closed" then
                       return nil, "failed to read AI provider stream: " .. read_err
                   end
                   break
               end

               if chunk ~= "" then
                   if ctx then
                       ctx.bytes_received = (ctx.bytes_received or 0) + #chunk
                   end

                   if filter_response and type(filter_response) == "function" then
                       local filtered, ferr = filter_response(chunk, ctx)
                       if ferr then
                           return nil, "response filter error: " .. tostring(ferr)
                       end
                       chunk = filtered or ""
                   end

                   if on_chunk and type(on_chunk) == "function" and chunk ~= "" then
                       local ok_cb, cb_err = pcall(on_chunk, chunk, ctx)
                       if not ok_cb then
                           return nil, "on_chunk callback failed: " .. tostring(cb_err)
                       end
                   end
               end
           end

           -- For streaming, return the response metadata but without relying on body.
           res.body = nil
           return res
       end

       -- Non-streaming response handling.
       if res.body and ctx then
           ctx.bytes_received = (ctx.bytes_received or 0) + #res.body
       end

       -- Apply optional filtering on the full body.
       if filter_response and type(filter_response) == "function" and res.body then
           local filtered, ferr = filter_response(res.body, ctx)
           if ferr then
               return nil, "response filter error: " .. tostring(ferr)
           end
           res.body = filtered or res.body
       end

       -- Optional token usage extraction from JSON body.
       if extract_usage and type(extract_usage) == "function" and res.body then
           local ok_dec, decoded = pcall(core.json.decode, res.body)
           if ok_dec and decoded then
               local ok_ext, usage = pcall(extract_usage, decoded, ctx)
               if ok_ext and usage and ctx then
                   ctx.token_usage = (ctx.token_usage or 0) + (usage or 0)
               end
           end
       end
   ```



##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:transform_request(conf, request_table)
+    local anthropic_body = {
+        model = conf.model,
+        messages = {},
+        max_tokens = request_table.max_tokens or 1024,
+        stream = request_table.stream,
     }
-)
+
+    -- Protocol Translation: Extract system prompt
+    for _, msg in ipairs(request_table.messages) do
+        if msg.role == "system" then
+            anthropic_body.system = msg.content
+        else
+            core.table.insert(anthropic_body.messages, {
+                role = msg.role,
+                content = msg.content
+            })
+        end
+    end
+
+    local headers = {
+        ["Content-Type"] = "application/json",
+        ["x-api-key"] = conf.api_key,
+        ["anthropic-version"] = ANTHROPIC_VERSION,
+    }
+
+    return anthropic_body, headers
+end
+
+function _M:transform_response(response_body)
+    local body = core.json.decode(response_body)
+    if not body or not body.content then
+        return nil, "invalid response from anthropic"
+    end
+
+    return {
+        id = body.id,
+        object = "chat.completion",
+        created = os.time(),
+        model = body.model,
+        choices = {
+            {
+                index = 0,
+                message = {
+                    role = "assistant",
+                    content = body.content[1].text,
+                },
+                finish_reason = FINISH_REASON_MAP[body.stop_reason] or "stop"
+            }
+        },
+        usage = {
+            prompt_tokens = body.usage.input_tokens,
+            completion_tokens = body.usage.output_tokens,
+            total_tokens = body.usage.input_tokens + body.usage.output_tokens
+        }
+    }
+end
+
+return _M

Review Comment:
   The Anthropic driver is missing the required `request` method that is called 
by the ai-proxy base plugin (see apisix/plugins/ai-proxy/base.lua:87). This is 
the main method that sends the HTTP request to the AI provider. Looking at 
openai-base.lua (lines 196-290), this method should handle HTTP connection, 
request sending, response reading (including streaming), and connection pooling.
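   A rough skeleton of the kind of method being asked for, leaning on the 
driver's own transform helpers; connection pooling and SSE handling are 
elided, and every name beyond those in the diff is an assumption:
   ```lua
   local http = require("resty.http")

   function _M:request(ctx, conf, request_table)
       local body, headers = self:transform_request(conf, request_table)

       local endpoint = (conf.override and conf.override.endpoint)
                        or ("https://" .. self.host .. self.path)

       local httpc = http.new()
       httpc:set_timeout(conf.timeout or 30000)  -- assumed config field

       local res, err = httpc:request_uri(endpoint, {
           method = "POST",
           body = core.json.encode(body),
           headers = headers,
           ssl_verify = true,  -- see the TLS review comment above
       })
       if not res then
           return nil, "failed to request anthropic: " .. err
       end

       -- non-streaming only; SSE handling would go here
       return self:transform_response(res.body)
   end
   ```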



##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:transform_request(conf, request_table)
+    local anthropic_body = {
+        model = conf.model,
+        messages = {},
+        max_tokens = request_table.max_tokens or 1024,
+        stream = request_table.stream,

Review Comment:
   The driver doesn't implement streaming (SSE) support. While 
transform_request copies the stream parameter (line 48), there's no code to 
handle SSE response parsing. Anthropic uses a different SSE event format than 
OpenAI (event types like "message_start", "content_block_delta", etc.), which 
would need custom parsing logic. Without a proper `request` method that handles 
streaming responses, the stream parameter is non-functional.
   ```suggestion
   
   ```
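   For context, Anthropic streams responses as named SSE events rather than 
OpenAI's uniform data-only chunks, so a stream parser would need to dispatch 
on the event name:
   ```lua
   -- Event types emitted by Anthropic's Messages API when stream = true.
   local SSE_EVENTS = {
       "message_start",
       "content_block_start",
       "content_block_delta",
       "content_block_stop",
       "message_delta",
       "message_stop",
       "ping",
   }
   ```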



##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:transform_request(conf, request_table)
+    local anthropic_body = {
+        model = conf.model,

Review Comment:
   The transform_request method always uses conf.model (line 45), which means 
it ignores the model specified in the request_table. Looking at 
openai-base.lua:254-261, the standard pattern is to allow model_options to 
override request parameters, but also to preserve the request's model if no 
override is specified. This implementation prevents users from specifying 
different models per request.
   ```suggestion
       local model = (conf.model_options and conf.model_options.model)
                      or request_table.model
                      or conf.model
   
       local anthropic_body = {
           model = model,
   ```



##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:transform_request(conf, request_table)
+    local anthropic_body = {
+        model = conf.model,
+        messages = {},
+        max_tokens = request_table.max_tokens or 1024,
+        stream = request_table.stream,
     }
-)
+
+    -- Protocol Translation: Extract system prompt
+    for _, msg in ipairs(request_table.messages) do
+        if msg.role == "system" then
+            anthropic_body.system = msg.content
+        else
+            core.table.insert(anthropic_body.messages, {
+                role = msg.role,
+                content = msg.content
+            })
+        end
+    end
+
+    local headers = {
+        ["Content-Type"] = "application/json",
+        ["x-api-key"] = conf.api_key,
+        ["anthropic-version"] = ANTHROPIC_VERSION,
+    }
+
+    return anthropic_body, headers
+end
+
+function _M:transform_response(response_body)
+    local body = core.json.decode(response_body)
+    if not body or not body.content then
+        return nil, "invalid response from anthropic"
+    end
+
+    return {
+        id = body.id,
+        object = "chat.completion",
+        created = os.time(),
+        model = body.model,
+        choices = {
+            {
+                index = 0,
+                message = {
+                    role = "assistant",
+                    content = body.content[1].text,

Review Comment:
   The transform_response method attempts to access body.content[1].text 
without verifying that body.content is an array or that it has at least one 
element. This will cause a runtime error if the Anthropic API returns an empty 
content array or if content[1] doesn't have a text field. Proper nil checks 
should be added before accessing nested fields.
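
   For example, a guard along these lines (sketch; error message illustrative):

```lua
-- Sketch: validate the nested shape before dereferencing it
local first = body.content and body.content[1]
if type(first) ~= "table" or type(first.text) ~= "string" then
    return nil, "unexpected content structure in anthropic response"
end
```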



##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end
+
+function _M:transform_request(conf, request_table)
+    local anthropic_body = {
+        model = conf.model,
+        messages = {},
+        max_tokens = request_table.max_tokens or 1024,
+        stream = request_table.stream,
     }
-)
+
+    -- Protocol Translation: Extract system prompt
+    for _, msg in ipairs(request_table.messages) do
+        if msg.role == "system" then
+            anthropic_body.system = msg.content
+        else
+            core.table.insert(anthropic_body.messages, {
+                role = msg.role,
+                content = msg.content
+            })
+        end
+    end
+
+    local headers = {
+        ["Content-Type"] = "application/json",
+        ["x-api-key"] = conf.api_key,
+        ["anthropic-version"] = ANTHROPIC_VERSION,
+    }
+
+    return anthropic_body, headers
+end
+
+function _M:transform_response(response_body)
+    local body = core.json.decode(response_body)
+    if not body or not body.content then
+        return nil, "invalid response from anthropic"
+    end
+
+    return {
+        id = body.id,
+        object = "chat.completion",
+        created = os.time(),
+        model = body.model,
+        choices = {
+            {
+                index = 0,
+                message = {
+                    role = "assistant",
+                    content = body.content[1].text,
+                },
+                finish_reason = FINISH_REASON_MAP[body.stop_reason] or "stop"
+            }
+        },
+        usage = {
+            prompt_tokens = body.usage.input_tokens,
+            completion_tokens = body.usage.output_tokens,
+            total_tokens = body.usage.input_tokens + body.usage.output_tokens
+        }

Review Comment:
   The transform_response method attempts to access body.usage.input_tokens and 
body.usage.output_tokens without checking if body.usage exists. If Anthropic's 
response doesn't include usage information, this will cause a runtime error. 
Proper nil checks should be added.
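
   For example (sketch; defaulting to zero is one possible policy):

```lua
-- Sketch: tolerate a missing usage block instead of erroring
local usage = body.usage or {}
local prompt_tokens = usage.input_tokens or 0
local completion_tokens = usage.output_tokens or 0

local openai_usage = {
    prompt_tokens = prompt_tokens,
    completion_tokens = completion_tokens,
    total_tokens = prompt_tokens + completion_tokens,
}
```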



##########
apisix/plugins/ai-drivers/anthropic.lua:
##########
@@ -15,10 +15,87 @@
 -- limitations under the License.
 --
 
-return require("apisix.plugins.ai-drivers.openai-base").new(
-    {
-        host = "api.anthropic.com",
-        path = "/v1/chat/completions",
-        port = 443
+local base = require("apisix.plugins.ai-drivers.ai-driver-base")
+local core = require("apisix.core")
+local setmetatable = setmetatable
+
+local _M = { 
+    name = "anthropic",
+    host = "api.anthropic.com",
+    path = "/v1/messages",
+    port = 443,
+}
+
+local mt = { __index = setmetatable(_M, { __index = base }) }
+
+local ANTHROPIC_VERSION = "2023-06-01"
+local FINISH_REASON_MAP = {
+    ["end_turn"] = "stop",
+    ["max_tokens"] = "length",
+    ["stop_sequence"] = "stop",
+    ["tool_use"] = "tool_calls",
+}
+
+function _M.new(opts)
+    return setmetatable(opts or {}, mt)
+end

Review Comment:
   The metatable setup (line 29) points the instance metatable's __index at _M, 
whose own metatable has __index = base, so lookups fall through from the 
instance to _M and then to base. However, this is not proper inheritance: _M's 
methods never delegate to the parent, and base.new is never called, so any 
initialization the base performs is skipped. The correct pattern would be 
either to: 1) have _M.new call base.new and then set up the metatable chain, 
or 2) inherit directly the way openai.lua does, by returning an instance from 
openai-base.new().
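
   A hedged sketch of option 1, assuming ai-driver-base exposes a new() constructor (verify against the actual base module):

```lua
-- Sketch only: assumes base.new() exists and returns a plain table.
function _M.new(opts)
    local instance = base.new(opts or {})  -- let the parent set defaults
    -- single lookup chain: instance -> _M -> base
    return setmetatable(instance, {
        __index = setmetatable(_M, { __index = base }),
    })
end
```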



##########
docs/en/latest/plugins/ai-proxy-anthropic.md:
##########
@@ -0,0 +1,84 @@
+---
+title: AI Proxy with Anthropic Provider
+keywords:
+  - Apache APISIX
+  - API Gateway
+  - Plugin
+  - ai-proxy
+  - Anthropic
+  - Claude
+description: This document provides a comprehensive guide on using the Anthropic provider within the ai-proxy plugin, detailing configuration, protocol translation, and usage examples.
+---
+
+## Description
+
+The \`ai-proxy\` plugin enables seamless integration with **Anthropic 
(Claude)** as a native provider. This plugin acts as a high-performance 
translation layer, allowing you to use standard OpenAI-style requests to 
interact with Claude models via Anthropic's native Messages API.
+
+## Attributes
+
+When the \`provider\` is set to \`anthropic\`, the following attributes are 
used to configure the connection:
+
+| Name | Type | Required | Default | Description |
+| :--- | :--- | :--- | :--- | :--- |
+| provider | string | Yes | | Must be set to \`anthropic\`. |
+| model | string | Yes | | The Anthropic model ID (e.g., \`claude-3-5-sonnet-20240620\`). |
+| api_key | string | Yes | | Your Anthropic API key for authentication. |
+| override.endpoint | string | No | \`https://api.anthropic.com/v1/messages\` | Custom endpoint for the Anthropic provider. |
+
+## Usage
+
+APISIX automatically performs "protocol translation" when using the Anthropic 
provider. This ensures that your existing OpenAI-compatible applications can 
switch to Claude models without any code modifications.
+
+### Protocol Translation Details
+
+1. **System Prompt Handling**: OpenAI embeds system instructions within the 
\`messages\` array. APISIX automatically extracts these and maps them to 
Anthropic's mandatory top-level \`system\` field.
+2. **Header Adaptation**:
+   - Translates \`Authorization: Bearer <key>\` to \`x-api-key: <key>\`.
+   - Automatically injects the \`anthropic-version: 2023-06-01\` header.
+3. **Response Conversion**: Anthropic's response format is converted back to 
the OpenAI-compatible structure, including token usage statistics.
+
+## Example
+
+### Basic Configuration
+
+The following example shows how to configure the \`ai-proxy\` plugin with the 
Anthropic provider. Note that the \`uri\` is set to Anthropic's native endpoint:
+
+\`\`\`json
+{
+    "uri": "/v1/messages",
+    "plugins": {
+        "ai-proxy": {
+            "provider": "anthropic",
+            "model": "claude-3-5-sonnet-20240620",
+            "api_key": "your-anthropic-api-key"
+        }
+    },
+    "upstream": {
+        "type": "roundrobin",
+        "nodes": {
+            "api.anthropic.com:443": 1
+        },
+        "pass_host": "host",
+        "scheme": "https"
+    }

Review Comment:
   The example configuration specifies an upstream block (lines 56-63), but 
this conflicts with how the ai-proxy plugin works. The plugin uses 
ctx.bypass_nginx_upstream = true (ai-proxy.lua:51) to bypass the nginx upstream 
and handle the HTTP request directly in the driver. The upstream configuration 
shown here would not be used. The correct approach is to use the 
override.endpoint attribute within the plugin configuration, as shown in the 
test file (lines 107-109).
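
   A corrected sketch of the example, as the review implies (attribute names should be checked against the plugin schema; no upstream block is needed):

```json
{
    "uri": "/v1/chat/completions",
    "plugins": {
        "ai-proxy": {
            "provider": "anthropic",
            "model": "claude-3-5-sonnet-20240620",
            "api_key": "your-anthropic-api-key",
            "override": {
                "endpoint": "https://api.anthropic.com/v1/messages"
            }
        }
    }
}
```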



##########
docs/en/latest/plugins/ai-proxy-anthropic.md:
##########
@@ -0,0 +1,84 @@
+---
+title: AI Proxy with Anthropic Provider
+keywords:
+  - Apache APISIX
+  - API Gateway
+  - Plugin
+  - ai-proxy
+  - Anthropic
+  - Claude
+description: This document provides a comprehensive guide on using the Anthropic provider within the ai-proxy plugin, detailing configuration, protocol translation, and usage examples.
+---
+
+## Description
+
+The \`ai-proxy\` plugin enables seamless integration with **Anthropic 
(Claude)** as a native provider. This plugin acts as a high-performance 
translation layer, allowing you to use standard OpenAI-style requests to 
interact with Claude models via Anthropic's native Messages API.
+
+## Attributes
+
+When the \`provider\` is set to \`anthropic\`, the following attributes are 
used to configure the connection:
+
+| Name | Type | Required | Default | Description |
+| :--- | :--- | :--- | :--- | :--- |
+| provider | string | Yes | | Must be set to \`anthropic\`. |
+| model | string | Yes | | The Anthropic model ID (e.g., \`claude-3-5-sonnet-20240620\`). |
+| api_key | string | Yes | | Your Anthropic API key for authentication. |
+| override.endpoint | string | No | \`https://api.anthropic.com/v1/messages\` | Custom endpoint for the Anthropic provider. |
+
+## Usage
+
+APISIX automatically performs "protocol translation" when using the Anthropic 
provider. This ensures that your existing OpenAI-compatible applications can 
switch to Claude models without any code modifications.
+
+### Protocol Translation Details
+
+1. **System Prompt Handling**: OpenAI embeds system instructions within the 
\`messages\` array. APISIX automatically extracts these and maps them to 
Anthropic's mandatory top-level \`system\` field.
+2. **Header Adaptation**:
+   - Translates \`Authorization: Bearer <key>\` to \`x-api-key: <key>\`.
+   - Automatically injects the \`anthropic-version: 2023-06-01\` header.
+3. **Response Conversion**: Anthropic's response format is converted back to 
the OpenAI-compatible structure, including token usage statistics.
+
+## Example
+
+### Basic Configuration
+
+The following example shows how to configure the \`ai-proxy\` plugin with the 
Anthropic provider. Note that the \`uri\` is set to Anthropic's native endpoint:
+
+\`\`\`json
+{
+    "uri": "/v1/messages",

Review Comment:
   The documentation describes the uri as set to "/v1/messages" (line 48), but 
this is Anthropic's endpoint path, not the APISIX route URI. For the ai-proxy 
plugin, the URI typically matches the client's request path (e.g., 
"/v1/chat/completions" for OpenAI compatibility). The override.endpoint should 
specify the full Anthropic URL. This example configuration would not work as 
described.
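
   Under that corrected layout, an unmodified OpenAI-style client request would hit the route path, not Anthropic's path (illustrative request only, following the format used in the test file):

```
POST /v1/chat/completions
{
  "messages": [
    { "role": "system", "content": "You are a helpful assistant" },
    { "role": "user", "content": "Hello" }
  ]
}
```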



##########
t/plugin/ai-proxy-anthropic.t:
##########
@@ -1,298 +1,268 @@
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
 
 use t::APISIX 'no_plan';
+use Test::Nginx::Socket::Lua;
 
-log_level("info");
 repeat_each(1);
 no_long_string();
 no_root_location();
 
-
-my $resp_file = 't/assets/openai-compatible-api-response.json';
-open(my $fh, '<', $resp_file) or die "Could not open file '$resp_file' $!";
-my $resp = do { local $/; <$fh> };
-close($fh);
-
-print "Hello, World!\n";
-print $resp;
-
-
 add_block_preprocessor(sub {
     my ($block) = @_;
 
-    if (!defined $block->request) {
-        $block->set_value("request", "GET /t");
-    }
+    my $http_config = <<'EOF';
+    server {
+        listen 1999;
+
+        location /v1/messages {
+            content_by_lua_block {
+                local core = require("apisix.core")
+                ngx.req.read_body()
+                local body = core.json.decode(ngx.req.get_body_data())
+
+                -- 1. Required Header: x-api-key
+                if ngx.var.http_x_api_key ~= "test-key" then
+                    ngx.status = 401
+                    ngx.say([[{"type":"error","error":{"type":"authentication_error","message":"invalid api key"}}]])
+                    return
+                end
 
-    my $user_yaml_config = <<_EOC_;
-plugins:
-  - ai-proxy-multi
-  - prometheus
-_EOC_
-    $block->set_value("extra_yaml_config", $user_yaml_config);
-
-    my $http_config = $block->http_config // <<_EOC_;
-        server {
-            server_name anthropic;
-            listen 6725;
-
-            default_type 'application/json';
-
-            location /v1/chat/completions {
-                content_by_lua_block {
-                    local json = require("cjson.safe")
-
-                    if ngx.req.get_method() ~= "POST" then
-                        ngx.status = 400
-                        ngx.say("Unsupported request method: ", 
ngx.req.get_method())
-                    end
-                    ngx.req.read_body()
-                    local body, err = ngx.req.get_body_data()
-                    body, err = json.decode(body)
-
-                    local test_type = ngx.req.get_headers()["test-type"]
-                    if test_type == "options" then
-                        if body.foo == "bar" then
-                            ngx.status = 200
-                            ngx.say("options works")
-                        else
-                            ngx.status = 500
-                            ngx.say("model options feature doesn't work")
-                        end
-                        return
-                    end
-
-                    local header_auth = ngx.req.get_headers()["authorization"]
-                    local query_auth = ngx.req.get_uri_args()["apikey"]
-
-                    if header_auth ~= "Bearer token" and query_auth ~= "apikey" then
-                        ngx.status = 401
-                        ngx.say("Unauthorized")
-                        return
-                    end
-
-                    if header_auth == "Bearer token" or query_auth == "apikey" then
-                        ngx.req.read_body()
-                        local body, err = ngx.req.get_body_data()
-                        body, err = json.decode(body)
-
-                        if not body.messages or #body.messages < 1 then
-                            ngx.status = 400
-                            ngx.say([[{ "error": "bad request"}]])
-                            return
-                        end
-                        if body.messages[1].content == "write an SQL query to get all rows from student table" then
-                            ngx.print("SELECT * FROM STUDENTS")
-                            return
-                        end
-
-                        ngx.status = 200
-                        ngx.say([[$resp]])
-                        return
-                    end
-
-
-                    ngx.status = 503
-                    ngx.say("reached the end of the test suite")
-                }
-            }
+                -- 2. Required Header: anthropic-version
+                if ngx.var.http_anthropic_version ~= "2023-06-01" then
+                    ngx.status = 400
+                    ngx.say("missing anthropic-version")
+                    return
+                end
 
-            location /random {
-                content_by_lua_block {
-                    ngx.say("path override works")
+                -- 3. Required Parameter: max_tokens
+                if not body.max_tokens then
+                    ngx.status = 400
+                    ngx.say("missing max_tokens")
+                    return
+                end
+
+                -- 4. Validate Anthropic's native message structure
+                --    Messages must have content as array with type field
+                local msg = body.messages[1]
+                if type(msg.content) ~= "table"
+                   or msg.content[1].type ~= "text" then
+                    ngx.status = 400
+                    ngx.say("invalid anthropic message format")
+                    return
+                end
+
+                -- 5. Return mock Anthropic response
+                ngx.status = 200
+                ngx.say([[
+                {
+                  "id": "msg_123",
+                  "type": "message",
+                  "role": "assistant",
+                  "content": [
+                    { "type": "text", "text": "Hello from Claude" }
+                  ],
+                  "stop_reason": "end_turn"
                 }
+                ]])
             }
         }
-_EOC_
+    }
+EOF
 
     $block->set_value("http_config", $http_config);
 });
 
-run_tests();
-
 __DATA__
 
-=== TEST 1: set route with right auth header
+=== TEST 1: Create route with Anthropic provider
 --- config
     location /t {
         content_by_lua_block {
             local t = require("lib.test_admin").test
+
+            -- Create a route that directly exposes Anthropic's native endpoint
             local code, body = t('/apisix/admin/routes/1',
-                 ngx.HTTP_PUT,
-                 [[{
-                    "uri": "/anything",
+                ngx.HTTP_PUT,
+                [[{
+                    "uri": "/v1/messages",
                     "plugins": {
-                        "ai-proxy-multi": {
-                            "instances": [
-                                {
-                                    "name": "anthropic",
-                                    "provider": "anthropic",
-                                    "weight": 1,
-                                    "auth": {
-                                        "header": {
-                                            "Authorization": "Bearer token"
-                                        }
-                                    },
-                                    "options": {
-                                        "model": "claude-sonnet-4-5",
-                                        "max_tokens": 512,
-                                        "temperature": 1.0
-                                    },
-                                    "override": {
-                                        "endpoint": 
"http://localhost:6725/v1/chat/completions";
-                                    }
-                                }
-                            ],
-                            "ssl_verify": false
+                        "ai-proxy": {
+                            "provider": "anthropic",
+                            "api_key": "test-key",
+                            "override": {
+                                "endpoint": "http://127.0.0.1:1999/v1/messages";
+                            }
                         }
                     }
                 }]]
             )
 
             if code >= 300 then
                 ngx.status = code
+                ngx.say(body)
+                return
             end
-            ngx.say(body)
+
+            ngx.say("route created successfully")
         }
     }
 --- response_body
-passed
+route created successfully
 
 
 
-=== TEST 2: send request
+=== TEST 2: Send Anthropic native format request
 --- request
-POST /anything
-{ "messages": [ { "role": "system", "content": "You are a mathematician" }, { 
"role": "user", "content": "What is 1+1?"} ] }
+POST /v1/messages
+{
+  "model": "claude-3",
+  "max_tokens": 128,
+  "messages": [
+    {
+      "role": "user",
+      "content": [
+        { "type": "text", "text": "Hello" }
+      ]
+    }
+  ]
+}
 --- more_headers
-Authorization: Bearer token
+x-api-key: test-key
+anthropic-version: 2023-06-01
+Content-Type: application/json
 --- error_code: 200
---- response_body eval
-qr/\{ "content": "1 \+ 1 = 2\.", "role": "assistant" \}/
-
-
-
-=== TEST 3: set route with stream = true (SSE)
---- config
-    location /t {
-        content_by_lua_block {
-            local t = require("lib.test_admin").test
-            local code, body = t('/apisix/admin/routes/1',
-                 ngx.HTTP_PUT,
-                 [[{
-                    "uri": "/anything",
-                    "plugins": {
-                        "ai-proxy-multi": {
-                            "instances": [
-                                {
-                                    "name": "anthropic",
-                                    "provider": "anthropic",
-                                    "weight": 1,
-                                    "auth": {
-                                        "header": {
-                                            "Authorization": "Bearer token"
-                                        }
-                                    },
-                                    "options": {
-                                        "model": "claude-sonnet-4-5",
-                                        "max_tokens": 512,
-                                        "temperature": 1.0,
-                                        "stream": true
-                                    },
-                                    "override": {
-                                        "endpoint": 
"http://localhost:7737/v1/chat/completions";
-                                    }
-                                }
-                            ],
-                            "ssl_verify": false
-                        }
-                    }
-                 }]]
-            )
-
-            if code >= 300 then
-                ngx.status = code
-            end
-            ngx.say(body)
-        }
-    }
---- response_body
-passed
+--- response_body_like eval
+qr/"type"\s*:\s*"message"/
 
 
 
-=== TEST 4: test is SSE works as expected
+=== TEST 3: Test Anthropic streaming response (SSE)
 --- config
     location /t {
         content_by_lua_block {
             local http = require("resty.http")
             local httpc = http.new()
-            local core = require("apisix.core")
-
-            local ok, err = httpc:connect({
-                scheme = "http",
-                host = "localhost",
-                port = ngx.var.server_port,
-            })
-
-            if not ok then
-                ngx.status = 500
-                ngx.say(err)
-                return
-            end
 
-            local params = {
+            local res, err = httpc:request_uri("http://127.0.0.1:9080/v1/messages", {
                 method = "POST",
                 headers = {
                     ["Content-Type"] = "application/json",
+                    ["x-api-key"] = "test-key",
+                    ["anthropic-version"] = "2023-06-01",
                 },
-                path = "/anything",
                 body = [[{
+                    "model": "claude-3",
+                    "stream": true,
+                    "max_tokens": 16,
                     "messages": [
-                        { "role": "system", "content": "some content" }
-                    ],
-                    "stream": true
-                }]],
-            }
+                        {
+                            "role": "user",
+                            "content": [
+                                { "type": "text", "text": "Hi" }
+                            ]
+                        }
+                    ]
+                }]]
+            })
 
-            local res, err = httpc:request(params)
-            if not res then
+            if err then
                 ngx.status = 500
-                ngx.say(err)
+                ngx.say("request failed: ", err)
                 return
             end
 
-            local final_res = {}
-            while true do
-                local chunk, err = res.body_reader() -- will read chunk by chunk
-                if err then
-                    core.log.error("failed to read response chunk: ", err)
-                    break
-                end
-                if not chunk then
-                    break
-                end
-                core.table.insert_tail(final_res, chunk)
-            end
-
-            ngx.print(#final_res .. final_res[6])
+            ngx.status = res.status
+            ngx.say(res.body or "")
         }
     }
 --- response_body_like eval
-qr/6data: \[DONE\]\n\n/
+qr/message/

Review Comment:
   TEST 3 for streaming doesn't validate that streaming actually works. It just 
checks that the response contains "message", but doesn't verify SSE format, 
chunked reading, or the proper event stream structure. The test should use a 
mock that returns actual SSE events and verify the client receives them in the 
correct format.
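
   For example, the mock could emit real Anthropic-style SSE frames and the test could read them through res.body_reader and assert each frame. A sketch of the mock body (payloads are abbreviated examples of Anthropic's documented event stream; wire this into the test server's content_by_lua_block):

```lua
-- Sketch of a mock SSE body for a stricter streaming test.
ngx.header["Content-Type"] = "text/event-stream"
ngx.say("event: message_start")
ngx.say([[data: {"type":"message_start","message":{"id":"msg_123"}}]])
ngx.say("")  -- blank line terminates the SSE frame
ngx.say("event: content_block_delta")
ngx.say([[data: {"type":"content_block_delta","delta":{"type":"text_delta","text":"Hello"}}]])
ngx.say("")
ngx.say("event: message_stop")
ngx.say([[data: {"type":"message_stop"}]])
ngx.say("")
```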



##########
t/plugin/ai-proxy-anthropic.t:
##########
@@ -1,298 +1,268 @@
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
 
 use t::APISIX 'no_plan';
+use Test::Nginx::Socket::Lua;
 
-log_level("info");
 repeat_each(1);
 no_long_string();
 no_root_location();
 
-
-my $resp_file = 't/assets/openai-compatible-api-response.json';
-open(my $fh, '<', $resp_file) or die "Could not open file '$resp_file' $!";
-my $resp = do { local $/; <$fh> };
-close($fh);
-
-print "Hello, World!\n";
-print $resp;
-
-
 add_block_preprocessor(sub {
     my ($block) = @_;
 
-    if (!defined $block->request) {
-        $block->set_value("request", "GET /t");
-    }
+    my $http_config = <<'EOF';
+    server {
+        listen 1999;
+
+        location /v1/messages {
+            content_by_lua_block {
+                local core = require("apisix.core")
+                ngx.req.read_body()
+                local body = core.json.decode(ngx.req.get_body_data())
+
+                -- 1. Required Header: x-api-key
+                if ngx.var.http_x_api_key ~= "test-key" then
+                    ngx.status = 401
+                    ngx.say([[{"type":"error","error":{"type":"authentication_error","message":"invalid api key"}}]])
+                    return
+                end
 
-    my $user_yaml_config = <<_EOC_;
-plugins:
-  - ai-proxy-multi
-  - prometheus
-_EOC_
-    $block->set_value("extra_yaml_config", $user_yaml_config);
-
-    my $http_config = $block->http_config // <<_EOC_;
-        server {
-            server_name anthropic;
-            listen 6725;
-
-            default_type 'application/json';
-
-            location /v1/chat/completions {
-                content_by_lua_block {
-                    local json = require("cjson.safe")
-
-                    if ngx.req.get_method() ~= "POST" then
-                        ngx.status = 400
-                        ngx.say("Unsupported request method: ", 
ngx.req.get_method())
-                    end
-                    ngx.req.read_body()
-                    local body, err = ngx.req.get_body_data()
-                    body, err = json.decode(body)
-
-                    local test_type = ngx.req.get_headers()["test-type"]
-                    if test_type == "options" then
-                        if body.foo == "bar" then
-                            ngx.status = 200
-                            ngx.say("options works")
-                        else
-                            ngx.status = 500
-                            ngx.say("model options feature doesn't work")
-                        end
-                        return
-                    end
-
-                    local header_auth = ngx.req.get_headers()["authorization"]
-                    local query_auth = ngx.req.get_uri_args()["apikey"]
-
-                    if header_auth ~= "Bearer token" and query_auth ~= "apikey" then
-                        ngx.status = 401
-                        ngx.say("Unauthorized")
-                        return
-                    end
-
-                    if header_auth == "Bearer token" or query_auth == "apikey" then
-                        ngx.req.read_body()
-                        local body, err = ngx.req.get_body_data()
-                        body, err = json.decode(body)
-
-                        if not body.messages or #body.messages < 1 then
-                            ngx.status = 400
-                            ngx.say([[{ "error": "bad request"}]])
-                            return
-                        end
-                        if body.messages[1].content == "write an SQL query to get all rows from student table" then
-                            ngx.print("SELECT * FROM STUDENTS")
-                            return
-                        end
-
-                        ngx.status = 200
-                        ngx.say([[$resp]])
-                        return
-                    end
-
-
-                    ngx.status = 503
-                    ngx.say("reached the end of the test suite")
-                }
-            }
+                -- 2. Required Header: anthropic-version
+                if ngx.var.http_anthropic_version ~= "2023-06-01" then
+                    ngx.status = 400
+                    ngx.say("missing anthropic-version")
+                    return
+                end
 
-            location /random {
-                content_by_lua_block {
-                    ngx.say("path override works")
+                -- 3. Required Parameter: max_tokens
+                if not body.max_tokens then
+                    ngx.status = 400
+                    ngx.say("missing max_tokens")
+                    return
+                end
+
+                -- 4. Validate Anthropic's native message structure
+                --    Messages must have content as array with type field
+                local msg = body.messages[1]
+                if type(msg.content) ~= "table"
+                   or msg.content[1].type ~= "text" then
+                    ngx.status = 400
+                    ngx.say("invalid anthropic message format")
+                    return
+                end
+
+                -- 5. Return mock Anthropic response
+                ngx.status = 200
+                ngx.say([[
+                {
+                  "id": "msg_123",
+                  "type": "message",
+                  "role": "assistant",
+                  "content": [
+                    { "type": "text", "text": "Hello from Claude" }
+                  ],
+                  "stop_reason": "end_turn"
                 }
+                ]])
             }
         }
-_EOC_
+    }
+EOF
 
     $block->set_value("http_config", $http_config);
 });
 
-run_tests();
-
 __DATA__
 
-=== TEST 1: set route with right auth header
+=== TEST 1: Create route with Anthropic provider
 --- config
     location /t {
         content_by_lua_block {
             local t = require("lib.test_admin").test
+
+            -- Create a route that directly exposes Anthropic's native endpoint
             local code, body = t('/apisix/admin/routes/1',
-                 ngx.HTTP_PUT,
-                 [[{
-                    "uri": "/anything",
+                ngx.HTTP_PUT,
+                [[{
+                    "uri": "/v1/messages",
                     "plugins": {
-                        "ai-proxy-multi": {
-                            "instances": [
-                                {
-                                    "name": "anthropic",
-                                    "provider": "anthropic",
-                                    "weight": 1,
-                                    "auth": {
-                                        "header": {
-                                            "Authorization": "Bearer token"
-                                        }
-                                    },
-                                    "options": {
-                                        "model": "claude-sonnet-4-5",
-                                        "max_tokens": 512,
-                                        "temperature": 1.0
-                                    },
-                                    "override": {
-                                        "endpoint": 
"http://localhost:6725/v1/chat/completions";
-                                    }
-                                }
-                            ],
-                            "ssl_verify": false
+                        "ai-proxy": {
+                            "provider": "anthropic",
+                            "api_key": "test-key",
+                            "override": {
+                                "endpoint": "http://127.0.0.1:1999/v1/messages";
+                            }
                         }
                     }
                 }]]
             )
 
             if code >= 300 then
                 ngx.status = code
+                ngx.say(body)
+                return
             end
-            ngx.say(body)
+
+            ngx.say("route created successfully")
         }
     }
 --- response_body
-passed
+route created successfully
 
 
 
-=== TEST 2: send request
+=== TEST 2: Send Anthropic native format request
 --- request
-POST /anything
-{ "messages": [ { "role": "system", "content": "You are a mathematician" }, { 
"role": "user", "content": "What is 1+1?"} ] }
+POST /v1/messages
+{
+  "model": "claude-3",
+  "max_tokens": 128,
+  "messages": [
+    {
+      "role": "user",
+      "content": [
+        { "type": "text", "text": "Hello" }
+      ]
+    }
+  ]
+}
 --- more_headers
-Authorization: Bearer token
+x-api-key: test-key
+anthropic-version: 2023-06-01
+Content-Type: application/json

Review Comment:
   The test doesn't validate the headers that should be set by the driver. 
According to the PR description, the plugin should automatically inject the 
"anthropic-version" header and convert "Authorization: Bearer" to "x-api-key", 
but the test manually provides these headers (lines 145-146, 165-166). A proper 
test should send a request without these Anthropic-specific headers and verify 
the plugin adds them automatically.
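
   A stricter block might look like this sketch (the auth mapping and config keys are assumptions to be checked against the driver; the mock at port 1999 would then assert x-api-key and anthropic-version itself):

```
=== TEST N: driver injects anthropic headers (sketch)
--- request
POST /v1/messages
{ "model": "claude-3", "max_tokens": 16, "messages": [ { "role": "user", "content": [ { "type": "text", "text": "Hi" } ] } ] }
--- more_headers
Authorization: Bearer test-key
Content-Type: application/json
--- error_code: 200
```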



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

