This is an automated email from the ASF dual-hosted git repository.
harishgokul01 pushed a commit to branch development
in repository https://gitbox.apache.org/repos/asf/incubator-resilientdb.git
The following commit(s) were added to refs/heads/development by this push:
new c4fb06df Rescontract - Development Branch PR (#226)
c4fb06df is described below
commit c4fb06df99c098fbaf9bcc5916f93b0e4f277d6a
Author: Harish <[email protected]>
AuthorDate: Sun Jan 25 21:52:29 2026 -0800
Rescontract - Development Branch PR (#226)
* Create directory for MCP and get started
* added MCP support for smart contract functionality
* fix(rescontract): switch to JSON config for contract_tools
* Add required mcp-graphql files
* Automate key generation and enable it to be used at MCP tools
* Fix few bugs
* Analysis of the transactions
* add monitoring
* Integration of graphql and smart contract
* Benchmarking tool integration
* Integrate mcp-graphql and mcp-smartcontract in the same directory
---------
Co-authored-by: Rahul Kanagaraj <[email protected]>
Co-authored-by: Vikhas <[email protected]>
Co-authored-by: Vikhas <[email protected]>
Co-authored-by: Pavan Kumar Nuthi <[email protected]>
---
ecosystem/mcp/.gitignore | 38 +
ecosystem/mcp/ARCHITECTURE.md | 365 ++++++++
ecosystem/mcp/Counter.sol | 40 +
ecosystem/mcp/Dockerfile | 26 +
ecosystem/mcp/QUICKSTART.md | 165 ++++
ecosystem/mcp/README.md | 383 ++++++++
ecosystem/mcp/TESTING.md | 455 ++++++++++
ecosystem/mcp/config.py | 31 +
ecosystem/mcp/generate_keys_utility.py | 89 ++
ecosystem/mcp/graphql_client.py | 206 +++++
ecosystem/mcp/requirements.txt | 4 +
ecosystem/mcp/rescontract_client.py | 705 +++++++++++++++
ecosystem/mcp/server.py | 982 +++++++++++++++++++++
ecosystem/mcp/test_mcp_tools.py | 24 +
ecosystem/smart-contract/rescontract/index.js | 35 +-
.../smart-contract/rescontract/package-lock.json | 33 +-
16 files changed, 3561 insertions(+), 20 deletions(-)
diff --git a/ecosystem/mcp/.gitignore b/ecosystem/mcp/.gitignore
new file mode 100644
index 00000000..bd4af1fa
--- /dev/null
+++ b/ecosystem/mcp/.gitignore
@@ -0,0 +1,38 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+env/
+venv/
+ENV/
+.venv
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Environment variables
+.env
+.env.local
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# Distribution / packaging
+dist/
+build/
+*.egg-info/
+
+# Testing
+.pytest_cache/
+.coverage
+htmlcov/
+
+# OS
+.DS_Store
+Thumbs.db
+
diff --git a/ecosystem/mcp/ARCHITECTURE.md b/ecosystem/mcp/ARCHITECTURE.md
new file mode 100644
index 00000000..591d8f53
--- /dev/null
+++ b/ecosystem/mcp/ARCHITECTURE.md
@@ -0,0 +1,365 @@
+# ResilientDB MCP Server Architecture
+
+## Overview
+
+The ResilientDB MCP Server is a Model Context Protocol (MCP) implementation
that provides a standardized interface for AI agents to interact with
ResilientDB blockchain. The server integrates GraphQL for asset transactions
and HTTP REST API for key-value operations.
+
+## Architecture Diagram
+
+```
+┌─────────────────────────────────────────────────────────────────┐
+│ MCP Host (Claude Desktop) │
+└────────────────────────────┬────────────────────────────────────┘
+ │
+ │ MCP Protocol (stdio)
+ │
+┌────────────────────────────▼────────────────────────────────────┐
+│ ResilientDB MCP Server │
+│ ┌──────────────────────────────────────────────────────────┐ │
+│ │ MCP Server Core │ │
+│ │ - Tool Registration │ │
+│ │ - Request Routing │ │
+│ │ - Error Handling │ │
+│ └──────────────────────────────────────────────────────────┘ │
+│ │ │
+│ ┌────────────────────┼────────────────────┐ │
+│ │ │ │ │
+│ ┌─────▼──────┐ ┌────────▼────────┐ ┌───────▼───────┐ │
+│ │ GraphQL │ │ HTTP REST │ │ Configuration │ │
+│ │ Client │ │ Client │ │ Manager │ │
+│ │ (Port 8000)│ │ (Port 18000) │ │ │ │
+│ └─────┬──────┘ └────────┬────────┘ └───────────────┘ │
+│ │ │ │
+└────────┼────────────────────┼────────────────────────────────────┘
+ │ │
+ │ HTTP/GraphQL │ HTTP REST
+ │ │
+┌────────▼────────────────────▼────────────────────────────────────┐
+│ ResilientDB Backend │
+│ ┌──────────────────┐ ┌──────────────────────┐ │
+│ │ GraphQL Server │ │ HTTP/Crow Server │ │
+│ │ (Port 8000) │ │ (Port 18000) │ │
+│ │ - Asset Txns │ │ - Key-Value Ops │ │
+│ └──────────────────┘ └──────────────────────┘ │
+│ │
+│ ┌──────────────────────────────────────────────────────────┐ │
+│ │ ResilientDB Blockchain │ │
+│ └──────────────────────────────────────────────────────────┘ │
+└───────────────────────────────────────────────────────────────────┘
+```
+
+## Components
+
+### 1. MCP Server Core (`server.py`)
+
+The main server component that:
+- Registers all available tools with the MCP protocol
+- Handles incoming tool calls from MCP hosts
+- Routes requests to appropriate clients (GraphQL or HTTP REST)
+- Manages error handling and response formatting
+
+**Key Responsibilities:**
+- Tool registration and discovery
+- Request validation
+- Response formatting
+- Error handling and reporting
+
+**Available Tools:**
+- `getTransaction` - Get asset transaction by ID (GraphQL)
+- `postTransaction` - Post asset transaction (GraphQL)
+- `get` - Retrieve key-value pair (HTTP REST)
+- `set` - Store key-value pair (HTTP REST)
+
+### 2. GraphQL Client (`graphql_client.py`)
+
+Handles GraphQL-based operations for asset transactions and HTTP REST
operations for key-value storage.
+
+**GraphQL Operations (Port 8000):**
+- `get_transaction(transaction_id)` - Retrieve asset transaction by ID
+- `post_transaction(data)` - Submit new asset transaction with PrepareAsset
format
+
+**HTTP REST Operations (Port 18000):**
+- `get_key_value(key)` - Retrieve key-value pair via HTTP REST API
+- `set_key_value(key, value)` - Store key-value pair via HTTP REST API
+
+**Key Features:**
+- Async HTTP client for both GraphQL and REST operations
+- Error handling and validation
+- Request timeout management
+- Response parsing and formatting
+
+### 3. Configuration Manager (`config.py`)
+
+Manages server configuration through environment variables.
+
+**Configuration Options:**
+- `RESILIENTDB_GRAPHQL_URL` - GraphQL endpoint (default:
`http://localhost:8000/graphql`)
+- `RESILIENTDB_HTTP_URL` - HTTP/Crow server endpoint (default:
`http://localhost:18000`)
+- `RESILIENTDB_API_KEY` - Optional API key for authentication
+- `RESILIENTDB_AUTH_TOKEN` - Optional auth token
+- `REQUEST_TIMEOUT` - Request timeout in seconds (default: 30)
+
+## Request Flow
+
+### Asset Transaction Operations (GraphQL)
+
+1. **MCP Host** sends tool call (e.g., `getTransaction`)
+2. **MCP Server** receives and validates request
+3. **GraphQL Client** constructs GraphQL query
+4. **GraphQL Server** (port 8000) processes request
+5. **Response** flows back through the chain
+
+**Example Flow:**
+```
+Claude Desktop → MCP Server → GraphQL Client → GraphQL Server (8000) →
ResilientDB
+```
+
+### Key-Value Operations (HTTP REST)
+
+1. **MCP Host** sends tool call (e.g., `set`)
+2. **MCP Server** receives and validates request
+3. **GraphQL Client** (HTTP REST methods) constructs HTTP request
+4. **HTTP/Crow Server** (port 18000) processes request
+5. **Response** flows back through the chain
+
+**Example Flow:**
+```
+Claude Desktop → MCP Server → HTTP REST Client → Crow Server (18000) →
ResilientDB
+```
+
+## Routing Logic
+
+The server uses operation-based routing:
+
+| Operation | Service | Port | Purpose |
+|-----------|---------|------|---------|
+| `getTransaction` | GraphQL | 8000 | Get asset transaction by ID |
+| `postTransaction` | GraphQL | 8000 | Post asset transaction |
+| `get` | HTTP REST | 18000 | Retrieve key-value pair |
+| `set` | HTTP REST | 18000 | Store key-value pair |
+
+**Note:** All routing is direct with no fallback mechanisms.
+
+## Data Flow
+
+### Request Processing
+
+```
+MCP Request → Validation → Route Selection → Client Execution → Response
Formatting → MCP Response
+```
+
+### GraphQL Request Flow
+
+```
+1. MCP Tool Call (getTransaction/postTransaction)
+2. Server validates arguments
+3. GraphQL Client constructs query/mutation
+4. HTTP POST to GraphQL endpoint (port 8000)
+5. Parse GraphQL response
+6. Format and return to MCP host
+```
+
+### HTTP REST Request Flow
+
+```
+1. MCP Tool Call (get/set)
+2. Server validates arguments
+3. HTTP Client constructs REST request
+4. HTTP GET/POST to Crow server (port 18000)
+5. Parse HTTP response
+6. Format and return to MCP host
+```
+
+### Response Format
+
+All responses are formatted as JSON:
+```json
+{
+ "data": {...},
+ "error": null
+}
+```
+
+Error responses:
+```json
+{
+ "error": {
+ "type": "ErrorType",
+ "message": "Error message",
+ "details": {...}
+ }
+}
+```
+
+## Service Separation
+
+### GraphQL Server (Port 8000)
+
+**Purpose:** Blockchain asset transactions
+
+**Operations:**
+- `getTransaction(id: ID!)` - Retrieve asset transaction
+- `postTransaction(data: PrepareAsset!)` - Create asset transaction
+
+**Schema:**
+- Query: `getTransaction` returns `RetrieveTransaction`
+- Mutation: `postTransaction` accepts `PrepareAsset` and returns
`CommitTransaction`
+
+**Required Fields for postTransaction:**
+- `operation` (String) - Transaction operation type
+- `amount` (Int) - Transaction amount
+- `signerPublicKey` (String) - Signer's public key
+- `signerPrivateKey` (String) - Signer's private key
+- `recipientPublicKey` (String) - Recipient's public key
+- `asset` (JSONScalar) - Asset data as JSON
+
+### HTTP/Crow Server (Port 18000)
+
+**Purpose:** Simple key-value storage
+
+**Operations:**
+- `POST /v1/transactions/commit` - Store key-value pair
+- `GET /v1/transactions/{key}` - Retrieve key-value pair
+
+**Request Format (set):**
+```json
+{
+ "id": "key",
+ "value": "value"
+}
+```
+
+**Response Format (set):**
+```
+id: key
+```
+
+**Response Format (get):**
+```json
+{
+ "id": "key",
+ "value": "value"
+}
+```
+
+## Error Handling
+
+### Error Types
+
+1. **Configuration Errors**: Missing or invalid configuration
+2. **GraphQL Errors**: API request failures, query errors, missing fields
+3. **HTTP Errors**: Connection failures, invalid responses
+4. **Network Errors**: Connection timeouts, unreachable services
+5. **Validation Errors**: Invalid parameters, missing required fields
+
+### Error Flow
+
+1. Error occurs in client layer
+2. Exception is caught and formatted
+3. Error details are included in response
+4. MCP host receives structured error response
+
+**Example Error Response:**
+```json
+{
+ "error": "GraphQLError",
+ "message": "Missing required fields in PrepareAsset: signerPublicKey,
signerPrivateKey",
+ "tool": "postTransaction",
+ "arguments": {...}
+}
+```
+
+## Security Considerations
+
+1. **Authentication**: Optional API keys and tokens via environment variables
+2. **Input Validation**: All inputs are validated before processing
+3. **Error Messages**: Sensitive information is not exposed in error messages
+4. **Network Security**: HTTPS should be used for production endpoints
+5. **Private Keys**: Never expose private keys in logs or error messages
+
+## Performance Considerations
+
+1. **Async Operations**: All I/O operations are asynchronous
+2. **Connection Pooling**: HTTP clients use connection pooling
+3. **Timeout Management**: Configurable timeouts prevent hanging requests
+4. **Resource Management**: Proper cleanup of resources
+
+## Implementation Details
+
+### GraphQL Integration
+
+- Asset transaction queries (`getTransaction`)
+- Asset transaction mutations (`postTransaction`)
+- Full PrepareAsset support with validation
+
+### HTTP REST Integration
+
+- Key-value storage (`set`)
+- Key-value retrieval (`get`)
+- Direct HTTP API calls to Crow server
+
+### MCP Protocol
+
+- Full MCP server implementation
+- Tool registration and discovery
+- Error handling and response formatting
+
+## Extension Points
+
+The architecture supports extension through:
+
+1. **New Tools**: Add new tools by registering them in `server.py`
+2. **New Clients**: Add new client implementations for additional services
+3. **Custom Routing**: Modify routing logic in `server.py`
+4. **Middleware**: Add middleware for logging, metrics, etc.
+
+## Testing Strategy
+
+1. **Unit Tests**: Test individual components in isolation
+2. **Integration Tests**: Test component interactions
+3. **End-to-End Tests**: Test complete request flows
+4. **Mock Services**: Use mocks for external dependencies
+5. **Error Scenarios**: Test error handling and recovery
+
+## Deployment Considerations
+
+1. **Docker**: Containerized deployment for consistency
+2. **Environment Variables**: Configuration via environment variables
+3. **Health Checks**: Monitor server health and status
+4. **Logging**: Comprehensive logging for debugging
+
+## Future Enhancements
+
+1. **Caching**: Add caching layer for frequently accessed data
+2. **Rate Limiting**: Implement rate limiting for API calls
+3. **Batch Operations**: Support for batch operations
+4. **Metrics**: Detailed metrics and analytics
+5. **Enhanced Authentication**: More robust authentication mechanisms
+
+## Project Location
+
+This project is located in the ResilientDB ecosystem directory structure
as:
+
+```
+ecosystem/
+└── mcp/                          # This MCP server project
+    ├── server.py
+    ├── graphql_client.py
+    ├── config.py
+    ├── requirements.txt
+    └── ...
+```
+
+**Rationale:**
+- It's a development tool that enables AI agents to interact with ResilientDB
+- It fits alongside other tools like `resvault` and `create-resilient-app`
+- It's not a service (like GraphQL server) or SDK (like resdb-orm)
+- It's a tool that bridges MCP protocol with ResilientDB services
+
+## References
+
+- [ResilientDB GraphQL
Documentation](http://beacon.resilientdb.com/docs/resilientdb_graphql)
+- [ResilientDB GraphQL
GitHub](https://github.com/apache/incubator-resilientdb-graphql)
+- [MCP Protocol Documentation](https://modelcontextprotocol.io/)
+- [ResilientDB Main
Repository](https://github.com/apache/incubator-resilientdb)
diff --git a/ecosystem/mcp/Counter.sol b/ecosystem/mcp/Counter.sol
new file mode 100644
index 00000000..658d0b00
--- /dev/null
+++ b/ecosystem/mcp/Counter.sol
@@ -0,0 +1,40 @@
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.0;
+
+contract Counter {
+ int256 private count;
+ address public owner;
+
+ event CountChanged(int256 newCount, address changedBy);
+
+ constructor(int256 initialCount) {
+ count = initialCount;
+ owner = msg.sender;
+ }
+
+ function increment() public {
+ count += 1;
+ emit CountChanged(count, msg.sender);
+ }
+
+ function decrement() public {
+ count -= 1;
+ emit CountChanged(count, msg.sender);
+ }
+
+ function getCount() public view returns (int256) {
+ return count;
+ }
+
+ function reset() public {
+ require(msg.sender == owner, "Only owner can reset");
+ count = 0;
+ emit CountChanged(count, msg.sender);
+ }
+
+ function setCount(int256 newCount) public {
+ require(msg.sender == owner, "Only owner can set count");
+ count = newCount;
+ emit CountChanged(count, msg.sender);
+ }
+}
\ No newline at end of file
diff --git a/ecosystem/mcp/Dockerfile b/ecosystem/mcp/Dockerfile
new file mode 100644
index 00000000..c1a3304f
--- /dev/null
+++ b/ecosystem/mcp/Dockerfile
@@ -0,0 +1,26 @@
+FROM python:3.11-slim
+
+WORKDIR /app
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ curl \
+ && rm -rf /var/lib/apt/lists/*
+
+# Copy requirements and install Python dependencies
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application code
+COPY . .
+
+# Make server executable
+RUN chmod +x server.py
+
+# Set Python path
+ENV PYTHONPATH=/app
+ENV PYTHONUNBUFFERED=1
+
+# Run the MCP server
+CMD ["python", "server.py"]
+
diff --git a/ecosystem/mcp/QUICKSTART.md b/ecosystem/mcp/QUICKSTART.md
new file mode 100644
index 00000000..5318c733
--- /dev/null
+++ b/ecosystem/mcp/QUICKSTART.md
@@ -0,0 +1,165 @@
+# Quick Start Guide
+
+## Prerequisites
+
+1. **Python 3.11+** installed
+2. **ResilientDB** instance running (see [ResilientDB
Installation](https://github.com/apache/incubator-resilientdb))
+3. **ResContract CLI** installed (for smart contract operations)
+4. **Claude Desktop** (for testing with MCP)
+
+## Installation Steps
+
+### 1. Clone and Setup
+
+```bash
+git clone https://github.com/rahulkanagaraj786/ResilientDB-MCP.git
+cd ResilientDB-MCP
+```
+
+### 2. Install Dependencies
+
+```bash
+pip install -r requirements.txt
+```
+
+### 3. Configure Environment
+
+```bash
+cp .env.example .env
+# Edit .env with your ResilientDB settings
+```
+
+Update `.env`:
+```env
+RESILIENTDB_GRAPHQL_URL=http://localhost:8000/graphql
+RESCONTRACT_CLI_PATH=rescontract
+```
+
+### 4. Test the Server
+
+Run the server directly:
+```bash
+python server.py
+```
+
+The server should start and listen on stdio for MCP protocol messages.
+
+## Docker Setup
+
+### Build Docker Image
+
+```bash
+docker build -t mcp/resilientdb -f Dockerfile .
+```
+
+### Run Docker Container
+
+```bash
+docker run -i --rm mcp/resilientdb
+```
+
+## Claude Desktop Configuration
+
+### 1. Locate Claude Desktop Config
+
+- **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`
+- **Windows**: `%APPDATA%\Claude\claude_desktop_config.json`
+- **Linux**: `~/.config/Claude/claude_desktop_config.json`
+
+### 2. Add MCP Server Configuration
+
+Edit the config file and add:
+
+```json
+{
+ "mcpServers": {
+ "resilientdb": {
+ "command": "python",
+ "args": ["/absolute/path/to/ResilientDB-MCP/server.py"],
+ "env": {
+        "RESILIENTDB_GRAPHQL_URL": "http://localhost:8000/graphql",
+ "RESCONTRACT_CLI_PATH": "rescontract"
+ }
+ }
+ }
+}
+```
+
+Or with Docker:
+
+```json
+{
+ "mcpServers": {
+ "resilientdb": {
+ "command": "docker",
+ "args": ["run", "-i", "--rm", "mcp/resilientdb"]
+ }
+ }
+}
+```
+
+### 3. Restart Claude Desktop
+
+Restart Claude Desktop to load the new MCP server configuration.
+
+## Testing
+
+### Test Key-Value Operations
+
+In Claude Desktop, you can now ask:
+
+- "Store a key-value pair with key 'test' and value 'hello'"
+- "Get the value for key 'test'"
+
+### Test Smart Contract Operations
+
+- "Compile a contract at /path/to/contract.sol"
+- "Deploy the compiled contract"
+- "Execute the getValue method on contract 0x123..."
+
+### Test Account Operations
+
+- "Create a new account in ResilientDB"
+- "Get transaction details for tx-123456"
+
+## Troubleshooting
+
+### Server Not Starting
+
+1. Check Python version: `python --version` (should be 3.11+)
+2. Verify dependencies: `pip list | grep mcp`
+3. Check environment variables in `.env`
+
+### Connection Errors
+
+1. Verify ResilientDB is running
+2. Check GraphQL URL is correct
+3. Test GraphQL endpoint: `curl http://localhost:8000/graphql`
+
+### ResContract CLI Not Found
+
+1. Verify ResContract is installed
+2. Check PATH: `which rescontract`
+3. Set `RESCONTRACT_CLI_PATH` in `.env`
+
+### Claude Desktop Not Connecting
+
+1. Check Claude Desktop logs
+2. Verify config file syntax (valid JSON)
+3. Ensure absolute path to server.py is correct
+4. Check file permissions
+
+## Next Steps
+
+1. Read the [README.md](README.md) for detailed documentation
+2. Review [ARCHITECTURE.md](ARCHITECTURE.md) for architecture details
+3. Explore the available tools in Claude Desktop
+4. Start building with ResilientDB!
+
+## Support
+
+For issues or questions:
+- Check the [ResilientDB
Documentation](https://resilientdb.incubator.apache.org/)
+- Review [ResContract CLI
Docs](https://beacon.resilientdb.com/docs/rescontract)
+- Check [ResilientDB GraphQL
API](https://beacon.resilientdb.com/docs/resilientdb_graphql)
+
diff --git a/ecosystem/mcp/README.md b/ecosystem/mcp/README.md
new file mode 100644
index 00000000..8fe5e44c
--- /dev/null
+++ b/ecosystem/mcp/README.md
@@ -0,0 +1,383 @@
+# ResilientDB MCP Server
+
+A Model Context Protocol (MCP) server for interacting with ResilientDB, a
high-performance blockchain platform. This server allows Large Language Models
(LLMs) like Claude to interact with ResilientDB through GraphQL queries and
HTTP REST API.
+
+## Overview
+
+This MCP server bridges the gap between AI agents (like Claude Desktop) and
ResilientDB by providing a standardized interface for:
+- **GraphQL Operations**: Asset transactions on the blockchain (port 8000)
+- **HTTP REST API Operations**: Key-value storage operations (port 18000 -
Crow server)
+
+**Note:** For midterm, this implementation focuses on GraphQL and HTTP REST
API integration. Smart contract operations (ResContract CLI) are temporarily
disabled.
+
+## Features
+
+### GraphQL Operations (Port 8000)
+- `createAccount`: Create new accounts in ResilientDB (if supported)
+- `getTransaction`: Retrieve asset transaction details by ID (blockchain
transactions)
+- `postTransaction`: Post new asset transactions to the blockchain (requires
PrepareAsset with crypto keys)
+- `updateTransaction`: Update existing transactions (note: blockchain
transactions are typically immutable)
+
+### Key-Value Operations (Port 18000 - HTTP REST API)
+- `get`: Retrieve values by key using HTTP REST API (Crow server)
+- `set`: Store key-value pairs using HTTP REST API (Crow server)
+
+**Note:** For midterm, smart contract operations (compile, deploy, execute)
are temporarily removed. Focus is on GraphQL and HTTP REST API integration.
+
+**Important Architecture Notes:**
+- **GraphQL (port 8000)**: Used for blockchain asset transactions
+- **HTTP/Crow (port 18000)**: Used for key-value operations
+
+## Installation
+
+### Prerequisites
+
+- Python 3.11 or higher
+- ResilientDB instance running (see [ResilientDB
Installation](https://github.com/apache/incubator-resilientdb))
+- ResContract CLI installed (for smart contract operations)
+- Access to ResilientDB GraphQL endpoint
+
+### Local Installation
+
+1. Clone the repository:
+```bash
+git clone https://github.com/rahulkanagaraj786/ResilientDB-MCP.git
+cd ResilientDB-MCP
+```
+
+2. Install dependencies:
+```bash
+pip install -r requirements.txt
+```
+
+3. Configure environment variables:
+```bash
+cp .env.example .env
+# Edit .env with your ResilientDB configuration
+```
+
+4. Update `.env` file with your settings:
+```env
+RESILIENTDB_GRAPHQL_URL=http://localhost:8000/graphql
+RESILIENTDB_HTTP_URL=http://localhost:18000
+```
+
+### Docker Installation
+
+1. Build the Docker image:
+```bash
+docker build -t mcp/resilientdb -f Dockerfile .
+```
+
+2. Run the container:
+```bash
+docker run -i --rm mcp/resilientdb
+```
+
+## Configuration
+
+### Environment Variables
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `RESILIENTDB_GRAPHQL_URL` | GraphQL endpoint URL (port 8000 for asset
transactions) | `http://localhost:8000/graphql` |
+| `RESILIENTDB_HTTP_URL` | HTTP/Crow server URL (port 18000 for KV operations)
| `http://localhost:18000` |
+| `RESILIENTDB_API_KEY` | Optional API key for authentication | None |
+| `RESILIENTDB_AUTH_TOKEN` | Optional auth token | None |
+| `REQUEST_TIMEOUT` | Request timeout in seconds | `30` |
+| `TRANSACTION_POLL_INTERVAL` | Polling interval for transactions | `1.0` |
+| `MAX_POLL_ATTEMPTS` | Maximum polling attempts | `30` |
+
+**Important Notes:**
+- GraphQL (port 8000) is used for **asset transactions** (blockchain)
+- HTTP/Crow (port 18000) is used for **key-value operations** (simple storage)
+
+## Usage with Claude Desktop
+
+Add the MCP server to your Claude Desktop configuration:
+
+1. Open Claude Desktop settings
+2. Edit the MCP servers configuration file (usually `claude_desktop.json`)
+3. Add the following configuration:
+
+### For Local Installation:
+```json
+{
+ "mcpServers": {
+ "resilientdb": {
+ "command": "python",
+ "args": ["/path/to/ResilientDB-MCP/server.py"],
+ "env": {
+ "RESILIENTDB_GRAPHQL_URL": "http://localhost:8000/graphql",
+ "RESILIENTDB_HTTP_URL": "http://localhost:18000"
+ }
+ }
+ }
+}
+```
+
+### For Docker Installation:
+```json
+{
+ "mcpServers": {
+ "resilientdb": {
+ "command": "docker",
+ "args": ["run", "-i", "--rm", "mcp/resilientdb"]
+ }
+ }
+}
+```
+
+4. Restart Claude Desktop
+
+## Available Tools
+
+### createAccount
+Create a new account in ResilientDB.
+
+**Parameters:**
+- `accountId` (optional): Account ID. If not provided, server will generate
one.
+
+**Example:**
+```json
+{
+ "accountId": "my-account-123"
+}
+```
+
+### getTransaction
+Get asset transaction details by transaction ID (GraphQL - port 8000).
+
+**Parameters:**
+- `transactionId` (required): Transaction ID to retrieve
+
+**Example:**
+```json
+{
+ "transactionId": "tx-123456"
+}
+```
+
+**Note:** This is for blockchain asset transactions, not KV transactions.
+
+### postTransaction
+Post a new asset transaction to ResilientDB (GraphQL - port 8000).
+
+**Parameters:**
+- `data` (required): Transaction data in PrepareAsset format with crypto keys
and signatures
+
+**Example:**
+```json
+{
+ "data": {
+ "operation": "CREATE",
+ "asset": {
+ "data": {...}
+ },
+ "outputs": [...],
+ "inputs": [...]
+ }
+}
+```
+
+**Note:** This requires PrepareAsset format with cryptographic keys. For
simple KV operations, use the `set` tool instead.
+
+### updateTransaction
+Update an existing transaction.
+
+**Parameters:**
+- `transactionId` (required): Transaction ID to update
+- `data` (required): Updated transaction data
+
+**Example:**
+```json
+{
+ "transactionId": "tx-123456",
+ "data": {
+ "status": "completed"
+ }
+}
+```
+
+### get
+Retrieve a value from ResilientDB by key (HTTP REST API - port 18000).
+
+**Parameters:**
+- `key` (required): Key to retrieve
+
+**Example:**
+```json
+{
+ "key": "my-key"
+}
+```
+
+**Note:** This uses HTTP REST API (Crow server on port 18000).
+
+### set
+Store a key-value pair in ResilientDB (HTTP REST API - port 18000).
+
+**Parameters:**
+- `key` (required): Key to store
+- `value` (required): Value to store (can be any JSON-serializable value)
+
+**Example:**
+```json
+{
+ "key": "my-key",
+ "value": "my-value"
+}
+```
+
+**Note:** This uses HTTP REST API (Crow server on port 18000).
+
+## Architecture
+
+The MCP server acts as a mediator between the MCP host (Claude Desktop) and
ResilientDB backend services:
+
+```
+┌─────────────┐ ┌──────────────┐ ┌─────────────┐
+│ Claude │────────▶│ MCP Server │────────▶│ ResilientDB │
+│ Desktop │ │ (Python) │ │ Backend │
+└─────────────┘ └──────────────┘ └─────────────┘
+ │
+ ├──▶ GraphQL Client (port 8000)
+ │ (Asset Transactions only)
+ │
+ └──▶ HTTP REST Client (port 18000)
+ (Key-Value Operations)
+```
+
+### Routing Logic
+
+The server automatically routes requests to the appropriate service:
+- **Asset Transactions** → GraphQL API (port 8000)
+ - `getTransaction`: Retrieve asset transactions
+ - `postTransaction`: Post asset transactions (requires PrepareAsset)
+ - `createAccount`: Create accounts (if supported)
+ - `updateTransaction`: Update transactions (if supported)
+
+- **Key-Value Operations** → HTTP REST API (port 18000 - Crow server)
+ - `get`: Retrieve key-value pairs
+ - `set`: Store key-value pairs
+
+**Important:** KV operations use HTTP REST API (port 18000).
+
+## Development
+
+### Project Structure
+
+```
+ResilientDB-MCP/
+├── server.py # Main MCP server implementation
+├── graphql_client.py # GraphQL client for ResilientDB
+├── rescontract_client.py # ResContract CLI client
+├── config.py # Configuration management
+├── requirements.txt # Python dependencies
+├── Dockerfile # Docker configuration
+└── README.md # This file
+```
+
+### Running Tests
+
+```bash
+# Install test dependencies
+pip install pytest pytest-asyncio
+
+# Run tests
+pytest
+```
+
+### Contributing
+
+1. Fork the repository
+2. Create a feature branch
+3. Make your changes
+4. Submit a pull request
+
+## Troubleshooting
+
+### ResContract CLI Not Found
+
+If you get an error about ResContract CLI not being found:
+1. Ensure ResContract CLI is installed
+2. Add it to your PATH, or
+3. Set `RESCONTRACT_CLI_PATH` environment variable to the full path
+
+### GraphQL Connection Errors
+
+If you encounter GraphQL connection errors:
+1. Verify ResilientDB is running
+2. Check the `RESILIENTDB_GRAPHQL_URL` is correct (should be port 8000, not
9000)
+3. Ensure network connectivity to the GraphQL endpoint
+4. Check firewall settings
+5. Verify GraphQL server is accessible: `curl http://localhost:8000/graphql`
+
+### HTTP Connection Errors
+
+If you encounter HTTP connection errors for KV operations:
+1. Verify Crow HTTP server is running on port 18000
+2. Check the `RESILIENTDB_HTTP_URL` is correct
+3. Test HTTP endpoint: `curl http://localhost:18000/v1/transactions/test`
+4. Ensure the HTTP server is accessible
+
+### Key-Value Operations Not Working
+
+If KV operations (get/set) fail:
+1. Verify you're using HTTP REST API (port 18000), not GraphQL
+2. Check that Crow HTTP server is running
+3. Test with curl:
+ ```bash
+ # Set a value
+ curl -X POST -d '{"id":"test","value":"hello"}'
http://localhost:18000/v1/transactions/commit
+
+ # Get a value
+ curl http://localhost:18000/v1/transactions/test
+ ```
+4. Verify HTTP REST API (port 18000) is accessible for KV operations
+
+### Transaction Timeouts
+
+If transactions timeout:
+1. Increase `REQUEST_TIMEOUT` in `.env`
+2. Check ResilientDB blockchain status
+3. Verify network latency
+
+## Key Architecture Insights
+
+### Service Separation
+
+ResilientDB uses different services for different operations:
+
+1. **GraphQL Server (Port 8000)**
+ - Purpose: Blockchain asset transactions
+ - Operations: `getTransaction`, `postTransaction` (with PrepareAsset)
+
+2. **HTTP/Crow Server (Port 18000)**
+ - Purpose: Simple key-value storage
+ - Operations: `get`, `set` (via REST API)
+ - Endpoints:
+ - `POST /v1/transactions/commit` (for set)
+ - `GET /v1/transactions/{key}` (for get)
+
+### Why This Matters
+
+- **KV operations use HTTP REST API** (port 18000) for `set`/`get` operations
+- **Asset transactions use GraphQL** (port 8000) and require PrepareAsset
format
+- **Wrong port numbers** (e.g., 9000 instead of 8000) will cause connection
errors
+
+## References
+
+- [ResilientDB GitHub](https://github.com/apache/incubator-resilientdb)
+- [ResilientDB Documentation](https://resilientdb.incubator.apache.org/)
+- [ResilientDB GraphQL
API](https://beacon.resilientdb.com/docs/resilientdb_graphql)
+- [ResilientDB Quick Start](https://quickstart.resilientdb.com/)
+- [MCP Protocol Documentation](https://modelcontextprotocol.io/)
+
+## License
+
+Apache 2.0 License
+
+## Authors
+
+Team 10 - ECS 265 Project
diff --git a/ecosystem/mcp/TESTING.md b/ecosystem/mcp/TESTING.md
new file mode 100644
index 00000000..f259c386
--- /dev/null
+++ b/ecosystem/mcp/TESTING.md
@@ -0,0 +1,455 @@
+# Testing Guide for ResilientDB MCP Server
+
+This guide provides individual commands to test the MCP server and GraphQL
functionalities.
+
+## Prerequisites
+
+1. ResilientDB running with:
+ - GraphQL server on port 8000
+ - HTTP/Crow server on port 18000
+2. Python 3.11+ installed
+3. Dependencies installed: `pip install -r requirements.txt`
+
+## 1. Test GraphQL Server (Port 8000)
+
+### Check if GraphQL server is running
+
+```bash
+curl -X POST http://localhost:8000/graphql \
+ -H "Content-Type: application/json" \
+ -d '{"query": "{ __schema { queryType { name } } }"}'
+```
+
+Expected: Should return schema information without errors.
+
+### Test getTransaction Query
+
+First, create a transaction using HTTP API to get a transaction ID:
+
+```bash
+# Create a transaction via HTTP API
+curl -X POST -d '{"id":"test-tx-1","value":"test data"}' \
+ http://localhost:18000/v1/transactions/commit
+```
+
+Then query it via GraphQL (Note: This may not work if the transaction was
created via HTTP, as GraphQL is for asset transactions):
+
+```bash
+# Get transaction by ID
+curl -X POST http://localhost:8000/graphql \
+ -H "Content-Type: application/json" \
+ -d '{
+ "query": "{ getTransaction(id: \"YOUR_TRANSACTION_ID\") { id version
amount uri type publicKey operation metadata asset signerPublicKey } }"
+ }'
+```
+
+Replace `YOUR_TRANSACTION_ID` with an actual asset transaction ID from
ResilientDB.
+
+### Test postTransaction Mutation
+
+You'll need to generate cryptographic keys first. For testing, you can use the
ResilientDB key generation tools or create keys manually.
+
+```bash
+# Post a new asset transaction
+curl -X POST http://localhost:8000/graphql \
+ -H "Content-Type: application/json" \
+ -d '{
+ "query": "mutation PostTransaction($data: PrepareAsset!) {
postTransaction(data: $data) { id } }",
+ "variables": {
+ "data": {
+ "operation": "CREATE",
+ "amount": 100,
+ "signerPublicKey": "YOUR_SIGNER_PUBLIC_KEY",
+ "signerPrivateKey": "YOUR_SIGNER_PRIVATE_KEY",
+ "recipientPublicKey": "YOUR_RECIPIENT_PUBLIC_KEY",
+ "asset": {
+ "data": "test asset data"
+ }
+ }
+ }
+ }'
+```
+
+Replace the placeholder keys with actual cryptographic keys (for example,
generated via the GraphQL repo’s key tools or `generate_keys_utility.py` in
this project).
+
+When using the **MCP server via Claude**, you normally **do not need to
generate keys manually**:
+- The `generateKeys` MCP tool can be called explicitly to get signer/recipient
keypairs.
+- The `postTransaction` MCP tool will **auto-generate keys** if they are not
provided in the arguments.
+
+### Inspect GraphQL Schema
+
+```bash
+# Get all available queries
+curl -X POST http://localhost:8000/graphql \
+ -H "Content-Type: application/json" \
+ -d '{
+ "query": "{ __type(name: \"Query\") { fields { name description args {
name type { name kind ofType { name } } } type { name kind ofType { name } } }
} }"
+ }' | python3 -m json.tool
+
+# Get all available mutations
+curl -X POST http://localhost:8000/graphql \
+ -H "Content-Type: application/json" \
+ -d '{
+ "query": "{ __type(name: \"Mutation\") { fields { name description args {
name type { name kind ofType { name } } } type { name kind ofType { name } } }
} }"
+ }' | python3 -m json.tool
+
+# Get PrepareAsset input type
+curl -X POST http://localhost:8000/graphql \
+ -H "Content-Type: application/json" \
+ -d '{
+ "query": "{ __type(name: \"PrepareAsset\") { inputFields { name type {
name kind ofType { name } } } } }"
+ }' | python3 -m json.tool
+```
+
+## 2. Test HTTP REST API (Port 18000)
+
+### Test set (Store key-value)
+
+```bash
+# Store a key-value pair
+curl -X POST -d '{"id":"test-key-1","value":"test value 1"}' \
+ http://localhost:18000/v1/transactions/commit
+```
+
+Expected output: `id: test-key-1`
+
+### Test get (Retrieve key-value)
+
+```bash
+# Retrieve a value by key
+curl http://localhost:18000/v1/transactions/test-key-1
+```
+
+Expected: Returns the stored value as JSON.
+
+### Test multiple operations
+
+```bash
+# Store multiple values
+curl -X POST -d '{"id":"user-1","value":"Alice"}' \
+ http://localhost:18000/v1/transactions/commit
+
+curl -X POST -d '{"id":"user-2","value":"Bob"}' \
+ http://localhost:18000/v1/transactions/commit
+
+# Retrieve them
+curl http://localhost:18000/v1/transactions/user-1
+curl http://localhost:18000/v1/transactions/user-2
+```
+
+## 3. Test MCP Server (Debugging Only)
+
+**Important Note:** The MCP server is **automatically started by Claude
Desktop** when you configure it. You do NOT need to start it manually for
normal use.
+
+The commands below are **only for debugging purposes** to verify the server
can start without errors:
+
+### Verify Server Can Start (Debugging)
+
+```bash
+# From the project directory
+# This will start the server and wait for input on stdio
+# Press Ctrl+C to stop it
+python server.py
+```
+
+**Expected:** Server should start without errors and wait for MCP protocol
messages on stdio. If you see errors, check your configuration and dependencies.
+
+**Note:** In normal operation, Claude Desktop starts the server automatically.
You only need to run this manually if you're debugging startup issues.
+
+### Test with Python script
+
+Create a test script to call the MCP server tools:
+
+```bash
+# Create a simple test script
+cat > test_mcp_tools.py << 'EOF'
+import asyncio
+import json
+from graphql_client import GraphQLClient
+
+async def test_tools():
+ client = GraphQLClient()
+
+ # Test set operation
+ print("Testing set operation...")
+ result = await client.set_key_value("test-key", "test-value")
+ print(f"Set result: {json.dumps(result, indent=2)}")
+
+ # Test get operation
+ print("\nTesting get operation...")
+ result = await client.get_key_value("test-key")
+ print(f"Get result: {json.dumps(result, indent=2)}")
+
+ # Test getTransaction (requires valid transaction ID)
+ # print("\nTesting getTransaction...")
+ # result = await client.get_transaction("YOUR_TRANSACTION_ID")
+ # print(f"GetTransaction result: {json.dumps(result, indent=2)}")
+
+if __name__ == "__main__":
+ asyncio.run(test_tools())
+EOF
+
+# Run the test script
+python test_mcp_tools.py
+```
+
+## 4. Connect to Claude Desktop
+
+**Important:** The MCP server is **automatically started by Claude Desktop**
when you configure it. You do NOT need to manually start `server.py`. Claude
Desktop will launch it automatically when needed.
+
+### Step 1: Locate Claude Desktop Configuration
+
+**macOS:**
+```bash
+# Configuration file location
+~/Library/Application Support/Claude/claude_desktop_config.json
+```
+
+**Windows:**
+```
+%APPDATA%\Claude\claude_desktop_config.json
+```
+
+**Linux:**
+```bash
+~/.config/Claude/claude_desktop_config.json
+```
+
+### Step 2: Edit Configuration File
+
+Open the configuration file and add the MCP server configuration:
+
+```bash
+# On macOS
+open ~/Library/Application\ Support/Claude/claude_desktop_config.json
+
+# Or edit with your preferred editor
+nano ~/Library/Application\ Support/Claude/claude_desktop_config.json
+```
+
+### Step 3: Add MCP Server Configuration
+
+Add this configuration to the `mcpServers` section:
+
+```json
+{
+ "mcpServers": {
+ "resilientdb": {
+ "command": "python",
+ "args": ["/absolute/path/to/ResilientDB-MCP/server.py"],
+ "env": {
+ "RESILIENTDB_GRAPHQL_URL": "http://localhost:8000/graphql",
+ "RESILIENTDB_HTTP_URL": "http://localhost:18000"
+ }
+ }
+ }
+}
+```
+
+**Important:** Replace `/absolute/path/to/ResilientDB-MCP/server.py` with the
actual absolute path to your server.py file.
+
+**Example for macOS:**
+```json
+{
+ "mcpServers": {
+ "resilientdb": {
+ "command": "python",
+ "args":
["/Users/rahul/data/workspace/kanagrah/ResilientDB-MCP/server.py"],
+ "env": {
+ "RESILIENTDB_GRAPHQL_URL": "http://localhost:8000/graphql",
+ "RESILIENTDB_HTTP_URL": "http://localhost:18000"
+ }
+ }
+ }
+}
+```
+
+### Step 4: Find Your Absolute Path
+
+```bash
+# Get the absolute path to server.py
+pwd
+# Output: /Users/rahul/data/workspace/kanagrah/ResilientDB-MCP
+
+# Full path to server.py
+realpath server.py
+# Or
+readlink -f server.py
+```
+
+### Step 5: Restart Claude Desktop
+
+1. **Quit Claude Desktop completely** (not just close the window)
+ - macOS: Cmd+Q or right-click dock icon → Quit
+ - Windows: Close all windows and exit from system tray
+ - Linux: Close all windows
+
+2. **Restart Claude Desktop**
+ - Claude Desktop will **automatically start the MCP server** when it
launches
+ - The server runs as a subprocess managed by Claude Desktop
+
+3. **Verify Connection**
+ - Open Claude Desktop
+ - Check if the MCP server is connected (usually shown in the status or
settings)
+ - Look for any error messages in the Claude Desktop logs
+ - If there are errors, check that the path to `server.py` is correct and
Python is accessible
+
+### Step 6: Test in Claude Desktop
+
+Once connected, you can test the tools by asking Claude:
+
+1. **Test set operation:**
+ ```
+ Store a key-value pair with key "test" and value "hello world"
+ ```
+
+2. **Test get operation:**
+ ```
+ Get the value for key "test"
+ ```
+
+3. **Test getTransaction:**
+ ```
+ Get transaction details for transaction ID "YOUR_TRANSACTION_ID"
+ ```
+
+4. **Test postTransaction:**
+ ```
+ Post a new asset transaction with operation "CREATE", amount 100, and asset
data {"data": "test"}
+ ```
+ (Note: This will require you to provide the cryptographic keys)
+
+## 5. Verify Server Configuration
+
+### Check if server can start (Debugging Only)
+
+**Note:** This is only for debugging. In normal use, Claude Desktop starts the
server automatically.
+
+```bash
+# Run the server briefly to check for startup errors
+# Press Ctrl+C immediately after it starts
+timeout 2 python server.py 2>&1 || true
+```
+
+**Expected:** Server should start without import errors or configuration
errors. If you see errors, fix them before configuring Claude Desktop.
+
+### Check if dependencies are installed
+
+```bash
+# Verify all dependencies are installed
+python -c "import mcp; import httpx; import dotenv; print('All dependencies
OK')"
+```
+
+### Check environment variables
+
+```bash
+# Verify environment variables (if .env file exists)
+python -c "from config import Config; print(f'GraphQL URL:
{Config.GRAPHQL_URL}'); print(f'HTTP URL: {Config.HTTP_URL}')"
+```
+
+## 6. Troubleshooting
+
+### Server won't start
+
+```bash
+# Check Python version (should be 3.11+)
+python --version
+
+# Check if MCP SDK is installed
+pip list | grep mcp
+
+# Install missing dependencies
+pip install -r requirements.txt
+```
+
+### GraphQL connection errors
+
+```bash
+# Test GraphQL endpoint directly
+curl -v http://localhost:8000/graphql
+
+# Check if ResilientDB GraphQL server is running
+netstat -an | grep 8000
+# Or on macOS/Linux
+lsof -i :8000
+```
+
+### HTTP connection errors
+
+```bash
+# Test HTTP endpoint directly
+curl -v http://localhost:18000/v1/transactions/test
+
+# Check if ResilientDB HTTP server is running
+netstat -an | grep 18000
+# Or on macOS/Linux
+lsof -i :18000
+```
+
+### Claude Desktop connection issues
+
+1. **Check Claude Desktop logs:**
+ - macOS: `~/Library/Logs/Claude/`
+ - Windows: `%APPDATA%\Claude\logs\`
+ - Linux: `~/.config/Claude/logs/`
+
+2. **Verify configuration file syntax:**
+ ```bash
+ # Validate JSON syntax
+ python -m json.tool ~/Library/Application\
Support/Claude/claude_desktop_config.json
+ ```
+
+3. **Check file permissions:**
+ ```bash
+ # Make sure server.py is executable
+ chmod +x server.py
+
+ # Check if Python is in PATH
+ which python
+ ```
+
+## 7. Quick Test Checklist
+
+- [ ] GraphQL server responds on port 8000
+- [ ] HTTP server responds on port 18000
+- [ ] Can store key-value via HTTP API
+- [ ] Can retrieve key-value via HTTP API
+- [ ] MCP server can start without errors (for debugging)
+- [ ] Claude Desktop configuration file is valid JSON
+- [ ] Claude Desktop can connect to MCP server
+- [ ] Can use `set` tool in Claude Desktop
+- [ ] Can use `get` tool in Claude Desktop
+- [ ] Can use `getTransaction` tool in Claude Desktop (with valid transaction
ID)
+- [ ] Can use `postTransaction` tool in Claude Desktop (with valid keys)
+
+## Example Test Session
+
+```bash
+# 1. Start ResilientDB (if not already running)
+# (Follow ResilientDB setup instructions)
+
+# 2. Test HTTP API
+curl -X POST -d '{"id":"demo-key","value":"demo value"}' \
+ http://localhost:18000/v1/transactions/commit
+
+curl http://localhost:18000/v1/transactions/demo-key
+
+# 3. Test GraphQL
+curl -X POST http://localhost:8000/graphql \
+ -H "Content-Type: application/json" \
+ -d '{"query": "{ __schema { queryType { name } } }"}'
+
+# 4. Configure Claude Desktop
+# (Edit claude_desktop_config.json as shown in section 4)
+# Claude Desktop will automatically start the server when you restart it
+
+# 5. Test in Claude Desktop
+# (Ask Claude to use the tools)
+```
+
+## Additional Resources
+
+- [ResilientDB Documentation](https://resilientdb.incubator.apache.org/)
+- [MCP Protocol Documentation](https://modelcontextprotocol.io/)
+- [Claude Desktop Setup
Guide](https://docs.anthropic.com/claude/docs/claude-desktop)
+
diff --git a/ecosystem/mcp/config.py b/ecosystem/mcp/config.py
new file mode 100644
index 00000000..a3424b19
--- /dev/null
+++ b/ecosystem/mcp/config.py
@@ -0,0 +1,31 @@
+"""Configuration management for ResilientDB MCP Server."""
+import os
+from typing import Optional
+from dotenv import load_dotenv
+
+load_dotenv()
+
+
class Config:
    """Runtime configuration for the ResilientDB MCP server.

    Values are read from the environment (after ``load_dotenv()``) when the
    class body is evaluated; sensible localhost defaults apply when a
    variable is unset.
    """

    # GraphQL endpoint (port 8000) — asset transactions only.
    GRAPHQL_URL: str = os.environ.get(
        "RESILIENTDB_GRAPHQL_URL", "http://localhost:8000/graphql"
    )

    # HTTP/Crow endpoint (port 18000) — key-value operations.
    HTTP_URL: str = os.environ.get(
        "RESILIENTDB_HTTP_URL", "http://localhost:18000"
    )

    # Optional authentication material; None when not configured.
    API_KEY: Optional[str] = os.environ.get("RESILIENTDB_API_KEY")
    AUTH_TOKEN: Optional[str] = os.environ.get("RESILIENTDB_AUTH_TOKEN")

    # Timeout and polling knobs (seconds / attempt counts).
    REQUEST_TIMEOUT: int = int(os.environ.get("REQUEST_TIMEOUT", "30"))
    TRANSACTION_POLL_INTERVAL: float = float(
        os.environ.get("TRANSACTION_POLL_INTERVAL", "1.0")
    )
    MAX_POLL_ATTEMPTS: int = int(os.environ.get("MAX_POLL_ATTEMPTS", "30"))
\ No newline at end of file
diff --git a/ecosystem/mcp/generate_keys_utility.py
b/ecosystem/mcp/generate_keys_utility.py
new file mode 100755
index 00000000..9e8cccd0
--- /dev/null
+++ b/ecosystem/mcp/generate_keys_utility.py
@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""Utility script to generate Ed25519 keypairs for ResilientDB.

This is a standalone utility script for manual key generation.
For automated key generation, use the MCP server's generateKeys tool instead.

The ResilientDB python driver location can be overridden with the
RESDB_DRIVER_PATH environment variable; otherwise the script looks for it
relative to this file (ecosystem/mcp/ -> ecosystem/graphql/resdb_driver).
"""
import sys
import os

# Get the directory where this script is located.
script_dir = os.path.dirname(os.path.abspath(__file__))

# Candidate locations for the ResilientDB resdb_driver package, in priority
# order. An explicit environment override beats the repo-relative default.
# (Previously a developer-specific absolute path was hard-coded here.)
possible_paths = []
_env_override = os.getenv("RESDB_DRIVER_PATH")
if _env_override:
    possible_paths.append(_env_override)
# Relative path from mcp to graphql/resdb_driver (go up 1 level to ecosystem/).
possible_paths.append(os.path.join(script_dir, '../graphql/resdb_driver'))

resilientdb_path = None
for path in possible_paths:
    abs_path = os.path.abspath(path)
    if os.path.exists(abs_path):
        # Prepend so the driver's modules win over same-named packages.
        sys.path.insert(0, abs_path)
        resilientdb_path = abs_path
        break

if resilientdb_path is None:
    print("Error: Could not find ResilientDB resdb_driver directory.")
    print("Tried the following paths:")
    for path in possible_paths:
        print(f"  - {os.path.abspath(path)}")
    print(f"\nCurrent script location: {script_dir}")
    print("Hint: set RESDB_DRIVER_PATH to the resdb_driver directory.")
    sys.exit(1)

try:
    from crypto import generate_keypair
except ImportError:
    print("Error: Could not import generate_keypair from ResilientDB crypto module.")
    print(f"Found path: {resilientdb_path}")
    print(f"Please ensure the crypto.py file exists in: {resilientdb_path}")
    sys.exit(1)

# Generate one keypair for the signer and one for the recipient.
signer = generate_keypair()
recipient = generate_keypair()

print("=" * 70)
print("ResilientDB Key Generator")
print("=" * 70)
print()
print("Signer Keypair:")
print(f"  Public Key:  {signer.public_key}")
print(f"  Private Key: {signer.private_key}")
print()
print("Recipient Keypair:")
print(f"  Public Key:  {recipient.public_key}")
print(f"  Private Key: {recipient.private_key}")
print()
print("=" * 70)
print("Ready-to-use curl command:")
print("=" * 70)
print()
# Emit a copy-pasteable GraphQL mutation pre-filled with the generated keys.
print(f"""curl -X POST http://localhost:8000/graphql \\
  -H "Content-Type: application/json" \\
  -d '{{
    "query": "mutation Test($data: PrepareAsset!) {{ postTransaction(data: $data) {{ id }} }}",
    "variables": {{
      "data": {{
        "operation": "CREATE",
        "amount": 100,
        "signerPublicKey": "{signer.public_key}",
        "signerPrivateKey": "{signer.private_key}",
        "recipientPublicKey": "{recipient.public_key}",
        "asset": {{
          "data": {{
            "name": "Test Asset",
            "description": "My first test asset"
          }}
        }}
      }}
    }}
  }}' | python3 -m json.tool""")
print()
print("=" * 70)
print("Copy and paste the curl command above to create a transaction!")
print("=" * 70)
diff --git a/ecosystem/mcp/graphql_client.py b/ecosystem/mcp/graphql_client.py
new file mode 100644
index 00000000..be5baaf7
--- /dev/null
+++ b/ecosystem/mcp/graphql_client.py
@@ -0,0 +1,206 @@
+"""GraphQL client for ResilientDB operations."""
+import httpx
+from typing import Dict, Any, Optional
+from config import Config
+import json
+
+
class GraphQLClient:
    """Client for executing GraphQL queries and mutations on ResilientDB.

    Two services are used (see README "Service Separation"):
      * the GraphQL server (``Config.GRAPHQL_URL``, port 8000) for asset
        transactions, and
      * the HTTP/Crow server (``Config.HTTP_URL``, port 18000) for simple
        key-value operations.
    """

    def __init__(self, url: str = None, api_key: Optional[str] = None):
        """Create a client.

        Args:
            url: GraphQL endpoint URL; defaults to Config.GRAPHQL_URL.
            api_key: Optional bearer token; defaults to Config.API_KEY.
        """
        self.url = url or Config.GRAPHQL_URL
        self.http_url = Config.HTTP_URL  # HTTP/Crow server for KV operations
        self.api_key = api_key or Config.API_KEY
        self.timeout = Config.REQUEST_TIMEOUT

    def _get_headers(self) -> Dict[str, str]:
        """Build HTTP headers, adding a bearer token when configured."""
        headers = {
            "Content-Type": "application/json",
        }
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"
        return headers

    async def execute_query(self, query: str, variables: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """
        Execute a GraphQL query or mutation.

        Args:
            query: GraphQL query string
            variables: Optional variables for the query

        Returns:
            The "data" portion of the GraphQL response (empty dict if absent).

        Raises:
            Exception: if the response body contains GraphQL "errors".
            httpx.HTTPStatusError: on a non-2xx HTTP status.
        """
        payload = {
            "query": query,
            "variables": variables or {}
        }

        async with httpx.AsyncClient(timeout=self.timeout) as client:
            response = await client.post(
                self.url,
                json=payload,
                headers=self._get_headers()
            )
            response.raise_for_status()
            result = response.json()

            if "errors" in result:
                error_msg = json.dumps(result["errors"], indent=2)
                raise Exception(f"GraphQL errors: {error_msg}")

            return result.get("data", {})

    async def create_account(self, account_id: Optional[str] = None) -> Dict[str, Any]:
        """
        Create a new account in ResilientDB.

        Always raises: account creation requires generating keys outside of
        GraphQL, so this operation is intentionally unsupported here.
        """
        raise Exception(
            "createAccount is not available. "
            "Account creation requires generating cryptographic keys outside of GraphQL. "
            "Use external key generation tools to create accounts."
        )

    async def get_transaction(self, transaction_id: str) -> Dict[str, Any]:
        """
        Get transaction by ID via GraphQL.
        Returns RetrieveTransaction with all available fields.

        Based on official documentation: https://beacon.resilientdb.com/docs/resilientdb_graphql#get-transaction-by-id

        Note: All fields except 'metadata' are NON_NULL and must be included in the query.
        """
        query = """
        query GetTx($id: ID!) {
            getTransaction(id: $id) {
                id
                version
                amount
                uri
                type
                publicKey
                operation
                metadata
                asset
                signerPublicKey
            }
        }
        """
        return await self.execute_query(query, {"id": transaction_id})

    async def post_transaction(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Post a new asset transaction via GraphQL.
        Requires PrepareAsset with all required fields:
        - operation (String): Transaction operation type
        - amount (Int): Transaction amount
        - signerPublicKey (String): Public key of the signer
        - signerPrivateKey (String): Private key of the signer
        - recipientPublicKey (String): Public key of the recipient
        - asset (JSONScalar): Asset data as JSON object with 'data' field (not string!)

        The asset must be structured as: {"data": {...}} where {...} contains
        your actual asset data.

        Returns CommitTransaction with transaction ID.

        Note: the caller's `data` dict is never mutated; a shallow copy is
        sent to the server.
        """
        # Validate required fields up front so the error names all gaps.
        required_fields = ["operation", "amount", "signerPublicKey",
                           "signerPrivateKey", "recipientPublicKey", "asset"]
        missing_fields = [field for field in required_fields if field not in data]
        if missing_fields:
            raise Exception(
                f"Missing required fields in PrepareAsset: {', '.join(missing_fields)}. "
                f"Required fields: {', '.join(required_fields)}"
            )

        # IMPORTANT: Keep asset as dict/object - JSONScalar expects JSON object,
        # not string. The MCP framework may convert the asset object to a
        # string, so parse it back if needed.
        asset = data["asset"]
        if isinstance(asset, str):
            try:
                asset = json.loads(asset)
            except json.JSONDecodeError as e:
                raise Exception(
                    f"Failed to parse asset JSON string: {e}. "
                    f"Asset value: {asset[:100]}..."
                )

        # Ensure asset is a dict/object (not a list, string, or primitive).
        if not isinstance(asset, dict):
            raise Exception(
                f"Asset must be a JSON object (dict), but got {type(asset).__name__}. "
                f"Value: {str(asset)[:100]}..."
            )

        # Work on a shallow copy so the caller's dict is not mutated
        # (the original implementation wrote the parsed asset back into
        # the caller-supplied dict).
        data = dict(data)
        data["asset"] = asset

        mutation = """
        mutation Test($data: PrepareAsset!) {
            postTransaction(data: $data) {
                id
            }
        }
        """
        return await self.execute_query(mutation, {"data": data})

    async def update_transaction(self, transaction_id: str, data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Update an existing transaction.

        Always raises: blockchain transactions are immutable once committed.
        """
        raise Exception(
            "updateTransaction is not available. "
            "Blockchain transactions are immutable once committed. "
            "To modify data, create a new transaction instead."
        )

    async def get_key_value(self, key: str) -> Dict[str, Any]:
        """
        Query the key-value store via the HTTP REST API (Crow server, port 18000).

        Returns a dict with the key, the extracted value, and the raw response.
        """
        async with httpx.AsyncClient(timeout=self.timeout) as client:
            response = await client.get(
                f"{self.http_url}/v1/transactions/{key}",
                headers=self._get_headers()
            )
            response.raise_for_status()
            result = response.json()

            # Return in a consistent format; fall back to the whole body
            # when there is no "value" field.
            return {
                "key": key,
                "value": result.get("value", result),
                "response": result
            }

    async def set_key_value(self, key: str, value: Any) -> Dict[str, Any]:
        """
        Set a key-value pair via the HTTP REST API (Crow server, port 18000).

        Non-string values are JSON-serialized before being stored.
        """
        # Convert value to string if it's not already.
        if not isinstance(value, str):
            value = json.dumps(value)

        async with httpx.AsyncClient(timeout=self.timeout) as client:
            response = await client.post(
                f"{self.http_url}/v1/transactions/commit",
                json={"id": key, "value": value},
                headers=self._get_headers()
            )
            response.raise_for_status()

            # Parse response text (Crow returns plain text like "id: key").
            response_text = response.text.strip()

            return {
                "key": key,
                "value": value,
                "status": "committed",
                "response": response_text
            }
\ No newline at end of file
diff --git a/ecosystem/mcp/requirements.txt b/ecosystem/mcp/requirements.txt
new file mode 100644
index 00000000..d041f084
--- /dev/null
+++ b/ecosystem/mcp/requirements.txt
@@ -0,0 +1,4 @@
+mcp>=1.0.0
+pydantic>=2.0.0
+httpx>=0.25.0
+python-dotenv>=1.0.0
diff --git a/ecosystem/mcp/rescontract_client.py
b/ecosystem/mcp/rescontract_client.py
new file mode 100644
index 00000000..002a505a
--- /dev/null
+++ b/ecosystem/mcp/rescontract_client.py
@@ -0,0 +1,705 @@
+"""ResContract client for ResilientDB smart contract operations."""
+import os
+import subprocess
+import json
+import tempfile
+import logging
+import asyncio
+from typing import Dict, Any, Optional
+
+logger = logging.getLogger(__name__)
+
+
+class ResContractClient:
+ """Client for interacting with ResContract CLI and contract_tools."""
+
+ def __init__(self, repo_root: Optional[str] = None):
+ """
+ Initialize ResContract client.
+
+ Args:
+ repo_root: Root directory of ResilientDB repository. If None,
auto-detects.
+ """
+ if repo_root is None:
+ # Auto-detect: go up from mcp directory to incubator-resilientdb
root
+ script_dir = os.path.dirname(os.path.abspath(__file__))
+ # From ecosystem/mcp/ to root (go up 2 levels: .. = ecosystem/,
../.. = root)
+ repo_root = os.path.abspath(os.path.join(script_dir, '../..'))
+
+ self.repo_root = repo_root
+ self.rescontract_cmd = "rescontract"
+
+ self.contract_tools_path = os.path.join(
+ self.repo_root,
"bazel-bin/service/tools/contract/api_tools/contract_tools"
+ )
+
+ def _run_command(self, cmd: list[str]) -> str:
+ """Run a command and return its output (stdout + stderr)."""
+ logger.info(f"Running command: {' '.join(cmd)}")
+ env = os.environ.copy()
+ env["ResDB_Home"] = self.repo_root
+
+ try:
+ result = subprocess.run(
+ cmd,
+ capture_output=True,
+ text=True,
+ check=True,
+ env=env,
+ cwd=self.repo_root
+ )
+ return f"{result.stdout}\n{result.stderr}"
+ except subprocess.CalledProcessError as e:
+ logger.error(f"Command failed: {e.stderr}\nOutput: {e.stdout}")
+ return f"Command failed: {e.stderr}\nOutput: {e.stdout}"
+
+ def compile_solidity(self, sol_path: str, output_name: str) -> str:
+ """Compile a solidity file."""
+ cmd = [self.rescontract_cmd, "compile", "--sol", sol_path, "--output",
output_name]
+ return self._run_command(cmd)
+
+ def _run_contract_tools(self, config_path: str, config_data: dict[str,
Any]) -> str:
+ """Helper to run contract_tools with a temp config file."""
+ with tempfile.NamedTemporaryFile(mode='w', suffix='.json',
delete=False) as tmp_config:
+ json.dump(config_data, tmp_config)
+ tmp_config_path = tmp_config.name
+
+ try:
+ cmd = [
+ self.contract_tools_path,
+ "-c", config_path,
+ "--config_file", tmp_config_path
+ ]
+ return self._run_command(cmd)
+ finally:
+ if os.path.exists(tmp_config_path):
+ os.remove(tmp_config_path)
+
    def deploy_contract(self, config_path: str, contract_path: str, name: str,
                        arguments: str, owner_address: str) -> str:
        """Deploy a smart contract using contract_tools directly.

        Args:
            config_path: replica/network config passed to contract_tools -c.
            contract_path: path to the compiled contract JSON (".json" is
                appended if missing); relative paths resolve under repo_root.
            name: contract name to deploy.
            arguments: constructor init parameters string.
            owner_address: address recorded as the contract owner.

        Returns:
            Combined stdout/stderr of the contract_tools invocation.
        """
        if not contract_path.endswith('.json'):
            contract_path += '.json'

        # real_contract_path tracks the caller-supplied file so we can tell,
        # in the finally block, whether contract_path was swapped for a temp
        # file that must be cleaned up.
        real_contract_path = contract_path
        full_contract_path = contract_path
        if not os.path.isabs(contract_path):
            full_contract_path = os.path.join(self.repo_root, contract_path)

        try:
            with open(full_contract_path, 'r') as f:
                contract_data = json.load(f)

            # solc emits contract keys like "file.sol:Name"; if the bare
            # name is absent, alias the matching qualified key to `name`
            # in a temporary copy of the JSON so contract_tools finds it.
            if "contracts" in contract_data:
                contracts = contract_data["contracts"]
                if name not in contracts:
                    found_key = None
                    for key in contracts:
                        if key.endswith(f":{name}"):
                            found_key = key
                            break

                    if found_key:
                        logger.info(f"Mapping contract key '{found_key}' to '{name}' for deployment")
                        contract_data["contracts"][name] = contracts[found_key]

                        # Write the augmented JSON to a temp file and point
                        # contract_path at it; real_contract_path keeps the
                        # original so the temp file is removed afterwards.
                        with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as tmp_contract:
                            json.dump(contract_data, tmp_contract)
                            real_contract_path = contract_path
                            contract_path = tmp_contract.name

        except Exception as e:
            # Best effort: deployment proceeds with the untouched file.
            logger.warning(f"Failed to preprocess contract JSON: {e}. Proceeding with original file.")

        config_data = {
            "command": "deploy",
            "contract_path": contract_path,
            "contract_name": name,
            "contract_address": owner_address,
            "init_params": arguments
        }

        try:
            return self._run_contract_tools(config_path, config_data)
        finally:
            # Remove the temp copy only when one was actually created.
            if contract_path != real_contract_path and os.path.exists(contract_path):
                os.remove(contract_path)
+
+ def execute_contract(self, config_path: str, sender_address: str,
+ contract_address: str, function_name: str,
+ arguments: str) -> str:
+ """Execute a contract function using contract_tools directly."""
+ config_data = {
+ "command": "execute",
+ "caller_address": sender_address,
+ "contract_address": contract_address,
+ "func_name": function_name,
+ "params": arguments
+ }
+ return self._run_contract_tools(config_path, config_data)
+
+ def create_account(self, config_path: str) -> str:
+ """Create a new account using contract_tools directly."""
+ config_data = {
+ "command": "create_account"
+ }
+ return self._run_contract_tools(config_path, config_data)
+
+ def check_replica_status(self) -> Dict[str, Any]:
+ """Check the status of contract_service replicas."""
+ try:
+ result = subprocess.run(
+ ["ps", "aux"],
+ capture_output=True,
+ text=True,
+ check=True
+ )
+ lines = [line for line in result.stdout.split('\n')
+ if 'contract_service' in line and 'grep' not in line]
+
+ count = len(lines)
+ return {
+ "count": count,
+ "running": count == 5,
+ "details": lines,
+ "message": f"{count}/5 replicas running. System is {'ready' if
count == 5 else 'NOT ready'}."
+ }
+ except subprocess.CalledProcessError as e:
+ logger.error(f"Failed to check replica status: {e}")
+ return {
+ "count": 0,
+ "running": False,
+ "details": [],
+ "message": f"Error checking replicas: {e}"
+ }
+
    def start_replica_cluster(self) -> str:
        """Start/restart the ResilientDB replica cluster.

        Runs the repo's start_contract_service.sh with a 30-second cap,
        waits briefly, then reports the resulting replica status plus the
        script's output. All failures are returned as strings, not raised.
        """
        script_path = os.path.join(
            self.repo_root,
            "service/tools/contract/service_tools/start_contract_service.sh"
        )

        if not os.path.exists(script_path):
            return f"Error: Start script not found at {script_path}"

        try:
            logger.info(f"Starting replica cluster using {script_path}")
            # Run via bash explicitly; the script need not be executable.
            result = subprocess.run(
                ["/bin/bash", script_path],
                capture_output=True,
                text=True,
                cwd=self.repo_root,
                timeout=30
            )

            # Give replicas a moment to come up before probing them.
            import time
            time.sleep(2)
            status = self.check_replica_status()

            return f"Replica cluster started.\n{status['message']}\n\nOutput:\n{result.stdout}\n{result.stderr}"
        except subprocess.TimeoutExpired:
            return "Replica start script timed out after 30 seconds"
        except Exception as e:
            logger.error(f"Failed to start replicas: {e}")
            return f"Error starting replicas: {str(e)}"
+
+ def get_logs(self, log_file: str, lines: int = 50) -> str:
+ """Get recent lines from a log file."""
+ log_path = os.path.join(self.repo_root, log_file)
+
+ if not os.path.exists(log_path):
+ return f"Error: Log file not found at {log_path}"
+
+ try:
+ result = subprocess.run(
+ ["tail", "-n", str(lines), log_path],
+ capture_output=True,
+ text=True,
+ check=True
+ )
+ return result.stdout
+ except subprocess.CalledProcessError as e:
+ logger.error(f"Failed to read log file {log_file}: {e}")
+ return f"Error reading log file: {e.stderr}"
+
+ @staticmethod
+ def validate_address(address: str) -> tuple[bool, Optional[str]]:
+ """Validate Ethereum-style address format."""
+ if not isinstance(address, str):
+ return False, "Address must be a string"
+
+ if not address.startswith("0x"):
+ return False, "Address must start with '0x'"
+
+ if len(address) != 42:
+ return False, f"Address must be 42 characters (got {len(address)})"
+
+ try:
+ int(address[2:], 16)
+ return True, None
+ except ValueError:
+ return False, "Address contains invalid hexadecimal characters"
+
    def validate_config(self, config_path: str) -> Dict[str, Any]:
        """Validate a ResilientDB configuration file.

        Two formats are supported, chosen by file extension:
          * ".json": a contract_tools config — address fields are checked
            with validate_address and contract_path existence is probed.
          * anything else: a plain-text replica list, one
            "num_replicas host port" entry per line ('#' lines ignored).

        Returns:
            Dict with "valid" (no errors found), "errors", and "warnings".
            This method never raises; unreadable files produce errors.
        """
        errors = []
        warnings = []

        if not os.path.exists(config_path):
            return {
                "valid": False,
                "errors": [f"Config file not found: {config_path}"],
                "warnings": []
            }

        try:
            with open(config_path, 'r') as f:
                content = f.read().strip()
        except Exception as e:
            return {
                "valid": False,
                "errors": [f"Cannot read config file: {e}"],
                "warnings": []
            }

        if config_path.endswith('.json'):
            # JSON branch: structural parse plus field-level checks.
            try:
                config_data = json.loads(content)

                if "contract_address" in config_data:
                    is_valid, error = self.validate_address(config_data["contract_address"])
                    if not is_valid:
                        errors.append(f"Invalid contract_address: {error}")

                if "caller_address" in config_data:
                    is_valid, error = self.validate_address(config_data["caller_address"])
                    if not is_valid:
                        errors.append(f"Invalid caller_address: {error}")

                # Missing contract file is only a warning — the path may be
                # valid on the machine where contract_tools actually runs.
                if "contract_path" in config_data:
                    if not os.path.exists(config_data["contract_path"]):
                        warnings.append(f"Contract file not found: {config_data['contract_path']}")

            except json.JSONDecodeError as e:
                errors.append(f"Invalid JSON format: {e}")
        else:
            # Text branch: validate each "num_replicas host port" line.
            lines = content.split('\n')
            for i, line in enumerate(lines, 1):
                if not line or line.startswith('#'):
                    continue

                parts = line.split()
                if len(parts) < 3:
                    errors.append(f"Line {i}: Expected format 'num_replicas host port', got: {line}")
                    continue

                try:
                    num_replicas = int(parts[0])
                    # Out-of-range counts are suspicious but not fatal.
                    if num_replicas < 1 or num_replicas > 100:
                        warnings.append(f"Line {i}: Unusual number of replicas: {num_replicas}")
                except ValueError:
                    errors.append(f"Line {i}: Invalid replica count '{parts[0]}' (must be integer)")

                # Accept localhost, 127.0.0.1, or anything containing a dot
                # (hostname/IP); everything else is flagged as unusual.
                host = parts[1]
                if not (host == "localhost" or host == "127.0.0.1" or "." in host):
                    warnings.append(f"Line {i}: Unusual host format: {host}")

                try:
                    port = int(parts[2])
                    if port < 1 or port > 65535:
                        errors.append(f"Line {i}: Invalid port {port} (must be 1-65535)")
                except ValueError:
                    errors.append(f"Line {i}: Invalid port '{parts[2]}' (must be integer)")

        return {
            "valid": len(errors) == 0,
            "errors": errors,
            "warnings": warnings
        }
+
def health_check(self) -> Dict[str, Any]:
    """Comprehensive system health check.

    Aggregates the replica, REST-API and GraphQL-API probes into one
    report. Overall status is "down" when replicas or the REST API are
    unhealthy (hard dependencies), "degraded" when only the GraphQL API
    is unhealthy, and "healthy" otherwise.

    Returns:
        Dict with keys ``replicas``, ``rest_api``, ``graphql_api``
        (each a per-component status dict) and ``overall_status``.
    """
    # (removed an unused local `import socket`; the per-component
    # probes do their own socket handling)
    health = {
        "replicas": self._health_check_replicas(),
        "rest_api": self._health_check_rest(),
        "graphql_api": self._health_check_graphql(),
        "overall_status": "healthy"
    }

    if health["replicas"]["status"] != "healthy" or health["rest_api"]["status"] != "healthy":
        health["overall_status"] = "down"
    elif health["graphql_api"]["status"] != "healthy":
        health["overall_status"] = "degraded"

    return health
+
+ def _health_check_replicas(self) -> Dict[str, Any]:
+ """Check replica health."""
+ status = self.check_replica_status()
+ return {
+ "status": "healthy" if status["running"] else "down",
+ "count": status["count"],
+ "message": status["message"]
+ }
+
+ def _health_check_rest(self) -> Dict[str, Any]:
+ """Check REST API health."""
+ import time
+ import socket
+ try:
+ start = time.time()
+ sock = socket.create_connection(("127.0.0.1", 18000), timeout=2)
+ latency_ms = int((time.time() - start) * 1000)
+ sock.close()
+ return {
+ "status": "healthy",
+ "latency_ms": latency_ms,
+ "url": "http://127.0.0.1:18000"
+ }
+ except (socket.timeout, ConnectionRefusedError, OSError) as e:
+ return {
+ "status": "down",
+ "error": str(e),
+ "url": "http://127.0.0.1:18000"
+ }
+
+ def _health_check_graphql(self) -> Dict[str, Any]:
+ """Check GraphQL API health."""
+ import time
+ import socket
+ try:
+ start = time.time()
+ sock = socket.create_connection(("127.0.0.1", 8000), timeout=2)
+ latency_ms = int((time.time() - start) * 1000)
+ sock.close()
+ return {
+ "status": "healthy",
+ "latency_ms": latency_ms,
+ "url": "http://127.0.0.1:8000"
+ }
+ except (socket.timeout, ConnectionRefusedError, OSError) as e:
+ return {
+ "status": "down",
+ "error": str(e),
+ "url": "http://127.0.0.1:8000"
+ }
+
def list_all_accounts(self) -> list[Dict[str, Any]]:
    """List all accounts found in the system logs.

    Scans ``server0.log`` twice: first to find account-creation entries
    (address + glog timestamp), then to count every log line mentioning
    each discovered address.

    Returns:
        Account dicts with ``address``, ``created`` and
        ``activity_count``, sorted newest-created first; an empty list
        when the log is missing or parsing fails.
    """
    import re

    accounts = {}
    log_file = os.path.join(self.repo_root, "server0.log")

    if not os.path.exists(log_file):
        return []

    try:
        # Pass 1: creation events. NOTE(review): the marker
        # "create count:address" looks unusual (possibly a typo for
        # "create account: address") — confirm against real
        # contract-service log output.
        with open(log_file, 'r') as f:
            for line in f:
                if "create count:address" in line or 'create account:' in line:
                    match = re.search(r'address: "([^"]+)"', line)
                    if match:
                        addr = match.group(1)
                        if addr not in accounts:
                            # glog prefix, e.g. "E20260125 21:52:29".
                            timestamp_match = re.match(r'E(\d{8} \d{2}:\d{2}:\d{2})', line)
                            timestamp = timestamp_match.group(1) if timestamp_match else "Unknown"

                            accounts[addr] = {
                                "address": addr,
                                "created": timestamp,
                                "activity_count": 0
                            }

        # Pass 2: count every mention of each known address.
        # O(lines * accounts) — acceptable for the log sizes expected here.
        with open(log_file, 'r') as f:
            for line in f:
                for addr in accounts:
                    if addr in line:
                        accounts[addr]["activity_count"] += 1

    except Exception as e:
        logger.error(f"Error parsing accounts from logs: {e}")
        return []

    # Lexicographic sort on the timestamp string == chronological for
    # the fixed-width glog format; "Unknown" entries sort together.
    return sorted(accounts.values(), key=lambda x: x["created"], reverse=True)
+
def get_transaction_history(
    self,
    limit: int = 50,
    tx_type: Optional[str] = None,
    address: Optional[str] = None
) -> list[Dict[str, Any]]:
    """Get transaction history from server logs.

    Parses ``server0.log`` for smart-contract DEPLOY and EXECUTE
    commands and returns them as dicts, filtered through
    :meth:`_matches_filter`.

    Args:
        limit: Maximum number of (most recent) transactions to return.
        tx_type: Optional filter, "DEPLOY" or "EXECUTE" (case-insensitive).
        address: Optional filter — only transactions mentioning this address.

    Returns:
        Transaction dicts in log order (oldest first); empty list when
        the log is missing or parsing fails.
    """
    import re

    log_file = os.path.join(self.repo_root, "server0.log")
    if not os.path.exists(log_file):
        return []

    # glog timestamp prefix, e.g. "E20260125 21:52:29"; compiled once.
    ts_re = re.compile(r'E(\d{8} \d{2}:\d{2}:\d{2})')

    def quoted_field(pattern: str, line: str) -> str:
        """Extract a quoted protobuf-text field, or "Unknown" when absent."""
        m = re.search(pattern, line)
        return m.group(1) if m else "Unknown"

    transactions = []
    try:
        with open(log_file, 'r') as f:
            for line in f:
                if "cmd: DEPLOY" in line:
                    ts_m = ts_re.match(line)
                    tx = {
                        "type": "DEPLOY",
                        "timestamp": ts_m.group(1) if ts_m else "Unknown",
                        "caller": quoted_field(r'caller_address: "([^"]+)"', line),
                        "contract_name": quoted_field(r'contract_name: "([^"]+)"', line),
                        "details": ""
                    }
                    if self._matches_filter(tx, tx_type, address):
                        transactions.append(tx)

                elif "cmd: EXECUTE" in line:
                    ts_m = ts_re.match(line)
                    tx = {
                        "type": "EXECUTE",
                        "timestamp": ts_m.group(1) if ts_m else "Unknown",
                        "caller": quoted_field(r'caller_address: "([^"]+)"', line),
                        "contract_address": quoted_field(r'contract_address: "([^"]+)"', line),
                        "function": quoted_field(r'func_name: "([^"]+)"', line)
                    }
                    if self._matches_filter(tx, tx_type, address):
                        transactions.append(tx)

    except Exception as e:
        logger.error(f"Error parsing transaction history: {e}")
        return []

    # Keep only the most recent `limit` entries.
    return transactions[-limit:] if transactions else []
+
+ def _matches_filter(
+ self,
+ tx: Dict[str, Any],
+ tx_type: Optional[str],
+ address: Optional[str]
+ ) -> bool:
+ """Check if transaction matches filter criteria."""
+ if tx_type and tx["type"] != tx_type.upper():
+ return False
+ if address and address not in str(tx):
+ return False
+ return True
+
def search_logs(self, query: str, server_id: Optional[int] = None, lines: int = 100) -> str:
    """Search for a pattern in log files.

    Args:
        query: Text pattern handed to ``grep`` (grep regex semantics).
        server_id: If given, search only ``server<id>.log``; otherwise
            all four server logs plus ``client.log``.
        lines: Maximum matching lines kept per file (most recent kept).

    Returns:
        Matches grouped per file (each group headed by the file name),
        or "No matches found." when nothing matched.
    """
    if server_id is not None:
        files = [f"server{server_id}.log"]
    else:
        files = ["server0.log", "server1.log", "server2.log", "server3.log", "client.log"]

    results = []
    for filename in files:
        log_path = os.path.join(self.repo_root, filename)
        if not os.path.exists(log_path):
            continue

        try:
            # grep exits non-zero on "no match"; we only inspect stdout,
            # so no check=True here.
            cmd = ["grep", "-n", query, log_path]
            result = subprocess.run(
                cmd,
                capture_output=True,
                text=True
            )

            if result.stdout:
                matches = result.stdout.strip().split('\n')
                if len(matches) > lines:
                    matches = matches[-lines:]

                # Fix: the header previously used a placeholder-less
                # f-string, so every file's section was labelled
                # identically; include the actual file name.
                results.append(f"--- {filename} ---")
                results.extend(matches)
                results.append("")

        except Exception as e:
            results.append(f"Error searching {filename}: {e}")

    return "\n".join(results) if results else "No matches found."
+
def get_consensus_metrics(self) -> Dict[str, Any]:
    """Extract consensus metrics from server logs.

    Tails the last 200 lines of ``server0.log`` and scans them
    newest-first for the most recent "primary:<id> ... version:<n>"
    and "execute done:<n>" entries.

    Returns:
        Dict with ``view``, ``sequence``, ``primary_id`` (int, or the
        string "Unknown" when not found) and ``active_replicas`` (int).
    """
    import re
    log_path = os.path.join(self.repo_root, "server0.log")
    metrics = {
        "view": "Unknown",
        "sequence": "Unknown",
        "primary_id": "Unknown",
        "active_replicas": 0
    }

    if not os.path.exists(log_path):
        return metrics

    try:
        # Only recent history matters; 200 lines keeps this fast.
        result = subprocess.run(
            ["tail", "-n", "200", log_path],
            capture_output=True,
            text=True
        )

        lines = result.stdout.split('\n')
        # Iterate newest-first so the first match per metric is the
        # latest value; the "Unknown" guards keep the first hit.
        for line in reversed(lines):
            if "primary:" in line and "version:" in line:
                p_match = re.search(r'primary:(\d+)', line)
                if p_match and metrics["primary_id"] == "Unknown":
                    metrics["primary_id"] = int(p_match.group(1))

                v_match = re.search(r'version:(\d+)', line)
                if v_match and metrics["view"] == "Unknown":
                    metrics["view"] = int(v_match.group(1))

            if "execute done:" in line:
                s_match = re.search(r'execute done:(\d+)', line)
                if s_match and metrics["sequence"] == "Unknown":
                    metrics["sequence"] = int(s_match.group(1))

            # Stop early once both headline metrics are filled
            # (primary_id may still be "Unknown" at this point).
            if metrics["view"] != "Unknown" and metrics["sequence"] != "Unknown":
                break

        metrics["active_replicas"] = self.check_replica_status()["count"]

    except Exception as e:
        logger.error(f"Error parsing consensus metrics: {e}")

    return metrics
+
def archive_logs(self) -> str:
    """Archive all log files into a timestamped zip under the repo root.

    Missing files are logged as warnings and skipped rather than
    failing the archive.

    Returns:
        Path of the created archive.

    Raises:
        Exception: when the zip file cannot be created.
    """
    import zipfile
    from datetime import datetime

    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    archive_path = os.path.join(self.repo_root, f"resdb_logs_{stamp}.zip")

    candidates = [
        "server0.log", "server1.log", "server2.log", "server3.log",
        "client.log", "service/tools/config/interface/service.config"
    ]

    try:
        with zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for rel_name in candidates:
                full_path = os.path.join(self.repo_root, rel_name)
                if not os.path.exists(full_path):
                    logger.warning(f"File not found for archive: {rel_name}")
                    continue
                # Store under the repo-relative name, not the absolute path.
                zipf.write(full_path, arcname=rel_name)
        return archive_path
    except Exception as e:
        raise Exception(f"Failed to create archive: {e}")
+
async def benchmark_throughput(self, num_tx: int = 100) -> Dict[str, Any]:
    """
    Benchmark system throughput by sending a batch of transactions via HTTP REST API.

    Returns metrics including TPS and latency.

    Args:
        num_tx: Number of transactions to send (default: 100)

    Returns:
        Dictionary with benchmark metrics
    """
    import httpx
    import uuid
    import time
    from config import Config

    http_url = Config.HTTP_URL
    start_time = time.time()
    successful = 0
    failed = 0
    # Per-transaction round-trip times in ms; successes only — failed
    # requests are counted but excluded from latency statistics.
    latencies = []

    # Create tasks for concurrent execution
    async def send_transaction(tx_id: str, value: str):
        """Send a single transaction and return (success, latency)."""
        tx_start = time.time()
        try:
            # A fresh client per request keeps transactions independent
            # at the cost of connection reuse.
            async with httpx.AsyncClient(timeout=30.0) as client:
                response = await client.post(
                    f"{http_url}/v1/transactions/commit",
                    json={"id": tx_id, "value": value},
                    headers={"Content-Type": "application/json"}
                )
                response.raise_for_status()
                latency = (time.time() - tx_start) * 1000  # Convert to ms
                return True, latency
        except Exception as e:
            logger.error(f"Transaction {tx_id} failed: {e}")
            latency = (time.time() - tx_start) * 1000
            return False, latency

    # Execute in batches to avoid overwhelming the client/network
    batch_size = 50
    tasks = []

    # Unique ids avoid key collisions between benchmark runs.
    for i in range(num_tx):
        tx_id = f"bench-{uuid.uuid4()}"
        value = f"bench-val-{i}"
        tasks.append(send_transaction(tx_id, value))

    # Process in batches; return_exceptions=True so one crashed
    # coroutine is tallied as a failure instead of aborting the batch.
    for i in range(0, len(tasks), batch_size):
        batch = tasks[i:i+batch_size]
        results = await asyncio.gather(*batch, return_exceptions=True)

        for res in results:
            if isinstance(res, Exception):
                failed += 1
            else:
                success, latency = res
                if success:
                    successful += 1
                    latencies.append(latency)
                else:
                    failed += 1

    end_time = time.time()
    duration = end_time - start_time
    # TPS counts only successful commits over the whole wall-clock run.
    tps = successful / duration if duration > 0 else 0

    avg_latency = sum(latencies) / len(latencies) if latencies else 0
    min_latency = min(latencies) if latencies else 0
    max_latency = max(latencies) if latencies else 0

    return {
        "total_transactions": num_tx,
        "successful": successful,
        "failed": failed,
        "duration_seconds": round(duration, 2),
        "tps": round(tps, 2),
        "latency_avg_ms": round(avg_latency, 2),
        "latency_min_ms": round(min_latency, 2),
        "latency_max_ms": round(max_latency, 2),
        "success_rate": round((successful / num_tx) * 100, 2) if num_tx > 0 else 0
    }
+
diff --git a/ecosystem/mcp/server.py b/ecosystem/mcp/server.py
new file mode 100644
index 00000000..c2704745
--- /dev/null
+++ b/ecosystem/mcp/server.py
@@ -0,0 +1,982 @@
+"""MCP Server for ResilientDB - Complete integration with GraphQL, REST API,
and Smart Contracts."""
+import asyncio
+import json
+import sys
+import os
+import time
+from datetime import datetime
+from typing import Any, Dict
+import httpx
+
+try:
+ from mcp.server import Server
+ from mcp.server.stdio import stdio_server
+ from mcp.types import Tool, TextContent
+except ImportError:
+ print("Error: MCP SDK not found. Please install it with: pip install mcp",
file=sys.stderr)
+ sys.exit(1)
+
+from config import Config
+from graphql_client import GraphQLClient
+from rescontract_client import ResContractClient
+
# Initialize the shared clients used by every tool handler below
# (module-level singletons; created once at import time).
graphql_client = GraphQLClient()
rescontract_client = ResContractClient()
+
+
async def send_monitoring_data(tool_name: str, args: dict, result: Any, duration: float):
    """Report one tool invocation to the ResLens middleware (best effort).

    The result is truncated to 1000 characters. Any failure is printed
    to stderr and swallowed so monitoring can never break a tool call.
    """
    try:
        payload = {
            "tool": tool_name,
            "args": args,
            "result": str(result)[:1000] if result else "None",
            "timestamp": datetime.now().isoformat(),
            "duration": duration,
            "resdb_metrics": {}
        }
        async with httpx.AsyncClient() as client:
            await client.post(
                "http://localhost:3000/api/v1/mcp/prompts",
                json=payload,
                timeout=5.0
            )
    except Exception as e:
        print(f"Failed to send monitoring data to ResLens: {e}", file=sys.stderr)
+
+
async def analyze_transactions(transaction_ids: list[str]) -> Dict[str, Any]:
    """
    Analyze a set of transactions and compute summary statistics.

    Args:
        transaction_ids: List of transaction IDs to analyze

    Returns:
        Dictionary with summary statistics, the raw transaction data,
        and any per-transaction retrieval errors
    """
    fetched = []
    errors = []

    # Fetch every requested transaction; a failed lookup is recorded
    # instead of aborting the whole analysis.
    for tx_id in transaction_ids:
        try:
            response = await graphql_client.get_transaction(tx_id)
            payload = response.get("getTransaction", {})
            if payload:
                fetched.append(payload)
        except Exception as e:
            errors.append({
                "transactionId": tx_id,
                "error": str(e)
            })

    if not fetched:
        return {
            "summary": {
                "total": 0,
                "successful": 0,
                "failed": len(errors),
                "message": "No transactions could be retrieved"
            },
            "transactions": [],
            "errors": errors
        }

    # Aggregate statistics over the retrieved transactions.
    amounts = []
    operations = {}
    types = set()
    signers = set()
    public_keys = set()

    for tx in fetched:
        # Amounts: keep only values that parse as integers.
        if "amount" in tx and tx["amount"] is not None:
            try:
                amounts.append(int(tx["amount"]))
            except (ValueError, TypeError):
                pass

        # Tally by operation type.
        op_name = tx.get("operation", "UNKNOWN")
        operations[op_name] = operations.get(op_name, 0) + 1

        # Distinct type / signer / public-key values (empty skipped).
        if tx.get("type"):
            types.add(str(tx["type"]))
        if tx.get("signerPublicKey"):
            signers.add(str(tx["signerPublicKey"]))
        if tx.get("publicKey"):
            public_keys.add(str(tx["publicKey"]))

    summary = {
        "total": len(fetched),
        "successful": len(fetched),
        "failed": len(errors),
        "byOperation": operations,
        "distinctTypes": list(types),
        "distinctSigners": len(signers),
        "distinctPublicKeys": len(public_keys)
    }

    # Amount statistics only when at least one numeric amount was seen.
    if amounts:
        summary["amountStats"] = {
            "min": min(amounts),
            "max": max(amounts),
            "average": sum(amounts) / len(amounts),
            "total": sum(amounts),
            "count": len(amounts)
        }

    return {
        "summary": summary,
        "transactions": fetched,
        "errors": errors
    }
+
+
+def _setup_resilientdb_path() -> str:
+ """Setup path to ResilientDB resdb_driver for key generation."""
+ script_dir = os.path.dirname(os.path.abspath(__file__))
+
+ possible_paths = [
+ # Relative path from mcp to graphql/resdb_driver (go up 1 level to
ecosystem/)
+ os.path.join(script_dir, '../graphql/resdb_driver'),
+ # Absolute path (fallback)
+
'/Users/rahul/data/workspace/kanagrah/incubator-resilientdb/ecosystem/graphql/resdb_driver',
+ ]
+
+ for path in possible_paths:
+ abs_path = os.path.abspath(path)
+ if os.path.exists(abs_path):
+ if abs_path not in sys.path:
+ sys.path.insert(0, abs_path)
+ return abs_path
+
+ raise ImportError(
+ f"Could not find ResilientDB resdb_driver directory. "
+ f"Tried: {', '.join([os.path.abspath(p) for p in possible_paths])}"
+ )
+
+
+def _setup_sha3_shim():
+ """Setup sha3 module shim using Python's built-in hashlib for Python
3.11+."""
+ import hashlib
+ import sys
+ from types import ModuleType
+
+ class SHA3_256:
+ """SHA3-256 hash implementation using Python's built-in hashlib."""
+
+ def __init__(self, data=None):
+ """Initialize SHA3-256 hash object."""
+ self._hash = hashlib.sha3_256()
+ if data is not None:
+ if isinstance(data, str):
+ data = data.encode('utf-8')
+ self._hash.update(data)
+
+ def update(self, data):
+ """Update the hash with additional data."""
+ if isinstance(data, str):
+ data = data.encode('utf-8')
+ self._hash.update(data)
+
+ def hexdigest(self):
+ """Return the hexadecimal digest of the hash."""
+ return self._hash.hexdigest()
+
+ def digest(self):
+ """Return the binary digest of the hash."""
+ return self._hash.digest()
+
+ # Create a factory function that returns instances
+ def sha3_256(data=None):
+ """Factory function for SHA3-256 hash objects."""
+ return SHA3_256(data)
+
+ # Create a fake sha3 module and inject it into sys.modules
+ sha3_module = ModuleType('sha3')
+ sha3_module.sha3_256 = sha3_256
+
+ # Only inject if sha3 is not already available
+ if 'sha3' not in sys.modules:
+ sys.modules['sha3'] = sha3_module
+
+
def generate_keypairs() -> Dict[str, str]:
    """
    Generate Ed25519 keypairs for ResilientDB transactions.

    Returns:
        Dictionary with signer and recipient public/private keys

    Raises:
        ImportError: when the ResilientDB crypto module cannot be loaded.
    """
    try:
        _setup_resilientdb_path()
        # The crypto module imports `sha3`; install the shim first so
        # the import succeeds on Python 3.11+ without pysha3.
        _setup_sha3_shim()
        from crypto import generate_keypair
    except ImportError as e:
        raise ImportError(
            f"Could not import generate_keypair from ResilientDB crypto module: {e}"
        )

    signer_pair = generate_keypair()
    recipient_pair = generate_keypair()

    return {
        "signerPublicKey": signer_pair.public_key,
        "signerPrivateKey": signer_pair.private_key,
        "recipientPublicKey": recipient_pair.public_key,
        "recipientPrivateKey": recipient_pair.private_key
    }
+
# Create the MCP server instance; the list_tools/call_tool handlers
# below register themselves on it via decorators.
app = Server("resilientdb-mcp")
+
+
@app.list_tools()
async def handle_list_tools() -> list[Tool]:
    """List all available tools.

    Returns the static catalogue of MCP tools exposed by this server:
    key generation, GraphQL transaction tools, key-value REST tools,
    smart-contract lifecycle tools, and cluster/log diagnostics. Each
    Tool carries a JSON-Schema ``inputSchema`` describing its arguments.
    """
    return [
        # --- Key generation ---
        Tool(
            name="generateKeys",
            description="Generate Ed25519 cryptographic keypairs (signer and recipient) for ResilientDB transactions. Returns signerPublicKey, signerPrivateKey, recipientPublicKey, and recipientPrivateKey. Use this tool to generate keys before creating transactions, or it will be automatically called when needed for postTransaction.",
            inputSchema={
                "type": "object",
                "properties": {},
                "required": []
            }
        ),
        # --- GraphQL transaction tools (port 8000) ---
        Tool(
            name="getTransaction",
            description="Get asset transaction details by transaction ID using GraphQL (port 8000). Returns RetrieveTransaction with id, version, amount, uri, type, publicKey, operation, metadata, asset, and signerPublicKey.",
            inputSchema={
                "type": "object",
                "properties": {
                    "transactionId": {
                        "type": "string",
                        "description": "Transaction ID to retrieve."
                    }
                },
                "required": ["transactionId"]
            }
        ),
        Tool(
            name="postTransaction",
            description="Post a new asset transaction to ResilientDB using GraphQL (port 8000). Requires PrepareAsset with: operation (String), amount (Int), signerPublicKey (String), signerPrivateKey (String), recipientPublicKey (String), and asset (JSON). Returns CommitTransaction with transaction ID. If keys are not provided, automatically generate them using generateKeys tool first.",
            inputSchema={
                "type": "object",
                "properties": {
                    "operation": {
                        "type": "string",
                        "description": "Transaction operation type (e.g., 'CREATE', 'TRANSFER')."
                    },
                    "amount": {
                        "type": "integer",
                        "description": "Transaction amount (integer)."
                    },
                    "signerPublicKey": {
                        "type": "string",
                        "description": "Public key of the signer. If not provided, keys will be auto-generated."
                    },
                    "signerPrivateKey": {
                        "type": "string",
                        "description": "Private key of the signer. If not provided, keys will be auto-generated."
                    },
                    "recipientPublicKey": {
                        "type": "string",
                        "description": "Public key of the recipient. If not provided, keys will be auto-generated."
                    },
                    "asset": {
                        "description": "Asset data as JSON object."
                    }
                },
                "required": ["operation", "amount", "asset"]
            }
        ),
        Tool(
            name="analyzeTransactions",
            description="Analyze a set of transactions by their IDs and compute summary statistics. Returns summary with counts by operation type, amount statistics (min/max/average), distinct types, signers, and public keys. Also returns raw transaction data and any errors encountered. Useful for understanding transaction patterns and identifying outliers.",
            inputSchema={
                "type": "object",
                "properties": {
                    "transactionIds": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        },
                        "description": "List of transaction IDs to analyze (maximum 20 transactions recommended).",
                        "minItems": 1,
                        "maxItems": 20
                    }
                },
                "required": ["transactionIds"]
            }
        ),
        # --- Key-value REST tools (Crow server, port 18000) ---
        Tool(
            name="get",
            description="Retrieves a value from ResilientDB by key using HTTP REST API (Crow server on port 18000).",
            inputSchema={
                "type": "object",
                "properties": {
                    "key": {
                        "type": "string",
                        "description": "Key to retrieve."
                    }
                },
                "required": ["key"]
            }
        ),
        Tool(
            name="set",
            description="Stores a key-value pair in ResilientDB using HTTP REST API (Crow server on port 18000).",
            inputSchema={
                "type": "object",
                "properties": {
                    "key": {
                        "type": "string",
                        "description": "Key to store the value under."
                    },
                    "value": {
                        "description": "Value to store (can be any JSON-serializable value)."
                    }
                },
                "required": ["key", "value"]
            }
        ),
        # --- GraphQL introspection and smart-contract tools ---
        Tool(
            name="introspectGraphQL",
            description="Introspect the ResilientDB GraphQL schema to see available types and operations.",
            inputSchema={
                "type": "object",
                "properties": {},
                "required": []
            }
        ),
        Tool(
            name="compileContract",
            description="Compile a Solidity smart contract to JSON format.",
            inputSchema={
                "type": "object",
                "properties": {
                    "sol_path": {
                        "type": "string",
                        "description": "Path to the .sol file."
                    },
                    "output_name": {
                        "type": "string",
                        "description": "Name of the output .json file."
                    }
                },
                "required": ["sol_path", "output_name"]
            }
        ),
        Tool(
            name="deployContract",
            description="Deploy a smart contract to ResilientDB.",
            inputSchema={
                "type": "object",
                "properties": {
                    "config_path": {
                        "type": "string",
                        "description": "Path to the client configuration file."
                    },
                    "contract_path": {
                        "type": "string",
                        "description": "Path to the compiled contract JSON file."
                    },
                    "name": {
                        "type": "string",
                        "description": "Name of the contract."
                    },
                    "arguments": {
                        "type": "string",
                        "description": "Constructor parameters (comma-separated)."
                    },
                    "owner_address": {
                        "type": "string",
                        "description": "The address of the contract owner."
                    }
                },
                "required": ["config_path", "contract_path", "name", "arguments", "owner_address"]
            }
        ),
        Tool(
            name="executeContract",
            description="Execute a function on a deployed smart contract.",
            inputSchema={
                "type": "object",
                "properties": {
                    "config_path": {
                        "type": "string",
                        "description": "Path to the client configuration file."
                    },
                    "sender_address": {
                        "type": "string",
                        "description": "The address of the sender executing the function."
                    },
                    "contract_address": {
                        "type": "string",
                        "description": "The address of the deployed contract."
                    },
                    "function_name": {
                        "type": "string",
                        "description": "Name of the function to execute (including parameter types, e.g., 'transfer(address,uint256)')."
                    },
                    "arguments": {
                        "type": "string",
                        "description": "Arguments to pass to the function (comma-separated)."
                    }
                },
                "required": ["config_path", "sender_address", "contract_address", "function_name", "arguments"]
            }
        ),
        Tool(
            name="createAccount",
            description="Create a new ResilientDB account for smart contract operations.",
            inputSchema={
                "type": "object",
                "properties": {
                    "config_path": {
                        "type": "string",
                        "description": "Path to the client configuration file."
                    }
                },
                "required": ["config_path"]
            }
        ),
        # --- Cluster management and diagnostics ---
        Tool(
            name="checkReplicasStatus",
            description="Check the status of ResilientDB contract service replicas. Returns information about how many of the 5 required replicas are currently running.",
            inputSchema={
                "type": "object",
                "properties": {},
                "required": []
            }
        ),
        Tool(
            name="startReplicas",
            description="Start or restart the ResilientDB contract service replica cluster. WARNING: This will wipe the existing blockchain state.",
            inputSchema={
                "type": "object",
                "properties": {},
                "required": []
            }
        ),
        Tool(
            name="getServerLogs",
            description="Get recent log entries from a specific replica server.",
            inputSchema={
                "type": "object",
                "properties": {
                    "server_id": {
                        "type": "integer",
                        "description": "The server ID (0-3 for server0.log through server3.log). Default is 0."
                    },
                    "lines": {
                        "type": "integer",
                        "description": "Number of recent log lines to retrieve. Default is 50."
                    }
                },
                "required": []
            }
        ),
        Tool(
            name="getClientLogs",
            description="Get recent log entries from the client proxy.",
            inputSchema={
                "type": "object",
                "properties": {
                    "lines": {
                        "type": "integer",
                        "description": "Number of recent log lines to retrieve. Default is 50."
                    }
                },
                "required": []
            }
        ),
        Tool(
            name="validateConfig",
            description="Validate a ResilientDB configuration file. Checks for file existence, correct format, valid addresses, valid ports, and other configuration errors.",
            inputSchema={
                "type": "object",
                "properties": {
                    "config_path": {
                        "type": "string",
                        "description": "Absolute path to the configuration file to validate."
                    }
                },
                "required": ["config_path"]
            }
        ),
        Tool(
            name="healthCheck",
            description="Perform a comprehensive health check of all ResilientDB system components. Checks replicas, REST API, GraphQL API, and network latency.",
            inputSchema={
                "type": "object",
                "properties": {},
                "required": []
            }
        ),
        Tool(
            name="listAllAccounts",
            description="List all accounts found on the ResilientDB blockchain. Parses server logs to find all created accounts and their activity levels.",
            inputSchema={
                "type": "object",
                "properties": {},
                "required": []
            }
        ),
        Tool(
            name="getTransactionHistory",
            description="Query transaction history from the ResilientDB blockchain. Parses server logs to extract DEPLOY and EXECUTE transactions with filtering options.",
            inputSchema={
                "type": "object",
                "properties": {
                    "limit": {
                        "type": "integer",
                        "description": "Maximum number of transactions to return. Default is 50."
                    },
                    "tx_type": {
                        "type": "string",
                        "description": "Filter by transaction type: 'DEPLOY' or 'EXECUTE'. Optional."
                    },
                    "address": {
                        "type": "string",
                        "description": "Filter by account address (shows transactions involving this address). Optional."
                    }
                },
                "required": []
            }
        ),
        Tool(
            name="searchLogs",
            description="Search for a text pattern in the server logs.",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "The text string to search for (e.g., 'Error', 'TransactionID')."
                    },
                    "server_id": {
                        "type": "integer",
                        "description": "Optional server ID (0-3) to search only one log. If omitted, searches all logs."
                    },
                    "lines": {
                        "type": "integer",
                        "description": "Maximum number of matching lines to return. Default is 100."
                    }
                },
                "required": ["query"]
            }
        ),
        Tool(
            name="getConsensusMetrics",
            description="Get internal consensus metrics from the system logs. Extracts the current View Number, Sequence Number, and Primary Replica ID.",
            inputSchema={
                "type": "object",
                "properties": {},
                "required": []
            }
        ),
        Tool(
            name="archiveLogs",
            description="Archive all current log files to a ZIP file. Creates a timestamped ZIP file containing server0-3.log, client.log, and configuration files.",
            inputSchema={
                "type": "object",
                "properties": {},
                "required": []
            }
        ),
        Tool(
            name="benchmarkThroughput",
            description="Benchmark system throughput by sending a batch of transactions via HTTP REST API (key-value operations). Returns metrics including TPS (transactions per second), latency statistics (min/avg/max), success rate, and duration. Useful for performance testing and capacity planning.",
            inputSchema={
                "type": "object",
                "properties": {
                    "num_tx": {
                        "type": "integer",
                        "description": "Number of transactions to send for benchmarking. Default is 100.",
                        "default": 100
                    }
                },
                "required": []
            }
        )
    ]
+
+
[email protected]_tool()
+async def handle_call_tool(name: str, arguments: dict[str, Any] | None) ->
list[TextContent]:
+ """Handle tool calls and route to appropriate services."""
+ if arguments is None:
+ arguments = {}
+
+ start_time = time.time()
+ result = None
+ try:
+ if name == "generateKeys":
+ keys = generate_keypairs()
+ result = {
+ "signerPublicKey": keys["signerPublicKey"],
+ "signerPrivateKey": keys["signerPrivateKey"],
+ "recipientPublicKey": keys["recipientPublicKey"],
+ "recipientPrivateKey": keys["recipientPrivateKey"],
+ "message": "Keys generated successfully. Use these keys with
postTransaction tool."
+ }
+ return [TextContent(
+ type="text",
+ text=json.dumps(result, indent=2)
+ )]
+
+ elif name == "getTransaction":
+ transaction_id = arguments["transactionId"]
+ result = await graphql_client.get_transaction(transaction_id)
+ return [TextContent(
+ type="text",
+ text=json.dumps(result, indent=2)
+ )]
+
+ elif name == "postTransaction":
+ # Auto-generate keys if not provided or if any key is missing/empty
+ required_keys = ["signerPublicKey", "signerPrivateKey",
"recipientPublicKey"]
+ if not all(k in arguments and arguments.get(k) for k in
required_keys):
+ keys = generate_keypairs()
+ arguments["signerPublicKey"] = keys["signerPublicKey"]
+ arguments["signerPrivateKey"] = keys["signerPrivateKey"]
+ arguments["recipientPublicKey"] = keys["recipientPublicKey"]
+
+ # Process asset - ensure it has 'data' field
+ asset = arguments["asset"]
+ if isinstance(asset, str):
+ try:
+ asset = json.loads(asset)
+ except json.JSONDecodeError:
+ pass # Keep as string if not valid JSON
+
+ # If asset is a dict but doesn't have 'data' field, wrap it
+ if isinstance(asset, dict) and "data" not in asset:
+ asset = {"data": asset}
+ elif not isinstance(asset, dict):
+ # If it's still a string or other type, wrap it in data
+ asset = {"data": asset}
+
+ # Build PrepareAsset from individual arguments
+ data = {
+ "operation": arguments["operation"],
+ "amount": arguments["amount"],
+ "signerPublicKey": arguments["signerPublicKey"],
+ "signerPrivateKey": arguments["signerPrivateKey"],
+ "recipientPublicKey": arguments["recipientPublicKey"],
+ "asset": asset
+ }
+ result = await graphql_client.post_transaction(data)
+ return [TextContent(
+ type="text",
+ text=json.dumps(result, indent=2)
+ )]
+
+ elif name == "analyzeTransactions":
+ transaction_ids = arguments.get("transactionIds", [])
+ if not transaction_ids:
+ raise ValueError("transactionIds list cannot be empty")
+
+ # Limit to 20 transactions to avoid performance issues
+ if len(transaction_ids) > 20:
+ transaction_ids = transaction_ids[:20]
+
+ result = await analyze_transactions(transaction_ids)
+ return [TextContent(
+ type="text",
+ text=json.dumps(result, indent=2)
+ )]
+
+ elif name == "get":
+ key = arguments["key"]
+ result = await graphql_client.get_key_value(key)
+ return [TextContent(
+ type="text",
+ text=json.dumps(result, indent=2)
+ )]
+
+ elif name == "set":
+ key = arguments["key"]
+ value = arguments["value"]
+ result = await graphql_client.set_key_value(key, value)
+ return [TextContent(
+ type="text",
+ text=json.dumps(result, indent=2)
+ )]
+
+ # Smart Contract Tools
+ elif name == "introspectGraphQL":
+ query = "{ __schema { types { name } } }"
+ result = await graphql_client.execute_query(query)
+ return [TextContent(
+ type="text",
+ text=json.dumps(result, indent=2)
+ )]
+
+ elif name == "compileContract":
+ sol_path = arguments["sol_path"]
+ output_name = arguments["output_name"]
+ result = rescontract_client.compile_solidity(sol_path, output_name)
+ return [TextContent(
+ type="text",
+ text=json.dumps({"status": "success", "output": result},
indent=2)
+ )]
+
+ elif name == "deployContract":
+ config_path = arguments["config_path"]
+ contract_path = arguments["contract_path"]
+ name = arguments["name"]
+ arguments_str = arguments.get("arguments", "")
+ owner_address = arguments["owner_address"]
+ result = rescontract_client.deploy_contract(config_path,
contract_path, name, arguments_str, owner_address)
+ return [TextContent(
+ type="text",
+ text=json.dumps({"status": "success", "output": result},
indent=2)
+ )]
+
+ elif name == "executeContract":
+ config_path = arguments["config_path"]
+ sender_address = arguments["sender_address"]
+ contract_address = arguments["contract_address"]
+ function_name = arguments["function_name"]
+ arguments_str = arguments.get("arguments", "")
+ result = rescontract_client.execute_contract(config_path,
sender_address, contract_address, function_name, arguments_str)
+ return [TextContent(
+ type="text",
+ text=json.dumps({"status": "success", "output": result},
indent=2)
+ )]
+
+ elif name == "createAccount":
+ config_path = arguments["config_path"]
+ result = rescontract_client.create_account(config_path)
+ return [TextContent(
+ type="text",
+ text=json.dumps({"status": "success", "output": result},
indent=2)
+ )]
+
+ elif name == "checkReplicasStatus":
+ status = rescontract_client.check_replica_status()
+ response = f"{status['message']}\n\n"
+ if status['count'] > 0:
+ response += "Running processes:\n"
+ for i, detail in enumerate(status['details'], 1):
+ detail_short = detail[:150] + "..." if len(detail) > 150
else detail
+ response += f"{i}. {detail_short}\n"
+ if not status['running']:
+ response += "\n⚠️ System is NOT ready for operations. Use
startReplicas tool to start the cluster."
+ else:
+ response += "\n✅ System is ready for contract operations."
+ return [TextContent(
+ type="text",
+ text=json.dumps({"status": status, "message": response},
indent=2)
+ )]
+
+ elif name == "startReplicas":
+ result = rescontract_client.start_replica_cluster()
+ return [TextContent(
+ type="text",
+ text=json.dumps({"status": "success", "output": result,
"warning": "The blockchain state has been reset. You will need to create new
accounts and redeploy contracts."}, indent=2)
+ )]
+
+ elif name == "getServerLogs":
+ server_id = arguments.get("server_id", 0)
+ lines = arguments.get("lines", 50)
+ if server_id < 0 or server_id > 3:
+ raise ValueError(f"server_id must be between 0 and 3. Got:
{server_id}")
+ log_file = f"server{server_id}.log"
+ result = rescontract_client.get_logs(log_file, lines)
+ return [TextContent(
+ type="text",
+ text=json.dumps({"log_file": log_file, "lines": lines,
"content": result}, indent=2)
+ )]
+
+ elif name == "getClientLogs":
+ lines = arguments.get("lines", 50)
+ result = rescontract_client.get_logs("client.log", lines)
+ return [TextContent(
+ type="text",
+ text=json.dumps({"log_file": "client.log", "lines": lines,
"content": result}, indent=2)
+ )]
+
+ elif name == "validateConfig":
+ config_path = arguments["config_path"]
+ result = rescontract_client.validate_config(config_path)
+ return [TextContent(
+ type="text",
+ text=json.dumps(result, indent=2)
+ )]
+
+ elif name == "healthCheck":
+ health = rescontract_client.health_check()
+ status_emoji = {
+ "healthy": "✅",
+ "degraded": "⚠️",
+ "down": "❌"
+ }
+ overall_emoji = status_emoji.get(health["overall_status"], "❓")
+ report = f"🏥 ResilientDB Health Check Report\n\n"
+ report += f"Overall Status: {overall_emoji}
{health['overall_status'].upper()}\n\n"
+ report += "📊 Components:\n"
+ rep = health["replicas"]
+ rep_emoji = status_emoji.get(rep["status"], "❓")
+ report += f" {rep_emoji} Replicas: {rep['message']}\n"
+ rest = health["rest_api"]
+ rest_emoji = status_emoji.get(rest["status"], "❓")
+ if rest["status"] == "healthy":
+ report += f" {rest_emoji} REST API: Responding
({rest['url']}) - {rest['latency_ms']}ms\n"
+ else:
+ report += f" {rest_emoji} REST API: Down ({rest['url']}) -
{rest.get('error', 'Unknown error')}\n"
+ gql = health["graphql_api"]
+ gql_emoji = status_emoji.get(gql["status"], "❓")
+ if gql["status"] == "healthy":
+ report += f" {gql_emoji} GraphQL API: Responding
({gql['url']}) - {gql['latency_ms']}ms\n"
+ else:
+ report += f" {gql_emoji} GraphQL API: Down ({gql['url']}) -
{gql.get('error', 'Unknown error')}\n"
+ if health["overall_status"] != "healthy":
+ report += "\n💡 Recommendations:\n"
+ if health["replicas"]["status"] != "healthy":
+ report += " • Start replicas using the startReplicas
tool\n"
+ if health["rest_api"]["status"] != "healthy":
+ report += " • Check if ResilientDB REST service is
running on port 18000\n"
+ if health["graphql_api"]["status"] != "healthy":
+ report += " • Check if ResilientDB GraphQL service is
running on port 8000\n"
+ return [TextContent(
+ type="text",
+ text=json.dumps({"health": health, "report": report}, indent=2)
+ )]
+
+ elif name == "listAllAccounts":
+ accounts = rescontract_client.list_all_accounts()
+ if not accounts:
+ response = "No accounts found in the system logs.\n\nCreate an
account using the createAccount tool."
+ else:
+ response = f"👥 ResilientDB Accounts ({len(accounts)}
total)\n\n"
+ for i, acc in enumerate(accounts, 1):
+ response += f"{i}. {acc['address']}\n"
+ response += f" Created: {acc['created']}\n"
+ response += f" Activity: {acc['activity_count']} log
entries\n\n"
+ return [TextContent(
+ type="text",
+ text=json.dumps({"accounts": accounts, "message": response},
indent=2)
+ )]
+
+ elif name == "getTransactionHistory":
+ limit = arguments.get("limit", 50)
+ tx_type = arguments.get("tx_type")
+ address = arguments.get("address")
+ transactions = rescontract_client.get_transaction_history(limit,
tx_type, address)
+ if not transactions:
+ response = "📜 No transactions found matching the
criteria.\n\nTransactions will appear here after deploying contracts or
executing functions."
+ else:
+ response = f"📜 Transaction History ({len(transactions)}
transactions"
+ if tx_type:
+ response += f", type={tx_type}"
+ if address:
+ response += f", address={address[:10]}..."
+ response += ")\n\n"
+ for i, tx in enumerate(transactions, 1):
+ if tx["type"] == "DEPLOY":
+ response += f"{i}. [DEPLOY] {tx['timestamp']}\n"
+ response += f" Caller: {tx['caller']}\n"
+ response += f" Contract: {tx['contract_name']}\n\n"
+ elif tx["type"] == "EXECUTE":
+ response += f"{i}. [EXECUTE] {tx['timestamp']}\n"
+ response += f" Caller: {tx['caller']}\n"
+ response += f" Contract: {tx['contract_address']}\n"
+ response += f" Function: {tx['function']}\n\n"
+ return [TextContent(
+ type="text",
+ text=json.dumps({"transactions": transactions, "message":
response}, indent=2)
+ )]
+
+ elif name == "searchLogs":
+ query = arguments["query"]
+ server_id = arguments.get("server_id")
+ lines = arguments.get("lines", 100)
+ result = rescontract_client.search_logs(query, server_id, lines)
+ return [TextContent(
+ type="text",
+ text=json.dumps({"query": query, "results": result}, indent=2)
+ )]
+
+ elif name == "getConsensusMetrics":
+ metrics = rescontract_client.get_consensus_metrics()
+ report = f"📊 Consensus Metrics\n\n"
+ report += f"👑 Primary Replica: {metrics['primary_id']}\n"
+ report += f"👀 Current View: {metrics['view']}\n"
+ report += f"🔢 Sequence Number: {metrics['sequence']}\n"
+ report += f"🟢 Active Replicas: {metrics['active_replicas']}/5\n"
+ return [TextContent(
+ type="text",
+ text=json.dumps({"metrics": metrics, "report": report},
indent=2)
+ )]
+
+ elif name == "archiveLogs":
+ archive_path = rescontract_client.archive_logs()
+ return [TextContent(
+ type="text",
+ text=json.dumps({"status": "success", "archive_path":
archive_path, "message": f"📦 Logs archived successfully!\n\nLocation:
{archive_path}"}, indent=2)
+ )]
+
+ elif name == "benchmarkThroughput":
+ num_tx = arguments.get("num_tx", 100)
+ if num_tx < 1 or num_tx > 10000:
+ raise ValueError("num_tx must be between 1 and 10000")
+ result = await rescontract_client.benchmark_throughput(num_tx)
+ return [TextContent(
+ type="text",
+ text=json.dumps(result, indent=2)
+ )]
+
+ else:
+ raise ValueError(f"Unknown tool: {name}")
+
+ except Exception as e:
+ result = f"Error: {str(e)}"
+ error_message = f"Error executing tool '{name}': {str(e)}"
+ error_details = {
+ "error": type(e).__name__,
+ "message": error_message,
+ "tool": name,
+ "arguments": arguments
+ }
+ return [TextContent(
+ type="text",
+ text=json.dumps(error_details, indent=2)
+ )]
+ finally:
+ duration = time.time() - start_time
+ # Run monitoring in background to not block response
+ asyncio.create_task(send_monitoring_data(name, arguments, result,
duration))
+
+
+async def main():
+    """Main entry point for the MCP server.
+
+    Serves the MCP application over a stdio transport: the server reads
+    requests from stdin and writes responses to stdout, which is how MCP
+    clients (e.g. desktop LLM hosts) launch and talk to tool servers.
+    Blocks until the client closes the stream.
+    """
+    # Run the server using stdio transport
+    # The stdio_server context manager returns (read_stream, write_stream)
+    async with stdio_server() as (read_stream, write_stream):
+        # NOTE(review): create_initialization_options() presumably advertises
+        # the server's name/capabilities during the MCP handshake — confirm
+        # against the mcp SDK version pinned in requirements.txt.
+        await app.run(
+            read_stream,
+            write_stream,
+            app.create_initialization_options()
+        )
+
+
+# Script entry point: asyncio.run() creates an event loop and blocks
+# until main() — and therefore the whole MCP session — completes.
+if __name__ == "__main__":
+    asyncio.run(main())
\ No newline at end of file
diff --git a/ecosystem/mcp/test_mcp_tools.py b/ecosystem/mcp/test_mcp_tools.py
new file mode 100644
index 00000000..3faefc44
--- /dev/null
+++ b/ecosystem/mcp/test_mcp_tools.py
@@ -0,0 +1,24 @@
+import asyncio
+import json
+from graphql_client import GraphQLClient
+
+async def test_tools():
+    """Manual smoke test for GraphQLClient: round-trips a key/value pair.
+
+    Sets "test-key" to "test-value", reads it back, and prints both raw
+    responses. NOTE(review): this is a standalone script (not a pytest
+    test) and assumes a ResilientDB GraphQL endpoint is already running
+    at the client's configured URL — it will fail otherwise.
+    """
+    client = GraphQLClient()
+
+    # Test set operation
+    print("Testing set operation...")
+    result = await client.set_key_value("test-key", "test-value")
+    print(f"Set result: {json.dumps(result, indent=2)}")
+
+    # Test get operation
+    print("\nTesting get operation...")
+    result = await client.get_key_value("test-key")
+    print(f"Get result: {json.dumps(result, indent=2)}")
+
+    # Test getTransaction (requires valid transaction ID)
+    # print("\nTesting getTransaction...")
+    # result = await client.get_transaction("YOUR_TRANSACTION_ID")
+    # print(f"GetTransaction result: {json.dumps(result, indent=2)}")
+
+# Run the smoke test when invoked directly: python test_mcp_tools.py
+if __name__ == "__main__":
+    asyncio.run(test_tools())
diff --git a/ecosystem/smart-contract/rescontract/index.js
b/ecosystem/smart-contract/rescontract/index.js
index 57aa9f65..1c436b34 100755
--- a/ecosystem/smart-contract/rescontract/index.js
+++ b/ecosystem/smart-contract/rescontract/index.js
@@ -163,7 +163,19 @@ program
process.exit(1);
}
- await handleExecFile(commandPath, ['create', '-c', configPath]);
+ const tempConfigPath = path.join(os.tmpdir(),
`rescontract_config_${Date.now()}.json`);
+ const tempConfig = {
+ command: 'create_account'
+ };
+ fs.writeFileSync(tempConfigPath, JSON.stringify(tempConfig));
+
+ try {
+ await handleExecFile(commandPath, ['-c', configPath, '--config_file',
tempConfigPath]);
+ } finally {
+ if (fs.existsSync(tempConfigPath)) {
+ fs.unlinkSync(tempConfigPath);
+ }
+ }
} catch (error) {
logger.error(`Error executing create command: ${error.message}`);
console.error(`Error: ${error.message}`);
@@ -265,18 +277,21 @@ program
'contract_tools'
);
+ const tempConfigPath = path.join(os.tmpdir(),
`rescontract_config_${Date.now()}.json`);
+ const tempConfig = {
+ command: 'deploy',
+ contract_path: contract,
+ contract_name: name,
+ contract_address: owner,
+ init_params: args
+ };
+ fs.writeFileSync(tempConfigPath, JSON.stringify(tempConfig));
+
const argList = [
- 'deploy',
'-c',
configPath,
- '-p',
- contract,
- '-n',
- name,
- '-a',
- args,
- '-m',
- owner,
+ '--config_file',
+ tempConfigPath
];
const output = await handleSpawnProcess(commandPath, argList);
diff --git a/ecosystem/smart-contract/rescontract/package-lock.json
b/ecosystem/smart-contract/rescontract/package-lock.json
index abad5609..0505d364 100644
--- a/ecosystem/smart-contract/rescontract/package-lock.json
+++ b/ecosystem/smart-contract/rescontract/package-lock.json
@@ -16,19 +16,19 @@
// under the License.
{
"name": "rescontract-cli",
- "version": "1.1.0",
+ "version": "1.2.5",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "rescontract-cli",
- "version": "1.1.0",
- "license": "MIT",
+ "version": "1.2.5",
+ "license": "Apache",
"dependencies": {
- "commander": "^9.4.0",
- "fs": "^0.0.1-security",
+ "commander": "^9.5.0",
"fs-extra": "^10.0.0",
"inquirer": "^8.2.4",
+ "js-yaml": "^4.1.0",
"winston": "^3.13.0"
},
"bin": {
@@ -97,6 +97,12 @@
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
+ "node_modules/argparse": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity":
"sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
+ "license": "Python-2.0"
+ },
"node_modules/async": {
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz",
@@ -342,11 +348,6 @@
"resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz",
"integrity":
"sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw=="
},
- "node_modules/fs": {
- "version": "0.0.1-security",
- "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz",
- "integrity":
"sha512-3XY9e1pP0CVEUCdj5BmfIZxRBTSDycnbqhIOGec9QYtmVH2fbLpj86CFWkrNOkt/Fvty4KZG5lTglL9j/gJ87w=="
- },
"node_modules/fs-extra": {
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz",
@@ -476,6 +477,18 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/js-yaml": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+ "integrity":
"sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
+ "license": "MIT",
+ "dependencies": {
+ "argparse": "^2.0.1"
+ },
+ "bin": {
+ "js-yaml": "bin/js-yaml.js"
+ }
+ },
"node_modules/jsonfile": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",