This is an automated email from the ASF dual-hosted git repository.

spmallette pushed a commit to branch gremlin-mcp
in repository https://gitbox.apache.org/repos/asf/tinkerpop.git

commit eda3b45aef47079b78b9e4cbdef70c3cffa1ae9c
Author: Stephen Mallette <[email protected]>
AuthorDate: Wed Oct 8 08:40:28 2025 -0400

    Remove import tool from gremlin-mcp.
    
    We could bring this back later. It currently feels a little tricky to use in
    testing, and the release of 3.8.0 is looming.
---
 docs/src/reference/gremlin-applications.asciidoc   |  13 +-
 gremlin-mcp/src/main/javascript/README.md          |  19 +-
 gremlin-mcp/src/main/javascript/src/constants.ts   |   1 -
 gremlin-mcp/src/main/javascript/src/errors.ts      |   1 -
 .../javascript/src/gremlin/models/import-export.ts |  47 +---
 .../src/main/javascript/src/handlers/tools.ts      |  53 +----
 .../main/javascript/src/utils/data-operations.ts   | 246 +--------------------
 .../tests/integration/mcp-integration.test.ts      |   1 -
 8 files changed, 17 insertions(+), 364 deletions(-)

diff --git a/docs/src/reference/gremlin-applications.asciidoc b/docs/src/reference/gremlin-applications.asciidoc
index 301fdb23bb..ca3a21a71d 100644
--- a/docs/src/reference/gremlin-applications.asciidoc
+++ b/docs/src/reference/gremlin-applications.asciidoc
@@ -3026,7 +3026,7 @@ input schema, and a result schema. When connected to a Gremlin MCP server, the a
 * Inspect the server’s health and connection to a Gremlin data source
 * Discover the graph’s schema (labels, properties, relationships, counts)
 * Execute Gremlin traversals
-* Import/export graph data in common formats
+* Export graph data in common formats
 
 The Gremlin MCP server sits alongside Gremlin Server (or any TinkerPop‑compatible endpoint) and forwards tool calls to
 the graph via standard Gremlin traversals.
@@ -3057,10 +3057,9 @@ The Gremlin MCP server exposes these tools:
   properties may be surfaced as enums to encourage valid values in queries.
 * `run_gremlin_query` — Executes an arbitrary Gremlin traversal and returns JSON results.
 * `refresh_schema_cache` — Forces schema discovery to run again when the graph has changed.
-* `import_graph_data` — Loads graph data (for example, GraphSON/JSON/CSV) in batches and reports progress.
 * `export_subgraph` — Exports a selected subgraph to JSON, GraphSON, or CSV.
 
-NOTE: Import and export operate on potentially large portions of the graph. Ensure proper authorization and confirm the
+WARNING: Export operations can involve large portions of the graph. Ensure proper authorization and confirm the
 assistant’s intent in the client before approving such operations.
 
 ==== Schema discovery
@@ -3100,12 +3099,10 @@ For example, the assistant may execute a traversal like the following:
 g.V().hasLabel('person').has('age', gt(30)).out('knows').values('name')
 ----
 
-==== Import and export
+==== Export
 
-* Import — Provide the data format and mapping to labels/properties. The server processes records in batches and
-  reports progress and validation errors.
-* Export — Describe the subgraph or selection criteria (for example, all person vertices and their knows edges) and
-  choose a target format. The server returns the exported data for download by the client.
+Describe the subgraph or selection criteria (for example, "all person vertices and their `knows` edges") and choose a
+target format. The server returns the exported data for download by the client.
 
 ==== Configuring an MCP Client
 
diff --git a/gremlin-mcp/src/main/javascript/README.md b/gremlin-mcp/src/main/javascript/README.md
index 317433f6c1..7751e49a90 100644
--- a/gremlin-mcp/src/main/javascript/README.md
+++ b/gremlin-mcp/src/main/javascript/README.md
@@ -36,7 +36,6 @@ Talk to your graph database naturally:
 - 📊 **"Show me all users over 30 and their connections"** - Complex graph queries
 - 🔗 **"Find the shortest path between Alice and Bob"** - Relationship analysis
 - 📈 **"Give me graph statistics and metrics"** - Data insights
-- 📥 **"Import this GraphSON data"** - Data loading
 - 📤 **"Export user data as CSV"** - Data extraction
 - 🧠 **Smart enum discovery** - AI learns your data's valid values automatically
 
@@ -45,12 +44,11 @@ Talk to your graph database naturally:
 Your AI assistant gets access to these powerful tools:
 
 | Tool                        | Purpose          | What It Does                                                  |
-|-----------------------------| ---------------- | ------------------------------------------------------------ |
+| --------------------------- | ---------------- | ------------------------------------------------------------ |
 | 🔍 **get_graph_status**     | Health Check     | Verify database connectivity and server status               |
 | 📋 **get_graph_schema**     | Schema Discovery | Get complete graph structure with vertices and edges         |
-| ⚡ **run_gremlin_query**     | Query Execution  | Execute any Gremlin traversal query with full syntax support |
+| ⚡ **run_gremlin_query**    | Query Execution  | Execute any Gremlin traversal query with full syntax support |
 | 🔄 **refresh_schema_cache** | Cache Management | Force immediate refresh of cached schema information         |
-| 📥 **import_graph_data**    | Data Import      | Load data from GraphSON, CSV, or JSON with batch processing  |
 | 📤 **export_subgraph**      | Data Export      | Extract subgraphs to JSON, GraphSON, or CSV formats          |
 
 ## 🚀 Quick Setup
@@ -169,12 +167,6 @@ Restart your AI client and try asking:
 
 **AI response:** The AI runs multiple queries to count vertices, edges, and analyze the distribution, then presents a summary.
 
-### Data Import
-
-**You ask:** _"Load this GraphSON data into my database"_
-
-**AI response:** The AI uses `import_graph_data` to process your data in batches and reports the import status.
-
 ## 🧠 Automatic Enum Discovery
 
 > **Why this matters:** AI agents work best when they know the exact valid 
 > values for properties. Instead of guessing or making invalid queries, they 
 > can use precise, real values from your data.
@@ -461,7 +453,7 @@ src/
 │   ├── resources.ts       # Effect-based resource handlers
 │   └── effect-runtime-bridge.ts # ManagedRuntime container for Effect execution
 └── utils/                 # Effect-based utility modules
-    ├── data-operations.ts # Effect-based graph data import/export operations
+    ├── data-operations.ts # Effect-based graph data export operations
     ├── result-parser.ts   # Gremlin result parsing with metadata extraction
     └── type-guards.ts     # Runtime type checking functions
 ```
@@ -481,8 +473,7 @@ src/
 
 The server implements intelligent schema discovery with enumeration detection:
 
-```typescript
-// Property with detected enum values
+```json
 {
   "name": "status",
   "type": ["string"],
@@ -516,7 +507,7 @@ The server implements intelligent schema discovery with enumeration detection:
 
 ## License
 
-This project is licensed under the Apache License 2.0 - see the [LICENSE](https://www.apache.org/licenses/LICENSE-2.0) 
+This project is licensed under the Apache License 2.0 - see the [LICENSE](https://www.apache.org/licenses/LICENSE-2.0)
 file for details.
 
 This project is part of the [Apache TinkerPop](https://tinkerpop.apache.org/) project.
diff --git a/gremlin-mcp/src/main/javascript/src/constants.ts b/gremlin-mcp/src/main/javascript/src/constants.ts
index c20a739e55..941839c184 100644
--- a/gremlin-mcp/src/main/javascript/src/constants.ts
+++ b/gremlin-mcp/src/main/javascript/src/constants.ts
@@ -44,7 +44,6 @@ export const TOOL_NAMES = {
   GET_GRAPH_SCHEMA: 'get_graph_schema',
   RUN_GREMLIN_QUERY: 'run_gremlin_query',
   REFRESH_SCHEMA_CACHE: 'refresh_schema_cache',
-  IMPORT_GRAPH_DATA: 'import_graph_data',
   EXPORT_SUBGRAPH: 'export_subgraph',
 } as const;
 
diff --git a/gremlin-mcp/src/main/javascript/src/errors.ts b/gremlin-mcp/src/main/javascript/src/errors.ts
index 1f31b3a106..c8192d99ab 100644
--- a/gremlin-mcp/src/main/javascript/src/errors.ts
+++ b/gremlin-mcp/src/main/javascript/src/errors.ts
@@ -35,7 +35,6 @@ export const ERROR_PREFIXES = {
   QUERY: 'Query failed',
   SCHEMA: 'Schema error',
   RESOURCE: 'Resource error',
-  IMPORT: 'Import failed',
   EXPORT: 'Export failed',
   CONFIG: 'Configuration error',
   TIMEOUT: 'Operation timed out',
diff --git a/gremlin-mcp/src/main/javascript/src/gremlin/models/import-export.ts b/gremlin-mcp/src/main/javascript/src/gremlin/models/import-export.ts
index 81398af4e4..4acdcf447c 100644
--- a/gremlin-mcp/src/main/javascript/src/gremlin/models/import-export.ts
+++ b/gremlin-mcp/src/main/javascript/src/gremlin/models/import-export.ts
@@ -18,56 +18,11 @@
  */
 
 /**
- * @fileoverview Data import/export operation models.
+ * @fileoverview Data import/export operation models. Note that import operations have been temporarily removed.
  */
 
 import { z } from 'zod';
 
-/**
- * Import operation input schema with validation.
- */
-export const ImportDataInputSchema = z
-  .object({
-    format: z.enum(['graphson', 'csv'], {
-      errorMap: () => ({ message: 'Format must be either "graphson" or "csv"' }),
-    }),
-    data: z
-      .string()
-      .min(1, 'Data cannot be empty')
-      .max(50 * 1024 * 1024, 'Data size cannot exceed 50MB'), // 50MB limit
-    options: z
-      .object({
-        clear_graph: z.boolean().optional(),
-        batch_size: z
-          .number()
-          .positive('Batch size must be positive')
-          .max(10000, 'Batch size cannot exceed 10,000')
-          .optional(),
-        validate_schema: z.boolean().optional(),
-      })
-      .optional(),
-  })
-  .refine(
-    data => {
-      // Additional validation for GraphSON format
-      if (data.format === 'graphson') {
-        try {
-          JSON.parse(data.data);
-          return true;
-        } catch {
-          return false;
-        }
-      }
-      return true;
-    },
-    {
-      message: 'GraphSON data must be valid JSON',
-      path: ['data'],
-    }
-  );
-
-export type ImportDataInput = z.infer<typeof ImportDataInputSchema>;
-
 /**
  * Export operation input schema with validation.
  */
diff --git a/gremlin-mcp/src/main/javascript/src/handlers/tools.ts b/gremlin-mcp/src/main/javascript/src/handlers/tools.ts
index 3a64427bda..a331bd8c70 100644
--- a/gremlin-mcp/src/main/javascript/src/handlers/tools.ts
+++ b/gremlin-mcp/src/main/javascript/src/handlers/tools.ts
@@ -30,7 +30,7 @@ import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
 import { z } from 'zod';
 import { TOOL_NAMES } from '../constants.js';
 import { GremlinService } from '../gremlin/service.js';
-import { importGraphData, exportSubgraph } from '../utils/data-operations.js';
+import { exportSubgraph } from '../utils/data-operations.js';
 import {
   createToolEffect,
   createStringToolEffect,
@@ -41,17 +41,6 @@ import {
 /**
  * Input validation schemas for tool parameters.
  */
-const importInputSchema = z.object({
-  format: z.enum(['graphson', 'csv']),
-  data: z.string(),
-  options: z
-    .object({
-      batch_size: z.number().optional(),
-      clear_graph: z.boolean().optional(),
-      validate_schema: z.boolean().optional(),
-    })
-    .optional(),
-});
 
 const exportInputSchema = z.object({
   traversal_query: z.string(),
@@ -71,7 +60,7 @@ const exportInputSchema = z.object({
  * - Graph status monitoring
  * - Schema introspection and caching
  * - Query execution
- * - Data import/export operations
+ * - Data export operations
  */
 export function registerEffectToolHandlers(
   server: McpServer,
@@ -158,44 +147,6 @@ export function registerEffectToolHandlers(
     }
   );
 
-  // Import Graph Data
-  server.registerTool(
-    TOOL_NAMES.IMPORT_GRAPH_DATA,
-    {
-      title: 'Import Graph Data',
-      description: 'Import graph data from various formats including GraphSON and CSV',
-      inputSchema: {
-        format: z.enum(['graphson', 'csv']).describe('The format of the data to import'),
-        data: z.string().describe('The data content to import'),
-        options: z
-          .object({
-            batch_size: z.number().optional().describe('Number of operations per batch'),
-            clear_graph: z
-              .boolean()
-              .optional()
-              .describe('Whether to clear the graph before importing'),
-            validate_schema: z
-              .boolean()
-              .optional()
-              .describe('Whether to validate against existing schema'),
-          })
-          .optional()
-          .describe('Import options'),
-      },
-    },
-    (args: unknown) =>
-      Effect.runPromise(
-        pipe(
-          createValidatedToolEffect(
-            importInputSchema,
-            input => Effect.andThen(GremlinService, service => importGraphData(service, input)),
-            'Import Graph Data'
-          )(args),
-          Effect.provide(runtime)
-        )
-      )
-  );
-
   // Export Subgraph
   server.registerTool(
     TOOL_NAMES.EXPORT_SUBGRAPH,
diff --git a/gremlin-mcp/src/main/javascript/src/utils/data-operations.ts b/gremlin-mcp/src/main/javascript/src/utils/data-operations.ts
index ee11b7259b..c94bceaf67 100644
--- a/gremlin-mcp/src/main/javascript/src/utils/data-operations.ts
+++ b/gremlin-mcp/src/main/javascript/src/utils/data-operations.ts
@@ -18,15 +18,14 @@
  */
 
 /**
- * @fileoverview Graph data import/export operations with Effect-based composition.
+ * @fileoverview Graph data export operations with Effect-based composition.
  *
- * Provides high-level operations for importing data from various formats (GraphSON, CSV)
- * and exporting subgraphs based on traversal queries. Handles format validation,
- * batch processing, and error recovery.
+ * Provides high-level operations for exporting subgraphs based on traversal queries. Handles
+ * format validation and error recovery.
  */
 
 import { Effect } from 'effect';
-import { type ImportDataInput, type ExportSubgraphInput } from '../gremlin/models/index.js';
+import { type ExportSubgraphInput } from '../gremlin/models/index.js';
 import { GremlinService } from '../gremlin/service.js';
 import {
   Errors,
@@ -43,42 +42,6 @@ function isRecord(value: unknown): value is Record<string, unknown> {
   return typeof value === 'object' && value !== null && !Array.isArray(value);
 }
 
-/**
- * Imports graph data from various formats with comprehensive validation.
- *
- * @param service - Gremlin service instance
- * @param input - Import configuration and data
- * @returns Effect with success message or import errors
- *
- * Supports:
- * - GraphSON format (native Gremlin JSON)
- * - CSV format (vertices and edges)
- *
- * Features batch processing and optional graph clearing for fresh imports.
- */
-export const importGraphData = (
-  service: typeof GremlinService.Service,
-  input: ImportDataInput
-): Effect.Effect<string, ResourceError | GremlinConnectionError | GremlinQueryError | ParseError> =>
-  Effect.gen(function* () {
-    yield* Effect.logInfo(
-      `Starting import operation: format=${input.format}, size=${input.data.length} chars`
-    );
-
-    switch (input.format) {
-      case 'graphson':
-        return yield* importGraphSON(service, input);
-
-      case 'csv':
-        return yield* importCSV(service, input);
-
-      default:
-        return yield* Effect.fail(
-          Errors.resource(`Unsupported import format: ${input.format}`, 'import_operation')
-        );
-    }
-  });
-
 /**
  * Exports subgraph data based on traversal queries.
  *
@@ -122,156 +85,6 @@ export const exportSubgraph = (
     }
   });
 
-/**
- * Clear graph data if requested
- */
-const clearGraphIfRequested = (
-  service: typeof GremlinService.Service,
-  shouldClear: boolean | undefined
-): Effect.Effect<void, GremlinConnectionError | GremlinQueryError | ParseError> =>
-  shouldClear
-    ? Effect.gen(function* () {
-        yield* service.executeQuery('g.V().drop()');
-        yield* service.executeQuery('g.E().drop()');
-        yield* Effect.logInfo('Graph cleared before import');
-      })
-    : Effect.void;
-
-/**
- * Import vertices from GraphSON data
- */
-const importVertices = (
-  service: typeof GremlinService.Service,
-  vertices: unknown[]
-): Effect.Effect<void, GremlinConnectionError | GremlinQueryError | ParseError> =>
-  Effect.gen(function* () {
-    for (const vertex of vertices) {
-      const query = buildVertexInsertQuery(vertex as Record<string, unknown>);
-      yield* service.executeQuery(query);
-    }
-    yield* Effect.logInfo(`Imported ${vertices.length} vertices`);
-  });
-
-/**
- * Import edges from GraphSON data
- */
-const importEdges = (
-  service: typeof GremlinService.Service,
-  edges: unknown[]
-): Effect.Effect<void, GremlinConnectionError | GremlinQueryError | ParseError> =>
-  Effect.gen(function* () {
-    for (const edge of edges) {
-      const query = buildEdgeInsertQuery(edge as Record<string, unknown>);
-      yield* service.executeQuery(query);
-    }
-    yield* Effect.logInfo(`Imported ${edges.length} edges`);
-  });
-
-/**
- * Build GraphSON import summary
- */
-const buildImportSummary = (vertexCount: number, edgeCount: number): string =>
-  `GraphSON import completed successfully. Vertices: ${vertexCount}, Edges: ${edgeCount}`;
-
-/**
- * Parse GraphSON data with proper error handling
- */
-const parseGraphSONData = (
-  input: string
-): Effect.Effect<{ vertices?: unknown[]; edges?: unknown[] }, ResourceError> =>
-  Effect.try({
-    try: () => JSON.parse(input) as { vertices?: unknown[]; edges?: unknown[] },
-    catch: error => Errors.resource('Failed to parse GraphSON data', 'graphson_import', error),
-  });
-
-/**
- * Import GraphSON format data
- */
-const importGraphSON = (
-  service: typeof GremlinService.Service,
-  input: ImportDataInput
-): Effect.Effect<string, ResourceError | GremlinConnectionError | GremlinQueryError | ParseError> =>
-  Effect.gen(function* () {
-    const data = yield* parseGraphSONData(input.data);
-
-    yield* clearGraphIfRequested(service, input.options?.clear_graph);
-
-    const vertexCount = data.vertices?.length || 0;
-    const edgeCount = data.edges?.length || 0;
-
-    if (data.vertices && vertexCount > 0) {
-      yield* importVertices(service, data.vertices);
-    }
-
-    if (data.edges && edgeCount > 0) {
-      yield* importEdges(service, data.edges);
-    }
-
-    return buildImportSummary(vertexCount, edgeCount);
-  });
-
-/**
- * Parse CSV data safely
- */
-const parseCSVData = (
-  csvData: string
-): Effect.Effect<{ headers: string[]; dataRows: string[] }, ResourceError> =>
-  Effect.try({
-    try: () => {
-      const lines = csvData.split('\n').filter(line => line.trim());
-      const headers = lines[0]?.split(',').map(h => h.trim()) || [];
-      const dataRows = lines.slice(1);
-      return { headers, dataRows };
-    },
-    catch: error => Errors.resource('Failed to parse CSV data', 'csv_import', error),
-  });
-
-/**
- * Import CSV format data
- */
-const importCSV = (
-  service: typeof GremlinService.Service,
-  input: ImportDataInput
-): Effect.Effect<string, ResourceError | GremlinConnectionError | GremlinQueryError | ParseError> =>
-  Effect.gen(function* () {
-    const { headers, dataRows } = yield* parseCSVData(input.data);
-
-    yield* clearGraphIfRequested(service, input.options?.clear_graph);
-    yield* importCSVVertices(service, dataRows, headers);
-
-    return `CSV import completed successfully. Processed ${dataRows.length} rows.`;
-  });
-
-/**
- * Process a single CSV row into vertex properties
- */
-const processCSVRow = (row: string, headers: string[]): Record<string, string> => {
-  const values = row.split(',').map(v => v.trim());
-  return headers.reduce((props: Record<string, string>, header, index) => {
-    if (values[index]) {
-      props[header] = values[index];
-    }
-    return props;
-  }, {});
-};
-
-/**
- * Import vertices from CSV rows
- */
-const importCSVVertices = (
-  service: typeof GremlinService.Service,
-  dataRows: string[],
-  headers: string[]
-): Effect.Effect<void, GremlinConnectionError | GremlinQueryError | ParseError> =>
-  Effect.gen(function* () {
-    for (const row of dataRows) {
-      const properties = processCSVRow(row, headers);
-      const query = buildCSVVertexInsertQuery(properties);
-      yield* service.executeQuery(query);
-    }
-    yield* Effect.logInfo(`Imported ${dataRows.length} vertices from CSV`);
-  });
-
 /**
  * Export to GraphSON format
  */
@@ -382,54 +195,3 @@ const exportToCSV = (results: unknown[]): Effect.Effect<string, ResourceError> =
 /**
  * Helper functions for query building
  */
-function buildVertexInsertQuery(vertex: Record<string, unknown>): string {
-  const label = vertex['label'] || 'vertex';
-  const id = vertex['id'];
-  const properties = vertex['properties'] || {};
-
-  let query = `g.addV('${label}')`;
-
-  if (id !== undefined) {
-    query += `.property(id, '${id}')`;
-  }
-
-  for (const [key, value] of Object.entries(properties)) {
-    if (Array.isArray(value)) {
-      // Handle multi-value properties
-      value.forEach(v => {
-        query += `.property('${key}', '${v}')`;
-      });
-    } else {
-      query += `.property('${key}', '${value}')`;
-    }
-  }
-
-  return query;
-}
-
-function buildEdgeInsertQuery(edge: Record<string, unknown>): string {
-  const label = edge['label'] || 'edge';
-  const outV = edge['outV'];
-  const inV = edge['inV'];
-  const properties = edge['properties'] || {};
-
-  let query = `g.V('${outV}').addE('${label}').to(g.V('${inV}'))`;
-
-  for (const [key, value] of Object.entries(properties)) {
-    query += `.property('${key}', '${value}')`;
-  }
-
-  return query;
-}
-
-function buildCSVVertexInsertQuery(properties: Record<string, string>): string {
-  let query = "g.addV('data')";
-
-  for (const [key, value] of Object.entries(properties)) {
-    if (key && value) {
-      query += `.property('${key}', '${value}')`;
-    }
-  }
-
-  return query;
-}
diff --git a/gremlin-mcp/src/main/javascript/tests/integration/mcp-integration.test.ts b/gremlin-mcp/src/main/javascript/tests/integration/mcp-integration.test.ts
index 6878eafd04..9858db3024 100644
--- a/gremlin-mcp/src/main/javascript/tests/integration/mcp-integration.test.ts
+++ b/gremlin-mcp/src/main/javascript/tests/integration/mcp-integration.test.ts
@@ -126,7 +126,6 @@ describe('MCP Server Integration Tests', () => {
       expect(toolNames).toContain(TOOL_NAMES.GET_GRAPH_SCHEMA);
       expect(toolNames).toContain(TOOL_NAMES.RUN_GREMLIN_QUERY);
       expect(toolNames).toContain(TOOL_NAMES.REFRESH_SCHEMA_CACHE);
-      expect(toolNames).toContain(TOOL_NAMES.IMPORT_GRAPH_DATA);
       expect(toolNames).toContain(TOOL_NAMES.EXPORT_SUBGRAPH);
     },
     30000
