From f8e5a8d7420a6ef6d30e3c19ac08845516ad05fd Mon Sep 17 00:00:00 2001 From: Schrody Date: Thu, 12 Feb 2026 17:42:51 +0100 Subject: [PATCH] Added filesystem module --- mcpServer/modules/filesystem/Dockerfile | 25 + mcpServer/modules/filesystem/README.md | 321 ++++++ .../__tests__/directory-tree.test.ts | 147 +++ .../modules/filesystem/__tests__/lib.test.ts | 725 +++++++++++++ .../filesystem/__tests__/path-utils.test.ts | 371 +++++++ .../__tests__/path-validation.test.ts | 953 ++++++++++++++++++ .../filesystem/__tests__/roots-utils.test.ts | 84 ++ .../__tests__/startup-validation.test.ts | 100 ++ .../__tests__/structured-content.test.ts | 158 +++ mcpServer/modules/filesystem/index.ts | 673 +++++++++++++ mcpServer/modules/filesystem/lib.ts | 415 ++++++++ mcpServer/modules/filesystem/package.json | 43 + mcpServer/modules/filesystem/path-utils.ts | 118 +++ .../modules/filesystem/path-validation.ts | 86 ++ mcpServer/modules/filesystem/roots-utils.ts | 76 ++ mcpServer/modules/filesystem/tsconfig.json | 17 + mcpServer/modules/filesystem/vitest.config.ts | 14 + 17 files changed, 4326 insertions(+) create mode 100644 mcpServer/modules/filesystem/Dockerfile create mode 100644 mcpServer/modules/filesystem/README.md create mode 100644 mcpServer/modules/filesystem/__tests__/directory-tree.test.ts create mode 100644 mcpServer/modules/filesystem/__tests__/lib.test.ts create mode 100644 mcpServer/modules/filesystem/__tests__/path-utils.test.ts create mode 100644 mcpServer/modules/filesystem/__tests__/path-validation.test.ts create mode 100644 mcpServer/modules/filesystem/__tests__/roots-utils.test.ts create mode 100644 mcpServer/modules/filesystem/__tests__/startup-validation.test.ts create mode 100644 mcpServer/modules/filesystem/__tests__/structured-content.test.ts create mode 100644 mcpServer/modules/filesystem/index.ts create mode 100644 mcpServer/modules/filesystem/lib.ts create mode 100644 mcpServer/modules/filesystem/package.json create mode 100644 
mcpServer/modules/filesystem/path-utils.ts create mode 100644 mcpServer/modules/filesystem/path-validation.ts create mode 100644 mcpServer/modules/filesystem/roots-utils.ts create mode 100644 mcpServer/modules/filesystem/tsconfig.json create mode 100644 mcpServer/modules/filesystem/vitest.config.ts diff --git a/mcpServer/modules/filesystem/Dockerfile b/mcpServer/modules/filesystem/Dockerfile new file mode 100644 index 0000000..0830a03 --- /dev/null +++ b/mcpServer/modules/filesystem/Dockerfile @@ -0,0 +1,25 @@ +FROM node:22.12-alpine AS builder + +WORKDIR /app + +COPY . /app +COPY tsconfig.json /tsconfig.json + +RUN npm install + +RUN npm ci --ignore-scripts --omit-dev + + +FROM node:22-alpine AS release + +WORKDIR /app + +COPY --from=builder /app/dist /app/dist +COPY --from=builder /app/package.json /app/package.json +COPY --from=builder /app/package-lock.json /app/package-lock.json + +ENV NODE_ENV=production + +RUN npm ci --ignore-scripts --omit-dev + +ENTRYPOINT ["node", "/app/dist/index.js"] \ No newline at end of file diff --git a/mcpServer/modules/filesystem/README.md b/mcpServer/modules/filesystem/README.md new file mode 100644 index 0000000..e9ddc2b --- /dev/null +++ b/mcpServer/modules/filesystem/README.md @@ -0,0 +1,321 @@ +# Filesystem MCP Server + +Node.js server implementing Model Context Protocol (MCP) for filesystem operations. + +## Features + +- Read/write files +- Create/list/delete directories +- Move files/directories +- Search files +- Get file metadata +- Dynamic directory access control via [Roots](https://modelcontextprotocol.io/docs/learn/client-concepts#roots) + +## Directory Access Control + +The server uses a flexible directory access control system. Directories can be specified via command-line arguments or dynamically via [Roots](https://modelcontextprotocol.io/docs/learn/client-concepts#roots). 
+ +### Method 1: Command-line Arguments +Specify Allowed directories when starting the server: +```bash +mcp-server-filesystem /path/to/dir1 /path/to/dir2 +``` + +### Method 2: MCP Roots (Recommended) +MCP clients that support [Roots](https://modelcontextprotocol.io/docs/learn/client-concepts#roots) can dynamically update the Allowed directories. + +Roots notified by Client to Server, completely replace any server-side Allowed directories when provided. + +**Important**: If server starts without command-line arguments AND client doesn't support roots protocol (or provides empty roots), the server will throw an error during initialization. + +This is the recommended method, as this enables runtime directory updates via `roots/list_changed` notifications without server restart, providing a more flexible and modern integration experience. + +### How It Works + +The server's directory access control follows this flow: + +1. **Server Startup** + - Server starts with directories from command-line arguments (if provided) + - If no arguments provided, server starts with empty allowed directories + +2. **Client Connection & Initialization** + - Client connects and sends `initialize` request with capabilities + - Server checks if client supports roots protocol (`capabilities.roots`) + +3. **Roots Protocol Handling** (if client supports roots) + - **On initialization**: Server requests roots from client via `roots/list` + - Client responds with its configured roots + - Server replaces ALL allowed directories with client's roots + - **On runtime updates**: Client can send `notifications/roots/list_changed` + - Server requests updated roots and replaces allowed directories again + +4. **Fallback Behavior** (if client doesn't support roots) + - Server continues using command-line directories only + - No dynamic updates possible + +5. 
**Access Control** + - All filesystem operations are restricted to allowed directories + - Use `list_allowed_directories` tool to see current directories + - Server requires at least ONE allowed directory to operate + +**Note**: The server will only allow operations within directories specified either via `args` or via Roots. + + + +## API + +### Tools + +- **read_text_file** + - Read complete contents of a file as text + - Inputs: + - `path` (string) + - `head` (number, optional): First N lines + - `tail` (number, optional): Last N lines + - Always treats the file as UTF-8 text regardless of extension + - Cannot specify both `head` and `tail` simultaneously + +- **read_media_file** + - Read an image or audio file + - Inputs: + - `path` (string) + - Streams the file and returns base64 data with the corresponding MIME type + +- **read_multiple_files** + - Read multiple files simultaneously + - Input: `paths` (string[]) + - Failed reads won't stop the entire operation + +- **write_file** + - Create new file or overwrite existing (exercise caution with this) + - Inputs: + - `path` (string): File location + - `content` (string): File content + +- **edit_file** + - Make selective edits using advanced pattern matching and formatting + - Features: + - Line-based and multi-line content matching + - Whitespace normalization with indentation preservation + - Multiple simultaneous edits with correct positioning + - Indentation style detection and preservation + - Git-style diff output with context + - Preview changes with dry run mode + - Inputs: + - `path` (string): File to edit + - `edits` (array): List of edit operations + - `oldText` (string): Text to search for (can be substring) + - `newText` (string): Text to replace with + - `dryRun` (boolean): Preview changes without applying (default: false) + - Returns detailed diff and match information for dry runs, otherwise applies changes + - Best Practice: Always use dryRun first to preview changes before applying them + +- 
**create_directory** + - Create new directory or ensure it exists + - Input: `path` (string) + - Creates parent directories if needed + - Succeeds silently if directory exists + +- **list_directory** + - List directory contents with [FILE] or [DIR] prefixes + - Input: `path` (string) + +- **list_directory_with_sizes** + - List directory contents with [FILE] or [DIR] prefixes, including file sizes + - Inputs: + - `path` (string): Directory path to list + - `sortBy` (string, optional): Sort entries by "name" or "size" (default: "name") + - Returns detailed listing with file sizes and summary statistics + - Shows total files, directories, and combined size + +- **move_file** + - Move or rename files and directories + - Inputs: + - `source` (string) + - `destination` (string) + - Fails if destination exists + +- **search_files** + - Recursively search for files/directories that match or do not match patterns + - Inputs: + - `path` (string): Starting directory + - `pattern` (string): Search pattern + - `excludePatterns` (string[]): Exclude any patterns. + - Glob-style pattern matching + - Returns full paths to matches + +- **directory_tree** + - Get recursive JSON tree structure of directory contents + - Inputs: + - `path` (string): Starting directory + - `excludePatterns` (string[]): Exclude any patterns. Glob formats are supported. 
+ - Returns: + - JSON array where each entry contains: + - `name` (string): File/directory name + - `type` ('file'|'directory'): Entry type + - `children` (array): Present only for directories + - Empty array for empty directories + - Omitted for files + - Output is formatted with 2-space indentation for readability + +- **get_file_info** + - Get detailed file/directory metadata + - Input: `path` (string) + - Returns: + - Size + - Creation time + - Modified time + - Access time + - Type (file/directory) + - Permissions + +- **list_allowed_directories** + - List all directories the server is allowed to access + - No input required + - Returns: + - Directories that this server can read/write from + +### Tool annotations (MCP hints) + +This server sets [MCP ToolAnnotations](https://modelcontextprotocol.io/specification/2025-03-26/server/tools#toolannotations) +on each tool so clients can: + +- Distinguish **read‑only** tools from write‑capable tools. +- Understand which write operations are **idempotent** (safe to retry with the same arguments). +- Highlight operations that may be **destructive** (overwriting or heavily mutating data). 
+ +The mapping for filesystem tools is: + +| Tool | readOnlyHint | idempotentHint | destructiveHint | Notes | +|-----------------------------|--------------|----------------|-----------------|--------------------------------------------------| +| `read_text_file` | `true` | – | – | Pure read | +| `read_media_file` | `true` | – | – | Pure read | +| `read_multiple_files` | `true` | – | – | Pure read | +| `list_directory` | `true` | – | – | Pure read | +| `list_directory_with_sizes` | `true` | – | – | Pure read | +| `directory_tree` | `true` | – | – | Pure read | +| `search_files` | `true` | – | – | Pure read | +| `get_file_info` | `true` | – | – | Pure read | +| `list_allowed_directories` | `true` | – | – | Pure read | +| `create_directory` | `false` | `true` | `false` | Re‑creating the same dir is a no‑op | +| `write_file` | `false` | `true` | `true` | Overwrites existing files | +| `edit_file` | `false` | `false` | `true` | Re‑applying edits can fail or double‑apply | +| `move_file` | `false` | `false` | `false` | Move/rename only; repeat usually errors | + +> Note: `idempotentHint` and `destructiveHint` are meaningful only when `readOnlyHint` is `false`, as defined by the MCP spec. + +## Usage with Claude Desktop +Add this to your `claude_desktop_config.json`: + +Note: you can provide sandboxed directories to the server by mounting them to `/projects`. Adding the `ro` flag will make the directory readonly by the server. + +### Docker +Note: all directories must be mounted to `/projects` by default. 
+ +```json +{ + "mcpServers": { + "filesystem": { + "command": "docker", + "args": [ + "run", + "-i", + "--rm", + "--mount", "type=bind,src=/Users/username/Desktop,dst=/projects/Desktop", + "--mount", "type=bind,src=/path/to/other/allowed/dir,dst=/projects/other/allowed/dir,ro", + "--mount", "type=bind,src=/path/to/file.txt,dst=/projects/path/to/file.txt", + "mcp/filesystem", + "/projects" + ] + } + } +} +``` + +### NPX + +```json +{ + "mcpServers": { + "filesystem": { + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-filesystem", + "/Users/username/Desktop", + "/path/to/other/allowed/dir" + ] + } + } +} +``` + +## Usage with VS Code + +For quick installation, click the installation buttons below... + +[![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-filesystem%22%2C%22%24%7BworkspaceFolder%7D%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-filesystem%22%2C%22%24%7BworkspaceFolder%7D%22%5D%7D&quality=insiders) + +[![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fprojects%2Fworkspace%22%2C%22mcp%2Ffilesystem%22%2C%22%2Fprojects%22%5D%7D) [![Install with Docker in VS Code 
Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fprojects%2Fworkspace%22%2C%22mcp%2Ffilesystem%22%2C%22%2Fprojects%22%5D%7D&quality=insiders) + +For manual installation, you can configure the MCP server using one of these methods: + +**Method 1: User Configuration (Recommended)** +Add the configuration to your user-level MCP configuration file. Open the Command Palette (`Ctrl + Shift + P`) and run `MCP: Open User Configuration`. This will open your user `mcp.json` file where you can add the server configuration. + +**Method 2: Workspace Configuration** +Alternatively, you can add the configuration to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. + +> For more details about MCP configuration in VS Code, see the [official VS Code MCP documentation](https://code.visualstudio.com/docs/copilot/customization/mcp-servers). + +You can provide sandboxed directories to the server by mounting them to `/projects`. Adding the `ro` flag will make the directory readonly by the server. + +### Docker +Note: all directories must be mounted to `/projects` by default. + +```json +{ + "servers": { + "filesystem": { + "command": "docker", + "args": [ + "run", + "-i", + "--rm", + "--mount", "type=bind,src=${workspaceFolder},dst=/projects/workspace", + "mcp/filesystem", + "/projects" + ] + } + } +} +``` + +### NPX + +```json +{ + "servers": { + "filesystem": { + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-filesystem", + "${workspaceFolder}" + ] + } + } +} +``` + +## Build + +Docker build: + +```bash +docker build -t mcp/filesystem -f src/filesystem/Dockerfile . 
+``` + +## License + +This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. diff --git a/mcpServer/modules/filesystem/__tests__/directory-tree.test.ts b/mcpServer/modules/filesystem/__tests__/directory-tree.test.ts new file mode 100644 index 0000000..04c8278 --- /dev/null +++ b/mcpServer/modules/filesystem/__tests__/directory-tree.test.ts @@ -0,0 +1,147 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import * as fs from 'fs/promises'; +import * as path from 'path'; +import * as os from 'os'; + +// We need to test the buildTree function, but it's defined inside the request handler +// So we'll extract the core logic into a testable function +import { minimatch } from 'minimatch'; + +interface TreeEntry { + name: string; + type: 'file' | 'directory'; + children?: TreeEntry[]; +} + +async function buildTreeForTesting(currentPath: string, rootPath: string, excludePatterns: string[] = []): Promise { + const entries = await fs.readdir(currentPath, {withFileTypes: true}); + const result: TreeEntry[] = []; + + for (const entry of entries) { + const relativePath = path.relative(rootPath, path.join(currentPath, entry.name)); + const shouldExclude = excludePatterns.some(pattern => { + if (pattern.includes('*')) { + return minimatch(relativePath, pattern, {dot: true}); + } + // For files: match exact name or as part of path + // For directories: match as directory path + return minimatch(relativePath, pattern, {dot: true}) || + minimatch(relativePath, `**/${pattern}`, {dot: true}) || + minimatch(relativePath, `**/${pattern}/**`, {dot: true}); + }); + if (shouldExclude) + continue; + + const entryData: TreeEntry = { + name: entry.name, + type: entry.isDirectory() ? 
'directory' : 'file' + }; + + if (entry.isDirectory()) { + const subPath = path.join(currentPath, entry.name); + entryData.children = await buildTreeForTesting(subPath, rootPath, excludePatterns); + } + + result.push(entryData); + } + + return result; +} + +describe('buildTree exclude patterns', () => { + let testDir: string; + + beforeEach(async () => { + testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'filesystem-test-')); + + // Create test directory structure + await fs.mkdir(path.join(testDir, 'src')); + await fs.mkdir(path.join(testDir, 'node_modules')); + await fs.mkdir(path.join(testDir, '.git')); + await fs.mkdir(path.join(testDir, 'nested', 'node_modules'), { recursive: true }); + + // Create test files + await fs.writeFile(path.join(testDir, '.env'), 'SECRET=value'); + await fs.writeFile(path.join(testDir, '.env.local'), 'LOCAL_SECRET=value'); + await fs.writeFile(path.join(testDir, 'src', 'index.js'), 'console.log("hello");'); + await fs.writeFile(path.join(testDir, 'package.json'), '{}'); + await fs.writeFile(path.join(testDir, 'node_modules', 'module.js'), 'module.exports = {};'); + await fs.writeFile(path.join(testDir, 'nested', 'node_modules', 'deep.js'), 'module.exports = {};'); + }); + + afterEach(async () => { + await fs.rm(testDir, { recursive: true, force: true }); + }); + + it('should exclude files matching simple patterns', async () => { + // Test the current implementation - this will fail until the bug is fixed + const tree = await buildTreeForTesting(testDir, testDir, ['.env']); + const fileNames = tree.map(entry => entry.name); + + expect(fileNames).not.toContain('.env'); + expect(fileNames).toContain('.env.local'); // Should not exclude this + expect(fileNames).toContain('src'); + expect(fileNames).toContain('package.json'); + }); + + it('should exclude directories matching simple patterns', async () => { + const tree = await buildTreeForTesting(testDir, testDir, ['node_modules']); + const dirNames = tree.map(entry => entry.name); + + 
expect(dirNames).not.toContain('node_modules'); + expect(dirNames).toContain('src'); + expect(dirNames).toContain('.git'); + }); + + it('should exclude nested directories with same pattern', async () => { + const tree = await buildTreeForTesting(testDir, testDir, ['node_modules']); + + // Find the nested directory + const nestedDir = tree.find(entry => entry.name === 'nested'); + expect(nestedDir).toBeDefined(); + expect(nestedDir!.children).toBeDefined(); + + // The nested/node_modules should also be excluded + const nestedChildren = nestedDir!.children!.map(child => child.name); + expect(nestedChildren).not.toContain('node_modules'); + }); + + it('should handle glob patterns correctly', async () => { + const tree = await buildTreeForTesting(testDir, testDir, ['*.env']); + const fileNames = tree.map(entry => entry.name); + + expect(fileNames).not.toContain('.env'); + expect(fileNames).toContain('.env.local'); // *.env should not match .env.local + expect(fileNames).toContain('src'); + }); + + it('should handle dot files correctly', async () => { + const tree = await buildTreeForTesting(testDir, testDir, ['.git']); + const dirNames = tree.map(entry => entry.name); + + expect(dirNames).not.toContain('.git'); + expect(dirNames).toContain('.env'); // Should not exclude this + }); + + it('should work with multiple exclude patterns', async () => { + const tree = await buildTreeForTesting(testDir, testDir, ['node_modules', '.env', '.git']); + const entryNames = tree.map(entry => entry.name); + + expect(entryNames).not.toContain('node_modules'); + expect(entryNames).not.toContain('.env'); + expect(entryNames).not.toContain('.git'); + expect(entryNames).toContain('src'); + expect(entryNames).toContain('package.json'); + }); + + it('should handle empty exclude patterns', async () => { + const tree = await buildTreeForTesting(testDir, testDir, []); + const entryNames = tree.map(entry => entry.name); + + // All entries should be included + 
expect(entryNames).toContain('node_modules'); + expect(entryNames).toContain('.env'); + expect(entryNames).toContain('.git'); + expect(entryNames).toContain('src'); + }); +}); \ No newline at end of file diff --git a/mcpServer/modules/filesystem/__tests__/lib.test.ts b/mcpServer/modules/filesystem/__tests__/lib.test.ts new file mode 100644 index 0000000..f7e585a --- /dev/null +++ b/mcpServer/modules/filesystem/__tests__/lib.test.ts @@ -0,0 +1,725 @@ +import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; +import fs from 'fs/promises'; +import path from 'path'; +import os from 'os'; +import { + // Pure utility functions + formatSize, + normalizeLineEndings, + createUnifiedDiff, + // Security & validation functions + validatePath, + setAllowedDirectories, + // File operations + getFileStats, + readFileContent, + writeFileContent, + // Search & filtering functions + searchFilesWithValidation, + // File editing functions + applyFileEdits, + tailFile, + headFile +} from '../lib.js'; + +// Mock fs module +vi.mock('fs/promises'); +const mockFs = fs as any; + +describe('Lib Functions', () => { + beforeEach(() => { + vi.clearAllMocks(); + // Set up allowed directories for tests + const allowedDirs = process.platform === 'win32' ? 
['C:\\Users\\test', 'C:\\temp', 'C:\\allowed'] : ['/home/user', '/tmp', '/allowed']; + setAllowedDirectories(allowedDirs); + }); + + afterEach(() => { + vi.restoreAllMocks(); + // Clear allowed directories after tests + setAllowedDirectories([]); + }); + + describe('Pure Utility Functions', () => { + describe('formatSize', () => { + it('formats bytes correctly', () => { + expect(formatSize(0)).toBe('0 B'); + expect(formatSize(512)).toBe('512 B'); + expect(formatSize(1024)).toBe('1.00 KB'); + expect(formatSize(1536)).toBe('1.50 KB'); + expect(formatSize(1048576)).toBe('1.00 MB'); + expect(formatSize(1073741824)).toBe('1.00 GB'); + expect(formatSize(1099511627776)).toBe('1.00 TB'); + }); + + it('handles edge cases', () => { + expect(formatSize(1023)).toBe('1023 B'); + expect(formatSize(1025)).toBe('1.00 KB'); + expect(formatSize(1048575)).toBe('1024.00 KB'); + }); + + it('handles very large numbers beyond TB', () => { + // The function only supports up to TB, so very large numbers will show as TB + expect(formatSize(1024 * 1024 * 1024 * 1024 * 1024)).toBe('1024.00 TB'); + expect(formatSize(Number.MAX_SAFE_INTEGER)).toContain('TB'); + }); + + it('handles negative numbers', () => { + // Negative numbers will result in NaN for the log calculation + expect(formatSize(-1024)).toContain('NaN'); + expect(formatSize(-0)).toBe('0 B'); + }); + + it('handles decimal numbers', () => { + expect(formatSize(1536.5)).toBe('1.50 KB'); + expect(formatSize(1023.9)).toBe('1023.9 B'); + }); + + it('handles very small positive numbers', () => { + expect(formatSize(1)).toBe('1 B'); + expect(formatSize(0.5)).toBe('0.5 B'); + expect(formatSize(0.1)).toBe('0.1 B'); + }); + }); + + describe('normalizeLineEndings', () => { + it('converts CRLF to LF', () => { + expect(normalizeLineEndings('line1\r\nline2\r\nline3')).toBe('line1\nline2\nline3'); + }); + + it('leaves LF unchanged', () => { + expect(normalizeLineEndings('line1\nline2\nline3')).toBe('line1\nline2\nline3'); + }); + + it('handles 
mixed line endings', () => { + expect(normalizeLineEndings('line1\r\nline2\nline3\r\n')).toBe('line1\nline2\nline3\n'); + }); + + it('handles empty string', () => { + expect(normalizeLineEndings('')).toBe(''); + }); + }); + + describe('createUnifiedDiff', () => { + it('creates diff for simple changes', () => { + const original = 'line1\nline2\nline3'; + const modified = 'line1\nmodified line2\nline3'; + const diff = createUnifiedDiff(original, modified, 'test.txt'); + + expect(diff).toContain('--- test.txt'); + expect(diff).toContain('+++ test.txt'); + expect(diff).toContain('-line2'); + expect(diff).toContain('+modified line2'); + }); + + it('handles CRLF normalization', () => { + const original = 'line1\r\nline2\r\n'; + const modified = 'line1\nmodified line2\n'; + const diff = createUnifiedDiff(original, modified); + + expect(diff).toContain('-line2'); + expect(diff).toContain('+modified line2'); + }); + + it('handles identical content', () => { + const content = 'line1\nline2\nline3'; + const diff = createUnifiedDiff(content, content); + + // Should not contain any +/- lines for identical content (excluding header lines) + expect(diff.split('\n').filter((line: string) => line.startsWith('+++') || line.startsWith('---'))).toHaveLength(2); + expect(diff.split('\n').filter((line: string) => line.startsWith('+') && !line.startsWith('+++'))).toHaveLength(0); + expect(diff.split('\n').filter((line: string) => line.startsWith('-') && !line.startsWith('---'))).toHaveLength(0); + }); + + it('handles empty content', () => { + const diff = createUnifiedDiff('', ''); + expect(diff).toContain('--- file'); + expect(diff).toContain('+++ file'); + }); + + it('handles default filename parameter', () => { + const diff = createUnifiedDiff('old', 'new'); + expect(diff).toContain('--- file'); + expect(diff).toContain('+++ file'); + }); + + it('handles custom filename', () => { + const diff = createUnifiedDiff('old', 'new', 'custom.txt'); + expect(diff).toContain('--- custom.txt'); 
+ expect(diff).toContain('+++ custom.txt'); + }); + }); + }); + + describe('Security & Validation Functions', () => { + describe('validatePath', () => { + // Use Windows-compatible paths for testing + const allowedDirs = process.platform === 'win32' ? ['C:\\Users\\test', 'C:\\temp'] : ['/home/user', '/tmp']; + + beforeEach(() => { + mockFs.realpath.mockImplementation(async (path: any) => path.toString()); + }); + + it('validates allowed paths', async () => { + const testPath = process.platform === 'win32' ? 'C:\\Users\\test\\file.txt' : '/home/user/file.txt'; + const result = await validatePath(testPath); + expect(result).toBe(testPath); + }); + + it('rejects disallowed paths', async () => { + const testPath = process.platform === 'win32' ? 'C:\\Windows\\System32\\file.txt' : '/etc/passwd'; + await expect(validatePath(testPath)) + .rejects.toThrow('Access denied - path outside allowed directories'); + }); + + it('handles non-existent files by checking parent directory', async () => { + const newFilePath = process.platform === 'win32' ? 'C:\\Users\\test\\newfile.txt' : '/home/user/newfile.txt'; + const parentPath = process.platform === 'win32' ? 'C:\\Users\\test' : '/home/user'; + + // Create an error with the ENOENT code that the implementation checks for + const enoentError = new Error('ENOENT') as NodeJS.ErrnoException; + enoentError.code = 'ENOENT'; + + mockFs.realpath + .mockRejectedValueOnce(enoentError) + .mockResolvedValueOnce(parentPath); + + const result = await validatePath(newFilePath); + expect(result).toBe(path.resolve(newFilePath)); + }); + + it('rejects when parent directory does not exist', async () => { + const newFilePath = process.platform === 'win32' ? 
'C:\\Users\\test\\nonexistent\\newfile.txt' : '/home/user/nonexistent/newfile.txt'; + + // Create errors with the ENOENT code + const enoentError1 = new Error('ENOENT') as NodeJS.ErrnoException; + enoentError1.code = 'ENOENT'; + const enoentError2 = new Error('ENOENT') as NodeJS.ErrnoException; + enoentError2.code = 'ENOENT'; + + mockFs.realpath + .mockRejectedValueOnce(enoentError1) + .mockRejectedValueOnce(enoentError2); + + await expect(validatePath(newFilePath)) + .rejects.toThrow('Parent directory does not exist'); + }); + + it('resolves relative paths against allowed directories instead of process.cwd()', async () => { + const relativePath = 'test-file.txt'; + const originalCwd = process.cwd; + + // Mock process.cwd to return a directory outside allowed directories + const disallowedCwd = process.platform === 'win32' ? 'C:\\Windows\\System32' : '/root'; + (process as any).cwd = vi.fn(() => disallowedCwd); + + try { + const result = await validatePath(relativePath); + + // Result should be resolved against first allowed directory, not process.cwd() + const expectedPath = process.platform === 'win32' + ? 
path.resolve('C:\\Users\\test', relativePath) + : path.resolve('/home/user', relativePath); + + expect(result).toBe(expectedPath); + expect(result).not.toContain(disallowedCwd); + } finally { + // Restore original process.cwd + process.cwd = originalCwd; + } + }); + }); + }); + + describe('File Operations', () => { + describe('getFileStats', () => { + it('returns file statistics', async () => { + const mockStats = { + size: 1024, + birthtime: new Date('2023-01-01'), + mtime: new Date('2023-01-02'), + atime: new Date('2023-01-03'), + isDirectory: () => false, + isFile: () => true, + mode: 0o644 + }; + + mockFs.stat.mockResolvedValueOnce(mockStats as any); + + const result = await getFileStats('/test/file.txt'); + + expect(result).toEqual({ + size: 1024, + created: new Date('2023-01-01'), + modified: new Date('2023-01-02'), + accessed: new Date('2023-01-03'), + isDirectory: false, + isFile: true, + permissions: '644' + }); + }); + + it('handles directory statistics', async () => { + const mockStats = { + size: 4096, + birthtime: new Date('2023-01-01'), + mtime: new Date('2023-01-02'), + atime: new Date('2023-01-03'), + isDirectory: () => true, + isFile: () => false, + mode: 0o755 + }; + + mockFs.stat.mockResolvedValueOnce(mockStats as any); + + const result = await getFileStats('/test/dir'); + + expect(result.isDirectory).toBe(true); + expect(result.isFile).toBe(false); + expect(result.permissions).toBe('755'); + }); + }); + + describe('readFileContent', () => { + it('reads file with default encoding', async () => { + mockFs.readFile.mockResolvedValueOnce('file content'); + + const result = await readFileContent('/test/file.txt'); + + expect(result).toBe('file content'); + expect(mockFs.readFile).toHaveBeenCalledWith('/test/file.txt', 'utf-8'); + }); + + it('reads file with custom encoding', async () => { + mockFs.readFile.mockResolvedValueOnce('file content'); + + const result = await readFileContent('/test/file.txt', 'ascii'); + + expect(result).toBe('file 
content'); + expect(mockFs.readFile).toHaveBeenCalledWith('/test/file.txt', 'ascii'); + }); + }); + + describe('writeFileContent', () => { + it('writes file content', async () => { + mockFs.writeFile.mockResolvedValueOnce(undefined); + + await writeFileContent('/test/file.txt', 'new content'); + + expect(mockFs.writeFile).toHaveBeenCalledWith('/test/file.txt', 'new content', { encoding: "utf-8", flag: 'wx' }); + }); + }); + + }); + + describe('Search & Filtering Functions', () => { + describe('searchFilesWithValidation', () => { + beforeEach(() => { + mockFs.realpath.mockImplementation(async (path: any) => path.toString()); + }); + + + it('excludes files matching exclude patterns', async () => { + const mockEntries = [ + { name: 'test.txt', isDirectory: () => false }, + { name: 'test.log', isDirectory: () => false }, + { name: 'node_modules', isDirectory: () => true } + ]; + + mockFs.readdir.mockResolvedValueOnce(mockEntries as any); + + const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir'; + const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed']; + + // Mock realpath to return the same path for validation to pass + mockFs.realpath.mockImplementation(async (inputPath: any) => { + const pathStr = inputPath.toString(); + // Return the path as-is for validation + return pathStr; + }); + + const result = await searchFilesWithValidation( + testDir, + '*test*', + allowedDirs, + { excludePatterns: ['*.log', 'node_modules'] } + ); + + const expectedResult = process.platform === 'win32' ? 
'C:\\allowed\\dir\\test.txt' : '/allowed/dir/test.txt'; + expect(result).toEqual([expectedResult]); + }); + + it('handles validation errors during search', async () => { + const mockEntries = [ + { name: 'test.txt', isDirectory: () => false }, + { name: 'invalid_file.txt', isDirectory: () => false } + ]; + + mockFs.readdir.mockResolvedValueOnce(mockEntries as any); + + // Mock validatePath to throw error for invalid_file.txt + mockFs.realpath.mockImplementation(async (path: any) => { + if (path.toString().includes('invalid_file.txt')) { + throw new Error('Access denied'); + } + return path.toString(); + }); + + const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir'; + const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed']; + + const result = await searchFilesWithValidation( + testDir, + '*test*', + allowedDirs, + {} + ); + + // Should only return the valid file, skipping the invalid one + const expectedResult = process.platform === 'win32' ? 'C:\\allowed\\dir\\test.txt' : '/allowed/dir/test.txt'; + expect(result).toEqual([expectedResult]); + }); + + it('handles complex exclude patterns with wildcards', async () => { + const mockEntries = [ + { name: 'test.txt', isDirectory: () => false }, + { name: 'test.backup', isDirectory: () => false }, + { name: 'important_test.js', isDirectory: () => false } + ]; + + mockFs.readdir.mockResolvedValueOnce(mockEntries as any); + + const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir'; + const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed']; + + const result = await searchFilesWithValidation( + testDir, + '*test*', + allowedDirs, + { excludePatterns: ['*.backup'] } + ); + + const expectedResults = process.platform === 'win32' ? 
[ + 'C:\\allowed\\dir\\test.txt', + 'C:\\allowed\\dir\\important_test.js' + ] : [ + '/allowed/dir/test.txt', + '/allowed/dir/important_test.js' + ]; + expect(result).toEqual(expectedResults); + }); + }); + }); + + describe('File Editing Functions', () => { + describe('applyFileEdits', () => { + beforeEach(() => { + mockFs.readFile.mockResolvedValue('line1\nline2\nline3\n'); + mockFs.writeFile.mockResolvedValue(undefined); + }); + + it('applies simple text replacement', async () => { + const edits = [ + { oldText: 'line2', newText: 'modified line2' } + ]; + + mockFs.rename.mockResolvedValueOnce(undefined); + + const result = await applyFileEdits('/test/file.txt', edits, false); + + expect(result).toContain('modified line2'); + // Should write to temporary file then rename + expect(mockFs.writeFile).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/), + 'line1\nmodified line2\nline3\n', + 'utf-8' + ); + expect(mockFs.rename).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/), + '/test/file.txt' + ); + }); + + it('handles dry run mode', async () => { + const edits = [ + { oldText: 'line2', newText: 'modified line2' } + ]; + + const result = await applyFileEdits('/test/file.txt', edits, true); + + expect(result).toContain('modified line2'); + expect(mockFs.writeFile).not.toHaveBeenCalled(); + }); + + it('applies multiple edits sequentially', async () => { + const edits = [ + { oldText: 'line1', newText: 'first line' }, + { oldText: 'line3', newText: 'third line' } + ]; + + mockFs.rename.mockResolvedValueOnce(undefined); + + await applyFileEdits('/test/file.txt', edits, false); + + expect(mockFs.writeFile).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/), + 'first line\nline2\nthird line\n', + 'utf-8' + ); + expect(mockFs.rename).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/), + '/test/file.txt' + ); + }); + + it('handles 
whitespace-flexible matching', async () => { + mockFs.readFile.mockResolvedValue(' line1\n line2\n line3\n'); + + const edits = [ + { oldText: 'line2', newText: 'modified line2' } + ]; + + mockFs.rename.mockResolvedValueOnce(undefined); + + await applyFileEdits('/test/file.txt', edits, false); + + expect(mockFs.writeFile).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/), + ' line1\n modified line2\n line3\n', + 'utf-8' + ); + expect(mockFs.rename).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/), + '/test/file.txt' + ); + }); + + it('throws error for non-matching edits', async () => { + const edits = [ + { oldText: 'nonexistent line', newText: 'replacement' } + ]; + + await expect(applyFileEdits('/test/file.txt', edits, false)) + .rejects.toThrow('Could not find exact match for edit'); + }); + + it('handles complex multi-line edits with indentation', async () => { + mockFs.readFile.mockResolvedValue('function test() {\n console.log("hello");\n return true;\n}'); + + const edits = [ + { + oldText: ' console.log("hello");\n return true;', + newText: ' console.log("world");\n console.log("test");\n return false;' + } + ]; + + mockFs.rename.mockResolvedValueOnce(undefined); + + await applyFileEdits('/test/file.js', edits, false); + + expect(mockFs.writeFile).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/), + 'function test() {\n console.log("world");\n console.log("test");\n return false;\n}', + 'utf-8' + ); + expect(mockFs.rename).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/), + '/test/file.js' + ); + }); + + it('handles edits with different indentation patterns', async () => { + mockFs.readFile.mockResolvedValue(' if (condition) {\n doSomething();\n }'); + + const edits = [ + { + oldText: 'doSomething();', + newText: 'doSomethingElse();\n doAnotherThing();' + } + ]; + + mockFs.rename.mockResolvedValueOnce(undefined); + + await 
applyFileEdits('/test/file.js', edits, false); + + expect(mockFs.writeFile).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/), + ' if (condition) {\n doSomethingElse();\n doAnotherThing();\n }', + 'utf-8' + ); + expect(mockFs.rename).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/), + '/test/file.js' + ); + }); + + it('handles CRLF line endings in file content', async () => { + mockFs.readFile.mockResolvedValue('line1\r\nline2\r\nline3\r\n'); + + const edits = [ + { oldText: 'line2', newText: 'modified line2' } + ]; + + mockFs.rename.mockResolvedValueOnce(undefined); + + await applyFileEdits('/test/file.txt', edits, false); + + expect(mockFs.writeFile).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/), + 'line1\nmodified line2\nline3\n', + 'utf-8' + ); + expect(mockFs.rename).toHaveBeenCalledWith( + expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/), + '/test/file.txt' + ); + }); + }); + + describe('tailFile', () => { + it('handles empty files', async () => { + mockFs.stat.mockResolvedValue({ size: 0 } as any); + + const result = await tailFile('/test/empty.txt', 5); + + expect(result).toBe(''); + expect(mockFs.open).not.toHaveBeenCalled(); + }); + + it('calls stat to check file size', async () => { + mockFs.stat.mockResolvedValue({ size: 100 } as any); + + // Mock file handle with proper typing + const mockFileHandle = { + read: vi.fn(), + close: vi.fn() + } as any; + + mockFileHandle.read.mockResolvedValue({ bytesRead: 0 }); + mockFileHandle.close.mockResolvedValue(undefined); + + mockFs.open.mockResolvedValue(mockFileHandle); + + await tailFile('/test/file.txt', 2); + + expect(mockFs.stat).toHaveBeenCalledWith('/test/file.txt'); + expect(mockFs.open).toHaveBeenCalledWith('/test/file.txt', 'r'); + }); + + it('handles files with content and returns last lines', async () => { + mockFs.stat.mockResolvedValue({ size: 50 } as any); + + const mockFileHandle = { 
+ read: vi.fn(), + close: vi.fn() + } as any; + + // Simulate reading file content in chunks + mockFileHandle.read + .mockResolvedValueOnce({ bytesRead: 20, buffer: Buffer.from('line3\nline4\nline5\n') }) + .mockResolvedValueOnce({ bytesRead: 0 }); + mockFileHandle.close.mockResolvedValue(undefined); + + mockFs.open.mockResolvedValue(mockFileHandle); + + const result = await tailFile('/test/file.txt', 2); + + expect(mockFileHandle.close).toHaveBeenCalled(); + }); + + it('handles read errors gracefully', async () => { + mockFs.stat.mockResolvedValue({ size: 100 } as any); + + const mockFileHandle = { + read: vi.fn(), + close: vi.fn() + } as any; + + mockFileHandle.read.mockResolvedValue({ bytesRead: 0 }); + mockFileHandle.close.mockResolvedValue(undefined); + + mockFs.open.mockResolvedValue(mockFileHandle); + + await tailFile('/test/file.txt', 5); + + expect(mockFileHandle.close).toHaveBeenCalled(); + }); + }); + + describe('headFile', () => { + it('opens file for reading', async () => { + // Mock file handle with proper typing + const mockFileHandle = { + read: vi.fn(), + close: vi.fn() + } as any; + + mockFileHandle.read.mockResolvedValue({ bytesRead: 0 }); + mockFileHandle.close.mockResolvedValue(undefined); + + mockFs.open.mockResolvedValue(mockFileHandle); + + await headFile('/test/file.txt', 2); + + expect(mockFs.open).toHaveBeenCalledWith('/test/file.txt', 'r'); + }); + + it('handles files with content and returns first lines', async () => { + const mockFileHandle = { + read: vi.fn(), + close: vi.fn() + } as any; + + // Simulate reading file content with newlines + mockFileHandle.read + .mockResolvedValueOnce({ bytesRead: 20, buffer: Buffer.from('line1\nline2\nline3\n') }) + .mockResolvedValueOnce({ bytesRead: 0 }); + mockFileHandle.close.mockResolvedValue(undefined); + + mockFs.open.mockResolvedValue(mockFileHandle); + + const result = await headFile('/test/file.txt', 2); + + expect(mockFileHandle.close).toHaveBeenCalled(); + }); + + it('handles files with 
leftover content', async () => { + const mockFileHandle = { + read: vi.fn(), + close: vi.fn() + } as any; + + // Simulate reading file content without final newline + mockFileHandle.read + .mockResolvedValueOnce({ bytesRead: 15, buffer: Buffer.from('line1\nline2\nend') }) + .mockResolvedValueOnce({ bytesRead: 0 }); + mockFileHandle.close.mockResolvedValue(undefined); + + mockFs.open.mockResolvedValue(mockFileHandle); + + const result = await headFile('/test/file.txt', 5); + + expect(mockFileHandle.close).toHaveBeenCalled(); + }); + + it('handles reaching requested line count', async () => { + const mockFileHandle = { + read: vi.fn(), + close: vi.fn() + } as any; + + // Simulate reading exactly the requested number of lines + mockFileHandle.read + .mockResolvedValueOnce({ bytesRead: 12, buffer: Buffer.from('line1\nline2\n') }) + .mockResolvedValueOnce({ bytesRead: 0 }); + mockFileHandle.close.mockResolvedValue(undefined); + + mockFs.open.mockResolvedValue(mockFileHandle); + + const result = await headFile('/test/file.txt', 2); + + expect(mockFileHandle.close).toHaveBeenCalled(); + }); + }); + }); +}); diff --git a/mcpServer/modules/filesystem/__tests__/path-utils.test.ts b/mcpServer/modules/filesystem/__tests__/path-utils.test.ts new file mode 100644 index 0000000..5530cba --- /dev/null +++ b/mcpServer/modules/filesystem/__tests__/path-utils.test.ts @@ -0,0 +1,371 @@ +import { describe, it, expect, afterEach } from 'vitest'; +import { normalizePath, expandHome, convertToWindowsPath } from '../path-utils.js'; + +describe('Path Utilities', () => { + describe('convertToWindowsPath', () => { + it('leaves Unix paths unchanged', () => { + expect(convertToWindowsPath('/usr/local/bin')) + .toBe('/usr/local/bin'); + expect(convertToWindowsPath('/home/user/some path')) + .toBe('/home/user/some path'); + }); + + it('never converts WSL paths (they work correctly in WSL with Node.js fs)', () => { + // WSL paths should NEVER be converted, regardless of platform + // They are 
valid Linux paths that work with Node.js fs operations inside WSL + expect(convertToWindowsPath('/mnt/c/NS/MyKindleContent')) + .toBe('/mnt/c/NS/MyKindleContent'); + expect(convertToWindowsPath('/mnt/d/Documents')) + .toBe('/mnt/d/Documents'); + }); + + it('converts Unix-style Windows paths only on Windows platform', () => { + // On Windows, /c/ style paths should be converted + if (process.platform === 'win32') { + expect(convertToWindowsPath('/c/NS/MyKindleContent')) + .toBe('C:\\NS\\MyKindleContent'); + } else { + // On Linux, leave them unchanged + expect(convertToWindowsPath('/c/NS/MyKindleContent')) + .toBe('/c/NS/MyKindleContent'); + } + }); + + it('leaves Windows paths unchanged but ensures backslashes', () => { + expect(convertToWindowsPath('C:\\NS\\MyKindleContent')) + .toBe('C:\\NS\\MyKindleContent'); + expect(convertToWindowsPath('C:/NS/MyKindleContent')) + .toBe('C:\\NS\\MyKindleContent'); + }); + + it('handles Windows paths with spaces', () => { + expect(convertToWindowsPath('C:\\Program Files\\Some App')) + .toBe('C:\\Program Files\\Some App'); + expect(convertToWindowsPath('C:/Program Files/Some App')) + .toBe('C:\\Program Files\\Some App'); + }); + + it('handles drive letter paths based on platform', () => { + // WSL paths should never be converted + expect(convertToWindowsPath('/mnt/d/some/path')) + .toBe('/mnt/d/some/path'); + + if (process.platform === 'win32') { + // On Windows, Unix-style paths like /d/ should be converted + expect(convertToWindowsPath('/d/some/path')) + .toBe('D:\\some\\path'); + } else { + // On Linux, /d/ is just a regular Unix path + expect(convertToWindowsPath('/d/some/path')) + .toBe('/d/some/path'); + } + }); + }); + + describe('normalizePath', () => { + it('preserves Unix paths', () => { + expect(normalizePath('/usr/local/bin')) + .toBe('/usr/local/bin'); + expect(normalizePath('/home/user/some path')) + .toBe('/home/user/some path'); + expect(normalizePath('"/usr/local/some app/"')) + .toBe('/usr/local/some app'); + 
expect(normalizePath('/usr/local//bin/app///')) + .toBe('/usr/local/bin/app'); + expect(normalizePath('/')) + .toBe('/'); + expect(normalizePath('///')) + .toBe('/'); + }); + + it('removes surrounding quotes', () => { + expect(normalizePath('"C:\\NS\\My Kindle Content"')) + .toBe('C:\\NS\\My Kindle Content'); + }); + + it('normalizes backslashes', () => { + expect(normalizePath('C:\\\\NS\\\\MyKindleContent')) + .toBe('C:\\NS\\MyKindleContent'); + }); + + it('converts forward slashes to backslashes on Windows', () => { + expect(normalizePath('C:/NS/MyKindleContent')) + .toBe('C:\\NS\\MyKindleContent'); + }); + + it('always preserves WSL paths (they work correctly in WSL)', () => { + // WSL paths should ALWAYS be preserved, regardless of platform + // This is the fix for issue #2795 + expect(normalizePath('/mnt/c/NS/MyKindleContent')) + .toBe('/mnt/c/NS/MyKindleContent'); + expect(normalizePath('/mnt/d/Documents')) + .toBe('/mnt/d/Documents'); + }); + + it('handles Unix-style Windows paths', () => { + // On Windows, /c/ paths should be converted + if (process.platform === 'win32') { + expect(normalizePath('/c/NS/MyKindleContent')) + .toBe('C:\\NS\\MyKindleContent'); + } else if (process.platform === 'linux') { + // On Linux, /c/ is just a regular Unix path + expect(normalizePath('/c/NS/MyKindleContent')) + .toBe('/c/NS/MyKindleContent'); + } + }); + + it('handles paths with spaces and mixed slashes', () => { + expect(normalizePath('C:/NS/My Kindle Content')) + .toBe('C:\\NS\\My Kindle Content'); + // WSL paths should always be preserved + expect(normalizePath('/mnt/c/NS/My Kindle Content')) + .toBe('/mnt/c/NS/My Kindle Content'); + expect(normalizePath('C:\\Program Files (x86)\\App Name')) + .toBe('C:\\Program Files (x86)\\App Name'); + expect(normalizePath('"C:\\Program Files\\App Name"')) + .toBe('C:\\Program Files\\App Name'); + expect(normalizePath(' C:\\Program Files\\App Name ')) + .toBe('C:\\Program Files\\App Name'); + }); + + it('preserves spaces in all path 
formats', () => { + // WSL paths should always be preserved + expect(normalizePath('/mnt/c/Program Files/App Name')) + .toBe('/mnt/c/Program Files/App Name'); + + if (process.platform === 'win32') { + // On Windows, Unix-style paths like /c/ should be converted + expect(normalizePath('/c/Program Files/App Name')) + .toBe('C:\\Program Files\\App Name'); + } else { + // On Linux, /c/ is just a regular Unix path + expect(normalizePath('/c/Program Files/App Name')) + .toBe('/c/Program Files/App Name'); + } + expect(normalizePath('C:/Program Files/App Name')) + .toBe('C:\\Program Files\\App Name'); + }); + + it('handles special characters in paths', () => { + // Test ampersand in path + expect(normalizePath('C:\\NS\\Sub&Folder')) + .toBe('C:\\NS\\Sub&Folder'); + expect(normalizePath('C:/NS/Sub&Folder')) + .toBe('C:\\NS\\Sub&Folder'); + // WSL paths should always be preserved + expect(normalizePath('/mnt/c/NS/Sub&Folder')) + .toBe('/mnt/c/NS/Sub&Folder'); + + // Test tilde in path (short names in Windows) + expect(normalizePath('C:\\NS\\MYKIND~1')) + .toBe('C:\\NS\\MYKIND~1'); + expect(normalizePath('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1')) + .toBe('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1'); + + // Test other special characters + expect(normalizePath('C:\\Path with #hash')) + .toBe('C:\\Path with #hash'); + expect(normalizePath('C:\\Path with (parentheses)')) + .toBe('C:\\Path with (parentheses)'); + expect(normalizePath('C:\\Path with [brackets]')) + .toBe('C:\\Path with [brackets]'); + expect(normalizePath('C:\\Path with @at+plus$dollar%percent')) + .toBe('C:\\Path with @at+plus$dollar%percent'); + }); + + it('capitalizes lowercase drive letters for Windows paths', () => { + expect(normalizePath('c:/windows/system32')) + .toBe('C:\\windows\\system32'); + // WSL paths should always be preserved + expect(normalizePath('/mnt/d/my/folder')) + .toBe('/mnt/d/my/folder'); + + if (process.platform === 'win32') { + // On Windows, Unix-style paths should be 
converted and capitalized + expect(normalizePath('/e/another/folder')) + .toBe('E:\\another\\folder'); + } else { + // On Linux, /e/ is just a regular Unix path + expect(normalizePath('/e/another/folder')) + .toBe('/e/another/folder'); + } + }); + + it('handles UNC paths correctly', () => { + // UNC paths should preserve the leading double backslash + const uncPath = '\\\\SERVER\\share\\folder'; + expect(normalizePath(uncPath)).toBe('\\\\SERVER\\share\\folder'); + + // Test UNC path with double backslashes that need normalization + const uncPathWithDoubles = '\\\\\\\\SERVER\\\\share\\\\folder'; + expect(normalizePath(uncPathWithDoubles)).toBe('\\\\SERVER\\share\\folder'); + }); + + it('returns normalized non-Windows/WSL/Unix-style Windows paths as is after basic normalization', () => { + // A path that looks somewhat absolute but isn't a drive or recognized Unix root for Windows conversion + // These paths should be preserved as-is (not converted to Windows C:\ format or WSL format) + const otherAbsolutePath = '\\someserver\\share\\file'; + expect(normalizePath(otherAbsolutePath)).toBe(otherAbsolutePath); + }); + }); + + describe('expandHome', () => { + it('expands ~ to home directory', () => { + const result = expandHome('~/test'); + expect(result).toContain('test'); + expect(result).not.toContain('~'); + }); + + it('expands bare ~ to home directory', () => { + const result = expandHome('~'); + expect(result).not.toContain('~'); + expect(result.length).toBeGreaterThan(0); + }); + + it('leaves other paths unchanged', () => { + expect(expandHome('C:/test')).toBe('C:/test'); + }); + }); + + describe('WSL path handling (issue #2795 fix)', () => { + // Save original platform + const originalPlatform = process.platform; + + afterEach(() => { + // Restore platform after each test + Object.defineProperty(process, 'platform', { + value: originalPlatform, + writable: true, + configurable: true + }); + }); + + it('should NEVER convert WSL paths - they work correctly in WSL 
with Node.js fs', () => { + // The key insight: When running `wsl npx ...`, Node.js runs INSIDE WSL (process.platform === 'linux') + // and /mnt/c/ paths work correctly with Node.js fs operations in that environment. + // Converting them to C:\ format breaks fs operations because Windows paths don't work inside WSL. + + // Mock Linux platform (inside WSL) + Object.defineProperty(process, 'platform', { + value: 'linux', + writable: true, + configurable: true + }); + + // WSL paths should NOT be converted, even inside WSL + expect(normalizePath('/mnt/c/Users/username/folder')) + .toBe('/mnt/c/Users/username/folder'); + + expect(normalizePath('/mnt/d/Documents/project')) + .toBe('/mnt/d/Documents/project'); + }); + + it('should also preserve WSL paths when running on Windows', () => { + // Mock Windows platform + Object.defineProperty(process, 'platform', { + value: 'win32', + writable: true, + configurable: true + }); + + // WSL paths should still be preserved (though they wouldn't be accessible from Windows Node.js) + expect(normalizePath('/mnt/c/Users/username/folder')) + .toBe('/mnt/c/Users/username/folder'); + + expect(normalizePath('/mnt/d/Documents/project')) + .toBe('/mnt/d/Documents/project'); + }); + + it('should convert Unix-style Windows paths (/c/) only when running on Windows (win32)', () => { + // Mock process.platform to be 'win32' (Windows) + Object.defineProperty(process, 'platform', { + value: 'win32', + writable: true, + configurable: true + }); + + // Unix-style Windows paths like /c/ should be converted on Windows + expect(normalizePath('/c/Users/username/folder')) + .toBe('C:\\Users\\username\\folder'); + + expect(normalizePath('/d/Documents/project')) + .toBe('D:\\Documents\\project'); + }); + + it('should NOT convert Unix-style paths (/c/) when running inside WSL (linux)', () => { + // Mock process.platform to be 'linux' (WSL/Linux) + Object.defineProperty(process, 'platform', { + value: 'linux', + writable: true, + configurable: true + }); + 
+ // When on Linux, /c/ is just a regular Unix directory, not a drive letter + expect(normalizePath('/c/some/path')) + .toBe('/c/some/path'); + + expect(normalizePath('/d/another/path')) + .toBe('/d/another/path'); + }); + + it('should preserve regular Unix paths on all platforms', () => { + // Test on Linux + Object.defineProperty(process, 'platform', { + value: 'linux', + writable: true, + configurable: true + }); + + expect(normalizePath('/home/user/documents')) + .toBe('/home/user/documents'); + + expect(normalizePath('/var/log/app')) + .toBe('/var/log/app'); + + // Test on Windows (though these paths wouldn't work on Windows) + Object.defineProperty(process, 'platform', { + value: 'win32', + writable: true, + configurable: true + }); + + expect(normalizePath('/home/user/documents')) + .toBe('/home/user/documents'); + + expect(normalizePath('/var/log/app')) + .toBe('/var/log/app'); + }); + + it('reproduces exact scenario from issue #2795', () => { + // Simulate running inside WSL: wsl npx @modelcontextprotocol/server-filesystem /mnt/c/Users/username/folder + Object.defineProperty(process, 'platform', { + value: 'linux', + writable: true, + configurable: true + }); + + // This is the exact path from the issue + const inputPath = '/mnt/c/Users/username/folder'; + const result = normalizePath(inputPath); + + // Should NOT convert to C:\Users\username\folder + expect(result).toBe('/mnt/c/Users/username/folder'); + expect(result).not.toContain('C:'); + expect(result).not.toContain('\\'); + }); + + it('should handle relative path slash conversion based on platform', () => { + // This test verifies platform-specific behavior naturally without mocking + // On Windows: forward slashes converted to backslashes + // On Linux/Unix: forward slashes preserved + const relativePath = 'some/relative/path'; + const result = normalizePath(relativePath); + + if (originalPlatform === 'win32') { + expect(result).toBe('some\\relative\\path'); + } else { + 
expect(result).toBe('some/relative/path'); + } + }); + }); +}); diff --git a/mcpServer/modules/filesystem/__tests__/path-validation.test.ts b/mcpServer/modules/filesystem/__tests__/path-validation.test.ts new file mode 100644 index 0000000..098119e --- /dev/null +++ b/mcpServer/modules/filesystem/__tests__/path-validation.test.ts @@ -0,0 +1,953 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import * as path from 'path'; +import * as fs from 'fs/promises'; +import * as os from 'os'; +import { isPathWithinAllowedDirectories } from '../path-validation.js'; + +/** + * Check if the current environment supports symlink creation + */ +async function checkSymlinkSupport(): Promise { + const testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'symlink-test-')); + try { + const targetFile = path.join(testDir, 'target.txt'); + const linkFile = path.join(testDir, 'link.txt'); + + await fs.writeFile(targetFile, 'test'); + await fs.symlink(targetFile, linkFile); + + // If we get here, symlinks are supported + return true; + } catch (error) { + // EPERM indicates no symlink permissions + if ((error as NodeJS.ErrnoException).code === 'EPERM') { + return false; + } + // Other errors might indicate a real problem + throw error; + } finally { + await fs.rm(testDir, { recursive: true, force: true }); + } +} + +// Global variable to store symlink support status +let symlinkSupported: boolean | null = null; + +/** + * Get cached symlink support status, checking once per test run + */ +async function getSymlinkSupport(): Promise { + if (symlinkSupported === null) { + symlinkSupported = await checkSymlinkSupport(); + if (!symlinkSupported) { + console.log('\n⚠️ Symlink tests will be skipped - symlink creation not supported in this environment'); + console.log(' On Windows, enable Developer Mode or run as Administrator to enable symlink tests'); + } + } + return symlinkSupported; +} + +describe('Path Validation', () => { + it('allows exact directory match', () => 
{ + const allowed = ['/home/user/project']; + expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(true); + }); + + it('allows subdirectories', () => { + const allowed = ['/home/user/project']; + expect(isPathWithinAllowedDirectories('/home/user/project/src', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project/src/index.js', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project/deeply/nested/file.txt', allowed)).toBe(true); + }); + + it('blocks similar directory names (prefix vulnerability)', () => { + const allowed = ['/home/user/project']; + expect(isPathWithinAllowedDirectories('/home/user/project2', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project_backup', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project-old', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/projectile', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project.bak', allowed)).toBe(false); + }); + + it('blocks paths outside allowed directories', () => { + const allowed = ['/home/user/project']; + expect(isPathWithinAllowedDirectories('/home/user/other', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/etc/passwd', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/', allowed)).toBe(false); + }); + + it('handles multiple allowed directories', () => { + const allowed = ['/home/user/project1', '/home/user/project2']; + expect(isPathWithinAllowedDirectories('/home/user/project1/src', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project2/src', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project3', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project1_backup', allowed)).toBe(false); + 
expect(isPathWithinAllowedDirectories('/home/user/project2-old', allowed)).toBe(false); + }); + + it('blocks parent and sibling directories', () => { + const allowed = ['/test/allowed']; + + // Parent directory + expect(isPathWithinAllowedDirectories('/test', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/', allowed)).toBe(false); + + // Sibling with common prefix + expect(isPathWithinAllowedDirectories('/test/allowed_sibling', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/test/allowed2', allowed)).toBe(false); + }); + + it('handles paths with special characters', () => { + const allowed = ['/home/user/my-project (v2)']; + + expect(isPathWithinAllowedDirectories('/home/user/my-project (v2)', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/my-project (v2)/src', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/my-project (v2)_backup', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/my-project', allowed)).toBe(false); + }); + + describe('Input validation', () => { + it('rejects empty inputs', () => { + const allowed = ['/home/user/project']; + + expect(isPathWithinAllowedDirectories('', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project', [])).toBe(false); + }); + + it('handles trailing separators correctly', () => { + const allowed = ['/home/user/project']; + + // Path with trailing separator should still match + expect(isPathWithinAllowedDirectories('/home/user/project/', allowed)).toBe(true); + + // Allowed directory with trailing separator + const allowedWithSep = ['/home/user/project/']; + expect(isPathWithinAllowedDirectories('/home/user/project', allowedWithSep)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project/', allowedWithSep)).toBe(true); + + // Should still block similar names with or without trailing separators + expect(isPathWithinAllowedDirectories('/home/user/project2', 
allowedWithSep)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project2', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project2/', allowed)).toBe(false); + }); + + it('skips empty directory entries in allowed list', () => { + const allowed = ['', '/home/user/project', '']; + expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project/src', allowed)).toBe(true); + + // Should still validate properly with empty entries + expect(isPathWithinAllowedDirectories('/home/user/other', allowed)).toBe(false); + }); + + it('handles Windows paths with trailing separators', () => { + if (path.sep === '\\') { + const allowed = ['C:\\Users\\project']; + + // Path with trailing separator + expect(isPathWithinAllowedDirectories('C:\\Users\\project\\', allowed)).toBe(true); + + // Allowed with trailing separator + const allowedWithSep = ['C:\\Users\\project\\']; + expect(isPathWithinAllowedDirectories('C:\\Users\\project', allowedWithSep)).toBe(true); + expect(isPathWithinAllowedDirectories('C:\\Users\\project\\', allowedWithSep)).toBe(true); + + // Should still block similar names + expect(isPathWithinAllowedDirectories('C:\\Users\\project2\\', allowed)).toBe(false); + } + }); + }); + + describe('Error handling', () => { + it('normalizes relative paths to absolute', () => { + const allowed = [process.cwd()]; + + // Relative paths get normalized to absolute paths based on cwd + expect(isPathWithinAllowedDirectories('relative/path', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('./file', allowed)).toBe(true); + + // Parent directory references that escape allowed directory + const parentAllowed = ['/home/user/project']; + expect(isPathWithinAllowedDirectories('../parent', parentAllowed)).toBe(false); + }); + + it('returns false for relative paths in allowed directories', () => { + const badAllowed = ['relative/path', 
'/some/other/absolute/path']; + + // Relative paths in allowed dirs are normalized to absolute based on cwd + // The normalized 'relative/path' won't match our test path + expect(isPathWithinAllowedDirectories('/some/other/absolute/path/file', badAllowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/absolute/path/file', badAllowed)).toBe(false); + }); + + it('handles null and undefined inputs gracefully', () => { + const allowed = ['/home/user/project']; + + // Should return false, not crash + expect(isPathWithinAllowedDirectories(null as any, allowed)).toBe(false); + expect(isPathWithinAllowedDirectories(undefined as any, allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/path', null as any)).toBe(false); + expect(isPathWithinAllowedDirectories('/path', undefined as any)).toBe(false); + }); + }); + + describe('Unicode and special characters', () => { + it('handles unicode characters in paths', () => { + const allowed = ['/home/user/café']; + + expect(isPathWithinAllowedDirectories('/home/user/café', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/café/file', allowed)).toBe(true); + + // Different unicode representation won't match (not normalized) + const decomposed = '/home/user/cafe\u0301'; // e + combining accent + expect(isPathWithinAllowedDirectories(decomposed, allowed)).toBe(false); + }); + + it('handles paths with spaces correctly', () => { + const allowed = ['/home/user/my project']; + + expect(isPathWithinAllowedDirectories('/home/user/my project', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/my project/file', allowed)).toBe(true); + + // Partial matches should fail + expect(isPathWithinAllowedDirectories('/home/user/my', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/my proj', allowed)).toBe(false); + }); + }); + + describe('Overlapping allowed directories', () => { + it('handles nested allowed directories correctly', () => { + const allowed = 
['/home', '/home/user', '/home/user/project']; + + // All paths under /home are allowed + expect(isPathWithinAllowedDirectories('/home/anything', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/anything', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project/anything', allowed)).toBe(true); + + // First match wins (most permissive) + expect(isPathWithinAllowedDirectories('/home/other/deep/path', allowed)).toBe(true); + }); + + it('handles root directory as allowed', () => { + const allowed = ['/']; + + // Everything is allowed under root (dangerous configuration) + expect(isPathWithinAllowedDirectories('/', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/any/path', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/etc/passwd', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/secret', allowed)).toBe(true); + + // But only on the same filesystem root + if (path.sep === '\\') { + expect(isPathWithinAllowedDirectories('D:\\other', ['/'])).toBe(false); + } + }); + }); + + describe('Cross-platform behavior', () => { + it('handles Windows-style paths on Windows', () => { + if (path.sep === '\\') { + const allowed = ['C:\\Users\\project']; + expect(isPathWithinAllowedDirectories('C:\\Users\\project', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('C:\\Users\\project\\src', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('C:\\Users\\project2', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('C:\\Users\\project_backup', allowed)).toBe(false); + } + }); + + it('handles Unix-style paths on Unix', () => { + if (path.sep === '/') { + const allowed = ['/home/user/project']; + expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project/src', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project2', allowed)).toBe(false); + 
} + }); + }); + + describe('Validation Tests - Path Traversal', () => { + it('blocks path traversal attempts', () => { + const allowed = ['/home/user/project']; + + // Basic traversal attempts + expect(isPathWithinAllowedDirectories('/home/user/project/../../../etc/passwd', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project/../../other', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project/../project2', allowed)).toBe(false); + + // Mixed traversal with valid segments + expect(isPathWithinAllowedDirectories('/home/user/project/src/../../project2', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project/./../../other', allowed)).toBe(false); + + // Multiple traversal sequences + expect(isPathWithinAllowedDirectories('/home/user/project/../project/../../../etc', allowed)).toBe(false); + }); + + it('blocks traversal in allowed directories', () => { + const allowed = ['/home/user/project/../safe']; + + // The allowed directory itself should be normalized and safe + expect(isPathWithinAllowedDirectories('/home/user/safe/file', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project/file', allowed)).toBe(false); + }); + + it('handles complex traversal patterns', () => { + const allowed = ['/home/user/project']; + + // Double dots in filenames (not traversal) - these normalize to paths within allowed dir + expect(isPathWithinAllowedDirectories('/home/user/project/..test', allowed)).toBe(true); // Not traversal + expect(isPathWithinAllowedDirectories('/home/user/project/test..', allowed)).toBe(true); // Not traversal + expect(isPathWithinAllowedDirectories('/home/user/project/te..st', allowed)).toBe(true); // Not traversal + + // Actual traversal + expect(isPathWithinAllowedDirectories('/home/user/project/../test', allowed)).toBe(false); // Is traversal - goes to /home/user/test + + // Edge case: /home/user/project/.. 
normalizes to /home/user (parent dir) + expect(isPathWithinAllowedDirectories('/home/user/project/..', allowed)).toBe(false); // Goes to parent + }); + }); + + describe('Validation Tests - Null Bytes', () => { + it('rejects paths with null bytes', () => { + const allowed = ['/home/user/project']; + + expect(isPathWithinAllowedDirectories('/home/user/project\x00/etc/passwd', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project/test\x00.txt', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('\x00/home/user/project', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project/\x00', allowed)).toBe(false); + }); + + it('rejects allowed directories with null bytes', () => { + const allowed = ['/home/user/project\x00']; + + expect(isPathWithinAllowedDirectories('/home/user/project', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project/file', allowed)).toBe(false); + }); + }); + + describe('Validation Tests - Special Characters', () => { + it('allows percent signs in filenames', () => { + const allowed = ['/home/user/project']; + + // Percent is a valid filename character + expect(isPathWithinAllowedDirectories('/home/user/project/report_50%.pdf', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project/Q1_25%_growth', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project/%41', allowed)).toBe(true); // File named %41 + + // URL encoding is NOT decoded by path.normalize, so these are just odd filenames + expect(isPathWithinAllowedDirectories('/home/user/project/%2e%2e', allowed)).toBe(true); // File named "%2e%2e" + expect(isPathWithinAllowedDirectories('/home/user/project/file%20name', allowed)).toBe(true); // File with %20 in name + }); + + it('handles percent signs in allowed directories', () => { + const allowed = ['/home/user/project%20files']; + + // This is a directory literally named "project%20files" + 
expect(isPathWithinAllowedDirectories('/home/user/project%20files/test', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project files/test', allowed)).toBe(false); // Different dir + }); + }); + + describe('Path Normalization', () => { + it('normalizes paths before comparison', () => { + const allowed = ['/home/user/project']; + + // Trailing slashes + expect(isPathWithinAllowedDirectories('/home/user/project/', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project//', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project///', allowed)).toBe(true); + + // Current directory references + expect(isPathWithinAllowedDirectories('/home/user/project/./src', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/./project/src', allowed)).toBe(true); + + // Multiple slashes + expect(isPathWithinAllowedDirectories('/home/user/project//src//file', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home//user//project//src', allowed)).toBe(true); + + // Should still block outside paths + expect(isPathWithinAllowedDirectories('/home/user//project2', allowed)).toBe(false); + }); + + it('handles mixed separators correctly', () => { + if (path.sep === '\\') { + const allowed = ['C:\\Users\\project']; + + // Mixed separators should be normalized + expect(isPathWithinAllowedDirectories('C:/Users/project', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('C:\\Users/project\\src', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('C:/Users\\project/src', allowed)).toBe(true); + } + }); + }); + + describe('Edge Cases', () => { + it('rejects non-string inputs safely', () => { + const allowed = ['/home/user/project']; + + expect(isPathWithinAllowedDirectories(123 as any, allowed)).toBe(false); + expect(isPathWithinAllowedDirectories({} as any, allowed)).toBe(false); + expect(isPathWithinAllowedDirectories([] as any, allowed)).toBe(false); + 
expect(isPathWithinAllowedDirectories(null as any, allowed)).toBe(false); + expect(isPathWithinAllowedDirectories(undefined as any, allowed)).toBe(false); + + // Non-string in allowed directories + expect(isPathWithinAllowedDirectories('/home/user/project', [123 as any])).toBe(false); + expect(isPathWithinAllowedDirectories('/home/user/project', [{} as any])).toBe(false); + }); + + it('handles very long paths', () => { + const allowed = ['/home/user/project']; + + // Create a very long path that's still valid + const longSubPath = 'a/'.repeat(1000) + 'file.txt'; + expect(isPathWithinAllowedDirectories(`/home/user/project/${longSubPath}`, allowed)).toBe(true); + + // Very long path that escapes + const escapePath = 'a/'.repeat(1000) + '../'.repeat(1001) + 'etc/passwd'; + expect(isPathWithinAllowedDirectories(`/home/user/project/${escapePath}`, allowed)).toBe(false); + }); + }); + + describe('Additional Coverage', () => { + it('handles allowed directories with traversal that normalizes safely', () => { + // These allowed dirs contain traversal but normalize to valid paths + const allowed = ['/home/user/../user/project']; + + // Should normalize to /home/user/project and work correctly + expect(isPathWithinAllowedDirectories('/home/user/project/file', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/other', allowed)).toBe(false); + }); + + it('handles symbolic dots in filenames', () => { + const allowed = ['/home/user/project']; + + // Single and double dots as actual filenames (not traversal) + expect(isPathWithinAllowedDirectories('/home/user/project/.', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('/home/user/project/..', allowed)).toBe(false); // This normalizes to parent + expect(isPathWithinAllowedDirectories('/home/user/project/...', allowed)).toBe(true); // Three dots is a valid filename + expect(isPathWithinAllowedDirectories('/home/user/project/....', allowed)).toBe(true); // Four dots is a valid filename + }); + + 
it('handles UNC paths on Windows', () => { + if (path.sep === '\\') { + const allowed = ['\\\\server\\share\\project']; + + expect(isPathWithinAllowedDirectories('\\\\server\\share\\project', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('\\\\server\\share\\project\\file', allowed)).toBe(true); + expect(isPathWithinAllowedDirectories('\\\\server\\share\\other', allowed)).toBe(false); + expect(isPathWithinAllowedDirectories('\\\\other\\share\\project', allowed)).toBe(false); + } + }); + }); + + describe('Symlink Tests', () => { + let testDir: string; + let allowedDir: string; + let forbiddenDir: string; + + beforeEach(async () => { + testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'fs-error-test-')); + allowedDir = path.join(testDir, 'allowed'); + forbiddenDir = path.join(testDir, 'forbidden'); + + await fs.mkdir(allowedDir, { recursive: true }); + await fs.mkdir(forbiddenDir, { recursive: true }); + }); + + afterEach(async () => { + await fs.rm(testDir, { recursive: true, force: true }); + }); + + it('validates symlink handling', async () => { + // Test with symlinks + try { + const linkPath = path.join(allowedDir, 'bad-link'); + const targetPath = path.join(forbiddenDir, 'target.txt'); + + await fs.writeFile(targetPath, 'content'); + await fs.symlink(targetPath, linkPath); + + // In real implementation, this would throw with the resolved path + const realPath = await fs.realpath(linkPath); + const allowed = [allowedDir]; + + // Symlink target should be outside allowed directory + expect(isPathWithinAllowedDirectories(realPath, allowed)).toBe(false); + } catch (error) { + // Skip if no symlink permissions + } + }); + + it('handles non-existent paths correctly', async () => { + const newFilePath = path.join(allowedDir, 'subdir', 'newfile.txt'); + + // Parent directory doesn't exist + try { + await fs.access(newFilePath); + } catch (error) { + expect((error as NodeJS.ErrnoException).code).toBe('ENOENT'); + } + + // After creating parent, validation 
should work + await fs.mkdir(path.dirname(newFilePath), { recursive: true }); + const allowed = [allowedDir]; + expect(isPathWithinAllowedDirectories(newFilePath, allowed)).toBe(true); + }); + + // Test path resolution consistency for symlinked files + it('validates symlinked files consistently between path and resolved forms', async () => { + try { + // Setup: Create target file in forbidden area + const targetFile = path.join(forbiddenDir, 'target.txt'); + await fs.writeFile(targetFile, 'TARGET_CONTENT'); + + // Create symlink inside allowed directory pointing to forbidden file + const symlinkPath = path.join(allowedDir, 'link-to-target.txt'); + await fs.symlink(targetFile, symlinkPath); + + // The symlink path itself passes validation (looks like it's in allowed dir) + expect(isPathWithinAllowedDirectories(symlinkPath, [allowedDir])).toBe(true); + + // But the resolved path should fail validation + const resolvedPath = await fs.realpath(symlinkPath); + expect(isPathWithinAllowedDirectories(resolvedPath, [allowedDir])).toBe(false); + + // Verify the resolved path goes to the forbidden location (normalize both paths for macOS temp dirs) + expect(await fs.realpath(resolvedPath)).toBe(await fs.realpath(targetFile)); + } catch (error) { + // Skip if no symlink permissions on the system + if ((error as NodeJS.ErrnoException).code !== 'EPERM') { + throw error; + } + } + }); + + // Test allowed directory resolution behavior + it('validates paths correctly when allowed directory is resolved from symlink', async () => { + try { + // Setup: Create the actual target directory with content + const actualTargetDir = path.join(testDir, 'actual-target'); + await fs.mkdir(actualTargetDir, { recursive: true }); + const targetFile = path.join(actualTargetDir, 'file.txt'); + await fs.writeFile(targetFile, 'FILE_CONTENT'); + + // Setup: Create symlink directory that points to target + const symlinkDir = path.join(testDir, 'symlink-dir'); + await fs.symlink(actualTargetDir, 
symlinkDir); + + // Simulate resolved allowed directory (what the server startup should do) + const resolvedAllowedDir = await fs.realpath(symlinkDir); + const resolvedTargetDir = await fs.realpath(actualTargetDir); + expect(resolvedAllowedDir).toBe(resolvedTargetDir); + + // Test 1: File access through original symlink path should pass validation with resolved allowed dir + const fileViaSymlink = path.join(symlinkDir, 'file.txt'); + const resolvedFile = await fs.realpath(fileViaSymlink); + expect(isPathWithinAllowedDirectories(resolvedFile, [resolvedAllowedDir])).toBe(true); + + // Test 2: File access through resolved path should also pass validation + const fileViaResolved = path.join(resolvedTargetDir, 'file.txt'); + expect(isPathWithinAllowedDirectories(fileViaResolved, [resolvedAllowedDir])).toBe(true); + + // Test 3: Demonstrate inconsistent behavior with unresolved allowed directories + // If allowed dirs were not resolved (storing symlink paths instead): + const unresolvedAllowedDirs = [symlinkDir]; + // This validation would incorrectly fail for the same content: + expect(isPathWithinAllowedDirectories(resolvedFile, unresolvedAllowedDirs)).toBe(false); + + } catch (error) { + // Skip if no symlink permissions on the system + if ((error as NodeJS.ErrnoException).code !== 'EPERM') { + throw error; + } + } + }); + + it('resolves nested symlink chains completely', async () => { + try { + // Setup: Create target file in forbidden area + const actualTarget = path.join(forbiddenDir, 'target-file.txt'); + await fs.writeFile(actualTarget, 'FINAL_CONTENT'); + + // Create chain of symlinks: allowedFile -> link2 -> link1 -> actualTarget + const link1 = path.join(testDir, 'intermediate-link1'); + const link2 = path.join(testDir, 'intermediate-link2'); + const allowedFile = path.join(allowedDir, 'seemingly-safe-file'); + + await fs.symlink(actualTarget, link1); + await fs.symlink(link1, link2); + await fs.symlink(link2, allowedFile); + + // The allowed file path passes 
basic validation + expect(isPathWithinAllowedDirectories(allowedFile, [allowedDir])).toBe(true); + + // But complete resolution reveals the forbidden target + const fullyResolvedPath = await fs.realpath(allowedFile); + expect(isPathWithinAllowedDirectories(fullyResolvedPath, [allowedDir])).toBe(false); + expect(await fs.realpath(fullyResolvedPath)).toBe(await fs.realpath(actualTarget)); + + } catch (error) { + // Skip if no symlink permissions on the system + if ((error as NodeJS.ErrnoException).code !== 'EPERM') { + throw error; + } + } + }); + }); + + describe('Path Validation Race Condition Tests', () => { + let testDir: string; + let allowedDir: string; + let forbiddenDir: string; + let targetFile: string; + let testPath: string; + + beforeEach(async () => { + testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'race-test-')); + allowedDir = path.join(testDir, 'allowed'); + forbiddenDir = path.join(testDir, 'outside'); + targetFile = path.join(forbiddenDir, 'target.txt'); + testPath = path.join(allowedDir, 'test.txt'); + + await fs.mkdir(allowedDir, { recursive: true }); + await fs.mkdir(forbiddenDir, { recursive: true }); + await fs.writeFile(targetFile, 'ORIGINAL CONTENT', 'utf-8'); + }); + + afterEach(async () => { + await fs.rm(testDir, { recursive: true, force: true }); + }); + + it('validates non-existent file paths based on parent directory', async () => { + const allowed = [allowedDir]; + + expect(isPathWithinAllowedDirectories(testPath, allowed)).toBe(true); + await expect(fs.access(testPath)).rejects.toThrow(); + + const parentDir = path.dirname(testPath); + expect(isPathWithinAllowedDirectories(parentDir, allowed)).toBe(true); + }); + + it('demonstrates symlink race condition allows writing outside allowed directories', async () => { + const symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping symlink race condition test - symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + + await 
expect(fs.access(testPath)).rejects.toThrow(); + expect(isPathWithinAllowedDirectories(testPath, allowed)).toBe(true); + + await fs.symlink(targetFile, testPath); + await fs.writeFile(testPath, 'MODIFIED CONTENT', 'utf-8'); + + const targetContent = await fs.readFile(targetFile, 'utf-8'); + expect(targetContent).toBe('MODIFIED CONTENT'); + + const resolvedPath = await fs.realpath(testPath); + expect(isPathWithinAllowedDirectories(resolvedPath, allowed)).toBe(false); + }); + + it('shows timing differences between validation approaches', async () => { + const symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping timing validation test - symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + + const validation1 = isPathWithinAllowedDirectories(testPath, allowed); + expect(validation1).toBe(true); + + await fs.symlink(targetFile, testPath); + + const resolvedPath = await fs.realpath(testPath); + const validation2 = isPathWithinAllowedDirectories(resolvedPath, allowed); + expect(validation2).toBe(false); + + expect(validation1).not.toBe(validation2); + }); + + it('validates directory creation timing', async () => { + const symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping directory creation timing test - symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + const testDir = path.join(allowedDir, 'newdir'); + + expect(isPathWithinAllowedDirectories(testDir, allowed)).toBe(true); + + await fs.symlink(forbiddenDir, testDir); + + expect(isPathWithinAllowedDirectories(testDir, allowed)).toBe(true); + + const resolved = await fs.realpath(testDir); + expect(isPathWithinAllowedDirectories(resolved, allowed)).toBe(false); + }); + + it('demonstrates exclusive file creation behavior', async () => { + const symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping exclusive file creation test - 
symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + + await fs.symlink(targetFile, testPath); + + await expect(fs.open(testPath, 'wx')).rejects.toThrow(/EEXIST/); + + await fs.writeFile(testPath, 'NEW CONTENT', 'utf-8'); + const targetContent = await fs.readFile(targetFile, 'utf-8'); + expect(targetContent).toBe('NEW CONTENT'); + }); + + it('should use resolved parent paths for non-existent files', async () => { + const symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping resolved parent paths test - symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + + const symlinkDir = path.join(allowedDir, 'link'); + await fs.symlink(forbiddenDir, symlinkDir); + + const fileThroughSymlink = path.join(symlinkDir, 'newfile.txt'); + + expect(fileThroughSymlink.startsWith(allowedDir)).toBe(true); + + const parentDir = path.dirname(fileThroughSymlink); + const resolvedParent = await fs.realpath(parentDir); + expect(isPathWithinAllowedDirectories(resolvedParent, allowed)).toBe(false); + + const expectedSafePath = path.join(resolvedParent, path.basename(fileThroughSymlink)); + expect(isPathWithinAllowedDirectories(expectedSafePath, allowed)).toBe(false); + }); + + it('demonstrates parent directory symlink traversal', async () => { + const symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping parent directory symlink traversal test - symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + const deepPath = path.join(allowedDir, 'sub1', 'sub2', 'file.txt'); + + expect(isPathWithinAllowedDirectories(deepPath, allowed)).toBe(true); + + const sub1Path = path.join(allowedDir, 'sub1'); + await fs.symlink(forbiddenDir, sub1Path); + + await fs.mkdir(path.join(sub1Path, 'sub2'), { recursive: true }); + await fs.writeFile(deepPath, 'CONTENT', 'utf-8'); + + const realPath = await fs.realpath(deepPath); + const realAllowedDir = await 
fs.realpath(allowedDir); + const realForbiddenDir = await fs.realpath(forbiddenDir); + + expect(realPath.startsWith(realAllowedDir)).toBe(false); + expect(realPath.startsWith(realForbiddenDir)).toBe(true); + }); + + it('should prevent race condition between validatePath and file operation', async () => { + const symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping race condition prevention test - symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + const racePath = path.join(allowedDir, 'race-file.txt'); + const targetFile = path.join(forbiddenDir, 'target.txt'); + + await fs.writeFile(targetFile, 'ORIGINAL CONTENT', 'utf-8'); + + // Path validation would pass (file doesn't exist, parent is in allowed dir) + expect(await fs.access(racePath).then(() => false).catch(() => true)).toBe(true); + expect(isPathWithinAllowedDirectories(racePath, allowed)).toBe(true); + + // Race condition: symlink created after validation but before write + await fs.symlink(targetFile, racePath); + + // With exclusive write flag, write should fail on symlink + await expect( + fs.writeFile(racePath, 'NEW CONTENT', { encoding: 'utf-8', flag: 'wx' }) + ).rejects.toThrow(/EEXIST/); + + // Verify content unchanged + const targetContent = await fs.readFile(targetFile, 'utf-8'); + expect(targetContent).toBe('ORIGINAL CONTENT'); + + // The symlink exists but write was blocked + const actualWritePath = await fs.realpath(racePath); + expect(actualWritePath).toBe(await fs.realpath(targetFile)); + expect(isPathWithinAllowedDirectories(actualWritePath, allowed)).toBe(false); + }); + + it('should allow overwrites to legitimate files within allowed directories', async () => { + const allowed = [allowedDir]; + const legitFile = path.join(allowedDir, 'legit-file.txt'); + + // Create a legitimate file + await fs.writeFile(legitFile, 'ORIGINAL', 'utf-8'); + + // Opening with w should work for legitimate files + const fd = await 
fs.open(legitFile, 'w'); + try { + await fd.write('UPDATED', 0, 'utf-8'); + } finally { + await fd.close(); + } + + const content = await fs.readFile(legitFile, 'utf-8'); + expect(content).toBe('UPDATED'); + }); + + it('should handle symlinks that point within allowed directories', async () => { + const symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping symlinks within allowed directories test - symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + const targetFile = path.join(allowedDir, 'target.txt'); + const symlinkPath = path.join(allowedDir, 'symlink.txt'); + + // Create target file within allowed directory + await fs.writeFile(targetFile, 'TARGET CONTENT', 'utf-8'); + + // Create symlink pointing to allowed file + await fs.symlink(targetFile, symlinkPath); + + // Opening symlink with w follows it to the target + const fd = await fs.open(symlinkPath, 'w'); + try { + await fd.write('UPDATED VIA SYMLINK', 0, 'utf-8'); + } finally { + await fd.close(); + } + + // Both symlink and target should show updated content + const symlinkContent = await fs.readFile(symlinkPath, 'utf-8'); + const targetContent = await fs.readFile(targetFile, 'utf-8'); + expect(symlinkContent).toBe('UPDATED VIA SYMLINK'); + expect(targetContent).toBe('UPDATED VIA SYMLINK'); + }); + + it('should prevent overwriting files through symlinks pointing outside allowed directories', async () => { + const symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping symlink overwrite prevention test - symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + const legitFile = path.join(allowedDir, 'existing.txt'); + const targetFile = path.join(forbiddenDir, 'target.txt'); + + // Create a legitimate file first + await fs.writeFile(legitFile, 'LEGIT CONTENT', 'utf-8'); + + // Create target file in forbidden directory + await fs.writeFile(targetFile, 'FORBIDDEN CONTENT', 
'utf-8'); + + // Now replace the legitimate file with a symlink to forbidden location + await fs.unlink(legitFile); + await fs.symlink(targetFile, legitFile); + + // Simulate the server's validation logic + const stats = await fs.lstat(legitFile); + expect(stats.isSymbolicLink()).toBe(true); + + const realPath = await fs.realpath(legitFile); + expect(isPathWithinAllowedDirectories(realPath, allowed)).toBe(false); + + // With atomic rename, symlinks are replaced not followed + // So this test now demonstrates the protection + + // Verify content remains unchanged + const targetContent = await fs.readFile(targetFile, 'utf-8'); + expect(targetContent).toBe('FORBIDDEN CONTENT'); + }); + + it('demonstrates race condition in read operations', async () => { + const symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping race condition in read operations test - symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + const legitFile = path.join(allowedDir, 'readable.txt'); + const secretFile = path.join(forbiddenDir, 'secret.txt'); + + // Create legitimate file + await fs.writeFile(legitFile, 'PUBLIC CONTENT', 'utf-8'); + + // Create secret file in forbidden directory + await fs.writeFile(secretFile, 'SECRET CONTENT', 'utf-8'); + + // Step 1: validatePath would pass for legitimate file + expect(isPathWithinAllowedDirectories(legitFile, allowed)).toBe(true); + + // Step 2: Race condition - replace file with symlink after validation + await fs.unlink(legitFile); + await fs.symlink(secretFile, legitFile); + + // Step 3: Read operation follows symlink to forbidden location + const content = await fs.readFile(legitFile, 'utf-8'); + + // This shows the vulnerability - we read forbidden content + expect(content).toBe('SECRET CONTENT'); + expect(isPathWithinAllowedDirectories(await fs.realpath(legitFile), allowed)).toBe(false); + }); + + it('verifies rename does not follow symlinks', async () => { + const 
symlinkSupported = await getSymlinkSupport(); + if (!symlinkSupported) { + console.log(' ⏭️ Skipping rename symlink test - symlinks not supported'); + return; + } + + const allowed = [allowedDir]; + const tempFile = path.join(allowedDir, 'temp.txt'); + const targetSymlink = path.join(allowedDir, 'target-symlink.txt'); + const forbiddenTarget = path.join(forbiddenDir, 'forbidden-target.txt'); + + // Create forbidden target + await fs.writeFile(forbiddenTarget, 'ORIGINAL CONTENT', 'utf-8'); + + // Create symlink pointing to forbidden location + await fs.symlink(forbiddenTarget, targetSymlink); + + // Write temp file + await fs.writeFile(tempFile, 'NEW CONTENT', 'utf-8'); + + // Rename temp file to symlink path + await fs.rename(tempFile, targetSymlink); + + // Check what happened + const symlinkExists = await fs.lstat(targetSymlink).then(() => true).catch(() => false); + const isSymlink = symlinkExists && (await fs.lstat(targetSymlink)).isSymbolicLink(); + const targetContent = await fs.readFile(targetSymlink, 'utf-8'); + const forbiddenContent = await fs.readFile(forbiddenTarget, 'utf-8'); + + // Rename should replace the symlink with a regular file + expect(isSymlink).toBe(false); + expect(targetContent).toBe('NEW CONTENT'); + expect(forbiddenContent).toBe('ORIGINAL CONTENT'); // Unchanged + }); + }); +}); diff --git a/mcpServer/modules/filesystem/__tests__/roots-utils.test.ts b/mcpServer/modules/filesystem/__tests__/roots-utils.test.ts new file mode 100644 index 0000000..1a39483 --- /dev/null +++ b/mcpServer/modules/filesystem/__tests__/roots-utils.test.ts @@ -0,0 +1,84 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { getValidRootDirectories } from '../roots-utils.js'; +import { mkdtempSync, rmSync, mkdirSync, writeFileSync, realpathSync } from 'fs'; +import { tmpdir } from 'os'; +import { join } from 'path'; +import type { Root } from '@modelcontextprotocol/sdk/types.js'; + +describe('getValidRootDirectories', () => { + let 
testDir1: string; + let testDir2: string; + let testDir3: string; + let testFile: string; + + beforeEach(() => { + // Create test directories + testDir1 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test1-'))); + testDir2 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test2-'))); + testDir3 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test3-'))); + + // Create a test file (not a directory) + testFile = join(testDir1, 'test-file.txt'); + writeFileSync(testFile, 'test content'); + }); + + afterEach(() => { + // Cleanup + rmSync(testDir1, { recursive: true, force: true }); + rmSync(testDir2, { recursive: true, force: true }); + rmSync(testDir3, { recursive: true, force: true }); + }); + + describe('valid directory processing', () => { + it('should process all URI formats and edge cases', async () => { + const roots = [ + { uri: `file://${testDir1}`, name: 'File URI' }, + { uri: testDir2, name: 'Plain path' }, + { uri: testDir3 } // Plain path without name property + ]; + + const result = await getValidRootDirectories(roots); + + expect(result).toContain(testDir1); + expect(result).toContain(testDir2); + expect(result).toContain(testDir3); + expect(result).toHaveLength(3); + }); + + it('should normalize complex paths', async () => { + const subDir = join(testDir1, 'subdir'); + mkdirSync(subDir); + + const roots = [ + { uri: `file://${testDir1}/./subdir/../subdir`, name: 'Complex Path' } + ]; + + const result = await getValidRootDirectories(roots); + + expect(result).toHaveLength(1); + expect(result[0]).toBe(subDir); + }); + }); + + describe('error handling', () => { + + it('should handle various error types', async () => { + const nonExistentDir = join(tmpdir(), 'non-existent-directory-12345'); + const invalidPath = '\0invalid\0path'; // Null bytes cause different error types + const roots = [ + { uri: `file://${testDir1}`, name: 'Valid Dir' }, + { uri: `file://${nonExistentDir}`, name: 'Non-existent Dir' }, + { uri: `file://${testFile}`, name: 
'File Not Dir' }, + { uri: `file://${invalidPath}`, name: 'Invalid Path' } + ]; + + const result = await getValidRootDirectories(roots); + + expect(result).toContain(testDir1); + expect(result).not.toContain(nonExistentDir); + expect(result).not.toContain(testFile); + expect(result).not.toContain(invalidPath); + expect(result).toHaveLength(1); + }); + }); +}); \ No newline at end of file diff --git a/mcpServer/modules/filesystem/__tests__/startup-validation.test.ts b/mcpServer/modules/filesystem/__tests__/startup-validation.test.ts new file mode 100644 index 0000000..40938ed --- /dev/null +++ b/mcpServer/modules/filesystem/__tests__/startup-validation.test.ts @@ -0,0 +1,100 @@ +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { spawn } from 'child_process'; +import * as path from 'path'; +import * as fs from 'fs/promises'; +import * as os from 'os'; + +const SERVER_PATH = path.join(__dirname, '..', 'dist', 'index.js'); + +/** + * Spawns the filesystem server with given arguments and returns exit info + */ +async function spawnServer(args: string[], timeoutMs = 2000): Promise<{ exitCode: number | null; stderr: string }> { + return new Promise((resolve) => { + const proc = spawn('node', [SERVER_PATH, ...args], { + stdio: ['pipe', 'pipe', 'pipe'], + }); + + let stderr = ''; + proc.stderr?.on('data', (data) => { + stderr += data.toString(); + }); + + const timeout = setTimeout(() => { + proc.kill('SIGTERM'); + }, timeoutMs); + + proc.on('close', (code) => { + clearTimeout(timeout); + resolve({ exitCode: code, stderr }); + }); + + proc.on('error', (err) => { + clearTimeout(timeout); + resolve({ exitCode: 1, stderr: err.message }); + }); + }); +} + +describe('Startup Directory Validation', () => { + let testDir: string; + let accessibleDir: string; + let accessibleDir2: string; + + beforeEach(async () => { + testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'fs-startup-test-')); + accessibleDir = path.join(testDir, 'accessible'); + 
accessibleDir2 = path.join(testDir, 'accessible2');
    await fs.mkdir(accessibleDir, { recursive: true });
    await fs.mkdir(accessibleDir2, { recursive: true });
  });

  afterEach(async () => {
    await fs.rm(testDir, { recursive: true, force: true });
  });

  it('should start successfully with all accessible directories', async () => {
    const result = await spawnServer([accessibleDir, accessibleDir2]);
    // Server starts and runs (we kill it after timeout, so exit code is null or from SIGTERM)
    expect(result.stderr).toContain('Secure MCP Filesystem Server running on SSE');
    expect(result.stderr).not.toContain('Error:');
  });

  it('should skip inaccessible directory and continue with accessible one', async () => {
    const nonExistentDir = path.join(testDir, 'non-existent-dir-12345');

    const result = await spawnServer([nonExistentDir, accessibleDir]);

    // Should warn about inaccessible directory
    expect(result.stderr).toContain('Warning: Cannot access directory');
    expect(result.stderr).toContain(nonExistentDir);

    // Should still start successfully
    expect(result.stderr).toContain('Secure MCP Filesystem Server running on SSE');
  });

  it('should exit with error when ALL directories are inaccessible', async () => {
    const nonExistent1 = path.join(testDir, 'non-existent-1');
    const nonExistent2 = path.join(testDir, 'non-existent-2');

    const result = await spawnServer([nonExistent1, nonExistent2]);

    // Should exit with error
    expect(result.exitCode).toBe(1);
    expect(result.stderr).toContain('Error: None of the specified directories are accessible');
  });

  it('should warn when path is not a directory', async () => {
    const filePath = path.join(testDir, 'not-a-directory.txt');
    await fs.writeFile(filePath, 'content');

    const result = await spawnServer([filePath, accessibleDir]);

    // Should warn about non-directory
    expect(result.stderr).toContain('Warning:');
    expect(result.stderr).toContain('not a directory');

    // Should still start with the valid directory
    expect(result.stderr).toContain('Secure MCP Filesystem Server running on SSE');
  });
});

// ==== mcpServer/modules/filesystem/__tests__/structured-content.test.ts ====

import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'fs/promises';
import * as path from 'path';
import * as os from 'os';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
import { spawn } from 'child_process';

/**
 * Integration tests to verify that tool handlers return structuredContent
 * that matches the declared outputSchema.
 *
 * These tests address issues #3110, #3106, #3093 where tools were returning
 * structuredContent: { content: [contentBlock] } (array) instead of
 * structuredContent: { content: string } as declared in outputSchema.
+ */ +describe('structuredContent schema compliance', () => { + let client: Client; + let transport: StdioClientTransport; + let testDir: string; + + beforeEach(async () => { + // Create a temp directory for testing + testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'mcp-fs-test-')); + + // Create test files + await fs.writeFile(path.join(testDir, 'test.txt'), 'test content'); + await fs.mkdir(path.join(testDir, 'subdir')); + await fs.writeFile(path.join(testDir, 'subdir', 'nested.txt'), 'nested content'); + + // Start the MCP server + const serverPath = path.resolve(__dirname, '../dist/index.js'); + transport = new StdioClientTransport({ + command: 'node', + args: [serverPath, testDir], + }); + + client = new Client({ + name: 'test-client', + version: '1.0.0', + }, { + capabilities: {} + }); + + await client.connect(transport); + }); + + afterEach(async () => { + await client?.close(); + await fs.rm(testDir, { recursive: true, force: true }); + }); + + describe('directory_tree', () => { + it('should return structuredContent.content as a string, not an array', async () => { + const result = await client.callTool({ + name: 'directory_tree', + arguments: { path: testDir } + }); + + // The result should have structuredContent + expect(result.structuredContent).toBeDefined(); + + // structuredContent.content should be a string (matching outputSchema: { content: z.string() }) + const structuredContent = result.structuredContent as { content: unknown }; + expect(typeof structuredContent.content).toBe('string'); + + // It should NOT be an array + expect(Array.isArray(structuredContent.content)).toBe(false); + + // The content should be valid JSON representing the tree + const treeData = JSON.parse(structuredContent.content as string); + expect(Array.isArray(treeData)).toBe(true); + }); + }); + + describe('list_directory_with_sizes', () => { + it('should return structuredContent.content as a string, not an array', async () => { + const result = await client.callTool({ + 
name: 'list_directory_with_sizes', + arguments: { path: testDir } + }); + + // The result should have structuredContent + expect(result.structuredContent).toBeDefined(); + + // structuredContent.content should be a string (matching outputSchema: { content: z.string() }) + const structuredContent = result.structuredContent as { content: unknown }; + expect(typeof structuredContent.content).toBe('string'); + + // It should NOT be an array + expect(Array.isArray(structuredContent.content)).toBe(false); + + // The content should contain directory listing info + expect(structuredContent.content).toContain('[FILE]'); + }); + }); + + describe('move_file', () => { + it('should return structuredContent.content as a string, not an array', async () => { + const sourcePath = path.join(testDir, 'test.txt'); + const destPath = path.join(testDir, 'moved.txt'); + + const result = await client.callTool({ + name: 'move_file', + arguments: { + source: sourcePath, + destination: destPath + } + }); + + // The result should have structuredContent + expect(result.structuredContent).toBeDefined(); + + // structuredContent.content should be a string (matching outputSchema: { content: z.string() }) + const structuredContent = result.structuredContent as { content: unknown }; + expect(typeof structuredContent.content).toBe('string'); + + // It should NOT be an array + expect(Array.isArray(structuredContent.content)).toBe(false); + + // The content should contain success message + expect(structuredContent.content).toContain('Successfully moved'); + }); + }); + + describe('list_directory (control - already working)', () => { + it('should return structuredContent.content as a string', async () => { + const result = await client.callTool({ + name: 'list_directory', + arguments: { path: testDir } + }); + + expect(result.structuredContent).toBeDefined(); + + const structuredContent = result.structuredContent as { content: unknown }; + expect(typeof structuredContent.content).toBe('string'); + 
expect(Array.isArray(structuredContent.content)).toBe(false); + }); + }); + + describe('search_files (control - already working)', () => { + it('should return structuredContent.content as a string', async () => { + const result = await client.callTool({ + name: 'search_files', + arguments: { + path: testDir, + pattern: '*.txt' + } + }); + + expect(result.structuredContent).toBeDefined(); + + const structuredContent = result.structuredContent as { content: unknown }; + expect(typeof structuredContent.content).toBe('string'); + expect(Array.isArray(structuredContent.content)).toBe(false); + }); + }); +}); diff --git a/mcpServer/modules/filesystem/index.ts b/mcpServer/modules/filesystem/index.ts new file mode 100644 index 0000000..12485ba --- /dev/null +++ b/mcpServer/modules/filesystem/index.ts @@ -0,0 +1,673 @@ +#!/usr/bin/env node + +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; +import { CallToolResult } from "@modelcontextprotocol/sdk/types.js"; +import fs from "fs/promises"; +import { createReadStream } from "fs"; +import http from "http"; +import path from "path"; +import { URL } from "url"; +import { z } from "zod"; +import { minimatch } from "minimatch"; +import { + // Function imports + formatSize, + validatePath, + getFileStats, + readFileContent, + writeFileContent, + searchFilesWithValidation, + applyFileEdits, + tailFile, + headFile, + setAllowedDirectories, +} from './lib.js'; + +// Always use full filesystem (typical for container deployment) +const allowedDirectories: string[] = ["/"]; +setAllowedDirectories(allowedDirectories); + +// Schema definitions +const ReadTextFileArgsSchema = z.object({ + path: z.string(), + tail: z.number().optional().describe('If provided, returns only the last N lines of the file'), + head: z.number().optional().describe('If provided, returns only the first N lines of the file') +}); + +const ReadMediaFileArgsSchema = 
z.object({ + path: z.string() +}); + +const ReadMultipleFilesArgsSchema = z.object({ + paths: z + .array(z.string()) + .min(1, "At least one file path must be provided") + .describe("Array of file paths to read. Each path must be a string pointing to a valid file within allowed directories."), +}); + +const WriteFileArgsSchema = z.object({ + path: z.string(), + content: z.string(), +}); + +const EditOperation = z.object({ + oldText: z.string().describe('Text to search for - must match exactly'), + newText: z.string().describe('Text to replace with') +}); + +const EditFileArgsSchema = z.object({ + path: z.string(), + edits: z.array(EditOperation), + dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format') +}); + +const CreateDirectoryArgsSchema = z.object({ + path: z.string(), +}); + +const ListDirectoryArgsSchema = z.object({ + path: z.string(), +}); + +const ListDirectoryWithSizesArgsSchema = z.object({ + path: z.string(), + sortBy: z.enum(['name', 'size']).optional().default('name').describe('Sort entries by name or size'), +}); + +const DirectoryTreeArgsSchema = z.object({ + path: z.string(), + excludePatterns: z.array(z.string()).optional().default([]) +}); + +const MoveFileArgsSchema = z.object({ + source: z.string(), + destination: z.string(), +}); + +const SearchFilesArgsSchema = z.object({ + path: z.string(), + pattern: z.string(), + excludePatterns: z.array(z.string()).optional().default([]) +}); + +const GetFileInfoArgsSchema = z.object({ + path: z.string(), +}); + +// Server setup +const server = new McpServer( + { + name: "secure-filesystem-server", + version: "0.2.0", + } +); + +// Reads a file as a stream of buffers, concatenates them, and then encodes +// the result to a Base64 string. This is a memory-efficient way to handle +// binary data from a stream before the final encoding. 
async function readFileAsBase64Stream(filePath: string): Promise<string> {
  // NOTE(review): generic parameters were stripped in the mangled patch
  // (bare `Promise`); restored here as Promise<string>.
  return new Promise<string>((resolve, reject) => {
    const stream = createReadStream(filePath);
    const chunks: Buffer[] = [];
    stream.on('data', (chunk) => {
      chunks.push(chunk as Buffer);
    });
    stream.on('end', () => {
      const finalBuffer = Buffer.concat(chunks);
      resolve(finalBuffer.toString('base64'));
    });
    stream.on('error', (err) => reject(err));
  });
}

// Tool registrations

// read_file (deprecated) and read_text_file
const readTextFileHandler = async (args: z.infer<typeof ReadTextFileArgsSchema>) => {
  const validPath = await validatePath(args.path);

  if (args.head && args.tail) {
    throw new Error("Cannot specify both head and tail parameters simultaneously");
  }

  let content: string;
  if (args.tail) {
    content = await tailFile(validPath, args.tail);
  } else if (args.head) {
    content = await headFile(validPath, args.head);
  } else {
    content = await readFileContent(validPath);
  }

  return {
    content: [{ type: "text" as const, text: content }],
    structuredContent: { content }
  };
};

server.registerTool(
  "read_file",
  {
    title: "Read File (Deprecated)",
    description: "Read the complete contents of a file as text. DEPRECATED: Use read_text_file instead.",
    inputSchema: ReadTextFileArgsSchema.shape,
    outputSchema: { content: z.string() },
    annotations: { readOnlyHint: true }
  },
  readTextFileHandler
);

server.registerTool(
  "read_text_file",
  {
    title: "Read Text File",
    description:
      "Read the complete contents of a file from the file system as text. " +
      "Handles various text encodings and provides detailed error messages " +
      "if the file cannot be read. Use this tool when you need to examine " +
      "the contents of a single file. Use the 'head' parameter to read only " +
      "the first N lines of a file, or the 'tail' parameter to read only " +
      "the last N lines of a file. Operates on the file as text regardless of extension. " +
      "Only works within allowed directories.",
    inputSchema: {
      path: z.string(),
      tail: z.number().optional().describe("If provided, returns only the last N lines of the file"),
      head: z.number().optional().describe("If provided, returns only the first N lines of the file")
    },
    outputSchema: { content: z.string() },
    annotations: { readOnlyHint: true }
  },
  readTextFileHandler
);

server.registerTool(
  "read_media_file",
  {
    title: "Read Media File",
    description:
      "Read an image or audio file. Returns the base64 encoded data and MIME type. " +
      "Only works within allowed directories.",
    inputSchema: {
      path: z.string()
    },
    outputSchema: {
      content: z.array(z.object({
        type: z.enum(["image", "audio", "blob"]),
        data: z.string(),
        mimeType: z.string()
      }))
    },
    annotations: { readOnlyHint: true }
  },
  async (args: z.infer<typeof ReadMediaFileArgsSchema>) => {
    const validPath = await validatePath(args.path);
    const extension = path.extname(validPath).toLowerCase();
    const mimeTypes: Record<string, string> = {
      ".png": "image/png",
      ".jpg": "image/jpeg",
      ".jpeg": "image/jpeg",
      ".gif": "image/gif",
      ".webp": "image/webp",
      ".bmp": "image/bmp",
      ".svg": "image/svg+xml",
      ".mp3": "audio/mpeg",
      ".wav": "audio/wav",
      ".ogg": "audio/ogg",
      ".flac": "audio/flac",
    };
    const mimeType = mimeTypes[extension] || "application/octet-stream";
    const data = await readFileAsBase64Stream(validPath);

    const type = mimeType.startsWith("image/")
      ? "image"
      : mimeType.startsWith("audio/")
        ?
"audio" + // Fallback for other binary types, not officially supported by the spec but has been used for some time + : "blob"; + const contentItem = { type: type as 'image' | 'audio' | 'blob', data, mimeType }; + return { + content: [contentItem], + structuredContent: { content: [contentItem] } + } as unknown as CallToolResult; + } +); + +server.registerTool( + "read_multiple_files", + { + title: "Read Multiple Files", + description: + "Read the contents of multiple files simultaneously. This is more " + + "efficient than reading files one by one when you need to analyze " + + "or compare multiple files. Each file's content is returned with its " + + "path as a reference. Failed reads for individual files won't stop " + + "the entire operation. Only works within allowed directories.", + inputSchema: { + paths: z.array(z.string()) + .min(1) + .describe("Array of file paths to read. Each path must be a string pointing to a valid file within allowed directories.") + }, + outputSchema: { content: z.string() }, + annotations: { readOnlyHint: true } + }, + async (args: z.infer) => { + const results = await Promise.all( + args.paths.map(async (filePath: string) => { + try { + const validPath = await validatePath(filePath); + const content = await readFileContent(validPath); + return `${filePath}:\n${content}\n`; + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error); + return `${filePath}: Error - ${errorMessage}`; + } + }), + ); + const text = results.join("\n---\n"); + return { + content: [{ type: "text" as const, text }], + structuredContent: { content: text } + }; + } +); + +server.registerTool( + "write_file", + { + title: "Write File", + description: + "Create a new file or completely overwrite an existing file with new content. " + + "Use with caution as it will overwrite existing files without warning. " + + "Handles text content with proper encoding. 
Only works within allowed directories.", + inputSchema: { + path: z.string(), + content: z.string() + }, + outputSchema: { content: z.string() }, + annotations: { readOnlyHint: false, idempotentHint: true, destructiveHint: true } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + await writeFileContent(validPath, args.content); + const text = `Successfully wrote to ${args.path}`; + return { + content: [{ type: "text" as const, text }], + structuredContent: { content: text } + }; + } +); + +server.registerTool( + "edit_file", + { + title: "Edit File", + description: + "Make line-based edits to a text file. Each edit replaces exact line sequences " + + "with new content. Returns a git-style diff showing the changes made. " + + "Only works within allowed directories.", + inputSchema: { + path: z.string(), + edits: z.array(z.object({ + oldText: z.string().describe("Text to search for - must match exactly"), + newText: z.string().describe("Text to replace with") + })), + dryRun: z.boolean().default(false).describe("Preview changes using git-style diff format") + }, + outputSchema: { content: z.string() }, + annotations: { readOnlyHint: false, idempotentHint: false, destructiveHint: true } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const result = await applyFileEdits(validPath, args.edits, args.dryRun); + return { + content: [{ type: "text" as const, text: result }], + structuredContent: { content: result } + }; + } +); + +server.registerTool( + "create_directory", + { + title: "Create Directory", + description: + "Create a new directory or ensure a directory exists. Can create multiple " + + "nested directories in one operation. If the directory already exists, " + + "this operation will succeed silently. Perfect for setting up directory " + + "structures for projects or ensuring required paths exist. 
Only works within allowed directories.", + inputSchema: { + path: z.string() + }, + outputSchema: { content: z.string() }, + annotations: { readOnlyHint: false, idempotentHint: true, destructiveHint: false } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + await fs.mkdir(validPath, { recursive: true }); + const text = `Successfully created directory ${args.path}`; + return { + content: [{ type: "text" as const, text }], + structuredContent: { content: text } + }; + } +); + +server.registerTool( + "list_directory", + { + title: "List Directory", + description: + "Get a detailed listing of all files and directories in a specified path. " + + "Results clearly distinguish between files and directories with [FILE] and [DIR] " + + "prefixes. This tool is essential for understanding directory structure and " + + "finding specific files within a directory. Only works within allowed directories.", + inputSchema: { + path: z.string() + }, + outputSchema: { content: z.string() }, + annotations: { readOnlyHint: true } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const entries = await fs.readdir(validPath, { withFileTypes: true }); + const formatted = entries + .map((entry) => `${entry.isDirectory() ? "[DIR]" : "[FILE]"} ${entry.name}`) + .join("\n"); + return { + content: [{ type: "text" as const, text: formatted }], + structuredContent: { content: formatted } + }; + } +); + +server.registerTool( + "list_directory_with_sizes", + { + title: "List Directory with Sizes", + description: + "Get a detailed listing of all files and directories in a specified path, including sizes. " + + "Results clearly distinguish between files and directories with [FILE] and [DIR] " + + "prefixes. This tool is useful for understanding directory structure and " + + "finding specific files within a directory. 
Only works within allowed directories.", + inputSchema: { + path: z.string(), + sortBy: z.enum(["name", "size"]).optional().default("name").describe("Sort entries by name or size") + }, + outputSchema: { content: z.string() }, + annotations: { readOnlyHint: true } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const entries = await fs.readdir(validPath, { withFileTypes: true }); + + // Get detailed information for each entry + const detailedEntries = await Promise.all( + entries.map(async (entry) => { + const entryPath = path.join(validPath, entry.name); + try { + const stats = await fs.stat(entryPath); + return { + name: entry.name, + isDirectory: entry.isDirectory(), + size: stats.size, + mtime: stats.mtime + }; + } catch (error) { + return { + name: entry.name, + isDirectory: entry.isDirectory(), + size: 0, + mtime: new Date(0) + }; + } + }) + ); + + // Sort entries based on sortBy parameter + const sortedEntries = [...detailedEntries].sort((a, b) => { + if (args.sortBy === 'size') { + return b.size - a.size; // Descending by size + } + // Default sort by name + return a.name.localeCompare(b.name); + }); + + // Format the output + const formattedEntries = sortedEntries.map(entry => + `${entry.isDirectory ? "[DIR]" : "[FILE]"} ${entry.name.padEnd(30)} ${ + entry.isDirectory ? "" : formatSize(entry.size).padStart(10) + }` + ); + + // Add summary + const totalFiles = detailedEntries.filter(e => !e.isDirectory).length; + const totalDirs = detailedEntries.filter(e => e.isDirectory).length; + const totalSize = detailedEntries.reduce((sum, entry) => sum + (entry.isDirectory ? 
0 : entry.size), 0); + + const summary = [ + "", + `Total: ${totalFiles} files, ${totalDirs} directories`, + `Combined size: ${formatSize(totalSize)}` + ]; + + const text = [...formattedEntries, ...summary].join("\n"); + const contentBlock = { type: "text" as const, text }; + return { + content: [contentBlock], + structuredContent: { content: text } + }; + } +); + +server.registerTool( + "directory_tree", + { + title: "Directory Tree", + description: + "Get a recursive tree view of files and directories as a JSON structure. " + + "Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " + + "Files have no children array, while directories always have a children array (which may be empty). " + + "The output is formatted with 2-space indentation for readability. Only works within allowed directories.", + inputSchema: { + path: z.string(), + excludePatterns: z.array(z.string()).optional().default([]) + }, + outputSchema: { content: z.string() }, + annotations: { readOnlyHint: true } + }, + async (args: z.infer) => { + interface TreeEntry { + name: string; + type: 'file' | 'directory'; + children?: TreeEntry[]; + } + const rootPath = args.path; + + async function buildTree(currentPath: string, excludePatterns: string[] = []): Promise { + const validPath = await validatePath(currentPath); + const entries = await fs.readdir(validPath, { withFileTypes: true }); + const result: TreeEntry[] = []; + + for (const entry of entries) { + const relativePath = path.relative(rootPath, path.join(currentPath, entry.name)); + const shouldExclude = excludePatterns.some(pattern => { + if (pattern.includes('*')) { + return minimatch(relativePath, pattern, { dot: true }); + } + // For files: match exact name or as part of path + // For directories: match as directory path + return minimatch(relativePath, pattern, { dot: true }) || + minimatch(relativePath, `**/${pattern}`, { dot: true }) || + minimatch(relativePath, `**/${pattern}/**`, { dot: true }); + }); + 
if (shouldExclude) + continue; + + const entryData: TreeEntry = { + name: entry.name, + type: entry.isDirectory() ? 'directory' : 'file' + }; + + if (entry.isDirectory()) { + const subPath = path.join(currentPath, entry.name); + entryData.children = await buildTree(subPath, excludePatterns); + } + + result.push(entryData); + } + + return result; + } + + const treeData = await buildTree(rootPath, args.excludePatterns); + const text = JSON.stringify(treeData, null, 2); + const contentBlock = { type: "text" as const, text }; + return { + content: [contentBlock], + structuredContent: { content: text } + }; + } +); + +server.registerTool( + "move_file", + { + title: "Move File", + description: + "Move or rename files and directories. Can move files between directories " + + "and rename them in a single operation. If the destination exists, the " + + "operation will fail. Works across different directories and can be used " + + "for simple renaming within the same directory. Both source and destination must be within allowed directories.", + inputSchema: { + source: z.string(), + destination: z.string() + }, + outputSchema: { content: z.string() }, + annotations: { readOnlyHint: false, idempotentHint: false, destructiveHint: false } + }, + async (args: z.infer) => { + const validSourcePath = await validatePath(args.source); + const validDestPath = await validatePath(args.destination); + await fs.rename(validSourcePath, validDestPath); + const text = `Successfully moved ${args.source} to ${args.destination}`; + const contentBlock = { type: "text" as const, text }; + return { + content: [contentBlock], + structuredContent: { content: text } + }; + } +); + +server.registerTool( + "search_files", + { + title: "Search Files", + description: + "Recursively search for files and directories matching a pattern. " + + "The patterns should be glob-style patterns that match paths relative to the working directory. 
" + + "Use pattern like '*.ext' to match files in current directory, and '**/*.ext' to match files in all subdirectories. " + + "Returns full paths to all matching items. Great for finding files when you don't know their exact location. " + + "Only searches within allowed directories.", + inputSchema: { + path: z.string(), + pattern: z.string(), + excludePatterns: z.array(z.string()).optional().default([]) + }, + outputSchema: { content: z.string() }, + annotations: { readOnlyHint: true } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const results = await searchFilesWithValidation(validPath, args.pattern, allowedDirectories, { excludePatterns: args.excludePatterns }); + const text = results.length > 0 ? results.join("\n") : "No matches found"; + return { + content: [{ type: "text" as const, text }], + structuredContent: { content: text } + }; + } +); + +server.registerTool( + "get_file_info", + { + title: "Get File Info", + description: + "Retrieve detailed metadata about a file or directory. Returns comprehensive " + + "information including size, creation time, last modified time, permissions, " + + "and type. This tool is perfect for understanding file characteristics " + + "without reading the actual content. Only works within allowed directories.", + inputSchema: { + path: z.string() + }, + outputSchema: { content: z.string() }, + annotations: { readOnlyHint: true } + }, + async (args: z.infer) => { + const validPath = await validatePath(args.path); + const info = await getFileStats(validPath); + const text = Object.entries(info) + .map(([key, value]) => `${key}: ${value}`) + .join("\n"); + return { + content: [{ type: "text" as const, text }], + structuredContent: { content: text } + }; + } +); + +// SSE transport session routing (sessionId -> transport) +const sseTransportsBySessionId = new Map(); + +function runServer() { + const port = Number(process.env.MCP_PORT ?? process.env.SSE_PORT ?? 
3000); + + const httpServer = http.createServer(async (req, res) => { + const url = new URL(req.url ?? "/", `http://${req.headers.host ?? "localhost"}`); + const pathname = url.pathname; + + if (req.method === "GET" && (pathname === "/sse" || pathname === "/")) { + try { + const transport = new SSEServerTransport("/messages", res); + sseTransportsBySessionId.set(transport.sessionId, transport); + transport.onclose = () => { + sseTransportsBySessionId.delete(transport.sessionId); + }; + await server.connect(transport); + console.error("Secure MCP Filesystem Server: new SSE client connected"); + } catch (error) { + console.error("SSE connection error:", error); + if (!res.headersSent) { + res.writeHead(500).end("Internal server error"); + } + } + return; + } + + if (req.method === "POST" && pathname === "/messages") { + const sessionId = url.searchParams.get("sessionId"); + if (!sessionId) { + res.writeHead(400).end("Missing sessionId query parameter"); + return; + } + const transport = sseTransportsBySessionId.get(sessionId); + if (!transport) { + res.writeHead(404).end("Unknown session"); + return; + } + await transport.handlePostMessage(req, res); + return; + } + + res.writeHead(404).end("Not found"); + }); + + httpServer.listen(port, () => { + console.error(`Secure MCP Filesystem Server running on SSE at http://localhost:${port}`); + console.error(" GET /sse – open SSE stream (then POST to /messages?sessionId=...)"); + console.error(" POST /messages?sessionId= – send MCP messages"); + console.error(" Allowed directory: / (full filesystem)"); + }); +} + +runServer(); diff --git a/mcpServer/modules/filesystem/lib.ts b/mcpServer/modules/filesystem/lib.ts new file mode 100644 index 0000000..17e4654 --- /dev/null +++ b/mcpServer/modules/filesystem/lib.ts @@ -0,0 +1,415 @@ +import fs from "fs/promises"; +import path from "path"; +import os from 'os'; +import { randomBytes } from 'crypto'; +import { diffLines, createTwoFilesPatch } from 'diff'; +import { minimatch } 
from 'minimatch';
import { normalizePath, expandHome } from './path-utils.js';
import { isPathWithinAllowedDirectories } from './path-validation.js';

// Global allowed directories - set by the main module.
// Module-level state shared by validatePath and the relative-path resolver below.
let allowedDirectories: string[] = [];

/**
 * Replaces the allowed-directory list. Copies the input array so later
 * mutation by the caller cannot affect validation.
 */
export function setAllowedDirectories(directories: string[]): void {
  allowedDirectories = [...directories];
}

/** Returns a defensive copy of the current allowed-directory list. */
export function getAllowedDirectories(): string[] {
  return [...allowedDirectories];
}

// Type definitions

// Shape returned by getFileStats (see below): raw fs.Stats fields plus a
// 3-digit octal permission string.
interface FileInfo {
  size: number;
  created: Date;
  modified: Date;
  accessed: Date;
  isDirectory: boolean;
  isFile: boolean;
  permissions: string;
}

export interface SearchOptions {
  excludePatterns?: string[];
}

export interface SearchResult {
  path: string;
  isDirectory: boolean;
}

// Pure Utility Functions

/**
 * Formats a byte count as a human-readable size string.
 *
 * @param bytes - size in bytes
 * @returns e.g. "0 B", "512 B", "1.00 KB", "2.35 MB"; values >= 1 TB stay in TB.
 */
export function formatSize(bytes: number): string {
  const units = ['B', 'KB', 'MB', 'GB', 'TB'];
  if (bytes === 0) return '0 B';

  // Fix: report anything below 1 KB (and any negative/NaN input, for which
  // Math.log would produce NaN and the original code printed "NaN undefined")
  // in plain bytes. For valid sizes this matches the previous output exactly.
  if (!(bytes >= 1024)) return `${bytes} ${units[0]}`;

  const i = Math.floor(Math.log(bytes) / Math.log(1024));

  // Clamp so sizes beyond TB still use the largest known unit.
  const unitIndex = Math.min(i, units.length - 1);
  return `${(bytes / Math.pow(1024, unitIndex)).toFixed(2)} ${units[unitIndex]}`;
}

/** Converts CRLF line endings to LF; lone CR is left untouched. */
export function normalizeLineEndings(text: string): string {
  return text.replace(/\r\n/g, '\n');
}

/**
 * Produces a unified diff between two file contents.
 * Both sides are CRLF-normalized first so line-ending differences
 * do not show up as spurious hunks.
 */
export function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
  // Ensure consistent line endings for diff
  const normalizedOriginal = normalizeLineEndings(originalContent);
  const normalizedNew = normalizeLineEndings(newContent);

  return createTwoFilesPatch(
    filepath,
    filepath,
    normalizedOriginal,
    normalizedNew,
    'original',
    'modified'
  );
}

// Helper function to resolve relative paths against allowed directories
function resolveRelativePathAgainstAllowedDirectories(relativePath: string): string
{
  if (allowedDirectories.length === 0) {
    // Fallback to process.cwd() if no allowed directories are set
    return path.resolve(process.cwd(), relativePath);
  }

  // Try to resolve relative path against each allowed directory,
  // taking the first candidate that lands inside the sandbox.
  for (const allowedDir of allowedDirectories) {
    const candidate = path.resolve(allowedDir, relativePath);
    const normalizedCandidate = normalizePath(candidate);

    // Check if the resulting path lies within any allowed directory
    if (isPathWithinAllowedDirectories(normalizedCandidate, allowedDirectories)) {
      return candidate;
    }
  }

  // If no valid resolution found, use the first allowed directory as base
  // This provides a consistent fallback behavior (the result will then be
  // rejected by validatePath's containment check below if it escapes).
  return path.resolve(allowedDirectories[0], relativePath);
}

// Security & Validation Functions

/**
 * Resolves a requested path (absolute, relative, or ~-prefixed) and verifies
 * it lies inside the configured allowed directories, following symlinks.
 *
 * @param requestedPath - path as supplied by the client
 * @returns the real (symlink-resolved) path for existing files, or the
 *          resolved absolute path for not-yet-existing files whose parent
 *          directory passes validation
 * @throws Error when the path or its symlink target is outside all allowed
 *         directories, or when the parent directory of a new file is missing
 */
export async function validatePath(requestedPath: string): Promise {
  const expandedPath = expandHome(requestedPath);
  // Relative inputs are anchored to an allowed directory, never the raw CWD
  // (unless no allowed directories are configured — see helper above).
  const absolute = path.isAbsolute(expandedPath)
    ? path.resolve(expandedPath)
    : resolveRelativePathAgainstAllowedDirectories(expandedPath);

  const normalizedRequested = normalizePath(absolute);

  // Security: Check if path is within allowed directories before any file operations
  const isAllowed = isPathWithinAllowedDirectories(normalizedRequested, allowedDirectories);
  if (!isAllowed) {
    throw new Error(`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`);
  }

  // Security: Handle symlinks by checking their real path to prevent symlink attacks
  // This prevents attackers from creating symlinks that point outside allowed directories
  try {
    const realPath = await fs.realpath(absolute);
    const normalizedReal = normalizePath(realPath);
    if (!isPathWithinAllowedDirectories(normalizedReal, allowedDirectories)) {
      throw new Error(`Access denied - symlink target outside allowed directories: ${realPath} not in ${allowedDirectories.join(', ')}`);
    }
    return realPath;
  } catch (error) {
    //
Security: For new files that don't exist yet, verify parent directory
    // This ensures we can't create files in unauthorized locations
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      const parentDir = path.dirname(absolute);
      try {
        const realParentPath = await fs.realpath(parentDir);
        const normalizedParent = normalizePath(realParentPath);
        if (!isPathWithinAllowedDirectories(normalizedParent, allowedDirectories)) {
          throw new Error(`Access denied - parent directory outside allowed directories: ${realParentPath} not in ${allowedDirectories.join(', ')}`);
        }
        return absolute;
      } catch {
        // NOTE(review): this bare catch also swallows the access-denied Error
        // thrown just above and re-reports it as a missing parent directory.
        // Access is still denied either way, but the message is misleading —
        // confirm whether the denial should propagate unchanged.
        throw new Error(`Parent directory does not exist: ${parentDir}`);
      }
    }
    throw error;
  }
}


// File Operations

/**
 * Returns stat-based metadata for a path.
 * `permissions` is the low 3 octal digits of st_mode (e.g. "644").
 */
export async function getFileStats(filePath: string): Promise {
  const stats = await fs.stat(filePath);
  return {
    size: stats.size,
    created: stats.birthtime,
    modified: stats.mtime,
    accessed: stats.atime,
    isDirectory: stats.isDirectory(),
    isFile: stats.isFile(),
    permissions: stats.mode.toString(8).slice(-3),
  };
}

/** Reads a whole file as text; encoding defaults to utf-8. */
export async function readFileContent(filePath: string, encoding: string = 'utf-8'): Promise {
  return await fs.readFile(filePath, encoding as BufferEncoding);
}

/**
 * Writes text to a file with symlink-safe semantics: exclusive create for new
 * files, atomic temp-file + rename for existing ones.
 */
export async function writeFileContent(filePath: string, content: string): Promise {
  try {
    // Security: 'wx' flag ensures exclusive creation - fails if file/symlink exists,
    // preventing writes through pre-existing symlinks
    await fs.writeFile(filePath, content, { encoding: "utf-8", flag: 'wx' });
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code === 'EEXIST') {
      // Security: Use atomic rename to prevent race conditions where symlinks
      // could be created between validation and write. Rename operations
      // replace the target file atomically and don't follow symlinks.
const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`;
      try {
        await fs.writeFile(tempPath, content, 'utf-8');
        await fs.rename(tempPath, filePath);
      } catch (renameError) {
        // Best-effort cleanup of the temp file; the original write error wins.
        try {
          await fs.unlink(tempPath);
        } catch {}
        throw renameError;
      }
    } else {
      throw error;
    }
  }
}


// File Editing Functions

// One search/replace operation: oldText is matched exactly first, then
// line-by-line with whitespace tolerance (see applyFileEdits).
interface FileEdit {
  oldText: string;
  newText: string;
}

/**
 * Applies a sequence of text edits to a file and returns a fenced unified diff
 * of the overall change.
 *
 * Matching strategy per edit: exact substring match first; otherwise a
 * line-by-line comparison that ignores leading/trailing whitespace and
 * re-indents the replacement to match the original.
 *
 * @param filePath - file to edit (content is CRLF-normalized on read)
 * @param edits - ordered edits; each is applied to the result of the previous
 * @param dryRun - when true, computes the diff without writing the file
 * @returns the diff wrapped in a ```diff fence
 * @throws Error when an edit's oldText cannot be located
 */
export async function applyFileEdits(
  filePath: string,
  edits: FileEdit[],
  dryRun: boolean = false
): Promise {
  // Read file content and normalize line endings
  const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'));

  // Apply edits sequentially
  let modifiedContent = content;
  for (const edit of edits) {
    const normalizedOld = normalizeLineEndings(edit.oldText);
    const normalizedNew = normalizeLineEndings(edit.newText);

    // If exact match exists, use it (replaces the first occurrence only)
    if (modifiedContent.includes(normalizedOld)) {
      modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
      continue;
    }

    // Otherwise, try line-by-line matching with flexibility for whitespace
    const oldLines = normalizedOld.split('\n');
    const contentLines = modifiedContent.split('\n');
    let matchFound = false;

    for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
      const potentialMatch = contentLines.slice(i, i + oldLines.length);

      // Compare lines with normalized whitespace
      const isMatch = oldLines.every((oldLine, j) => {
        const contentLine = potentialMatch[j];
        return oldLine.trim() === contentLine.trim();
      });

      if (isMatch) {
        // Preserve original indentation of first line
        const originalIndent = contentLines[i].match(/^\s*/)?.[0] || '';
        const newLines = normalizedNew.split('\n').map((line, j) => {
          if (j === 0) return originalIndent + line.trimStart();
          // For subsequent lines, try to preserve relative indentation
          const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || '';
          const newIndent =
line.match(/^\s*/)?.[0] || '';
          // Shift the replacement line by the same amount its indentation
          // changed relative to the matched oldText line (never negative).
          if (oldIndent && newIndent) {
            const relativeIndent = newIndent.length - oldIndent.length;
            return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart();
          }
          return line;
        });

        contentLines.splice(i, oldLines.length, ...newLines);
        modifiedContent = contentLines.join('\n');
        matchFound = true;
        break;
      }
    }

    if (!matchFound) {
      throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
    }
  }

  // Create unified diff
  const diff = createUnifiedDiff(content, modifiedContent, filePath);

  // Format diff with appropriate number of backticks: grow the fence until it
  // cannot collide with a backtick run inside the diff body itself.
  let numBackticks = 3;
  while (diff.includes('`'.repeat(numBackticks))) {
    numBackticks++;
  }
  const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;

  if (!dryRun) {
    // Security: Use atomic rename to prevent race conditions where symlinks
    // could be created between validation and write. Rename operations
    // replace the target file atomically and don't follow symlinks.
const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`;
    try {
      await fs.writeFile(tempPath, modifiedContent, 'utf-8');
      await fs.rename(tempPath, filePath);
    } catch (error) {
      // Best-effort cleanup of the temp file; the original error propagates.
      try {
        await fs.unlink(tempPath);
      } catch {}
      throw error;
    }
  }

  return formattedDiff;
}

// Memory-efficient implementation to get the last N lines of a file.
// Reads fixed-size chunks backwards from EOF so only ~numLines of text is
// ever held in memory, regardless of file size.
export async function tailFile(filePath: string, numLines: number): Promise {
  const CHUNK_SIZE = 1024; // Read 1KB at a time
  const stats = await fs.stat(filePath);
  const fileSize = stats.size;

  if (fileSize === 0) return '';

  // Open file for reading
  const fileHandle = await fs.open(filePath, 'r');
  try {
    const lines: string[] = [];
    let position = fileSize;
    let chunk = Buffer.alloc(CHUNK_SIZE);
    let linesFound = 0;
    let remainingText = '';

    // Read chunks from the end of the file until we have enough lines
    // NOTE(review): chunk.toString('utf-8') at an arbitrary byte offset can
    // split a multi-byte UTF-8 character and yield U+FFFD — confirm whether
    // callers only tail ASCII/latin logs or whether this needs a StringDecoder.
    while (position > 0 && linesFound < numLines) {
      const size = Math.min(CHUNK_SIZE, position);
      position -= size;

      const { bytesRead } = await fileHandle.read(chunk, 0, size, position);
      if (!bytesRead) break;

      // Get the chunk as a string and prepend any remaining text from previous iteration
      const readData = chunk.slice(0, bytesRead).toString('utf-8');
      const chunkText = readData + remainingText;

      // Split by newlines and count
      const chunkLines = normalizeLineEndings(chunkText).split('\n');

      // If this isn't the end of the file, the first line is likely incomplete
      // Save it to prepend to the next chunk
      if (position > 0) {
        remainingText = chunkLines[0];
        chunkLines.shift(); // Remove the first (incomplete) line
      }

      // Add lines to our result (up to the number we need), newest-read first
      // so unshift keeps them in file order.
      for (let i = chunkLines.length - 1; i >= 0 && linesFound < numLines; i--) {
        lines.unshift(chunkLines[i]);
        linesFound++;
      }
    }

    return lines.join('\n');
  } finally {
    await fileHandle.close();
  }
}

// New function to get the first N lines of a file
export
async function headFile(filePath: string, numLines: number): Promise {
  // Reads the file sequentially in 1KB chunks, stopping as soon as numLines
  // complete lines have been collected — avoids loading large files entirely.
  const fileHandle = await fs.open(filePath, 'r');
  try {
    const lines: string[] = [];
    let buffer = '';
    let bytesRead = 0;
    const chunk = Buffer.alloc(1024); // 1KB buffer

    // Read chunks and count lines until we have enough or reach EOF
    while (lines.length < numLines) {
      const result = await fileHandle.read(chunk, 0, chunk.length, bytesRead);
      if (result.bytesRead === 0) break; // End of file
      bytesRead += result.bytesRead;
      buffer += chunk.slice(0, result.bytesRead).toString('utf-8');
      // Fix: normalize CRLF to LF so returned lines never carry a trailing
      // '\r'. This mirrors tailFile, which normalizes via normalizeLineEndings;
      // previously headFile skipped this step and behaved inconsistently on
      // CRLF files. Normalizing after the append rejoins a CR/LF pair that
      // was split across two chunk reads before it can be mis-split below.
      buffer = buffer.replace(/\r\n/g, '\n');

      // Flush every complete line currently in the buffer; the partial tail
      // after the last '\n' stays buffered for the next read.
      const newLineIndex = buffer.lastIndexOf('\n');
      if (newLineIndex !== -1) {
        const completeLines = buffer.slice(0, newLineIndex).split('\n');
        buffer = buffer.slice(newLineIndex + 1);
        for (const line of completeLines) {
          lines.push(line);
          if (lines.length >= numLines) break;
        }
      }
    }

    // If there is leftover content (final line without trailing newline)
    // and we still need lines, add it
    if (buffer.length > 0 && lines.length < numLines) {
      lines.push(buffer);
    }

    return lines.join('\n');
  } finally {
    await fileHandle.close();
  }
}

/**
 * Recursively searches rootPath for entries whose path (relative to rootPath)
 * matches a glob pattern, skipping excluded globs and anything that fails
 * validatePath (i.e. entries outside the allowed directories).
 *
 * @param rootPath - directory to walk (assumed already validated)
 * @param pattern - minimatch glob, e.g. "*.ext" or "**\/*.ext"
 * @param allowedDirectories - accepted for API symmetry; containment is
 *        enforced per-entry via validatePath
 * @param options - optional excludePatterns (minimatch globs)
 * @returns full paths of matching files and directories
 */
export async function searchFilesWithValidation(
  rootPath: string,
  pattern: string,
  allowedDirectories: string[],
  options: SearchOptions = {}
): Promise {
  const { excludePatterns = [] } = options;
  const results: string[] = [];

  async function search(currentPath: string) {
    const entries = await fs.readdir(currentPath, { withFileTypes: true });

    for (const entry of entries) {
      const fullPath = path.join(currentPath, entry.name);

      try {
        // Re-validate every entry so symlinked subtrees cannot escape the sandbox.
        await validatePath(fullPath);

        const relativePath = path.relative(rootPath, fullPath);
        const shouldExclude = excludePatterns.some(excludePattern =>
          minimatch(relativePath, excludePattern, { dot: true })
        );

        if (shouldExclude) continue;

        // Use glob matching for the search pattern
        if (minimatch(relativePath, pattern, { dot: true })) {
          results.push(fullPath);
        }

        if (entry.isDirectory()) {
          await
search(fullPath);
        }
      } catch {
        // Entry failed validation or disappeared mid-walk — skip it silently
        // so one bad entry does not abort the whole search.
        continue;
      }
    }
  }

  await search(rootPath);
  return results;
}
diff --git a/mcpServer/modules/filesystem/package.json b/mcpServer/modules/filesystem/package.json
new file mode 100644
index 0000000..97357d9
--- /dev/null
+++ b/mcpServer/modules/filesystem/package.json
@@ -0,0 +1,43 @@
{
  "name": "@modelcontextprotocol/server-filesystem",
  "version": "0.6.3",
  "description": "MCP server for filesystem access",
  "license": "SEE LICENSE IN LICENSE",
  "mcpName": "io.github.modelcontextprotocol/server-filesystem",
  "author": "Model Context Protocol a Series of LF Projects, LLC.",
  "homepage": "https://modelcontextprotocol.io",
  "bugs": "https://github.com/modelcontextprotocol/servers/issues",
  "repository": {
    "type": "git",
    "url": "https://github.com/modelcontextprotocol/servers.git"
  },
  "type": "module",
  "bin": {
    "mcp-server-filesystem": "dist/index.js"
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "tsc && shx chmod +x dist/*.js",
    "prepare": "npm run build",
    "watch": "tsc --watch",
    "test": "vitest run --coverage"
  },
  "dependencies": {
    "@modelcontextprotocol/sdk": "^1.26.0",
    "diff": "^8.0.3",
    "glob": "^10.5.0",
    "minimatch": "^10.0.1",
    "zod-to-json-schema": "^3.23.5"
  },
  "devDependencies": {
    "@types/diff": "^5.0.9",
    "@types/minimatch": "^5.1.2",
    "@types/node": "^22",
    "@vitest/coverage-v8": "^2.1.8",
    "shx": "^0.3.4",
    "typescript": "^5.8.2",
    "vitest": "^2.1.8"
  }
}
diff --git a/mcpServer/modules/filesystem/path-utils.ts b/mcpServer/modules/filesystem/path-utils.ts
new file mode 100644
index 0000000..50910b9
--- /dev/null
+++ b/mcpServer/modules/filesystem/path-utils.ts
@@ -0,0 +1,118 @@
import path from "path";
import os from 'os';

/**
 * Converts WSL or Unix-style Windows paths to Windows format
 * @param p The path to convert
 * @returns Converted Windows path
 */
export function convertToWindowsPath(p: string): string {
  // Handle WSL paths
(/mnt/c/...)
  // NEVER convert WSL paths - they are valid Linux paths that work with Node.js fs operations in WSL
  // Converting them to Windows format (C:\...) breaks fs operations inside WSL
  if (p.startsWith('/mnt/')) {
    return p; // Leave WSL paths unchanged
  }

  // Handle Unix-style Windows paths (/c/...)
  // Only convert when running on Windows
  if (p.match(/^\/[a-zA-Z]\//) && process.platform === 'win32') {
    const driveLetter = p.charAt(1).toUpperCase();
    const pathPart = p.slice(2).replace(/\//g, '\\');
    return `${driveLetter}:${pathPart}`;
  }

  // Handle standard Windows paths, ensuring backslashes
  if (p.match(/^[a-zA-Z]:/)) {
    return p.replace(/\//g, '\\');
  }

  // Leave non-Windows paths unchanged
  return p;
}

/**
 * Normalizes path by standardizing format while preserving OS-specific behavior
 * @param p The path to normalize
 * @returns Normalized path
 */
export function normalizePath(p: string): string {
  // Remove any surrounding quotes and whitespace
  p = p.trim().replace(/^["']|["']$/g, '');

  // Check if this is a Unix path that should not be converted
  // WSL paths (/mnt/) should ALWAYS be preserved as they work correctly in WSL with Node.js fs
  // Regular Unix paths should also be preserved
  const isUnixPath = p.startsWith('/') && (
    // Always preserve WSL paths (/mnt/c/, /mnt/d/, etc.)
    p.match(/^\/mnt\/[a-z]\//i) ||
    // On non-Windows platforms, treat all absolute paths as Unix paths
    (process.platform !== 'win32') ||
    // On Windows, preserve Unix paths that aren't Unix-style Windows paths (/c/, /d/, etc.)
    (process.platform === 'win32' && !p.match(/^\/[a-zA-Z]\//))
  );

  if (isUnixPath) {
    // For Unix paths, just normalize without converting to Windows format
    // Replace double slashes with single slashes and remove trailing slashes
    return p.replace(/\/+/g, '/').replace(/(?
{
    // Reject malformed allowed-directory entries outright.
    if (typeof dir !== 'string' || !dir) {
      return false;
    }

    // Reject null bytes in allowed dirs
    if (dir.includes('\x00')) {
      return false;
    }

    // Normalize the allowed directory
    let normalizedDir: string;
    try {
      normalizedDir = path.resolve(path.normalize(dir));
    } catch {
      return false;
    }

    // Verify allowed directory is absolute after normalization
    if (!path.isAbsolute(normalizedDir)) {
      throw new Error('Allowed directories must be absolute paths after normalization');
    }

    // Check if normalizedPath is within normalizedDir
    // Path is inside if it's the same or a subdirectory
    if (normalizedPath === normalizedDir) {
      return true;
    }

    // Special case for root directory to avoid double slash
    // On Windows, we need to check if both paths are on the same drive
    if (normalizedDir === path.sep) {
      return normalizedPath.startsWith(path.sep);
    }

    // On Windows, also check for drive root (e.g., "C:\")
    if (path.sep === '\\' && normalizedDir.match(/^[A-Za-z]:\\?$/)) {
      // Ensure both paths are on the same drive
      const dirDrive = normalizedDir.charAt(0).toLowerCase();
      const pathDrive = normalizedPath.charAt(0).toLowerCase();
      return pathDrive === dirDrive && normalizedPath.startsWith(normalizedDir.replace(/\\?$/, '\\'));
    }

    // Appending path.sep prevents prefix collisions like /foo matching /foobar.
    return normalizedPath.startsWith(normalizedDir + path.sep);
  });
}
diff --git a/mcpServer/modules/filesystem/roots-utils.ts b/mcpServer/modules/filesystem/roots-utils.ts
new file mode 100644
index 0000000..8732997
--- /dev/null
+++ b/mcpServer/modules/filesystem/roots-utils.ts
@@ -0,0 +1,76 @@
import { promises as fs, type Stats } from 'fs';
import path from 'path';
import os from 'os';
import { normalizePath } from './path-utils.js';
import type { Root } from '@modelcontextprotocol/sdk/types.js';

/**
 * Converts a root URI to a normalized directory path with basic security validation.
 * @param rootUri - File URI (file://...)
or plain directory path
 * @returns Promise resolving to validated path or null if invalid
 */
async function parseRootUri(rootUri: string): Promise {
  try {
    // NOTE(review): stripping a literal 'file://' prefix does not URL-decode
    // percent-escapes (e.g. %20) or handle a host component — confirm clients
    // only send plain-path file URIs.
    const rawPath = rootUri.startsWith('file://') ? rootUri.slice(7) : rootUri;
    // Expand a leading '~' (or bare '~') to the user's home directory.
    const expandedPath = rawPath.startsWith('~/') || rawPath === '~'
      ? path.join(os.homedir(), rawPath.slice(1))
      : rawPath;
    const absolutePath = path.resolve(expandedPath);
    // realpath both requires the path to exist and resolves symlinks.
    const resolvedPath = await fs.realpath(absolutePath);
    return normalizePath(resolvedPath);
  } catch {
    return null; // Path doesn't exist or other error
  }
}

/**
 * Formats error message for directory validation failures.
 * @param dir - Directory path that failed validation
 * @param error - Error that occurred during validation
 * @param reason - Specific reason for failure
 * @returns Formatted error message
 */
function formatDirectoryError(dir: string, error?: unknown, reason?: string): string {
  // A caller-supplied reason takes precedence over the raw error message.
  if (reason) {
    return `Skipping ${reason}: ${dir}`;
  }
  const message = error instanceof Error ? error.message : String(error);
  return `Skipping invalid directory: ${dir} due to error: ${message}`;
}

/**
 * Resolves requested root directories from MCP root specifications.
 *
 * Converts root URI specifications (file:// URIs or plain paths) into normalized
 * directory paths, validating that each path exists and is a directory.
 * Includes symlink resolution for security.
+ * + * @param requestedRoots - Array of root specifications with URI and optional name + * @returns Promise resolving to array of validated directory paths + */ +export async function getValidRootDirectories( + requestedRoots: readonly Root[] +): Promise { + const validatedDirectories: string[] = []; + + for (const requestedRoot of requestedRoots) { + const resolvedPath = await parseRootUri(requestedRoot.uri); + if (!resolvedPath) { + console.error(formatDirectoryError(requestedRoot.uri, undefined, 'invalid path or inaccessible')); + continue; + } + + try { + const stats: Stats = await fs.stat(resolvedPath); + if (stats.isDirectory()) { + validatedDirectories.push(resolvedPath); + } else { + console.error(formatDirectoryError(resolvedPath, undefined, 'non-directory root')); + } + } catch (error) { + console.error(formatDirectoryError(resolvedPath, error)); + } + } + + return validatedDirectories; +} \ No newline at end of file diff --git a/mcpServer/modules/filesystem/tsconfig.json b/mcpServer/modules/filesystem/tsconfig.json new file mode 100644 index 0000000..6f0385b --- /dev/null +++ b/mcpServer/modules/filesystem/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "outDir": "./dist", + "rootDir": ".", + "moduleResolution": "NodeNext", + "module": "NodeNext" + }, + "include": [ + "./**/*.ts" + ], + "exclude": [ + "**/__tests__/**", + "**/*.test.ts", + "**/*.spec.ts", + "vitest.config.ts" + ] +} diff --git a/mcpServer/modules/filesystem/vitest.config.ts b/mcpServer/modules/filesystem/vitest.config.ts new file mode 100644 index 0000000..d414ec8 --- /dev/null +++ b/mcpServer/modules/filesystem/vitest.config.ts @@ -0,0 +1,14 @@ +import { defineConfig } from 'vitest/config'; + +export default defineConfig({ + test: { + globals: true, + environment: 'node', + include: ['**/__tests__/**/*.test.ts'], + coverage: { + provider: 'v8', + include: ['**/*.ts'], + exclude: ['**/__tests__/**', '**/dist/**'], + }, + }, +});