mirror of
https://github.com/modelcontextprotocol/servers.git
synced 2026-04-20 12:55:21 +02:00
Merge branch 'main' into claude/issue-2361-20250817-1626
This commit is contained in:
@@ -169,16 +169,6 @@ export const createServer = () => {
|
||||
let subsUpdateInterval: NodeJS.Timeout | undefined;
|
||||
let stdErrUpdateInterval: NodeJS.Timeout | undefined;
|
||||
|
||||
// Set up update interval for subscribed resources
|
||||
subsUpdateInterval = setInterval(() => {
|
||||
for (const uri of subscriptions) {
|
||||
server.notification({
|
||||
method: "notifications/resources/updated",
|
||||
params: { uri },
|
||||
});
|
||||
}
|
||||
}, 10000);
|
||||
|
||||
let logLevel: LoggingLevel = "debug";
|
||||
let logsUpdateInterval: NodeJS.Timeout | undefined;
|
||||
const messages = [
|
||||
@@ -198,15 +188,30 @@ export const createServer = () => {
|
||||
return messageLevel < currentLevel;
|
||||
};
|
||||
|
||||
// Set up update interval for random log messages
|
||||
logsUpdateInterval = setInterval(() => {
|
||||
let message = {
|
||||
method: "notifications/message",
|
||||
params: messages[Math.floor(Math.random() * messages.length)],
|
||||
};
|
||||
if (!isMessageIgnored(message.params.level as LoggingLevel))
|
||||
server.notification(message);
|
||||
}, 20000);
|
||||
// Function to start notification intervals when a client connects
|
||||
const startNotificationIntervals = () => {
|
||||
if (!subsUpdateInterval) {
|
||||
subsUpdateInterval = setInterval(() => {
|
||||
for (const uri of subscriptions) {
|
||||
server.notification({
|
||||
method: "notifications/resources/updated",
|
||||
params: { uri },
|
||||
});
|
||||
}
|
||||
}, 10000);
|
||||
}
|
||||
|
||||
if (!logsUpdateInterval) {
|
||||
logsUpdateInterval = setInterval(() => {
|
||||
let message = {
|
||||
method: "notifications/message",
|
||||
params: messages[Math.floor(Math.random() * messages.length)],
|
||||
};
|
||||
if (!isMessageIgnored(message.params.level as LoggingLevel))
|
||||
server.notification(message);
|
||||
}, 20000);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
||||
@@ -874,7 +879,7 @@ export const createServer = () => {
|
||||
if (stdErrUpdateInterval) clearInterval(stdErrUpdateInterval);
|
||||
};
|
||||
|
||||
return { server, cleanup };
|
||||
return { server, cleanup, startNotificationIntervals };
|
||||
};
|
||||
|
||||
const MCP_TINY_IMAGE =
|
||||
|
||||
@@ -10,7 +10,7 @@ const transports: Map<string, SSEServerTransport> = new Map<string, SSEServerTra
|
||||
|
||||
app.get("/sse", async (req, res) => {
|
||||
let transport: SSEServerTransport;
|
||||
const { server, cleanup } = createServer();
|
||||
const { server, cleanup, startNotificationIntervals } = createServer();
|
||||
|
||||
if (req?.query?.sessionId) {
|
||||
const sessionId = (req?.query?.sessionId as string);
|
||||
@@ -25,6 +25,9 @@ app.get("/sse", async (req, res) => {
|
||||
await server.connect(transport);
|
||||
console.error("Client Connected: ", transport.sessionId);
|
||||
|
||||
// Start notification intervals after client connects
|
||||
startNotificationIntervals();
|
||||
|
||||
// Handle close of connection
|
||||
server.onclose = async () => {
|
||||
console.error("Client Disconnected: ", transport.sessionId);
|
||||
|
||||
@@ -64,10 +64,6 @@ The server's directory access control follows this flow:
|
||||
|
||||
## API
|
||||
|
||||
### Resources
|
||||
|
||||
- `file://system`: File system operations interface
|
||||
|
||||
### Tools
|
||||
|
||||
- **read_text_file**
|
||||
@@ -77,6 +73,7 @@ The server's directory access control follows this flow:
|
||||
- `head` (number, optional): First N lines
|
||||
- `tail` (number, optional): Last N lines
|
||||
- Always treats the file as UTF-8 text regardless of extension
|
||||
- Cannot specify both `head` and `tail` simultaneously
|
||||
|
||||
- **read_media_file**
|
||||
- Read an image or audio file
|
||||
@@ -123,6 +120,23 @@ The server's directory access control follows this flow:
|
||||
- List directory contents with [FILE] or [DIR] prefixes
|
||||
- Input: `path` (string)
|
||||
|
||||
- **list_directory_with_sizes**
|
||||
- List directory contents with [FILE] or [DIR] prefixes, including file sizes
|
||||
- Inputs:
|
||||
- `path` (string): Directory path to list
|
||||
- `sortBy` (string, optional): Sort entries by "name" or "size" (default: "name")
|
||||
- Returns detailed listing with file sizes and summary statistics
|
||||
- Shows total files, directories, and combined size
|
||||
|
||||
- **directory_tree**
|
||||
- Get a recursive tree view of files and directories as a JSON structure
|
||||
- Input: `path` (string): Starting directory path
|
||||
- Returns JSON structure with:
|
||||
- `name`: File/directory name
|
||||
- `type`: "file" or "directory"
|
||||
- `children`: Array of child entries (for directories only)
|
||||
- Output is formatted with 2-space indentation for readability
|
||||
|
||||
- **move_file**
|
||||
- Move or rename files and directories
|
||||
- Inputs:
|
||||
@@ -131,14 +145,27 @@ The server's directory access control follows this flow:
|
||||
- Fails if destination exists
|
||||
|
||||
- **search_files**
|
||||
- Recursively search for files/directories
|
||||
- Recursively search for files/directories that match or do not match patterns
|
||||
- Inputs:
|
||||
- `path` (string): Starting directory
|
||||
- `pattern` (string): Search pattern
|
||||
- `excludePatterns` (string[]): Exclude any patterns. Glob formats are supported.
|
||||
- Case-insensitive matching
|
||||
- `excludePatterns` (string[]): Exclude any patterns.
|
||||
- Glob-style pattern matching
|
||||
- Returns full paths to matches
|
||||
|
||||
- **directory_tree**
|
||||
- Get recursive JSON tree structure of directory contents
|
||||
- Inputs:
|
||||
- `path` (string): Starting directory
|
||||
- `excludePatterns` (string[]): Exclude any patterns. Glob formats are supported.
|
||||
- Returns:
|
||||
- JSON array where each entry contains:
|
||||
- `name` (string): File/directory name
|
||||
- `type` ('file'|'directory'): Entry type
|
||||
- `children` (array): Present only for directories
|
||||
- Empty array for empty directories
|
||||
- Omitted for files
|
||||
|
||||
- **get_file_info**
|
||||
- Get detailed file/directory metadata
|
||||
- Input: `path` (string)
|
||||
|
||||
147
src/filesystem/__tests__/directory-tree.test.ts
Normal file
147
src/filesystem/__tests__/directory-tree.test.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from '@jest/globals';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
|
||||
// We need to test the buildTree function, but it's defined inside the request handler
|
||||
// So we'll extract the core logic into a testable function
|
||||
import { minimatch } from 'minimatch';
|
||||
|
||||
interface TreeEntry {
|
||||
name: string;
|
||||
type: 'file' | 'directory';
|
||||
children?: TreeEntry[];
|
||||
}
|
||||
|
||||
async function buildTreeForTesting(currentPath: string, rootPath: string, excludePatterns: string[] = []): Promise<TreeEntry[]> {
|
||||
const entries = await fs.readdir(currentPath, {withFileTypes: true});
|
||||
const result: TreeEntry[] = [];
|
||||
|
||||
for (const entry of entries) {
|
||||
const relativePath = path.relative(rootPath, path.join(currentPath, entry.name));
|
||||
const shouldExclude = excludePatterns.some(pattern => {
|
||||
if (pattern.includes('*')) {
|
||||
return minimatch(relativePath, pattern, {dot: true});
|
||||
}
|
||||
// For files: match exact name or as part of path
|
||||
// For directories: match as directory path
|
||||
return minimatch(relativePath, pattern, {dot: true}) ||
|
||||
minimatch(relativePath, `**/${pattern}`, {dot: true}) ||
|
||||
minimatch(relativePath, `**/${pattern}/**`, {dot: true});
|
||||
});
|
||||
if (shouldExclude)
|
||||
continue;
|
||||
|
||||
const entryData: TreeEntry = {
|
||||
name: entry.name,
|
||||
type: entry.isDirectory() ? 'directory' : 'file'
|
||||
};
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
const subPath = path.join(currentPath, entry.name);
|
||||
entryData.children = await buildTreeForTesting(subPath, rootPath, excludePatterns);
|
||||
}
|
||||
|
||||
result.push(entryData);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
describe('buildTree exclude patterns', () => {
|
||||
let testDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'filesystem-test-'));
|
||||
|
||||
// Create test directory structure
|
||||
await fs.mkdir(path.join(testDir, 'src'));
|
||||
await fs.mkdir(path.join(testDir, 'node_modules'));
|
||||
await fs.mkdir(path.join(testDir, '.git'));
|
||||
await fs.mkdir(path.join(testDir, 'nested', 'node_modules'), { recursive: true });
|
||||
|
||||
// Create test files
|
||||
await fs.writeFile(path.join(testDir, '.env'), 'SECRET=value');
|
||||
await fs.writeFile(path.join(testDir, '.env.local'), 'LOCAL_SECRET=value');
|
||||
await fs.writeFile(path.join(testDir, 'src', 'index.js'), 'console.log("hello");');
|
||||
await fs.writeFile(path.join(testDir, 'package.json'), '{}');
|
||||
await fs.writeFile(path.join(testDir, 'node_modules', 'module.js'), 'module.exports = {};');
|
||||
await fs.writeFile(path.join(testDir, 'nested', 'node_modules', 'deep.js'), 'module.exports = {};');
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.rm(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should exclude files matching simple patterns', async () => {
|
||||
// Test the current implementation - this will fail until the bug is fixed
|
||||
const tree = await buildTreeForTesting(testDir, testDir, ['.env']);
|
||||
const fileNames = tree.map(entry => entry.name);
|
||||
|
||||
expect(fileNames).not.toContain('.env');
|
||||
expect(fileNames).toContain('.env.local'); // Should not exclude this
|
||||
expect(fileNames).toContain('src');
|
||||
expect(fileNames).toContain('package.json');
|
||||
});
|
||||
|
||||
it('should exclude directories matching simple patterns', async () => {
|
||||
const tree = await buildTreeForTesting(testDir, testDir, ['node_modules']);
|
||||
const dirNames = tree.map(entry => entry.name);
|
||||
|
||||
expect(dirNames).not.toContain('node_modules');
|
||||
expect(dirNames).toContain('src');
|
||||
expect(dirNames).toContain('.git');
|
||||
});
|
||||
|
||||
it('should exclude nested directories with same pattern', async () => {
|
||||
const tree = await buildTreeForTesting(testDir, testDir, ['node_modules']);
|
||||
|
||||
// Find the nested directory
|
||||
const nestedDir = tree.find(entry => entry.name === 'nested');
|
||||
expect(nestedDir).toBeDefined();
|
||||
expect(nestedDir!.children).toBeDefined();
|
||||
|
||||
// The nested/node_modules should also be excluded
|
||||
const nestedChildren = nestedDir!.children!.map(child => child.name);
|
||||
expect(nestedChildren).not.toContain('node_modules');
|
||||
});
|
||||
|
||||
it('should handle glob patterns correctly', async () => {
|
||||
const tree = await buildTreeForTesting(testDir, testDir, ['*.env']);
|
||||
const fileNames = tree.map(entry => entry.name);
|
||||
|
||||
expect(fileNames).not.toContain('.env');
|
||||
expect(fileNames).toContain('.env.local'); // *.env should not match .env.local
|
||||
expect(fileNames).toContain('src');
|
||||
});
|
||||
|
||||
it('should handle dot files correctly', async () => {
|
||||
const tree = await buildTreeForTesting(testDir, testDir, ['.git']);
|
||||
const dirNames = tree.map(entry => entry.name);
|
||||
|
||||
expect(dirNames).not.toContain('.git');
|
||||
expect(dirNames).toContain('.env'); // Should not exclude this
|
||||
});
|
||||
|
||||
it('should work with multiple exclude patterns', async () => {
|
||||
const tree = await buildTreeForTesting(testDir, testDir, ['node_modules', '.env', '.git']);
|
||||
const entryNames = tree.map(entry => entry.name);
|
||||
|
||||
expect(entryNames).not.toContain('node_modules');
|
||||
expect(entryNames).not.toContain('.env');
|
||||
expect(entryNames).not.toContain('.git');
|
||||
expect(entryNames).toContain('src');
|
||||
expect(entryNames).toContain('package.json');
|
||||
});
|
||||
|
||||
it('should handle empty exclude patterns', async () => {
|
||||
const tree = await buildTreeForTesting(testDir, testDir, []);
|
||||
const entryNames = tree.map(entry => entry.name);
|
||||
|
||||
// All entries should be included
|
||||
expect(entryNames).toContain('node_modules');
|
||||
expect(entryNames).toContain('.env');
|
||||
expect(entryNames).toContain('.git');
|
||||
expect(entryNames).toContain('src');
|
||||
});
|
||||
});
|
||||
701
src/filesystem/__tests__/lib.test.ts
Normal file
701
src/filesystem/__tests__/lib.test.ts
Normal file
@@ -0,0 +1,701 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, jest } from '@jest/globals';
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import {
|
||||
// Pure utility functions
|
||||
formatSize,
|
||||
normalizeLineEndings,
|
||||
createUnifiedDiff,
|
||||
// Security & validation functions
|
||||
validatePath,
|
||||
setAllowedDirectories,
|
||||
// File operations
|
||||
getFileStats,
|
||||
readFileContent,
|
||||
writeFileContent,
|
||||
// Search & filtering functions
|
||||
searchFilesWithValidation,
|
||||
// File editing functions
|
||||
applyFileEdits,
|
||||
tailFile,
|
||||
headFile
|
||||
} from '../lib.js';
|
||||
|
||||
// Mock fs module
|
||||
jest.mock('fs/promises');
|
||||
const mockFs = fs as jest.Mocked<typeof fs>;
|
||||
|
||||
describe('Lib Functions', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
// Set up allowed directories for tests
|
||||
const allowedDirs = process.platform === 'win32' ? ['C:\\Users\\test', 'C:\\temp', 'C:\\allowed'] : ['/home/user', '/tmp', '/allowed'];
|
||||
setAllowedDirectories(allowedDirs);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks();
|
||||
// Clear allowed directories after tests
|
||||
setAllowedDirectories([]);
|
||||
});
|
||||
|
||||
describe('Pure Utility Functions', () => {
|
||||
describe('formatSize', () => {
|
||||
it('formats bytes correctly', () => {
|
||||
expect(formatSize(0)).toBe('0 B');
|
||||
expect(formatSize(512)).toBe('512 B');
|
||||
expect(formatSize(1024)).toBe('1.00 KB');
|
||||
expect(formatSize(1536)).toBe('1.50 KB');
|
||||
expect(formatSize(1048576)).toBe('1.00 MB');
|
||||
expect(formatSize(1073741824)).toBe('1.00 GB');
|
||||
expect(formatSize(1099511627776)).toBe('1.00 TB');
|
||||
});
|
||||
|
||||
it('handles edge cases', () => {
|
||||
expect(formatSize(1023)).toBe('1023 B');
|
||||
expect(formatSize(1025)).toBe('1.00 KB');
|
||||
expect(formatSize(1048575)).toBe('1024.00 KB');
|
||||
});
|
||||
|
||||
it('handles very large numbers beyond TB', () => {
|
||||
// The function only supports up to TB, so very large numbers will show as TB
|
||||
expect(formatSize(1024 * 1024 * 1024 * 1024 * 1024)).toBe('1024.00 TB');
|
||||
expect(formatSize(Number.MAX_SAFE_INTEGER)).toContain('TB');
|
||||
});
|
||||
|
||||
it('handles negative numbers', () => {
|
||||
// Negative numbers will result in NaN for the log calculation
|
||||
expect(formatSize(-1024)).toContain('NaN');
|
||||
expect(formatSize(-0)).toBe('0 B');
|
||||
});
|
||||
|
||||
it('handles decimal numbers', () => {
|
||||
expect(formatSize(1536.5)).toBe('1.50 KB');
|
||||
expect(formatSize(1023.9)).toBe('1023.9 B');
|
||||
});
|
||||
|
||||
it('handles very small positive numbers', () => {
|
||||
expect(formatSize(1)).toBe('1 B');
|
||||
expect(formatSize(0.5)).toBe('0.5 B');
|
||||
expect(formatSize(0.1)).toBe('0.1 B');
|
||||
});
|
||||
});
|
||||
|
||||
describe('normalizeLineEndings', () => {
|
||||
it('converts CRLF to LF', () => {
|
||||
expect(normalizeLineEndings('line1\r\nline2\r\nline3')).toBe('line1\nline2\nline3');
|
||||
});
|
||||
|
||||
it('leaves LF unchanged', () => {
|
||||
expect(normalizeLineEndings('line1\nline2\nline3')).toBe('line1\nline2\nline3');
|
||||
});
|
||||
|
||||
it('handles mixed line endings', () => {
|
||||
expect(normalizeLineEndings('line1\r\nline2\nline3\r\n')).toBe('line1\nline2\nline3\n');
|
||||
});
|
||||
|
||||
it('handles empty string', () => {
|
||||
expect(normalizeLineEndings('')).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('createUnifiedDiff', () => {
|
||||
it('creates diff for simple changes', () => {
|
||||
const original = 'line1\nline2\nline3';
|
||||
const modified = 'line1\nmodified line2\nline3';
|
||||
const diff = createUnifiedDiff(original, modified, 'test.txt');
|
||||
|
||||
expect(diff).toContain('--- test.txt');
|
||||
expect(diff).toContain('+++ test.txt');
|
||||
expect(diff).toContain('-line2');
|
||||
expect(diff).toContain('+modified line2');
|
||||
});
|
||||
|
||||
it('handles CRLF normalization', () => {
|
||||
const original = 'line1\r\nline2\r\n';
|
||||
const modified = 'line1\nmodified line2\n';
|
||||
const diff = createUnifiedDiff(original, modified);
|
||||
|
||||
expect(diff).toContain('-line2');
|
||||
expect(diff).toContain('+modified line2');
|
||||
});
|
||||
|
||||
it('handles identical content', () => {
|
||||
const content = 'line1\nline2\nline3';
|
||||
const diff = createUnifiedDiff(content, content);
|
||||
|
||||
// Should not contain any +/- lines for identical content (excluding header lines)
|
||||
expect(diff.split('\n').filter((line: string) => line.startsWith('+++') || line.startsWith('---'))).toHaveLength(2);
|
||||
expect(diff.split('\n').filter((line: string) => line.startsWith('+') && !line.startsWith('+++'))).toHaveLength(0);
|
||||
expect(diff.split('\n').filter((line: string) => line.startsWith('-') && !line.startsWith('---'))).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('handles empty content', () => {
|
||||
const diff = createUnifiedDiff('', '');
|
||||
expect(diff).toContain('--- file');
|
||||
expect(diff).toContain('+++ file');
|
||||
});
|
||||
|
||||
it('handles default filename parameter', () => {
|
||||
const diff = createUnifiedDiff('old', 'new');
|
||||
expect(diff).toContain('--- file');
|
||||
expect(diff).toContain('+++ file');
|
||||
});
|
||||
|
||||
it('handles custom filename', () => {
|
||||
const diff = createUnifiedDiff('old', 'new', 'custom.txt');
|
||||
expect(diff).toContain('--- custom.txt');
|
||||
expect(diff).toContain('+++ custom.txt');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Security & Validation Functions', () => {
|
||||
describe('validatePath', () => {
|
||||
// Use Windows-compatible paths for testing
|
||||
const allowedDirs = process.platform === 'win32' ? ['C:\\Users\\test', 'C:\\temp'] : ['/home/user', '/tmp'];
|
||||
|
||||
beforeEach(() => {
|
||||
mockFs.realpath.mockImplementation(async (path: any) => path.toString());
|
||||
});
|
||||
|
||||
it('validates allowed paths', async () => {
|
||||
const testPath = process.platform === 'win32' ? 'C:\\Users\\test\\file.txt' : '/home/user/file.txt';
|
||||
const result = await validatePath(testPath);
|
||||
expect(result).toBe(testPath);
|
||||
});
|
||||
|
||||
it('rejects disallowed paths', async () => {
|
||||
const testPath = process.platform === 'win32' ? 'C:\\Windows\\System32\\file.txt' : '/etc/passwd';
|
||||
await expect(validatePath(testPath))
|
||||
.rejects.toThrow('Access denied - path outside allowed directories');
|
||||
});
|
||||
|
||||
it('handles non-existent files by checking parent directory', async () => {
|
||||
const newFilePath = process.platform === 'win32' ? 'C:\\Users\\test\\newfile.txt' : '/home/user/newfile.txt';
|
||||
const parentPath = process.platform === 'win32' ? 'C:\\Users\\test' : '/home/user';
|
||||
|
||||
// Create an error with the ENOENT code that the implementation checks for
|
||||
const enoentError = new Error('ENOENT') as NodeJS.ErrnoException;
|
||||
enoentError.code = 'ENOENT';
|
||||
|
||||
mockFs.realpath
|
||||
.mockRejectedValueOnce(enoentError)
|
||||
.mockResolvedValueOnce(parentPath);
|
||||
|
||||
const result = await validatePath(newFilePath);
|
||||
expect(result).toBe(path.resolve(newFilePath));
|
||||
});
|
||||
|
||||
it('rejects when parent directory does not exist', async () => {
|
||||
const newFilePath = process.platform === 'win32' ? 'C:\\Users\\test\\nonexistent\\newfile.txt' : '/home/user/nonexistent/newfile.txt';
|
||||
|
||||
// Create errors with the ENOENT code
|
||||
const enoentError1 = new Error('ENOENT') as NodeJS.ErrnoException;
|
||||
enoentError1.code = 'ENOENT';
|
||||
const enoentError2 = new Error('ENOENT') as NodeJS.ErrnoException;
|
||||
enoentError2.code = 'ENOENT';
|
||||
|
||||
mockFs.realpath
|
||||
.mockRejectedValueOnce(enoentError1)
|
||||
.mockRejectedValueOnce(enoentError2);
|
||||
|
||||
await expect(validatePath(newFilePath))
|
||||
.rejects.toThrow('Parent directory does not exist');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('File Operations', () => {
|
||||
describe('getFileStats', () => {
|
||||
it('returns file statistics', async () => {
|
||||
const mockStats = {
|
||||
size: 1024,
|
||||
birthtime: new Date('2023-01-01'),
|
||||
mtime: new Date('2023-01-02'),
|
||||
atime: new Date('2023-01-03'),
|
||||
isDirectory: () => false,
|
||||
isFile: () => true,
|
||||
mode: 0o644
|
||||
};
|
||||
|
||||
mockFs.stat.mockResolvedValueOnce(mockStats as any);
|
||||
|
||||
const result = await getFileStats('/test/file.txt');
|
||||
|
||||
expect(result).toEqual({
|
||||
size: 1024,
|
||||
created: new Date('2023-01-01'),
|
||||
modified: new Date('2023-01-02'),
|
||||
accessed: new Date('2023-01-03'),
|
||||
isDirectory: false,
|
||||
isFile: true,
|
||||
permissions: '644'
|
||||
});
|
||||
});
|
||||
|
||||
it('handles directory statistics', async () => {
|
||||
const mockStats = {
|
||||
size: 4096,
|
||||
birthtime: new Date('2023-01-01'),
|
||||
mtime: new Date('2023-01-02'),
|
||||
atime: new Date('2023-01-03'),
|
||||
isDirectory: () => true,
|
||||
isFile: () => false,
|
||||
mode: 0o755
|
||||
};
|
||||
|
||||
mockFs.stat.mockResolvedValueOnce(mockStats as any);
|
||||
|
||||
const result = await getFileStats('/test/dir');
|
||||
|
||||
expect(result.isDirectory).toBe(true);
|
||||
expect(result.isFile).toBe(false);
|
||||
expect(result.permissions).toBe('755');
|
||||
});
|
||||
});
|
||||
|
||||
describe('readFileContent', () => {
|
||||
it('reads file with default encoding', async () => {
|
||||
mockFs.readFile.mockResolvedValueOnce('file content');
|
||||
|
||||
const result = await readFileContent('/test/file.txt');
|
||||
|
||||
expect(result).toBe('file content');
|
||||
expect(mockFs.readFile).toHaveBeenCalledWith('/test/file.txt', 'utf-8');
|
||||
});
|
||||
|
||||
it('reads file with custom encoding', async () => {
|
||||
mockFs.readFile.mockResolvedValueOnce('file content');
|
||||
|
||||
const result = await readFileContent('/test/file.txt', 'ascii');
|
||||
|
||||
expect(result).toBe('file content');
|
||||
expect(mockFs.readFile).toHaveBeenCalledWith('/test/file.txt', 'ascii');
|
||||
});
|
||||
});
|
||||
|
||||
describe('writeFileContent', () => {
|
||||
it('writes file content', async () => {
|
||||
mockFs.writeFile.mockResolvedValueOnce(undefined);
|
||||
|
||||
await writeFileContent('/test/file.txt', 'new content');
|
||||
|
||||
expect(mockFs.writeFile).toHaveBeenCalledWith('/test/file.txt', 'new content', { encoding: "utf-8", flag: 'wx' });
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('Search & Filtering Functions', () => {
|
||||
describe('searchFilesWithValidation', () => {
|
||||
beforeEach(() => {
|
||||
mockFs.realpath.mockImplementation(async (path: any) => path.toString());
|
||||
});
|
||||
|
||||
|
||||
it('excludes files matching exclude patterns', async () => {
|
||||
const mockEntries = [
|
||||
{ name: 'test.txt', isDirectory: () => false },
|
||||
{ name: 'test.log', isDirectory: () => false },
|
||||
{ name: 'node_modules', isDirectory: () => true }
|
||||
];
|
||||
|
||||
mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
|
||||
|
||||
const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
|
||||
const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
|
||||
|
||||
// Mock realpath to return the same path for validation to pass
|
||||
mockFs.realpath.mockImplementation(async (inputPath: any) => {
|
||||
const pathStr = inputPath.toString();
|
||||
// Return the path as-is for validation
|
||||
return pathStr;
|
||||
});
|
||||
|
||||
const result = await searchFilesWithValidation(
|
||||
testDir,
|
||||
'*test*',
|
||||
allowedDirs,
|
||||
{ excludePatterns: ['*.log', 'node_modules'] }
|
||||
);
|
||||
|
||||
const expectedResult = process.platform === 'win32' ? 'C:\\allowed\\dir\\test.txt' : '/allowed/dir/test.txt';
|
||||
expect(result).toEqual([expectedResult]);
|
||||
});
|
||||
|
||||
it('handles validation errors during search', async () => {
|
||||
const mockEntries = [
|
||||
{ name: 'test.txt', isDirectory: () => false },
|
||||
{ name: 'invalid_file.txt', isDirectory: () => false }
|
||||
];
|
||||
|
||||
mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
|
||||
|
||||
// Mock validatePath to throw error for invalid_file.txt
|
||||
mockFs.realpath.mockImplementation(async (path: any) => {
|
||||
if (path.toString().includes('invalid_file.txt')) {
|
||||
throw new Error('Access denied');
|
||||
}
|
||||
return path.toString();
|
||||
});
|
||||
|
||||
const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
|
||||
const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
|
||||
|
||||
const result = await searchFilesWithValidation(
|
||||
testDir,
|
||||
'*test*',
|
||||
allowedDirs,
|
||||
{}
|
||||
);
|
||||
|
||||
// Should only return the valid file, skipping the invalid one
|
||||
const expectedResult = process.platform === 'win32' ? 'C:\\allowed\\dir\\test.txt' : '/allowed/dir/test.txt';
|
||||
expect(result).toEqual([expectedResult]);
|
||||
});
|
||||
|
||||
it('handles complex exclude patterns with wildcards', async () => {
|
||||
const mockEntries = [
|
||||
{ name: 'test.txt', isDirectory: () => false },
|
||||
{ name: 'test.backup', isDirectory: () => false },
|
||||
{ name: 'important_test.js', isDirectory: () => false }
|
||||
];
|
||||
|
||||
mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
|
||||
|
||||
const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
|
||||
const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
|
||||
|
||||
const result = await searchFilesWithValidation(
|
||||
testDir,
|
||||
'*test*',
|
||||
allowedDirs,
|
||||
{ excludePatterns: ['*.backup'] }
|
||||
);
|
||||
|
||||
const expectedResults = process.platform === 'win32' ? [
|
||||
'C:\\allowed\\dir\\test.txt',
|
||||
'C:\\allowed\\dir\\important_test.js'
|
||||
] : [
|
||||
'/allowed/dir/test.txt',
|
||||
'/allowed/dir/important_test.js'
|
||||
];
|
||||
expect(result).toEqual(expectedResults);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('File Editing Functions', () => {
|
||||
describe('applyFileEdits', () => {
|
||||
beforeEach(() => {
|
||||
mockFs.readFile.mockResolvedValue('line1\nline2\nline3\n');
|
||||
mockFs.writeFile.mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
it('applies simple text replacement', async () => {
|
||||
const edits = [
|
||||
{ oldText: 'line2', newText: 'modified line2' }
|
||||
];
|
||||
|
||||
mockFs.rename.mockResolvedValueOnce(undefined);
|
||||
|
||||
const result = await applyFileEdits('/test/file.txt', edits, false);
|
||||
|
||||
expect(result).toContain('modified line2');
|
||||
// Should write to temporary file then rename
|
||||
expect(mockFs.writeFile).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
|
||||
'line1\nmodified line2\nline3\n',
|
||||
'utf-8'
|
||||
);
|
||||
expect(mockFs.rename).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
|
||||
'/test/file.txt'
|
||||
);
|
||||
});
|
||||
|
||||
it('handles dry run mode', async () => {
|
||||
const edits = [
|
||||
{ oldText: 'line2', newText: 'modified line2' }
|
||||
];
|
||||
|
||||
const result = await applyFileEdits('/test/file.txt', edits, true);
|
||||
|
||||
expect(result).toContain('modified line2');
|
||||
expect(mockFs.writeFile).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('applies multiple edits sequentially', async () => {
|
||||
const edits = [
|
||||
{ oldText: 'line1', newText: 'first line' },
|
||||
{ oldText: 'line3', newText: 'third line' }
|
||||
];
|
||||
|
||||
mockFs.rename.mockResolvedValueOnce(undefined);
|
||||
|
||||
await applyFileEdits('/test/file.txt', edits, false);
|
||||
|
||||
expect(mockFs.writeFile).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
|
||||
'first line\nline2\nthird line\n',
|
||||
'utf-8'
|
||||
);
|
||||
expect(mockFs.rename).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
|
||||
'/test/file.txt'
|
||||
);
|
||||
});
|
||||
|
||||
it('handles whitespace-flexible matching', async () => {
|
||||
mockFs.readFile.mockResolvedValue(' line1\n line2\n line3\n');
|
||||
|
||||
const edits = [
|
||||
{ oldText: 'line2', newText: 'modified line2' }
|
||||
];
|
||||
|
||||
mockFs.rename.mockResolvedValueOnce(undefined);
|
||||
|
||||
await applyFileEdits('/test/file.txt', edits, false);
|
||||
|
||||
expect(mockFs.writeFile).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
|
||||
' line1\n modified line2\n line3\n',
|
||||
'utf-8'
|
||||
);
|
||||
expect(mockFs.rename).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
|
||||
'/test/file.txt'
|
||||
);
|
||||
});
|
||||
|
||||
it('throws error for non-matching edits', async () => {
|
||||
const edits = [
|
||||
{ oldText: 'nonexistent line', newText: 'replacement' }
|
||||
];
|
||||
|
||||
await expect(applyFileEdits('/test/file.txt', edits, false))
|
||||
.rejects.toThrow('Could not find exact match for edit');
|
||||
});
|
||||
|
||||
it('handles complex multi-line edits with indentation', async () => {
|
||||
mockFs.readFile.mockResolvedValue('function test() {\n console.log("hello");\n return true;\n}');
|
||||
|
||||
const edits = [
|
||||
{
|
||||
oldText: ' console.log("hello");\n return true;',
|
||||
newText: ' console.log("world");\n console.log("test");\n return false;'
|
||||
}
|
||||
];
|
||||
|
||||
mockFs.rename.mockResolvedValueOnce(undefined);
|
||||
|
||||
await applyFileEdits('/test/file.js', edits, false);
|
||||
|
||||
expect(mockFs.writeFile).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
|
||||
'function test() {\n console.log("world");\n console.log("test");\n return false;\n}',
|
||||
'utf-8'
|
||||
);
|
||||
expect(mockFs.rename).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
|
||||
'/test/file.js'
|
||||
);
|
||||
});
|
||||
|
||||
it('handles edits with different indentation patterns', async () => {
|
||||
mockFs.readFile.mockResolvedValue(' if (condition) {\n doSomething();\n }');
|
||||
|
||||
const edits = [
|
||||
{
|
||||
oldText: 'doSomething();',
|
||||
newText: 'doSomethingElse();\n doAnotherThing();'
|
||||
}
|
||||
];
|
||||
|
||||
mockFs.rename.mockResolvedValueOnce(undefined);
|
||||
|
||||
await applyFileEdits('/test/file.js', edits, false);
|
||||
|
||||
expect(mockFs.writeFile).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
|
||||
' if (condition) {\n doSomethingElse();\n doAnotherThing();\n }',
|
||||
'utf-8'
|
||||
);
|
||||
expect(mockFs.rename).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
|
||||
'/test/file.js'
|
||||
);
|
||||
});
|
||||
|
||||
it('handles CRLF line endings in file content', async () => {
|
||||
mockFs.readFile.mockResolvedValue('line1\r\nline2\r\nline3\r\n');
|
||||
|
||||
const edits = [
|
||||
{ oldText: 'line2', newText: 'modified line2' }
|
||||
];
|
||||
|
||||
mockFs.rename.mockResolvedValueOnce(undefined);
|
||||
|
||||
await applyFileEdits('/test/file.txt', edits, false);
|
||||
|
||||
expect(mockFs.writeFile).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
|
||||
'line1\nmodified line2\nline3\n',
|
||||
'utf-8'
|
||||
);
|
||||
expect(mockFs.rename).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
|
||||
'/test/file.txt'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('tailFile', () => {
|
||||
it('handles empty files', async () => {
|
||||
mockFs.stat.mockResolvedValue({ size: 0 } as any);
|
||||
|
||||
const result = await tailFile('/test/empty.txt', 5);
|
||||
|
||||
expect(result).toBe('');
|
||||
expect(mockFs.open).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('calls stat to check file size', async () => {
|
||||
mockFs.stat.mockResolvedValue({ size: 100 } as any);
|
||||
|
||||
// Mock file handle with proper typing
|
||||
const mockFileHandle = {
|
||||
read: jest.fn(),
|
||||
close: jest.fn()
|
||||
} as any;
|
||||
|
||||
mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
|
||||
mockFileHandle.close.mockResolvedValue(undefined);
|
||||
|
||||
mockFs.open.mockResolvedValue(mockFileHandle);
|
||||
|
||||
await tailFile('/test/file.txt', 2);
|
||||
|
||||
expect(mockFs.stat).toHaveBeenCalledWith('/test/file.txt');
|
||||
expect(mockFs.open).toHaveBeenCalledWith('/test/file.txt', 'r');
|
||||
});
|
||||
|
||||
it('handles files with content and returns last lines', async () => {
|
||||
mockFs.stat.mockResolvedValue({ size: 50 } as any);
|
||||
|
||||
const mockFileHandle = {
|
||||
read: jest.fn(),
|
||||
close: jest.fn()
|
||||
} as any;
|
||||
|
||||
// Simulate reading file content in chunks
|
||||
mockFileHandle.read
|
||||
.mockResolvedValueOnce({ bytesRead: 20, buffer: Buffer.from('line3\nline4\nline5\n') })
|
||||
.mockResolvedValueOnce({ bytesRead: 0 });
|
||||
mockFileHandle.close.mockResolvedValue(undefined);
|
||||
|
||||
mockFs.open.mockResolvedValue(mockFileHandle);
|
||||
|
||||
const result = await tailFile('/test/file.txt', 2);
|
||||
|
||||
expect(mockFileHandle.close).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles read errors gracefully', async () => {
|
||||
mockFs.stat.mockResolvedValue({ size: 100 } as any);
|
||||
|
||||
const mockFileHandle = {
|
||||
read: jest.fn(),
|
||||
close: jest.fn()
|
||||
} as any;
|
||||
|
||||
mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
|
||||
mockFileHandle.close.mockResolvedValue(undefined);
|
||||
|
||||
mockFs.open.mockResolvedValue(mockFileHandle);
|
||||
|
||||
await tailFile('/test/file.txt', 5);
|
||||
|
||||
expect(mockFileHandle.close).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('headFile', () => {
|
||||
it('opens file for reading', async () => {
|
||||
// Mock file handle with proper typing
|
||||
const mockFileHandle = {
|
||||
read: jest.fn(),
|
||||
close: jest.fn()
|
||||
} as any;
|
||||
|
||||
mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
|
||||
mockFileHandle.close.mockResolvedValue(undefined);
|
||||
|
||||
mockFs.open.mockResolvedValue(mockFileHandle);
|
||||
|
||||
await headFile('/test/file.txt', 2);
|
||||
|
||||
expect(mockFs.open).toHaveBeenCalledWith('/test/file.txt', 'r');
|
||||
});
|
||||
|
||||
it('handles files with content and returns first lines', async () => {
|
||||
const mockFileHandle = {
|
||||
read: jest.fn(),
|
||||
close: jest.fn()
|
||||
} as any;
|
||||
|
||||
// Simulate reading file content with newlines
|
||||
mockFileHandle.read
|
||||
.mockResolvedValueOnce({ bytesRead: 20, buffer: Buffer.from('line1\nline2\nline3\n') })
|
||||
.mockResolvedValueOnce({ bytesRead: 0 });
|
||||
mockFileHandle.close.mockResolvedValue(undefined);
|
||||
|
||||
mockFs.open.mockResolvedValue(mockFileHandle);
|
||||
|
||||
const result = await headFile('/test/file.txt', 2);
|
||||
|
||||
expect(mockFileHandle.close).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles files with leftover content', async () => {
|
||||
const mockFileHandle = {
|
||||
read: jest.fn(),
|
||||
close: jest.fn()
|
||||
} as any;
|
||||
|
||||
// Simulate reading file content without final newline
|
||||
mockFileHandle.read
|
||||
.mockResolvedValueOnce({ bytesRead: 15, buffer: Buffer.from('line1\nline2\nend') })
|
||||
.mockResolvedValueOnce({ bytesRead: 0 });
|
||||
mockFileHandle.close.mockResolvedValue(undefined);
|
||||
|
||||
mockFs.open.mockResolvedValue(mockFileHandle);
|
||||
|
||||
const result = await headFile('/test/file.txt', 5);
|
||||
|
||||
expect(mockFileHandle.close).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles reaching requested line count', async () => {
|
||||
const mockFileHandle = {
|
||||
read: jest.fn(),
|
||||
close: jest.fn()
|
||||
} as any;
|
||||
|
||||
// Simulate reading exactly the requested number of lines
|
||||
mockFileHandle.read
|
||||
.mockResolvedValueOnce({ bytesRead: 12, buffer: Buffer.from('line1\nline2\n') })
|
||||
.mockResolvedValueOnce({ bytesRead: 0 });
|
||||
mockFileHandle.close.mockResolvedValue(undefined);
|
||||
|
||||
mockFs.open.mockResolvedValue(mockFileHandle);
|
||||
|
||||
const result = await headFile('/test/file.txt', 2);
|
||||
|
||||
expect(mockFileHandle.close).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -162,6 +162,12 @@ describe('Path Utilities', () => {
|
||||
expect(result).not.toContain('~');
|
||||
});
|
||||
|
||||
it('expands bare ~ to home directory', () => {
|
||||
const result = expandHome('~');
|
||||
expect(result).not.toContain('~');
|
||||
expect(result.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('leaves other paths unchanged', () => {
|
||||
expect(expandHome('C:/test')).toBe('C:/test');
|
||||
});
|
||||
|
||||
@@ -4,6 +4,49 @@ import * as fs from 'fs/promises';
|
||||
import * as os from 'os';
|
||||
import { isPathWithinAllowedDirectories } from '../path-validation.js';
|
||||
|
||||
/**
|
||||
* Check if the current environment supports symlink creation
|
||||
*/
|
||||
async function checkSymlinkSupport(): Promise<boolean> {
|
||||
const testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'symlink-test-'));
|
||||
try {
|
||||
const targetFile = path.join(testDir, 'target.txt');
|
||||
const linkFile = path.join(testDir, 'link.txt');
|
||||
|
||||
await fs.writeFile(targetFile, 'test');
|
||||
await fs.symlink(targetFile, linkFile);
|
||||
|
||||
// If we get here, symlinks are supported
|
||||
return true;
|
||||
} catch (error) {
|
||||
// EPERM indicates no symlink permissions
|
||||
if ((error as NodeJS.ErrnoException).code === 'EPERM') {
|
||||
return false;
|
||||
}
|
||||
// Other errors might indicate a real problem
|
||||
throw error;
|
||||
} finally {
|
||||
await fs.rm(testDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
// Global variable to store symlink support status
|
||||
let symlinkSupported: boolean | null = null;
|
||||
|
||||
/**
|
||||
* Get cached symlink support status, checking once per test run
|
||||
*/
|
||||
async function getSymlinkSupport(): Promise<boolean> {
|
||||
if (symlinkSupported === null) {
|
||||
symlinkSupported = await checkSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log('\n⚠️ Symlink tests will be skipped - symlink creation not supported in this environment');
|
||||
console.log(' On Windows, enable Developer Mode or run as Administrator to enable symlink tests');
|
||||
}
|
||||
}
|
||||
return symlinkSupported;
|
||||
}
|
||||
|
||||
describe('Path Validation', () => {
|
||||
it('allows exact directory match', () => {
|
||||
const allowed = ['/home/user/project'];
|
||||
@@ -587,6 +630,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('demonstrates symlink race condition allows writing outside allowed directories', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping symlink race condition test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
|
||||
await expect(fs.access(testPath)).rejects.toThrow();
|
||||
@@ -603,6 +652,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('shows timing differences between validation approaches', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping timing validation test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
|
||||
const validation1 = isPathWithinAllowedDirectories(testPath, allowed);
|
||||
@@ -618,6 +673,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('validates directory creation timing', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping directory creation timing test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
const testDir = path.join(allowedDir, 'newdir');
|
||||
|
||||
@@ -632,6 +693,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('demonstrates exclusive file creation behavior', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping exclusive file creation test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
|
||||
await fs.symlink(targetFile, testPath);
|
||||
@@ -644,6 +711,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('should use resolved parent paths for non-existent files', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping resolved parent paths test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
|
||||
const symlinkDir = path.join(allowedDir, 'link');
|
||||
@@ -662,6 +735,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('demonstrates parent directory symlink traversal', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping parent directory symlink traversal test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
const deepPath = path.join(allowedDir, 'sub1', 'sub2', 'file.txt');
|
||||
|
||||
@@ -682,6 +761,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('should prevent race condition between validatePath and file operation', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping race condition prevention test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
const racePath = path.join(allowedDir, 'race-file.txt');
|
||||
const targetFile = path.join(forbiddenDir, 'target.txt');
|
||||
@@ -730,6 +815,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('should handle symlinks that point within allowed directories', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping symlinks within allowed directories test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
const targetFile = path.join(allowedDir, 'target.txt');
|
||||
const symlinkPath = path.join(allowedDir, 'symlink.txt');
|
||||
@@ -756,6 +847,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('should prevent overwriting files through symlinks pointing outside allowed directories', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping symlink overwrite prevention test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
const legitFile = path.join(allowedDir, 'existing.txt');
|
||||
const targetFile = path.join(forbiddenDir, 'target.txt');
|
||||
@@ -786,6 +883,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('demonstrates race condition in read operations', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping race condition in read operations test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
const legitFile = path.join(allowedDir, 'readable.txt');
|
||||
const secretFile = path.join(forbiddenDir, 'secret.txt');
|
||||
@@ -812,6 +915,12 @@ describe('Path Validation', () => {
|
||||
});
|
||||
|
||||
it('verifies rename does not follow symlinks', async () => {
|
||||
const symlinkSupported = await getSymlinkSupport();
|
||||
if (!symlinkSupported) {
|
||||
console.log(' ⏭️ Skipping rename symlink test - symlinks not supported');
|
||||
return;
|
||||
}
|
||||
|
||||
const allowed = [allowedDir];
|
||||
const tempFile = path.join(allowedDir, 'temp.txt');
|
||||
const targetSymlink = path.join(allowedDir, 'target-symlink.txt');
|
||||
|
||||
@@ -12,14 +12,24 @@ import {
|
||||
import fs from "fs/promises";
|
||||
import { createReadStream } from "fs";
|
||||
import path from "path";
|
||||
import os from 'os';
|
||||
import { randomBytes } from 'crypto';
|
||||
import { z } from "zod";
|
||||
import { zodToJsonSchema } from "zod-to-json-schema";
|
||||
import { diffLines, createTwoFilesPatch } from 'diff';
|
||||
import { minimatch } from 'minimatch';
|
||||
import { isPathWithinAllowedDirectories } from './path-validation.js';
|
||||
import { minimatch } from "minimatch";
|
||||
import { normalizePath, expandHome } from './path-utils.js';
|
||||
import { getValidRootDirectories } from './roots-utils.js';
|
||||
import {
|
||||
// Function imports
|
||||
formatSize,
|
||||
validatePath,
|
||||
getFileStats,
|
||||
readFileContent,
|
||||
writeFileContent,
|
||||
searchFilesWithValidation,
|
||||
applyFileEdits,
|
||||
tailFile,
|
||||
headFile,
|
||||
setAllowedDirectories,
|
||||
} from './lib.js';
|
||||
|
||||
// Command line argument parsing
|
||||
const args = process.argv.slice(2);
|
||||
@@ -31,25 +41,14 @@ if (args.length === 0) {
|
||||
console.error("At least one directory must be provided by EITHER method for the server to operate.");
|
||||
}
|
||||
|
||||
// Normalize all paths consistently
|
||||
function normalizePath(p: string): string {
|
||||
return path.normalize(p);
|
||||
}
|
||||
|
||||
function expandHome(filepath: string): string {
|
||||
if (filepath.startsWith('~/') || filepath === '~') {
|
||||
return path.join(os.homedir(), filepath.slice(1));
|
||||
}
|
||||
return filepath;
|
||||
}
|
||||
|
||||
// Store allowed directories in normalized and resolved form
|
||||
let allowedDirectories = await Promise.all(
|
||||
args.map(async (dir) => {
|
||||
const expanded = expandHome(dir);
|
||||
const absolute = path.resolve(expanded);
|
||||
try {
|
||||
// Resolve symlinks in allowed directories during startup
|
||||
// Security: Resolve symlinks in allowed directories during startup
|
||||
// This ensures we know the real paths and can validate against them later
|
||||
const resolved = await fs.realpath(absolute);
|
||||
return normalizePath(resolved);
|
||||
} catch (error) {
|
||||
@@ -61,9 +60,9 @@ let allowedDirectories = await Promise.all(
|
||||
);
|
||||
|
||||
// Validate that all directories exist and are accessible
|
||||
await Promise.all(args.map(async (dir) => {
|
||||
await Promise.all(allowedDirectories.map(async (dir) => {
|
||||
try {
|
||||
const stats = await fs.stat(expandHome(dir));
|
||||
const stats = await fs.stat(dir);
|
||||
if (!stats.isDirectory()) {
|
||||
console.error(`Error: ${dir} is not a directory`);
|
||||
process.exit(1);
|
||||
@@ -74,47 +73,8 @@ await Promise.all(args.map(async (dir) => {
|
||||
}
|
||||
}));
|
||||
|
||||
// Security utilities
|
||||
async function validatePath(requestedPath: string): Promise<string> {
|
||||
const expandedPath = expandHome(requestedPath);
|
||||
const absolute = path.isAbsolute(expandedPath)
|
||||
? path.resolve(expandedPath)
|
||||
: path.resolve(process.cwd(), expandedPath);
|
||||
|
||||
const normalizedRequested = normalizePath(absolute);
|
||||
|
||||
// Check if path is within allowed directories
|
||||
const isAllowed = isPathWithinAllowedDirectories(normalizedRequested, allowedDirectories);
|
||||
if (!isAllowed) {
|
||||
throw new Error(`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
|
||||
// Handle symlinks by checking their real path
|
||||
try {
|
||||
const realPath = await fs.realpath(absolute);
|
||||
const normalizedReal = normalizePath(realPath);
|
||||
if (!isPathWithinAllowedDirectories(normalizedReal, allowedDirectories)) {
|
||||
throw new Error(`Access denied - symlink target outside allowed directories: ${realPath} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
return realPath;
|
||||
} catch (error) {
|
||||
// For new files that don't exist yet, verify parent directory
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
const parentDir = path.dirname(absolute);
|
||||
try {
|
||||
const realParentPath = await fs.realpath(parentDir);
|
||||
const normalizedParent = normalizePath(realParentPath);
|
||||
if (!isPathWithinAllowedDirectories(normalizedParent, allowedDirectories)) {
|
||||
throw new Error(`Access denied - parent directory outside allowed directories: ${realParentPath} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
return absolute;
|
||||
} catch {
|
||||
throw new Error(`Parent directory does not exist: ${parentDir}`);
|
||||
}
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Initialize the global allowedDirectories in lib.ts
|
||||
setAllowedDirectories(allowedDirectories);
|
||||
|
||||
// Schema definitions
|
||||
const ReadTextFileArgsSchema = z.object({
|
||||
@@ -128,7 +88,10 @@ const ReadMediaFileArgsSchema = z.object({
|
||||
});
|
||||
|
||||
const ReadMultipleFilesArgsSchema = z.object({
|
||||
paths: z.array(z.string()),
|
||||
paths: z
|
||||
.array(z.string())
|
||||
.min(1, "At least one file path must be provided")
|
||||
.describe("Array of file paths to read. Each path must be a string pointing to a valid file within allowed directories."),
|
||||
});
|
||||
|
||||
const WriteFileArgsSchema = z.object({
|
||||
@@ -162,6 +125,7 @@ const ListDirectoryWithSizesArgsSchema = z.object({
|
||||
|
||||
const DirectoryTreeArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
excludePatterns: z.array(z.string()).optional().default([])
|
||||
});
|
||||
|
||||
const MoveFileArgsSchema = z.object({
|
||||
@@ -182,16 +146,6 @@ const GetFileInfoArgsSchema = z.object({
|
||||
const ToolInputSchema = ToolSchema.shape.inputSchema;
|
||||
type ToolInput = z.infer<typeof ToolInputSchema>;
|
||||
|
||||
// Metadata snapshot for a single filesystem entry, as produced by getFileStats.
interface FileInfo {
  size: number;          // size in bytes (from fs.Stats.size)
  created: Date;         // birth time (fs.Stats.birthtime)
  modified: Date;        // last content modification (fs.Stats.mtime)
  accessed: Date;        // last access time (fs.Stats.atime)
  isDirectory: boolean;  // true when the entry is a directory
  isFile: boolean;       // true when the entry is a regular file
  permissions: string;   // last three octal digits of the mode, e.g. "644"
}
||||
|
||||
// Server setup
|
||||
const server = new Server(
|
||||
{
|
||||
@@ -205,277 +159,6 @@ const server = new Server(
|
||||
},
|
||||
);
|
||||
|
||||
// Tool implementations
|
||||
async function getFileStats(filePath: string): Promise<FileInfo> {
|
||||
const stats = await fs.stat(filePath);
|
||||
return {
|
||||
size: stats.size,
|
||||
created: stats.birthtime,
|
||||
modified: stats.mtime,
|
||||
accessed: stats.atime,
|
||||
isDirectory: stats.isDirectory(),
|
||||
isFile: stats.isFile(),
|
||||
permissions: stats.mode.toString(8).slice(-3),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Recursively search `rootPath` for entries whose name contains `pattern`
 * as a case-insensitive substring, returning full paths of all matches
 * (files AND directories).
 *
 * Ordering per entry is load-bearing: validate -> exclude-check -> name
 * match -> recurse. Any error (validation failure, unreadable directory)
 * silently skips that entry and, via the catch, the rest of that entry's
 * subtree — deliberate best-effort behavior.
 *
 * @param rootPath - directory to start from; exclude patterns are matched
 *   against paths relative to it
 * @param pattern - substring to look for in entry names (case-insensitive)
 * @param excludePatterns - minimatch globs (dotfiles included); a pattern
 *   without '*' is wrapped so it excludes any directory subtree of that name
 * @returns full paths of all matching entries
 */
async function searchFiles(
  rootPath: string,
  pattern: string,
  excludePatterns: string[] = []
): Promise<string[]> {
  const results: string[] = [];

  async function search(currentPath: string) {
    const entries = await fs.readdir(currentPath, { withFileTypes: true });

    for (const entry of entries) {
      const fullPath = path.join(currentPath, entry.name);

      try {
        // Validate each path before processing (throws on disallowed paths)
        await validatePath(fullPath);

        // Check if path matches any exclude pattern
        const relativePath = path.relative(rootPath, fullPath);
        // NOTE: the callback parameter shadows the outer `pattern` argument;
        // inside this closure `pattern` is the exclude glob, not the search term.
        const shouldExclude = excludePatterns.some(pattern => {
          const globPattern = pattern.includes('*') ? pattern : `**/${pattern}/**`;
          return minimatch(relativePath, globPattern, { dot: true });
        });

        if (shouldExclude) {
          continue;
        }

        // Case-insensitive substring match on the entry name only
        if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
          results.push(fullPath);
        }

        // Descend into subdirectories (excluded dirs were skipped above)
        if (entry.isDirectory()) {
          await search(fullPath);
        }
      } catch (error) {
        // Skip invalid paths during search
        continue;
      }
    }
  }

  await search(rootPath);
  return results;
}
|
||||
|
||||
// file editing and diffing utilities
|
||||
function normalizeLineEndings(text: string): string {
|
||||
return text.replace(/\r\n/g, '\n');
|
||||
}
|
||||
|
||||
function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
|
||||
// Ensure consistent line endings for diff
|
||||
const normalizedOriginal = normalizeLineEndings(originalContent);
|
||||
const normalizedNew = normalizeLineEndings(newContent);
|
||||
|
||||
return createTwoFilesPatch(
|
||||
filepath,
|
||||
filepath,
|
||||
normalizedOriginal,
|
||||
normalizedNew,
|
||||
'original',
|
||||
'modified'
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Apply a sequence of {oldText, newText} replacements to a file and return a
 * unified diff of the overall change, wrapped in a fenced ```diff block.
 *
 * Matching strategy, per edit (order matters — each edit sees the result of
 * the previous one):
 *  1. Exact substring match of the LF-normalized oldText (first occurrence).
 *  2. Fallback: line-by-line match ignoring leading/trailing whitespace,
 *     re-indenting the replacement to fit the matched location.
 *
 * @param filePath - file to edit (read and, unless dryRun, rewritten)
 * @param edits - replacements applied sequentially
 * @param dryRun - when true, compute the diff but do not write the file
 * @returns the fenced unified diff
 * @throws Error when an edit's oldText cannot be located either way
 */
async function applyFileEdits(
  filePath: string,
  edits: Array<{oldText: string, newText: string}>,
  dryRun = false
): Promise<string> {
  // Read file content and normalize line endings
  const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'));

  // Apply edits sequentially
  let modifiedContent = content;
  for (const edit of edits) {
    const normalizedOld = normalizeLineEndings(edit.oldText);
    const normalizedNew = normalizeLineEndings(edit.newText);

    // If exact match exists, use it (String.replace swaps only the first hit)
    if (modifiedContent.includes(normalizedOld)) {
      modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
      continue;
    }

    // Otherwise, try line-by-line matching with flexibility for whitespace
    const oldLines = normalizedOld.split('\n');
    const contentLines = modifiedContent.split('\n');
    let matchFound = false;

    // Slide a window of oldLines.length over the file, first match wins
    for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
      const potentialMatch = contentLines.slice(i, i + oldLines.length);

      // Compare lines with normalized whitespace (trim both sides)
      const isMatch = oldLines.every((oldLine, j) => {
        const contentLine = potentialMatch[j];
        return oldLine.trim() === contentLine.trim();
      });

      if (isMatch) {
        // Preserve original indentation of first line
        const originalIndent = contentLines[i].match(/^\s*/)?.[0] || '';
        const newLines = normalizedNew.split('\n').map((line, j) => {
          if (j === 0) return originalIndent + line.trimStart();
          // For subsequent lines, try to preserve relative indentation:
          // shift by the indent delta between new and old at the same index.
          // `?.` guards the case where newText has more lines than oldText.
          const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || '';
          const newIndent = line.match(/^\s*/)?.[0] || '';
          if (oldIndent && newIndent) {
            const relativeIndent = newIndent.length - oldIndent.length;
            // Math.max(0, …) never dedents below the anchor indentation
            return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart();
          }
          return line;
        });

        contentLines.splice(i, oldLines.length, ...newLines);
        modifiedContent = contentLines.join('\n');
        matchFound = true;
        break;
      }
    }

    if (!matchFound) {
      throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
    }
  }

  // Create unified diff
  const diff = createUnifiedDiff(content, modifiedContent, filePath);

  // Format diff with appropriate number of backticks: grow the fence until it
  // is longer than any backtick run inside the diff body itself
  let numBackticks = 3;
  while (diff.includes('`'.repeat(numBackticks))) {
    numBackticks++;
  }
  const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;

  if (!dryRun) {
    // Security: Use atomic rename to prevent race conditions where symlinks
    // could be created between validation and write. Rename operations
    // replace the target file atomically and don't follow symlinks.
    const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`;
    try {
      await fs.writeFile(tempPath, modifiedContent, 'utf-8');
      await fs.rename(tempPath, filePath);
    } catch (error) {
      // Best-effort cleanup of the temp file; the original error is what matters
      try {
        await fs.unlink(tempPath);
      } catch {}
      throw error;
    }
  }

  return formattedDiff;
}
||||
|
||||
// Helper functions
|
||||
function formatSize(bytes: number): string {
|
||||
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
if (bytes === 0) return '0 B';
|
||||
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
if (i === 0) return `${bytes} ${units[i]}`;
|
||||
|
||||
return `${(bytes / Math.pow(1024, i)).toFixed(2)} ${units[i]}`;
|
||||
}
|
||||
|
||||
// Memory-efficient implementation to get the last N lines of a file
|
||||
/**
 * Return the last `numLines` lines of a file without reading it all into
 * memory: reads fixed-size chunks backwards from EOF until enough lines are
 * collected.
 *
 * Line endings are normalized to LF before splitting; the returned string is
 * LF-joined with no trailing newline.
 *
 * NOTE(review): chunk boundaries are byte offsets, so a multi-byte UTF-8
 * character split across two chunks would decode incorrectly — TODO confirm
 * whether non-ASCII tails matter for callers.
 *
 * @param filePath - file to read
 * @param numLines - maximum number of trailing lines to return
 * @returns the last lines joined with '\n' ('' for an empty file)
 */
async function tailFile(filePath: string, numLines: number): Promise<string> {
  const CHUNK_SIZE = 1024; // Read 1KB at a time
  const stats = await fs.stat(filePath);
  const fileSize = stats.size;

  if (fileSize === 0) return '';

  // Open file for reading
  const fileHandle = await fs.open(filePath, 'r');
  try {
    const lines: string[] = [];
    let position = fileSize;        // byte offset we have read back to
    let chunk = Buffer.alloc(CHUNK_SIZE);
    let linesFound = 0;
    let remainingText = '';         // carry-over: incomplete first line of the previous (later) chunk

    // Read chunks from the end of the file until we have enough lines
    while (position > 0 && linesFound < numLines) {
      const size = Math.min(CHUNK_SIZE, position);
      position -= size;

      const { bytesRead } = await fileHandle.read(chunk, 0, size, position);
      if (!bytesRead) break;

      // Get the chunk as a string and prepend any remaining text from previous iteration
      // (the carry-over comes LATER in the file, so it is appended after this chunk)
      const readData = chunk.slice(0, bytesRead).toString('utf-8');
      const chunkText = readData + remainingText;

      // Split by newlines and count
      const chunkLines = normalizeLineEndings(chunkText).split('\n');

      // If this isn't the end of the file, the first line is likely incomplete
      // Save it to prepend to the next chunk
      if (position > 0) {
        remainingText = chunkLines[0];
        chunkLines.shift(); // Remove the first (incomplete) line
      }

      // Add lines to our result (up to the number we need), newest-first
      // traversal with unshift keeps the final order file-forward
      for (let i = chunkLines.length - 1; i >= 0 && linesFound < numLines; i--) {
        lines.unshift(chunkLines[i]);
        linesFound++;
      }
    }

    return lines.join('\n');
  } finally {
    await fileHandle.close();
  }
}
|
||||
|
||||
// New function to get the first N lines of a file
|
||||
/**
 * Return the first `numLines` lines of a file without reading it all into
 * memory: reads forward in 1KB chunks, splitting off complete lines as they
 * arrive, and stops as soon as enough lines are collected.
 *
 * The result is LF-joined; a final partial line (no trailing '\n') counts as
 * a line only if the quota is not yet filled.
 *
 * NOTE(review): splits on '\n' only, so CRLF input keeps its '\r'; and a
 * multi-byte UTF-8 character spanning a chunk boundary would decode
 * incorrectly — TODO confirm callers only pass LF/ASCII-safe files.
 *
 * @param filePath - file to read
 * @param numLines - maximum number of leading lines to return
 * @returns the first lines joined with '\n'
 */
async function headFile(filePath: string, numLines: number): Promise<string> {
  const fileHandle = await fs.open(filePath, 'r');
  try {
    const lines: string[] = [];
    let buffer = '';                    // text read so far that isn't yet a complete line
    let bytesRead = 0;                  // absolute read offset into the file
    const chunk = Buffer.alloc(1024); // 1KB buffer

    // Read chunks and count lines until we have enough or reach EOF
    while (lines.length < numLines) {
      const result = await fileHandle.read(chunk, 0, chunk.length, bytesRead);
      if (result.bytesRead === 0) break; // End of file
      bytesRead += result.bytesRead;
      buffer += chunk.slice(0, result.bytesRead).toString('utf-8');

      // Split off every complete line currently in the buffer; keep the
      // trailing partial line (after the last '\n') for the next iteration
      const newLineIndex = buffer.lastIndexOf('\n');
      if (newLineIndex !== -1) {
        const completeLines = buffer.slice(0, newLineIndex).split('\n');
        buffer = buffer.slice(newLineIndex + 1);
        for (const line of completeLines) {
          lines.push(line);
          if (lines.length >= numLines) break;
        }
      }
    }

    // If there is leftover content and we still need lines, add it
    if (buffer.length > 0 && lines.length < numLines) {
      lines.push(buffer);
    }

    return lines.join('\n');
  } finally {
    await fileHandle.close();
  }
}
|
||||
|
||||
// Reads a file as a stream of buffers, concatenates them, and then encodes
|
||||
// the result to a Base64 string. This is a memory-efficient way to handle
|
||||
// binary data from a stream before the final encoding.
|
||||
@@ -597,9 +280,9 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
name: "search_files",
|
||||
description:
|
||||
"Recursively search for files and directories matching a pattern. " +
|
||||
"Searches through all subdirectories from the starting path. The search " +
|
||||
"is case-insensitive and matches partial names. Returns full paths to all " +
|
||||
"matching items. Great for finding files when you don't know their exact location. " +
|
||||
"The patterns should be glob-style patterns that match paths relative to the working directory. " +
|
||||
"Use pattern like '*.ext' to match files in current directory, and '**/*.ext' to match files in all subdirectories. " +
|
||||
"Returns full paths to all matching items. Great for finding files when you don't know their exact location. " +
|
||||
"Only searches within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(SearchFilesArgsSchema) as ToolInput,
|
||||
},
|
||||
@@ -615,8 +298,10 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
{
|
||||
name: "list_allowed_directories",
|
||||
description:
|
||||
"Returns the list of root directories that this server is allowed to access. " +
|
||||
"Use this to understand which directories are available before trying to access files. ",
|
||||
"Returns the list of directories that this server is allowed to access. " +
|
||||
"Subdirectories within these allowed directories are also accessible. " +
|
||||
"Use this to understand which directories and their nested paths are available " +
|
||||
"before trying to access files.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {},
|
||||
@@ -660,8 +345,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
content: [{ type: "text", text: headContent }],
|
||||
};
|
||||
}
|
||||
|
||||
const content = await fs.readFile(validPath, "utf-8");
|
||||
const content = await readFileContent(validPath);
|
||||
return {
|
||||
content: [{ type: "text", text: content }],
|
||||
};
|
||||
@@ -708,7 +392,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
parsed.data.paths.map(async (filePath: string) => {
|
||||
try {
|
||||
const validPath = await validatePath(filePath);
|
||||
const content = await fs.readFile(validPath, "utf-8");
|
||||
const content = await readFileContent(validPath);
|
||||
return `${filePath}:\n${content}\n`;
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
@@ -727,31 +411,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
throw new Error(`Invalid arguments for write_file: ${parsed.error}`);
|
||||
}
|
||||
const validPath = await validatePath(parsed.data.path);
|
||||
|
||||
try {
|
||||
// Security: 'wx' flag ensures exclusive creation - fails if file/symlink exists,
|
||||
// preventing writes through pre-existing symlinks
|
||||
await fs.writeFile(validPath, parsed.data.content, { encoding: "utf-8", flag: 'wx' });
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'EEXIST') {
|
||||
// Security: Use atomic rename to prevent race conditions where symlinks
|
||||
// could be created between validation and write. Rename operations
|
||||
// replace the target file atomically and don't follow symlinks.
|
||||
const tempPath = `${validPath}.${randomBytes(16).toString('hex')}.tmp`;
|
||||
try {
|
||||
await fs.writeFile(tempPath, parsed.data.content, 'utf-8');
|
||||
await fs.rename(tempPath, validPath);
|
||||
} catch (renameError) {
|
||||
try {
|
||||
await fs.unlink(tempPath);
|
||||
} catch {}
|
||||
throw renameError;
|
||||
}
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
await writeFileContent(validPath, parsed.data.content);
|
||||
return {
|
||||
content: [{ type: "text", text: `Successfully wrote to ${parsed.data.path}` }],
|
||||
};
|
||||
@@ -868,43 +528,58 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`);
|
||||
}
|
||||
|
||||
interface TreeEntry {
|
||||
name: string;
|
||||
type: 'file' | 'directory';
|
||||
children?: TreeEntry[];
|
||||
}
|
||||
interface TreeEntry {
|
||||
name: string;
|
||||
type: 'file' | 'directory';
|
||||
children?: TreeEntry[];
|
||||
}
|
||||
const rootPath = parsed.data.path;
|
||||
|
||||
async function buildTree(currentPath: string): Promise<TreeEntry[]> {
|
||||
const validPath = await validatePath(currentPath);
|
||||
const entries = await fs.readdir(validPath, {withFileTypes: true});
|
||||
const result: TreeEntry[] = [];
|
||||
async function buildTree(currentPath: string, excludePatterns: string[] = []): Promise<TreeEntry[]> {
|
||||
const validPath = await validatePath(currentPath);
|
||||
const entries = await fs.readdir(validPath, {withFileTypes: true});
|
||||
const result: TreeEntry[] = [];
|
||||
|
||||
for (const entry of entries) {
|
||||
const entryData: TreeEntry = {
|
||||
name: entry.name,
|
||||
type: entry.isDirectory() ? 'directory' : 'file'
|
||||
};
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
const subPath = path.join(currentPath, entry.name);
|
||||
entryData.children = await buildTree(subPath);
|
||||
for (const entry of entries) {
|
||||
const relativePath = path.relative(rootPath, path.join(currentPath, entry.name));
|
||||
const shouldExclude = excludePatterns.some(pattern => {
|
||||
if (pattern.includes('*')) {
|
||||
return minimatch(relativePath, pattern, {dot: true});
|
||||
}
|
||||
// For files: match exact name or as part of path
|
||||
// For directories: match as directory path
|
||||
return minimatch(relativePath, pattern, {dot: true}) ||
|
||||
minimatch(relativePath, `**/${pattern}`, {dot: true}) ||
|
||||
minimatch(relativePath, `**/${pattern}/**`, {dot: true});
|
||||
});
|
||||
if (shouldExclude)
|
||||
continue;
|
||||
|
||||
result.push(entryData);
|
||||
const entryData: TreeEntry = {
|
||||
name: entry.name,
|
||||
type: entry.isDirectory() ? 'directory' : 'file'
|
||||
};
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
const subPath = path.join(currentPath, entry.name);
|
||||
entryData.children = await buildTree(subPath, excludePatterns);
|
||||
}
|
||||
|
||||
return result;
|
||||
result.push(entryData);
|
||||
}
|
||||
|
||||
const treeData = await buildTree(parsed.data.path);
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify(treeData, null, 2)
|
||||
}],
|
||||
};
|
||||
return result;
|
||||
}
|
||||
|
||||
const treeData = await buildTree(rootPath, parsed.data.excludePatterns);
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify(treeData, null, 2)
|
||||
}],
|
||||
};
|
||||
}
|
||||
|
||||
case "move_file": {
|
||||
const parsed = MoveFileArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
@@ -924,7 +599,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
throw new Error(`Invalid arguments for search_files: ${parsed.error}`);
|
||||
}
|
||||
const validPath = await validatePath(parsed.data.path);
|
||||
const results = await searchFiles(validPath, parsed.data.pattern, parsed.data.excludePatterns);
|
||||
const results = await searchFilesWithValidation(validPath, parsed.data.pattern, allowedDirectories, { excludePatterns: parsed.data.excludePatterns });
|
||||
return {
|
||||
content: [{ type: "text", text: results.length > 0 ? results.join("\n") : "No matches found" }],
|
||||
};
|
||||
@@ -970,6 +645,7 @@ async function updateAllowedDirectoriesFromRoots(requestedRoots: Root[]) {
|
||||
const validatedRootDirs = await getValidRootDirectories(requestedRoots);
|
||||
if (validatedRootDirs.length > 0) {
|
||||
allowedDirectories = [...validatedRootDirs];
|
||||
setAllowedDirectories(allowedDirectories); // Update the global state in lib.ts
|
||||
console.error(`Updated allowed directories from MCP roots: ${validatedRootDirs.length} valid directories`);
|
||||
} else {
|
||||
console.error("No valid root directories provided by client");
|
||||
|
||||
392
src/filesystem/lib.ts
Normal file
392
src/filesystem/lib.ts
Normal file
@@ -0,0 +1,392 @@
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import os from 'os';
|
||||
import { randomBytes } from 'crypto';
|
||||
import { diffLines, createTwoFilesPatch } from 'diff';
|
||||
import { minimatch } from 'minimatch';
|
||||
import { normalizePath, expandHome } from './path-utils.js';
|
||||
import { isPathWithinAllowedDirectories } from './path-validation.js';
|
||||
|
||||
// Global allowed directories - set by the main module
|
||||
let allowedDirectories: string[] = [];
|
||||
|
||||
// Function to set allowed directories from the main module
|
||||
export function setAllowedDirectories(directories: string[]): void {
|
||||
allowedDirectories = [...directories];
|
||||
}
|
||||
|
||||
// Function to get current allowed directories
|
||||
export function getAllowedDirectories(): string[] {
|
||||
return [...allowedDirectories];
|
||||
}
|
||||
|
||||
// Type definitions
|
||||
interface FileInfo {
|
||||
size: number;
|
||||
created: Date;
|
||||
modified: Date;
|
||||
accessed: Date;
|
||||
isDirectory: boolean;
|
||||
isFile: boolean;
|
||||
permissions: string;
|
||||
}
|
||||
|
||||
export interface SearchOptions {
|
||||
excludePatterns?: string[];
|
||||
}
|
||||
|
||||
export interface SearchResult {
|
||||
path: string;
|
||||
isDirectory: boolean;
|
||||
}
|
||||
|
||||
// Pure Utility Functions
|
||||
export function formatSize(bytes: number): string {
|
||||
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
if (bytes === 0) return '0 B';
|
||||
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
|
||||
if (i < 0 || i === 0) return `${bytes} ${units[0]}`;
|
||||
|
||||
const unitIndex = Math.min(i, units.length - 1);
|
||||
return `${(bytes / Math.pow(1024, unitIndex)).toFixed(2)} ${units[unitIndex]}`;
|
||||
}
|
||||
|
||||
export function normalizeLineEndings(text: string): string {
|
||||
return text.replace(/\r\n/g, '\n');
|
||||
}
|
||||
|
||||
export function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
|
||||
// Ensure consistent line endings for diff
|
||||
const normalizedOriginal = normalizeLineEndings(originalContent);
|
||||
const normalizedNew = normalizeLineEndings(newContent);
|
||||
|
||||
return createTwoFilesPatch(
|
||||
filepath,
|
||||
filepath,
|
||||
normalizedOriginal,
|
||||
normalizedNew,
|
||||
'original',
|
||||
'modified'
|
||||
);
|
||||
}
|
||||
|
||||
// Security & Validation Functions
|
||||
export async function validatePath(requestedPath: string): Promise<string> {
|
||||
const expandedPath = expandHome(requestedPath);
|
||||
const absolute = path.isAbsolute(expandedPath)
|
||||
? path.resolve(expandedPath)
|
||||
: path.resolve(process.cwd(), expandedPath);
|
||||
|
||||
const normalizedRequested = normalizePath(absolute);
|
||||
|
||||
// Security: Check if path is within allowed directories before any file operations
|
||||
const isAllowed = isPathWithinAllowedDirectories(normalizedRequested, allowedDirectories);
|
||||
if (!isAllowed) {
|
||||
throw new Error(`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
|
||||
// Security: Handle symlinks by checking their real path to prevent symlink attacks
|
||||
// This prevents attackers from creating symlinks that point outside allowed directories
|
||||
try {
|
||||
const realPath = await fs.realpath(absolute);
|
||||
const normalizedReal = normalizePath(realPath);
|
||||
if (!isPathWithinAllowedDirectories(normalizedReal, allowedDirectories)) {
|
||||
throw new Error(`Access denied - symlink target outside allowed directories: ${realPath} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
return realPath;
|
||||
} catch (error) {
|
||||
// Security: For new files that don't exist yet, verify parent directory
|
||||
// This ensures we can't create files in unauthorized locations
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
const parentDir = path.dirname(absolute);
|
||||
try {
|
||||
const realParentPath = await fs.realpath(parentDir);
|
||||
const normalizedParent = normalizePath(realParentPath);
|
||||
if (!isPathWithinAllowedDirectories(normalizedParent, allowedDirectories)) {
|
||||
throw new Error(`Access denied - parent directory outside allowed directories: ${realParentPath} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
return absolute;
|
||||
} catch {
|
||||
throw new Error(`Parent directory does not exist: ${parentDir}`);
|
||||
}
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// File Operations
|
||||
export async function getFileStats(filePath: string): Promise<FileInfo> {
|
||||
const stats = await fs.stat(filePath);
|
||||
return {
|
||||
size: stats.size,
|
||||
created: stats.birthtime,
|
||||
modified: stats.mtime,
|
||||
accessed: stats.atime,
|
||||
isDirectory: stats.isDirectory(),
|
||||
isFile: stats.isFile(),
|
||||
permissions: stats.mode.toString(8).slice(-3),
|
||||
};
|
||||
}
|
||||
|
||||
export async function readFileContent(filePath: string, encoding: string = 'utf-8'): Promise<string> {
|
||||
return await fs.readFile(filePath, encoding as BufferEncoding);
|
||||
}
|
||||
|
||||
export async function writeFileContent(filePath: string, content: string): Promise<void> {
|
||||
try {
|
||||
// Security: 'wx' flag ensures exclusive creation - fails if file/symlink exists,
|
||||
// preventing writes through pre-existing symlinks
|
||||
await fs.writeFile(filePath, content, { encoding: "utf-8", flag: 'wx' });
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'EEXIST') {
|
||||
// Security: Use atomic rename to prevent race conditions where symlinks
|
||||
// could be created between validation and write. Rename operations
|
||||
// replace the target file atomically and don't follow symlinks.
|
||||
const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`;
|
||||
try {
|
||||
await fs.writeFile(tempPath, content, 'utf-8');
|
||||
await fs.rename(tempPath, filePath);
|
||||
} catch (renameError) {
|
||||
try {
|
||||
await fs.unlink(tempPath);
|
||||
} catch {}
|
||||
throw renameError;
|
||||
}
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// File Editing Functions
|
||||
interface FileEdit {
|
||||
oldText: string;
|
||||
newText: string;
|
||||
}
|
||||
|
||||
export async function applyFileEdits(
|
||||
filePath: string,
|
||||
edits: FileEdit[],
|
||||
dryRun: boolean = false
|
||||
): Promise<string> {
|
||||
// Read file content and normalize line endings
|
||||
const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'));
|
||||
|
||||
// Apply edits sequentially
|
||||
let modifiedContent = content;
|
||||
for (const edit of edits) {
|
||||
const normalizedOld = normalizeLineEndings(edit.oldText);
|
||||
const normalizedNew = normalizeLineEndings(edit.newText);
|
||||
|
||||
// If exact match exists, use it
|
||||
if (modifiedContent.includes(normalizedOld)) {
|
||||
modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Otherwise, try line-by-line matching with flexibility for whitespace
|
||||
const oldLines = normalizedOld.split('\n');
|
||||
const contentLines = modifiedContent.split('\n');
|
||||
let matchFound = false;
|
||||
|
||||
for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
|
||||
const potentialMatch = contentLines.slice(i, i + oldLines.length);
|
||||
|
||||
// Compare lines with normalized whitespace
|
||||
const isMatch = oldLines.every((oldLine, j) => {
|
||||
const contentLine = potentialMatch[j];
|
||||
return oldLine.trim() === contentLine.trim();
|
||||
});
|
||||
|
||||
if (isMatch) {
|
||||
// Preserve original indentation of first line
|
||||
const originalIndent = contentLines[i].match(/^\s*/)?.[0] || '';
|
||||
const newLines = normalizedNew.split('\n').map((line, j) => {
|
||||
if (j === 0) return originalIndent + line.trimStart();
|
||||
// For subsequent lines, try to preserve relative indentation
|
||||
const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || '';
|
||||
const newIndent = line.match(/^\s*/)?.[0] || '';
|
||||
if (oldIndent && newIndent) {
|
||||
const relativeIndent = newIndent.length - oldIndent.length;
|
||||
return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart();
|
||||
}
|
||||
return line;
|
||||
});
|
||||
|
||||
contentLines.splice(i, oldLines.length, ...newLines);
|
||||
modifiedContent = contentLines.join('\n');
|
||||
matchFound = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!matchFound) {
|
||||
throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Create unified diff
|
||||
const diff = createUnifiedDiff(content, modifiedContent, filePath);
|
||||
|
||||
// Format diff with appropriate number of backticks
|
||||
let numBackticks = 3;
|
||||
while (diff.includes('`'.repeat(numBackticks))) {
|
||||
numBackticks++;
|
||||
}
|
||||
const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;
|
||||
|
||||
if (!dryRun) {
|
||||
// Security: Use atomic rename to prevent race conditions where symlinks
|
||||
// could be created between validation and write. Rename operations
|
||||
// replace the target file atomically and don't follow symlinks.
|
||||
const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`;
|
||||
try {
|
||||
await fs.writeFile(tempPath, modifiedContent, 'utf-8');
|
||||
await fs.rename(tempPath, filePath);
|
||||
} catch (error) {
|
||||
try {
|
||||
await fs.unlink(tempPath);
|
||||
} catch {}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
return formattedDiff;
|
||||
}
|
||||
|
||||
// Memory-efficient implementation to get the last N lines of a file
|
||||
export async function tailFile(filePath: string, numLines: number): Promise<string> {
|
||||
const CHUNK_SIZE = 1024; // Read 1KB at a time
|
||||
const stats = await fs.stat(filePath);
|
||||
const fileSize = stats.size;
|
||||
|
||||
if (fileSize === 0) return '';
|
||||
|
||||
// Open file for reading
|
||||
const fileHandle = await fs.open(filePath, 'r');
|
||||
try {
|
||||
const lines: string[] = [];
|
||||
let position = fileSize;
|
||||
let chunk = Buffer.alloc(CHUNK_SIZE);
|
||||
let linesFound = 0;
|
||||
let remainingText = '';
|
||||
|
||||
// Read chunks from the end of the file until we have enough lines
|
||||
while (position > 0 && linesFound < numLines) {
|
||||
const size = Math.min(CHUNK_SIZE, position);
|
||||
position -= size;
|
||||
|
||||
const { bytesRead } = await fileHandle.read(chunk, 0, size, position);
|
||||
if (!bytesRead) break;
|
||||
|
||||
// Get the chunk as a string and prepend any remaining text from previous iteration
|
||||
const readData = chunk.slice(0, bytesRead).toString('utf-8');
|
||||
const chunkText = readData + remainingText;
|
||||
|
||||
// Split by newlines and count
|
||||
const chunkLines = normalizeLineEndings(chunkText).split('\n');
|
||||
|
||||
// If this isn't the end of the file, the first line is likely incomplete
|
||||
// Save it to prepend to the next chunk
|
||||
if (position > 0) {
|
||||
remainingText = chunkLines[0];
|
||||
chunkLines.shift(); // Remove the first (incomplete) line
|
||||
}
|
||||
|
||||
// Add lines to our result (up to the number we need)
|
||||
for (let i = chunkLines.length - 1; i >= 0 && linesFound < numLines; i--) {
|
||||
lines.unshift(chunkLines[i]);
|
||||
linesFound++;
|
||||
}
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
} finally {
|
||||
await fileHandle.close();
|
||||
}
|
||||
}
|
||||
|
||||
// New function to get the first N lines of a file
|
||||
export async function headFile(filePath: string, numLines: number): Promise<string> {
|
||||
const fileHandle = await fs.open(filePath, 'r');
|
||||
try {
|
||||
const lines: string[] = [];
|
||||
let buffer = '';
|
||||
let bytesRead = 0;
|
||||
const chunk = Buffer.alloc(1024); // 1KB buffer
|
||||
|
||||
// Read chunks and count lines until we have enough or reach EOF
|
||||
while (lines.length < numLines) {
|
||||
const result = await fileHandle.read(chunk, 0, chunk.length, bytesRead);
|
||||
if (result.bytesRead === 0) break; // End of file
|
||||
bytesRead += result.bytesRead;
|
||||
buffer += chunk.slice(0, result.bytesRead).toString('utf-8');
|
||||
|
||||
const newLineIndex = buffer.lastIndexOf('\n');
|
||||
if (newLineIndex !== -1) {
|
||||
const completeLines = buffer.slice(0, newLineIndex).split('\n');
|
||||
buffer = buffer.slice(newLineIndex + 1);
|
||||
for (const line of completeLines) {
|
||||
lines.push(line);
|
||||
if (lines.length >= numLines) break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If there is leftover content and we still need lines, add it
|
||||
if (buffer.length > 0 && lines.length < numLines) {
|
||||
lines.push(buffer);
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
} finally {
|
||||
await fileHandle.close();
|
||||
}
|
||||
}
|
||||
|
||||
export async function searchFilesWithValidation(
|
||||
rootPath: string,
|
||||
pattern: string,
|
||||
allowedDirectories: string[],
|
||||
options: SearchOptions = {}
|
||||
): Promise<string[]> {
|
||||
const { excludePatterns = [] } = options;
|
||||
const results: string[] = [];
|
||||
|
||||
async function search(currentPath: string) {
|
||||
const entries = await fs.readdir(currentPath, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(currentPath, entry.name);
|
||||
|
||||
try {
|
||||
await validatePath(fullPath);
|
||||
|
||||
const relativePath = path.relative(rootPath, fullPath);
|
||||
const shouldExclude = excludePatterns.some(excludePattern =>
|
||||
minimatch(relativePath, excludePattern, { dot: true })
|
||||
);
|
||||
|
||||
if (shouldExclude) continue;
|
||||
|
||||
// Use glob matching for the search pattern
|
||||
if (minimatch(relativePath, pattern, { dot: true })) {
|
||||
results.push(fullPath);
|
||||
}
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await search(fullPath);
|
||||
}
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await search(rootPath);
|
||||
return results;
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@modelcontextprotocol/server-filesystem",
|
||||
"version": "0.6.2",
|
||||
"version": "0.6.3",
|
||||
"description": "MCP server for filesystem access",
|
||||
"license": "MIT",
|
||||
"author": "Anthropic, PBC (https://anthropic.com)",
|
||||
|
||||
@@ -68,10 +68,19 @@ export function isPathWithinAllowedDirectories(absolutePath: string, allowedDire
|
||||
}
|
||||
|
||||
// Special case for root directory to avoid double slash
|
||||
// On Windows, we need to check if both paths are on the same drive
|
||||
if (normalizedDir === path.sep) {
|
||||
return normalizedPath.startsWith(path.sep);
|
||||
}
|
||||
|
||||
// On Windows, also check for drive root (e.g., "C:\")
|
||||
if (path.sep === '\\' && normalizedDir.match(/^[A-Za-z]:\\?$/)) {
|
||||
// Ensure both paths are on the same drive
|
||||
const dirDrive = normalizedDir.charAt(0).toLowerCase();
|
||||
const pathDrive = normalizedPath.charAt(0).toLowerCase();
|
||||
return pathDrive === dirDrive && normalizedPath.startsWith(normalizedDir.replace(/\\?$/, '\\'));
|
||||
}
|
||||
|
||||
return normalizedPath.startsWith(normalizedDir + path.sep);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,10 +57,12 @@ Please note that mcp-server-git is currently in early development. The functiona
|
||||
- Returns: Confirmation of reset operation
|
||||
|
||||
8. `git_log`
|
||||
- Shows the commit logs
|
||||
- Shows the commit logs with optional date filtering
|
||||
- Inputs:
|
||||
- `repo_path` (string): Path to Git repository
|
||||
- `max_count` (number, optional): Maximum number of commits to show (default: 10)
|
||||
- `start_timestamp` (string, optional): Start timestamp for filtering commits. Accepts ISO 8601 format (e.g., '2024-01-15T14:30:25'), relative dates (e.g., '2 weeks ago', 'yesterday'), or absolute dates (e.g., '2024-01-15', 'Jan 15 2024')
|
||||
- `end_timestamp` (string, optional): End timestamp for filtering commits. Accepts ISO 8601 format (e.g., '2024-01-15T14:30:25'), relative dates (e.g., '2 weeks ago', 'yesterday'), or absolute dates (e.g., '2024-01-15', 'Jan 15 2024')
|
||||
- Returns: Array of commit entries with hash, author, date, and message
|
||||
|
||||
9. `git_create_branch`
|
||||
|
||||
@@ -48,6 +48,14 @@ class GitReset(BaseModel):
|
||||
class GitLog(BaseModel):
|
||||
repo_path: str
|
||||
max_count: int = 10
|
||||
start_timestamp: Optional[str] = Field(
|
||||
None,
|
||||
description="Start timestamp for filtering commits. Accepts: ISO 8601 format (e.g., '2024-01-15T14:30:25'), relative dates (e.g., '2 weeks ago', 'yesterday'), or absolute dates (e.g., '2024-01-15', 'Jan 15 2024')"
|
||||
)
|
||||
end_timestamp: Optional[str] = Field(
|
||||
None,
|
||||
description="End timestamp for filtering commits. Accepts: ISO 8601 format (e.g., '2024-01-15T14:30:25'), relative dates (e.g., '2 weeks ago', 'yesterday'), or absolute dates (e.g., '2024-01-15', 'Jan 15 2024')"
|
||||
)
|
||||
|
||||
class GitCreateBranch(BaseModel):
|
||||
repo_path: str
|
||||
@@ -83,6 +91,7 @@ class GitBranch(BaseModel):
|
||||
description="The commit sha that branch should NOT contain. Do not pass anything to this param if no commit sha is specified",
|
||||
)
|
||||
|
||||
|
||||
class GitTools(str, Enum):
|
||||
STATUS = "git_status"
|
||||
DIFF_UNSTAGED = "git_diff_unstaged"
|
||||
@@ -125,17 +134,41 @@ def git_reset(repo: git.Repo) -> str:
|
||||
repo.index.reset()
|
||||
return "All staged changes reset"
|
||||
|
||||
def git_log(repo: git.Repo, max_count: int = 10) -> list[str]:
|
||||
commits = list(repo.iter_commits(max_count=max_count))
|
||||
log = []
|
||||
for commit in commits:
|
||||
log.append(
|
||||
f"Commit: {commit.hexsha!r}\n"
|
||||
f"Author: {commit.author!r}\n"
|
||||
f"Date: {commit.authored_datetime}\n"
|
||||
f"Message: {commit.message!r}\n"
|
||||
)
|
||||
return log
|
||||
def git_log(repo: git.Repo, max_count: int = 10, start_timestamp: Optional[str] = None, end_timestamp: Optional[str] = None) -> list[str]:
|
||||
if start_timestamp or end_timestamp:
|
||||
# Use git log command with date filtering
|
||||
args = []
|
||||
if start_timestamp:
|
||||
args.extend(['--since', start_timestamp])
|
||||
if end_timestamp:
|
||||
args.extend(['--until', end_timestamp])
|
||||
args.extend(['--format=%H%n%an%n%ad%n%s%n'])
|
||||
|
||||
log_output = repo.git.log(*args).split('\n')
|
||||
|
||||
log = []
|
||||
# Process commits in groups of 4 (hash, author, date, message)
|
||||
for i in range(0, len(log_output), 4):
|
||||
if i + 3 < len(log_output) and len(log) < max_count:
|
||||
log.append(
|
||||
f"Commit: {log_output[i]}\n"
|
||||
f"Author: {log_output[i+1]}\n"
|
||||
f"Date: {log_output[i+2]}\n"
|
||||
f"Message: {log_output[i+3]}\n"
|
||||
)
|
||||
return log
|
||||
else:
|
||||
# Use existing logic for simple log without date filtering
|
||||
commits = list(repo.iter_commits(max_count=max_count))
|
||||
log = []
|
||||
for commit in commits:
|
||||
log.append(
|
||||
f"Commit: {commit.hexsha!r}\n"
|
||||
f"Author: {commit.author!r}\n"
|
||||
f"Date: {commit.authored_datetime}\n"
|
||||
f"Message: {commit.message!r}\n"
|
||||
)
|
||||
return log
|
||||
|
||||
def git_create_branch(repo: git.Repo, branch_name: str, base_branch: str | None = None) -> str:
|
||||
if base_branch:
|
||||
@@ -203,6 +236,7 @@ def git_branch(repo: git.Repo, branch_type: str, contains: str | None = None, no
|
||||
|
||||
return branch_info
|
||||
|
||||
|
||||
async def serve(repository: Path | None) -> None:
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -283,6 +317,7 @@ async def serve(repository: Path | None) -> None:
|
||||
name=GitTools.BRANCH,
|
||||
description="List Git branches",
|
||||
inputSchema=GitBranch.model_json_schema(),
|
||||
|
||||
)
|
||||
]
|
||||
|
||||
@@ -380,13 +415,19 @@ async def serve(repository: Path | None) -> None:
|
||||
text=result
|
||||
)]
|
||||
|
||||
# Update the LOG case:
|
||||
case GitTools.LOG:
|
||||
log = git_log(repo, arguments.get("max_count", 10))
|
||||
log = git_log(
|
||||
repo,
|
||||
arguments.get("max_count", 10),
|
||||
arguments.get("start_timestamp"),
|
||||
arguments.get("end_timestamp")
|
||||
)
|
||||
return [TextContent(
|
||||
type="text",
|
||||
text="Commit history:\n" + "\n".join(log)
|
||||
)]
|
||||
|
||||
|
||||
case GitTools.CREATE_BRANCH:
|
||||
result = git_create_branch(
|
||||
repo,
|
||||
@@ -423,7 +464,7 @@ async def serve(repository: Path | None) -> None:
|
||||
type="text",
|
||||
text=result
|
||||
)]
|
||||
|
||||
|
||||
case _:
|
||||
raise ValueError(f"Unknown tool: {name}")
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ class TimeTools(str, Enum):
|
||||
class TimeResult(BaseModel):
|
||||
timezone: str
|
||||
datetime: str
|
||||
day_of_week: str
|
||||
is_dst: bool
|
||||
|
||||
|
||||
@@ -64,6 +65,7 @@ class TimeServer:
|
||||
return TimeResult(
|
||||
timezone=timezone_name,
|
||||
datetime=current_time.isoformat(timespec="seconds"),
|
||||
day_of_week=current_time.strftime("%A"),
|
||||
is_dst=bool(current_time.dst()),
|
||||
)
|
||||
|
||||
@@ -104,11 +106,13 @@ class TimeServer:
|
||||
source=TimeResult(
|
||||
timezone=source_tz,
|
||||
datetime=source_time.isoformat(timespec="seconds"),
|
||||
day_of_week=source_time.strftime("%A"),
|
||||
is_dst=bool(source_time.dst()),
|
||||
),
|
||||
target=TimeResult(
|
||||
timezone=target_tz,
|
||||
datetime=target_time.isoformat(timespec="seconds"),
|
||||
day_of_week=target_time.strftime("%A"),
|
||||
is_dst=bool(target_time.dst()),
|
||||
),
|
||||
time_difference=time_diff_str,
|
||||
|
||||
Reference in New Issue
Block a user