diff --git a/TODO.md b/TODO.md index 08c8c41..3525bcd 100644 --- a/TODO.md +++ b/TODO.md @@ -1,11 +1,26 @@ -- [ ] Modify `src/agent.js` to detect and read `Coderrr.md` file -- [ ] Integrate custom prompt content into enhanced prompt in chat method -- [ ] Test the feature with a sample Coderrr.md file -- [ ] Ensure backward compatibility when no Coderrr.md exists -======= -# TODO: Implement Coderrr.md Custom System Prompt Feature - -- [x] Modify `src/agent.js` to detect and read `Coderrr.md` file -- [x] Integrate custom prompt content into enhanced prompt in chat method -- [ ] Test the feature with a sample Coderrr.md file -- [ ] Ensure backward compatibility when no Coderrr.md exists +# Refactor CodebaseScanner - Break into Smaller Classes + +## Overview +The CodebaseScanner class is too large (500+ lines) and has multiple responsibilities. Refactor into smaller, focused classes while maintaining backward compatibility. + +## Classes to Create +- [x] FileScanner: Directory scanning, file discovery, filtering +- [x] CacheManager: Caching of scan results +- [x] SearchEngine: Semantic, regex, fuzzy search functionality +- [x] ContentProcessor: Content chunking for large files +- [x] Refactor CodebaseScanner: Make it a facade orchestrating the above classes + +## Implementation Steps +1. [x] Create FileScanner class in src/fileScanner.js +2. [x] Create CacheManager class in src/cacheManager.js +3. [x] Create SearchEngine class in src/searchEngine.js +4. [x] Create ContentProcessor class in src/contentProcessor.js +5. [x] Update CodebaseScanner to use the new classes +6. 
[x] Test the refactored code + +## Files to Modify +- [x] src/codebaseScanner.js (refactor to use new classes) +- [x] Create: src/fileScanner.js +- [x] Create: src/cacheManager.js +- [x] Create: src/searchEngine.js +- [x] Create: src/contentProcessor.js diff --git a/src/agent.js b/src/agent.js index 8651137..871b93b 100644 --- a/src/agent.js +++ b/src/agent.js @@ -1,11 +1,5 @@ -const axios = require('axios'); -const fs = require('fs'); -const path = require('path'); -const ui = require('./ui'); -const FileOperations = require('./fileOps'); -const CommandExecutor = require('./executor').CommandExecutor; -const TodoManager = require('./todoManager'); const CodebaseScanner = require('./codebaseScanner'); +const SearchUtils = require('./searchUtils'); const GitOperations = require('./gitOps'); const { sanitizeAxiosError, formatUserError, createSafeError, isNetworkError } = require('./errorHandler'); const configManager = require('./configManager'); @@ -970,4 +966,4 @@ Current State: } } -module.exports = Agent; \ No newline at end of file +module.exports = Agent; diff --git a/src/cacheManager.js b/src/cacheManager.js new file mode 100644 index 0000000..bdcb69d --- /dev/null +++ b/src/cacheManager.js @@ -0,0 +1,56 @@ +/** + * CacheManager - Handles caching of scan results + * Provides simple time-based caching with configurable duration + */ + +class CacheManager { + constructor(cacheDuration = 60000) { // 1 minute default + this.cache = null; + this.cacheTimestamp = null; + this.cacheDuration = cacheDuration; + } + + /** + * Check if cache is valid (not expired) + */ + isCacheValid() { + if (!this.cache || !this.cacheTimestamp) { + return false; + } + + const now = Date.now(); + return (now - this.cacheTimestamp) < this.cacheDuration; + } + + /** + * Get cached data if valid + */ + get() { + return this.isCacheValid() ?
this.cache : null; + } + + /** + * Set cache data with current timestamp + */ + set(data) { + this.cache = data; + this.cacheTimestamp = Date.now(); + } + + /** + * Clear the cache + */ + clear() { + this.cache = null; + this.cacheTimestamp = null; + } + + /** + * Set cache duration + */ + setCacheDuration(duration) { + this.cacheDuration = duration; + } +} + +module.exports = CacheManager; diff --git a/src/codebaseScanner.js b/src/codebaseScanner.js index e493901..fce5a25 100644 --- a/src/codebaseScanner.js +++ b/src/codebaseScanner.js @@ -1,208 +1,39 @@ -const fs = require('fs'); -const path = require('path'); +const FileScanner = require('./fileScanner'); +const CacheManager = require('./cacheManager'); +const SearchEngine = require('./searchEngine'); +const ContentProcessor = require('./contentProcessor'); /** - * Codebase Scanner - Discovers and reads source files in the project - * Ignores common non-source directories and files + * Codebase Scanner - Facade for codebase scanning and search operations + * Orchestrates FileScanner, CacheManager, SearchEngine, and ContentProcessor + * Maintains backward compatibility with existing interface */ class CodebaseScanner { constructor(workingDir = process.cwd()) { this.workingDir = workingDir; - this.cache = null; - this.cacheTimestamp = null; - this.cacheDuration = 60000; // 1 minute cache - - // Directories to ignore - this.ignoreDirs = new Set([ - 'node_modules', - 'env', - '.env', - 'venv', - '.venv', - '__pycache__', - '.git', - '.github', - 'dist', - 'build', - 'out', - 'target', - '.next', - '.nuxt', - 'coverage', - '.pytest_cache', - '.mypy_cache', - '.tox', - 'vendor', - 'bower_components' - ]); - - // Files to ignore - this.ignoreFiles = new Set([ - '.DS_Store', - 'Thumbs.db', - '.gitignore', - '.dockerignore', - 'package-lock.json', - 'yarn.lock', - 'pnpm-lock.yaml', - 'poetry.lock', - 'Pipfile.lock', - '.env', - '.env.local', - '.env.example' - ]); - - // Source file extensions to include - 
this.sourceExtensions = new Set([ - '.js', '.jsx', '.ts', '.tsx', - '.py', '.pyi', - '.java', '.kt', '.scala', - '.go', '.rs', - '.c', '.cpp', '.cc', '.h', '.hpp', - '.cs', '.vb', - '.rb', '.php', - '.swift', '.m', - '.sh', '.bash', - '.sql', - '.vue', '.svelte', - '.html', '.css', '.scss', '.less', - '.json', '.yaml', '.yml', '.toml', - '.md', '.txt' - ]); - - // Max file size to read (500KB) - this.maxFileSize = 500 * 1024; - } - /** - * Check if path should be ignored - */ - shouldIgnore(filePath, stats) { - const basename = path.basename(filePath); - - // Ignore specific files - if (this.ignoreFiles.has(basename)) { - return true; - } - - // Ignore directories - if (stats.isDirectory() && this.ignoreDirs.has(basename)) { - return true; - } - - // Ignore hidden files/directories (except .github is already ignored) - if (basename.startsWith('.') && !basename.match(/\.(js|ts|py|md|json|yaml|yml)$/)) { - return true; - } - - return false; + // Initialize component classes + this.fileScanner = new FileScanner(workingDir); + this.cacheManager = new CacheManager(60000); // 1 minute cache + this.searchEngine = new SearchEngine(); + this.contentProcessor = new ContentProcessor(); } - /** - * Check if file is a source file we want to read - */ - isSourceFile(filePath, stats) { - if (!stats.isFile()) { - return false; - } - - const ext = path.extname(filePath); - return this.sourceExtensions.has(ext); - } - /** - * Recursively scan directory for source files - */ - scanDirectory(dirPath, result = { structure: [], files: {} }) { - try { - const entries = fs.readdirSync(dirPath, { withFileTypes: true }); - - for (const entry of entries) { - const fullPath = path.join(dirPath, entry.name); - const relativePath = path.relative(this.workingDir, fullPath); - const stats = fs.statSync(fullPath); - - // Skip if should ignore - if (this.shouldIgnore(fullPath, stats)) { - continue; - } - - if (entry.isDirectory()) { - // Add to structure - result.structure.push({ - type: 
'directory', - path: relativePath, - name: entry.name - }); - - // Recursively scan - this.scanDirectory(fullPath, result); - } else if (this.isSourceFile(fullPath, stats)) { - // Check file size - if (stats.size > this.maxFileSize) { - result.structure.push({ - type: 'file', - path: relativePath, - name: entry.name, - size: stats.size, - skipped: true, - reason: 'File too large' - }); - continue; - } - - // Add to structure - result.structure.push({ - type: 'file', - path: relativePath, - name: entry.name, - size: stats.size - }); - - // Read file content - try { - const content = fs.readFileSync(fullPath, 'utf8'); - result.files[relativePath] = { - path: relativePath, - name: entry.name, - size: stats.size, - extension: path.extname(entry.name), - content: content, - lines: content.split('\n').length - }; - } catch (readError) { - // Skip files we can't read - result.files[relativePath] = { - path: relativePath, - name: entry.name, - error: 'Could not read file' - }; - } - } - } - } catch (error) { - // Skip directories we can't access - console.error(`Error scanning ${dirPath}:`, error.message); - } - - return result; - } /** * Get project structure and file contents */ scan(forceRefresh = false) { // Return cached result if available and fresh - const now = Date.now(); - if (!forceRefresh && this.cache && this.cacheTimestamp && - (now - this.cacheTimestamp) < this.cacheDuration) { - return this.cache; + if (!forceRefresh && this.cacheManager.isCacheValid()) { + return this.cacheManager.get(); } - + // Perform scan - const result = this.scanDirectory(this.workingDir); - + const result = this.fileScanner.scanDirectory(this.workingDir); + // Add summary result.summary = { totalFiles: Object.keys(result.files).length, @@ -211,11 +42,10 @@ class CodebaseScanner { scannedAt: new Date().toISOString(), workingDir: this.workingDir }; - + // Cache the result - this.cache = result; - this.cacheTimestamp = now; - + this.cacheManager.set(result); + return result; } @@ 
-273,31 +103,120 @@ class CodebaseScanner { */ findFiles(searchTerm) { const scanResult = this.scan(); - const results = []; - - const searchLower = searchTerm.toLowerCase(); - - for (const [filePath, fileData] of Object.entries(scanResult.files)) { - if (fileData.name.toLowerCase().includes(searchLower) || - filePath.toLowerCase().includes(searchLower)) { - results.push({ - path: filePath, - name: fileData.name, - size: fileData.size, - extension: fileData.extension - }); + return this.searchEngine.findFiles(scanResult.files, searchTerm); + } + + /** + * Calculate fuzzy match score between two strings + */ + fuzzyMatchScore(searchTerm, target) { + const search = searchTerm.toLowerCase(); + const targetLower = target.toLowerCase(); + + // Exact match gets highest score + if (targetLower === search) return 100; + + // Starts with search term + if (targetLower.startsWith(search)) return 90; + + // Contains search term + if (targetLower.includes(search)) return 80; + + // Fuzzy matching - check for character sequence + let score = 0; + let searchIndex = 0; + + for (let i = 0; i < targetLower.length && searchIndex < search.length; i++) { + if (targetLower[i] === search[searchIndex]) { + score += 10; + searchIndex++; } } - - return results; + + // Bonus for consecutive matches + if (searchIndex === search.length) { + score += 20; + } + + return Math.min(score, 70); // Cap at 70 for non-exact matches + } + + /** + * Get semantic keywords for a search term + */ + getSemanticKeywords(searchTerm) { + const term = searchTerm.toLowerCase(); + const keywords = [term]; // Always include the original term + + // Add semantic mappings (mappings live on SearchEngine, not on this facade) + for (const [concept, terms] of Object.entries(this.searchEngine.semanticMappings)) { + if (terms.some(t => t.includes(term) || term.includes(t))) { + keywords.push(...terms); + keywords.push(concept); + } + } + + // Add common variations + if (term.endsWith('s')) { + keywords.push(term.slice(0, -1)); // Remove plural + } else { + keywords.push(term + 's'); //
Add plural + } + + return [...new Set(keywords)]; // Remove duplicates + } + + /** + * Perform semantic search across files and content + */ + semanticSearch(searchTerm, options = {}) { + const scanResult = this.scan(); + return this.searchEngine.semanticSearch(scanResult.files, searchTerm, options); + } + + /** + * Chunk large file content for processing + */ + chunkContent(content, chunkSize) { + return this.contentProcessor.chunkContent(content, chunkSize); + } + + /** + * Search within file chunks for large files + */ + async searchInChunks(filePath, searchTerm, options = {}) { + const scanResult = this.scan(); + const semanticKeywords = this.searchEngine.getSemanticKeywords(searchTerm); + return this.contentProcessor.searchInChunks( + this.workingDir, + filePath, + searchTerm, + semanticKeywords, + this.fileScanner.maxFileSize + ); + } + + /** + * Advanced search with multiple modes + */ + advancedSearch(query, mode = 'auto', options = {}) { + const scanResult = this.scan(); + return this.searchEngine.advancedSearch(scanResult.files, query, mode, options); + } + + /** + * Regex-based search + */ + regexSearch(pattern, options = {}) { + const scanResult = this.scan(); + return this.searchEngine.regexSearch(scanResult.files, pattern, options); } /** * Clear cache */ clearCache() { - this.cache = null; - this.cacheTimestamp = null; + this.cacheManager.clear(); } } diff --git a/src/contentProcessor.js b/src/contentProcessor.js new file mode 100644 index 0000000..b8e610f --- /dev/null +++ b/src/contentProcessor.js @@ -0,0 +1,116 @@ +const fs = require('fs'); +const path = require('path'); + +/** + * ContentProcessor - Handles content processing for large files + * Provides chunking functionality and content analysis + */ + +class ContentProcessor { + constructor(chunkSize = 100 * 1024) { // 100KB chunks + this.chunkSize = chunkSize; + } + + /** + * Chunk large file content for processing + */ + chunkContent(content, chunkSize = this.chunkSize) { + const chunks = 
[]; + for (let i = 0; i < content.length; i += chunkSize) { + chunks.push({ + content: content.slice(i, i + chunkSize), + start: i, + end: Math.min(i + chunkSize, content.length), + index: chunks.length + }); + } + return chunks; + } + + /** + * Find matches in text content + */ + findMatchesInText(text, searchTerm, semanticKeywords) { + const keywords = semanticKeywords || [searchTerm]; + const matches = []; + const textLower = text.toLowerCase(); + + for (const keyword of keywords) { + let index = textLower.indexOf(keyword.toLowerCase()); + while (index !== -1) { + matches.push({ + keyword, + position: index, + context: text.slice(Math.max(0, index - 30), Math.min(text.length, index + keyword.length + 30)) + }); + index = textLower.indexOf(keyword.toLowerCase(), index + 1); + } + } + + return matches; + } + + /** + * Search within file chunks for large files + */ + async searchInChunks(workingDir, filePath, searchTerm, semanticKeywords, maxFileSize) { + try { + const fullPath = path.join(workingDir, filePath); + const stats = fs.statSync(fullPath); + + if (stats.size <= maxFileSize) { + // For smaller files, return empty result (handled by regular search) + return []; + } + + // For large files, read in chunks + const stream = fs.createReadStream(fullPath, { encoding: 'utf8' }); + const chunks = []; + let currentChunk = ''; + let chunkIndex = 0; + + return new Promise((resolve, reject) => { + stream.on('data', (chunk) => { + currentChunk += chunk; + + if (currentChunk.length >= this.chunkSize) { + chunks.push({ + content: currentChunk, + index: chunkIndex++, + matches: this.findMatchesInText(currentChunk, searchTerm, semanticKeywords) + }); + currentChunk = ''; + } + }); + + stream.on('end', () => { + // Process remaining chunk + if (currentChunk.length > 0) { + chunks.push({ + content: currentChunk, + index: chunkIndex, + matches: this.findMatchesInText(currentChunk, searchTerm, semanticKeywords) + }); + } + + const results = chunks + .filter(chunk => 
chunk.matches.length > 0) + .map(chunk => ({ + path: filePath, + chunkIndex: chunk.index, + matches: chunk.matches, + preview: chunk.content.slice(0, 200) + '...' + })); + + resolve(results); + }); + + stream.on('error', reject); + }); + } catch (error) { + return Promise.reject(error); + } + } +} + +module.exports = ContentProcessor; diff --git a/src/fileOps.js b/src/fileOps.js index af9223e..511ba22 100644 --- a/src/fileOps.js +++ b/src/fileOps.js @@ -1,4 +1,4 @@ -const fs = require('fs'); +const fsPromises = require('fs').promises; const path = require('path'); const ui = require('./ui'); @@ -7,7 +7,7 @@ const ui = require('./ui'); * * Provides safe file manipulation operations with automatic directory creation, * path resolution, and comprehensive error handling. All operations are - * synchronous to ensure atomicity and predictable behavior. + * asynchronous to ensure non-blocking behavior and proper error handling. */ // Protected paths that should never be deleted by Coderrr @@ -57,12 +57,28 @@ class FileOperations { : path.join(this.workingDir, filePath); } + /** + * Check if a file or directory exists + */ + async fileExists(filePath) { + try { + await fsPromises.access(filePath); + return true; + } catch { + return false; + } + } + /** * Ensure directory exists */ - ensureDir(dirPath) { - if (!fs.existsSync(dirPath)) { - fs.mkdirSync(dirPath, { recursive: true }); + async ensureDir(dirPath) { + try { + await fsPromises.mkdir(dirPath, { recursive: true }); + } catch (error) { + if (error.code !== 'EEXIST') { + throw error; + } } } @@ -80,15 +96,15 @@ class FileOperations { const dir = path.dirname(absolutePath); // Check if file already exists - if (fs.existsSync(absolutePath)) { + if (await this.fileExists(absolutePath)) { throw new Error(`File already exists: ${filePath}`); } // Ensure directory exists - this.ensureDir(dir); + await this.ensureDir(dir); // Write file - fs.writeFileSync(absolutePath, content, 'utf8'); + await 
fsPromises.writeFile(absolutePath, content, 'utf8'); ui.displayFileOp('create_file', filePath, 'success'); // Return with diff data (new file = empty old content) @@ -112,7 +128,7 @@ const absolutePath = this.resolvePath(filePath); // Check if file exists - if (!fs.existsSync(absolutePath)) { + if (!(await this.fileExists(absolutePath))) { throw new Error(`File not found: ${filePath}`); } @@ -120,7 +136,7 @@ - const oldContent = fs.readFileSync(absolutePath, 'utf8'); + const oldContent = await fsPromises.readFile(absolutePath, 'utf8'); // Write file - fs.writeFileSync(absolutePath, content, 'utf8'); + await fsPromises.writeFile(absolutePath, content, 'utf8'); ui.displayFileOp('update_file', filePath, 'success'); return { @@ -143,12 +159,12 @@ const absolutePath = this.resolvePath(filePath); // Check if file exists - if (!fs.existsSync(absolutePath)) { + if (!(await this.fileExists(absolutePath))) { throw new Error(`File not found: ${filePath}`); } // Read current content - const originalContent = fs.readFileSync(absolutePath, 'utf8'); + const originalContent = await fsPromises.readFile(absolutePath, 'utf8'); // Replace old content with new content if (!originalContent.includes(oldContent)) { @@ -158,7 +174,7 @@ const patchedContent = originalContent.replace(oldContent, newContent); // Write back - fs.writeFileSync(absolutePath, patchedContent, 'utf8'); + await fsPromises.writeFile(absolutePath, patchedContent, 'utf8'); ui.displayFileOp('patch_file', filePath, 'success'); return { @@ -187,7 +203,7 @@ } // Check if file exists - if (!fs.existsSync(absolutePath)) { + if (!(await this.fileExists(absolutePath))) { throw new Error(`File not found: ${filePath}`); } @@ -195,7 +211,7 @@ - const oldContent = fs.readFileSync(absolutePath, 'utf8'); + const oldContent = await fsPromises.readFile(absolutePath, 'utf8'); // Delete file - fs.unlinkSync(absolutePath); + await fsPromises.unlink(absolutePath); ui.displayFileOp('delete_file', filePath, 'success'); return { @@ -218,12 +234,12 @@ class
FileOperations { const absolutePath = this.resolvePath(filePath); // Check if file exists - if (!fs.existsSync(absolutePath)) { + if (!(await this.fileExists(absolutePath))) { throw new Error(`File not found: ${filePath}`); } // Read file - const content = fs.readFileSync(absolutePath, 'utf8'); + const content = await fsPromises.readFile(absolutePath, 'utf8'); ui.displayFileOp('read_file', filePath, 'success'); return { success: true, content, path: absolutePath }; } catch (error) { @@ -244,12 +260,12 @@ class FileOperations { const absolutePath = this.resolvePath(dirPath); // Check if directory already exists - if (fs.existsSync(absolutePath)) { + if (await this.fileExists(absolutePath)) { throw new Error(`Directory already exists: ${dirPath}`); } // Create directory (recursive) - fs.mkdirSync(absolutePath, { recursive: true }); + await fsPromises.mkdir(absolutePath, { recursive: true }); ui.displayFileOp('create_dir', dirPath, 'success'); return { success: true, path: absolutePath }; } catch (error) { @@ -276,23 +292,24 @@ class FileOperations { } // Check if directory exists - if (!fs.existsSync(absolutePath)) { + if (!(await this.fileExists(absolutePath))) { throw new Error(`Directory not found: ${dirPath}`); } // Check if it's actually a directory - if (!fs.statSync(absolutePath).isDirectory()) { + const stats = await fsPromises.stat(absolutePath); + if (!stats.isDirectory()) { throw new Error(`Path is not a directory: ${dirPath}`); } // Check if directory is empty - const contents = fs.readdirSync(absolutePath); + const contents = await fsPromises.readdir(absolutePath); if (contents.length > 0) { throw new Error(`Directory not empty: ${dirPath}`); } // Delete directory - fs.rmdirSync(absolutePath); + await fsPromises.rmdir(absolutePath); ui.displayFileOp('delete_dir', dirPath, 'success'); return { success: true, path: absolutePath }; } catch (error) { @@ -313,17 +330,18 @@ class FileOperations { const absolutePath = this.resolvePath(dirPath); // Check if 
directory exists - if (!fs.existsSync(absolutePath)) { + if (!(await this.fileExists(absolutePath))) { throw new Error(`Directory not found: ${dirPath}`); } // Check if it's actually a directory - if (!fs.statSync(absolutePath).isDirectory()) { + const stats = await fsPromises.stat(absolutePath); + if (!stats.isDirectory()) { throw new Error(`Path is not a directory: ${dirPath}`); } // List contents - const contents = fs.readdirSync(absolutePath); + const contents = await fsPromises.readdir(absolutePath); ui.displayFileOp('list_dir', dirPath, 'success'); return { success: true, path: absolutePath, contents }; } catch (error) { @@ -346,26 +364,27 @@ class FileOperations { const newAbsolutePath = this.resolvePath(newDirPath); // Check if source directory exists - if (!fs.existsSync(oldAbsolutePath)) { + if (!(await this.fileExists(oldAbsolutePath))) { throw new Error(`Directory not found: ${oldDirPath}`); } // Check if it's actually a directory - if (!fs.statSync(oldAbsolutePath).isDirectory()) { + const stats = await fsPromises.stat(oldAbsolutePath); + if (!stats.isDirectory()) { throw new Error(`Source path is not a directory: ${oldDirPath}`); } // Check if destination already exists - if (fs.existsSync(newAbsolutePath)) { + if (await this.fileExists(newAbsolutePath)) { throw new Error(`Destination already exists: ${newDirPath}`); } // Ensure parent directory of destination exists const newDirParent = path.dirname(newAbsolutePath); - this.ensureDir(newDirParent); + await this.ensureDir(newDirParent); // Rename/move directory - fs.renameSync(oldAbsolutePath, newAbsolutePath); + await fsPromises.rename(oldAbsolutePath, newAbsolutePath); ui.displayFileOp('rename_dir', `${oldDirPath} -> ${newDirPath}`, 'success'); return { success: true, oldPath: oldAbsolutePath, newPath: newAbsolutePath }; } catch (error) { diff --git a/src/fileScanner.js b/src/fileScanner.js new file mode 100644 index 0000000..2f762dd --- /dev/null +++ b/src/fileScanner.js @@ -0,0 +1,191 @@ +const fs 
= require('fs'); +const path = require('path'); +const logger = require('./logger'); + +/** + * FileScanner - Handles directory scanning and file discovery + * Responsible for finding source files while respecting ignore rules + */ + +class FileScanner { + constructor(workingDir = process.cwd()) { + this.workingDir = workingDir; + + // Directories to ignore + this.ignoreDirs = new Set([ + 'node_modules', + 'env', + '.env', + 'venv', + '.venv', + '__pycache__', + '.git', + '.github', + 'dist', + 'build', + 'out', + 'target', + '.next', + '.nuxt', + 'coverage', + '.pytest_cache', + '.mypy_cache', + '.tox', + 'vendor', + 'bower_components' + ]); + + // Files to ignore + this.ignoreFiles = new Set([ + '.DS_Store', + 'Thumbs.db', + '.gitignore', + '.dockerignore', + 'package-lock.json', + 'yarn.lock', + 'pnpm-lock.yaml', + 'poetry.lock', + 'Pipfile.lock', + '.env', + '.env.local', + '.env.example' + ]); + + // Source file extensions to include + this.sourceExtensions = new Set([ + '.js', '.jsx', '.ts', '.tsx', + '.py', '.pyi', + '.java', '.kt', '.scala', + '.go', '.rs', + '.c', '.cpp', '.cc', '.h', '.hpp', + '.cs', '.vb', + '.rb', '.php', + '.swift', '.m', + '.sh', '.bash', + '.sql', + '.vue', '.svelte', + '.html', '.css', '.scss', '.less', + '.json', '.yaml', '.yml', '.toml', + '.md', '.txt' + ]); + + // Max file size to read (500KB) + this.maxFileSize = 500 * 1024; + } + + /** + * Check if path should be ignored + */ + shouldIgnore(filePath, stats) { + const basename = path.basename(filePath); + + // Ignore specific files + if (this.ignoreFiles.has(basename)) { + return true; + } + + // Ignore directories + if (stats.isDirectory() && this.ignoreDirs.has(basename)) { + return true; + } + + // Ignore hidden files/directories (except .github is already ignored) + if (basename.startsWith('.') && !basename.match(/\.(js|ts|py|md|json|yaml|yml)$/)) { + return true; + } + + return false; + } + + /** + * Check if file is a source file we want to read + */ + 
isSourceFile(filePath, stats) { + if (!stats.isFile()) { + return false; + } + + const ext = path.extname(filePath); + return this.sourceExtensions.has(ext); + } + + /** + * Recursively scan directory for source files + */ + scanDirectory(dirPath, result = { structure: [], files: {} }) { + try { + const entries = fs.readdirSync(dirPath, { withFileTypes: true }); + + for (const entry of entries) { + const fullPath = path.join(dirPath, entry.name); + const relativePath = path.relative(this.workingDir, fullPath); + const stats = fs.statSync(fullPath); + + // Skip if should ignore + if (this.shouldIgnore(fullPath, stats)) { + continue; + } + + if (entry.isDirectory()) { + // Add to structure + result.structure.push({ + type: 'directory', + path: relativePath, + name: entry.name + }); + + // Recursively scan + this.scanDirectory(fullPath, result); + } else if (this.isSourceFile(fullPath, stats)) { + // Check file size + if (stats.size > this.maxFileSize) { + result.structure.push({ + type: 'file', + path: relativePath, + name: entry.name, + size: stats.size, + skipped: true, + reason: 'File too large' + }); + continue; + } + + // Add to structure + result.structure.push({ + type: 'file', + path: relativePath, + name: entry.name, + size: stats.size + }); + + // Read file content + try { + const content = fs.readFileSync(fullPath, 'utf8'); + result.files[relativePath] = { + path: relativePath, + name: entry.name, + size: stats.size, + extension: path.extname(entry.name), + content: content, + lines: content.split('\n').length + }; + } catch (readError) { + // Skip files we can't read + result.files[relativePath] = { + path: relativePath, + name: entry.name, + error: 'Could not read file' + }; + } + } + } + } catch (error) { + // Skip directories we can't access + logger.error(`Error scanning ${dirPath}:`, error.message); + } + + return result; + } +} + +module.exports = FileScanner; diff --git a/src/logger.js b/src/logger.js new file mode 100644 index 0000000..71b95a8 --- 
/dev/null +++ b/src/logger.js @@ -0,0 +1,94 @@ +/** + * Centralized Logger for Coderrr CLI + * + * Provides configurable logging with different levels to control output + * in development vs production environments. + */ + +class Logger { + constructor() { + // Log levels in order of verbosity + this.levels = { + debug: 0, + info: 1, + warn: 2, + error: 3, + none: 4 + }; + + // Default to info level (shows info, warn, error) + this.currentLevel = this.levels[process.env.LOG_LEVEL || 'info']; + + // Colors for different log levels + this.colors = { + debug: '\x1b[36m', // cyan + info: '\x1b[32m', // green + warn: '\x1b[33m', // yellow + error: '\x1b[31m' // red + }; + this.reset = '\x1b[0m'; + } + + /** + * Set the minimum log level + */ + setLevel(level) { + if (this.levels.hasOwnProperty(level)) { + this.currentLevel = this.levels[level]; + } else { + this.error(`Invalid log level: ${level}. Using 'info' instead.`); + this.currentLevel = this.levels.info; + } + } + + /** + * Format log message with timestamp and level + */ + formatMessage(level, message, ...args) { + const timestamp = new Date().toISOString(); + const levelUpper = level.toUpperCase(); + const color = this.colors[level] || ''; + const formattedArgs = args.length > 0 ? 
/**
 * SearchEngine - Handles all search functionality for a scanned codebase.
 *
 * Every method is a pure function of the `files` map it receives, which has
 * the shape { [filePath]: { name, size, extension, content? } } (as produced
 * by FileScanner — content may be absent for binary/oversized files).
 *
 * Provides: fuzzy filename scoring, semantic (concept-keyword) search,
 * regex search, and an auto-dispatching advanced search.
 */
class SearchEngine {
  constructor() {
    // Semantic keyword mappings for concept-based search: a query matching
    // any term in a bucket pulls in the whole bucket plus the concept name.
    this.semanticMappings = {
      'auth': ['auth', 'authentication', 'login', 'logout', 'oauth', 'jwt', 'token', 'session', 'user', 'password', 'signin', 'signup'],
      'database': ['db', 'database', 'sql', 'mongo', 'postgres', 'mysql', 'sqlite', 'orm', 'model', 'schema', 'migration'],
      'api': ['api', 'endpoint', 'route', 'controller', 'service', 'rest', 'graphql', 'http', 'request', 'response'],
      'config': ['config', 'settings', 'env', 'environment', 'constants', 'options'],
      'test': ['test', 'spec', 'mock', 'fixture', 'assert', 'expect', 'describe', 'it'],
      'ui': ['ui', 'component', 'view', 'template', 'html', 'css', 'style', 'layout', 'render'],
      'utils': ['util', 'helper', 'common', 'shared', 'tool', 'function', 'library'],
      'error': ['error', 'exception', 'catch', 'throw', 'try', 'fail', 'debug', 'log'],
      'security': ['security', 'encrypt', 'decrypt', 'hash', 'salt', 'key', 'cert', 'ssl', 'tls']
    };
  }

  /**
   * Calculate a fuzzy match score between two strings (case-insensitive).
   *
   * @param {string} searchTerm - The query string.
   * @param {string} target - The candidate string (filename or path).
   * @returns {number} 100 exact, 90 prefix, 80 substring; otherwise an
   *   in-order character-subsequence score capped at 70.
   */
  fuzzyMatchScore(searchTerm, target) {
    const search = searchTerm.toLowerCase();
    const targetLower = target.toLowerCase();

    // Exact match gets highest score
    if (targetLower === search) return 100;

    // Starts with search term
    if (targetLower.startsWith(search)) return 90;

    // Contains search term
    if (targetLower.includes(search)) return 80;

    // Fuzzy matching - scan target once, +10 per query character found
    // in order (not necessarily adjacent).
    let score = 0;
    let searchIndex = 0;

    for (let i = 0; i < targetLower.length && searchIndex < search.length; i++) {
      if (targetLower[i] === search[searchIndex]) {
        score += 10;
        searchIndex++;
      }
    }

    // Bonus when the entire query was matched as an in-order subsequence.
    // (NOTE: original comment claimed "consecutive matches" — adjacency is
    // not actually required by the loop above.)
    if (searchIndex === search.length) {
      score += 20;
    }

    return Math.min(score, 70); // Cap at 70 for non-exact matches
  }

  /**
   * Expand a search term into its semantic keyword set.
   *
   * @param {string} searchTerm - The raw query.
   * @returns {string[]} De-duplicated list containing the original term,
   *   every term of any concept bucket it overlaps with, the concept names,
   *   and a naive singular/plural variation.
   */
  getSemanticKeywords(searchTerm) {
    const term = searchTerm.toLowerCase();
    const keywords = [term]; // Always include the original term

    // Add semantic mappings: substring overlap in either direction counts.
    for (const [concept, terms] of Object.entries(this.semanticMappings)) {
      if (terms.some(t => t.includes(term) || term.includes(t))) {
        keywords.push(...terms);
        keywords.push(concept);
      }
    }

    // Add common variations (naive English pluralization only).
    if (term.endsWith('s')) {
      keywords.push(term.slice(0, -1)); // Remove plural
    } else {
      keywords.push(term + 's'); // Add plural
    }

    return [...new Set(keywords)]; // Remove duplicates
  }

  /**
   * Find files whose name or path contains the search term (case-insensitive).
   *
   * @param {Object} files - Map of filePath -> file metadata.
   * @param {string} searchTerm - Substring to look for.
   * @returns {Array<{path, name, size, extension}>} Matching files.
   */
  findFiles(files, searchTerm) {
    const results = [];
    const searchLower = searchTerm.toLowerCase();

    for (const [filePath, fileData] of Object.entries(files)) {
      if (fileData.name.toLowerCase().includes(searchLower) ||
          filePath.toLowerCase().includes(searchLower)) {
        results.push({
          path: filePath,
          name: fileData.name,
          size: fileData.size,
          extension: fileData.extension
        });
      }
    }

    return results;
  }

  /**
   * Perform semantic search across filenames, paths, and (optionally) content.
   *
   * @param {Object} files - Map of filePath -> file metadata.
   * @param {string} searchTerm - The query; expanded via getSemanticKeywords.
   * @param {Object} [options]
   * @param {number} [options.maxResults=50] - Cap on returned results.
   * @param {boolean} [options.includeContent=true] - Allow content matching
   *   and attach a preview snippet for content hits.
   * @param {number} [options.minScore=30] - Minimum score to include a file.
   * @param {boolean} [options.searchContent=true] - Scan file contents.
   * @returns {Array} Results sorted by score descending, each with
   *   {path, name, size, extension, score, matchType, matchedKeyword, preview?}.
   */
  semanticSearch(files, searchTerm, options = {}) {
    const results = [];
    const keywords = this.getSemanticKeywords(searchTerm);

    const {
      maxResults = 50,
      includeContent = true,
      minScore = 30,
      searchContent = true
    } = options;

    for (const [filePath, fileData] of Object.entries(files)) {
      let bestScore = 0;
      let matchType = 'filename';
      let matchedKeyword = '';

      // Check filename/path matches; keep the single best-scoring keyword.
      for (const keyword of keywords) {
        const nameScore = this.fuzzyMatchScore(keyword, fileData.name);
        const pathScore = this.fuzzyMatchScore(keyword, filePath);

        if (nameScore > bestScore) {
          bestScore = nameScore;
          matchedKeyword = keyword;
        }
        if (pathScore > bestScore) {
          bestScore = pathScore;
          matchedKeyword = keyword;
          matchType = 'path';
        }
      }

      // Check content matches if enabled and file has content.
      // Content hits get a flat score of 60, so they only win over weak
      // (sub-substring) filename matches.
      if (searchContent && fileData.content && includeContent) {
        const content = fileData.content.toLowerCase();
        for (const keyword of keywords) {
          if (content.includes(keyword)) {
            const contentScore = 60; // Content matches get good score
            if (contentScore > bestScore) {
              bestScore = contentScore;
              matchType = 'content';
              matchedKeyword = keyword;
            }
          }
        }
      }

      // Add to results if score meets threshold
      if (bestScore >= minScore) {
        const result = {
          path: filePath,
          name: fileData.name,
          size: fileData.size,
          extension: fileData.extension,
          score: bestScore,
          matchType,
          matchedKeyword
        };

        // Attach a ~100-char preview around the first content occurrence.
        if (matchType === 'content' && includeContent && fileData.content) {
          const content = fileData.content;
          const keywordIndex = content.toLowerCase().indexOf(matchedKeyword.toLowerCase());
          const start = Math.max(0, keywordIndex - 50);
          const end = Math.min(content.length, keywordIndex + 50 + matchedKeyword.length);
          result.preview = '...' + content.slice(start, end) + '...';
        }

        results.push(result);
      }
    }

    // Sort by score descending and limit results
    results.sort((a, b) => b.score - a.score);
    return results.slice(0, maxResults);
  }

  /**
   * Regex-based search over filenames and (optionally) content.
   *
   * @param {Object} files - Map of filePath -> file metadata.
   * @param {string} pattern - Regex source; case-insensitive unless
   *   options.caseSensitive is set. Throws SyntaxError on invalid patterns
   *   (caller's responsibility, matching original behavior).
   * @param {Object} [options]
   * @param {boolean} [options.caseSensitive=false]
   * @param {number} [options.maxResults=50]
   * @param {boolean} [options.includeContent=true]
   * @returns {Array} Files with at least one match, each carrying a
   *   matches array of {type: 'filename'|'content', match}.
   */
  regexSearch(files, pattern, options = {}) {
    const results = [];
    // String#match with a /g regex returns all matches and does not leave
    // stateful lastIndex behind, so reusing one regex object here is safe.
    const regex = new RegExp(pattern, options.caseSensitive ? 'g' : 'gi');

    const { maxResults = 50, includeContent = true } = options;

    for (const [filePath, fileData] of Object.entries(files)) {
      let matches = [];

      // Check filename
      const filenameMatches = fileData.name.match(regex);
      if (filenameMatches) {
        matches.push(...filenameMatches.map(match => ({ type: 'filename', match })));
      }

      // Check content
      if (includeContent && fileData.content) {
        const contentMatches = fileData.content.match(regex);
        if (contentMatches) {
          matches.push(...contentMatches.map(match => ({ type: 'content', match })));
        }
      }

      if (matches.length > 0) {
        results.push({
          path: filePath,
          name: fileData.name,
          size: fileData.size,
          extension: fileData.extension,
          matches
        });
      }
    }

    return results.slice(0, maxResults);
  }

  /**
   * Advanced search with multiple modes.
   *
   * @param {Object} files - Map of filePath -> file metadata.
   * @param {string} query - The search query.
   * @param {string} [mode='auto'] - 'filename' | 'semantic' | 'regex' | 'auto'.
   *   'auto' tries semantic search first and falls back to filename search.
   * @param {Object} [options] - Forwarded to the underlying search method.
   * @returns {Array} Search results (shape depends on the mode used).
   */
  advancedSearch(files, query, mode = 'auto', options = {}) {
    switch (mode) {
      case 'filename':
        return this.findFiles(files, query);
      case 'semantic':
        return this.semanticSearch(files, query, options);
      case 'regex':
        return this.regexSearch(files, query, options);
      case 'auto':
      default: {
        // Braces scope the const to this case, fixing the original
        // no-case-declarations hazard (lexical declaration leaking into
        // the whole switch block).
        const semanticResults = this.semanticSearch(files, query, options);
        if (semanticResults.length > 0) {
          return semanticResults;
        }
        return this.findFiles(files, query);
      }
    }
  }
}

// Guarded so the module also loads in an ESM context (where `module` is
// undefined); under CommonJS this exports exactly as before.
if (typeof module !== 'undefined' && module.exports !== undefined) {
  module.exports = SearchEngine;
}
/**
 * Manual integration tests for the async FileOperations API.
 *
 * Covers the happy path of every operation, concurrent (non-blocking)
 * behavior, and error handling for invalid operations.
 *
 * Fixes over the original scripts:
 *  - A failing test now sets process.exitCode = 1, so CI detects failures
 *    instead of the process always exiting 0 after merely logging them.
 *  - The "operation did not throw" branch is now also counted as a failure.
 *  - The repeated expect-rejection try/catch pattern is factored into
 *    expectRejection().
 *  - The three suites run sequentially so their log output cannot interleave.
 */
const FileOperations = require('./src/fileOps');
const fs = require('fs'); // kept from the original script

/**
 * Await `operation()` and expect it to reject.
 * Logs the outcome; marks the process failed if no error is thrown.
 */
async function expectRejection(operation) {
  try {
    await operation();
    console.log('โŒ Should have thrown error');
    process.exitCode = 1; // a missing rejection is a test failure
  } catch (error) {
    console.log('โœ“ Correctly threw error:', error.message);
  }
}

/** Exercises the happy path of every FileOperations method in sequence. */
async function testAsyncFileOperations() {
  console.log('๐Ÿงช Testing FileOperations async behavior...');

  const fileOps = new FileOperations('./test-temp');

  try {
    console.log('Test 1: Creating a file...');
    const result1 = await fileOps.createFile('test.txt', 'Hello, async world!');
    console.log('โœ“ File created:', result1.path);

    console.log('Test 2: Reading the file...');
    const result2 = await fileOps.readFile('test.txt');
    console.log('โœ“ File content:', result2.content);

    console.log('Test 3: Updating the file...');
    const result3 = await fileOps.updateFile('test.txt', 'Updated content!');
    console.log('โœ“ File updated:', result3.path);

    console.log('Test 4: Patching the file...');
    const result4 = await fileOps.patchFile('test.txt', 'Updated', 'Patched');
    console.log('โœ“ File patched:', result4.path);

    console.log('Test 5: Creating a directory...');
    const result5 = await fileOps.createDir('test-dir');
    console.log('โœ“ Directory created:', result5.path);

    console.log('Test 6: Listing directory...');
    const result6 = await fileOps.listDir('.');
    console.log('โœ“ Directory contents:', result6.contents.length, 'items');

    console.log('Test 7: Deleting the file...');
    const result7 = await fileOps.deleteFile('test.txt');
    console.log('โœ“ File deleted:', result7.path);

    console.log('Test 8: Deleting the directory...');
    const result8 = await fileOps.deleteDir('test-dir');
    console.log('โœ“ Directory deleted:', result8.path);

    console.log('โœ… All async FileOperations tests passed!');
  } catch (error) {
    console.error('โŒ Test failed:', error.message);
    process.exitCode = 1; // surface the failure to the shell / CI
  }
}

/** Verifies that independent operations can run concurrently via Promise.all. */
async function testConcurrency() {
  console.log('๐Ÿงช Testing FileOperations concurrency (non-blocking behavior)...');

  const fileOps = new FileOperations('./test-temp-concurrent');

  // Create a large file for testing
  const largeContent = 'x'.repeat(1024 * 1024); // 1MB of content

  try {
    console.log('Creating large file...');
    await fileOps.createFile('large.txt', largeContent);
    console.log('โœ“ Large file created');

    // Test concurrent operations
    console.log('Testing concurrent operations...');
    const startTime = Date.now();

    const promises = [
      fileOps.readFile('large.txt'),
      fileOps.createFile('file1.txt', 'content1'),
      fileOps.createFile('file2.txt', 'content2'),
      fileOps.createDir('dir1'),
      fileOps.createDir('dir2'),
    ];

    await Promise.all(promises);
    const endTime = Date.now();

    console.log(`โœ“ All concurrent operations completed in ${endTime - startTime}ms`);

    // Verify files were created
    const listResult = await fileOps.listDir('.');
    console.log(`โœ“ Directory contains ${listResult.contents.length} items`);

    // Clean up
    console.log('Cleaning up...');
    await fileOps.deleteFile('large.txt');
    await fileOps.deleteFile('file1.txt');
    await fileOps.deleteFile('file2.txt');
    await fileOps.deleteDir('dir1');
    await fileOps.deleteDir('dir2');

    console.log('โœ… Concurrency test passed! Operations are non-blocking.');
  } catch (error) {
    console.error('โŒ Concurrency test failed:', error.message);
    process.exitCode = 1;
  }
}

/** Verifies every operation rejects on invalid input/state. */
async function testErrorHandling() {
  console.log('๐Ÿงช Testing FileOperations error handling...');

  const fileOps = new FileOperations('./test-temp-errors');

  try {
    console.log('Test 1: Reading non-existent file...');
    await expectRejection(() => fileOps.readFile('nonexistent.txt'));

    console.log('Test 2: Creating duplicate file...');
    await fileOps.createFile('test.txt', 'content');
    await expectRejection(() => fileOps.createFile('test.txt', 'duplicate'));

    console.log('Test 3: Updating non-existent file...');
    await expectRejection(() => fileOps.updateFile('nonexistent.txt', 'content'));

    console.log('Test 4: Patching with wrong pattern...');
    await expectRejection(() => fileOps.patchFile('test.txt', 'nonexistent', 'replacement'));

    console.log('Test 5: Deleting non-existent file...');
    await expectRejection(() => fileOps.deleteFile('nonexistent.txt'));

    console.log('Test 6: Creating duplicate directory...');
    await fileOps.createDir('test-dir');
    await expectRejection(() => fileOps.createDir('test-dir'));

    console.log('Test 7: Deleting non-empty directory...');
    await fileOps.createFile('test-dir/file.txt', 'content');
    await expectRejection(() => fileOps.deleteDir('test-dir'));

    // Clean up
    console.log('Cleaning up...');
    await fileOps.deleteFile('test-dir/file.txt');
    await fileOps.deleteDir('test-dir');
    await fileOps.deleteFile('test.txt');

    console.log('โœ… All error handling tests passed!');
  } catch (error) {
    console.error('โŒ Error handling test failed:', error.message);
    process.exitCode = 1;
  }
}

// Run the suites sequentially; each suite handles its own errors.
(async () => {
  await testAsyncFileOperations();
  await testConcurrency();
  await testErrorHandling();
})();
/**
 * Manual test suite for the refactored CodebaseScanner components:
 * FileScanner, CacheManager, SearchEngine, ContentProcessor, and the
 * CodebaseScanner facade (including backward compatibility checks).
 *
 * Each test function returns true/false; runAllTests tallies results.
 * Fix over the original: a non-zero failure count now sets
 * process.exitCode = 1 so CI does not report a green run on failure.
 */
const path = require('path');
const FileScanner = require('../src/fileScanner');
const CacheManager = require('../src/cacheManager');
const SearchEngine = require('../src/searchEngine');
const ContentProcessor = require('../src/contentProcessor');
const CodebaseScanner = require('../src/codebaseScanner');

console.log('๐Ÿงช Testing Refactored CodebaseScanner Components...\n');

/** FileScanner: directory scan returns files and structure. */
function testFileScanner() {
  console.log('Testing FileScanner...');
  const scanner = new FileScanner(__dirname);

  try {
    const result = scanner.scanDirectory(__dirname);
    console.log('โœ… FileScanner.scanDirectory() works');
    console.log(`  Found ${Object.keys(result.files).length} files`);
    console.log(`  Found ${result.structure.filter(s => s.type === 'directory').length} directories`);
    return true;
  } catch (error) {
    console.log('โŒ FileScanner test failed:', error.message);
    return false;
  }
}

/** CacheManager: empty get, set/get round-trip, validity, clear. */
function testCacheManager() {
  console.log('Testing CacheManager...');
  const cache = new CacheManager(1000); // 1 second cache

  try {
    // Test empty cache
    if (cache.get() === null) {
      console.log('โœ… CacheManager returns null for empty cache');
    } else {
      console.log('โŒ CacheManager should return null for empty cache');
      return false;
    }

    // Test setting cache
    const testData = { test: 'data' };
    cache.set(testData);
    if (JSON.stringify(cache.get()) === JSON.stringify(testData)) {
      console.log('โœ… CacheManager.set() and .get() work');
    } else {
      console.log('โŒ CacheManager.set()/.get() failed');
      return false;
    }

    // Test cache validity
    if (cache.isCacheValid()) {
      console.log('โœ… CacheManager.isCacheValid() works');
    } else {
      console.log('โŒ CacheManager.isCacheValid() failed');
      return false;
    }

    // Test cache clearing
    cache.clear();
    if (cache.get() === null) {
      console.log('โœ… CacheManager.clear() works');
    } else {
      console.log('โŒ CacheManager.clear() failed');
      return false;
    }

    return true;
  } catch (error) {
    console.log('โŒ CacheManager test failed:', error.message);
    return false;
  }
}

/** SearchEngine: fuzzy scoring, semantic keywords, file finding. */
function testSearchEngine() {
  console.log('Testing SearchEngine...');
  const searchEngine = new SearchEngine();

  try {
    // Test fuzzy matching
    const score = searchEngine.fuzzyMatchScore('test', 'testing');
    if (score >= 80) {
      console.log('โœ… SearchEngine.fuzzyMatchScore() works');
    } else {
      console.log('โŒ SearchEngine.fuzzyMatchScore() failed');
      return false;
    }

    // Test semantic keywords
    const keywords = searchEngine.getSemanticKeywords('auth');
    if (keywords.includes('authentication') && keywords.includes('login')) {
      console.log('โœ… SearchEngine.getSemanticKeywords() works');
    } else {
      console.log('โŒ SearchEngine.getSemanticKeywords() failed');
      return false;
    }

    // Test file finding with mock data
    const mockFiles = {
      'src/auth.js': { name: 'auth.js', content: 'login function' },
      'src/user.js': { name: 'user.js', content: 'user management' }
    };

    const results = searchEngine.findFiles(mockFiles, 'auth');
    if (results.length > 0 && results[0].name === 'auth.js') {
      console.log('โœ… SearchEngine.findFiles() works');
    } else {
      console.log('โŒ SearchEngine.findFiles() failed');
      return false;
    }

    return true;
  } catch (error) {
    console.log('โŒ SearchEngine test failed:', error.message);
    return false;
  }
}

/** ContentProcessor: chunking and keyword match finding. */
function testContentProcessor() {
  console.log('Testing ContentProcessor...');
  const processor = new ContentProcessor();

  try {
    // Test chunking
    const content = 'This is a test content for chunking purposes.';
    const chunks = processor.chunkContent(content, 10);

    if (chunks.length > 1 && chunks[0].content.length <= 10) {
      console.log('โœ… ContentProcessor.chunkContent() works');
    } else {
      console.log('โŒ ContentProcessor.chunkContent() failed');
      return false;
    }

    // Test match finding
    const matches = processor.findMatchesInText('This is a test', 'test');
    if (matches.length > 0 && matches[0].keyword === 'test') {
      console.log('โœ… ContentProcessor.findMatchesInText() works');
    } else {
      console.log('โŒ ContentProcessor.findMatchesInText() failed');
      return false;
    }

    return true;
  } catch (error) {
    console.log('โŒ ContentProcessor test failed:', error.message);
    return false;
  }
}

/** CodebaseScanner facade: component wiring, scan, and search methods. */
function testRefactoredCodebaseScanner() {
  console.log('Testing Refactored CodebaseScanner Integration...');
  const scanner = new CodebaseScanner(__dirname);

  try {
    // Test that all components are initialized
    if (scanner.fileScanner && scanner.cacheManager && scanner.searchEngine && scanner.contentProcessor) {
      console.log('โœ… CodebaseScanner components initialized correctly');
    } else {
      console.log('โŒ CodebaseScanner components not initialized');
      return false;
    }

    // Test scan method
    const result = scanner.scan();
    if (result && result.files && result.structure) {
      console.log('โœ… CodebaseScanner.scan() works');
    } else {
      console.log('โŒ CodebaseScanner.scan() failed');
      return false;
    }

    // Test search methods
    const searchResults = scanner.findFiles('test');
    if (Array.isArray(searchResults)) {
      console.log('โœ… CodebaseScanner.findFiles() works');
    } else {
      console.log('โŒ CodebaseScanner.findFiles() failed');
      return false;
    }

    // Test semantic search
    const semanticResults = scanner.semanticSearch('function');
    if (Array.isArray(semanticResults)) {
      console.log('โœ… CodebaseScanner.semanticSearch() works');
    } else {
      console.log('โŒ CodebaseScanner.semanticSearch() failed');
      return false;
    }

    // Test regex search
    const regexResults = scanner.regexSearch('test');
    if (Array.isArray(regexResults)) {
      console.log('โœ… CodebaseScanner.regexSearch() works');
    } else {
      console.log('โŒ CodebaseScanner.regexSearch() failed');
      return false;
    }

    // Test cache clearing
    scanner.clearCache();
    console.log('โœ… CodebaseScanner.clearCache() works');

    return true;
  } catch (error) {
    console.log('โŒ CodebaseScanner integration test failed:', error.message);
    return false;
  }
}

/** Backward compatibility: all original public methods and scan shape intact. */
function testBackwardCompatibility() {
  console.log('Testing Backward Compatibility...');
  const scanner = new CodebaseScanner(__dirname);

  try {
    // Test that all original methods still exist and work
    const methods = ['scan', 'getSummaryForAI', 'getFileContents', 'findFiles',
      'semanticSearch', 'regexSearch', 'advancedSearch', 'clearCache'];

    for (const method of methods) {
      if (typeof scanner[method] !== 'function') {
        console.log(`โŒ Method ${method} is missing`);
        return false;
      }
    }

    console.log('โœ… All original methods are present');

    // Test that scan returns expected structure
    const result = scanner.scan();
    if (result.summary && result.files && result.structure) {
      console.log('โœ… Scan result structure is backward compatible');
    } else {
      console.log('โŒ Scan result structure changed');
      return false;
    }

    return true;
  } catch (error) {
    console.log('โŒ Backward compatibility test failed:', error.message);
    return false;
  }
}

/** Runs every suite, tallies results, and sets the process exit code. */
async function runAllTests() {
  const tests = [
    testFileScanner,
    testCacheManager,
    testSearchEngine,
    testContentProcessor,
    testRefactoredCodebaseScanner,
    testBackwardCompatibility
  ];

  let passed = 0;
  let failed = 0;

  for (const test of tests) {
    try {
      if (await test()) {
        passed++;
      } else {
        failed++;
      }
    } catch (error) {
      console.log(`โŒ Test ${test.name} threw exception:`, error.message);
      failed++;
    }
    console.log(''); // Empty line between tests
  }

  console.log(`๐Ÿ“Š Test Results: ${passed} passed, ${failed} failed`);

  if (failed === 0) {
    console.log('๐ŸŽ‰ All tests passed! Refactoring is successful.');
  } else {
    console.log('โš ๏ธ Some tests failed. Please review the implementation.');
    process.exitCode = 1; // FIX: previously exited 0 even with failures
  }
}

// Run the tests
runAllTests().catch(error => {
  console.error('Test suite failed:', error);
  process.exit(1);
});