From 98075bafe2fc3ed790e16efb8eac99dc558033e2 Mon Sep 17 00:00:00 2001 From: johnhuang316 <134570882+johnhuang316@users.noreply.github.com> Date: Fri, 3 Oct 2025 10:11:15 +0800 Subject: [PATCH 1/4] feat(search): align code search filters with index --- src/code_index_mcp/search/ag.py | 20 +++ src/code_index_mcp/search/base.py | 15 +- src/code_index_mcp/search/basic.py | 26 ++-- src/code_index_mcp/search/grep.py | 21 +++ src/code_index_mcp/search/ripgrep.py | 25 +++ src/code_index_mcp/search/ugrep.py | 24 +++ src/code_index_mcp/services/search_service.py | 145 ++++++++++-------- tests/search/test_search_filters.py | 52 +++++++ 8 files changed, 257 insertions(+), 71 deletions(-) create mode 100644 tests/search/test_search_filters.py diff --git a/src/code_index_mcp/search/ag.py b/src/code_index_mcp/search/ag.py index e5c7af5..aa3eb33 100644 --- a/src/code_index_mcp/search/ag.py +++ b/src/code_index_mcp/search/ag.py @@ -95,6 +95,26 @@ def search( cmd.extend(['-G', regex_pattern]) + processed_patterns = set() + exclude_dirs = getattr(self, 'exclude_dirs', []) + exclude_file_patterns = getattr(self, 'exclude_file_patterns', []) + + for directory in exclude_dirs: + normalized = directory.strip() + if not normalized or normalized in processed_patterns: + continue + cmd.extend(['--ignore', normalized]) + processed_patterns.add(normalized) + + for pattern in exclude_file_patterns: + normalized = pattern.strip() + if not normalized or normalized in processed_patterns: + continue + if normalized.startswith('!'): + normalized = normalized[1:] + cmd.extend(['--ignore', normalized]) + processed_patterns.add(normalized) + # Add -- to treat pattern as a literal argument, preventing injection cmd.append('--') cmd.append(search_pattern) diff --git a/src/code_index_mcp/search/base.py b/src/code_index_mcp/search/base.py index 0d50886..5e4c63b 100644 --- a/src/code_index_mcp/search/base.py +++ b/src/code_index_mcp/search/base.py @@ -10,10 +10,13 @@ import subprocess import sys from abc import ABC, abstractmethod -from typing import Dict, List, Optional, Tuple, Any +from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING from ..indexing.qualified_names import normalize_file_path +if TYPE_CHECKING: # pragma: no cover + from ..utils.file_filter import FileFilter + def parse_search_output( output: str, base_path: str, @@ -182,6 +185,16 @@ class SearchStrategy(ABC): Each strategy is responsible for searching code using a specific tool or method. """ + def configure_excludes(self, file_filter: Optional['FileFilter']) -> None: + """Configure shared exclusion settings for the strategy.""" + self.file_filter = file_filter + if file_filter: + self.exclude_dirs = sorted(set(file_filter.exclude_dirs)) + self.exclude_file_patterns = sorted(set(file_filter.exclude_files)) + else: + self.exclude_dirs = [] + self.exclude_file_patterns = [] + @property @abstractmethod def name(self) -> str: diff --git a/src/code_index_mcp/search/basic.py b/src/code_index_mcp/search/basic.py index c480990..9ef1846 100644 --- a/src/code_index_mcp/search/basic.py +++ b/src/code_index_mcp/search/basic.py @@ -1,9 +1,10 @@ """ Basic, pure-Python search strategy. 
""" +import fnmatch import os import re -import fnmatch +from pathlib import Path from typing import Dict, List, Optional, Tuple from .base import SearchStrategy, create_word_boundary_pattern, is_safe_regex_pattern @@ -83,33 +84,38 @@ def search( except re.error as e: raise ValueError(f"Invalid regex pattern: {pattern}, error: {e}") - for root, _, files in os.walk(base_path): + file_filter = getattr(self, 'file_filter', None) + base = Path(base_path) + + for root, dirs, files in os.walk(base_path): + if file_filter: + dirs[:] = [d for d in dirs if not file_filter.should_exclude_directory(d)] + for file in files: - # Improved file pattern matching with glob support if file_pattern and not self._matches_pattern(file, file_pattern): continue - file_path = os.path.join(root, file) + file_path = Path(root) / file + + if file_filter and not file_filter.should_process_path(file_path, base): + continue + rel_path = os.path.relpath(file_path, base_path) - + try: with open(file_path, 'r', encoding='utf-8', errors='ignore') as f: for line_num, line in enumerate(f, 1): if search_regex.search(line): content = line.rstrip('\n') - # Truncate content if it exceeds max_line_length if max_line_length and len(content) > max_line_length: content = content[:max_line_length] + '... (truncated)' - + if rel_path not in results: results[rel_path] = [] - # Strip newline for consistent output results[rel_path].append((line_num, content)) except (UnicodeDecodeError, PermissionError, OSError): - # Ignore files that can't be opened or read due to encoding/permission issues continue except Exception: - # Ignore any other unexpected exceptions to maintain robustness continue return results diff --git a/src/code_index_mcp/search/grep.py b/src/code_index_mcp/search/grep.py index 91ba575..f24c469 100644 --- a/src/code_index_mcp/search/grep.py +++ b/src/code_index_mcp/search/grep.py @@ -83,6 +83,27 @@ def search( # Note: grep's --include uses glob patterns, not regex cmd.append(f'--include={file_pattern}') + exclude_dirs = getattr(self, 'exclude_dirs', []) + exclude_file_patterns = getattr(self, 'exclude_file_patterns', []) + + processed_dirs = set() + for directory in exclude_dirs: + normalized = directory.strip() + if not normalized or normalized in processed_dirs: + continue + cmd.append(f'--exclude-dir={normalized}') + processed_dirs.add(normalized) + + processed_files = set() + for pattern in exclude_file_patterns: + normalized = pattern.strip() + if not normalized or normalized in processed_files: + continue + if normalized.startswith('!'): + normalized = normalized[1:] + cmd.append(f'--exclude={normalized}') + processed_files.add(normalized) + # Add -- to treat pattern as a literal argument, preventing injection cmd.append('--') cmd.append(search_pattern) diff --git a/src/code_index_mcp/search/ripgrep.py b/src/code_index_mcp/search/ripgrep.py index 75efd3f..8a5c325 100644 --- a/src/code_index_mcp/search/ripgrep.py +++ b/src/code_index_mcp/search/ripgrep.py @@ -69,6 +69,31 @@ def search( if file_pattern: cmd.extend(['--glob', file_pattern]) + exclude_dirs = getattr(self, 'exclude_dirs', []) + exclude_file_patterns = getattr(self, 'exclude_file_patterns', []) + + processed_patterns = set() + + for directory in exclude_dirs: + normalized = directory.strip() + if not normalized or normalized in processed_patterns: + continue + cmd.extend(['--glob', f'!**/{normalized}/**']) + processed_patterns.add(normalized) + + for pattern in exclude_file_patterns: + normalized = pattern.strip() + if not normalized or normalized in 
processed_patterns: + continue + if normalized.startswith('!'): + glob_pattern = normalized + elif any(ch in normalized for ch in '*?[') or '/' in normalized: + glob_pattern = f'!{normalized}' + else: + glob_pattern = f'!**/{normalized}' + cmd.extend(['--glob', glob_pattern]) + processed_patterns.add(normalized) + # Add -- to treat pattern as a literal argument, preventing injection cmd.append('--') cmd.append(search_pattern) diff --git a/src/code_index_mcp/search/ugrep.py b/src/code_index_mcp/search/ugrep.py index 87f1c48..d4302c1 100644 --- a/src/code_index_mcp/search/ugrep.py +++ b/src/code_index_mcp/search/ugrep.py @@ -69,6 +69,30 @@ def search( if file_pattern: cmd.extend(['--include', file_pattern]) + processed_patterns = set() + exclude_dirs = getattr(self, 'exclude_dirs', []) + exclude_file_patterns = getattr(self, 'exclude_file_patterns', []) + + for directory in exclude_dirs: + normalized = directory.strip() + if not normalized or normalized in processed_patterns: + continue + cmd.extend(['--ignore', f'**/{normalized}/**']) + processed_patterns.add(normalized) + + for pattern in exclude_file_patterns: + normalized = pattern.strip() + if not normalized or normalized in processed_patterns: + continue + if normalized.startswith('!'): + ignore_pattern = normalized[1:] + elif any(ch in normalized for ch in '*?[') or '/' in normalized: + ignore_pattern = normalized + else: + ignore_pattern = f'**/{normalized}' + cmd.extend(['--ignore', ignore_pattern]) + processed_patterns.add(normalized) + # Add '--' to treat pattern as a literal argument, preventing injection cmd.append('--') cmd.append(pattern) diff --git a/src/code_index_mcp/services/search_service.py b/src/code_index_mcp/services/search_service.py index 8d66f2d..a2c2799 100644 --- a/src/code_index_mcp/services/search_service.py +++ b/src/code_index_mcp/services/search_service.py @@ -5,24 +5,20 @@ and search strategy selection. """ -from typing import Dict, Any, Optional +from pathlib import Path +from typing import Any, Dict, List, Optional from .base_service import BaseService -from ..utils import ValidationHelper, ResponseFormatter +from ..utils import FileFilter, ResponseFormatter, ValidationHelper from ..search.base import is_safe_regex_pattern class SearchService(BaseService): - """ - Service for managing code search operations. - - This service handles: - - Code search with various parameters and options - - Search tool management and detection - - Search strategy selection and optimization - - Search capabilities reporting - """ + """Service for managing code search operations.""" + def __init__(self, ctx): + super().__init__(ctx) + self.file_filter = self._create_file_filter() def search_code( # pylint: disable=too-many-arguments self, @@ -34,46 +30,21 @@ def search_code( # pylint: disable=too-many-arguments regex: Optional[bool] = None, max_line_length: Optional[int] = None ) -> Dict[str, Any]: - """ - Search for code patterns in the project. - - Handles the logic for search_code_advanced MCP tool. - - Args: - pattern: The search pattern - case_sensitive: Whether search should be case-sensitive - context_lines: Number of context lines to show - file_pattern: Glob pattern to filter files - fuzzy: Whether to enable fuzzy matching - regex: Regex mode - True/False to force, None for auto-detection - max_line_length: Optional. Default None (no limit). Limits the length of lines when context_lines is used. 
- - Returns: - Dictionary with search results or error information - - Raises: - ValueError: If project is not set up or search parameters are invalid - """ + """Search for code patterns in the project.""" self._require_project_setup() - # Smart regex detection if regex parameter is None if regex is None: regex = is_safe_regex_pattern(pattern) - if regex: - pass - # Validate search pattern error = ValidationHelper.validate_search_pattern(pattern, regex) if error: raise ValueError(error) - # Validate file pattern if provided if file_pattern: error = ValidationHelper.validate_glob_pattern(file_pattern) if error: raise ValueError(f"Invalid file pattern: {error}") - # Get search strategy from settings if not self.settings: raise ValueError("Settings not available") @@ -81,7 +52,7 @@ def search_code( # pylint: disable=too-many-arguments if not strategy: raise ValueError("No search strategies available") - + self._configure_strategy(strategy) try: results = strategy.search( @@ -94,23 +65,13 @@ def search_code( # pylint: disable=too-many-arguments regex=regex, max_line_length=max_line_length ) - return ResponseFormatter.search_results_response(results) - except Exception as e: - raise ValueError(f"Search failed using '{strategy.name}': {e}") from e - + filtered = self._filter_results(results) + return ResponseFormatter.search_results_response(filtered) + except Exception as exc: + raise ValueError(f"Search failed using '{strategy.name}': {exc}") from exc def refresh_search_tools(self) -> str: - """ - Refresh the available search tools. - - Handles the logic for refresh_search_tools MCP tool. - - Returns: - Success message with available tools information - - Raises: - ValueError: If refresh operation fails - """ + """Refresh the available search tools.""" if not self.settings: raise ValueError("Settings not available") @@ -121,14 +82,8 @@ def refresh_search_tools(self) -> str: preferred = config['preferred_tool'] return f"Search tools refreshed. Available: {available}. Preferred: {preferred}." - def get_search_capabilities(self) -> Dict[str, Any]: - """ - Get information about search capabilities and available tools. 
- - Returns: - Dictionary with search tool information and capabilities - """ + """Get information about search capabilities and available tools.""" if not self.settings: return {"error": "Settings not available"} @@ -145,3 +100,73 @@ def get_search_capabilities(self) -> Dict[str, Any]: } return capabilities + + def _configure_strategy(self, strategy) -> None: + """Apply shared exclusion configuration to the strategy if supported.""" + configure = getattr(strategy, 'configure_excludes', None) + if not configure: + return + + try: + configure(self.file_filter) + except Exception: # pragma: no cover - defensive fallback + pass + + def _create_file_filter(self) -> FileFilter: + """Build a shared file filter drawing from project settings.""" + additional_dirs: List[str] = [] + additional_file_patterns: List[str] = [] + + settings = self.settings + if settings: + try: + config = settings.get_file_watcher_config() + except Exception: # pragma: no cover - fallback if config fails + config = {} + + for key in ('exclude_patterns', 'additional_exclude_patterns'): + patterns = config.get(key) or [] + for pattern in patterns: + if not isinstance(pattern, str): + continue + normalized = pattern.strip() + if not normalized: + continue + additional_dirs.append(normalized) + additional_file_patterns.append(normalized) + + file_filter = FileFilter(additional_dirs or None) + + if additional_file_patterns: + file_filter.exclude_files.update(additional_file_patterns) + + return file_filter + + def _filter_results(self, results: Dict[str, Any]) -> Dict[str, Any]: + """Filter out matches that reside under excluded paths.""" + if not isinstance(results, dict) or not results: + return results + + if 'error' in results or not self.file_filter or not self.base_path: + return results + + base_path = Path(self.base_path) + filtered: Dict[str, Any] = {} + + for rel_path, matches in results.items(): + if not isinstance(rel_path, str): + continue + + normalized = Path(rel_path.replace('\\', '/')) + try: + absolute = (base_path / normalized).resolve() + except Exception: # pragma: no cover - invalid path safety + continue + + try: + if self.file_filter.should_process_path(absolute, base_path): + filtered[rel_path] = matches + except Exception: # pragma: no cover - defensive fallback + continue + + return filtered diff --git a/tests/search/test_search_filters.py b/tests/search/test_search_filters.py new file mode 100644 index 0000000..787461d --- /dev/null +++ b/tests/search/test_search_filters.py @@ -0,0 +1,52 @@ +"""Tests covering shared search filtering behaviour.""" +import os +from types import SimpleNamespace +from unittest.mock import patch +from pathlib import Path as _TestPath +import sys + +ROOT = _TestPath(__file__).resolve().parents[2] +SRC_PATH = ROOT / 'src' +if str(SRC_PATH) not in sys.path: + sys.path.insert(0, str(SRC_PATH)) + +from code_index_mcp.search.basic import BasicSearchStrategy +from code_index_mcp.search.ripgrep import RipgrepStrategy +from code_index_mcp.utils.file_filter import FileFilter + + +def test_basic_strategy_skips_excluded_directories(tmp_path): + base = tmp_path + src_dir = base / "src" + src_dir.mkdir() + (src_dir / 'app.js').write_text("const db = 'mongo';\n") + + node_modules_dir = base / "node_modules" / "pkg" + node_modules_dir.mkdir(parents=True) + (node_modules_dir / 'index.js').write_text("// mongo dependency\n") + + strategy = BasicSearchStrategy() + strategy.configure_excludes(FileFilter()) + + results = strategy.search("mongo", str(base), case_sensitive=False) + + 
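+    # The default FileFilter blacklist is expected to cover node_modules (the
+    # ripgrep test below asserts a '!**/node_modules/' glob), so the hit inside
+    # node_modules/pkg/index.js should be pruned during traversal while the
+    # src/app.js match survives.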
included_path = os.path.join("src", "app.js") + excluded_path = os.path.join("node_modules", "pkg", "index.js") + + assert included_path in results + assert excluded_path not in results + + +@patch("code_index_mcp.search.ripgrep.subprocess.run") +def test_ripgrep_strategy_adds_exclude_globs(mock_run, tmp_path): + mock_run.return_value = SimpleNamespace(returncode=0, stdout="", stderr="") + + strategy = RipgrepStrategy() + strategy.configure_excludes(FileFilter()) + + strategy.search("mongo", str(tmp_path)) + + cmd = mock_run.call_args[0][0] + glob_args = [cmd[i + 1] for i, arg in enumerate(cmd) if arg == '--glob' and i + 1 < len(cmd)] + + assert any(value.startswith('!**/node_modules/') for value in glob_args) From efba963475e6bfbfe8126d3ce8c15d6f33bc50a7 Mon Sep 17 00:00:00 2001 From: johnhuang316 <134570882+johnhuang316@users.noreply.github.com> Date: Fri, 3 Oct 2025 10:24:00 +0800 Subject: [PATCH 2/4] chore(release): bump version to 2.4.1 --- AGENTS.md | 25 ++++++++++++++++++ RELEASE_NOTE.txt | 48 ++++++++++++++++++++++++++++++++++ pyproject.toml | 2 +- src/code_index_mcp/__init__.py | 3 ++- uv.lock | 3 ++- 5 files changed, 78 insertions(+), 3 deletions(-) create mode 100644 AGENTS.md create mode 100644 RELEASE_NOTE.txt diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000..886f335 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,25 @@ +# Repository Guidelines + +## Project Structure & Module Organization +Code Index MCP lives in `src/code_index_mcp/`, with `indexing/` managing builders, `services/` exposing MCP tool implementations, `search/` coordinating query utilities, and `utils/` housing cross-cutting helpers. The lightweight CLI bootstrapper is `run.py`, which adds `src/` to `PYTHONPATH` before invoking `code_index_mcp.server`. Sample corpora for language regression reside under `test/sample-projects/` (for example `python/user_management/`). Reserve `tests/` for runnable suites and avoid checking in generated `__pycache__` artifacts. + +## Build, Test, and Development Commands +Install dependencies with `uv sync` after cloning. Use `uv run code-index-mcp` to launch the MCP server directly, or `uv run python run.py` when you need the local sys.path shim. During development, `uv run code-index-mcp --help` will list available CLI flags, and `uv run python -m code_index_mcp.server` mirrors the published entry point for debugging. + +## Coding Style & Naming Conventions +Target Python 3.10+ and follow the `.pylintrc` configuration: 4-space indentation, 100-character line limit, and restrained function signatures (<= 7 parameters). Modules and functions stay `snake_case`, classes use `PascalCase`, and constants remain uppercase with underscores. Prefer explicit imports from sibling packages (`from .services import ...`) and keep logging to stderr as implemented in `server.py`. + +## Testing Guidelines +Automated tests should live under `tests/`, mirroring the package hierarchy (`tests/indexing/test_shallow_index.py`, etc.). Use `uv run pytest` (with optional `-k` selectors) for unit and integration coverage, and stage representative fixtures inside `test/sample-projects/` when exercising new language strategies. Document expected behaviors in fixtures' README files or inline comments, and fail fast if tree-sitter support is not available for a language you add. + +## Commit & Pull Request Guidelines +Follow the Conventional Commits style seen in history (`feat`, `fix`, `refactor(scope): summary`). Reference issue numbers when relevant and keep subjects under 72 characters. 
Pull requests should include: 1) a concise problem statement, 2) before/after behavior or performance notes, 3) instructions for reproducing test runs (`uv run pytest`, `uv run code-index-mcp`). Attach updated screenshots or logs when touching developer experience flows, and confirm the file watcher still transitions to "active" in manual smoke tests. + +## Agent Workflow Tips +Always call `set_project_path` before invoking other tools, and prefer `search_code_advanced` with targeted `file_pattern` filters to minimize noise. When editing indexing strategies, run `refresh_index` in between changes to confirm cache rebuilds. Clean up temporary directories via `clear_settings` if you notice stale metadata, and document any new tooling you introduce in this guide. + +## Release Preparation Checklist +- Update the project version everywhere it lives: `pyproject.toml`, `src/code_index_mcp/__init__.py`, and `uv.lock`. +- Add a release note entry to `RELEASE_NOTE.txt` for the new version. +- Commit the version bump (plus any release artifacts) and push the branch to `origin`. +- Create a git tag for the new version and push the tag to `origin`. diff --git a/RELEASE_NOTE.txt b/RELEASE_NOTE.txt new file mode 100644 index 0000000..2295def --- /dev/null +++ b/RELEASE_NOTE.txt @@ -0,0 +1,48 @@ +## 2.4.1 - Search Filtering Alignment + +### Highlights +- Code search now shares the central FileFilter blacklist, keeping results consistent with indexing (no more `node_modules` noise). +- CLI search strategies emit the appropriate exclusion flags automatically (ripgrep, ugrep, ag, grep). +- Basic fallback search prunes excluded directories during traversal, avoiding unnecessary IO. +- Added regression coverage for the new filtering behaviour (`tests/search/test_search_filters.py`). + +### Upgrade Notes +- No new dependencies; update via standard `uv sync` after pulling. +- Run `uv run pytest` to confirm the new search filter tests on your environment. + +## Shallow Index Default & Streamlined Server + +This release focuses on faster first-run experiences and a slimmer MCP surface area. + +### Highlights + +- **Shallow index by default**: Projects initialize with the new JSON-based shallow index for rapid file discovery. +- **Deep index on demand**: Added the `build_deep_index` tool so symbol extraction happens only when you request it. +- **Watcher-friendly rebuilds**: File watcher callbacks now refresh the shallow index, keeping file lists current without long rebuilds. +- **Server cleanup**: Removed unused `structure://project` resource, legacy prompts, and auxiliary documents for a leaner runtime. + +### Developer Experience Improvements + +- `find_files` now enforces true glob semantics (single `*` for one segment, `**` for recursive matches). +- `get_file_summary` responds with a `needs_deep_index` hint when deep symbols are unavailable. +- Index management services split shallow vs deep rebuild paths to clarify tool behavior. +- Repository docs (README, localized copies) highlight when to run `build_deep_index`. + +### Cleanups + +- Removed deprecated architecture and benchmarking documents. +- Trimmed benchmark scripts and outdated tests tied to the old SCIP experiment. + +### Upgrade Notes + +1. After updating, call `set_project_path` as usual - the server will build the shallow index automatically. +2. Run `build_deep_index` whenever you need symbol-level summaries (`get_file_summary`) or deep search capabilities. +3. 
Optionally run `refresh_index` to refresh the shallow index if the watcher is disabled. + +### Compatibility + +- Tool names and signatures are unchanged. +- Deep-index workflows remain available; they now require an explicit `build_deep_index` call. +- Python 3.10+ requirement unchanged; no new third-party dependencies. + +Enjoy faster cold starts and a simpler interface tailored for LLM-driven workflows. diff --git a/pyproject.toml b/pyproject.toml index bf781b5..428e2d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "code-index-mcp" -version = "2.4.0" +version = "2.4.1" description = "Code indexing and analysis tools for LLMs using MCP" readme = "README.md" requires-python = ">=3.10" diff --git a/src/code_index_mcp/__init__.py b/src/code_index_mcp/__init__.py index 8ae1a1d..f47ee02 100644 --- a/src/code_index_mcp/__init__.py +++ b/src/code_index_mcp/__init__.py @@ -3,4 +3,5 @@ A Model Context Protocol server for code indexing, searching, and analysis. """ -__version__ = "2.4.0" +__version__ = "2.4.1" + diff --git a/uv.lock b/uv.lock index 78cc596..08294cf 100644 --- a/uv.lock +++ b/uv.lock @@ -49,7 +49,7 @@ wheels = [ [[package]] name = "code-index-mcp" -version = "2.4.0" +version = "2.4.1" source = { editable = "." } dependencies = [ { name = "mcp" }, @@ -527,3 +527,4 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070 }, { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067 }, ] + From 72bc96eeb36cf6637df1eda615720c8c65dd3dea Mon Sep 17 00:00:00 2001 From: johnhuang316 <134570882+johnhuang316@users.noreply.github.com> Date: Fri, 3 Oct 2025 10:28:34 +0800 Subject: [PATCH 3/4] docs: limit release note to current version --- RELEASE_NOTE.txt | 37 ------------------------------------- 1 file changed, 37 deletions(-) diff --git a/RELEASE_NOTE.txt b/RELEASE_NOTE.txt index 2295def..7333033 100644 --- a/RELEASE_NOTE.txt +++ b/RELEASE_NOTE.txt @@ -9,40 +9,3 @@ ### Upgrade Notes - No new dependencies; update via standard `uv sync` after pulling. - Run `uv run pytest` to confirm the new search filter tests on your environment. - -## Shallow Index Default & Streamlined Server - -This release focuses on faster first-run experiences and a slimmer MCP surface area. - -### Highlights - -- **Shallow index by default**: Projects initialize with the new JSON-based shallow index for rapid file discovery. -- **Deep index on demand**: Added the `build_deep_index` tool so symbol extraction happens only when you request it. -- **Watcher-friendly rebuilds**: File watcher callbacks now refresh the shallow index, keeping file lists current without long rebuilds. -- **Server cleanup**: Removed unused `structure://project` resource, legacy prompts, and auxiliary documents for a leaner runtime. - -### Developer Experience Improvements - -- `find_files` now enforces true glob semantics (single `*` for one segment, `**` for recursive matches). -- `get_file_summary` responds with a `needs_deep_index` hint when deep symbols are unavailable. -- Index management services split shallow vs deep rebuild paths to clarify tool behavior. 
-- Repository docs (README, localized copies) highlight when to run `build_deep_index`. - -### Cleanups - -- Removed deprecated architecture and benchmarking documents. -- Trimmed benchmark scripts and outdated tests tied to the old SCIP experiment. - -### Upgrade Notes - -1. After updating, call `set_project_path` as usual - the server will build the shallow index automatically. -2. Run `build_deep_index` whenever you need symbol-level summaries (`get_file_summary`) or deep search capabilities. -3. Optionally run `refresh_index` to refresh the shallow index if the watcher is disabled. - -### Compatibility - -- Tool names and signatures are unchanged. -- Deep-index workflows remain available; they now require an explicit `build_deep_index` call. -- Python 3.10+ requirement unchanged; no new third-party dependencies. - -Enjoy faster cold starts and a simpler interface tailored for LLM-driven workflows. From 3699110c64cd5b6da2dd4b80865b65e2e9c55fcb Mon Sep 17 00:00:00 2001 From: johnhuang316 <134570882+johnhuang316@users.noreply.github.com> Date: Fri, 3 Oct 2025 10:29:45 +0800 Subject: [PATCH 4/4] docs: streamline 2.4.1 release notes --- RELEASE_NOTE.txt | 4 ---- 1 file changed, 4 deletions(-) diff --git a/RELEASE_NOTE.txt b/RELEASE_NOTE.txt index 7333033..8a744bb 100644 --- a/RELEASE_NOTE.txt +++ b/RELEASE_NOTE.txt @@ -5,7 +5,3 @@ - CLI search strategies emit the appropriate exclusion flags automatically (ripgrep, ugrep, ag, grep). - Basic fallback search prunes excluded directories during traversal, avoiding unnecessary IO. - Added regression coverage for the new filtering behaviour (`tests/search/test_search_filters.py`). - -### Upgrade Notes -- No new dependencies; update via standard `uv sync` after pulling. -- Run `uv run pytest` to confirm the new search filter tests on your environment.
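
For context, a minimal usage sketch of the new exclusion wiring (not part of the patches above): it mirrors what `SearchService._configure_strategy` now does, using the pure-Python fallback strategy so no external CLI tool is required. The project path is hypothetical, and the default `FileFilter` blacklist is assumed to cover directories such as `node_modules`.

```python
from pathlib import Path

from code_index_mcp.search.basic import BasicSearchStrategy
from code_index_mcp.utils.file_filter import FileFilter

project_root = Path("/path/to/project")  # hypothetical checkout, replace locally

# Build the shared filter once, the same object SearchService now hands to
# whichever strategy wins tool detection.
file_filter = FileFilter()

# configure_excludes() is the new hook on SearchStrategy; the basic strategy
# uses it to prune blacklisted directories during os.walk, CLI strategies
# translate it into tool-specific flags instead.
strategy = BasicSearchStrategy()
strategy.configure_excludes(file_filter)

# Matches under excluded directories never reach the results dict.
results = strategy.search("TODO", str(project_root), case_sensitive=True)
for rel_path, matches in sorted(results.items()):
    print(f"{rel_path}: {len(matches)} match(es)")
```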