From 78233d71c546cdb1bc86fa644f3cb0be9328fad6 Mon Sep 17 00:00:00 2001 From: Scott Nemes Date: Mon, 20 Apr 2026 17:22:36 -0700 Subject: [PATCH 1/4] Initial changes for schema prefetching --- mycli/clitoolbar.py | 5 + mycli/main.py | 15 ++ mycli/myclirc | 7 + mycli/schema_prefetcher.py | 231 +++++++++++++++++++++++++ mycli/sqlcompleter.py | 33 ++++ mycli/sqlexecute.py | 45 ++--- test/myclirc | 7 + test/pytests/test_clitoolbar.py | 11 ++ test/pytests/test_schema_prefetcher.py | 160 +++++++++++++++++ test/utils.py | 11 ++ 10 files changed, 505 insertions(+), 20 deletions(-) create mode 100644 mycli/schema_prefetcher.py create mode 100644 test/pytests/test_schema_prefetcher.py diff --git a/mycli/clitoolbar.py b/mycli/clitoolbar.py index 74df09ea..80700415 100644 --- a/mycli/clitoolbar.py +++ b/mycli/clitoolbar.py @@ -69,6 +69,11 @@ def get_toolbar_tokens() -> list[tuple[str, str]]: dynamic.append(divider) dynamic.append(("class:bottom-toolbar", "Refreshing completions…")) + schema_prefetcher = getattr(mycli, 'schema_prefetcher', None) + if schema_prefetcher is not None and schema_prefetcher.is_prefetching(): + dynamic.append(divider) + dynamic.append(("class:bottom-toolbar", "Prefetching schemas…")) + if format_string and format_string != r'\B': if format_string.startswith(r'\B'): amended_format = format_string[2:] diff --git a/mycli/main.py b/mycli/main.py index 515c2408..5d772844 100755 --- a/mycli/main.py +++ b/mycli/main.py @@ -80,6 +80,7 @@ from mycli.packages.sqlresult import SQLResult from mycli.packages.ssh_utils import read_ssh_config from mycli.packages.tabular_output import sql_format +from mycli.schema_prefetcher import SchemaPrefetcher from mycli.sqlcompleter import SQLCompleter from mycli.sqlexecute import FIELD_TYPES, SQLExecute from mycli.types import Query @@ -243,6 +244,8 @@ def __init__( self.logfile = False self.completion_refresher = CompletionRefresher() + self.prefetch_schemas_setting = c["main"].get("prefetch_schemas", "") or "" + 
self.schema_prefetcher = SchemaPrefetcher(self) self.logger = logging.getLogger(__name__) self.initialize_logging() @@ -301,6 +304,8 @@ def __init__( special.set_destructive_keywords(self.destructive_keywords) def close(self) -> None: + if hasattr(self, 'schema_prefetcher'): + self.schema_prefetcher.stop() if self.sqlexecute is not None: self.sqlexecute.close() @@ -1008,6 +1013,12 @@ def configure_pager(self) -> None: special.disable_pager() def refresh_completions(self, reset: bool = False) -> list[SQLResult]: + # Cancel any in-flight schema prefetch before the completer is + # replaced: the fresh completer will not contain the prefetched + # schemas, so we restart the prefetch pass after the swap. + self.schema_prefetcher.stop() + self.schema_prefetcher.clear_loaded() + if reset: with self._completer_lock: self.completer.reset_completions() @@ -1034,6 +1045,10 @@ def _on_completions_refreshed(self, new_completer: SQLCompleter) -> None: # "Refreshing completions..." indicator self.prompt_session.app.invalidate() + # Kick off background prefetch for any extra schemas configured + # via ``prefetch_schemas`` so users get cross-schema completions. + self.schema_prefetcher.start_configured() + def run_query( self, query: str, diff --git a/mycli/myclirc b/mycli/myclirc index 192b8e38..eed91ec7 100644 --- a/mycli/myclirc +++ b/mycli/myclirc @@ -13,6 +13,13 @@ smart_completion = True # Suggestion: 3. min_completion_trigger = 1 +# Comma-separated list of schemas to prefetch for auto-completion, or +# "all" to prefetch every schema the connected user can access. Prefetching +# runs in the background after launch so that qualified completions (e.g. +# ``OtherSchema.table``) work without first switching databases. When +# empty (the default), only the currently-selected schema is loaded. +prefetch_schemas = + # Multi-line mode allows breaking up the sql statements into multiple lines. If # this is set to True, then the end of the statements must have a semi-colon. 
# If this is set to False then sql statements can't be split into multiple diff --git a/mycli/schema_prefetcher.py b/mycli/schema_prefetcher.py new file mode 100644 index 00000000..e37daac5 --- /dev/null +++ b/mycli/schema_prefetcher.py @@ -0,0 +1,231 @@ +"""Background prefetcher for multi-schema auto-completion. + +The default completion refresher only populates metadata for the +currently-selected schema. ``SchemaPrefetcher`` loads metadata for +additional schemas on a background thread so that users can get +qualified auto-completion suggestions (``OtherSchema.table``) without +switching databases first. +""" + +from __future__ import annotations + +import logging +import threading +from typing import TYPE_CHECKING, Any, Iterable + +from mycli.sqlexecute import SQLExecute + +if TYPE_CHECKING: # pragma: no cover - typing only + from mycli.main import MyCli + from mycli.sqlcompleter import SQLCompleter + +_logger = logging.getLogger(__name__) + +ALL_SCHEMAS_SENTINEL = 'all' + + +def parse_prefetch_setting(raw: str | None) -> list[str] | None: + """Parse the ``prefetch_schemas`` option value. + + Returns ``None`` when the user wants every accessible schema + (``all``), a list of explicit schema names otherwise, or an empty + list when prefetching is disabled. 
+ """ + if not raw: + return [] + value = raw.strip() + if not value: + return [] + if value.lower() == ALL_SCHEMAS_SENTINEL: + return None + return [part.strip() for part in value.split(',') if part.strip()] + + +class SchemaPrefetcher: + """Run schema prefetch work on a dedicated background thread.""" + + def __init__(self, mycli: 'MyCli') -> None: + self.mycli = mycli + self._thread: threading.Thread | None = None + self._cancel = threading.Event() + self._loaded: set[str] = set() + + def is_prefetching(self) -> bool: + return bool(self._thread and self._thread.is_alive()) + + def clear_loaded(self) -> None: + """Forget which schemas have been prefetched (used on reset).""" + self._loaded.clear() + + def stop(self, timeout: float = 2.0) -> None: + """Signal the background thread to stop and wait briefly for it.""" + if self._thread and self._thread.is_alive(): + self._cancel.set() + self._thread.join(timeout=timeout) + self._cancel = threading.Event() + self._thread = None + + def start_configured(self) -> None: + """Start prefetching based on the user's ``prefetch_schemas`` setting.""" + setting = getattr(self.mycli, 'prefetch_schemas_setting', '') + parsed = parse_prefetch_setting(setting) + if parsed is None: + self._start(self._resolve_all_schemas()) + else: + self._start(parsed) + + def prefetch_schema_now(self, schema: str) -> None: + """Fetch *schema* immediately on a background thread. + + Used when a user manually switches to a schema. The method + returns quickly; the actual work happens in the new thread. + """ + if not schema: + return + # Avoid double-fetching while a full-prefetch pass is running. 
+ self.stop() + self._start([schema]) + + def _start(self, schemas: Iterable[str]) -> None: + self.stop() + current = self._current_schema() + queue = [s for s in schemas if s and s != current and s not in self._loaded] + if not queue: + self._invalidate_app() + return + self._cancel = threading.Event() + self._thread = threading.Thread( + target=self._run, + args=(queue,), + name='schema_prefetcher', + daemon=True, + ) + self._thread.start() + self._invalidate_app() + + def _run(self, schemas: list[str]) -> None: + executor: SQLExecute | None = None + try: + executor = self._make_executor() + except Exception as e: # pragma: no cover - defensive + _logger.error('schema prefetch could not open connection: %r', e) + self._invalidate_app() + return + try: + for schema in schemas: + if self._cancel.is_set(): + return + try: + self._prefetch_one(executor, schema) + self._loaded.add(schema) + except Exception as e: + _logger.error('prefetch failed for schema %r: %r', schema, e) + finally: + try: + executor.close() + except Exception: # pragma: no cover - defensive + pass + self._invalidate_app() + + def _prefetch_one(self, executor: SQLExecute, schema: str) -> None: + _logger.debug('prefetching schema %r', schema) + table_rows = list(executor.table_columns(schema=schema)) + fk_rows = list(executor.foreign_keys(schema=schema)) + enum_rows = list(executor.enum_values(schema=schema)) + func_rows = list(executor.functions(schema=schema)) + proc_rows = list(executor.procedures(schema=schema)) + + # Use the live completer's escape logic so keys match what the + # completion engine computes when parsing user input. 
+ completer = self.mycli.completer + table_columns: dict[str, list[str]] = {} + for table, column in table_rows: + esc_table = completer.escape_name(table) + esc_col = completer.escape_name(column) + cols = table_columns.setdefault(esc_table, ['*']) + cols.append(esc_col) + + fk_tables: dict[str, set[str]] = {} + fk_relations: list[tuple[str, str, str, str]] = [] + for table, col, ref_table, ref_col in fk_rows: + esc_table = completer.escape_name(table) + esc_col = completer.escape_name(col) + esc_ref_table = completer.escape_name(ref_table) + esc_ref_col = completer.escape_name(ref_col) + fk_tables.setdefault(esc_table, set()).add(esc_ref_table) + fk_tables.setdefault(esc_ref_table, set()).add(esc_table) + fk_relations.append((esc_table, esc_col, esc_ref_table, esc_ref_col)) + fk_payload: dict[str, Any] = {'tables': fk_tables, 'relations': fk_relations} + + enum_values: dict[str, dict[str, list[str]]] = {} + for table, column, values in enum_rows: + esc_table = completer.escape_name(table) + esc_col = completer.escape_name(column) + enum_values.setdefault(esc_table, {})[esc_col] = list(values) + + functions: dict[str, None] = {} + for row in func_rows: + if not row or not row[0]: + continue + functions[completer.escape_name(row[0])] = None + + procedures: dict[str, None] = {} + for row in proc_rows: + if not row or not row[0]: + continue + procedures[completer.escape_name(row[0])] = None + + with self.mycli._completer_lock: + live_completer: 'SQLCompleter' = self.mycli.completer + live_completer.load_schema_metadata( + schema=schema, + table_columns=table_columns, + foreign_keys=fk_payload, + enum_values=enum_values, + functions=functions, + procedures=procedures, + ) + self._invalidate_app() + + def _resolve_all_schemas(self) -> list[str]: + sqlexecute = self.mycli.sqlexecute + if sqlexecute is None: + return [] + try: + return list(sqlexecute.databases()) + except Exception as e: + _logger.error('failed to list databases for prefetch: %r', e) + return [] + + def 
_current_schema(self) -> str | None: + sqlexecute = self.mycli.sqlexecute + return sqlexecute.dbname if sqlexecute is not None else None + + def _make_executor(self) -> SQLExecute: + sqlexecute = self.mycli.sqlexecute + assert sqlexecute is not None + return SQLExecute( + sqlexecute.dbname, + sqlexecute.user, + sqlexecute.password, + sqlexecute.host, + sqlexecute.port, + sqlexecute.socket, + sqlexecute.character_set, + sqlexecute.local_infile, + sqlexecute.ssl, + sqlexecute.ssh_user, + sqlexecute.ssh_host, + sqlexecute.ssh_port, + sqlexecute.ssh_password, + sqlexecute.ssh_key_filename, + ) + + def _invalidate_app(self) -> None: + prompt_session = getattr(self.mycli, 'prompt_session', None) + if prompt_session is None: + return + try: + prompt_session.app.invalidate() + except Exception: # pragma: no cover - defensive + pass diff --git a/mycli/sqlcompleter.py b/mycli/sqlcompleter.py index c0f669c8..750f3f2b 100644 --- a/mycli/sqlcompleter.py +++ b/mycli/sqlcompleter.py @@ -1157,6 +1157,39 @@ def extend_collations(self, collation_data: Generator[tuple]) -> None: def set_dbname(self, dbname: str | None) -> None: self.dbname = dbname or '' + def load_schema_metadata( + self, + schema: str, + table_columns: dict[str, list[str]], + foreign_keys: dict[str, Any], + enum_values: dict[str, dict[str, list[str]]], + functions: dict[str, None], + procedures: dict[str, None], + ) -> None: + """Atomically replace the completion metadata for *schema*. + + Each argument is pre-built by the caller in the same shape that + ``dbmetadata[kind][schema]`` uses internally. Replacing the + per-schema dicts by assignment (rather than appending to the live + structures) keeps concurrent readers of ``get_completions`` safe. 
+ """ + if not schema: + return + self.dbmetadata["tables"][schema] = table_columns + self.dbmetadata["views"].setdefault(schema, {}) + self.dbmetadata["functions"][schema] = functions + self.dbmetadata["procedures"][schema] = procedures + self.dbmetadata["enum_values"][schema] = enum_values + self.dbmetadata["foreign_keys"][schema] = foreign_keys + self.all_completions.add(schema) + for table, cols in table_columns.items(): + self.all_completions.add(table) + for col in cols: + if col != "*": + self.all_completions.add(col) + for func_name in functions: + self.all_completions.add(func_name) + def reset_completions(self) -> None: self.databases: list[str] = [] self.users: list[str] = [] diff --git a/mycli/sqlexecute.py b/mycli/sqlexecute.py index b045a4c6..ecf975ff 100644 --- a/mycli/sqlexecute.py +++ b/mycli/sqlexecute.py @@ -444,32 +444,35 @@ def tables(self) -> Generator[tuple[str], None, None]: cur.execute(self.tables_query) yield from cur - def table_columns(self) -> Generator[tuple[str, str], None, None]: - """Yields (table name, column name) pairs""" + def table_columns(self, schema: str | None = None) -> Generator[tuple[str, str], None, None]: + """Yields (table name, column name) pairs for *schema* (default: current database).""" + target = schema if schema is not None else self.dbname assert isinstance(self.conn, Connection) with self.conn.cursor() as cur: - _logger.debug("Columns Query. sql: %r", self.table_columns_query) - cur.execute(self.table_columns_query, (self.dbname,)) + _logger.debug("Columns Query. 
sql: %r schema: %r", self.table_columns_query, target) + cur.execute(self.table_columns_query, (target,)) yield from cur - def enum_values(self) -> Generator[tuple[str, str, list[str]], None, None]: - """Yields (table name, column name, enum values) tuples""" + def enum_values(self, schema: str | None = None) -> Generator[tuple[str, str, list[str]], None, None]: + """Yields (table name, column name, enum values) tuples for *schema*.""" + target = schema if schema is not None else self.dbname assert isinstance(self.conn, Connection) with self.conn.cursor() as cur: - _logger.debug("Enum Values Query. sql: %r", self.enum_values_query) - cur.execute(self.enum_values_query, (self.dbname,)) + _logger.debug("Enum Values Query. sql: %r schema: %r", self.enum_values_query, target) + cur.execute(self.enum_values_query, (target,)) for table_name, column_name, column_type in cur: values = self._parse_enum_values(column_type) if values: yield (table_name, column_name, values) - def foreign_keys(self) -> Generator[tuple[str, str, str, str], None, None]: - """Yields (table_name, column_name, referenced_table_name, referenced_column_name) tuples""" + def foreign_keys(self, schema: str | None = None) -> Generator[tuple[str, str, str, str], None, None]: + """Yields (table_name, column_name, referenced_table_name, referenced_column_name) tuples for *schema*.""" + target = schema if schema is not None else self.dbname assert isinstance(self.conn, Connection) with self.conn.cursor() as cur: - _logger.debug("Foreign Keys Query. sql: %r", self.foreign_keys_query) + _logger.debug("Foreign Keys Query. 
sql: %r schema: %r", self.foreign_keys_query, target) try: - cur.execute(self.foreign_keys_query, (self.dbname,)) + cur.execute(self.foreign_keys_query, (target,)) yield from cur except Exception as e: _logger.error('No foreign key completions due to %r', e) @@ -481,23 +484,25 @@ def databases(self) -> list[str]: cur.execute(self.databases_query) return [x[0] for x in cur.fetchall()] - def functions(self) -> Generator[tuple[str, str], None, None]: - """Yields tuples of (schema_name, function_name)""" + def functions(self, schema: str | None = None) -> Generator[tuple[str, str], None, None]: + """Yields tuples of (schema_name, function_name) for *schema*.""" + target = schema if schema is not None else self.dbname assert isinstance(self.conn, Connection) with self.conn.cursor() as cur: - _logger.debug("Functions Query. sql: %r", self.functions_query) - cur.execute(self.functions_query, (self.dbname,)) + _logger.debug("Functions Query. sql: %r schema: %r", self.functions_query, target) + cur.execute(self.functions_query, (target,)) yield from cur - def procedures(self) -> Generator[tuple, None, None]: - """Yields tuples of (procedure_name, )""" + def procedures(self, schema: str | None = None) -> Generator[tuple, None, None]: + """Yields tuples of (procedure_name, ) for *schema*.""" + target = schema if schema is not None else self.dbname assert isinstance(self.conn, Connection) with self.conn.cursor() as cur: - _logger.debug("Procedures Query. sql: %r", self.procedures_query) + _logger.debug("Procedures Query. sql: %r schema: %r", self.procedures_query, target) try: - cur.execute(self.procedures_query, (self.dbname,)) + cur.execute(self.procedures_query, (target,)) except pymysql.DatabaseError as e: _logger.error('No procedure completions due to %r', e) yield () diff --git a/test/myclirc b/test/myclirc index ece0db36..240d12bb 100644 --- a/test/myclirc +++ b/test/myclirc @@ -13,6 +13,13 @@ smart_completion = True # Suggestion: 3. 
min_completion_trigger = 1 +# Comma-separated list of schemas to prefetch for auto-completion, or +# "all" to prefetch every schema the connected user can access. Prefetching +# runs in the background after launch so that qualified completions (e.g. +# ``OtherSchema.table``) work without first switching databases. When +# empty (the default), only the currently-selected schema is loaded. +prefetch_schemas = + # Multi-line mode allows breaking up the sql statements into multiple lines. If # this is set to True, then the end of the statements must have a semi-colon. # If this is set to False then sql statements can't be split into multiple diff --git a/test/pytests/test_clitoolbar.py b/test/pytests/test_clitoolbar.py index 50d7c097..d0ffc104 100644 --- a/test/pytests/test_clitoolbar.py +++ b/test/pytests/test_clitoolbar.py @@ -17,6 +17,7 @@ def make_mycli( editing_mode: EditingMode = EditingMode.EMACS, toolbar_error_message: str | None = None, refreshing: bool = False, + prefetching: bool = False, ): return SimpleNamespace( completer=SimpleNamespace(smart_completion=smart_completion), @@ -24,6 +25,7 @@ def make_mycli( prompt_session=SimpleNamespace(editing_mode=editing_mode), toolbar_error_message=toolbar_error_message, completion_refresher=SimpleNamespace(is_refreshing=MagicMock(return_value=refreshing)), + schema_prefetcher=SimpleNamespace(is_prefetching=MagicMock(return_value=prefetching)), get_custom_toolbar=MagicMock(return_value="custom toolbar"), ) @@ -54,6 +56,15 @@ def test_create_toolbar_tokens_func_clears_toolbar_error_message() -> None: assert ("class:bottom-toolbar", "right-arrow accepts full-line suggestion") not in first +def test_create_toolbar_tokens_func_shows_prefetching() -> None: + mycli = make_mycli(prefetching=True) + + toolbar = clitoolbar.create_toolbar_tokens_func(mycli, lambda: False, None, mycli.get_custom_toolbar) + result = toolbar() + + assert ("class:bottom-toolbar", "Prefetching schemas…") in result + + def 
test_create_toolbar_tokens_func_shows_multiline_vi_and_refreshing(monkeypatch) -> None: mycli = make_mycli( smart_completion=False, diff --git a/test/pytests/test_schema_prefetcher.py b/test/pytests/test_schema_prefetcher.py new file mode 100644 index 00000000..d61dd6b2 --- /dev/null +++ b/test/pytests/test_schema_prefetcher.py @@ -0,0 +1,160 @@ +# type: ignore + +import threading +from types import SimpleNamespace +from unittest.mock import MagicMock + +from mycli import schema_prefetcher as schema_prefetcher_module +from mycli.schema_prefetcher import SchemaPrefetcher, parse_prefetch_setting +from mycli.sqlcompleter import SQLCompleter + + +def test_parse_prefetch_setting_empty() -> None: + assert parse_prefetch_setting('') == [] + assert parse_prefetch_setting(None) == [] + assert parse_prefetch_setting(' ') == [] + + +def test_parse_prefetch_setting_all() -> None: + assert parse_prefetch_setting('all') is None + assert parse_prefetch_setting('ALL') is None + assert parse_prefetch_setting(' all ') is None + + +def test_parse_prefetch_setting_explicit_list() -> None: + assert parse_prefetch_setting('foo, bar , baz') == ['foo', 'bar', 'baz'] + assert parse_prefetch_setting('solo') == ['solo'] + + +def make_mycli(prefetch_setting: str = '', dbname: str = 'current', databases=None): + if databases is None: + databases = ['current', 'other1', 'other2'] + completer = SQLCompleter(smart_completion=True) + completer.set_dbname(dbname) + sqlexecute = SimpleNamespace( + dbname=dbname, + user='u', + password='p', + host='h', + port=3306, + socket=None, + character_set='utf8mb4', + local_infile=False, + ssl=None, + ssh_user=None, + ssh_host=None, + ssh_port=22, + ssh_password=None, + ssh_key_filename=None, + databases=MagicMock(return_value=list(databases)), + ) + return SimpleNamespace( + completer=completer, + sqlexecute=sqlexecute, + prefetch_schemas_setting=prefetch_setting, + _completer_lock=threading.Lock(), + prompt_session=None, + ) + + +def 
_fake_executor_factory(per_schema_tables): + """Build an executor stub whose schema-aware methods yield prebuilt rows.""" + + def make(*_args, **_kwargs): + executor = MagicMock() + executor.table_columns.side_effect = lambda schema=None: iter(per_schema_tables.get(schema, [])) + executor.foreign_keys.side_effect = lambda schema=None: iter([]) + executor.enum_values.side_effect = lambda schema=None: iter([]) + executor.functions.side_effect = lambda schema=None: iter([]) + executor.procedures.side_effect = lambda schema=None: iter([]) + executor.close = MagicMock() + return executor + + return make + + +def test_start_configured_skips_current_and_prefetches_others(monkeypatch): + mycli = make_mycli(prefetch_setting='other1, current, other2') + tables = { + 'other1': [('users', 'id'), ('users', 'email')], + 'other2': [('orders', 'id')], + } + monkeypatch.setattr(schema_prefetcher_module, 'SQLExecute', _fake_executor_factory(tables)) + + prefetcher = SchemaPrefetcher(mycli) + prefetcher.start_configured() + assert prefetcher._thread is not None + prefetcher._thread.join(timeout=5) + + tables_meta = mycli.completer.dbmetadata['tables'] + assert 'other1' in tables_meta + assert 'other2' in tables_meta + # Current schema must be untouched by the prefetcher. + assert 'current' not in tables_meta + assert set(tables_meta['other1'].keys()) == {'users'} + # Column list starts with '*' marker and contains escaped column names. 
+ assert tables_meta['other1']['users'][0] == '*' + assert 'id' in tables_meta['other1']['users'] + + +def test_start_configured_all_resolves_from_databases(monkeypatch): + mycli = make_mycli(prefetch_setting='all', databases=['current', 'alpha', 'beta']) + tables = { + 'alpha': [('t_a', 'c')], + 'beta': [('t_b', 'c')], + } + monkeypatch.setattr(schema_prefetcher_module, 'SQLExecute', _fake_executor_factory(tables)) + + prefetcher = SchemaPrefetcher(mycli) + prefetcher.start_configured() + assert prefetcher._thread is not None + prefetcher._thread.join(timeout=5) + + tables_meta = mycli.completer.dbmetadata['tables'] + assert 'alpha' in tables_meta + assert 'beta' in tables_meta + assert 'current' not in tables_meta + + +def test_start_configured_noop_when_disabled(monkeypatch): + mycli = make_mycli(prefetch_setting='') + make_executor = MagicMock() + monkeypatch.setattr(schema_prefetcher_module, 'SQLExecute', make_executor) + + prefetcher = SchemaPrefetcher(mycli) + prefetcher.start_configured() + + assert prefetcher._thread is None + make_executor.assert_not_called() + + +def test_prefetch_schema_now_loads_single_schema(monkeypatch): + mycli = make_mycli(prefetch_setting='') + tables = {'target': [('t1', 'c1')]} + monkeypatch.setattr(schema_prefetcher_module, 'SQLExecute', _fake_executor_factory(tables)) + + prefetcher = SchemaPrefetcher(mycli) + prefetcher.prefetch_schema_now('target') + assert prefetcher._thread is not None + prefetcher._thread.join(timeout=5) + + assert 'target' in mycli.completer.dbmetadata['tables'] + + +def test_stop_interrupts_running_prefetch(monkeypatch): + mycli = make_mycli(prefetch_setting='a, b') + monkeypatch.setattr( + schema_prefetcher_module, + 'SQLExecute', + _fake_executor_factory({'a': [], 'b': []}), + ) + + prefetcher = SchemaPrefetcher(mycli) + # Immediately cancel before any work runs. 
+ prefetcher._cancel.set() + prefetcher._start(['a', 'b']) + if prefetcher._thread is not None: + prefetcher._thread.join(timeout=5) + # stop() must be idempotent and leave the prefetcher ready to run again. + prefetcher.stop() + assert prefetcher._thread is None diff --git a/test/utils.py b/test/utils.py index db3d4bf3..57053f91 100644 --- a/test/utils.py +++ b/test/utils.py @@ -154,6 +154,17 @@ def make_bare_mycli() -> Any: cli.wider_completion_menu = False cli.explicit_pager = False cli._completer_lock = cast(Any, ReusableLock()) + cli.prefetch_schemas_setting = '' + cli.schema_prefetcher = cast( + Any, + SimpleNamespace( + stop=lambda: None, + clear_loaded=lambda: None, + start_configured=lambda: None, + is_prefetching=lambda: False, + prefetch_schema_now=lambda schema: None, + ), + ) cli.terminal_tab_title_format = '' cli.terminal_window_title_format = '' cli.multiplex_window_title_format = '' From 6696484a0e12e6a2d7b91a11e966f214fbf16b2c Mon Sep 17 00:00:00 2001 From: Scott Nemes Date: Mon, 20 Apr 2026 17:40:51 -0700 Subject: [PATCH 2/4] Save completion metadata after switching schemas --- changelog.md | 9 ++++ mycli/main.py | 13 +++--- mycli/schema_prefetcher.py | 3 +- mycli/sqlcompleter.py | 33 +++++++++++++++ test/pytests/test_main.py | 12 +++++- test/pytests/test_main_regression.py | 14 +++++-- test/pytests/test_schema_prefetcher.py | 37 ++++++++++++++++ test/pytests/test_sqlcompleter.py | 58 ++++++++++++++++++++++++++ 8 files changed, 168 insertions(+), 11 deletions(-) diff --git a/changelog.md b/changelog.md index 1972f47d..0a90dd91 100644 --- a/changelog.md +++ b/changelog.md @@ -1,3 +1,12 @@ +Upcoming (TBD) +============== + +Features +--------- +* Add option to prefetch completion metadata for some or all schemas +* Save fetched completion metadata when switching schemas + + 1.69.0 (2026/04/20) ============== diff --git a/mycli/main.py b/mycli/main.py index 5d772844..0d109ba9 100755 --- a/mycli/main.py +++ b/mycli/main.py @@ -1014,15 +1014,17 @@ def 
configure_pager(self) -> None: def refresh_completions(self, reset: bool = False) -> list[SQLResult]: # Cancel any in-flight schema prefetch before the completer is - # replaced: the fresh completer will not contain the prefetched - # schemas, so we restart the prefetch pass after the swap. + # replaced. Loaded-schema bookkeeping is intentionally preserved + # so switching between already-loaded schemas does not re-fetch. self.schema_prefetcher.stop() - self.schema_prefetcher.clear_loaded() + assert self.sqlexecute is not None if reset: + # Update the active completer's current-schema pointer right + # away so unqualified completions reflect a schema switch + # even before the background refresh finishes. with self._completer_lock: - self.completer.reset_completions() - assert self.sqlexecute is not None + self.completer.set_dbname(self.sqlexecute.dbname) self.completion_refresher.refresh( self.sqlexecute, self._on_completions_refreshed, @@ -1038,6 +1040,7 @@ def refresh_completions(self, reset: bool = False) -> list[SQLResult]: def _on_completions_refreshed(self, new_completer: SQLCompleter) -> None: """Swap the completer object in cli with the newly created completer.""" with self._completer_lock: + new_completer.copy_other_schemas_from(self.completer, exclude=new_completer.dbname) self.completer = new_completer if self.prompt_session: diff --git a/mycli/schema_prefetcher.py b/mycli/schema_prefetcher.py index e37daac5..8b99b177 100644 --- a/mycli/schema_prefetcher.py +++ b/mycli/schema_prefetcher.py @@ -89,7 +89,8 @@ def prefetch_schema_now(self, schema: str) -> None: def _start(self, schemas: Iterable[str]) -> None: self.stop() current = self._current_schema() - queue = [s for s in schemas if s and s != current and s not in self._loaded] + existing = set(self.mycli.completer.dbmetadata.get('tables', {}).keys()) + queue = [s for s in schemas if s and s != current and s not in self._loaded and s not in existing] if not queue: self._invalidate_app() return diff 
--git a/mycli/sqlcompleter.py b/mycli/sqlcompleter.py index 750f3f2b..8fe96a68 100644 --- a/mycli/sqlcompleter.py +++ b/mycli/sqlcompleter.py @@ -1181,6 +1181,39 @@ def load_schema_metadata( self.dbmetadata["procedures"][schema] = procedures self.dbmetadata["enum_values"][schema] = enum_values self.dbmetadata["foreign_keys"][schema] = foreign_keys + self._register_schema_completions(schema, table_columns, functions) + + def copy_other_schemas_from(self, source: "SQLCompleter", exclude: str | None) -> None: + """Copy per-schema metadata from *source*, skipping *exclude*. + + After a completion refresh swaps in a fresh completer that was + populated only with the current schema's data, this restores any + previously-loaded metadata for other schemas so the user can keep + using qualified completions (``OtherSchema.table``) without a + re-fetch. + """ + kinds = ("tables", "views", "functions", "procedures", "enum_values", "foreign_keys") + for kind in kinds: + src_map = source.dbmetadata.get(kind, {}) + dest_map = self.dbmetadata.setdefault(kind, {}) + for schema_name, data in src_map.items(): + if not schema_name or schema_name == exclude: + continue + if schema_name in dest_map: + continue + dest_map[schema_name] = data + for schema_name, table_columns in self.dbmetadata["tables"].items(): + if schema_name == exclude: + continue + functions = self.dbmetadata.get("functions", {}).get(schema_name, {}) + self._register_schema_completions(schema_name, table_columns, functions) + + def _register_schema_completions( + self, + schema: str, + table_columns: dict[str, list[str]], + functions: dict[str, None] | dict[str, Any], + ) -> None: self.all_completions.add(schema) for table, cols in table_columns.items(): self.all_completions.add(table) diff --git a/test/pytests/test_main.py b/test/pytests/test_main.py index b98a50ef..295e6987 100644 --- a/test/pytests/test_main.py +++ b/test/pytests/test_main.py @@ -2268,11 +2268,21 @@ def 
test_on_completions_refreshed_updates_completer_and_invalidates_prompt() -> invalidated: list[bool] = [] cli._completer_lock = cast(Any, ReusableLock(lambda: entered_lock.__setitem__('count', entered_lock['count'] + 1))) cli.prompt_session = cast(Any, SimpleNamespace(app=SimpleNamespace(invalidate=lambda: invalidated.append(True)))) - new_completer = cast(Any, SimpleNamespace(get_completions=lambda document, event: ['done'])) + cli.completer = cast(Any, SimpleNamespace(dbmetadata={})) + copy_calls: list[tuple[Any, str | None]] = [] + new_completer = cast( + Any, + SimpleNamespace( + dbname='current', + get_completions=lambda document, event: ['done'], + copy_other_schemas_from=lambda source, exclude: copy_calls.append((source, exclude)), + ), + ) main.MyCli._on_completions_refreshed(cli, new_completer) assert cli.completer is new_completer assert invalidated == [True] assert entered_lock['count'] == 1 + assert copy_calls == [(copy_calls[0][0], 'current')] def test_click_entrypoint_callback_covers_dsn_list_init_commands(monkeypatch: pytest.MonkeyPatch) -> None: diff --git a/test/pytests/test_main_regression.py b/test/pytests/test_main_regression.py index 5bc348ac..017fab0d 100644 --- a/test/pytests/test_main_regression.py +++ b/test/pytests/test_main_regression.py @@ -1467,19 +1467,25 @@ def fake_disable_pager() -> None: with pytest.raises(DisablePagerCalled): main.MyCli.configure_pager(cli) - reset_calls: list[bool] = [] + set_dbname_calls: list[str | None] = [] refresh_calls: list[tuple[Any, Any, dict[str, Any]]] = [] - cli.completer = cast(Any, SimpleNamespace(keyword_casing='upper', reset_completions=lambda: reset_calls.append(True))) + cli.completer = cast( + Any, + SimpleNamespace( + keyword_casing='upper', + set_dbname=lambda name: set_dbname_calls.append(name), + ), + ) cli.main_formatter = SimpleNamespace(supported_formats=['ascii', 'csv']) cli.completion_refresher = SimpleNamespace(refresh=lambda sql, callback, options: refresh_calls.append((sql, callback, 
options))) - cli.sqlexecute = 'sqlexecute' + cli.sqlexecute = SimpleNamespace(dbname='current_db') cli._on_completions_refreshed = lambda new_completer: None # type: ignore[assignment] def fake_refresh(reset: bool = False) -> list[SQLResult]: return main.MyCli.refresh_completions(cli, reset=reset) result = fake_refresh(reset=True) - assert reset_calls == [True] + assert set_dbname_calls == ['current_db'] assert refresh_calls[0][2] == { 'smart_completion': cli.smart_completion, 'supported_formats': ['ascii', 'csv'], diff --git a/test/pytests/test_schema_prefetcher.py b/test/pytests/test_schema_prefetcher.py index d61dd6b2..bc7c14ee 100644 --- a/test/pytests/test_schema_prefetcher.py +++ b/test/pytests/test_schema_prefetcher.py @@ -158,3 +158,40 @@ def test_stop_interrupts_running_prefetch(monkeypatch): # stop() must be idempotent and leave the prefetcher ready to run again. prefetcher.stop() assert prefetcher._thread is None + + +def test_start_skips_schemas_already_in_completer(monkeypatch): + """Previously-loaded schemas must not be re-fetched on refresh.""" + mycli = make_mycli(prefetch_setting='keep, fresh') + # Simulate a schema that was already loaded (e.g., preserved via + # copy_other_schemas_from after a completion refresh). 
+ mycli.completer.dbmetadata['tables']['keep'] = {'cached_table': ['*', 'c1']} + + executor_calls: list[str] = [] + + def make(*_args, **_kwargs): + executor = MagicMock() + + def _track(schema=None): + executor_calls.append(schema) + return iter([]) + + executor.table_columns.side_effect = _track + executor.foreign_keys.side_effect = lambda schema=None: iter([]) + executor.enum_values.side_effect = lambda schema=None: iter([]) + executor.functions.side_effect = lambda schema=None: iter([]) + executor.procedures.side_effect = lambda schema=None: iter([]) + executor.close = MagicMock() + return executor + + monkeypatch.setattr(schema_prefetcher_module, 'SQLExecute', make) + + prefetcher = SchemaPrefetcher(mycli) + prefetcher.start_configured() + if prefetcher._thread is not None: + prefetcher._thread.join(timeout=5) + + # Only 'fresh' is queried; 'keep' and 'current' are skipped. + assert executor_calls == ['fresh'] + # Cached data for 'keep' is untouched. + assert mycli.completer.dbmetadata['tables']['keep'] == {'cached_table': ['*', 'c1']} diff --git a/test/pytests/test_sqlcompleter.py b/test/pytests/test_sqlcompleter.py index d26c51c9..b032d1bd 100644 --- a/test/pytests/test_sqlcompleter.py +++ b/test/pytests/test_sqlcompleter.py @@ -567,3 +567,61 @@ def test_strip_backticks(name: str | None, expected: str) -> None: ) def test_matches_parent(parent: str, schema: str | None, relname: str, alias: str | None, expected: bool) -> None: assert SQLCompleter._matches_parent(parent, schema, relname, alias) is expected + + +def test_copy_other_schemas_from_preserves_non_current_metadata() -> None: + source = SQLCompleter() + source.load_schema_metadata( + schema='other', + table_columns={'users': ['*', 'id', 'email']}, + foreign_keys={'tables': {}, 'relations': []}, + enum_values={}, + functions={'fn_foo': None}, + procedures={}, + ) + # Also populate the source's "current" schema; it should NOT be copied. 
+ source.load_schema_metadata( + schema='current', + table_columns={'stale_current': ['*']}, + foreign_keys={'tables': {}, 'relations': []}, + enum_values={}, + functions={}, + procedures={}, + ) + + dest = SQLCompleter() + dest.set_dbname('current') + dest.extend_schemata('current') + + dest.copy_other_schemas_from(source, exclude='current') + + assert 'other' in dest.dbmetadata['tables'] + assert dest.dbmetadata['tables']['other'] == {'users': ['*', 'id', 'email']} + assert dest.dbmetadata['functions']['other'] == {'fn_foo': None} + # The excluded schema is not overwritten with stale source data. + assert dest.dbmetadata['tables']['current'] == {} + # Completion lookups pick up the copied names. + assert 'users' in dest.all_completions + assert 'email' in dest.all_completions + assert 'fn_foo' in dest.all_completions + + +def test_copy_other_schemas_from_does_not_overwrite_existing_dest() -> None: + source = SQLCompleter() + source.load_schema_metadata( + schema='shared', + table_columns={'from_source': ['*']}, + foreign_keys={'tables': {}, 'relations': []}, + enum_values={}, + functions={}, + procedures={}, + ) + + dest = SQLCompleter() + dest.set_dbname('current') + dest.dbmetadata['tables']['shared'] = {'from_dest': ['*']} + + dest.copy_other_schemas_from(source, exclude='current') + + # Destination's existing data wins over source when a conflict exists. + assert dest.dbmetadata['tables']['shared'] == {'from_dest': ['*']} From cabbf4b21ebde39a2d917dc1316c67978dfcc8e6 Mon Sep 17 00:00:00 2001 From: Scott Nemes Date: Mon, 20 Apr 2026 18:20:17 -0700 Subject: [PATCH 3/4] Simplified option explanation --- mycli/myclirc | 11 ++++++----- test/myclirc | 11 ++++++----- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/mycli/myclirc b/mycli/myclirc index eed91ec7..1a18878f 100644 --- a/mycli/myclirc +++ b/mycli/myclirc @@ -13,11 +13,12 @@ smart_completion = True # Suggestion: 3. 
min_completion_trigger = 1 -# Comma-separated list of schemas to prefetch for auto-completion, or -# "all" to prefetch every schema the connected user can access. Prefetching -# runs in the background after launch so that qualified completions (e.g. -# ``OtherSchema.table``) work without first switching databases. When -# empty (the default), only the currently-selected schema is loaded. +# Comma-separated list of schemas to prefetch for auto-completion. +# Prefetching starts in the background after launch. +# Examples: +# (empty) = default, disables prefetching +# schema1,schema2 = enables prefetch for given schemas +# all = enables prefetch for all schemas prefetch_schemas = # Multi-line mode allows breaking up the sql statements into multiple lines. If diff --git a/test/myclirc b/test/myclirc index 240d12bb..b9452e45 100644 --- a/test/myclirc +++ b/test/myclirc @@ -13,11 +13,12 @@ smart_completion = True # Suggestion: 3. min_completion_trigger = 1 -# Comma-separated list of schemas to prefetch for auto-completion, or -# "all" to prefetch every schema the connected user can access. Prefetching -# runs in the background after launch so that qualified completions (e.g. -# ``OtherSchema.table``) work without first switching databases. When -# empty (the default), only the currently-selected schema is loaded. +# Comma-separated list of schemas to prefetch for auto-completion. +# Prefetching starts in the background after launch. +# Examples: +# (empty) = default, disables prefetching +# schema1,schema2 = enables prefetch for given schemas +# all = enables prefetch for all schemas prefetch_schemas = # Multi-line mode allows breaking up the sql statements into multiple lines. 
If From 111bd70af1fe4fde7db9b3f71f60e42d57a7d0fa Mon Sep 17 00:00:00 2001 From: Scott Nemes Date: Tue, 21 Apr 2026 19:16:27 -0700 Subject: [PATCH 4/4] Added new var to control prefetch mode --- mycli/main.py | 5 +- mycli/myclirc | 17 ++--- mycli/schema_prefetcher.py | 87 ++++++++++++++------------ test/features/steps/basic_commands.py | 2 +- test/myclirc | 17 ++--- test/pytests/test_schema_prefetcher.py | 60 +++++++++++------- test/utils.py | 3 +- 7 files changed, 112 insertions(+), 79 deletions(-) diff --git a/mycli/main.py b/mycli/main.py index 0d109ba9..32b38a8b 100755 --- a/mycli/main.py +++ b/mycli/main.py @@ -244,7 +244,8 @@ def __init__( self.logfile = False self.completion_refresher = CompletionRefresher() - self.prefetch_schemas_setting = c["main"].get("prefetch_schemas", "") or "" + self.prefetch_schemas_mode = c["main"].get("prefetch_schemas_mode", "always") or "always" + self.prefetch_schemas_list = c["main"].get("prefetch_schemas_list", "") or "" self.schema_prefetcher = SchemaPrefetcher(self) self.logger = logging.getLogger(__name__) @@ -1049,7 +1050,7 @@ def _on_completions_refreshed(self, new_completer: SQLCompleter) -> None: self.prompt_session.app.invalidate() # Kick off background prefetch for any extra schemas configured - # via ``prefetch_schemas`` so users get cross-schema completions. + # via ``prefetch_schemas_mode`` so users get cross-schema completions. self.schema_prefetcher.start_configured() def run_query( diff --git a/mycli/myclirc b/mycli/myclirc index 1a18878f..3aa35189 100644 --- a/mycli/myclirc +++ b/mycli/myclirc @@ -13,13 +13,16 @@ smart_completion = True # Suggestion: 3. min_completion_trigger = 1 -# Comma-separated list of schemas to prefetch for auto-completion. -# Prefetching starts in the background after launch. 
-# Examples: -# (empty) = default, disables prefetching -# schema1,schema2 = enables prefetch for given schemas -# all = enables prefetch for all schemas -prefetch_schemas = +# Prefetch completion metadata for schemas in the background after launch. +# Possible values: +# always = prefetch all schemas (default) +# never = do not prefetch any schemas +# listed = prefetch only the schemas named in prefetch_schemas_list +prefetch_schemas_mode = always + +# Comma-separated list of schemas to prefetch when +# prefetch_schemas_mode = listed. Ignored in other modes. +prefetch_schemas_list = # Multi-line mode allows breaking up the sql statements into multiple lines. If # this is set to True, then the end of the statements must have a semi-colon. diff --git a/mycli/schema_prefetcher.py b/mycli/schema_prefetcher.py index 8b99b177..edd0f566 100644 --- a/mycli/schema_prefetcher.py +++ b/mycli/schema_prefetcher.py @@ -21,24 +21,31 @@ _logger = logging.getLogger(__name__) -ALL_SCHEMAS_SENTINEL = 'all' +PREFETCH_MODE_ALWAYS = 'always' +PREFETCH_MODE_NEVER = 'never' +PREFETCH_MODE_LISTED = 'listed' -def parse_prefetch_setting(raw: str | None) -> list[str] | None: - """Parse the ``prefetch_schemas`` option value. +def parse_prefetch_config(mode: str, schema_list: str | list[str] | None) -> list[str] | None: + """Parse the ``prefetch_schemas_mode`` / ``prefetch_schemas_list`` options. - Returns ``None`` when the user wants every accessible schema - (``all``), a list of explicit schema names otherwise, or an empty - list when prefetching is disabled. + Returns ``None`` when every accessible schema should be prefetched + (``always``), an empty list when prefetching is disabled + (``never``), or the explicit list parsed from ``schema_list`` when + the mode is ``listed``. Unknown modes fall back to ``always``. + + ``schema_list`` may be a CSV string (single-value configobj case) or + an already-split list (multi-value configobj case). 
""" - if not raw: - return [] - value = raw.strip() - if not value: + normalized = mode.strip().lower() + if normalized == PREFETCH_MODE_NEVER: return [] - if value.lower() == ALL_SCHEMAS_SENTINEL: - return None - return [part.strip() for part in value.split(',') if part.strip()] + if normalized == PREFETCH_MODE_LISTED: + if not schema_list: + return [] + parts = schema_list.split(',') if isinstance(schema_list, str) else schema_list + return [part.strip() for part in parts if part and part.strip()] + return None class SchemaPrefetcher: @@ -66,13 +73,14 @@ def stop(self, timeout: float = 2.0) -> None: self._thread = None def start_configured(self) -> None: - """Start prefetching based on the user's ``prefetch_schemas`` setting.""" - setting = getattr(self.mycli, 'prefetch_schemas_setting', '') - parsed = parse_prefetch_setting(setting) - if parsed is None: - self._start(self._resolve_all_schemas()) - else: - self._start(parsed) + """Start prefetching based on the user's prefetch settings.""" + mode = getattr(self.mycli, 'prefetch_schemas_mode', PREFETCH_MODE_ALWAYS) + schema_list = getattr(self.mycli, 'prefetch_schemas_list', '') + parsed = parse_prefetch_config(mode, schema_list) + if parsed is not None and not parsed: + # ``never`` or ``listed`` with an empty list — nothing to do. + return + self._start(parsed) def prefetch_schema_now(self, schema: str) -> None: """Fetch *schema* immediately on a background thread. @@ -86,14 +94,16 @@ def prefetch_schema_now(self, schema: str) -> None: self.stop() self._start([schema]) - def _start(self, schemas: Iterable[str]) -> None: + def _start(self, schemas: Iterable[str] | None) -> None: + """Spawn the background worker. + + ``schemas=None`` defers resolution to the worker, which lists + every database via its own dedicated connection — the main + thread's ``sqlexecute`` must not be used here since the worker + would race with the REPL. 
+ """ self.stop() - current = self._current_schema() - existing = set(self.mycli.completer.dbmetadata.get('tables', {}).keys()) - queue = [s for s in schemas if s and s != current and s not in self._loaded and s not in existing] - if not queue: - self._invalidate_app() - return + queue: list[str] | None = None if schemas is None else list(schemas) self._cancel = threading.Event() self._thread = threading.Thread( target=self._run, @@ -104,7 +114,7 @@ def _start(self, schemas: Iterable[str]) -> None: self._thread.start() self._invalidate_app() - def _run(self, schemas: list[str]) -> None: + def _run(self, schemas: list[str] | None) -> None: executor: SQLExecute | None = None try: executor = self._make_executor() @@ -113,7 +123,16 @@ def _run(self, schemas: list[str]) -> None: self._invalidate_app() return try: - for schema in schemas: + if schemas is None: + try: + schemas = list(executor.databases()) + except Exception as e: + _logger.error('failed to list databases for prefetch: %r', e) + return + current = self._current_schema() + existing = set(self.mycli.completer.dbmetadata.get('tables', {}).keys()) + queue = [s for s in schemas if s and s != current and s not in self._loaded and s not in existing] + for schema in queue: if self._cancel.is_set(): return try: @@ -188,16 +207,6 @@ def _prefetch_one(self, executor: SQLExecute, schema: str) -> None: ) self._invalidate_app() - def _resolve_all_schemas(self) -> list[str]: - sqlexecute = self.mycli.sqlexecute - if sqlexecute is None: - return [] - try: - return list(sqlexecute.databases()) - except Exception as e: - _logger.error('failed to list databases for prefetch: %r', e) - return [] - def _current_schema(self) -> str | None: sqlexecute = self.mycli.sqlexecute return sqlexecute.dbname if sqlexecute is not None else None diff --git a/test/features/steps/basic_commands.py b/test/features/steps/basic_commands.py index 5718e340..f94d4937 100644 --- a/test/features/steps/basic_commands.py +++ 
b/test/features/steps/basic_commands.py @@ -67,7 +67,7 @@ def step_send_source_command(context): @when("we run query to check application_name") def step_check_application_name(context): context.cli.sendline( - "SELECT 'found' FROM performance_schema.session_connect_attrs WHERE attr_name = 'program_name' AND attr_value = 'mycli'" + "SELECT 'found' FROM performance_schema.session_connect_attrs WHERE attr_name = 'program_name' AND attr_value = 'mycli' LIMIT 1" ) diff --git a/test/myclirc b/test/myclirc index b9452e45..811c51d2 100644 --- a/test/myclirc +++ b/test/myclirc @@ -13,13 +13,16 @@ smart_completion = True # Suggestion: 3. min_completion_trigger = 1 -# Comma-separated list of schemas to prefetch for auto-completion. -# Prefetching starts in the background after launch. -# Examples: -# (empty) = default, disables prefetching -# schema1,schema2 = enables prefetch for given schemas -# all = enables prefetch for all schemas -prefetch_schemas = +# Prefetch completion metadata for schemas in the background after launch. +# Possible values: +# always = prefetch all schemas (default) +# never = do not prefetch any schemas +# listed = prefetch only the schemas named in prefetch_schemas_list +prefetch_schemas_mode = always + +# Comma-separated list of schemas to prefetch when +# prefetch_schemas_mode = listed. Ignored in other modes. +prefetch_schemas_list = # Multi-line mode allows breaking up the sql statements into multiple lines. If # this is set to True, then the end of the statements must have a semi-colon. 
diff --git a/test/pytests/test_schema_prefetcher.py b/test/pytests/test_schema_prefetcher.py index bc7c14ee..aaa4d5f2 100644 --- a/test/pytests/test_schema_prefetcher.py +++ b/test/pytests/test_schema_prefetcher.py @@ -5,28 +5,38 @@ from unittest.mock import MagicMock from mycli import schema_prefetcher as schema_prefetcher_module -from mycli.schema_prefetcher import SchemaPrefetcher, parse_prefetch_setting +from mycli.schema_prefetcher import SchemaPrefetcher, parse_prefetch_config from mycli.sqlcompleter import SQLCompleter -def test_parse_prefetch_setting_empty() -> None: - assert parse_prefetch_setting('') == [] - assert parse_prefetch_setting(None) == [] - assert parse_prefetch_setting(' ') == [] +def test_parse_prefetch_config_never() -> None: + assert parse_prefetch_config('never', '') == [] + assert parse_prefetch_config('NEVER', 'ignored,values') == [] + assert parse_prefetch_config(' never ', None) == [] -def test_parse_prefetch_setting_all() -> None: - assert parse_prefetch_setting('all') is None - assert parse_prefetch_setting('ALL') is None - assert parse_prefetch_setting(' all ') is None +def test_parse_prefetch_config_always() -> None: + assert parse_prefetch_config('always', '') is None + assert parse_prefetch_config('ALWAYS', None) is None + assert parse_prefetch_config(' always ', 'ignored') is None -def test_parse_prefetch_setting_explicit_list() -> None: - assert parse_prefetch_setting('foo, bar , baz') == ['foo', 'bar', 'baz'] - assert parse_prefetch_setting('solo') == ['solo'] +def test_parse_prefetch_config_listed() -> None: + assert parse_prefetch_config('listed', 'foo, bar , baz') == ['foo', 'bar', 'baz'] + assert parse_prefetch_config('LISTED', 'solo') == ['solo'] + assert parse_prefetch_config('listed', '') == [] + assert parse_prefetch_config('listed', None) == [] + # configobj pre-splits multi-value entries into a list of strings. 
+ assert parse_prefetch_config('listed', ['foo', ' bar ', 'baz']) == ['foo', 'bar', 'baz'] + assert parse_prefetch_config('listed', []) == [] -def make_mycli(prefetch_setting: str = '', dbname: str = 'current', databases=None): +def make_mycli( + prefetch_mode: str = 'listed', + prefetch_list: str = '', + dbname: str = 'current', + databases=None, +): if databases is None: databases = ['current', 'other1', 'other2'] completer = SQLCompleter(smart_completion=True) @@ -51,17 +61,19 @@ def make_mycli(prefetch_setting: str = '', dbname: str = 'current', databases=No return SimpleNamespace( completer=completer, sqlexecute=sqlexecute, - prefetch_schemas_setting=prefetch_setting, + prefetch_schemas_mode=prefetch_mode, + prefetch_schemas_list=prefetch_list, _completer_lock=threading.Lock(), prompt_session=None, ) -def _fake_executor_factory(per_schema_tables): +def _fake_executor_factory(per_schema_tables, databases=None): """Build an executor stub whose schema-aware methods yield prebuilt rows.""" def make(*_args, **_kwargs): executor = MagicMock() + executor.databases.return_value = list(databases) if databases is not None else [] executor.table_columns.side_effect = lambda schema=None: iter(per_schema_tables.get(schema, [])) executor.foreign_keys.side_effect = lambda schema=None: iter([]) executor.enum_values.side_effect = lambda schema=None: iter([]) @@ -74,7 +86,7 @@ def make(*_args, **_kwargs): def test_start_configured_skips_current_and_prefetches_others(monkeypatch): - mycli = make_mycli(prefetch_setting='other1, current, other2') + mycli = make_mycli(prefetch_mode='listed', prefetch_list='other1, current, other2') tables = { 'other1': [('users', 'id'), ('users', 'email')], 'other2': [('orders', 'id')], @@ -98,12 +110,16 @@ def test_start_configured_skips_current_and_prefetches_others(monkeypatch): def test_start_configured_all_resolves_from_databases(monkeypatch): - mycli = make_mycli(prefetch_setting='all', databases=['current', 'alpha', 'beta']) + mycli = 
make_mycli(prefetch_mode='always', databases=['current', 'alpha', 'beta']) tables = { 'alpha': [('t_a', 'c')], 'beta': [('t_b', 'c')], } - monkeypatch.setattr(schema_prefetcher_module, 'SQLExecute', _fake_executor_factory(tables)) + monkeypatch.setattr( + schema_prefetcher_module, + 'SQLExecute', + _fake_executor_factory(tables, databases=['current', 'alpha', 'beta']), + ) prefetcher = SchemaPrefetcher(mycli) prefetcher.start_configured() @@ -117,7 +133,7 @@ def test_start_configured_all_resolves_from_databases(monkeypatch): def test_start_configured_noop_when_disabled(monkeypatch): - mycli = make_mycli(prefetch_setting='') + mycli = make_mycli(prefetch_mode='never') make_executor = MagicMock() monkeypatch.setattr(schema_prefetcher_module, 'SQLExecute', make_executor) @@ -129,7 +145,7 @@ def test_start_configured_noop_when_disabled(monkeypatch): def test_prefetch_schema_now_loads_single_schema(monkeypatch): - mycli = make_mycli(prefetch_setting='') + mycli = make_mycli(prefetch_mode='never') tables = {'target': [('t1', 'c1')]} monkeypatch.setattr(schema_prefetcher_module, 'SQLExecute', _fake_executor_factory(tables)) @@ -142,7 +158,7 @@ def test_prefetch_schema_now_loads_single_schema(monkeypatch): def test_stop_interrupts_running_prefetch(monkeypatch): - mycli = make_mycli(prefetch_setting='a, b') + mycli = make_mycli(prefetch_mode='listed', prefetch_list='a, b') monkeypatch.setattr( schema_prefetcher_module, 'SQLExecute', @@ -162,7 +178,7 @@ def test_stop_interrupts_running_prefetch(monkeypatch): def test_start_skips_schemas_already_in_completer(monkeypatch): """Previously-loaded schemas must not be re-fetched on refresh.""" - mycli = make_mycli(prefetch_setting='keep, fresh') + mycli = make_mycli(prefetch_mode='listed', prefetch_list='keep, fresh') # Simulate a schema that was already loaded (e.g., preserved via # copy_other_schemas_from after a completion refresh). 
mycli.completer.dbmetadata['tables']['keep'] = {'cached_table': ['*', 'c1']} diff --git a/test/utils.py b/test/utils.py index 57053f91..d57e5ed8 100644 --- a/test/utils.py +++ b/test/utils.py @@ -154,7 +154,8 @@ def make_bare_mycli() -> Any: cli.wider_completion_menu = False cli.explicit_pager = False cli._completer_lock = cast(Any, ReusableLock()) - cli.prefetch_schemas_setting = '' + cli.prefetch_schemas_mode = 'never' + cli.prefetch_schemas_list = '' cli.schema_prefetcher = cast( Any, SimpleNamespace(