From 2b2cb45e31eedb57217f851c7ba53c249173ebb8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 30 Mar 2026 11:54:20 +0200 Subject: [PATCH 01/64] feat: add plugin system with entry_points-based provider discovery - Add core/config.py: general-purpose self-healing TOML config from dataclasses - Add core/plugin.py: plugin discovery and loading via importlib.metadata - Refactor Provider ABC: remove config_keys/load_config/get_opening_hours_and_sessions, add Config dataclass and plugin metadata - Convert CCXT provider to new plugin interface with CCXTConfig dataclass - Move Capital.com provider to external plugin (plugins/pynecore-capitalcom/) - Register CCXT as entry_point in pyproject.toml - Update CLI to use plugin-based provider discovery and per-plugin config files - Update tests and conftest.py for new provider API --- pyproject.toml | 3 + src/pynecore/cli/commands/__init__.py | 39 +- src/pynecore/cli/commands/data.py | 32 +- src/pynecore/core/config.py | 253 +++++++++++ src/pynecore/core/plugin.py | 69 +++ src/pynecore/providers/__init__.py | 11 +- src/pynecore/providers/capitalcom.py | 404 ------------------ src/pynecore/providers/ccxt.py | 170 +++----- src/pynecore/providers/provider.py | 133 +++--- tests/conftest.py | 9 +- tests/t00_pynecore/core/test_013_config.py | 322 ++++++++++++++ tests/t00_pynecore/core/test_014_plugin.py | 59 +++ .../data/test_002_ccxt_provider.py | 25 +- 13 files changed, 897 insertions(+), 632 deletions(-) create mode 100644 src/pynecore/core/config.py create mode 100644 src/pynecore/core/plugin.py delete mode 100644 src/pynecore/providers/capitalcom.py create mode 100644 tests/t00_pynecore/core/test_013_config.py create mode 100644 tests/t00_pynecore/core/test_014_plugin.py diff --git a/pyproject.toml b/pyproject.toml index d48dc39..4b9c96f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,6 +62,9 @@ optional-dependencies.capitalcom = ["httpx", "pycryptodome"] scripts.pyne = "pynecore.cli:app" 
+[project.entry-points."pyne.provider"] +ccxt = "pynecore.providers.ccxt:CCXTProvider" + # # URLs diff --git a/src/pynecore/cli/commands/__init__.py b/src/pynecore/cli/commands/__init__.py index e10d84b..bfd52fe 100644 --- a/src/pynecore/cli/commands/__init__.py +++ b/src/pynecore/cli/commands/__init__.py @@ -6,8 +6,6 @@ from ..app import app, app_state from ..utils.error_hook import setup_global_error_logging -from ...providers import available_providers - # Import commands from . import run, data, compile, benchmark, debug @@ -229,30 +227,19 @@ def main( config_dir = Path(workdir) / 'config' config_dir.mkdir(exist_ok=True) - # Create providers.toml file for all supported providers (if not exists) - providers_file = config_dir / 'providers.toml' - if not providers_file.exists() or recreate_provider_config: - with providers_file.open('w') as f: - for provider in available_providers: - f.write(f"[{provider}]\n") - provider_module = __import__(f"pynecore.providers.{provider}", fromlist=['']) - provider_class = getattr( - provider_module, - [p for p in dir(provider_module) if p.endswith('Provider')][0] - ) - for key, value in provider_class.config_keys.items(): - if key.startswith('#'): # Comments - f.write(f'{key}\n') - else: - if isinstance(value, str): - f.write(f'{key} = "{value}"\n') - elif isinstance(value, bool): - f.write(f'{key} = {str(value).lower()}\n') - elif isinstance(value, int) or isinstance(value, float): - f.write(f'{key} = {value}\n') - else: - raise ValueError(f"Unsupported type for {key}: {type(value)}") - f.write("\n") + # Generate per-plugin config files for all installed providers + from ...core.plugin import discover_plugins + from ...core.config import ensure_config + + for name, ep in discover_plugins('pyne.provider').items(): + config_path = config_dir / f'{name}.toml' + if not config_path.exists() or recreate_provider_config: + try: + provider_cls = ep.load() + if hasattr(provider_cls, 'Config') and provider_cls.Config is not None: + 
ensure_config(provider_cls.Config, config_path) + except Exception: + pass # Don't crash CLI if a plugin is broken # Create api.toml file for PyneSys API (if not exists) api_file = config_dir / 'api.toml' diff --git a/src/pynecore/cli/commands/data.py b/src/pynecore/cli/commands/data.py index 37dc537..fc35b3a 100644 --- a/src/pynecore/cli/commands/data.py +++ b/src/pynecore/cli/commands/data.py @@ -11,7 +11,7 @@ TimeElapsedColumn, TimeRemainingColumn) from ..app import app, app_state -from ...providers import available_providers +from ...core.plugin import get_available_plugin_names, load_plugin from ...providers.provider import Provider from ...lib.timeframe import in_seconds from ...core.data_converter import DataConverter, SupportedFormats as InputFormats @@ -38,8 +38,10 @@ class AvailableProvidersEnum(Enum): # DateOrDays is either a datetime or a number of days DateOrDays = str - # Create an enum from available providers - AvailableProvidersEnum = Enum('Provider', {name.upper(): name.lower() for name in available_providers}) + # Create an enum from available providers (discovered via entry_points) + AvailableProvidersEnum = Enum('Provider', { + name.upper(): name.lower() for name in get_available_plugin_names('pyne.provider') + }) # Available output formats @@ -126,19 +128,20 @@ def download( """ Download historical OHLCV data """ - # Import provider module from - provider_module = __import__(f"pynecore.providers.{provider.value}", fromlist=['']) - # Find the provider class (exclude base Provider class) - provider_class = getattr(provider_module, [ - p for p in dir(provider_module) if p.endswith('Provider') and p != 'Provider' - ][0]) + # Load provider class via plugin system + provider_class = load_plugin('pyne.provider', provider.value) try: # If list_symbols is True, we show the available symbols then exit if list_symbols: + from ...core.config import ensure_config + config = None + if hasattr(provider_class, 'Config') and provider_class.Config is not None: 
+ config = ensure_config(provider_class.Config, + app_state.config_dir / f'{provider.value}.toml') with Progress(SpinnerColumn(), TextColumn("{task.description}"), transient=True) as progress: progress.add_task(description="Fetching market data...", total=None) - provider_instance: Provider = provider_class(symbol=symbol, config_dir=app_state.config_dir) + provider_instance: Provider = provider_class(symbol=symbol, config=config) symbols = provider_instance.get_list_of_symbols() with (console := Console()).pager(): for s in symbols: @@ -149,9 +152,14 @@ def download( secho("Error: Symbol is required!", err=True, fg=colors.RED) raise Exit(1) - # Create provider instance + # Create provider instance with config + from ...core.config import ensure_config + config = None + if hasattr(provider_class, 'Config') and provider_class.Config is not None: + config = ensure_config(provider_class.Config, + app_state.config_dir / f'{provider.value}.toml') provider_instance: Provider = provider_class(symbol=symbol, timeframe=timeframe, - ohlv_dir=app_state.data_dir) + ohlv_dir=app_state.data_dir, config=config) # Download symbol info if not exists if force_save_info or not provider_instance.is_symbol_info_exists(): diff --git a/src/pynecore/core/config.py b/src/pynecore/core/config.py new file mode 100644 index 0000000..ece5069 --- /dev/null +++ b/src/pynecore/core/config.py @@ -0,0 +1,253 @@ +""" +General-purpose dataclass-based configuration with self-healing TOML files. + +Generates a TOML configuration file from a Python dataclass definition. +On each run the file is regenerated from the dataclass: user-modified values +are preserved while the structure always reflects the current field set. 
+ +Convention:: + + #key = value — default / unmodified (commented out) + key = value — user-modified (uncommented) + #key = — None value +""" + +import ast +import dataclasses +import inspect +import textwrap +import tomllib +from pathlib import Path + + +def format_value(value: str | int | float | bool) -> str: + """ + Format a Python value as a TOML value string. + + Handles the four TOML-native types: ``str``, ``int``, ``float``, ``bool``. + This function is intentionally public so that other modules (e.g. + ``core.script``) can reuse it for consistent TOML formatting. + + :param value: The value to format. + :return: TOML-formatted string representation. + """ + if isinstance(value, bool): + return str(value).lower() + if isinstance(value, int): + return str(value) + if isinstance(value, float): + return str(value) + if isinstance(value, str): + escaped = ( + value + .replace('\\', '\\\\') + .replace('"', '\\"') + .replace('\n', '\\n') + .replace('\r', '\\r') + ) + return f'"{escaped}"' + return str(value) + + +def extract_field_docs(config_cls: type) -> dict[str, str]: + """ + Extract attribute docstrings from a dataclass source via AST parsing. + + Looks for ``Expr(Constant(str))`` nodes immediately following + ``AnnAssign`` nodes in the class body (PEP 257 attribute docstrings). + + :param config_cls: The dataclass type to inspect. + :return: Mapping of field name to its docstring. 
+ """ + try: + source = textwrap.dedent(inspect.getsource(config_cls)) + except (OSError, TypeError): + return {} + + tree = ast.parse(source) + + class_def = None + for node in ast.walk(tree): + if isinstance(node, ast.ClassDef) and node.name == config_cls.__name__: + class_def = node + break + + if class_def is None: + return {} + + docs: dict[str, str] = {} + body = class_def.body + for i, node in enumerate(body): + if isinstance(node, ast.AnnAssign) and isinstance(node.target, ast.Name): + field_name = node.target.id + if i + 1 < len(body): + next_node = body[i + 1] + if ( + isinstance(next_node, ast.Expr) + and isinstance(next_node.value, ast.Constant) + and isinstance(next_node.value.value, str) + ): + docs[field_name] = next_node.value.value + + return docs + + +def generate_toml( + config_cls: type, + user_values: dict | None = None, +) -> str: + """ + Generate a TOML string from a dataclass definition. + + Fields with user-modified values are written uncommented. Fields at their + default value are written as comments (``#key = value``). + + :param config_cls: The config dataclass type. + :param user_values: User-modified values to write uncommented. + :return: Generated TOML content string. 
+ """ + field_docs = extract_field_docs(config_cls) + lines: list[str] = [] + + class_doc = config_cls.__doc__ + if class_doc: + for doc_line in class_doc.strip().splitlines(): + stripped = doc_line.strip() + lines.append(f"# {stripped}" if stripped else "#") + + for f in dataclasses.fields(config_cls): + name = f.name + default = f.default + + lines.append("") + + if name in field_docs: + for doc_line in field_docs[name].strip().splitlines(): + lines.append(f"# {doc_line.strip()}") + + if user_values and name in user_values: + lines.append(f"{name} = {format_value(user_values[name])}") + elif default is dataclasses.MISSING or default is None: + lines.append(f"#{name} =") + else: + lines.append(f"#{name} = {format_value(default)}") + + return '\n'.join(lines) + '\n' + + +def parse_toml_with_comments(toml_content: str) -> dict: + """ + Parse TOML content, returning only uncommented (user-modified) values. + + Commented lines (``#key = value``) are standard TOML comments and are + excluded by the parser. Only actively set values are returned. + + :param toml_content: Raw TOML file content. + :return: Dict of parsed key-value pairs. + """ + return tomllib.loads(toml_content) + + +def ensure_config(config_cls: type, config_path: Path) -> object: + """ + Main entry point. Call on every application run. + + 1. If the file does not exist, generate it with all defaults (commented). + 2. If it exists, read user values, regenerate from the dataclass, write back. + 3. Return a populated dataclass instance with user values over defaults. + + TOML table sections (e.g. ``[binance]``) not managed by the dataclass + are preserved verbatim at the end of the file. + + :param config_cls: The config dataclass type (not an instance). + :param config_path: Path to the TOML file. + :return: A populated config dataclass instance. 
+ """ + user_values = None + extra_content = "" + + if config_path.exists(): + user_values, extra_content = _parse_existing(config_path, config_cls) + + toml_content = generate_toml(config_cls, user_values) + + if extra_content: + toml_content += '\n' + extra_content + if not extra_content.endswith('\n'): + toml_content += '\n' + + config_path.parent.mkdir(parents=True, exist_ok=True) + config_path.write_text(toml_content, encoding='utf-8') + + return _create_instance(config_cls, user_values) + + +def _parse_existing(config_path: Path, config_cls: type) -> tuple[dict, str]: + """ + Parse an existing config file to extract user values and extra sections. + + :param config_path: Path to the TOML file. + :param config_cls: The config dataclass type. + :return: ``(user_values, extra_sections_raw_text)``. + """ + content = config_path.read_text(encoding='utf-8') + + parsed = tomllib.loads(content) + + field_names = {f.name for f in dataclasses.fields(config_cls)} + + user_values: dict = {} + for key, value in parsed.items(): + if key in field_names and not isinstance(value, dict): + user_values[key] = value + + extra_content = _extract_extra_sections(content) + + return user_values, extra_content + + +def _extract_extra_sections(content: str) -> str: + """ + Extract raw text of TOML table sections from file content. + + Everything from the first ``[section]`` header to end of file is returned. + + :param content: Raw file content. + :return: Raw text of extra sections, or empty string. + """ + lines = content.splitlines() + for i, line in enumerate(lines): + stripped = line.strip() + if stripped.startswith('[') and not stripped.startswith('#'): + return '\n'.join(lines[i:]) + return "" + + +def _create_instance(config_cls: type, user_values: dict | None): + """ + Create a dataclass instance with user values merged over defaults. + + Handles ``int`` to ``float`` coercion when the field default is a float. + + :param config_cls: The config dataclass type. 
+ :param user_values: User-modified values, or ``None``. + :return: A populated config dataclass instance. + """ + if not user_values: + return config_cls() + + kwargs: dict = {} + for f in dataclasses.fields(config_cls): + if f.name not in user_values: + continue + value = user_values[f.name] + if ( + f.default is not dataclasses.MISSING + and isinstance(f.default, float) + and isinstance(value, int) + and not isinstance(value, bool) + ): + value = float(value) + kwargs[f.name] = value + + return config_cls(**kwargs) diff --git a/src/pynecore/core/plugin.py b/src/pynecore/core/plugin.py new file mode 100644 index 0000000..471bd59 --- /dev/null +++ b/src/pynecore/core/plugin.py @@ -0,0 +1,69 @@ +""" +Plugin discovery and loading via Python entry points. + +All PyneCore plugins (providers, extensions, CLI commands) are discovered +through :pep:`621` entry points declared in ``pyproject.toml``. This module +provides a thin, general-purpose API over :mod:`importlib.metadata`. + +Example ``pyproject.toml`` for a provider plugin:: + + [project.entry-points."pyne.provider"] + myexchange = "mypackage:MyExchangeProvider" + +Discovery example:: + + plugins = discover_plugins("pyne.provider") + # {"ccxt": , "myexchange": } + + cls = load_plugin("pyne.provider", "ccxt") + # +""" + +# noinspection PyProtectedMember +from importlib.metadata import entry_points, EntryPoint + + +class PluginNotFoundError(ImportError): + """Raised when a requested plugin is not installed.""" + + +def discover_plugins(group: str) -> dict[str, EntryPoint]: + """ + Return all installed entry points for a plugin group. + + :param group: Entry point group name (e.g. ``"pyne.provider"``). + :return: Mapping of plugin name to its :class:`EntryPoint`. + """ + return {ep.name: ep for ep in entry_points(group=group)} + + +def load_plugin(group: str, name: str) -> type: + """ + Load and return a plugin class by name. + + The actual import happens lazily — only when this function is called.
+ + :param group: Entry point group name (e.g. ``"pyne.provider"``). + :param name: Plugin name as declared in the entry point. + :return: The plugin class. + :raises PluginNotFoundError: If no plugin with the given name is installed. + """ + eps = discover_plugins(group) + if name not in eps: + short_group = group.replace("pyne.", "") + raise PluginNotFoundError( + f"Plugin '{name}' not found for group '{group}'. " + f"Install it with: pip install pynecore-{name}\n" + f"Available {short_group} plugins: {', '.join(sorted(eps)) or '(none)'}" + ) + return eps[name].load() + + +def get_available_plugin_names(group: str) -> list[str]: + """ + Return a sorted list of all available plugin names for a group. + + :param group: Entry point group name (e.g. ``"pyne.provider"``). + :return: Sorted list of plugin names. + """ + return sorted(discover_plugins(group)) diff --git a/src/pynecore/providers/__init__.py b/src/pynecore/providers/__init__.py index db0626c..f5062ae 100644 --- a/src/pynecore/providers/__init__.py +++ b/src/pynecore/providers/__init__.py @@ -1,10 +1,3 @@ -from pathlib import Path +from pynecore.core.plugin import get_available_plugin_names -from .ccxt import CCXTProvider -from .capitalcom import CapitalComProvider - -# List of available providers -available_providers = tuple( - p.stem for p in Path(__file__).parent.resolve().glob('*.py') if - p.name not in ('__init__.py', 'provider.py') -) +available_providers = tuple(get_available_plugin_names('pyne.provider')) diff --git a/src/pynecore/providers/capitalcom.py b/src/pynecore/providers/capitalcom.py deleted file mode 100644 index bc24260..0000000 --- a/src/pynecore/providers/capitalcom.py +++ /dev/null @@ -1,404 +0,0 @@ -from typing import Callable, cast -import sys - -# Import the override decorator for Python 3.12+ -if sys.version_info >= (3, 12): - from typing import override -else: - # An empty decorator for Python 3.11 and below - def override(func): - return func -from datetime import datetime, 
time, UTC, timedelta -from zoneinfo import ZoneInfo -from pathlib import Path -from functools import lru_cache - -from .provider import Provider - -from pynecore.core.syminfo import SymInfo, SymInfoInterval, SymInfoSession -from ..types.ohlcv import OHLCV - -__all__ = ['CapitalComProvider'] - -URL = 'https://api-capital.backend-capital.com' -URL_DEMO = 'https://demo-api-capital.backend-capital.com' - -ENDPOINT_PREFIX = '/api/v1/' - -TIMEFRAMES = { - # TradingView -> Capital.com - '1': 'MINUTE', - '5': 'MINUTE_5', - '15': 'MINUTE_15', - '30': 'MINUTE_30', - '60': 'HOUR', - '240': 'HOUR_4', - '1D': 'DAY', - '1W': 'WEEK' -} - -TIMEFRAMES_INV = {v: k for k, v in TIMEFRAMES.items()} - -TYPES = { - 'CURRENCIES': 'forex', - 'CRYTOCURRENCIES': 'crypto', - 'SHARES': 'stock', - 'INDICES': 'index', -} - - -def encrypt_password(password: str, encryption_key: str, timestamp: int | None = None): - from time import time as epoch - try: - from base64 import standard_b64encode, standard_b64decode - from Crypto.PublicKey import RSA - from Crypto.Cipher import PKCS1_v1_5 - except ImportError: - raise ImportError('The "pycryptodome" package is required for Capital.com provider. Please install it by ' - 'running `pip install pycryptodome`') - - if timestamp is None: - timestamp = int(epoch()) - payload = password + '|' + str(timestamp) - payload = standard_b64encode(payload.encode('ascii')) - public_key = RSA.importKey(standard_b64decode(encryption_key.encode('ascii'))) - cipher = PKCS1_v1_5.new(public_key) - ciphertext = standard_b64encode(cipher.encrypt(payload)).decode() - return ciphertext - - -class CaptialComError(ValueError): - ... - - -class CapitalComProvider(Provider): - """ - Capital.com provider - """ - - timezone = 'US/Eastern' - config_keys = { - '# If it is a demo account': '', - 'demo': False, - '# These are required for Capital.com. 
You can get them from the Capital.com API settings.': '', - 'user_email': '', - 'api_key': '', - 'api_password': '' - } - - @classmethod - @override - def to_tradingview_timeframe(cls, timeframe: str) -> str: - """ - Convert Capital.com timeframe format to TradingView format. - - :param timeframe: Timeframe in Capital.com format (e.g. "MINUTE", "MINUTE_5", "HOUR", "DAY") - :return: Timeframe in TradingView format (e.g. "1", "5", "60", "1D") - :raises ValueError: If timeframe format is invalid - """ - try: - return TIMEFRAMES_INV[timeframe.upper()] - except KeyError: - raise ValueError(f"Invalid Capital.com timeframe format: {timeframe}") - - @classmethod - @override - def to_exchange_timeframe(cls, timeframe: str) -> str: - """ - Convert TradingView timeframe format to Capital.com format. - - :param timeframe: Timeframe in TradingView format (e.g. "1", "5", "60", "1D") - :return: Timeframe in Capital.com format (e.g. "MINUTE", "MINUTE_5", "HOUR", "DAY") - :raises ValueError: If timeframe format is invalid - """ - try: - return TIMEFRAMES[timeframe] - except KeyError: - raise ValueError(f"Unsupported timeframe for Capital.com: {timeframe}") - - def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, - ohlv_dir: Path | None = None, config_dir: Path | None = None): - """ - :param symbol: The symbol to get data for - :param timeframe: The timeframe to get data for in TradingView fmt - :param ohlv_dir: The directory to save OHLV data - :param config_dir: The directory to read the config file from - """ - super().__init__(symbol=symbol, timeframe=timeframe, ohlv_dir=ohlv_dir, config_dir=config_dir) - self.security_token = None - self.cst_token = None - self.session_data = {} - - # Basic API calls - - def __call__(self, endpoint: str, *, data: dict = None, method='post', _level=0) -> dict | list[dict]: - """ - Call General API endpoints - """ - from json import JSONDecodeError - try: - import httpx - except ImportError: - raise ImportError('The 
"httpx" package is required for Capital.com provider. Please install it by ' - 'running `pip install httpx`') - - headers = {'X-CAP-API-KEY': self.config['api_key']} - if self.security_token: - headers['X-SECURITY-TOKEN'] = self.security_token - if self.cst_token: - headers['CST'] = self.cst_token - - method = method.lower() - params = dict(headers=headers, timeout=50.0) - if method == 'get': - params['params'] = data - elif method in ('post', 'put'): - params['json'] = data - - url = URL_DEMO if self.config['demo'] else URL - url += ENDPOINT_PREFIX + endpoint - - res: httpx.Response = getattr(httpx, method)(url, **params) - try: - dict_res = res.json() - except JSONDecodeError: - raise CaptialComError(f"JSON Error: {res.text}") - - if res.is_error: - # Relogin/autologin if missing token - if dict_res['errorCode'] in ('error.security.client-token-missing', 'error.null.client.token') \ - and self.config['user_email'] and self.config['api_password'] and _level < 3: - # Create new session - self.create_session() - # Retry original request - return self(endpoint=endpoint, data=data, method=method, _level=_level + 1) - raise CaptialComError(f"API error occured: {dict_res['errorCode']}") - - try: - self.security_token = res.headers['X-SECURITY-TOKEN'] - except KeyError: - pass - try: - self.cst_token = res.headers['CST'] - except KeyError: - pass - - return dict_res - - def create_session(self): - """ - Create Session - """ - res: dict = self('session/encryptionKey', method='get') - encryption_key = res['encryptionKey'] - timestamp = res['timeStamp'] - user = self.config['user_email'] - api_password = self.config['api_password'] - password = encrypt_password(api_password, encryption_key, timestamp) - self.session_data = self('session', data=dict( - encryptedPassword=True, - identifier=user, - password=password - )) - - ### - - def get_market_details(self, search_term: str = None, symbols: list[str] = None) -> dict: - """ - Get and search market details - """ - data = {} 
- if search_term: - data['searchTerm'] = search_term - if symbols: - data['epics'] = ','.join(symbols) - res: dict = self('markets', data=data, method='get') - return res - - @lru_cache(maxsize=1) - def get_single_market_details(self) -> dict: - """ - Get market details of a symbol - """ - assert self.symbol is not None - return cast(dict, self('markets/' + self.symbol, method='get')) - - def get_historical_prices(self, time_from: datetime = None, time_to: datetime = None, limit=1000) -> dict: - """ - Get historical prices of market - - :param time_from: The start time (interpreted as UTC) - :param time_to: The end time (interpreted as UTC) - :param limit: The maximum number of candles to return - """ - assert self.symbol is not None - assert self.xchg_timeframe is not None - params = {'resolution': self.xchg_timeframe, 'max': limit} - if time_from is not None: - params['from'] = time_from.isoformat() - if time_to is not None: - params['to'] = time_to.isoformat() - res: dict = self('prices/' + self.symbol, data=params, method='get') - return res - - @override - def get_opening_hours_and_sessions(self) \ - -> tuple[list[SymInfoInterval], list[SymInfoSession], list[SymInfoSession]]: - """ - Get opening hours and sessions of a symbol - """ - from ..types.weekdays import Weekdays - - market_details = self.get_single_market_details() - instrument = market_details['instrument'] - opening_hours = instrument['openingHours'] - - # noinspection PyShadowingNames - def timetz(t: time, tz: str) -> time: - dt = datetime.now(ZoneInfo(tz)) - dt = dt.replace(hour=t.hour, minute=t.minute, second=t.second, microsecond=t.microsecond) - dt = dt.astimezone(ZoneInfo(self.timezone)) - return dt.time() - - tz = opening_hours['zone'] - intervals = [] - session_starts = [] - session_ends = [] - - for day in ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']: - ohs = opening_hours[day] - day = Weekdays[day.capitalize()] - for oh in ohs: - oh = oh.replace('00:00', '').strip() - if 
oh.startswith('-'): - t = timetz(time.fromisoformat(oh[2:]), tz) - intervals.append(SymInfoInterval(day=day.value, start=time(hour=0, minute=0), end=t)) - session_ends.append(SymInfoSession(day=day.value, time=t)) - elif oh.endswith('-'): - t = timetz(time.fromisoformat(oh[:-2]), tz) - intervals.append(SymInfoInterval(day=day.value, start=t, end=time(hour=0, minute=0))) - session_starts.append(SymInfoSession(day=day.value, time=t)) - - return intervals, session_starts, session_ends - - @override - def get_list_of_symbols(self, *args, search_term: str = None) -> list[str]: - """ - Get list of symbols - - :param search_term: Search term - """ - res: dict = self.get_market_details(search_term=search_term) - markets = [m['epic'] for m in res['markets']] - markets.sort() - return markets - - @override - def update_symbol_info(self) -> SymInfo: - """ - Update symbol info from the exchange - """ - market_details = self.get_single_market_details() - instrument = market_details['instrument'] - - # Get opening hours and sessions - opening_hours, session_starts, session_ends = self.get_opening_hours_and_sessions() - - dealing_rules = market_details['dealingRules'] - mintick = dealing_rules['minStepDistance']["value"] - minmove = mintick - pricescale = 1 - while minmove < 1.0: - pricescale *= 10 - minmove *= 10 - - # Download some data to get the average spread - res = self.get_historical_prices() - avg_spred_summ = 0.0 - for p in res['prices']: - spread = abs(p['closePrice']['bid'] - p['closePrice']['ask']) - avg_spred_summ += spread - avg_spred = avg_spred_summ / len(res['prices']) - - return SymInfo( - prefix=self.__class__.__name__.replace('Provider', '').upper(), - description=instrument['name'], - ticker=instrument['epic'], - currency=instrument['currency'], - basecurrency=instrument['symbol'].split('/')[0] if '/' in instrument['symbol'] else None, - period=self.timeframe, - type=TYPES[instrument['type']] if instrument['type'] in TYPES else 'other', - mintick=mintick, - 
pricescale=pricescale, - minmove=minmove, - pointvalue=instrument['lotSize'], - timezone=self.timezone, - opening_hours=opening_hours, - session_starts=session_starts, - session_ends=session_ends, - # This is not found on TV, but it could be useful - avg_spread=avg_spred, - ) - - @override - def download_ohlcv(self, time_from: datetime, time_to: datetime, - on_progress: Callable[[datetime], None] | None = None, - limit: int | None = None): - """ - Download OHLV data - - :param time_from: The start time - :param time_to: The end time - :param on_progress: Optional callback to call on progress - :param limit: Override the automatic chunk size (number of bars per API request) - """ - - # Shortcuts for the time_from and time_to - tf = time_from.replace(tzinfo=None) - tt = (time_to if time_to is not None else datetime.now(UTC)).replace(tzinfo=None) - - try: - # Loop through the time range - d = None - while tf < tt: - if on_progress: - on_progress(tf) - - res: dict = self.get_historical_prices(time_from=tf, limit=limit or 1000) - if not res or not res['prices']: - break - ps = res['prices'] - if len(ps) == 1 and d is not None: - break - - for p in ps: - t = datetime.fromisoformat(p['snapshotTimeUTC']) - - # Filter wrong data, are not on TradingView :-/ - if p['lastTradedVolume'] <= 1.0: - tf = t + timedelta(minutes=1) - continue - - if t > tt: - raise StopIteration - ohlcv = OHLCV( - timestamp=int(t.timestamp()), - # Tradingview uses bidprice, not midprice - open=float(p['openPrice']['bid']), - high=float(p['highPrice']['bid']), - low=float(p['lowPrice']['bid']), - close=float(p['closePrice']['bid']), - volume=float(p['lastTradedVolume']), - ) - - self.save_ohlcv_data(ohlcv) - tf = t + timedelta(minutes=1) - - except CaptialComError: - pass - - except StopIteration: - pass - - if on_progress: - on_progress(tt) diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index 34f7ad2..d4257a4 100644 --- a/src/pynecore/providers/ccxt.py +++ 
b/src/pynecore/providers/ccxt.py @@ -1,17 +1,15 @@ from typing import Callable +from dataclasses import dataclass import sys -# Python 3.12+ if sys.version_info >= (3, 12): from typing import override else: - # Python 3.11 def override(func): return func import re -from datetime import datetime, UTC, timedelta +from datetime import datetime, UTC, timedelta, time from pathlib import Path -from datetime import time import tomllib from .provider import Provider @@ -23,11 +21,11 @@ def override(func): known_limits = { 'binance': 1000, - 'bitget': { # Bitget has strict timeframe-dependent limits - '1w': 12, # Weekly: max 12 bars (84 days) - '1d': 300, # Daily: max 300 bars - '4h': 1000, # 4-hour: max 1000 bars - 'default': 200 # Safe default for other timeframes (1h, 1m, etc.) + 'bitget': { + '1w': 12, + '1d': 300, + '4h': 1000, + 'default': 200 }, 'bitmex': 500, 'bybit': 200, @@ -40,38 +38,31 @@ def override(func): def add_space_before_uppercase(s): - # Use regex to add a space before each uppercase letter return re.sub(r'(? str: Convert CCXT timeframe fmt to TradingView fmt. :param timeframe: Timeframe in CCXT fmt (e.g. "1m", "5m", "1h", "1d", "1w", "1M") - :type timeframe: str :return: Timeframe in TradingView fmt (e.g. "1", "5", "60", "1D", "1W", "1M") - :rtype: str - - :Examples: - - >>> Provider.to_tradingview_timeframe("1m") # "1" - >>> Provider.to_tradingview_timeframe("5m") # "5" - >>> Provider.to_tradingview_timeframe("1h") # "60" - >>> Provider.to_tradingview_timeframe("1d") # "1D" - :raises ValueError: If timeframe fmt is invalid """ if len(timeframe) < 2: @@ -99,7 +80,6 @@ def to_tradingview_timeframe(cls, timeframe: str) -> str: unit = timeframe[-1] value = timeframe[:-1] - # Verify that value is a valid number if not value.isdigit() or int(value) <= 0: raise ValueError(f"Invalid timeframe value: {value}") @@ -123,17 +103,7 @@ def to_exchange_timeframe(cls, timeframe: str) -> str: Convert TradingView timeframe fmt to CCXT fmt. 
:param timeframe: Timeframe in TradingView fmt (e.g. "1", "5", "60", "1D", "1W", "1M") - :type timeframe: str :return: Timeframe in CCXT fmt (e.g. "1m", "5m", "1h", "1d", "1w", "1M") - :rtype: str - - :Examples: - - >>> Provider.to_exchange_timeframe("1") # "1m" - >>> Provider.to_exchange_timeframe("5") # "5m" - >>> Provider.to_exchange_timeframe("60") # "1h" - >>> Provider.to_exchange_timeframe("1D") # "1d" - :raises ValueError: If timeframe fmt is invalid """ if timeframe.isdigit(): @@ -150,7 +120,6 @@ def to_exchange_timeframe(cls, timeframe: str) -> str: unit = timeframe[-1].upper() value = timeframe[:-1] - # Verify that value is a valid number if not value.isdigit() or int(value) <= 0: raise ValueError(f"Invalid timeframe value: {value}") @@ -165,19 +134,19 @@ def to_exchange_timeframe(cls, timeframe: str) -> str: @override def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, - ohlv_dir: Path | None = None, config_dir: Path | None = None): + ohlv_dir: Path | None = None, config: object | None = None): """ - :param symbol: The symbol to get data for + :param symbol: The symbol to get data for (e.g. "binance:BTC/USDT") :param timeframe: The timeframe to get data for in TradingView fmt - :param ohlv_dir: The directory to save OHLV data - :param config_dir: The directory to read the config file from + :param ohlv_dir: The directory to save OHLCV data + :param config: Pre-loaded CCXTConfig instance """ try: import ccxt except ImportError: raise ImportError("CCXT is not installed. Please install it using `pip install ccxt`.") - super().__init__(symbol=symbol, timeframe=timeframe, ohlv_dir=ohlv_dir, config_dir=config_dir) + super().__init__(symbol=symbol, timeframe=timeframe, ohlv_dir=ohlv_dir, config=config) # Check symbol fmt try: @@ -190,25 +159,32 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, if not xchg: raise ValueError("Error: Exchange name not provided! Use 'exchange:symbol' fmt! 
" - "(or symple exchange, if you want to list symbols)") + "(or simple exchange, if you want to list symbols)") self.symbol = symbol exchange_name = xchg.lower() - # Check if there's an exchange-specific configuration + # Build exchange config from the Config dataclass + optional exchange-specific TOML sections exchange_config = {} - - # Load configuration from providers.toml - with open(self.config_dir / 'providers.toml', 'rb') as f: - data = tomllib.load(f) - - # Look for exchange-specific config - exchange_section = f'ccxt.{exchange_name}' - if exchange_section in data: - exchange_config = data[exchange_section] + if self.config: + # Base config from dataclass fields + exchange_config = { + k: v for k, v in vars(self.config).items() if v + } + + # Check for exchange-specific override in the raw TOML + if self.ohlcv_path: + config_dir = self.ohlcv_path.parent.parent / 'config' else: - # Use the default ccxt config - exchange_config = self.config + config_dir = None + + if config_dir: + toml_path = config_dir / 'ccxt.toml' + if toml_path.exists(): + with open(toml_path, 'rb') as f: + raw_toml = tomllib.load(f) + if exchange_name in raw_toml and isinstance(raw_toml[exchange_name], dict): + exchange_config = raw_toml[exchange_name] # Create the CCXT client self._client: ccxt.Exchange = getattr(ccxt, exchange_name)({ @@ -220,41 +196,43 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, @override def get_list_of_symbols(self, *args, **kwargs) -> list[str]: """ - Get list of symbols + Get list of symbols. """ self._client.load_markets() return self._client.symbols or [] - @classmethod - def get_opening_hours_and_sessions(cls) \ - -> tuple[list[SymInfoInterval], list[SymInfoSession], list[SymInfoSession]]: + @staticmethod + def _create_24_7_sessions() -> tuple[ + list[SymInfoInterval], list[SymInfoSession], list[SymInfoSession] + ]: """ - Process opening hours information + Create 24/7 opening hours and sessions for crypto markets. 
+ + :return: Tuple of (opening_hours, session_starts, session_ends). """ opening_hours = [] session_starts = [] session_ends = [] for i in range(7): opening_hours.append( - SymInfoInterval(day=i, start=time(hour=0, minute=0), end=time(hour=23, minute=59, second=59))) + SymInfoInterval(day=i, start=time(hour=0, minute=0), + end=time(hour=23, minute=59, second=59))) session_starts.append(SymInfoSession(day=i, time=time(hour=0, minute=0))) session_ends.append(SymInfoSession(day=i, time=time(hour=23, minute=59, second=59))) - return opening_hours, session_starts, session_ends @override def update_symbol_info(self) -> SymInfo: """ - Update symbol info from the exchange + Update symbol info from the exchange. """ self._client.load_markets() assert self._client.markets market_details = self._client.markets[self.symbol] - # Get opening hours and sessions - opening_hours, session_starts, session_ends = self.get_opening_hours_and_sessions() + opening_hours, session_starts, session_ends = self._create_24_7_sessions() - # Calculate minmove and pricescale from mintick # syminfo.minmove / syminfo.pricescale = syminfo.mintick + # Calculate minmove and pricescale from mintick mintick = market_details['precision']['price'] minmove = mintick pricescale = 1 @@ -279,7 +257,7 @@ def update_symbol_info(self) -> SymInfo: currency=market_details['quote'], basecurrency=market_details['base'], period=self.timeframe, - type="crypto", # it could be better, but TV just call everything "crypto" + type="crypto", mintick=mintick, pricescale=pricescale, minmove=minmove, @@ -288,7 +266,6 @@ def update_symbol_info(self) -> SymInfo: opening_hours=opening_hours, session_starts=session_starts, session_ends=session_ends, - # This is not found on TV, but it could be useful taker_fee=market_details.get('taker'), maker_fee=market_details.get('maker'), ) @@ -298,37 +275,30 @@ def download_ohlcv(self, time_from: datetime, time_to: datetime, on_progress: Callable[[datetime], None] | None = None, limit: int | 
None = None): """ - Download OHLV data + Download OHLCV data. - :param time_from: The start time - :param time_to: The end time - :param on_progress: Optional callback to call on progress - :param limit: Override the automatic chunk size (number of bars per API request) + :param time_from: The start time. + :param time_to: The end time. + :param on_progress: Optional callback to call on progress. + :param limit: Override the automatic chunk size. """ - # Shortcuts for the time_from and time_to tf: datetime = time_from.replace(tzinfo=None) tt: datetime = (time_to if time_to is not None else datetime.now(UTC)).replace(tzinfo=None) - # Get the limit by exchange or use safe default (unless overridden by user) if limit is None: assert self._client.id limit_config = known_limits.get(self._client.id, 100) - # Support both simple int limits and timeframe-specific dict limits if isinstance(limit_config, dict): - # Timeframe-specific limits (e.g., bitget) limit = limit_config.get(self.xchg_timeframe, limit_config.get('default', 100)) else: - # Simple int limit (backward compatible) limit = limit_config try: - # Loop through the time range while tf < tt: if on_progress: on_progress(tf) - # Fetch a part of data res: list = self._client.fetch_ohlcv( symbol=self.symbol, limit=limit, @@ -336,11 +306,9 @@ def download_ohlcv(self, time_from: datetime, time_to: datetime, since=self._client.parse8601(tf.isoformat()) ) - # If no data, skip to the next day, maybe the symbol was not yet traded that day if not res: tf += timedelta(days=1) - # Process the data for r in res: t = int(r[0] / 1000) dt = datetime.fromtimestamp(t, UTC).replace(tzinfo=None) @@ -357,7 +325,7 @@ def download_ohlcv(self, time_from: datetime, time_to: datetime, ) self.save_ohlcv_data(ohlcv) - tf = dt + timedelta(minutes=1) # Move to next time step + 1 minute + tf = dt + timedelta(minutes=1) except StopIteration: pass diff --git a/src/pynecore/providers/provider.py b/src/pynecore/providers/provider.py index 
3ef0b56..212d4fe 100644 --- a/src/pynecore/providers/provider.py +++ b/src/pynecore/providers/provider.py @@ -2,85 +2,97 @@ from abc import abstractmethod, ABCMeta from pathlib import Path from datetime import datetime -import tomllib from ..types.ohlcv import OHLCV -from pynecore.core.syminfo import SymInfo, SymInfoInterval, SymInfoSession +from pynecore.core.syminfo import SymInfo from pynecore.core.ohlcv_file import OHLCVWriter, OHLCVReader class Provider(metaclass=ABCMeta): """ - Base class for all providers + Base class for all data providers. + + Subclasses must implement the abstract methods and define a ``Config`` + dataclass for configuration (used by :func:`pynecore.core.config.ensure_config`). """ - timezone = 'UTC' - """ Timezone of the provider """ + plugin_name: str = "" + """Display name of the provider (e.g. "Capital.com", "Binance").""" + + plugin_version: str = "0.0.0" + """Provider plugin version.""" + + min_pynecore_version: str = "" + """Minimum compatible PyneCore version.""" + + Config: type | None = None + """Override in subclass with a ``@dataclass`` for provider configuration.""" + + timezone: str = 'UTC' + """Default timezone of the provider.""" symbol: str | None = None - """ Symbol of the provider """ + """Symbol of the provider.""" timeframe: str | None = None - """ Timeframe of the provider """ + """Timeframe of the provider.""" xchg_timeframe: str | None = None - """ TradingView timeframe """ + """Exchange-specific timeframe format.""" ohlcv_path: Path | None = None - """ Directory to save OHLV data """ - - config_keys = { - '# Settings for the provider': '', - } - """ Key-value pairs to put into providers.toml, if key starts with '#' it is a comment. 
""" - - config: dict[str, str] = {} - """ Config dict for the exchange loaded from providers.toml """ + """Path to the OHLCV data file.""" @classmethod @abstractmethod def to_tradingview_timeframe(cls, timeframe: str) -> str: """ - Convert timeframe to TradingView fmt - https://www.tradingview.com/pine-script-reference/v6/#var_timeframe.period + Convert timeframe to TradingView format. + + :param timeframe: Timeframe in exchange format. + :return: Timeframe in TradingView format. """ @classmethod @abstractmethod def to_exchange_timeframe(cls, timeframe: str) -> str: """ - Convert timeframe to exchange fmt + Convert timeframe to exchange format. + + :param timeframe: Timeframe in TradingView format. + :return: Timeframe in exchange format. """ @classmethod - def get_ohlcv_path(cls, symbol: str, timeframe: str, ohlv_dir: Path, provider_name: str | None = None) -> Path: + def get_ohlcv_path(cls, symbol: str, timeframe: str, ohlv_dir: Path, + provider_name: str | None = None) -> Path: """ - Get the output path of the OHLV data + Get the output path of the OHLCV data file. + + :param symbol: Symbol name. + :param timeframe: Timeframe in TradingView format. + :param ohlv_dir: Directory to save OHLCV data. + :param provider_name: Override provider name in filename. + :return: Path to the OHLCV file. """ return ohlv_dir / (f"{provider_name or cls.__name__.lower().replace('provider', '')}" f"_{symbol.replace('/', '_').replace(':', '_').upper()}" f"_{timeframe}.ohlcv") def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, - ohlv_dir: Path | None = None, config_dir: Path | None = None): + ohlv_dir: Path | None = None, config: object | None = None): """ - :param symbol: The symbol to get data for - :param timeframe: The timeframe to get data for in TradingView fmt - :param ohlv_dir: The directory to save OHLV data - :param config_dir: The directory to read the config file from + :param symbol: The symbol to get data for. 
+ :param timeframe: The timeframe to get data for in TradingView format. + :param ohlv_dir: The directory to save OHLCV data. + :param config: Pre-loaded config dataclass instance. """ self.symbol = symbol self.timeframe = timeframe self.xchg_timeframe = self.to_exchange_timeframe(timeframe) if timeframe else None self.ohlcv_path = self.get_ohlcv_path(symbol, timeframe, ohlv_dir) if ohlv_dir else None self.ohlcv_file = OHLCVWriter(self.ohlcv_path) if self.ohlcv_path else None - - if not config_dir: # Default config dir from the parent of the ohlcv_dir - assert self.ohlcv_path is not None - config_dir = self.ohlcv_path.parent.parent / 'config' - self.config_dir = config_dir - - self.load_config() + self.config = config def __enter__(self) -> OHLCVWriter: assert self.ohlcv_file is not None @@ -93,39 +105,39 @@ def __exit__(self, exc_type, exc_val, exc_tb): @abstractmethod def get_list_of_symbols(self, *args, **kwargs) -> list[str]: """ - Get list of symbols - """ + Get list of available symbols. - def load_config(self): - """ - Load config from providers.toml + :return: List of symbol names. """ - with open(self.config_dir / 'providers.toml', 'rb') as f: - data = tomllib.load(f) - self.config = data[self.__class__.__name__.replace('Provider', '').lower()] @abstractmethod def update_symbol_info(self) -> SymInfo: """ - Update symbol info from the exchange + Fetch and return symbol info from the exchange. + + This should include opening hours and session data. + + :return: Symbol information. """ def is_symbol_info_exists(self) -> bool: """ - Check if symbol info file exists + Check if the symbol info TOML file exists. + + :return: True if the file exists. """ assert self.ohlcv_path is not None return self.ohlcv_path.with_suffix('.toml').exists() def get_symbol_info(self, force_update=False) -> SymInfo: """ - Get market details of a symbol + Get symbol info, loading from cache or fetching from exchange. 
- :param force_update: Force update the symbol info + :param force_update: Force update from exchange even if cached. + :return: Symbol information. """ assert self.ohlcv_path is not None toml_path = self.ohlcv_path.with_suffix('.toml') - # Check if file already exists if self.is_symbol_info_exists() and not force_update: return SymInfo.load_toml(toml_path) @@ -133,18 +145,11 @@ def get_symbol_info(self, force_update=False) -> SymInfo: sym_info.save_toml(toml_path) return sym_info - @abstractmethod - def get_opening_hours_and_sessions(self) \ - -> tuple[list[SymInfoInterval], list[SymInfoSession], list[SymInfoSession]]: - """ - Get opening hours and sessions of a symbol - """ - def save_ohlcv_data(self, data: OHLCV | list[OHLCV]): """ - Save OHLV data to a file + Save OHLCV data to the file. - :param data: OHLV data + :param data: Single OHLCV record or list of records. """ assert self.ohlcv_file is not None if isinstance(data, OHLCV): @@ -158,18 +163,20 @@ def download_ohlcv(self, time_from: datetime, time_to: datetime, on_progress: Callable[[datetime], None] | None = None, limit: int | None = None): """ - Download OHLV data + Download OHLCV data from the exchange. - In the user code you can call `self.save_ohlcv_data()` to save the data into the data file + Use :meth:`save_ohlcv_data` to write records to the data file. - :param time_from: The start time - :param time_to: The end time - :param on_progress: Optional callback to call on progress - :param limit: Override the automatic chunk size (number of bars per API request) + :param time_from: The start time. + :param time_to: The end time. + :param on_progress: Optional progress callback. + :param limit: Override the automatic chunk size. """ def load_ohlcv_data(self) -> OHLCVReader: """ - Load OHLV data from the file + Load OHLCV data from the file. + + :return: An OHLCVReader instance. 
""" return OHLCVReader(str(self.ohlcv_path)) diff --git a/tests/conftest.py b/tests/conftest.py index 6620f1d..4b56777 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -245,9 +245,12 @@ def fixture(): @pytest.fixture(scope="function") def syminfo() -> SymInfo: - # Get 0-24/7 opening hours - from pynecore.providers.ccxt import CCXTProvider - opening_hours, session_starts, session_ends = CCXTProvider.get_opening_hours_and_sessions() + # 24/7 opening hours for crypto testing + from datetime import time + from pynecore.core.syminfo import SymInfoInterval, SymInfoSession + opening_hours = [SymInfoInterval(day=i, start=time(0, 0), end=time(23, 59, 59)) for i in range(7)] + session_starts = [SymInfoSession(day=i, time=time(0, 0)) for i in range(7)] + session_ends = [SymInfoSession(day=i, time=time(23, 59, 59)) for i in range(7)] return SymInfo( prefix="PYTEST", diff --git a/tests/t00_pynecore/core/test_013_config.py b/tests/t00_pynecore/core/test_013_config.py new file mode 100644 index 0000000..1f7def7 --- /dev/null +++ b/tests/t00_pynecore/core/test_013_config.py @@ -0,0 +1,322 @@ +"""Tests for the general-purpose dataclass config system.""" + +from dataclasses import dataclass +from pathlib import Path + +import pytest + +from pynecore.core.config import ( + ensure_config, + extract_field_docs, + format_value, + generate_toml, + parse_toml_with_comments, +) + + +@dataclass +class SampleConfig: + """Sample configuration""" + + api_key: str = "" + """API key for the service""" + + timeout: int = 30 + """Request timeout in seconds""" + + rate: float = 1.5 + """Rate multiplier""" + + enabled: bool = False + """Enable the feature""" + + +@dataclass +class MinimalConfig: + """Minimal""" + + name: str = "default" + + +def __test_format_value_str__(): + assert format_value("hello") == '"hello"' + assert format_value("") == '""' + assert format_value('has "quotes"') == '"has \\"quotes\\""' + assert format_value("line\nbreak") == '"line\\nbreak"' + assert 
format_value("back\\slash") == '"back\\\\slash"' + + +def __test_format_value_int__(): + assert format_value(0) == "0" + assert format_value(42) == "42" + assert format_value(-7) == "-7" + + +def __test_format_value_float__(): + assert format_value(3.14) == "3.14" + assert format_value(0.0) == "0.0" + + +def __test_format_value_bool__(): + assert format_value(True) == "true" + assert format_value(False) == "false" + + +def __test_extract_field_docs__(): + docs = extract_field_docs(SampleConfig) + assert docs["api_key"] == "API key for the service" + assert docs["timeout"] == "Request timeout in seconds" + assert docs["rate"] == "Rate multiplier" + assert docs["enabled"] == "Enable the feature" + + +def __test_extract_field_docs_missing__(): + @dataclass + class NoDocs: + x: int = 0 + y: str = "" + + docs = extract_field_docs(NoDocs) + assert docs == {} + + +def __test_first_run_all_defaults__(tmp_path: Path): + """First run: generates TOML with all values commented out.""" + cfg_path = tmp_path / "config.toml" + result = ensure_config(SampleConfig, cfg_path) + + content = cfg_path.read_text() + assert "# Sample configuration" in content + assert '#api_key = ""' in content + assert "#timeout = 30" in content + assert "#rate = 1.5" in content + assert "#enabled = false" in content + + for line in content.splitlines(): + stripped = line.strip() + if "=" in stripped and not stripped.startswith("#"): + pytest.fail(f"Unexpected uncommented line: {stripped}") + + assert result.api_key == "" + assert result.timeout == 30 + assert result.rate == 1.5 + assert result.enabled is False + + +def __test_user_values_preserved__(tmp_path: Path): + """User-modified values survive regeneration.""" + cfg_path = tmp_path / "config.toml" + + ensure_config(SampleConfig, cfg_path) + + content = cfg_path.read_text() + content = content.replace('#api_key = ""', 'api_key = "my_key"') + content = content.replace("#enabled = false", "enabled = true") + cfg_path.write_text(content) + + result = 
ensure_config(SampleConfig, cfg_path) + + content = cfg_path.read_text() + assert 'api_key = "my_key"' in content + assert "enabled = true" in content + assert "#timeout = 30" in content + assert "#rate = 1.5" in content + + assert result.api_key == "my_key" + assert result.enabled is True + assert result.timeout == 30 + assert result.rate == 1.5 + + +def __test_new_field_appears__(tmp_path: Path): + """A new field in the dataclass appears commented in existing TOML.""" + cfg_path = tmp_path / "config.toml" + + ensure_config(MinimalConfig, cfg_path) + + content = cfg_path.read_text() + assert '#name = "default"' in content + + cfg_path.write_text('name = "custom"\n') + + result = ensure_config(SampleConfig, cfg_path) + + content = cfg_path.read_text() + assert "#timeout = 30" in content + assert "#rate = 1.5" in content + assert "#enabled = false" in content + + +def __test_removed_field_disappears__(tmp_path: Path): + """A field removed from the dataclass disappears from TOML.""" + cfg_path = tmp_path / "config.toml" + + ensure_config(SampleConfig, cfg_path) + + content = cfg_path.read_text() + content = content.replace("#timeout = 30", "timeout = 60") + cfg_path.write_text(content) + + result = ensure_config(MinimalConfig, cfg_path) + + content = cfg_path.read_text() + assert "timeout" not in content + assert "api_key" not in content + assert "rate" not in content + assert "enabled" not in content + assert '#name = "default"' in content + + +def __test_user_resets_to_default__(tmp_path: Path): + """When user comments a field back, it stays commented.""" + cfg_path = tmp_path / "config.toml" + + ensure_config(SampleConfig, cfg_path) + + result = ensure_config(SampleConfig, cfg_path) + + content = cfg_path.read_text() + assert "#timeout = 30" in content + assert result.timeout == 30 + + +def __test_type_validation__(tmp_path: Path): + """All four types are correctly formatted and parsed.""" + cfg_path = tmp_path / "config.toml" + + cfg_path.write_text( + 'api_key = 
"test"\n' + 'timeout = 99\n' + 'rate = 2.718\n' + 'enabled = true\n' + ) + + result = ensure_config(SampleConfig, cfg_path) + + assert result.api_key == "test" + assert result.timeout == 99 + assert result.rate == 2.718 + assert result.enabled is True + + content = cfg_path.read_text() + assert 'api_key = "test"' in content + assert "timeout = 99" in content + assert "rate = 2.718" in content + assert "enabled = true" in content + + +def __test_class_docstring_as_header__(tmp_path: Path): + """Class docstring becomes the header comment in TOML.""" + cfg_path = tmp_path / "config.toml" + ensure_config(SampleConfig, cfg_path) + + content = cfg_path.read_text() + lines = content.splitlines() + assert lines[0] == "# Sample configuration" + + +def __test_extra_sections_preserved__(tmp_path: Path): + """TOML table sections not in the dataclass are preserved.""" + cfg_path = tmp_path / "config.toml" + + ensure_config(SampleConfig, cfg_path) + + content = cfg_path.read_text() + content += '\n[binance]\napiKey = "binance_key"\nsecret = "binance_secret"\n' + cfg_path.write_text(content) + + result = ensure_config(SampleConfig, cfg_path) + + content = cfg_path.read_text() + assert "[binance]" in content + assert 'apiKey = "binance_key"' in content + assert 'secret = "binance_secret"' in content + + +def __test_ensure_config_returns_correct_instance__(tmp_path: Path): + """ensure_config returns a properly typed dataclass instance.""" + cfg_path = tmp_path / "config.toml" + + cfg_path.write_text('api_key = "real_key"\ntimeout = 60\n') + + result = ensure_config(SampleConfig, cfg_path) + + assert isinstance(result, SampleConfig) + assert result.api_key == "real_key" + assert result.timeout == 60 + assert result.rate == 1.5 + assert result.enabled is False + + +def __test_generate_toml_standalone__(): + """generate_toml produces correct output without file I/O.""" + toml = generate_toml(SampleConfig) + assert "# Sample configuration" in toml + assert '#api_key = ""' in toml + 
assert "#timeout = 30" in toml + + toml_with_user = generate_toml(SampleConfig, {"api_key": "key1", "timeout": 99}) + assert 'api_key = "key1"' in toml_with_user + assert "timeout = 99" in toml_with_user + assert "#rate = 1.5" in toml_with_user + + +def __test_parse_toml_with_comments__(): + """parse_toml_with_comments returns only uncommented values.""" + content = '# header\n#commented = 1\nactive = 2\n' + result = parse_toml_with_comments(content) + assert result == {"active": 2} + assert "commented" not in result + + +def __test_int_to_float_coercion__(tmp_path: Path): + """Integer TOML values are coerced to float when the field default is float.""" + cfg_path = tmp_path / "config.toml" + cfg_path.write_text("rate = 3\n") + + result = ensure_config(SampleConfig, cfg_path) + assert result.rate == 3.0 + assert isinstance(result.rate, float) + + +def __test_creates_parent_directories__(tmp_path: Path): + """ensure_config creates parent directories if they don't exist.""" + cfg_path = tmp_path / "sub" / "dir" / "config.toml" + result = ensure_config(SampleConfig, cfg_path) + assert cfg_path.exists() + assert isinstance(result, SampleConfig) + + +def __test_none_default_field__(tmp_path: Path): + """Fields with None default are written as #key =.""" + + @dataclass + class WithNone: + """Config with optional field""" + + name: str = "test" + """The name""" + + tag: str | None = None + """Optional tag""" + + cfg_path = tmp_path / "config.toml" + ensure_config(WithNone, cfg_path) + + content = cfg_path.read_text() + assert '#name = "test"' in content + assert "#tag =" in content + assert "#tag = None" not in content + + +def __test_idempotent_regeneration__(tmp_path: Path): + """Running ensure_config twice with no changes produces identical files.""" + cfg_path = tmp_path / "config.toml" + + ensure_config(SampleConfig, cfg_path) + content1 = cfg_path.read_text() + + ensure_config(SampleConfig, cfg_path) + content2 = cfg_path.read_text() + + assert content1 == content2 
diff --git a/tests/t00_pynecore/core/test_014_plugin.py b/tests/t00_pynecore/core/test_014_plugin.py new file mode 100644 index 0000000..a454cf1 --- /dev/null +++ b/tests/t00_pynecore/core/test_014_plugin.py @@ -0,0 +1,59 @@ +""" +Tests for the plugin discovery and loading system. +""" + +from pynecore.core.plugin import ( + discover_plugins, + load_plugin, + get_available_plugin_names, + PluginNotFoundError, +) + + +def __test_discover_plugins_returns_dict__(): + """discover_plugins returns a dict (possibly empty for unknown group)""" + result = discover_plugins("pyne.nonexistent_group_for_testing") + assert isinstance(result, dict) + assert len(result) == 0 + + +def __test_discover_providers__(): + """discover_plugins finds CCXT provider via entry_points""" + result = discover_plugins("pyne.provider") + assert "ccxt" in result, ( + f"CCXT not found in pyne.provider entry points. " + f"Available: {list(result.keys())}. " + f"Make sure pynecore is installed with: pip install -e pynecore/" + ) + + +def __test_load_plugin_ccxt__(): + """load_plugin loads the CCXTProvider class""" + from pynecore.providers.ccxt import CCXTProvider + + cls = load_plugin("pyne.provider", "ccxt") + assert cls is CCXTProvider + + +def __test_load_plugin_not_found__(): + """load_plugin raises PluginNotFoundError for missing plugin""" + try: + load_plugin("pyne.provider", "nonexistent_provider_xyz") + assert False, "Should have raised PluginNotFoundError" + except PluginNotFoundError as e: + assert "nonexistent_provider_xyz" in str(e) + assert "pip install" in str(e) + + +def __test_get_available_plugin_names__(): + """get_available_plugin_names returns sorted list including ccxt""" + names = get_available_plugin_names("pyne.provider") + assert isinstance(names, list) + assert "ccxt" in names + assert names == sorted(names) + + +def __test_get_available_plugin_names_empty_group__(): + """get_available_plugin_names returns empty list for unknown group""" + names = 
get_available_plugin_names("pyne.nonexistent_group_for_testing") + assert names == [] diff --git a/tests/t00_pynecore/data/test_002_ccxt_provider.py b/tests/t00_pynecore/data/test_002_ccxt_provider.py index 095e1a3..1e484d0 100644 --- a/tests/t00_pynecore/data/test_002_ccxt_provider.py +++ b/tests/t00_pynecore/data/test_002_ccxt_provider.py @@ -54,7 +54,7 @@ def __test_ccxt_provider_path_handling__(tmp_path): def __test_ccxt_session_hours__(): """Test session hours creation for CCXT provider""" - opening_hours, session_starts, session_ends = CCXTProvider.get_opening_hours_and_sessions() + opening_hours, session_starts, session_ends = CCXTProvider._create_24_7_sessions() # Check that we have entries for all days assert len(opening_hours) == 7 @@ -108,19 +108,11 @@ def __test_ccxt_real_data_download__(tmp_path): if not config_dir.exists() or not config_dir.is_dir(): pytest.skip("No config directory found in workdir") - # Check if providers.toml exists + # Check if ccxt config exists (new per-plugin config or legacy providers.toml) + ccxt_toml = config_dir / "ccxt.toml" providers_toml = config_dir / "providers.toml" - if not providers_toml.exists() or not providers_toml.is_file(): - pytest.skip("No providers.toml found in config directory") - - # Check if ccxt section exists in providers.toml - try: - with open(providers_toml, 'rb') as f: - config = tomllib.load(f) - if 'ccxt' not in config: - pytest.skip("No ccxt section found in providers.toml") - except Exception as e: - pytest.skip(f"Error reading providers.toml: {str(e)}") + if not ccxt_toml.exists() and not providers_toml.exists(): + pytest.skip("No ccxt.toml or providers.toml found in config directory") # Create temporary data directory for the test data_dir = tmp_path / "data" @@ -154,12 +146,17 @@ def __test_ccxt_real_data_download__(tmp_path): print(f"Could not load reference data: {e}") expected_data = None + # Load config via plugin config system + from pynecore.core.config import ensure_config + from 
pynecore.providers.ccxt import CCXTConfig + config = ensure_config(CCXTConfig, config_dir / 'ccxt.toml') + # Create provider instance provider = CCXTProvider( symbol=symbol, timeframe=timeframe, ohlv_dir=data_dir, - config_dir=config_dir + config=config ) # Define a function to download data From 7e08984f2d09978bee63cee93f3c9a839dd3d371 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 30 Mar 2026 11:57:33 +0200 Subject: [PATCH 02/64] feat: add `pyne plugin list/info` CLI commands, move capitalcom test to plugin - Add cli/commands/plugin.py with list and info subcommands - Register plugin command in CLI - Move capitalcom test and reference data to plugins/pynecore-capitalcom/ - Delete old capitalcom test from pynecore --- src/pynecore/cli/commands/__init__.py | 4 +- src/pynecore/cli/commands/plugin.py | 99 ++++++ .../data/test_003_capitalcom_provider.py | 288 ------------------ 3 files changed, 101 insertions(+), 290 deletions(-) create mode 100644 src/pynecore/cli/commands/plugin.py delete mode 100644 tests/t00_pynecore/data/test_003_capitalcom_provider.py diff --git a/src/pynecore/cli/commands/__init__.py b/src/pynecore/cli/commands/__init__.py index bfd52fe..e926ead 100644 --- a/src/pynecore/cli/commands/__init__.py +++ b/src/pynecore/cli/commands/__init__.py @@ -7,9 +7,9 @@ from ..utils.error_hook import setup_global_error_logging # Import commands -from . import run, data, compile, benchmark, debug +from . 
import run, data, compile, benchmark, debug, plugin -__all__ = ['run', 'data', 'compile', 'benchmark', 'debug'] +__all__ = ['run', 'data', 'compile', 'benchmark', 'debug', 'plugin'] # Conditional import for private TradingView test command _tv_path = Path(__file__).parent / "tv.py" diff --git a/src/pynecore/cli/commands/plugin.py b/src/pynecore/cli/commands/plugin.py new file mode 100644 index 0000000..0cc4e0e --- /dev/null +++ b/src/pynecore/cli/commands/plugin.py @@ -0,0 +1,99 @@ +from typer import Typer, Option, Argument, Exit, secho, colors + +from ..app import app + +__all__ = [] + +app_plugin = Typer(help="Plugin management commands") +app.add_typer(app_plugin, name="plugin") + +# Plugin group names and display labels +PLUGIN_GROUPS = { + 'provider': 'pyne.provider', +} + + +@app_plugin.command("list") +def list_plugins( + plugin_type: str = Option( + None, '--type', '-t', + help="Filter by plugin type (e.g. 'provider')", + ), +): + """ + List all installed PyneCore plugins. + """ + from ...core.plugin import discover_plugins + + if plugin_type and plugin_type not in PLUGIN_GROUPS: + secho(f"Unknown plugin type: {plugin_type}", fg=colors.RED, err=True) + secho(f"Available types: {', '.join(PLUGIN_GROUPS)}", fg=colors.YELLOW, err=True) + raise Exit(1) + + groups = {plugin_type: PLUGIN_GROUPS[plugin_type]} if plugin_type else PLUGIN_GROUPS + found_any = False + + for type_name, group in groups.items(): + plugins = discover_plugins(group) + if not plugins: + continue + found_any = True + secho(f"\n {type_name} plugins:", fg=colors.BRIGHT_WHITE, bold=True) + for name, ep in sorted(plugins.items()): + try: + cls = ep.load() + display_name = getattr(cls, 'plugin_name', '') or name + version = getattr(cls, 'plugin_version', '') + version_str = f" v{version}" if version and version != "0.0.0" else "" + secho(f" {name:20s} {display_name}{version_str} ({ep.value})") + except Exception as e: + secho(f" {name:20s} (failed to load: {e})", fg=colors.RED) + + if not 
found_any: + secho("No plugins installed.", fg=colors.YELLOW) + + secho("") + + +@app_plugin.command("info") +def plugin_info( + name: str = Argument(..., help="Plugin name (e.g. 'ccxt', 'capitalcom')"), +): + """ + Show detailed information about an installed plugin. + """ + from ...core.plugin import discover_plugins + + for type_name, group in PLUGIN_GROUPS.items(): + plugins = discover_plugins(group) + if name in plugins: + ep = plugins[name] + try: + cls = ep.load() + except Exception as e: + secho(f"Failed to load plugin '{name}': {e}", fg=colors.RED, err=True) + raise Exit(1) + + secho(f"\n Plugin: {name}", fg=colors.BRIGHT_WHITE, bold=True) + secho(f" Type: {type_name}") + secho(f" Display name: {getattr(cls, 'plugin_name', '') or name}") + secho(f" Version: {getattr(cls, 'plugin_version', 'unknown')}") + secho(f" Entry point: {ep.value}") + secho(f" Min PyneCore: {getattr(cls, 'min_pynecore_version', '') or 'any'}") + + config_cls = getattr(cls, 'Config', None) + if config_cls: + import dataclasses + fields = dataclasses.fields(config_cls) + if fields: + secho(f"\n Config fields:") + for f in fields: + default = f"= {f.default!r}" if f.default is not dataclasses.MISSING else "(required)" + secho(f" {f.name:20s} {default}") + + secho("") + return + + secho(f"Plugin '{name}' not found.", fg=colors.RED, err=True) + secho(f"Install it with: pip install pynecore-{name}", fg=colors.YELLOW, err=True) + raise Exit(1) diff --git a/tests/t00_pynecore/data/test_003_capitalcom_provider.py b/tests/t00_pynecore/data/test_003_capitalcom_provider.py deleted file mode 100644 index b95867b..0000000 --- a/tests/t00_pynecore/data/test_003_capitalcom_provider.py +++ /dev/null @@ -1,288 +0,0 @@ -""" -@pyne -""" -from pathlib import Path -import pytest -import tomllib -import json -import logging -from datetime import datetime, UTC -import os -import tempfile - -from pynecore.providers.capitalcom import CapitalComProvider -from pynecore.core.ohlcv_file import OHLCVReader -from 
pynecore.cli.app import app_state - - -def main(): - """ - Dummy main function to be a valid Pyne script - """ - pass - - -def __test_capitalcom_timeframe_conversion__(): - """Test timeframe conversion for CapitalCom provider""" - # TradingView to CapitalCom conversion - according to the actual implementation - assert CapitalComProvider.to_exchange_timeframe("1") == "MINUTE" - assert CapitalComProvider.to_exchange_timeframe("5") == "MINUTE_5" - assert CapitalComProvider.to_exchange_timeframe("15") == "MINUTE_15" - assert CapitalComProvider.to_exchange_timeframe("30") == "MINUTE_30" - assert CapitalComProvider.to_exchange_timeframe("60") == "HOUR" - assert CapitalComProvider.to_exchange_timeframe("240") == "HOUR_4" - assert CapitalComProvider.to_exchange_timeframe("1D") == "DAY" - assert CapitalComProvider.to_exchange_timeframe("1W") == "WEEK" - with pytest.raises(ValueError): - CapitalComProvider.to_exchange_timeframe("1M") # Not directly supported in TIMEFRAMES - - # CapitalCom to TradingView conversion - according to the actual implementation - assert CapitalComProvider.to_tradingview_timeframe("MINUTE") == "1" - assert CapitalComProvider.to_tradingview_timeframe("MINUTE_5") == "5" - assert CapitalComProvider.to_tradingview_timeframe("MINUTE_15") == "15" - assert CapitalComProvider.to_tradingview_timeframe("MINUTE_30") == "30" - assert CapitalComProvider.to_tradingview_timeframe("HOUR") == "60" - assert CapitalComProvider.to_tradingview_timeframe("HOUR_4") == "240" - assert CapitalComProvider.to_tradingview_timeframe("DAY") == "1D" - assert CapitalComProvider.to_tradingview_timeframe("WEEK") == "1W" - - # Test invalid formats - with pytest.raises(ValueError): - CapitalComProvider.to_exchange_timeframe("invalid") - - with pytest.raises(ValueError): - CapitalComProvider.to_tradingview_timeframe("invalid") - - -def __test_capitalcom_provider_path_handling__(tmp_path): - """Test path handling in CapitalCom provider""" - data_dir = tmp_path / "data" - data_dir.mkdir() 
- - # Test path construction for different symbol formats - path = CapitalComProvider.get_ohlcv_path("CAPITALCOM:US500", "1D", data_dir) - - # Verify paths are created correctly - assert "capitalcom_CAPITALCOM_US500_1D.ohlcv" in str(path) - - -def __test_capitalcom_market_hours__(): - """Test market hours for CapitalCom provider - simplified test""" - # Since this test is quite complex due to internal provider implementation - # and would need multiple mocks, we'll create a simple test to check if - # the SymInfoInterval and SymInfoSession classes exist and can be instantiated - - from datetime import time - from pynecore.core.syminfo import SymInfoInterval, SymInfoSession - - # Create a sample interval and session entries - interval = SymInfoInterval(day=0, start=time(hour=9, minute=30), end=time(hour=16, minute=0)) - session_start = SymInfoSession(day=0, time=time(hour=9, minute=30)) - session_end = SymInfoSession(day=0, time=time(hour=16, minute=0)) - - # Verify they have the correct attributes - assert hasattr(interval, 'day') - assert hasattr(interval, 'start') - assert hasattr(interval, 'end') - assert isinstance(interval.start, time) - assert isinstance(interval.end, time) - - assert hasattr(session_start, 'day') - assert hasattr(session_start, 'time') - assert isinstance(session_start.time, time) - - assert hasattr(session_end, 'day') - assert hasattr(session_end, 'time') - assert isinstance(session_end.time, time) - - -def __test_capitalcom_known_limits__(): - """Test max candles limit for CapitalCom""" - # The limit is hard-coded in the get_historical_prices method as default parameter - assert 1000 == 1000 # This is just a placeholder, actual limit is in the function parameter - - -def __test_capitalcom_real_data_download__(tmp_path): - """Test CapitalCom provider with real data download if configuration exists""" - # Disable debug logging to reduce output noise - logging.getLogger().setLevel(logging.WARNING) - - # Skip this test if we can't import 
necessary libraries - try: - import httpx # noqa - from Crypto.PublicKey import RSA # From pycryptodome - except ImportError: - pytest.skip("Either httpx or pycryptodome libraries not available") - - # Find workdir using AppState._find_workdir to get config - workdir = app_state._find_workdir() # noqa - - # Check if config directory exists - config_dir = workdir / "config" - if not config_dir.exists() or not config_dir.is_dir(): - pytest.skip("No config directory found in workdir") - - # Check if providers.toml exists - providers_toml = config_dir / "providers.toml" - if not providers_toml.exists() or not providers_toml.is_file(): - pytest.skip("No providers.toml found in config directory") - - # Check if capitalcom section exists in providers.toml - try: - with open(providers_toml, 'rb') as f: - config = tomllib.load(f) - if 'capitalcom' not in config: - pytest.skip("No capitalcom section found in providers.toml") - - # Verify minimal required configuration exists - required_keys = ['user_email', 'api_key', 'api_password'] - for key in required_keys: - if key not in config['capitalcom'] or not config['capitalcom'][key]: - pytest.skip(f"Missing required key '{key}' in capitalcom configuration") - except Exception as e: - pytest.skip(f"Error reading providers.toml: {str(e)}") - - # From this point, we have confirmed that configuration exists, so we should not skip tests - # but let them fail if there are errors - - # Create temporary data directory for the test - data_dir = tmp_path / "data" - data_dir.mkdir() - - # Define test data - use EURUSD without any separator as specified in CLI command - symbol = "EURUSD" # Forex pair without separator, as used in CLI - timeframe = "15" # 15-minute timeframe as in the CLI example - - # Use the specified date range to avoid weekends - time_from = datetime(2025, 1, 6, tzinfo=UTC) # Monday - time_to = datetime(2025, 1, 10, tzinfo=UTC) # Friday - - # Enable saving reference data to a file - # Set to True to force saving test 
data, even if reference file exists - force_save_reference = False - - # Check if a reference file with expected data already exists - test_data_path = Path(__file__).parent / "capitalcom_test_data.json" - expected_data = None - - if test_data_path.exists() and not force_save_reference: - try: - with open(test_data_path, 'r') as f: - expected_data = json.load(f) - print(f"Loaded reference data from {test_data_path}") - except Exception as e: - print(f"Could not load reference data: {e}") - expected_data = None - - # Create provider instance - provider = CapitalComProvider( - symbol=symbol, - timeframe=timeframe, - ohlv_dir=data_dir, - config_dir=config_dir - ) - - # Use a custom download function that properly creates OHLCV objects - # noinspection PyShadowingNames - def custom_download(provider_instance, time_from, time_to): - # Start downloading - provider_instance.download_ohlcv(time_from, time_to, on_progress=None) - - try: - # Use the provider as a context manager to ensure file is opened - with provider: - custom_download(provider, time_from, time_to) - - # Check that the OHLCV file was created with the correct filename format - ohlcv_path = provider.get_ohlcv_path(symbol, timeframe, data_dir) - assert ohlcv_path.exists(), f"OHLCV file was not created at {ohlcv_path}" - - # The path should include the correct filename format (no separator in the symbol) - assert f"capitalcom_{symbol}_{timeframe}.ohlcv" in str(ohlcv_path) - - # Verify the content of the file matches expected data - with OHLCVReader(str(ohlcv_path)) as reader: - # Check that we have data - assert reader.size > 0, "No data was downloaded" - - # Read candles - actual_candles = list(reader) - - print(f"\nDownloaded {len(actual_candles)} candles from CapitalCom API") - - # Save the downloaded data for future reference if needed - if expected_data is None or force_save_reference: - # Save candles to temporary file for further analysis - candle_data = [] - for candle in actual_candles: - 
candle_data.append({ - "timestamp": candle.timestamp, - "datetime": datetime.fromtimestamp(candle.timestamp, UTC).isoformat(), - "open": float(candle.open), - "high": float(candle.high), - "low": float(candle.low), - "close": float(candle.close), - "volume": float(candle.volume) - }) - - # Create a temporary file in the system's temp directory - with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.json') as temp_file: - temp_data_file = temp_file.name - json.dump(candle_data, temp_file, indent=2) - - try: - # Verify the file was created and has content - file_size = os.path.getsize(temp_data_file) - - # Force print this information with the highest priority - print("\n" + "=" * 80) - print(f"TEST DATA SUCCESSFULLY SAVED TO: {temp_data_file}") - print(f"File size: {file_size} bytes, Number of candles: {len(candle_data)}") - print("Please copy this file to the test directory with the name " - "'capitalcom_test_data.json' for future reference.") - print("=" * 80 + "\n") - - except Exception as e: - # If there's any error with file operations, print it clearly - print(f"\nERROR SAVING TEST DATA: {str(e)}") - - # Basic validation - timestamps should be in ascending order - for i in range(1, len(actual_candles)): - assert actual_candles[i].timestamp > actual_candles[i - 1].timestamp, \ - "Timestamps not in ascending order" - - else: - # Compare the actual results with the expected data - # We'll check the first 10 candles as they should be stable - check_count = min(10, len(actual_candles), len(expected_data)) - - # Now compare with expected data - print(f"Comparing first {check_count} candles with expected data") - for i in range(check_count): - actual = actual_candles[i] - expected = expected_data[i] - - # Compare timestamp - timestamps should be exactly the same - assert actual.timestamp == expected["timestamp"], \ - f"Timestamp mismatch at candle {i}: expected {expected['timestamp']}, got {actual.timestamp}" - - # Compare OHLCV values with a small 
tolerance for floating point differences - tolerance = 0.0001 - assert abs(actual.open - expected["open"]) < tolerance, \ - f"Open price mismatch at candle {i}: expected {expected['open']}, got {actual.open}" - assert abs(actual.high - expected["high"]) < tolerance, \ - f"High price mismatch at candle {i}: expected {expected['high']}, got {actual.high}" - assert abs(actual.low - expected["low"]) < tolerance, \ - f"Low price mismatch at candle {i}: expected {expected['low']}, got {actual.low}" - assert abs(actual.close - expected["close"]) < tolerance, \ - f"Close price mismatch at candle {i}: expected {expected['close']}, got {actual.close}" - - # Volume might have slightly larger variation - volume_tolerance = 1.0 # Allow 1 unit difference in volume - assert abs(actual.volume - expected["volume"]) <= volume_tolerance, \ - f"Volume mismatch at candle {i}: expected {expected['volume']}, got {actual.volume}" - - print("Data validation successful - downloaded data matches expected values") - - finally: - pass # No need to restore any method since we're not patching anymore From bbfe64196633cbeb353af531ac46991f1d354e3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 30 Mar 2026 12:08:11 +0200 Subject: [PATCH 03/64] chore: update plugin naming convention and install hints Official plugins: pynesys-pynecore-{name}, 3rd party: pynecore-{name} --- src/pynecore/cli/commands/plugin.py | 3 ++- src/pynecore/core/plugin.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/pynecore/cli/commands/plugin.py b/src/pynecore/cli/commands/plugin.py index 0cc4e0e..c94af78 100644 --- a/src/pynecore/cli/commands/plugin.py +++ b/src/pynecore/cli/commands/plugin.py @@ -95,5 +95,6 @@ def plugin_info( return secho(f"Plugin '{name}' not found.", fg=colors.RED, err=True) - secho(f"Install it with: pip install pynecore-{name}", fg=colors.YELLOW, err=True) + secho(f"Install it with: pip install pynesys-pynecore-{name} (official)", 
fg=colors.YELLOW, err=True) + secho(f" or: pip install pynecore-{name} (3rd party)", fg=colors.YELLOW, err=True) raise Exit(1) diff --git a/src/pynecore/core/plugin.py b/src/pynecore/core/plugin.py index 471bd59..b694097 100644 --- a/src/pynecore/core/plugin.py +++ b/src/pynecore/core/plugin.py @@ -53,7 +53,8 @@ def load_plugin(group: str, name: str) -> type: short_group = group.replace("pyne.", "") raise PluginNotFoundError( f"Plugin '{name}' not found for group '{group}'. " - f"Install it with: pip install pynecore-{name}\n" + f"Install it with: pip install pynesys-pynecore-{name} (official) " + f"or: pip install pynecore-{name} (3rd party)\n" f"Available {short_group} plugins: {', '.join(sorted(eps)) or '(none)'}" ) return eps[name].load() From f08f0879c828475d42fee18b663a140f163f103d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 30 Mar 2026 12:20:45 +0200 Subject: [PATCH 04/64] refactor: move plugin configs to config/plugins/ subdirectory Separates credentials from other config files for cleaner organization. 
--- src/pynecore/cli/commands/__init__.py | 5 ++++- src/pynecore/cli/commands/data.py | 4 ++-- src/pynecore/providers/ccxt.py | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/pynecore/cli/commands/__init__.py b/src/pynecore/cli/commands/__init__.py index e926ead..9b5f0b3 100644 --- a/src/pynecore/cli/commands/__init__.py +++ b/src/pynecore/cli/commands/__init__.py @@ -231,8 +231,11 @@ def main( from ...core.plugin import discover_plugins from ...core.config import ensure_config + plugins_dir = config_dir / 'plugins' + plugins_dir.mkdir(exist_ok=True) + for name, ep in discover_plugins('pyne.provider').items(): - config_path = config_dir / f'{name}.toml' + config_path = plugins_dir / f'{name}.toml' if not config_path.exists() or recreate_provider_config: try: provider_cls = ep.load() diff --git a/src/pynecore/cli/commands/data.py b/src/pynecore/cli/commands/data.py index fc35b3a..a284c7b 100644 --- a/src/pynecore/cli/commands/data.py +++ b/src/pynecore/cli/commands/data.py @@ -138,7 +138,7 @@ def download( config = None if hasattr(provider_class, 'Config') and provider_class.Config is not None: config = ensure_config(provider_class.Config, - app_state.config_dir / f'{provider.value}.toml') + app_state.config_dir / 'plugins' / f'{provider.value}.toml') with Progress(SpinnerColumn(), TextColumn("{task.description}"), transient=True) as progress: progress.add_task(description="Fetching market data...", total=None) provider_instance: Provider = provider_class(symbol=symbol, config=config) @@ -157,7 +157,7 @@ def download( config = None if hasattr(provider_class, 'Config') and provider_class.Config is not None: config = ensure_config(provider_class.Config, - app_state.config_dir / f'{provider.value}.toml') + app_state.config_dir / 'plugins' / f'{provider.value}.toml') provider_instance: Provider = provider_class(symbol=symbol, timeframe=timeframe, ohlv_dir=app_state.data_dir, config=config) diff --git a/src/pynecore/providers/ccxt.py 
b/src/pynecore/providers/ccxt.py index d4257a4..9c37a7e 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -179,7 +179,7 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, config_dir = None if config_dir: - toml_path = config_dir / 'ccxt.toml' + toml_path = config_dir / 'plugins' / 'ccxt.toml' if toml_path.exists(): with open(toml_path, 'rb') as f: raw_toml = tomllib.load(f) From 6bb7bce7a24b6c7dc51fe30c8391aeee4b0d942e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 30 Mar 2026 12:27:14 +0200 Subject: [PATCH 05/64] chore: bump version to 6.5.0 for plugin system feature --- pyproject.toml | 2 +- src/pynecore/cli/commands/plugin.py | 2 +- src/pynecore/providers/ccxt.py | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4b9c96f..40e1756 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ egg_info.egg_base = "build" [project] name = "pynesys-pynecore" -version = "6.4.1" # PineVersion.Major.Minor +version = "6.5.0" # PineVersion.Major.Minor description = "Python based Pine Script like runtime and API" authors = [{ name = "PYNESYS LLC", email = "hello@pynesys.com" }] readme = "README.md" diff --git a/src/pynecore/cli/commands/plugin.py b/src/pynecore/cli/commands/plugin.py index c94af78..2f870ce 100644 --- a/src/pynecore/cli/commands/plugin.py +++ b/src/pynecore/cli/commands/plugin.py @@ -86,7 +86,7 @@ def plugin_info( import dataclasses fields = dataclasses.fields(config_cls) if fields: - secho(f"\n Config fields:") + secho(f"\n Config fields (defaults):") for f in fields: default = f"= {f.default!r}" if f.default is not dataclasses.MISSING else "(required)" secho(f" {f.name:20s} {default}") diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index 9c37a7e..8de9a2f 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -62,6 +62,7 @@ class 
CCXTProvider(Provider): plugin_name = "CCXT" plugin_version = "1.0.0" + min_pynecore_version = "6.5" Config = CCXTConfig @classmethod From 1f470aa715763e186b506f7af4ac363250b19c93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 30 Mar 2026 15:00:19 +0200 Subject: [PATCH 06/64] refactor: unify plugin system with single entry point and Plugin base class MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Introduce Plugin base class that all plugins inherit from (Provider, Extension, etc.). Replace multiple entry point groups (pyne.provider) with a single pyne.plugin group — capabilities determined by class hierarchy via issubclass(). Plugin metadata (version, description, min_pynecore) now read from pyproject.toml via importlib.metadata instead of class attributes. --- pyproject.toml | 2 +- src/pynecore/cli/commands/__init__.py | 10 +- src/pynecore/cli/commands/data.py | 16 +- src/pynecore/cli/commands/plugin.py | 164 ++++++++++++--------- src/pynecore/core/config.py | 7 +- src/pynecore/core/plugin.py | 84 +++++++---- src/pynecore/core/plugin_base.py | 55 +++++++ src/pynecore/providers/__init__.py | 3 - src/pynecore/providers/ccxt.py | 2 - src/pynecore/providers/provider.py | 15 +- tests/t00_pynecore/core/test_014_plugin.py | 48 ++++-- 11 files changed, 269 insertions(+), 137 deletions(-) create mode 100644 src/pynecore/core/plugin_base.py diff --git a/pyproject.toml b/pyproject.toml index 40e1756..813807a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,7 +62,7 @@ optional-dependencies.capitalcom = ["httpx", "pycryptodome"] scripts.pyne = "pynecore.cli:app" -[project.entry-points."pyne.provider"] +[project.entry-points."pyne.plugin"] ccxt = "pynecore.providers.ccxt:CCXTProvider" diff --git a/src/pynecore/cli/commands/__init__.py b/src/pynecore/cli/commands/__init__.py index 9b5f0b3..759fb20 100644 --- a/src/pynecore/cli/commands/__init__.py +++ b/src/pynecore/cli/commands/__init__.py @@ 
-227,20 +227,20 @@ def main( config_dir = Path(workdir) / 'config' config_dir.mkdir(exist_ok=True) - # Generate per-plugin config files for all installed providers + # Generate per-plugin config files for all installed plugins from ...core.plugin import discover_plugins from ...core.config import ensure_config plugins_dir = config_dir / 'plugins' plugins_dir.mkdir(exist_ok=True) - for name, ep in discover_plugins('pyne.provider').items(): + for name, ep in discover_plugins().items(): config_path = plugins_dir / f'{name}.toml' if not config_path.exists() or recreate_provider_config: try: - provider_cls = ep.load() - if hasattr(provider_cls, 'Config') and provider_cls.Config is not None: - ensure_config(provider_cls.Config, config_path) + plugin_cls = ep.load() + if hasattr(plugin_cls, 'Config') and plugin_cls.Config is not None: + ensure_config(plugin_cls.Config, config_path) except Exception: pass # Don't crash CLI if a plugin is broken diff --git a/src/pynecore/cli/commands/data.py b/src/pynecore/cli/commands/data.py index a284c7b..099fd7f 100644 --- a/src/pynecore/cli/commands/data.py +++ b/src/pynecore/cli/commands/data.py @@ -11,7 +11,7 @@ TimeElapsedColumn, TimeRemainingColumn) from ..app import app, app_state -from ...core.plugin import get_available_plugin_names, load_plugin +from ...core.plugin import discover_plugins, load_plugin from ...providers.provider import Provider from ...lib.timeframe import in_seconds from ...core.data_converter import DataConverter, SupportedFormats as InputFormats @@ -38,9 +38,17 @@ class AvailableProvidersEnum(Enum): # DateOrDays is either a datetime or a number of days DateOrDays = str - # Create an enum from available providers (discovered via entry_points) + # Create an enum from plugins that are Provider subclasses + _provider_names = [] + for _name, _ep in discover_plugins().items(): + try: + _cls = _ep.load() + if isinstance(_cls, type) and issubclass(_cls, Provider): + _provider_names.append(_name) + except Exception: + 
pass AvailableProvidersEnum = Enum('Provider', { - name.upper(): name.lower() for name in get_available_plugin_names('pyne.provider') + name.upper(): name.lower() for name in sorted(_provider_names) }) @@ -129,7 +137,7 @@ def download( Download historical OHLCV data """ # Load provider class via plugin system - provider_class = load_plugin('pyne.provider', provider.value) + provider_class = load_plugin(provider.value) try: # If list_symbols is True, we show the available symbols then exit diff --git a/src/pynecore/cli/commands/plugin.py b/src/pynecore/cli/commands/plugin.py index 2f870ce..91d1fd9 100644 --- a/src/pynecore/cli/commands/plugin.py +++ b/src/pynecore/cli/commands/plugin.py @@ -7,52 +7,80 @@ app_plugin = Typer(help="Plugin management commands") app.add_typer(app_plugin, name="plugin") -# Plugin group names and display labels -PLUGIN_GROUPS = { - 'provider': 'pyne.provider', -} + +def _get_capabilities(cls: type) -> list[str]: + """Determine plugin capabilities from its class hierarchy.""" + from ...providers.provider import Provider + + caps = [] + if isinstance(cls, type) and issubclass(cls, Provider): + caps.append('provider') + # Future: Extension, LiveProvider checks will go here + if cls.cli() is not None: + caps.append('cli') + if cls.cli_params('run'): + caps.append('params') + return caps @app_plugin.command("list") def list_plugins( plugin_type: str = Option( None, '--type', '-t', - help="Filter by plugin type (e.g. 'provider')", + help="Filter by capability (e.g. 'provider', 'cli')", ), ): """ List all installed PyneCore plugins. 
""" - from ...core.plugin import discover_plugins - - if plugin_type and plugin_type not in PLUGIN_GROUPS: - secho(f"Unknown plugin type: {plugin_type}", fg=colors.RED, err=True) - secho(f"Available types: {', '.join(PLUGIN_GROUPS)}", fg=colors.YELLOW, err=True) - raise Exit(1) + from ...core.plugin import discover_plugins, get_plugin_metadata - groups = {plugin_type: PLUGIN_GROUPS[plugin_type]} if plugin_type else PLUGIN_GROUPS - found_any = False - - for type_name, group in groups.items(): - plugins = discover_plugins(group) - if not plugins: - continue - found_any = True - secho(f"\n {type_name} plugins:", fg=colors.BRIGHT_WHITE, bold=True) - for name, ep in sorted(plugins.items()): - try: - cls = ep.load() - display_name = getattr(cls, 'plugin_name', '') or name - version = getattr(cls, 'plugin_version', '') - version_str = f" v{version}" if version and version != "0.0.0" else "" - secho(f" {name:20s} {display_name}{version_str} ({ep.value})") - except Exception as e: - secho(f" {name:20s} (failed to load: {e})", fg=colors.RED) - - if not found_any: + plugins = discover_plugins() + if not plugins: secho("No plugins installed.", fg=colors.YELLOW) + secho("") + return + + # Collect data first to calculate column widths + rows = [] + errors = [] + for name, ep in sorted(plugins.items()): + try: + cls = ep.load() + meta = get_plugin_metadata(ep) + caps = _get_capabilities(cls) + + if plugin_type and plugin_type not in caps: + continue + + display_name = getattr(cls, 'plugin_name', '') or name + version = f"v{meta['version']}" if meta['version'] else '' + caps_str = ', '.join(caps) if caps else 'library' + rows.append((name, display_name, version, caps_str)) + except Exception as e: + errors.append((name, str(e))) + + if not rows and not errors: + secho("No plugins found for the given filter.", fg=colors.YELLOW) + secho("") + return + + # Calculate column widths + w_name = max((len(r[0]) for r in rows), default=0) + w_disp = max((len(r[1]) for r in rows), default=0) 
+ w_ver = max((len(r[2]) for r in rows), default=0) + + secho(f"\n Installed plugins:\n", fg=colors.BRIGHT_WHITE, bold=True) + + for name, display_name, version, caps_str in rows: + secho(f" {name:<{w_name}} {display_name:<{w_disp}} {version:<{w_ver}} [{caps_str}]") + + for name, error in errors: + secho(f" {name:<{w_name}} (failed to load: {error})", fg=colors.RED) secho("") + secho(" Use 'pyne plugin info ' for details.", dim=True) + secho("") @app_plugin.command("info") @@ -62,39 +90,41 @@ def plugin_info( """ Show detailed information about an installed plugin. """ - from ...core.plugin import discover_plugins - - for type_name, group in PLUGIN_GROUPS.items(): - plugins = discover_plugins(group) - if name in plugins: - ep = plugins[name] - try: - cls = ep.load() - except Exception as e: - secho(f"Failed to load plugin '{name}': {e}", fg=colors.RED, err=True) - raise Exit(1) - - secho(f"\n Plugin: {name}", fg=colors.BRIGHT_WHITE, bold=True) - secho(f" Type: {type_name}") - secho(f" Display name: {getattr(cls, 'plugin_name', '') or name}") - secho(f" Version: {getattr(cls, 'plugin_version', 'unknown')}") - secho(f" Entry point: {ep.value}") - secho(f" Min PyneCore: {getattr(cls, 'min_pynecore_version', '') or 'any'}") - - config_cls = getattr(cls, 'Config', None) - if config_cls: - import dataclasses - fields = dataclasses.fields(config_cls) - if fields: - secho(f"\n Config fields (defaults):") - for f in fields: - default = f"= {f.default!r}" if f.default is not dataclasses.MISSING else "(required)" - secho(f" {f.name:20s} {default}") - - secho("") - return - - secho(f"Plugin '{name}' not found.", fg=colors.RED, err=True) - secho(f"Install it with: pip install pynesys-pynecore-{name} (official)", fg=colors.YELLOW, err=True) - secho(f" or: pip install pynecore-{name} (3rd party)", fg=colors.YELLOW, err=True) - raise Exit(1) + from ...core.plugin import discover_plugins, get_plugin_metadata + import dataclasses + + plugins = discover_plugins() + if name not in 
plugins: + secho(f"Plugin '{name}' not found.", fg=colors.RED, err=True) + secho(f"Install it with: pip install pynesys-pynecore-{name} (official)", fg=colors.YELLOW, err=True) + secho(f" or: pip install pynecore-{name} (3rd party)", fg=colors.YELLOW, err=True) + raise Exit(1) + + ep = plugins[name] + try: + cls = ep.load() + except Exception as e: + secho(f"Failed to load plugin '{name}': {e}", fg=colors.RED, err=True) + raise Exit(1) + + meta = get_plugin_metadata(ep) + caps = _get_capabilities(cls) + + secho(f"\n Plugin: {name}", fg=colors.BRIGHT_WHITE, bold=True) + secho(f" Package: {meta['package']}") + secho(f" Version: {meta['version'] or 'unknown'}") + secho(f" Description: {meta['description'] or '-'}") + secho(f" Min PyneCore: {'>=' + meta['min_pynecore'] if meta['min_pynecore'] else 'any'}") + secho(f" Capabilities: {', '.join(caps) if caps else 'library'}") + secho(f" Entry point: {ep.value}") + + config_cls = getattr(cls, 'Config', None) + if config_cls and dataclasses.is_dataclass(config_cls): + fields = dataclasses.fields(config_cls) + if fields: + secho(f"\n Config fields (defaults):") + for f in fields: + default = f"= {f.default!r}" if f.default is not dataclasses.MISSING else "(required)" + secho(f" {f.name:20s} {default}") + + secho("") diff --git a/src/pynecore/core/config.py b/src/pynecore/core/config.py index ece5069..0d5f1ce 100644 --- a/src/pynecore/core/config.py +++ b/src/pynecore/core/config.py @@ -18,6 +18,7 @@ import textwrap import tomllib from pathlib import Path +from typing import Any, cast def format_value(value: str | int | float | bool) -> str: @@ -115,7 +116,7 @@ def generate_toml( stripped = doc_line.strip() lines.append(f"# {stripped}" if stripped else "#") - for f in dataclasses.fields(config_cls): + for f in dataclasses.fields(cast(Any, config_cls)): name = f.name default = f.default @@ -194,7 +195,7 @@ def _parse_existing(config_path: Path, config_cls: type) -> tuple[dict, str]: parsed = tomllib.loads(content) - 
field_names = {f.name for f in dataclasses.fields(config_cls)} + field_names = {f.name for f in dataclasses.fields(cast(Any, config_cls))} user_values: dict = {} for key, value in parsed.items(): @@ -237,7 +238,7 @@ def _create_instance(config_cls: type, user_values: dict | None): return config_cls() kwargs: dict = {} - for f in dataclasses.fields(config_cls): + for f in dataclasses.fields(cast(Any, config_cls)): if f.name not in user_values: continue value = user_values[f.name] diff --git a/src/pynecore/core/plugin.py b/src/pynecore/core/plugin.py index b694097..a99ad96 100644 --- a/src/pynecore/core/plugin.py +++ b/src/pynecore/core/plugin.py @@ -1,70 +1,104 @@ """ Plugin discovery and loading via Python entry points. -All PyneCore plugins (providers, extensions, CLI commands) are discovered -through :pep:`631` entry points declared in ``pyproject.toml``. This module -provides a thin, general-purpose API over :mod:`importlib.metadata`. +All PyneCore plugins register under a single entry point group +(``pyne.plugin``) in their ``pyproject.toml``. The plugin class hierarchy +determines capabilities (Provider, Extension, etc.). Example ``pyproject.toml`` for a provider plugin:: - [project.entry-points."pyne.provider"] + [project.entry-points."pyne.plugin"] myexchange = "mypackage:MyExchangeProvider" -Discovery example:: +Discovery:: - plugins = discover_plugins("pyne.provider") - # {"ccxt": , "myexchange": } + plugins = discover_plugins() + cls = load_plugin("capitalcom") - cls = load_plugin("pyne.provider", "ccxt") - # +Metadata (name, version, description) is read from the package's +``pyproject.toml`` via :mod:`importlib.metadata`, not from class attributes. 
""" +import re + # noinspection PyProtectedMember from importlib.metadata import entry_points, EntryPoint +PLUGIN_GROUP = 'pyne.plugin' + class PluginNotFoundError(ImportError): """Raised when a requested plugin is not installed.""" -def discover_plugins(group: str) -> dict[str, EntryPoint]: +def discover_plugins() -> dict[str, EntryPoint]: """ - Return all installed entry points for a plugin group. + Return all installed plugins. - :param group: Entry point group name (e.g. ``"pyne.provider"``). :return: Mapping of plugin name to its :class:`EntryPoint`. """ - return {ep.name: ep for ep in entry_points(group=group)} + return {ep.name: ep for ep in entry_points(group=PLUGIN_GROUP)} -def load_plugin(group: str, name: str) -> type: +def load_plugin(name: str) -> type: """ Load and return a plugin class by name. - The actual import happens lazily — only when this function is called. - - :param group: Entry point group name (e.g. ``"pyne.provider"``). :param name: Plugin name as declared in the entry point. :return: The plugin class. :raises PluginNotFoundError: If no plugin with the given name is installed. """ - eps = discover_plugins(group) + eps = discover_plugins() if name not in eps: - short_group = group.replace("pyne.", "") raise PluginNotFoundError( - f"Plugin '{name}' not found for group '{group}'. " + f"Plugin '{name}' not found. " f"Install it with: pip install pynesys-pynecore-{name} (official) " f"or: pip install pynecore-{name} (3rd party)\n" - f"Available {short_group} plugins: {', '.join(sorted(eps)) or '(none)'}" + f"Available plugins: {', '.join(sorted(eps)) or '(none)'}" ) return eps[name].load() -def get_available_plugin_names(group: str) -> list[str]: +def get_available_plugin_names() -> list[str]: """ - Return a sorted list of all available plugin names for a group. + Return a sorted list of all available plugin names. - :param group: Entry point group name (e.g. ``"pyne.provider"``). :return: Sorted list of plugin names. 
""" - return sorted(discover_plugins(group)) + return sorted(discover_plugins()) + + +def get_plugin_metadata(ep: EntryPoint) -> dict[str, str]: + """ + Extract plugin metadata from its package distribution. + + :param ep: The entry point of the plugin. + :return: Dict with ``name``, ``version``, ``description``, ``min_pynecore``. + """ + meta = ep.dist.metadata + return { + 'name': ep.name, + 'package': meta['Name'] or '', + 'version': meta['Version'] or '', + 'description': meta['Summary'] or '', + 'min_pynecore': _parse_min_pynecore(ep), + } + + +def _parse_min_pynecore(ep: EntryPoint) -> str: + """ + Extract the minimum PyneCore version from the package dependencies. + + Parses ``pynesys-pynecore>=X.Y`` from the ``Requires-Dist`` list. + + :param ep: The entry point of the plugin. + :return: Version string (e.g. ``"6.5"``) or ``""`` if not found. + """ + requires = ep.dist.requires + if not requires: + return '' + for req in requires: + m = re.match(r'pynesys-pynecore(?:\[.*?])?>=([.\d]+)', req) + if m: + return m.group(1) + return '' diff --git a/src/pynecore/core/plugin_base.py b/src/pynecore/core/plugin_base.py new file mode 100644 index 0000000..83c4ff8 --- /dev/null +++ b/src/pynecore/core/plugin_base.py @@ -0,0 +1,55 @@ +""" +Base class for all PyneCore plugins. + +Every plugin inherits from :class:`Plugin` (directly or via :class:`Provider`, +:class:`Extension`, etc.). The class hierarchy determines capabilities: + +- ``Provider(Plugin)`` — offline OHLCV data provider +- ``Extension(Plugin)`` — hook-based script extension +- ``LiveProvider(Plugin)`` — WebSocket/streaming data +- ``Plugin`` directly — CLI-only plugin + +CLI methods (``cli()``, ``cli_params()``) are optional with sensible defaults. +Plugin metadata (name, version) comes from the package's ``pyproject.toml`` +via :mod:`importlib.metadata`, not from class attributes. 
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + import click + import typer + + +class Plugin: + """Base class for all PyneCore plugins.""" + + Config: type | None = None + """Override with a ``@dataclass`` for plugin configuration.""" + + plugin_name: str = "" + """Optional display name override. If empty, the entry point name is used.""" + + @staticmethod + def cli() -> typer.Typer | None: + """ + Return a Typer app for plugin subcommands. + + Override to add commands like ``pyne ``. + Return ``None`` (default) if the plugin has no CLI commands. + """ + return None + + @staticmethod + def cli_params(command_name: str) -> list[click.Parameter]: + """ + Return extra parameters for an existing command. + + Override to inject flags/options into commands like ``pyne run``. + Return ``[]`` (default) if the plugin has no parameter hooks. + + :param command_name: The command to extend (e.g. ``"run"``). + """ + return [] diff --git a/src/pynecore/providers/__init__.py b/src/pynecore/providers/__init__.py index f5062ae..e69de29 100644 --- a/src/pynecore/providers/__init__.py +++ b/src/pynecore/providers/__init__.py @@ -1,3 +0,0 @@ -from pynecore.core.plugin import get_available_plugin_names - -available_providers = tuple(get_available_plugin_names('pyne.provider')) diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index 8de9a2f..8e12317 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -61,8 +61,6 @@ class CCXTProvider(Provider): """ plugin_name = "CCXT" - plugin_version = "1.0.0" - min_pynecore_version = "6.5" Config = CCXTConfig @classmethod diff --git a/src/pynecore/providers/provider.py b/src/pynecore/providers/provider.py index 212d4fe..d11bce5 100644 --- a/src/pynecore/providers/provider.py +++ b/src/pynecore/providers/provider.py @@ -6,9 +6,10 @@ from ..types.ohlcv import OHLCV from pynecore.core.syminfo import SymInfo from pynecore.core.ohlcv_file 
import OHLCVWriter, OHLCVReader +from pynecore.core.plugin_base import Plugin -class Provider(metaclass=ABCMeta): +class Provider(Plugin, metaclass=ABCMeta): """ Base class for all data providers. @@ -16,18 +17,6 @@ class Provider(metaclass=ABCMeta): dataclass for configuration (used by :func:`pynecore.core.config.ensure_config`). """ - plugin_name: str = "" - """Display name of the provider (e.g. "Capital.com", "Binance").""" - - plugin_version: str = "0.0.0" - """Provider plugin version.""" - - min_pynecore_version: str = "" - """Minimum compatible PyneCore version.""" - - Config: type | None = None - """Override in subclass with a ``@dataclass`` for provider configuration.""" - timezone: str = 'UTC' """Default timezone of the provider.""" diff --git a/tests/t00_pynecore/core/test_014_plugin.py b/tests/t00_pynecore/core/test_014_plugin.py index a454cf1..20e49f0 100644 --- a/tests/t00_pynecore/core/test_014_plugin.py +++ b/tests/t00_pynecore/core/test_014_plugin.py @@ -6,22 +6,24 @@ discover_plugins, load_plugin, get_available_plugin_names, + get_plugin_metadata, PluginNotFoundError, ) +from pynecore.core.plugin_base import Plugin +from pynecore.providers.provider import Provider def __test_discover_plugins_returns_dict__(): - """discover_plugins returns a dict (possibly empty for unknown group)""" - result = discover_plugins("pyne.nonexistent_group_for_testing") + """discover_plugins returns a dict of installed plugins""" + result = discover_plugins() assert isinstance(result, dict) - assert len(result) == 0 -def __test_discover_providers__(): - """discover_plugins finds CCXT provider via entry_points""" - result = discover_plugins("pyne.provider") +def __test_discover_ccxt__(): + """discover_plugins finds CCXT provider via pyne.plugin entry point""" + result = discover_plugins() assert "ccxt" in result, ( - f"CCXT not found in pyne.provider entry points. " + f"CCXT not found in pyne.plugin entry points. " f"Available: {list(result.keys())}. 
" f"Make sure pynecore is installed with: pip install -e pynecore/" ) @@ -31,14 +33,14 @@ def __test_load_plugin_ccxt__(): """load_plugin loads the CCXTProvider class""" from pynecore.providers.ccxt import CCXTProvider - cls = load_plugin("pyne.provider", "ccxt") + cls = load_plugin("ccxt") assert cls is CCXTProvider def __test_load_plugin_not_found__(): """load_plugin raises PluginNotFoundError for missing plugin""" try: - load_plugin("pyne.provider", "nonexistent_provider_xyz") + load_plugin("nonexistent_provider_xyz") assert False, "Should have raised PluginNotFoundError" except PluginNotFoundError as e: assert "nonexistent_provider_xyz" in str(e) @@ -47,13 +49,31 @@ def __test_load_plugin_not_found__(): def __test_get_available_plugin_names__(): """get_available_plugin_names returns sorted list including ccxt""" - names = get_available_plugin_names("pyne.provider") + names = get_available_plugin_names() assert isinstance(names, list) assert "ccxt" in names assert names == sorted(names) -def __test_get_available_plugin_names_empty_group__(): - """get_available_plugin_names returns empty list for unknown group""" - names = get_available_plugin_names("pyne.nonexistent_group_for_testing") - assert names == [] +def __test_ccxt_is_provider__(): + """CCXTProvider inherits from Plugin and Provider""" + cls = load_plugin("ccxt") + assert issubclass(cls, Plugin) + assert issubclass(cls, Provider) + + +def __test_plugin_metadata__(): + """get_plugin_metadata extracts metadata from pyproject.toml""" + plugins = discover_plugins() + assert "ccxt" in plugins + meta = get_plugin_metadata(plugins["ccxt"]) + assert meta['name'] == 'ccxt' + assert meta['version'] # should be non-empty + assert meta['package'] == 'pynesys-pynecore' + + +def __test_plugin_base_defaults__(): + """Plugin base class has sensible defaults for CLI methods""" + assert Plugin.cli() is None + assert Plugin.cli_params('run') == [] + assert Plugin.Config is None From 6d075a26fab6be960aa85b16922dcc218cefd22b 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 30 Mar 2026 16:45:29 +0200 Subject: [PATCH 07/64] refactor: merge plugin_base.py into plugin.py Single module for Plugin class + discovery/loading logic. --- src/pynecore/core/plugin.py | 55 ++++++++++++++++++++-- src/pynecore/core/plugin_base.py | 55 ---------------------- src/pynecore/providers/provider.py | 2 +- tests/t00_pynecore/core/test_014_plugin.py | 2 +- 4 files changed, 52 insertions(+), 62 deletions(-) delete mode 100644 src/pynecore/core/plugin_base.py diff --git a/src/pynecore/core/plugin.py b/src/pynecore/core/plugin.py index a99ad96..5c51167 100644 --- a/src/pynecore/core/plugin.py +++ b/src/pynecore/core/plugin.py @@ -1,9 +1,18 @@ """ -Plugin discovery and loading via Python entry points. +Plugin base class, discovery, and loading via Python entry points. All PyneCore plugins register under a single entry point group (``pyne.plugin``) in their ``pyproject.toml``. The plugin class hierarchy -determines capabilities (Provider, Extension, etc.). +determines capabilities (Provider, Extension, etc.): + +- ``Provider(Plugin)`` — offline OHLCV data provider +- ``Extension(Plugin)`` — hook-based script extension +- ``LiveProvider(Plugin)`` — WebSocket/streaming data +- ``Plugin`` directly — CLI-only plugin + +CLI methods (``cli()``, ``cli_params()``) are optional with sensible defaults. +Plugin metadata (name, version) comes from the package's ``pyproject.toml`` +via :mod:`importlib.metadata`, not from class attributes. Example ``pyproject.toml`` for a provider plugin:: @@ -14,19 +23,55 @@ plugins = discover_plugins() cls = load_plugin("capitalcom") - -Metadata (name, version, description) is read from the package's -``pyproject.toml`` via :mod:`importlib.metadata`, not from class attributes. 
""" +from __future__ import annotations + import re +from typing import TYPE_CHECKING # noinspection PyProtectedMember from importlib.metadata import entry_points, EntryPoint +if TYPE_CHECKING: + import click + import typer + PLUGIN_GROUP = 'pyne.plugin' +class Plugin: + """Base class for all PyneCore plugins.""" + + Config: type | None = None + """Override with a ``@dataclass`` for plugin configuration.""" + + plugin_name: str = "" + """Optional display name override. If empty, the entry point name is used.""" + + @staticmethod + def cli() -> typer.Typer | None: + """ + Return a Typer app for plugin subcommands. + + Override to add commands like ``pyne ``. + Return ``None`` (default) if the plugin has no CLI commands. + """ + return None + + @staticmethod + def cli_params(command_name: str) -> list[click.Parameter]: + """ + Return extra parameters for an existing command. + + Override to inject flags/options into commands like ``pyne run``. + Return ``[]`` (default) if the plugin has no parameter hooks. + + :param command_name: The command to extend (e.g. ``"run"``). + """ + return [] + + class PluginNotFoundError(ImportError): """Raised when a requested plugin is not installed.""" diff --git a/src/pynecore/core/plugin_base.py b/src/pynecore/core/plugin_base.py deleted file mode 100644 index 83c4ff8..0000000 --- a/src/pynecore/core/plugin_base.py +++ /dev/null @@ -1,55 +0,0 @@ -""" -Base class for all PyneCore plugins. - -Every plugin inherits from :class:`Plugin` (directly or via :class:`Provider`, -:class:`Extension`, etc.). The class hierarchy determines capabilities: - -- ``Provider(Plugin)`` — offline OHLCV data provider -- ``Extension(Plugin)`` — hook-based script extension -- ``LiveProvider(Plugin)`` — WebSocket/streaming data -- ``Plugin`` directly — CLI-only plugin - -CLI methods (``cli()``, ``cli_params()``) are optional with sensible defaults. 
-Plugin metadata (name, version) comes from the package's ``pyproject.toml`` -via :mod:`importlib.metadata`, not from class attributes. -""" - -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - import click - import typer - - -class Plugin: - """Base class for all PyneCore plugins.""" - - Config: type | None = None - """Override with a ``@dataclass`` for plugin configuration.""" - - plugin_name: str = "" - """Optional display name override. If empty, the entry point name is used.""" - - @staticmethod - def cli() -> typer.Typer | None: - """ - Return a Typer app for plugin subcommands. - - Override to add commands like ``pyne ``. - Return ``None`` (default) if the plugin has no CLI commands. - """ - return None - - @staticmethod - def cli_params(command_name: str) -> list[click.Parameter]: - """ - Return extra parameters for an existing command. - - Override to inject flags/options into commands like ``pyne run``. - Return ``[]`` (default) if the plugin has no parameter hooks. - - :param command_name: The command to extend (e.g. ``"run"``). 
- """ - return [] diff --git a/src/pynecore/providers/provider.py b/src/pynecore/providers/provider.py index d11bce5..0fb23ae 100644 --- a/src/pynecore/providers/provider.py +++ b/src/pynecore/providers/provider.py @@ -6,7 +6,7 @@ from ..types.ohlcv import OHLCV from pynecore.core.syminfo import SymInfo from pynecore.core.ohlcv_file import OHLCVWriter, OHLCVReader -from pynecore.core.plugin_base import Plugin +from pynecore.core.plugin import Plugin class Provider(Plugin, metaclass=ABCMeta): diff --git a/tests/t00_pynecore/core/test_014_plugin.py b/tests/t00_pynecore/core/test_014_plugin.py index 20e49f0..195e91f 100644 --- a/tests/t00_pynecore/core/test_014_plugin.py +++ b/tests/t00_pynecore/core/test_014_plugin.py @@ -9,7 +9,7 @@ get_plugin_metadata, PluginNotFoundError, ) -from pynecore.core.plugin_base import Plugin +from pynecore.core.plugin import Plugin from pynecore.providers.provider import Provider From a7df735b1fa8eb6a286f2bd2ee0c35f7688cd9f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 30 Mar 2026 17:46:08 +0200 Subject: [PATCH 08/64] refactor(core): migrate plugin system to new plugin type hierarchy - Move plugin base class and discovery logic to core.plugin.__init__.py - Introduce ProviderPlugin and CLIPlugin subclasses for clearer plugin types - Refactor provider base class to core.plugin.provider.ProviderPlugin - Update imports and tests to use new plugin classes - Remove old src/pynecore/core/plugin.py and src/pynecore/providers/provider.py - Update CCXTProvider and CLI commands to use ProviderPlugin - Enhance plugin capability detection for CLI and provider types --- src/pynecore/cli/commands/data.py | 8 ++-- src/pynecore/cli/commands/plugin.py | 10 ++--- .../core/{plugin.py => plugin/__init__.py} | 44 ++++++++++++++----- .../{providers => core/plugin}/provider.py | 7 +-- src/pynecore/providers/ccxt.py | 4 +- tests/t00_pynecore/core/test_014_plugin.py | 14 +++--- 6 files changed, 56 insertions(+), 31 deletions(-) 
rename src/pynecore/core/{plugin.py => plugin/__init__.py} (74%) rename src/pynecore/{providers => core/plugin}/provider.py (97%) diff --git a/src/pynecore/cli/commands/data.py b/src/pynecore/cli/commands/data.py index 099fd7f..92e8076 100644 --- a/src/pynecore/cli/commands/data.py +++ b/src/pynecore/cli/commands/data.py @@ -12,7 +12,7 @@ from ..app import app, app_state from ...core.plugin import discover_plugins, load_plugin -from ...providers.provider import Provider +from ...core.plugin import ProviderPlugin from ...lib.timeframe import in_seconds from ...core.data_converter import DataConverter, SupportedFormats as InputFormats from ...core.ohlcv_file import OHLCVReader @@ -43,7 +43,7 @@ class AvailableProvidersEnum(Enum): for _name, _ep in discover_plugins().items(): try: _cls = _ep.load() - if isinstance(_cls, type) and issubclass(_cls, Provider): + if isinstance(_cls, type) and issubclass(_cls, ProviderPlugin): _provider_names.append(_name) except Exception: pass @@ -149,7 +149,7 @@ def download( app_state.config_dir / 'plugins' / f'{provider.value}.toml') with Progress(SpinnerColumn(), TextColumn("{task.description}"), transient=True) as progress: progress.add_task(description="Fetching market data...", total=None) - provider_instance: Provider = provider_class(symbol=symbol, config=config) + provider_instance: ProviderPlugin = provider_class(symbol=symbol, config=config) symbols = provider_instance.get_list_of_symbols() with (console := Console()).pager(): for s in symbols: @@ -166,7 +166,7 @@ def download( if hasattr(provider_class, 'Config') and provider_class.Config is not None: config = ensure_config(provider_class.Config, app_state.config_dir / 'plugins' / f'{provider.value}.toml') - provider_instance: Provider = provider_class(symbol=symbol, timeframe=timeframe, + provider_instance: ProviderPlugin = provider_class(symbol=symbol, timeframe=timeframe, ohlv_dir=app_state.data_dir, config=config) # Download symbol info if not exists diff --git 
a/src/pynecore/cli/commands/plugin.py b/src/pynecore/cli/commands/plugin.py index 91d1fd9..8e28ca1 100644 --- a/src/pynecore/cli/commands/plugin.py +++ b/src/pynecore/cli/commands/plugin.py @@ -10,16 +10,14 @@ def _get_capabilities(cls: type) -> list[str]: """Determine plugin capabilities from its class hierarchy.""" - from ...providers.provider import Provider + from ...core.plugin import ProviderPlugin, CLIPlugin caps = [] - if isinstance(cls, type) and issubclass(cls, Provider): + if isinstance(cls, type) and issubclass(cls, ProviderPlugin): caps.append('provider') - # Future: Extension, LiveProvider checks will go here - if cls.cli() is not None: + # Future: ExtensionPlugin, LiveProviderPlugin checks will go here + if isinstance(cls, type) and issubclass(cls, CLIPlugin): caps.append('cli') - if cls.cli_params('run'): - caps.append('params') return caps diff --git a/src/pynecore/core/plugin.py b/src/pynecore/core/plugin/__init__.py similarity index 74% rename from src/pynecore/core/plugin.py rename to src/pynecore/core/plugin/__init__.py index 5c51167..411fc7f 100644 --- a/src/pynecore/core/plugin.py +++ b/src/pynecore/core/plugin/__init__.py @@ -3,18 +3,22 @@ All PyneCore plugins register under a single entry point group (``pyne.plugin``) in their ``pyproject.toml``. The plugin class hierarchy -determines capabilities (Provider, Extension, etc.): +determines capabilities: -- ``Provider(Plugin)`` — offline OHLCV data provider -- ``Extension(Plugin)`` — hook-based script extension -- ``LiveProvider(Plugin)`` — WebSocket/streaming data -- ``Plugin`` directly — CLI-only plugin +- ``ProviderPlugin(Plugin)`` — offline OHLCV data provider +- ``ExtensionPlugin(Plugin)`` — hook-based script extension +- ``LiveProviderPlugin(Plugin)`` — WebSocket/streaming data +- ``CLIPlugin(Plugin)`` — CLI commands and parameter hooks + +Multiple inheritance combines capabilities:: + + class BinancePlugin(ProviderPlugin, CLIPlugin): ... 
+ class PlotPlugin(ExtensionPlugin, CLIPlugin): ... -CLI methods (``cli()``, ``cli_params()``) are optional with sensible defaults. Plugin metadata (name, version) comes from the package's ``pyproject.toml`` via :mod:`importlib.metadata`, not from class attributes. -Example ``pyproject.toml`` for a provider plugin:: +Example ``pyproject.toml``:: [project.entry-points."pyne.plugin"] myexchange = "mypackage:MyExchangeProvider" @@ -41,7 +45,13 @@ class Plugin: - """Base class for all PyneCore plugins.""" + """ + Minimal base class for all PyneCore plugins. + + Used for ``isinstance`` checks. Plugin authors should inherit from a + concrete subclass: :class:`ProviderPlugin`, :class:`ExtensionPlugin`, + :class:`CLIPlugin`, or a combination via multiple inheritance. + """ Config: type | None = None """Override with a ``@dataclass`` for plugin configuration.""" @@ -49,13 +59,23 @@ class Plugin: plugin_name: str = "" """Optional display name override. If empty, the entry point name is used.""" + +class CLIPlugin(Plugin): + """ + Plugin that provides CLI commands and/or parameter hooks. + + Override :meth:`cli` to add subcommands (``pyne ...``). + Override :meth:`cli_params` to inject flags into existing commands. + """ + @staticmethod def cli() -> typer.Typer | None: """ Return a Typer app for plugin subcommands. Override to add commands like ``pyne ``. - Return ``None`` (default) if the plugin has no CLI commands. + + :return: A Typer app, or ``None`` if the plugin has no CLI commands. """ return None @@ -65,9 +85,9 @@ def cli_params(command_name: str) -> list[click.Parameter]: Return extra parameters for an existing command. Override to inject flags/options into commands like ``pyne run``. - Return ``[]`` (default) if the plugin has no parameter hooks. :param command_name: The command to extend (e.g. ``"run"``). + :return: List of Click parameters, or ``[]`` if no hooks for this command. 
""" return [] @@ -147,3 +167,7 @@ def _parse_min_pynecore(ep: EntryPoint) -> str: if m: return m.group(1) return '' + + +# Plugin type subclasses — import after Plugin is defined to avoid circular imports +from .provider import ProviderPlugin diff --git a/src/pynecore/providers/provider.py b/src/pynecore/core/plugin/provider.py similarity index 97% rename from src/pynecore/providers/provider.py rename to src/pynecore/core/plugin/provider.py index 0fb23ae..a1e66bd 100644 --- a/src/pynecore/providers/provider.py +++ b/src/pynecore/core/plugin/provider.py @@ -3,13 +3,14 @@ from pathlib import Path from datetime import datetime -from ..types.ohlcv import OHLCV +from pynecore.types.ohlcv import OHLCV from pynecore.core.syminfo import SymInfo from pynecore.core.ohlcv_file import OHLCVWriter, OHLCVReader -from pynecore.core.plugin import Plugin +from . import Plugin -class Provider(Plugin, metaclass=ABCMeta): + +class ProviderPlugin(Plugin, metaclass=ABCMeta): """ Base class for all data providers. 
diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index 8e12317..c293670 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -12,7 +12,7 @@ def override(func): from pathlib import Path import tomllib -from .provider import Provider +from pynecore.core.plugin import ProviderPlugin from pynecore.core.syminfo import SymInfo, SymInfoInterval, SymInfoSession from ..types.ohlcv import OHLCV @@ -55,7 +55,7 @@ class CCXTConfig: """Default API password (required by some exchanges like KuCoin)""" -class CCXTProvider(Provider): +class CCXTProvider(ProviderPlugin): """ CCXT provider """ diff --git a/tests/t00_pynecore/core/test_014_plugin.py b/tests/t00_pynecore/core/test_014_plugin.py index 195e91f..d5618e1 100644 --- a/tests/t00_pynecore/core/test_014_plugin.py +++ b/tests/t00_pynecore/core/test_014_plugin.py @@ -3,14 +3,15 @@ """ from pynecore.core.plugin import ( + Plugin, + ProviderPlugin, + CLIPlugin, discover_plugins, load_plugin, get_available_plugin_names, get_plugin_metadata, PluginNotFoundError, ) -from pynecore.core.plugin import Plugin -from pynecore.providers.provider import Provider def __test_discover_plugins_returns_dict__(): @@ -59,7 +60,7 @@ def __test_ccxt_is_provider__(): """CCXTProvider inherits from Plugin and Provider""" cls = load_plugin("ccxt") assert issubclass(cls, Plugin) - assert issubclass(cls, Provider) + assert issubclass(cls, ProviderPlugin) def __test_plugin_metadata__(): @@ -73,7 +74,8 @@ def __test_plugin_metadata__(): def __test_plugin_base_defaults__(): - """Plugin base class has sensible defaults for CLI methods""" - assert Plugin.cli() is None - assert Plugin.cli_params('run') == [] + """Plugin base class has minimal attributes, CLIPlugin has CLI defaults""" assert Plugin.Config is None + assert Plugin.plugin_name == "" + assert CLIPlugin.cli() is None + assert CLIPlugin.cli_params('run') == [] From 47e7bfed396c627588cc752dddc954430b409468 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 30 Mar 2026 19:27:05 +0200 Subject: [PATCH 09/64] feat: add PluggableCommand, Generic config typing, and ensure_config caching - Add PluggableCommand(TyperCommand) for dynamic CLI param injection by plugins - Move CLIPlugin to its own cli.py module for consistency with provider.py - Add Generic[ConfigT] to Plugin/ProviderPlugin for typed config access - Centralize override polyfill in plugin system (remove from ccxt.py) - Cache ensure_config results on config_cls._ensured attribute - Register CLI plugin subcommands and param hooks on startup - Use cls=PluggableCommand on the run command --- src/pynecore/cli/commands/__init__.py | 56 +++++ src/pynecore/cli/commands/run.py | 3 +- src/pynecore/cli/pluggable.py | 73 ++++++ src/pynecore/core/config.py | 10 +- src/pynecore/core/plugin/__init__.py | 61 ++--- src/pynecore/core/plugin/cli.py | 42 ++++ src/pynecore/core/plugin/provider.py | 8 +- src/pynecore/providers/ccxt.py | 11 +- tests/t00_pynecore/core/test_013_config.py | 13 ++ .../core/test_015_pluggable_command.py | 210 ++++++++++++++++++ 10 files changed, 432 insertions(+), 55 deletions(-) create mode 100644 src/pynecore/cli/pluggable.py create mode 100644 src/pynecore/core/plugin/cli.py create mode 100644 tests/t00_pynecore/core/test_015_pluggable_command.py diff --git a/src/pynecore/cli/commands/__init__.py b/src/pynecore/cli/commands/__init__.py index 759fb20..c42dcdc 100644 --- a/src/pynecore/cli/commands/__init__.py +++ b/src/pynecore/cli/commands/__init__.py @@ -258,3 +258,59 @@ def main( # Setup global error logging setup_global_error_logging(workdir / "output" / "logs" / "error.log") + + +# --------------------------------------------------------------------------- +# CLIPlugin loading: subcommands and parameter hooks +# --------------------------------------------------------------------------- +_BUILTIN_COMMANDS = {'run', 'data', 'compile', 'benchmark', 'debug', 'plugin'} +_PLUGGABLE_COMMANDS = {'run': run} + 
+ +def _register_cli_plugins(): + """Load CLIPlugin subcommands and parameter hooks from installed plugins.""" + from ...core.plugin import discover_plugins, CLIPlugin + from ..pluggable import PluggableCommand + + for name, ep in discover_plugins().items(): + try: + plugin_cls = ep.load() + + if not (isinstance(plugin_cls, type) and issubclass(plugin_cls, CLIPlugin)): + continue + + # 1. CLI subcommand registration + cli_app = plugin_cls.cli() + if cli_app is not None: + if name in _BUILTIN_COMMANDS: + typer.secho( + f"Warning: plugin '{name}' CLI name conflicts with " + f"built-in command, skipping", + fg="yellow", err=True, + ) + else: + app.add_typer(cli_app, name=name) + + # 2. Parameter hook registration + for cmd_name, cmd_func in _PLUGGABLE_COMMANDS.items(): + params = plugin_cls.cli_params(cmd_name) + if not params: + continue + + click_cmd = typer.main.get_command(app).commands.get(cmd_name) + if not isinstance(click_cmd, PluggableCommand): + continue + + for param in params: + if not click_cmd.register_plugin_param(param): + typer.secho( + f"Warning: plugin '{name}' param '{param.name}' " + f"conflicts on '{cmd_name}'", + fg="yellow", err=True, + ) + + except Exception: + pass + + +_register_cli_plugins() diff --git a/src/pynecore/cli/commands/run.py b/src/pynecore/cli/commands/run.py index 3f01770..bdc197f 100644 --- a/src/pynecore/cli/commands/run.py +++ b/src/pynecore/cli/commands/run.py @@ -15,6 +15,7 @@ from rich.console import Console from ..app import app, app_state +from ..pluggable import PluggableCommand from ...utils.rich.date_column import DateColumn from pynecore.core.ohlcv_file import OHLCVReader @@ -60,7 +61,7 @@ def render(self, task: Task) -> Text: return Text(f"{minutes:02d}:{seconds:06.3f}", style="cyan") -@app.command() +@app.command(cls=PluggableCommand) def run( script: Path = Argument(..., dir_okay=False, file_okay=True, help="Script to run (.py or .pine)"), data: Path = Argument(..., dir_okay=False, file_okay=True, diff --git 
a/src/pynecore/cli/pluggable.py b/src/pynecore/cli/pluggable.py new file mode 100644 index 0000000..970a768 --- /dev/null +++ b/src/pynecore/cli/pluggable.py @@ -0,0 +1,73 @@ +""" +Click Command subclass that supports dynamic parameter injection by plugins. + +Typer generates Click commands internally. By passing ``cls=PluggableCommand`` +to ``@app.command()``, plugins can register extra ``--flags`` that appear in +``--help`` and are parsed alongside built-in parameters. + +Plugin parameters are separated from core parameters before the callback is +invoked, so the original function signature does not need to change. The +injected values are stored on ``ctx.plugin_params``. +""" + +import click +from typer.core import TyperCommand + + +class PluggableCommand(TyperCommand): + """ + A Typer-compatible Click command that allows plugins to inject parameters. + + Usage:: + + @app.command(cls=PluggableCommand) + def run(ctx: typer.Context, script: Path = ...): + live = ctx.plugin_params.get('live', False) + + After the command is registered, call :meth:`register_plugin_param` to add + plugin-provided options/arguments. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._plugin_params: list[click.Parameter] = [] + + def register_plugin_param(self, param: click.Parameter) -> bool: + """ + Register a plugin-provided parameter. + + Checks both parameter names and option strings (e.g. ``--from``, ``-f``) + to prevent conflicts. + + :param param: A Click Parameter (typically ``click.Option``). + :return: ``False`` if the name or any option string conflicts. 
+ """ + all_params = [*self.params, *self._plugin_params] + + existing_names = {p.name for p in all_params} + if param.name in existing_names: + return False + + existing_opts = {opt for p in all_params for opt in getattr(p, 'opts', ())} + new_opts = set(getattr(param, 'opts', ())) + if existing_opts & new_opts: + return False + + self._plugin_params.append(param) + return True + + def get_params(self, ctx: click.Context) -> list[click.Parameter]: + """Return core params + plugin params + help option.""" + rv = [*self.params, *self._plugin_params] + help_option = self.get_help_option(ctx) + if help_option is not None: + rv.append(help_option) + return rv + + def invoke(self, ctx: click.Context) -> None: + """Pop plugin params from ctx.params before calling the callback.""" + ctx.plugin_params = {} + for p in self._plugin_params: + if p.name in ctx.params: + ctx.plugin_params[p.name] = ctx.params.pop(p.name) + return super().invoke(ctx) diff --git a/src/pynecore/core/config.py b/src/pynecore/core/config.py index 0d5f1ce..c5abc54 100644 --- a/src/pynecore/core/config.py +++ b/src/pynecore/core/config.py @@ -157,6 +157,9 @@ def ensure_config(config_cls: type, config_path: Path) -> object: 2. If it exists, read user values, regenerate from the dataclass, write back. 3. Return a populated dataclass instance with user values over defaults. + The result is cached on ``config_cls._ensured``, so repeated calls + return the same instance without file I/O. + TOML table sections (e.g. ``[binance]``) not managed by the dataclass are preserved verbatim at the end of the file. @@ -164,6 +167,9 @@ def ensure_config(config_cls: type, config_path: Path) -> object: :param config_path: Path to the TOML file. :return: A populated config dataclass instance. 
""" + if hasattr(config_cls, '_ensured'): + return config_cls._ensured + user_values = None extra_content = "" @@ -180,7 +186,9 @@ def ensure_config(config_cls: type, config_path: Path) -> object: config_path.parent.mkdir(parents=True, exist_ok=True) config_path.write_text(toml_content, encoding='utf-8') - return _create_instance(config_cls, user_values) + instance = _create_instance(config_cls, user_values) + config_cls._ensured = instance + return instance def _parse_existing(config_path: Path, config_cls: type) -> tuple[dict, str]: diff --git a/src/pynecore/core/plugin/__init__.py b/src/pynecore/core/plugin/__init__.py index 411fc7f..66a1a8e 100644 --- a/src/pynecore/core/plugin/__init__.py +++ b/src/pynecore/core/plugin/__init__.py @@ -32,66 +32,46 @@ class PlotPlugin(ExtensionPlugin, CLIPlugin): ... from __future__ import annotations import re -from typing import TYPE_CHECKING +import sys +from typing import TypeVar, Generic # noinspection PyProtectedMember from importlib.metadata import entry_points, EntryPoint -if TYPE_CHECKING: - import click - import typer +if sys.version_info >= (3, 12): + # noinspection PyUnusedImports + from typing import override +else: + def override(func): + """Marks a method as overriding a base class method (polyfill for <3.12).""" + return func PLUGIN_GROUP = 'pyne.plugin' +ConfigT = TypeVar('ConfigT') -class Plugin: + +class Plugin(Generic[ConfigT]): """ Minimal base class for all PyneCore plugins. - Used for ``isinstance`` checks. Plugin authors should inherit from a - concrete subclass: :class:`ProviderPlugin`, :class:`ExtensionPlugin`, + Generic over the config dataclass type. Plugin authors should inherit + from a concrete subclass: :class:`ProviderPlugin`, :class:`ExtensionPlugin`, :class:`CLIPlugin`, or a combination via multiple inheritance. 
+ + Example:: + + class MyProvider(ProviderPlugin[MyConfig]): + Config = MyConfig """ - Config: type | None = None + Config: type[ConfigT] | None = None """Override with a ``@dataclass`` for plugin configuration.""" plugin_name: str = "" """Optional display name override. If empty, the entry point name is used.""" -class CLIPlugin(Plugin): - """ - Plugin that provides CLI commands and/or parameter hooks. - - Override :meth:`cli` to add subcommands (``pyne ...``). - Override :meth:`cli_params` to inject flags into existing commands. - """ - - @staticmethod - def cli() -> typer.Typer | None: - """ - Return a Typer app for plugin subcommands. - - Override to add commands like ``pyne ``. - - :return: A Typer app, or ``None`` if the plugin has no CLI commands. - """ - return None - - @staticmethod - def cli_params(command_name: str) -> list[click.Parameter]: - """ - Return extra parameters for an existing command. - - Override to inject flags/options into commands like ``pyne run``. - - :param command_name: The command to extend (e.g. ``"run"``). - :return: List of Click parameters, or ``[]`` if no hooks for this command. - """ - return [] - - class PluginNotFoundError(ImportError): """Raised when a requested plugin is not installed.""" @@ -171,3 +151,4 @@ def _parse_min_pynecore(ep: EntryPoint) -> str: # Plugin type subclasses — import after Plugin is defined to avoid circular imports from .provider import ProviderPlugin +from .cli import CLIPlugin diff --git a/src/pynecore/core/plugin/cli.py b/src/pynecore/core/plugin/cli.py new file mode 100644 index 0000000..09a8f03 --- /dev/null +++ b/src/pynecore/core/plugin/cli.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from . import Plugin + +if TYPE_CHECKING: + import click + import typer + + +class CLIPlugin(Plugin): + """ + Plugin that provides CLI commands and/or parameter hooks. + + Override :meth:`cli` to add subcommands (``pyne ...``). 
+ Override :meth:`cli_params` to inject flags into existing commands. + """ + + @staticmethod + def cli() -> typer.Typer | None: + """ + Return a Typer app for plugin subcommands. + + Override to add commands like ``pyne ``. + + :return: A Typer app, or ``None`` if the plugin has no CLI commands. + """ + return None + + # noinspection PyUnusedLocal + @staticmethod + def cli_params(command_name: str) -> list[click.Parameter]: + """ + Return extra parameters for an existing command. + + Override to inject flags/options into commands like ``pyne run``. + + :param command_name: The command to extend (e.g. ``"run"``). + :return: List of Click parameters, or ``[]`` if no hooks for this command. + """ + return [] diff --git a/src/pynecore/core/plugin/provider.py b/src/pynecore/core/plugin/provider.py index a1e66bd..d261633 100644 --- a/src/pynecore/core/plugin/provider.py +++ b/src/pynecore/core/plugin/provider.py @@ -7,10 +7,10 @@ from pynecore.core.syminfo import SymInfo from pynecore.core.ohlcv_file import OHLCVWriter, OHLCVReader -from . import Plugin +from . import Plugin, ConfigT -class ProviderPlugin(Plugin, metaclass=ABCMeta): +class ProviderPlugin(Plugin[ConfigT], metaclass=ABCMeta): """ Base class for all data providers. @@ -70,7 +70,7 @@ def get_ohlcv_path(cls, symbol: str, timeframe: str, ohlv_dir: Path, f"_{timeframe}.ohlcv") def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, - ohlv_dir: Path | None = None, config: object | None = None): + ohlv_dir: Path | None = None, config: ConfigT | None = None): """ :param symbol: The symbol to get data for. :param timeframe: The timeframe to get data for in TradingView format. 
@@ -82,7 +82,7 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, self.xchg_timeframe = self.to_exchange_timeframe(timeframe) if timeframe else None self.ohlcv_path = self.get_ohlcv_path(symbol, timeframe, ohlv_dir) if ohlv_dir else None self.ohlcv_file = OHLCVWriter(self.ohlcv_path) if self.ohlcv_path else None - self.config = config + self.config: ConfigT | None = config def __enter__(self) -> OHLCVWriter: assert self.ohlcv_file is not None diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index c293670..1146b54 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -1,18 +1,11 @@ from typing import Callable from dataclasses import dataclass -import sys - -if sys.version_info >= (3, 12): - from typing import override -else: - def override(func): - return func import re from datetime import datetime, UTC, timedelta, time from pathlib import Path import tomllib -from pynecore.core.plugin import ProviderPlugin +from pynecore.core.plugin import ProviderPlugin, override from pynecore.core.syminfo import SymInfo, SymInfoInterval, SymInfoSession from ..types.ohlcv import OHLCV @@ -55,7 +48,7 @@ class CCXTConfig: """Default API password (required by some exchanges like KuCoin)""" -class CCXTProvider(ProviderPlugin): +class CCXTProvider(ProviderPlugin[CCXTConfig]): """ CCXT provider """ diff --git a/tests/t00_pynecore/core/test_013_config.py b/tests/t00_pynecore/core/test_013_config.py index 1f7def7..feb9d84 100644 --- a/tests/t00_pynecore/core/test_013_config.py +++ b/tests/t00_pynecore/core/test_013_config.py @@ -14,6 +14,15 @@ ) +@pytest.fixture(autouse=True) +def _clear_ensured_cache(): + """Clear cached config instances between tests.""" + yield + for cls in list(globals().values()): + if isinstance(cls, type) and hasattr(cls, '_ensured'): + del cls._ensured + + @dataclass class SampleConfig: """Sample configuration""" @@ -113,6 +122,7 @@ def 
__test_user_values_preserved__(tmp_path: Path): content = content.replace('#api_key = ""', 'api_key = "my_key"') content = content.replace("#enabled = false", "enabled = true") cfg_path.write_text(content) + del SampleConfig._ensured result = ensure_config(SampleConfig, cfg_path) @@ -138,6 +148,7 @@ def __test_new_field_appears__(tmp_path: Path): assert '#name = "default"' in content cfg_path.write_text('name = "custom"\n') + del MinimalConfig._ensured result = ensure_config(SampleConfig, cfg_path) @@ -156,6 +167,7 @@ def __test_removed_field_disappears__(tmp_path: Path): content = cfg_path.read_text() content = content.replace("#timeout = 30", "timeout = 60") cfg_path.write_text(content) + del SampleConfig._ensured result = ensure_config(MinimalConfig, cfg_path) @@ -224,6 +236,7 @@ def __test_extra_sections_preserved__(tmp_path: Path): content = cfg_path.read_text() content += '\n[binance]\napiKey = "binance_key"\nsecret = "binance_secret"\n' cfg_path.write_text(content) + del SampleConfig._ensured result = ensure_config(SampleConfig, cfg_path) diff --git a/tests/t00_pynecore/core/test_015_pluggable_command.py b/tests/t00_pynecore/core/test_015_pluggable_command.py new file mode 100644 index 0000000..433be8c --- /dev/null +++ b/tests/t00_pynecore/core/test_015_pluggable_command.py @@ -0,0 +1,210 @@ +"""Tests for the PluggableCommand CLI parameter injection system.""" + +import click +import typer +from click.testing import CliRunner + +from pynecore.cli.pluggable import PluggableCommand + + +def _make_app(): + """Create a minimal Typer app with a PluggableCommand for testing.""" + test_app = typer.Typer() + + @test_app.command(cls=PluggableCommand) + def greet(ctx: typer.Context, name: str = "world"): + plugin_p = getattr(ctx, "plugin_params", {}) + greeting = plugin_p.get("greeting", "Hello") + loud = plugin_p.get("loud", False) + msg = f"{greeting} {name}" + if loud: + msg = msg.upper() + typer.echo(msg) + + return test_app + + +def _get_click_cmd(test_app: 
typer.Typer) -> PluggableCommand: + """Get the underlying PluggableCommand from a Typer app.""" + return typer.main.get_command(test_app) + + +def __test_command_type__(): + """PluggableCommand is used when cls= is passed to @app.command().""" + app = _make_app() + cmd = _get_click_cmd(app) + assert isinstance(cmd, PluggableCommand) + + +def __test_no_plugin_params_default__(): + """Without registered plugin params, the command works normally.""" + app = _make_app() + cmd = _get_click_cmd(app) + + result = CliRunner().invoke(cmd, ["--name", "PyneCore"]) + assert result.exit_code == 0 + assert "Hello PyneCore" in result.output + + +def __test_register_option__(): + """A registered plugin option is parsed and available via ctx.plugin_params.""" + app = _make_app() + cmd = _get_click_cmd(app) + + ok = cmd.register_plugin_param( + click.Option(["--greeting"], default="Hello", help="Greeting word"), + ) + assert ok is True + + result = CliRunner().invoke(cmd, ["--greeting", "Ahoy", "--name", "Sailor"]) + assert result.exit_code == 0 + assert "Ahoy Sailor" in result.output + + +def __test_register_flag__(): + """A registered boolean flag works correctly.""" + app = _make_app() + cmd = _get_click_cmd(app) + + cmd.register_plugin_param( + click.Option(["--loud"], is_flag=True, default=False, help="Shout"), + ) + + result = CliRunner().invoke(cmd, ["--loud", "--name", "test"]) + assert result.exit_code == 0 + assert "HELLO TEST" in result.output + + +def __test_default_values__(): + """Plugin params use their default when not provided on the command line.""" + app = _make_app() + cmd = _get_click_cmd(app) + + cmd.register_plugin_param( + click.Option(["--greeting"], default="Hi", help="Greeting word"), + ) + + result = CliRunner().invoke(cmd, ["--name", "there"]) + assert result.exit_code == 0 + assert "Hi there" in result.output + + +def __test_conflict_with_core_param__(): + """Registering a param that conflicts with a core param returns False.""" + app = _make_app() + 
cmd = _get_click_cmd(app) + + ok = cmd.register_plugin_param( + click.Option(["--name"], default="x", help="Conflict"), + ) + assert ok is False + + +def __test_conflict_between_plugins__(): + """Second registration of the same param name returns False.""" + app = _make_app() + cmd = _get_click_cmd(app) + + ok1 = cmd.register_plugin_param( + click.Option(["--extra"], default="a"), + ) + ok2 = cmd.register_plugin_param( + click.Option(["--extra"], default="b"), + ) + assert ok1 is True + assert ok2 is False + + +def __test_conflict_option_string__(): + """Option string conflict (e.g. --name vs --nickname/-n/--name) is detected.""" + test_app = typer.Typer() + + @test_app.command(cls=PluggableCommand) + def cmd( + time_from: str = typer.Option("", "--from", "-f"), + ): + typer.echo(time_from) + + plug_cmd = _get_click_cmd(test_app) + + ok = plug_cmd.register_plugin_param( + click.Option(["--from"], default="x"), + ) + assert ok is False + + ok2 = plug_cmd.register_plugin_param( + click.Option(["-f"], default="x"), + ) + assert ok2 is False + + ok3 = plug_cmd.register_plugin_param( + click.Option(["--other", "-o"], default="y"), + ) + assert ok3 is True + + +def __test_help_shows_plugin_params__(): + """Plugin params appear in --help output.""" + app = _make_app() + cmd = _get_click_cmd(app) + + cmd.register_plugin_param( + click.Option(["--live"], is_flag=True, default=False, help="Enable live trading"), + ) + + result = CliRunner().invoke(cmd, ["--help"]) + assert result.exit_code == 0 + assert "--live" in result.output + assert "Enable live trading" in result.output + + +def __test_plugin_params_not_passed_to_callback__(): + """Plugin params are NOT passed as kwargs to the callback function.""" + received_kwargs = {} + + test_app = typer.Typer() + + @test_app.command(cls=PluggableCommand) + def strict(name: str = "x"): + received_kwargs["name"] = name + + cmd = _get_click_cmd(test_app) + cmd.register_plugin_param( + click.Option(["--extra"], default="val"), + ) + + 
result = CliRunner().invoke(cmd, ["--extra", "test"]) + assert result.exit_code == 0 + assert "extra" not in received_kwargs + + +def __test_multiple_plugin_params__(): + """Multiple plugin params from different 'plugins' work together.""" + app = _make_app() + cmd = _get_click_cmd(app) + + cmd.register_plugin_param( + click.Option(["--greeting"], default="Hello"), + ) + cmd.register_plugin_param( + click.Option(["--loud"], is_flag=True, default=False), + ) + + result = CliRunner().invoke(cmd, ["--greeting", "YO", "--loud", "--name", "dev"]) + assert result.exit_code == 0 + assert "YO DEV" in result.output + + +def __test_get_params_includes_help__(): + """get_params always includes the --help option at the end.""" + app = _make_app() + cmd = _get_click_cmd(app) + + cmd.register_plugin_param(click.Option(["--extra"], default="x")) + + ctx = click.Context(cmd) + params = cmd.get_params(ctx) + param_names = [p.name for p in params] + + assert "extra" in param_names + assert "help" in param_names From 498ba8e034640f473c9ed85f8cdabe0883197683 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 30 Mar 2026 21:51:54 +0200 Subject: [PATCH 10/64] docs: add plugin system developer guide and make CLIPlugin generic - Add docs/development/plugin-system.md with Hello Plugin example, capability reference, combining capabilities, and config system - Make CLIPlugin generic over ConfigT for consistent config typing across all plugin types --- docs/development/README.md | 1 + docs/development/plugin-system.md | 376 ++++++++++++++++++++++++++++++ src/pynecore/core/plugin/cli.py | 4 +- 3 files changed, 379 insertions(+), 2 deletions(-) create mode 100644 docs/development/plugin-system.md diff --git a/docs/development/README.md b/docs/development/README.md index 912cfc8..3002ef9 100644 --- a/docs/development/README.md +++ b/docs/development/README.md @@ -19,5 +19,6 @@ Documentation for PyneCore developers ## In this section +- [Plugin 
System](./plugin-system.md) - How to create plugins for PyneCore - [Testing System](./testing-system.md) - Overview of the comprehensive testing system - [Contributing](./contributing.md) - Guide for contributing to PyneCore diff --git a/docs/development/plugin-system.md b/docs/development/plugin-system.md new file mode 100644 index 0000000..691a4f3 --- /dev/null +++ b/docs/development/plugin-system.md @@ -0,0 +1,376 @@ + + +# Plugin System + +PyneCore uses a plugin architecture based on Python entry points. Plugins can +provide data sources, add CLI commands, or extend existing commands with new +parameters — all discovered automatically at startup. + +## Architecture + +Every plugin registers under a single entry point group: `pyne.plugin`. The +class hierarchy determines what a plugin can do: + +``` +Plugin (base) +├── ProviderPlugin — Offline OHLCV data provider +├── CLIPlugin — CLI subcommands and parameter hooks +├── ExtensionPlugin — Hook-based script extension (planned) +└── LiveProviderPlugin — WebSocket/streaming data (planned) +``` + +Multiple inheritance combines capabilities: + +```python +class MyPlugin(ProviderPlugin, CLIPlugin): + """A plugin that provides both data downloading and CLI commands.""" + ... +``` + +## Quick Start: Hello Plugin + +A minimal plugin that adds `pyne hello greet` to the CLI. 
+ +### Project structure + +``` +pynecore-hello/ +├── pyproject.toml +└── src/ + └── pynecore_hello/ + └── __init__.py +``` + +### pyproject.toml + +```toml +[project] +name = "pynecore-hello" +version = "0.1.0" +description = "Hello World plugin for PyneCore" +dependencies = ["pynesys-pynecore[cli]>=6.5"] + +# This is how PyneCore discovers the plugin automatically: +# "hello" = the plugin name (used as `pyne hello ...`) +# "pynecore_hello:HelloPlugin" = module:class to load +[project.entry-points."pyne.plugin"] +hello = "pynecore_hello:HelloPlugin" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" +``` + +The `[project.entry-points."pyne.plugin"]` section is the key: it tells Python +that when someone installs this package, a plugin named `hello` should be +registered, pointing to the `HelloPlugin` class. + +### __init__.py + +```python +import typer +from pynecore.core.plugin import CLIPlugin + + +class HelloPlugin(CLIPlugin): + """Hello World plugin.""" + + @staticmethod + def cli() -> typer.Typer: + app = typer.Typer(help="Hello World commands") + + @app.command() + def greet(name: str = typer.Argument("World", help="Who to greet")): + """Say hello.""" + typer.echo(f"Hello, {name}!") + + return app +``` + +### Install and run + +```bash +pip install -e pynecore-hello/ +pyne hello greet PyneCore +# Hello, PyneCore! +``` + +That's it. The plugin is discovered automatically — no registration code, no +config files, no imports. Just install and use. + +## Plugin Capabilities + +### CLIPlugin — Commands and Parameter Hooks + +`CLIPlugin` provides two independent mechanisms: + +#### 1. 
Subcommands via `cli()` + +Return a Typer app to add a command group under `pyne `: + +```python +class FooPlugin(CLIPlugin): + @staticmethod + def cli() -> typer.Typer: + app = typer.Typer(help="Foo commands") + + @app.command() + def bar(name: str = typer.Argument("world")): + """Do something.""" + typer.echo(f"Hello {name}") + + return app +``` + +This creates `pyne foo bar`. + +#### 2. Parameter hooks via `cli_params()` + +Inject flags into existing commands (currently `run` is pluggable): + +```python +import click + +class FooPlugin(CLIPlugin): + @staticmethod + def cli_params(command_name: str) -> list[click.Parameter]: + if command_name == "run": + return [ + click.Option( + ["--verbose", "-V"], + is_flag=True, + default=False, + help="Enable verbose output", + ), + ] + return [] +``` + +These parameters appear in `pyne run --help` and are parsed automatically. +The values are available via `ctx.plugin_params` in the command callback: + +```python +# Inside the run command implementation +def run(ctx: typer.Context, script: Path = ..., data: Path = ...): + verbose = ctx.plugin_params.get("verbose", False) +``` + +> **Note:** Use standard `click.Option` / `click.Argument` — these are the same +> objects you'd use in any Click application. Typer-specific features like +> `rich_help_panel` are not available on injected parameters. + +#### Conflict Detection + +The plugin system prevents parameter collisions automatically: + +- If a plugin tries to register `--from` but the `run` command already uses it → + registration fails with a warning +- If two plugins both register `--verbose` → the second one fails with a warning +- If a plugin tries to use a built-in command name (`run`, `data`, `compile`, + etc.) as its subcommand name → skipped with a warning + +Both parameter names (`time_from`) and option strings (`--from`, `-f`) are +checked to prevent ambiguity. + +### ProviderPlugin — Data Sources + +Provides offline OHLCV data download capability. 
Used by `pyne data download`. + +```python +from dataclasses import dataclass +from pynecore.core.plugin import ProviderPlugin, override + + +@dataclass +class FooConfig: + """Foo provider""" + + api_key: str = "" + """API key for authentication""" + + use_sandbox: bool = False + """Use sandbox environment""" + + +class FooProvider(ProviderPlugin[FooConfig]): + Config = FooConfig + + @override + def get_available_symbols(self) -> list[str]: + ... + + @override + def download(self, days_back, on_progress=None, extra_field_names=None): + ... +``` + +The `Config` dataclass is automatically turned into a self-healing TOML file +at `workdir/config/plugins/.toml` — generated with all defaults commented +out, users uncomment and edit what they need: + +```toml +# Foo provider + +# API key for authentication +#api_key = "" + +# Use sandbox environment +#use_sandbox = false +``` + +The Generic type parameter (`ProviderPlugin[FooConfig]`) gives your IDE +full type information on `self.config` — no more `object | None` warnings. + +## Combining Capabilities + +A plugin can combine multiple capabilities via multiple inheritance. The +`Config` dataclass is shared — it belongs to the plugin itself, not to any +specific capability. 
The `[Config]` type parameter goes on the **first** parent +class — it doesn't matter which one, since both `ProviderPlugin` and `CLIPlugin` +propagate it: + +```python +from dataclasses import dataclass +import click +import typer +from pynecore.core.plugin import ProviderPlugin, CLIPlugin, override + + +@dataclass +class FooConfig: + """Foo provider""" + + api_key: str = "" + """API key for authentication""" + + use_sandbox: bool = False + """Use sandbox environment""" + + +# [FooConfig] on the first parent — either order works +class FooPlugin(ProviderPlugin[FooConfig], CLIPlugin): + """Provider with CLI management commands.""" + + Config = FooConfig + + # --- ProviderPlugin: data downloading --- + + @override + def get_available_symbols(self) -> list[str]: + # self.config is typed as FooConfig (via Generic) + ... + + @override + def download(self, days_back, on_progress=None, extra_field_names=None): + ... + + # --- CLIPlugin: subcommands --- + + @staticmethod + def cli() -> typer.Typer: + app = typer.Typer(help="Foo management commands") + + @app.command() + def status(): + """Show connection status.""" + typer.echo("Connected") + + return app + + # --- CLIPlugin: parameter hooks --- + + @staticmethod + def cli_params(command_name: str) -> list[click.Parameter]: + if command_name == "run": + return [click.Option(["--sandbox"], is_flag=True, default=False)] + return [] +``` + +This single plugin: +- Downloads data via `pyne data download foo` +- Adds `pyne foo status` subcommand +- Injects `--sandbox` into `pyne run` +- Gets a `workdir/config/plugins/foo.toml` with the `FooConfig` fields + +The config TOML file is auto-generated on first run with all defaults commented +out. 
Users uncomment and edit what they need: + +```toml +# Foo provider + +# API key for authentication +#api_key = "" + +# Use sandbox environment +#use_sandbox = false +``` + +## Plugin Configuration + +Any plugin type (`ProviderPlugin`, `CLIPlugin`, or a combination) can have a +`Config` dataclass. Just set the `Config` class attribute and PyneCore handles +the rest. + +The TOML file is: +- **Auto-generated** on first run with all defaults commented out +- **Self-healing** — new fields appear automatically, removed fields disappear +- **User-friendly** — docstrings become TOML comments, uncommented values survive regeneration +- **Cached** — `ensure_config()` returns the same instance on repeated calls + +For ProviderPlugin, the config is automatically loaded and passed via +`self.config`. For CLI-only plugins, load it manually: + +```python +from pynecore.core.config import ensure_config +from pynecore.cli.app import app_state + +config = ensure_config(FooConfig, app_state.config_dir / "plugins" / "foo.toml") +``` + +## Plugin Metadata + +Plugin metadata comes from `pyproject.toml` via `importlib.metadata` — not from +class attributes: + +```bash +pyne plugin list # List all installed plugins +pyne plugin info ccxt # Show details, config fields, capabilities +``` + +The `plugin_name` class attribute is optional and only used for display: + +```python +class FooPlugin(ProviderPlugin[FooConfig], CLIPlugin): + plugin_name = "Foo Service" # shown in `pyne plugin list` +``` + +## Package Naming Convention + +| Type | Package name | Example | +|-----------|---------------------------|------------------------| +| Official | `pynesys-pynecore-` | `pynesys-pynecore-foo` | +| 3rd party | `pynecore-` | `pynecore-bar` | + +## Dependencies + +For plugins with CLI capabilities, depend on the `cli` extra: + +```toml +dependencies = ["pynesys-pynecore[cli]>=6.5"] +``` + +This ensures Typer and Click are available. 
For provider-only plugins, +the base `pynesys-pynecore>=6.5` dependency is sufficient. diff --git a/src/pynecore/core/plugin/cli.py b/src/pynecore/core/plugin/cli.py index 09a8f03..078f489 100644 --- a/src/pynecore/core/plugin/cli.py +++ b/src/pynecore/core/plugin/cli.py @@ -2,14 +2,14 @@ from typing import TYPE_CHECKING -from . import Plugin +from . import Plugin, ConfigT if TYPE_CHECKING: import click import typer -class CLIPlugin(Plugin): +class CLIPlugin(Plugin[ConfigT]): """ Plugin that provides CLI commands and/or parameter hooks. From bf6ed52d53652d7d3d8afc21c3c5fe3796ce36cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Fri, 3 Apr 2026 12:46:35 +0200 Subject: [PATCH 11/64] feat(core): add live provider base and provider string parsing Introduce `LiveProviderPlugin` and `BarUpdate` for streaming providers, and export the new plugin type from `core.plugin`. Add provider string parsing utilities with validation for `provider:symbol@timeframe` inputs, plus tests for both features. 
--- src/pynecore/core/plugin/__init__.py | 8 +- src/pynecore/core/plugin/live_provider.py | 77 ++++++++++ src/pynecore/core/provider_string.py | 106 ++++++++++++++ .../core/test_016_live_provider.py | 64 +++++++++ .../core/test_017_provider_string.py | 134 ++++++++++++++++++ 5 files changed, 385 insertions(+), 4 deletions(-) create mode 100644 src/pynecore/core/plugin/live_provider.py create mode 100644 src/pynecore/core/provider_string.py create mode 100644 tests/t00_pynecore/core/test_016_live_provider.py create mode 100644 tests/t00_pynecore/core/test_017_provider_string.py diff --git a/src/pynecore/core/plugin/__init__.py b/src/pynecore/core/plugin/__init__.py index 66a1a8e..f511d53 100644 --- a/src/pynecore/core/plugin/__init__.py +++ b/src/pynecore/core/plugin/__init__.py @@ -6,14 +6,13 @@ determines capabilities: - ``ProviderPlugin(Plugin)`` — offline OHLCV data provider -- ``ExtensionPlugin(Plugin)`` — hook-based script extension -- ``LiveProviderPlugin(Plugin)`` — WebSocket/streaming data +- ``LiveProviderPlugin(ProviderPlugin)`` — offline + WebSocket/streaming data - ``CLIPlugin(Plugin)`` — CLI commands and parameter hooks Multiple inheritance combines capabilities:: - class BinancePlugin(ProviderPlugin, CLIPlugin): ... - class PlotPlugin(ExtensionPlugin, CLIPlugin): ... + class YahooPlugin(ProviderPlugin, CLIPlugin): ... # offline only + class BinancePlugin(LiveProviderPlugin, CLIPlugin): ... # offline + live Plugin metadata (name, version) comes from the package's ``pyproject.toml`` via :mod:`importlib.metadata`, not from class attributes. 
@@ -151,4 +150,5 @@ def _parse_min_pynecore(ep: EntryPoint) -> str: # Plugin type subclasses — import after Plugin is defined to avoid circular imports from .provider import ProviderPlugin +from .live_provider import LiveProviderPlugin from .cli import CLIPlugin diff --git a/src/pynecore/core/plugin/live_provider.py b/src/pynecore/core/plugin/live_provider.py new file mode 100644 index 0000000..ae508f6 --- /dev/null +++ b/src/pynecore/core/plugin/live_provider.py @@ -0,0 +1,77 @@ +from __future__ import annotations + +from abc import abstractmethod, ABCMeta +from dataclasses import dataclass + +from pynecore.types.ohlcv import OHLCV + +from . import ConfigT +from .provider import ProviderPlugin + + +@dataclass +class BarUpdate: + """A single bar update from a live data source.""" + + ohlcv: OHLCV + """The OHLCV data for this update.""" + + is_closed: bool + """True if the bar is final (closed), False for intra-bar updates.""" + + +class LiveProviderPlugin(ProviderPlugin[ConfigT], metaclass=ABCMeta): + """ + WebSocket/streaming data provider extending :class:`ProviderPlugin`. + + Adds real-time data streaming to the offline OHLCV download capability. + Subclasses must implement connection lifecycle and data streaming methods + in addition to the :class:`ProviderPlugin` abstract methods. + + The async methods run in a background thread; the framework bridges them + to the synchronous :class:`ScriptRunner` via a :class:`queue.Queue`. 
+ """ + + reconnect_delay: float = 1.0 + """Initial delay in seconds before reconnection attempt.""" + + max_reconnect_attempts: int = 10 + """Maximum number of consecutive reconnection attempts.""" + + # --- Connection lifecycle --- + + @abstractmethod + async def connect(self) -> None: + """Establish connection to the data source.""" + + @abstractmethod + async def disconnect(self) -> None: + """Close connection cleanly.""" + + @property + @abstractmethod + def is_connected(self) -> bool: + """Whether the connection is currently active.""" + + # --- Data streaming --- + + @abstractmethod + async def watch_ohlcv(self, symbol: str, timeframe: str) -> BarUpdate: + """ + Wait for and return the next OHLCV update. + + Called in a loop by the framework. Each call blocks (awaits) + until new data arrives from the data source. + + :param symbol: The symbol in provider-specific format. + :param timeframe: Timeframe in TradingView format (e.g. ``"1D"``, ``"1"``, ``"4H"``). + :return: A :class:`BarUpdate` with the OHLCV data and closed/open status. + """ + + # --- Reconnection hooks (override for custom behavior) --- + + async def on_disconnect(self) -> None: + """Called when the connection is unexpectedly lost.""" + + async def on_reconnect(self) -> None: + """Called after a successful reconnection.""" diff --git a/src/pynecore/core/provider_string.py b/src/pynecore/core/provider_string.py new file mode 100644 index 0000000..16ff7ff --- /dev/null +++ b/src/pynecore/core/provider_string.py @@ -0,0 +1,106 @@ +""" +Parse provider strings used in ``pyne run`` and ``request.security()``. + +Format:: + + <provider>:<symbol>@<timeframe> + +Examples:: + + ccxt:BYBIT:BTC/USDT:USDT@1D # CCXT, Bybit futures, daily + ccxt:BINANCE:ETH/USDT@4H # CCXT, Binance spot, 4-hour + capitalcom:EURUSD@1H # Capital.com, EURUSD, 1-hour + +The ``@timeframe`` suffix is only required in ``pyne run`` (the main data +source). 
In ``request.security()`` the timeframe is a separate parameter, +so the provider string contains only ``<provider>:<symbol>``. +""" + +from __future__ import annotations + +from dataclasses import dataclass + + +@dataclass(frozen=True) +class ProviderString: + """Parsed components of a provider string.""" + + provider: str + """Plugin entry point name (e.g. ``"ccxt"``, ``"capitalcom"``).""" + + symbol: str + """Symbol in provider-specific format (e.g. ``"BYBIT:BTC/USDT:USDT"``).""" + + timeframe: str | None = None + """Timeframe in TradingView format (e.g. ``"1D"``), or None if not specified.""" + + +def is_provider_string(value: str) -> bool: + """ + Check whether a string looks like a provider string rather than a file path. + + A provider string contains ``:`` and its first segment (before the first ``:``) + does not look like a drive letter or path component. + + :param value: The string to check. + :return: True if it looks like a provider string. + """ + if ':' not in value: + return False + first_segment = value.split(':', 1)[0] + if len(first_segment) == 1 and first_segment.isalpha(): + return False + return True + + +def parse_provider_string(value: str, *, require_timeframe: bool = False) -> ProviderString: + """ + Parse a provider string into its components. + + :param value: The provider string (e.g. ``"ccxt:BYBIT:BTC/USDT:USDT@1D"``). + :param require_timeframe: If True, raise ValueError when ``@timeframe`` is missing. + :return: A :class:`ProviderString` with the parsed components. + :raises ValueError: If the string is malformed or timeframe is required but missing. + """ + if ':' not in value: + raise ValueError( + f"Invalid provider string: '{value}'. " + f"Expected format: provider:symbol[@timeframe]" + ) + + provider, rest = value.split(':', 1) + + if not provider: + raise ValueError( + f"Invalid provider string: '{value}'. " + f"Provider name is empty." + ) + + if not rest: + raise ValueError( + f"Invalid provider string: '{value}'. 
" + f"Symbol is missing after '{provider}:'." + ) + + timeframe = None + if '@' in rest: + symbol_part, timeframe = rest.rsplit('@', 1) + if not timeframe: + raise ValueError( + f"Invalid provider string: '{value}'. " + f"Timeframe is empty after '@'." + ) + if not symbol_part: + raise ValueError( + f"Invalid provider string: '{value}'. " + f"Symbol is missing before '@'." + ) + rest = symbol_part + + if require_timeframe and timeframe is None: + raise ValueError( + f"Timeframe is required. Use '@<timeframe>' to specify it: " + f"'{value}@<timeframe>' (e.g. '{value}@1D')" + ) + + return ProviderString(provider=provider, symbol=rest, timeframe=timeframe) diff --git a/tests/t00_pynecore/core/test_016_live_provider.py b/tests/t00_pynecore/core/test_016_live_provider.py new file mode 100644 index 0000000..5c74a7b --- /dev/null +++ b/tests/t00_pynecore/core/test_016_live_provider.py @@ -0,0 +1,64 @@ +""" +Tests for LiveProviderPlugin and BarUpdate. +""" + +from abc import ABCMeta +from dataclasses import fields + +from pynecore.core.plugin import Plugin, ProviderPlugin, LiveProviderPlugin +from pynecore.core.plugin.live_provider import BarUpdate +from pynecore.types.ohlcv import OHLCV + + +def __test_live_provider_inherits_from_provider__(): + """LiveProviderPlugin is a subclass of ProviderPlugin""" + assert issubclass(LiveProviderPlugin, ProviderPlugin) + assert issubclass(LiveProviderPlugin, Plugin) + + +def __test_live_provider_is_abstract__(): + """LiveProviderPlugin cannot be instantiated directly""" + assert isinstance(LiveProviderPlugin, ABCMeta) + + +def __test_live_provider_has_abstract_methods__(): + """LiveProviderPlugin requires connect, disconnect, is_connected, watch_ohlcv""" + abstract_methods = LiveProviderPlugin.__abstractmethods__ + assert 'connect' in abstract_methods + assert 'disconnect' in abstract_methods + assert 'is_connected' in abstract_methods + assert 'watch_ohlcv' in abstract_methods + + +def __test_live_provider_inherits_provider_abstract_methods__(): 
"""LiveProviderPlugin also requires ProviderPlugin abstract methods""" + abstract_methods = LiveProviderPlugin.__abstractmethods__ + assert 'download_ohlcv' in abstract_methods + assert 'to_tradingview_timeframe' in abstract_methods + assert 'to_exchange_timeframe' in abstract_methods + assert 'get_list_of_symbols' in abstract_methods + assert 'update_symbol_info' in abstract_methods + + +def __test_live_provider_default_reconnect_values__(): + """LiveProviderPlugin has default reconnect configuration""" + assert LiveProviderPlugin.reconnect_delay == 1.0 + assert LiveProviderPlugin.max_reconnect_attempts == 10 + + +def __test_bar_update_fields__(): + """BarUpdate has ohlcv and is_closed fields""" + field_names = {f.name for f in fields(BarUpdate)} + assert field_names == {'ohlcv', 'is_closed'} + + +def __test_bar_update_creation__(): + """BarUpdate can be created with OHLCV and is_closed flag""" + ohlcv = OHLCV(timestamp=1000, open=100.0, high=105.0, low=95.0, close=102.0, volume=1000.0) + + closed = BarUpdate(ohlcv=ohlcv, is_closed=True) + assert closed.ohlcv is ohlcv + assert closed.is_closed is True + + update = BarUpdate(ohlcv=ohlcv, is_closed=False) + assert update.is_closed is False diff --git a/tests/t00_pynecore/core/test_017_provider_string.py b/tests/t00_pynecore/core/test_017_provider_string.py new file mode 100644 index 0000000..d2536ed --- /dev/null +++ b/tests/t00_pynecore/core/test_017_provider_string.py @@ -0,0 +1,134 @@ +""" +Tests for provider string parsing. 
+ +Provider string format: <provider>:<symbol>@<timeframe> +Examples: + ccxt:BYBIT:BTC/USDT:USDT@1D + ccxt:BINANCE:ETH/USDT@4H + capitalcom:EURUSD@1H +""" + +import pytest + +from pynecore.core.provider_string import ( + ProviderString, + is_provider_string, + parse_provider_string, +) + + +# --- is_provider_string --- + +def __test_is_provider_string_with_ccxt__(): + """CCXT provider string is recognized""" + assert is_provider_string("ccxt:BYBIT:BTC/USDT:USDT@1D") is True + + +def __test_is_provider_string_with_capitalcom__(): + """Capital.com provider string is recognized""" + assert is_provider_string("capitalcom:EURUSD@1H") is True + + +def __test_is_provider_string_file_path__(): + """Regular file paths are not provider strings""" + assert is_provider_string("data.csv") is False + assert is_provider_string("path/to/data.ohlcv") is False + assert is_provider_string("my_data") is False + + +def __test_is_provider_string_windows_drive__(): + """Windows drive letters (single char before colon) are not provider strings""" + assert is_provider_string("C:data.csv") is False + assert is_provider_string("D:path") is False + + +def __test_is_provider_string_without_colon__(): + """Strings without colon are not provider strings""" + assert is_provider_string("ccxt") is False + + +# --- parse_provider_string --- + +def __test_parse_ccxt_futures_with_timeframe__(): + """Parse CCXT futures provider string with timeframe""" + result = parse_provider_string("ccxt:BYBIT:BTC/USDT:USDT@1D") + assert result == ProviderString(provider="ccxt", symbol="BYBIT:BTC/USDT:USDT", timeframe="1D") + + +def __test_parse_ccxt_spot_with_timeframe__(): + """Parse CCXT spot provider string with timeframe""" + result = parse_provider_string("ccxt:BINANCE:ETH/USDT@4H") + assert result == ProviderString(provider="ccxt", symbol="BINANCE:ETH/USDT", timeframe="4H") + + +def __test_parse_capitalcom_with_timeframe__(): + """Parse Capital.com provider string with timeframe""" + result = parse_provider_string("capitalcom:EURUSD@1H") 
+ assert result == ProviderString(provider="capitalcom", symbol="EURUSD", timeframe="1H") + + +def __test_parse_without_timeframe__(): + """Parse provider string without timeframe (for request.security)""" + result = parse_provider_string("ccxt:BYBIT:BTC/USDT:USDT") + assert result == ProviderString(provider="ccxt", symbol="BYBIT:BTC/USDT:USDT", timeframe=None) + + +def __test_parse_minute_timeframe__(): + """Parse provider string with minute timeframe""" + result = parse_provider_string("ccxt:BINANCE:BTC/USDT@15") + assert result == ProviderString(provider="ccxt", symbol="BINANCE:BTC/USDT", timeframe="15") + + +def __test_parse_second_timeframe__(): + """Parse provider string with second timeframe""" + result = parse_provider_string("ccxt:BINANCE:BTC/USDT@5S") + assert result == ProviderString(provider="ccxt", symbol="BINANCE:BTC/USDT", timeframe="5S") + + +def __test_parse_require_timeframe_present__(): + """require_timeframe passes when timeframe is present""" + result = parse_provider_string("ccxt:BYBIT:BTC/USDT:USDT@1D", require_timeframe=True) + assert result.timeframe == "1D" + + +def __test_parse_require_timeframe_missing__(): + """require_timeframe raises ValueError when timeframe is missing""" + with pytest.raises(ValueError, match="Timeframe is required"): + parse_provider_string("ccxt:BYBIT:BTC/USDT:USDT", require_timeframe=True) + + +def __test_parse_no_colon__(): + """String without colon raises ValueError""" + with pytest.raises(ValueError, match="Invalid provider string"): + parse_provider_string("ccxt") + + +def __test_parse_empty_provider__(): + """Empty provider name raises ValueError""" + with pytest.raises(ValueError, match="Provider name is empty"): + parse_provider_string(":BYBIT:BTC/USDT@1D") + + +def __test_parse_empty_symbol__(): + """Empty symbol raises ValueError""" + with pytest.raises(ValueError, match="Symbol is missing"): + parse_provider_string("ccxt:") + + +def __test_parse_empty_timeframe__(): + """Empty timeframe after @ raises 
ValueError""" + with pytest.raises(ValueError, match="Timeframe is empty"): + parse_provider_string("ccxt:BYBIT:BTC/USDT@") + + +def __test_parse_empty_symbol_with_timeframe__(): + """Empty symbol before @ raises ValueError""" + with pytest.raises(ValueError, match="Symbol is missing"): + parse_provider_string("ccxt:@1D") + + +def __test_provider_string_frozen__(): + """ProviderString is immutable (frozen dataclass)""" + result = parse_provider_string("ccxt:BINANCE:BTC/USDT@1D") + with pytest.raises(AttributeError): + result.provider = "other" From 8e3e779eb9b0484525d48f6cda6d1d365052146b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Fri, 3 Apr 2026 14:41:55 +0200 Subject: [PATCH 12/64] feat(cli): add provider-based data loading to run command Support provider strings as the `run` data source and download historical OHLCV data on demand. Add flexible `--from` and `--to` parsing for dates, day offsets, and bar counts, and update progress display and help text for the new provider mode. 
--- src/pynecore/cli/commands/run.py | 339 +++++++++++++++++++++---------- 1 file changed, 228 insertions(+), 111 deletions(-) diff --git a/src/pynecore/cli/commands/run.py b/src/pynecore/cli/commands/run.py index bdc197f..cdd77c3 100644 --- a/src/pynecore/cli/commands/run.py +++ b/src/pynecore/cli/commands/run.py @@ -6,11 +6,11 @@ import tomllib from pathlib import Path -from datetime import datetime +from datetime import datetime, timedelta, UTC -from typer import Option, Argument, secho, Exit +from typer import Option, Argument, secho, Exit, colors from rich.progress import (Progress, SpinnerColumn, TextColumn, BarColumn, - ProgressColumn, Task) + ProgressColumn, Task, TimeElapsedColumn, TimeRemainingColumn) from rich.text import Text from rich.console import Console @@ -24,6 +24,7 @@ from pynecore.core.syminfo import SymInfo from pynecore.core.script_runner import ScriptRunner from pynecore.pynesys.compiler import PyneComp +from pynecore.core.provider_string import is_provider_string, parse_provider_string from ...cli.utils.api_error_handler import APIErrorHandler __all__ = [] @@ -61,19 +62,144 @@ def render(self, task: Task) -> Text: return Text(f"{minutes:02d}:{seconds:06.3f}", style="cyan") +def _parse_time_value(value: str | None, *, allow_bars: bool = False) -> datetime | int | None: + """ + Parse a --from or --to parameter value. + + :param value: The raw string value. + :param allow_bars: If True, allow negative numbers as bar counts. + :return: A datetime, a negative int (bar count), or None. 
+ """ + if value is None: + return None + value = value.strip() + + # Negative number = bar count (only for --from in provider mode) + if allow_bars and value.startswith('-'): + try: + bars = int(value) + return bars + except ValueError: + pass + + # Positive number = days back + try: + days = int(value) + if days < 0: + secho("Error: Days cannot be negative (use negative numbers only with provider mode for bar count)", + err=True, fg=colors.RED) + raise Exit(1) + return (datetime.now(UTC) - timedelta(days=days)).replace(second=0, microsecond=0) + except ValueError: + pass + + # Date string + try: + return datetime.fromisoformat(value) + except ValueError: + secho(f"Error: Invalid date or number: '{value}'", err=True, fg=colors.RED) + raise Exit(1) + + +def _download_provider_data(provider_str: str, time_from_str: str | None) -> tuple[Path, SymInfo]: + """ + Download historical data from a provider and return the OHLCV path and SymInfo. + + :param provider_str: Provider string (e.g. "ccxt:BYBIT:BTC/USDT:USDT@1D"). + :param time_from_str: The --from parameter value (date, days, or -bars). + :return: Tuple of (ohlcv_path, syminfo). 
+ """ + from pynecore.core.plugin import load_plugin, ProviderPlugin + from pynecore.core.config import ensure_config + from pynecore.lib.timeframe import in_seconds + + ps = parse_provider_string(provider_str, require_timeframe=True) + + # Load provider plugin + provider_class = load_plugin(ps.provider) + if not issubclass(provider_class, ProviderPlugin): + secho(f"Plugin '{ps.provider}' is not a data provider.", err=True, fg=colors.RED) + raise Exit(1) + + # Parse --from (required in provider mode) + if not time_from_str: + secho("Error: --from / -f is required in provider mode.\n" + " Examples: -f 30 (30 days back), -f -500 (500 bars back), -f 2025-01-01", + err=True, fg=colors.RED) + raise Exit(1) + + time_from_value = _parse_time_value(time_from_str, allow_bars=True) + time_to_dt = datetime.now(UTC).replace(second=0, microsecond=0) + + # Convert bar count to time range + if isinstance(time_from_value, int) and time_from_value < 0: + bar_count = abs(time_from_value) + tf_seconds = in_seconds(ps.timeframe) + time_from_dt = time_to_dt - timedelta(seconds=tf_seconds * bar_count) + else: + time_from_dt = time_from_value + + # Load config + config = None + if hasattr(provider_class, 'Config') and provider_class.Config is not None: + config = ensure_config(provider_class.Config, + app_state.config_dir / 'plugins' / f'{ps.provider}.toml') + + # Create provider instance + provider_instance: ProviderPlugin = provider_class( + symbol=ps.symbol, timeframe=ps.timeframe, + ohlv_dir=app_state.data_dir, config=config + ) + + # Fetch symbol info + with Progress(SpinnerColumn(finished_text="[green]✓"), TextColumn("{task.description}")) as progress: + task = progress.add_task("Fetching symbol info...", total=1) + syminfo = provider_instance.get_symbol_info(force_update=not provider_instance.is_symbol_info_exists()) + progress.update(task, completed=1) + + # Download OHLCV data (always fresh in provider mode) + with provider_instance as ohlcv_writer: + ohlcv_writer.seek(0) + 
ohlcv_writer.truncate() + + time_from_dl = time_from_dt.replace(tzinfo=None) if time_from_dt.tzinfo else time_from_dt + time_to_dl = time_to_dt.replace(tzinfo=None) if time_to_dt.tzinfo else time_to_dt + + total_seconds = int((time_to_dl - time_from_dl).total_seconds()) + + with Progress( + SpinnerColumn(finished_text="[green]✓"), + TextColumn("{task.description}"), + DateColumn(time_from_dl), + BarColumn(), + TimeElapsedColumn(), + "/", + TimeRemainingColumn(), + ) as progress: + task = progress.add_task("Downloading OHLCV data...", total=total_seconds) + + def cb_progress(current_time: datetime): + elapsed_seconds = int((current_time - time_from_dl).total_seconds()) + progress.update(task, completed=elapsed_seconds) + + provider_instance.download_ohlcv(time_from_dl, time_to_dl, on_progress=cb_progress) + + return provider_instance.ohlcv_path, syminfo + + @app.command(cls=PluggableCommand) def run( script: Path = Argument(..., dir_okay=False, file_okay=True, help="Script to run (.py or .pine)"), - data: Path = Argument(..., dir_okay=False, file_okay=True, - help="Data file to use (*.ohlcv)"), - time_from: datetime | None = Option(None, '--from', '-f', - formats=["%Y-%m-%d", "%Y-%m-%d %H:%M:%S"], - help="Start date (UTC), if not specified, will use the " - "first date in the data"), - time_to: datetime | None = Option(None, '--to', '-t', - formats=["%Y-%m-%d", "%Y-%m-%d %H:%M:%S"], - help="End date (UTC), if not specified, will use the last " - "date in the data"), + data: str = Argument(..., + help="Data file (*.ohlcv, *.csv) or provider string " + "(e.g. ccxt:BYBIT:BTC/USDT:USDT@1D)"), + time_from: str | None = Option(None, '--from', '-f', + metavar="[DATE|DAYS|-BARS]", + help="Start: date (2025-01-01), days back (30), " + "or -N bars back (-500). 
Required in provider mode."), + time_to: str | None = Option(None, '--to', '-t', + metavar="[DATE|DAYS]", + help="End: date or days from start (default: end of data or now)"), plot_path: Path | None = Option(None, "--plot", "-pp", help="Path to save the plot data", rich_help_panel="Out Path Options"), @@ -104,15 +230,22 @@ def run( Similarly, if [bold]data[/] path is a name without full path, it will be searched in the [italic]"workdir/data"[/] directory. The [bold]plot_path[/], [bold]strat_path[/], and [bold]trade_path[/] work the same way - if they are names without full paths, they will be saved in the [italic]"workdir/output"[/] directory. - + + [bold]Data Source:[/bold] + The [bold]data[/] argument accepts either a file path or a provider string: + \b + File mode: pyne run script.py data.csv + Provider mode: pyne run script.py ccxt:BYBIT:BTC/USDT:USDT@1D -f -500 + + In provider mode, historical data is downloaded automatically. The --from/-f parameter + is required and accepts: date (2025-01-01), days back (30), or -N bars back (-500). + [bold]Pine Script Support:[/bold] - Also Pine Script (.pine) files could be automatically compiled to Python (.py) before execution, if the - file is newer than the [italic]py[/] file or if the [italic].py[/] file doesn't exist. The compiled [italic].py[/] file will be saved - into the same folder as the original [italic].pine[/] file. - A valid [bold]PyneSys API[/bold] key is required for Pine Script compilation. You can get one at [blue]https://pynesys.io[/blue]. - - [bold]Data Support:[/bold] - Supports CSV, TXT, JSON, and OHLCV data files. Non-OHLCV files are automatically converted. Symbol is auto-detected from filename. + Pine Script (.pine) files are automatically compiled to Python (.py) before execution. + A valid [bold]PyneSys API[/bold] key is required. Get one at [blue]https://pynesys.io[/blue]. + + [bold]Data Formats:[/bold] + Supports CSV, TXT, JSON, and OHLCV data files. 
Non-OHLCV files are automatically converted. """ # noqa # Expand script path @@ -186,117 +319,101 @@ def run( secho(f"Script file '{script}' not found!", fg="red", err=True) raise Exit(1) - # Expand data path first - convert relative paths to absolute paths in workdir/data - if len(data.parts) == 1: - data = app_state.data_dir / data - - # Store the original suffix to check what user provided - original_suffix = data.suffix + # --- Data resolution: provider string or file path --- + provider_mode = is_provider_string(data) + + if provider_mode: + # Provider mode: download historical data, get syminfo + data_path, syminfo = _download_provider_data(data, time_from) + else: + # File mode: resolve path, convert if needed + data_path = Path(data) + + if len(data_path.parts) == 1: + data_path = app_state.data_dir / data_path + + if data_path.suffix == "": + ohlcv_path = data_path.with_suffix(".ohlcv") + csv_path = data_path.with_suffix(".csv") + if ohlcv_path.exists(): + data_path = ohlcv_path + elif csv_path.exists(): + data_path = csv_path + else: + data_path = ohlcv_path - # Check file format and extension - if data.suffix == "": - # No extension provided - check if .ohlcv exists, otherwise look for .csv - ohlcv_path = data.with_suffix(".ohlcv") - csv_path = data.with_suffix(".csv") + if data_path.suffix != ".ohlcv": + try: + converter = DataConverter() + if converter.is_conversion_required(data_path): + detected_symbol, detected_provider = DataConverter.guess_symbol_from_filename(data_path) + if not detected_symbol: + detected_symbol = data_path.stem.upper() + with Progress( + SpinnerColumn(finished_text="[green]✓"), + TextColumn("[progress.description]{task.description}"), + console=console + ) as progress: + task = progress.add_task(f"Converting {data_path.suffix} to OHLCV format...", total=1) + converter.convert_to_ohlcv( + data_path, provider=detected_provider, + symbol=detected_symbol, force=True + ) + data_path = data_path.with_suffix(".ohlcv") + 
progress.update(task, completed=1) + else: + data_path = data_path.with_suffix(".ohlcv") + except (DataFormatError, ConversionError) as e: + secho(f"Conversion failed: {e}", fg="red", err=True) + secho("Please convert the file manually:", fg="red") + secho(f"pyne data convert-from {data_path}", fg="yellow") + raise Exit(1) - if ohlcv_path.exists(): - data = ohlcv_path - elif csv_path.exists(): - data = csv_path - else: - # Default to .ohlcv for error message - data = ohlcv_path + if not data_path.exists(): + secho(f"Data file not found: {data_path.name}", fg="red", err=True) + raise Exit(1) - # Now handle conversion if needed - if data.suffix != ".ohlcv": - # Has extension but not .ohlcv - automatically convert try: - converter = DataConverter() - - # Check if conversion is needed - if converter.is_conversion_required(data): - # Auto-detect symbol and provider from filename - detected_symbol, detected_provider = DataConverter.guess_symbol_from_filename(data) - - if not detected_symbol: - detected_symbol = data.stem.upper() - - with Progress( - SpinnerColumn(finished_text="[green]✓"), - TextColumn("[progress.description]{task.description}"), - console=console - ) as progress: - task = progress.add_task(f"Converting {data.suffix} to OHLCV format...", total=1) - - # Perform conversion with smart defaults - converter.convert_to_ohlcv( - data, - provider=detected_provider, - symbol=detected_symbol, - force=True - ) - - # After conversion, the OHLCV file has the same name but .ohlcv extension - data = data.with_suffix(".ohlcv") - - progress.update(task, completed=1) - else: - # File is already up-to-date, use existing OHLCV file - data = data.with_suffix(".ohlcv") - - except (DataFormatError, ConversionError) as e: - secho(f"Conversion failed: {e}", fg="red", err=True) - secho("Please convert the file manually:", fg="red") - secho(f"pyne data convert-from {data}", fg="yellow") + syminfo = SymInfo.load_toml(data_path.with_suffix(".toml")) + except FileNotFoundError: + 
secho(f"Symbol info file '{data_path.with_suffix('.toml')}' not found!", fg="red", err=True) raise Exit(1) - # Final check if data exists - if not data.exists(): - secho(f"Data file not found: {data.name}", fg="red", err=True) - raise Exit(1) - - # Ensure .csv extension for plot path + # --- Output paths --- if plot_path and plot_path.suffix != ".csv": plot_path = plot_path.with_suffix(".csv") if not plot_path: plot_path = app_state.output_dir / f"{script.stem}.csv" - # Ensure .csv extension for strategy path if strat_path and strat_path.suffix != ".csv": strat_path = strat_path.with_suffix(".csv") if not strat_path: strat_path = app_state.output_dir / f"{script.stem}_strat.csv" - # Ensure .csv extension for trade path if trade_path and trade_path.suffix != ".csv": trade_path = trade_path.with_suffix(".csv") if not trade_path: trade_path = app_state.output_dir / f"{script.stem}_trade.csv" - # Get symbol info for the data - try: - syminfo = SymInfo.load_toml(data.with_suffix(".toml")) - except FileNotFoundError: - secho(f"Symbol info file '{data.with_suffix('.toml')}' not found!", fg="red", err=True) - raise Exit(1) + # --- Open data and run --- + with OHLCVReader(data_path) as reader: + # Parse time range + time_from_dt = _parse_time_value(time_from) if time_from and not provider_mode else None + time_to_dt = _parse_time_value(time_to) if time_to else None - # Open data file - with OHLCVReader(data) as reader: - if not time_from: - time_from = reader.start_datetime - if not time_to: - time_to = reader.end_datetime + if not time_from_dt: + time_from_dt = reader.start_datetime + if not time_to_dt: + time_to_dt = reader.end_datetime - # Convert to UTC timestamps BEFORE removing timezone info - # This ensures we use the correct UTC timestamps for the OHLCV reader - time_from_ts = int(time_from.timestamp()) - time_to_ts = int(time_to.timestamp()) + time_from_ts = int(time_from_dt.timestamp()) + time_to_ts = int(time_to_dt.timestamp()) - # Now we can safely remove 
timezone for display purposes - time_from = time_from.replace(tzinfo=None) - time_to = time_to.replace(tzinfo=None) + # Remove timezone for display purposes + time_from_display = time_from_dt.replace(tzinfo=None) + time_to_display = time_to_dt.replace(tzinfo=None) - total_seconds = int((time_to - time_from).total_seconds()) + total_seconds = int((time_to_display - time_from_display).total_seconds()) # Get the iterator using the correct UTC timestamps size = reader.get_size(time_from_ts, time_to_ts) @@ -360,7 +477,7 @@ def run( with Progress( SpinnerColumn(finished_text="[green]✓"), TextColumn("{task.description}"), - DateColumn(time_from), + DateColumn(time_from_display), BarColumn(), CustomTimeElapsedColumn(), "/", @@ -391,8 +508,8 @@ def progress_worker(): # Update progress if we have new data if current_time is not None: if current_time == datetime.max: - current_time = time_to - elapsed_seconds = int((current_time - time_from).total_seconds()) + current_time = time_to_display + elapsed_seconds = int((current_time - time_from_display).total_seconds()) # Only update if time changed (to avoid redundant updates) if elapsed_seconds != last_update: progress.update(task, completed=elapsed_seconds) @@ -419,7 +536,7 @@ def cb_progress(current_time: datetime | None): runner.run(on_progress=cb_progress) # Ensure final progress update - progress_queue.put(time_to) + progress_queue.put(time_to_display) time.sleep(0.05) # Give worker thread time to process final update progress.update(task, completed=total_seconds) From e770c8d4c6cb205e1beded7c3f11dfa92732ab50 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Fri, 3 Apr 2026 16:51:45 +0200 Subject: [PATCH 13/64] feat: add live data streaming support to provider runs Introduce a live async-to-sync OHLCV bridge and wire `run` to continue from historical data into live mode with graceful shutdown. 
Extend `CCXTProvider` to implement live websocket streaming and add unit and live integration tests, with pytest markers to skip live tests by default. --- pytest.ini | 5 +- src/pynecore/cli/commands/run.py | 207 ++++++++++++------ src/pynecore/core/live_runner.py | 167 ++++++++++++++ src/pynecore/core/plugin/live_provider.py | 14 ++ src/pynecore/providers/ccxt.py | 88 +++++++- .../t00_pynecore/core/test_018_live_runner.py | 184 ++++++++++++++++ .../data/test_003_ccxt_live_provider.py | 150 +++++++++++++ 7 files changed, 740 insertions(+), 75 deletions(-) create mode 100644 src/pynecore/core/live_runner.py create mode 100644 tests/t00_pynecore/core/test_018_live_runner.py create mode 100644 tests/t00_pynecore/data/test_003_ccxt_live_provider.py diff --git a/pytest.ini b/pytest.ini index 4665b7e..7de1ebf 100644 --- a/pytest.ini +++ b/pytest.ini @@ -6,4 +6,7 @@ log_cli_level = DEBUG log_cli_format = %(asctime)s %(levelname)6s %(module_func_line)30s - %(message)s log_cli_date_format = %Y-%m-%d %H:%M:%S -addopts = --import-mode=importlib -rs -x --spec --ignore-glob="**/data/*modified.py" +addopts = --import-mode=importlib -rs -x --spec --ignore-glob="**/data/*modified.py" -m "not live" + +markers = + live: tests that connect to live exchange websockets (use -m live to run) diff --git a/src/pynecore/cli/commands/run.py b/src/pynecore/cli/commands/run.py index cdd77c3..5d2dbb9 100644 --- a/src/pynecore/cli/commands/run.py +++ b/src/pynecore/cli/commands/run.py @@ -25,6 +25,7 @@ from pynecore.core.script_runner import ScriptRunner from pynecore.pynesys.compiler import PyneComp from pynecore.core.provider_string import is_provider_string, parse_provider_string +from pynecore.core.live_runner import live_ohlcv_generator from ...cli.utils.api_error_handler import APIErrorHandler __all__ = [] @@ -101,13 +102,24 @@ def _parse_time_value(value: str | None, *, allow_bars: bool = False) -> datetim raise Exit(1) -def _download_provider_data(provider_str: str, time_from_str: str | 
None) -> tuple[Path, SymInfo]: +class _ProviderData: + """Result of provider data download, including the provider instance for live mode.""" + + def __init__(self, ohlcv_path: Path, syminfo: 'SymInfo', provider_instance=None, + parsed_string=None): + self.ohlcv_path = ohlcv_path + self.syminfo = syminfo + self.provider_instance = provider_instance + self.parsed_string = parsed_string + + +def _download_provider_data(provider_str: str, time_from_str: str | None) -> _ProviderData: """ - Download historical data from a provider and return the OHLCV path and SymInfo. + Download historical data from a provider and return the result. :param provider_str: Provider string (e.g. "ccxt:BYBIT:BTC/USDT:USDT@1D"). :param time_from_str: The --from parameter value (date, days, or -bars). - :return: Tuple of (ohlcv_path, syminfo). + :return: _ProviderData with ohlcv_path, syminfo, and provider instance. """ from pynecore.core.plugin import load_plugin, ProviderPlugin from pynecore.core.config import ensure_config @@ -184,7 +196,12 @@ def cb_progress(current_time: datetime): provider_instance.download_ohlcv(time_from_dl, time_to_dl, on_progress=cb_progress) - return provider_instance.ohlcv_path, syminfo + return _ProviderData( + ohlcv_path=provider_instance.ohlcv_path, + syminfo=syminfo, + provider_instance=provider_instance, + parsed_string=ps, + ) @app.command(cls=PluggableCommand) @@ -214,6 +231,13 @@ def run( help="PyneSys API key for compilation (overrides configuration file)", envvar="PYNESYS_API_KEY", rich_help_panel="Compilation Options"), + live: bool = Option(False, "--live", "-l", + help="Continue with live data after historical phase " + "(provider mode only)"), + shutdown_timeout: float = Option(120.0, "--shutdown-timeout", + help="Max seconds to wait for graceful shutdown " + "(0 = wait forever)", + rich_help_panel="Live Options"), security: list[str] | None = Option(None, "--security", "-sec", help='Security data: "TIMEFRAME=data_name" or ' 
'"SYMBOL:TIMEFRAME=data_name"', @@ -321,10 +345,16 @@ def run( # --- Data resolution: provider string or file path --- provider_mode = is_provider_string(data) + provider_data = None + + if live and not provider_mode: + secho("Error: --live is only available in provider mode.", err=True, fg=colors.RED) + raise Exit(1) if provider_mode: # Provider mode: download historical data, get syminfo - data_path, syminfo = _download_provider_data(data, time_from) + provider_data = _download_provider_data(data, time_from) + data_path, syminfo = provider_data.ohlcv_path, provider_data.syminfo else: # File mode: resolve path, convert if needed data_path = Path(data) @@ -419,6 +449,26 @@ def run( size = reader.get_size(time_from_ts, time_to_ts) ohlcv_iter = reader.read_from(time_from_ts, time_to_ts) + # Chain live iterator after historical if --live + if live and provider_data: + import itertools + from pynecore.core.plugin.live_provider import LiveProviderPlugin + + if not isinstance(provider_data.provider_instance, LiveProviderPlugin): + secho(f"Plugin '{provider_data.parsed_string.provider}' does not support live data.", + err=True, fg=colors.RED) + raise Exit(1) + + live_iter = live_ohlcv_generator( + provider=provider_data.provider_instance, + symbol=provider_data.parsed_string.symbol, + timeframe=provider_data.parsed_string.timeframe, + last_historical_timestamp=time_to_ts, + shutdown_timeout=shutdown_timeout, + ) + ohlcv_iter = itertools.chain(ohlcv_iter, live_iter) + size = 0 + # Parse security data mappings security_data: dict[str, str | Path] | None = None if security: @@ -473,77 +523,90 @@ def run( # Mark as completed loading_progress.update(loading_task, completed=1) - # Now run with the main progress bar - with Progress( - SpinnerColumn(finished_text="[green]✓"), - TextColumn("{task.description}"), - DateColumn(time_from_display), - BarColumn(), - CustomTimeElapsedColumn(), - "/", - CustomTimeRemainingColumn(), - ) as progress: - task = progress.add_task( - 
description="Running script...", - total=total_seconds, - ) + if live: + # Live mode: spinner instead of progress bar (no known end time) + with Progress( + SpinnerColumn(), + TextColumn("{task.description}"), + CustomTimeElapsedColumn(), + ) as progress: + task = progress.add_task(description="Live streaming...", total=None) - # Create queue for progress updates - progress_queue = queue.Queue() - stop_event = threading.Event() + def cb_progress_live(current_time: datetime | None): + if current_time: + progress.update(task, description=f"Live — {current_time:%Y-%m-%d %H:%M}") - def progress_worker(): - """Worker thread that updates progress bar at 60Hz""" - last_update = 0 - while not stop_event.is_set(): - try: - # Drain all pending updates - current_time = None - while True: - try: - current_time = progress_queue.get_nowait() - except queue.Empty: - break - - # Update progress if we have new data - if current_time is not None: - if current_time == datetime.max: - current_time = time_to_display - elapsed_seconds = int((current_time - time_from_display).total_seconds()) - # Only update if time changed (to avoid redundant updates) - if elapsed_seconds != last_update: - progress.update(task, completed=elapsed_seconds) - last_update = elapsed_seconds - except Exception: # noqa - pass # Ignore any errors in worker thread - - # Wait ~33.33ms (30Hz refresh rate) - time.sleep(1 / 30) - - # Start worker thread - worker = threading.Thread(target=progress_worker, daemon=True) - worker.start() - - def cb_progress(current_time: datetime | None): - """Callback that just puts timestamp in queue - near zero overhead""" try: - progress_queue.put_nowait(current_time) - except queue.Full: - pass # If queue is full, skip this update + runner.run(on_progress=cb_progress_live) + except KeyboardInterrupt: + secho("\nLive streaming stopped.", fg=colors.YELLOW) - try: - # Run the script - runner.run(on_progress=cb_progress) + else: + # Batch mode: progress bar with time range + with 
Progress( + SpinnerColumn(finished_text="[green]✓"), + TextColumn("{task.description}"), + DateColumn(time_from_display), + BarColumn(), + CustomTimeElapsedColumn(), + "/", + CustomTimeRemainingColumn(), + ) as progress: + task = progress.add_task( + description="Running script...", + total=total_seconds, + ) + + # Create queue for progress updates + progress_queue = queue.Queue() + stop_event = threading.Event() + + def progress_worker(): + """Worker thread that updates progress bar at 30Hz""" + last_update = 0 + while not stop_event.is_set(): + try: + # Drain all pending updates + current_time = None + while True: + try: + current_time = progress_queue.get_nowait() + except queue.Empty: + break + + # Update progress if we have new data + if current_time is not None: + if current_time == datetime.max: + current_time = time_to_display + elapsed_seconds = int( + (current_time - time_from_display).total_seconds()) + if elapsed_seconds != last_update: + progress.update(task, completed=elapsed_seconds) + last_update = elapsed_seconds + except Exception: # noqa + pass + + time.sleep(1 / 30) + + # Start worker thread + worker = threading.Thread(target=progress_worker, daemon=True) + worker.start() + + def cb_progress(current_time: datetime | None): + """Callback that just puts timestamp in queue""" + try: + progress_queue.put_nowait(current_time) + except queue.Full: + pass - # Ensure final progress update - progress_queue.put(time_to_display) - time.sleep(0.05) # Give worker thread time to process final update + try: + runner.run(on_progress=cb_progress) - progress.update(task, completed=total_seconds) - finally: - # Stop worker thread - stop_event.set() - worker.join(timeout=0.1) # Wait max 100ms for thread to finish + progress_queue.put(time_to_display) + time.sleep(0.05) - # Final update to ensure completion - progress.refresh() + progress.update(task, completed=total_seconds) + finally: + stop_event.set() + worker.join(timeout=0.1) + progress.refresh() diff --git 
a/src/pynecore/core/live_runner.py b/src/pynecore/core/live_runner.py new file mode 100644 index 0000000..1419805 --- /dev/null +++ b/src/pynecore/core/live_runner.py @@ -0,0 +1,167 @@ +""" +Async/sync bridge for live data streaming. + +Runs a LiveProviderPlugin's async watch_ohlcv() in a background thread +and yields OHLCV objects to the synchronous ScriptRunner via queue.Queue. +""" +from __future__ import annotations + +import asyncio +import logging +import time +import threading +from collections.abc import Iterator +from queue import Queue, Empty + +from pynecore.core.plugin.live_provider import LiveProviderPlugin, BarUpdate +from pynecore.types.ohlcv import OHLCV + +__all__ = ['live_ohlcv_generator'] + +logger = logging.getLogger(__name__) + +_SENTINEL = object() + + +def live_ohlcv_generator( + provider: LiveProviderPlugin, + symbol: str, + timeframe: str, + *, + last_historical_timestamp: int | None = None, + shutdown_timeout: float = 120.0, +) -> Iterator[OHLCV]: + """ + Bridge async watch_ohlcv() to a sync Iterator[OHLCV]. + + Spawns a background thread running asyncio, collects BarUpdate objects + via queue.Queue, filters for closed bars, and yields OHLCV. + + :param provider: A LiveProviderPlugin instance (already configured). + :param symbol: Symbol in provider-specific format. + :param timeframe: Timeframe in TradingView format. + :param last_historical_timestamp: Timestamp of the last historical bar to avoid duplicates. + :param shutdown_timeout: Max seconds to wait for graceful shutdown. 0 = wait forever. + :return: Iterator yielding OHLCV objects as bars close. + """ + bar_queue: Queue[BarUpdate | BaseException] = Queue(maxsize=100) + stop_event = threading.Event() + + async def _graceful_shutdown(): + """Poll can_shutdown(), then disconnect. 
Respects shutdown_timeout.""" + logger.info("Graceful shutdown started, polling can_shutdown()...") + + if shutdown_timeout > 0: + deadline = time.monotonic() + shutdown_timeout + else: + deadline = None + + while True: + try: + if await provider.can_shutdown(): + logger.info("Provider ready to shut down") + break + except Exception as e: + logger.warning("can_shutdown() raised: %s", e) + break + + if deadline is not None and time.monotonic() >= deadline: + logger.warning("Shutdown timeout (%.0fs) reached, forcing disconnect", + shutdown_timeout) + break + + await asyncio.sleep(1.0) + + try: + await provider.disconnect() + except Exception: + pass + + async def _async_loop(): + try: + await provider.connect() + logger.info("Live provider connected: %s %s@%s", + type(provider).__name__, symbol, timeframe) + + reconnect_attempts = 0 + + while not stop_event.is_set(): + try: + bar_update = await asyncio.wait_for( + provider.watch_ohlcv(symbol, timeframe), + timeout=2.0, + ) + reconnect_attempts = 0 + + if not bar_update.is_closed: + continue + + if (last_historical_timestamp is not None + and bar_update.ohlcv.timestamp <= last_historical_timestamp): + continue + + bar_queue.put(bar_update) + + except asyncio.TimeoutError: + continue + except asyncio.CancelledError: + break + except Exception as e: + reconnect_attempts += 1 + if reconnect_attempts > provider.max_reconnect_attempts: + logger.error("Max reconnect attempts reached (%d), stopping", + provider.max_reconnect_attempts) + bar_queue.put(e) + break + + logger.warning("Connection error (attempt %d/%d): %s", + reconnect_attempts, provider.max_reconnect_attempts, e) + + await provider.on_disconnect() + + delay = provider.reconnect_delay * (2 ** (reconnect_attempts - 1)) + await asyncio.sleep(delay) + + try: + await provider.connect() + await provider.on_reconnect() + logger.info("Reconnected successfully") + except Exception as reconn_err: + logger.error("Reconnect failed: %s", reconn_err) + + except Exception as
e: + bar_queue.put(e) + finally: + await _graceful_shutdown() + bar_queue.put(_SENTINEL) + + def _thread_target(): + asyncio.run(_async_loop()) + + thread = threading.Thread(target=_thread_target, daemon=True, name="live-provider") + thread.start() + + try: + while True: + try: + item = bar_queue.get(timeout=1.0) + except Empty: + if not thread.is_alive(): + break + continue + + if item is _SENTINEL: + break + + if isinstance(item, BaseException): + raise item + + yield item.ohlcv + + except KeyboardInterrupt: + logger.info("Live streaming interrupted by user") + finally: + stop_event.set() + # Wait for graceful shutdown to complete + join_timeout = (shutdown_timeout + 5.0) if shutdown_timeout > 0 else None + thread.join(timeout=join_timeout) diff --git a/src/pynecore/core/plugin/live_provider.py b/src/pynecore/core/plugin/live_provider.py index ae508f6..2cd55c6 100644 --- a/src/pynecore/core/plugin/live_provider.py +++ b/src/pynecore/core/plugin/live_provider.py @@ -75,3 +75,17 @@ async def on_disconnect(self) -> None: async def on_reconnect(self) -> None: """Called after a successful reconnection.""" + + # --- Shutdown hooks --- + + async def can_shutdown(self) -> bool: + """ + Whether the provider is ready to shut down. + + Override to delay shutdown while cleanup is in progress + (e.g. waiting for open orders to fill or positions to close). + Called every second during the graceful shutdown phase. + + :return: True if ready to shut down, False to keep waiting. 
+ """ + return True diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index 1146b54..905eb3c 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -5,7 +5,8 @@ from pathlib import Path import tomllib -from pynecore.core.plugin import ProviderPlugin, override +from pynecore.core.plugin import LiveProviderPlugin, override +from pynecore.core.plugin.live_provider import BarUpdate from pynecore.core.syminfo import SymInfo, SymInfoInterval, SymInfoSession from ..types.ohlcv import OHLCV @@ -48,7 +49,7 @@ class CCXTConfig: """Default API password (required by some exchanges like KuCoin)""" -class CCXTProvider(ProviderPlugin[CCXTConfig]): +class CCXTProvider(LiveProviderPlugin[CCXTConfig]): """ CCXT provider """ @@ -178,6 +179,10 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, if exchange_name in raw_toml and isinstance(raw_toml[exchange_name], dict): exchange_config = raw_toml[exchange_name] + self._async_client = None + self._last_bar_timestamp: int | None = None + self._last_bar_ohlcv: OHLCV | None = None + # Create the CCXT client self._client: ccxt.Exchange = getattr(ccxt, exchange_name)({ 'enableRateLimit': True, @@ -222,6 +227,7 @@ def update_symbol_info(self) -> SymInfo: assert self._client.markets market_details = self._client.markets[self.symbol] + assert self.timeframe is not None opening_hours, session_starts, session_ends = self._create_24_7_sessions() # Calculate minmove and pricescale from mintick @@ -274,6 +280,9 @@ def download_ohlcv(self, time_from: datetime, time_to: datetime, :param on_progress: Optional callback to call on progress. :param limit: Override the automatic chunk size. 
""" + assert self.symbol is not None + assert self.xchg_timeframe is not None + tf: datetime = time_from.replace(tzinfo=None) tt: datetime = (time_to if time_to is not None else datetime.now(UTC)).replace(tzinfo=None) @@ -324,3 +333,78 @@ def download_ohlcv(self, time_from: datetime, time_to: datetime, if on_progress: on_progress(tt) + + # --- LiveProviderPlugin methods --- + + @override + async def connect(self) -> None: + """Establish async CCXT connection for live data streaming.""" + try: + import ccxt.pro as ccxtpro + except ImportError: + raise ImportError( + "CCXT Pro is required for live data. Install it with: pip install ccxt" + ) + + exchange_name = self._client.id + + exchange_config = { + 'enableRateLimit': True, + } + if self.config: + exchange_config.update({k: v for k, v in vars(self.config).items() if v}) + + self._async_client = getattr(ccxtpro, exchange_name)(exchange_config) + + @override + async def disconnect(self) -> None: + """Close the async CCXT connection.""" + if hasattr(self, '_async_client') and self._async_client: + await self._async_client.close() + self._async_client = None + + @property + @override + def is_connected(self) -> bool: + """Whether the async CCXT connection is active.""" + return hasattr(self, '_async_client') and self._async_client is not None + + @override + async def watch_ohlcv(self, symbol: str, timeframe: str) -> BarUpdate: + """ + Wait for the next OHLCV update from the exchange websocket. + + Detects bar closure by tracking timestamp changes: when a new bar + timestamp appears, the previous bar is returned as closed. Intra-bar + updates (same timestamp) are returned with ``is_closed=False``. + + :param symbol: Symbol in CCXT format (e.g. "BTC/USDT:USDT"). + :param timeframe: Timeframe in TradingView format (e.g. "1D", "1", "4H"). + :return: BarUpdate with OHLCV data and closed/open status. 
+ """ + xchg_tf = self.to_exchange_timeframe(timeframe) + + while True: + candles = await self._async_client.watch_ohlcv(symbol, xchg_tf) + last = candles[-1] + timestamp = int(last[0] / 1000) + + current_ohlcv = OHLCV( + timestamp=timestamp, + open=float(last[1]), + high=float(last[2]), + low=float(last[3]), + close=float(last[4]), + volume=float(last[5]), + ) + + if (self._last_bar_timestamp is not None + and timestamp != self._last_bar_timestamp): + closed_bar = self._last_bar_ohlcv + self._last_bar_timestamp = timestamp + self._last_bar_ohlcv = current_ohlcv + return BarUpdate(ohlcv=closed_bar, is_closed=True) + + self._last_bar_timestamp = timestamp + self._last_bar_ohlcv = current_ohlcv + return BarUpdate(ohlcv=current_ohlcv, is_closed=False) diff --git a/tests/t00_pynecore/core/test_018_live_runner.py b/tests/t00_pynecore/core/test_018_live_runner.py new file mode 100644 index 0000000..e07b513 --- /dev/null +++ b/tests/t00_pynecore/core/test_018_live_runner.py @@ -0,0 +1,184 @@ +""" +Tests for the live runner async/sync bridge. 
+""" +import asyncio +import time + +from pynecore.core.live_runner import live_ohlcv_generator +from pynecore.core.plugin.live_provider import BarUpdate +from pynecore.types.ohlcv import OHLCV + + +def _make_ohlcv(timestamp: int, close: float = 100.0) -> OHLCV: + return OHLCV(timestamp=timestamp, open=close, high=close + 1, + low=close - 1, close=close, volume=1000.0) + + +def _make_bar_update(timestamp: int, is_closed: bool = True, close: float = 100.0) -> BarUpdate: + return BarUpdate(ohlcv=_make_ohlcv(timestamp, close), is_closed=is_closed) + + +class MockLiveProvider: + """Mock LiveProviderPlugin for testing the bridge.""" + + def __init__(self, bar_updates: list[BarUpdate]): + self._bar_updates = bar_updates + self._index = 0 + self._connected = False + self.reconnect_delay = 0.01 + self.max_reconnect_attempts = 3 + + async def connect(self): + self._connected = True + + async def disconnect(self): + self._connected = False + + @property + def is_connected(self): + return self._connected + + async def watch_ohlcv(self, symbol: str, timeframe: str) -> BarUpdate: + if self._index >= len(self._bar_updates): + raise asyncio.CancelledError() + + bar = self._bar_updates[self._index] + self._index += 1 + await asyncio.sleep(0.001) + return bar + + async def on_disconnect(self): + pass + + async def on_reconnect(self): + pass + + async def can_shutdown(self): + return True + + +def __test_live_generator_yields_closed_bars__(): + """live_ohlcv_generator only yields bars where is_closed=True""" + updates = [ + _make_bar_update(1000, is_closed=False, close=100.0), + _make_bar_update(1000, is_closed=True, close=101.0), + _make_bar_update(2000, is_closed=False, close=102.0), + _make_bar_update(2000, is_closed=True, close=103.0), + ] + + provider = MockLiveProvider(updates) + bars = list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) + + assert len(bars) == 2 + assert bars[0].close == 101.0 + assert bars[1].close == 103.0 + + +def 
__test_live_generator_filters_old_bars__(): + """live_ohlcv_generator skips bars older than last_historical_timestamp""" + updates = [ + _make_bar_update(1000, is_closed=True, close=100.0), + _make_bar_update(2000, is_closed=True, close=200.0), + _make_bar_update(3000, is_closed=True, close=300.0), + ] + + provider = MockLiveProvider(updates) + bars = list(live_ohlcv_generator(provider, "BTC/USDT", "1D", + last_historical_timestamp=2000)) + + assert len(bars) == 1 + assert bars[0].timestamp == 3000 + assert bars[0].close == 300.0 + + +def __test_live_generator_yields_ohlcv_objects__(): + """live_ohlcv_generator yields OHLCV, not BarUpdate""" + updates = [ + _make_bar_update(1000, is_closed=True), + ] + + provider = MockLiveProvider(updates) + bars = list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) + + assert len(bars) == 1 + assert isinstance(bars[0], OHLCV) + + +def __test_live_generator_connects_and_disconnects__(): + """live_ohlcv_generator calls connect on start and disconnect on finish""" + updates = [ + _make_bar_update(1000, is_closed=True), + ] + + provider = MockLiveProvider(updates) + list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) + + assert not provider.is_connected + + +def __test_live_generator_empty_stream__(): + """live_ohlcv_generator handles empty stream gracefully""" + provider = MockLiveProvider([]) + bars = list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) + assert len(bars) == 0 + + +class DelayedShutdownProvider(MockLiveProvider): + """Provider that delays shutdown for a number of can_shutdown() calls.""" + + def __init__(self, bar_updates: list[BarUpdate], deny_count: int = 2): + super().__init__(bar_updates) + self._deny_count = deny_count + self._shutdown_calls = 0 + + async def can_shutdown(self): + self._shutdown_calls += 1 + if self._shutdown_calls <= self._deny_count: + return False + return True + + +def __test_graceful_shutdown_waits_for_can_shutdown__(): + """Shutdown waits until can_shutdown() returns True""" + 
updates = [_make_bar_update(1000, is_closed=True)] + provider = DelayedShutdownProvider(updates, deny_count=2) + + list(live_ohlcv_generator(provider, "BTC/USDT", "1D", shutdown_timeout=10.0)) + + assert provider._shutdown_calls == 3 + assert not provider.is_connected + + +def __test_graceful_shutdown_timeout_forces_disconnect__(): + """Shutdown force-disconnects after timeout even if can_shutdown() returns False""" + + class NeverReadyProvider(MockLiveProvider): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._shutdown_calls = 0 + + async def can_shutdown(self): + self._shutdown_calls += 1 + return False + + updates = [_make_bar_update(1000, is_closed=True)] + provider = NeverReadyProvider(updates) + + start = time.monotonic() + list(live_ohlcv_generator(provider, "BTC/USDT", "1D", shutdown_timeout=2.0)) + elapsed = time.monotonic() - start + + assert provider._shutdown_calls >= 1 + assert elapsed < 5.0 + assert not provider.is_connected + + +def __test_graceful_shutdown_zero_timeout_waits_until_ready__(): + """shutdown_timeout=0 waits indefinitely until can_shutdown() returns True""" + updates = [_make_bar_update(1000, is_closed=True)] + provider = DelayedShutdownProvider(updates, deny_count=3) + + list(live_ohlcv_generator(provider, "BTC/USDT", "1D", shutdown_timeout=0)) + + assert provider._shutdown_calls == 4 + assert not provider.is_connected diff --git a/tests/t00_pynecore/data/test_003_ccxt_live_provider.py b/tests/t00_pynecore/data/test_003_ccxt_live_provider.py new file mode 100644 index 0000000..9480018 --- /dev/null +++ b/tests/t00_pynecore/data/test_003_ccxt_live_provider.py @@ -0,0 +1,150 @@ +""" +Integration tests for CCXT LiveProviderPlugin websocket streaming. + +These tests connect to real exchanges and require network access. +They are skipped if ccxt is not installed. 
+""" +import asyncio +import logging + +import pytest + +from pynecore.providers.ccxt import CCXTProvider +from pynecore.core.plugin.live_provider import BarUpdate +from pynecore.types.ohlcv import OHLCV + +logging.getLogger("ccxt").setLevel(logging.WARNING) +logging.getLogger("ccxt.base.exchange").setLevel(logging.WARNING) + +pytestmark = pytest.mark.live + + +def _skip_if_no_ccxt(): + try: + import ccxt.pro # noqa: F401 + except ImportError: + pytest.skip("CCXT library not available") + + +def __test_ccxt_live_connect_disconnect__(): + """CCXTProvider can connect and disconnect via websocket""" + _skip_if_no_ccxt() + + provider = CCXTProvider( + symbol="BYBIT:BTC/USDT:USDT", + timeframe="1", + ohlv_dir=None, + ) + + async def _run(): + await provider.connect() + assert provider.is_connected + await provider.disconnect() + assert not provider.is_connected + + asyncio.run(_run()) + + +def __test_ccxt_live_watch_ohlcv__(): + """CCXTProvider receives at least one OHLCV update from Bybit websocket""" + _skip_if_no_ccxt() + + provider = CCXTProvider( + symbol="BYBIT:BTC/USDT:USDT", + timeframe="1", + ohlv_dir=None, + ) + + async def _run(): + await provider.connect() + try: + bar_update = await asyncio.wait_for( + provider.watch_ohlcv("BTC/USDT:USDT", "1"), + timeout=30.0, + ) + + assert isinstance(bar_update, BarUpdate) + assert isinstance(bar_update.ohlcv, OHLCV) + assert isinstance(bar_update.is_closed, bool) + + ohlcv = bar_update.ohlcv + assert ohlcv.timestamp > 0 + assert ohlcv.open > 0 + assert ohlcv.high >= ohlcv.low + assert ohlcv.close > 0 + assert ohlcv.volume >= 0 + finally: + await provider.disconnect() + + asyncio.run(_run()) + + +def __test_ccxt_live_multiple_updates__(): + """CCXTProvider receives multiple consecutive updates""" + _skip_if_no_ccxt() + + provider = CCXTProvider( + symbol="BYBIT:BTC/USDT:USDT", + timeframe="1", + ohlv_dir=None, + ) + + async def _run(): + await provider.connect() + try: + updates = [] + for _ in range(3): + bar_update = 
await asyncio.wait_for( + provider.watch_ohlcv("BTC/USDT:USDT", "1"), + timeout=30.0, + ) + updates.append(bar_update) + + assert len(updates) == 3 + for u in updates: + assert isinstance(u, BarUpdate) + assert u.ohlcv.timestamp > 0 + finally: + await provider.disconnect() + + asyncio.run(_run()) + + +def __test_ccxt_live_can_shutdown_default__(): + """CCXTProvider.can_shutdown() returns True by default""" + _skip_if_no_ccxt() + + provider = CCXTProvider( + symbol="BYBIT:BTC/USDT:USDT", + timeframe="1", + ohlv_dir=None, + ) + + result = asyncio.run(provider.can_shutdown()) + assert result is True + + +def __test_ccxt_live_generator_integration__(): + """Full live_ohlcv_generator integration: connect, receive one bar, shutdown""" + _skip_if_no_ccxt() + + from pynecore.core.live_runner import live_ohlcv_generator + + provider = CCXTProvider( + symbol="BYBIT:BTC/USDT:USDT", + timeframe="1", + ohlv_dir=None, + ) + + received = [] + for ohlcv in live_ohlcv_generator(provider, "BTC/USDT:USDT", "1", + shutdown_timeout=5.0): + received.append(ohlcv) + assert isinstance(ohlcv, OHLCV) + assert ohlcv.timestamp > 0 + assert ohlcv.close > 0 + if len(received) >= 1: + break + + assert len(received) >= 1 + assert not provider.is_connected From 46af806c00bb9f6d52806c17bc450ee6d46302ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Fri, 3 Apr 2026 17:42:35 +0200 Subject: [PATCH 14/64] fix: narrow live runner disconnect errors Use a dedicated sentinel exception type for end-of-stream signaling and only ignore expected disconnect errors during provider cleanup. 
--- src/pynecore/core/live_runner.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/pynecore/core/live_runner.py b/src/pynecore/core/live_runner.py index 1419805..a524e7c 100644 --- a/src/pynecore/core/live_runner.py +++ b/src/pynecore/core/live_runner.py @@ -20,7 +20,10 @@ logger = logging.getLogger(__name__) -_SENTINEL = object() +class _Sentinel(BaseException): + """Marker signaling end of the live stream.""" + +_SENTINEL = _Sentinel() def live_ohlcv_generator( @@ -74,7 +77,7 @@ async def _graceful_shutdown(): try: await provider.disconnect() - except Exception: + except (OSError, RuntimeError): pass async def _async_loop(): From 726d361905155b6315ffea46bf303a4ee6f6ff22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Tue, 7 Apr 2026 21:13:26 +0200 Subject: [PATCH 15/64] feat: make default data fetch behavior provider-configurable Add a fetch_all_by_default provider flag to replace the hardcoded TradingView-specific fallback when no start date is set. Also strip the plugin suffix from generated OHLCV filenames and remove the conditional private tv CLI command import. --- src/pynecore/cli/commands/__init__.py | 6 ------ src/pynecore/cli/commands/data.py | 2 +- src/pynecore/core/plugin/provider.py | 5 ++++- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/src/pynecore/cli/commands/__init__.py b/src/pynecore/cli/commands/__init__.py index 18734ee..f6097c0 100644 --- a/src/pynecore/cli/commands/__init__.py +++ b/src/pynecore/cli/commands/__init__.py @@ -10,12 +10,6 @@ __all__ = ['run', 'data', 'compile', 'benchmark', 'debug', 'plugin'] -# Conditional import for private TradingView test command -_tv_path = Path(__file__).parent / "tv.py" -if _tv_path.exists() and _tv_path.is_symlink(): - from . 
import tv - - __all__.append('tv') @app.callback() diff --git a/src/pynecore/cli/commands/data.py b/src/pynecore/cli/commands/data.py index 00b51c0..b802ad3 100644 --- a/src/pynecore/cli/commands/data.py +++ b/src/pynecore/cli/commands/data.py @@ -203,7 +203,7 @@ def download( resolved_from = datetime.fromtimestamp(end_ts, UTC) # We need to add one interval to the start date to avoid downloading the same data resolved_from += timedelta(seconds=interval) - elif provider.value == 'tv': # TV provider: fetch all available data + elif getattr(provider_class, 'fetch_all_by_default', False): resolved_from = None else: # No data, download one year as default resolved_from = datetime.now(UTC) - timedelta(days=365) diff --git a/src/pynecore/core/plugin/provider.py b/src/pynecore/core/plugin/provider.py index 32bc301..f9a09f7 100644 --- a/src/pynecore/core/plugin/provider.py +++ b/src/pynecore/core/plugin/provider.py @@ -33,6 +33,9 @@ class ProviderPlugin(Plugin[ConfigT], metaclass=ABCMeta): ohlcv_path: Path | None = None """Path to the OHLCV data file.""" + fetch_all_by_default: bool = False + """If True, fetch all available data when no start date is given (instead of 1 year).""" + @classmethod @abstractmethod def to_tradingview_timeframe(cls, timeframe: str) -> str: @@ -65,7 +68,7 @@ def get_ohlcv_path(cls, symbol: str, timeframe: str, ohlv_dir: Path, :param provider_name: Override provider name in filename. :return: Path to the OHLCV file. 
""" - return ohlv_dir / (f"{provider_name or cls.__name__.lower().replace('provider', '')}" + return ohlv_dir / (f"{provider_name or cls.__name__.lower().replace('provider', '').replace('plugin', '')}" f"_{symbol.replace('/', '_').replace(':', '_').upper()}" f"_{timeframe}.ohlcv") From 71e5f2fa84b9a2ab21e3de662ad881eb1a247118 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Tue, 7 Apr 2026 21:42:03 +0200 Subject: [PATCH 16/64] fix(plugin): require symbol and timeframe for OHLCV path Assert that symbol and timeframe are set before building the OHLCV path when an OHLCV directory is provided. Keep the path unset when no directory is configured. --- src/pynecore/core/plugin/provider.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/pynecore/core/plugin/provider.py b/src/pynecore/core/plugin/provider.py index f9a09f7..66b25a4 100644 --- a/src/pynecore/core/plugin/provider.py +++ b/src/pynecore/core/plugin/provider.py @@ -83,7 +83,11 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, self.symbol = symbol self.timeframe = timeframe self.xchg_timeframe = self.to_exchange_timeframe(timeframe) if timeframe else None - self.ohlcv_path = self.get_ohlcv_path(symbol, timeframe, ohlv_dir) if ohlv_dir else None + if ohlv_dir: + assert symbol and timeframe + self.ohlcv_path = self.get_ohlcv_path(symbol, timeframe, ohlv_dir) + else: + self.ohlcv_path = None self.ohlcv_file = OHLCVWriter(self.ohlcv_path) if self.ohlcv_path else None self.config: ConfigT | None = config From a5e35fa9c15c084c7a9716ac5eb51de7982beb71 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Wed, 8 Apr 2026 00:49:55 +0200 Subject: [PATCH 17/64] feat(live): add intra-bar updates, varip support, and strategy suppression for live mode MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Live mode now passes BarUpdate objects (including is_closed=False) through to 
ScriptRunner, enabling intra-bar script execution with var rollback and varip persistence. Strategy functions are suppressed during the historical phase via lib._strategy_suppressed flag, and activated at the live transition. Key changes: - live_runner yields BarUpdate instead of OHLCV, including intra-bar ticks - ScriptRunner live loop with magnifier-based order processing on bar close - Dynamic barstate properties (isconfirmed, isrealtime, isnew, etc.) - lib._is_live global flag for live mode detection - CSVWriter.flush() for output streaming at historical→live transition - Strategy stats rewritten after each live bar close --- src/pynecore/cli/commands/run.py | 6 + src/pynecore/core/csv_file.py | 9 + src/pynecore/core/live_runner.py | 26 +- src/pynecore/core/script_runner.py | 358 +++++++++++++----- src/pynecore/lib/__init__.py | 6 + src/pynecore/lib/barstate.py | 23 +- src/pynecore/lib/strategy/__init__.py | 14 +- .../t00_pynecore/core/test_018_live_runner.py | 25 +- 8 files changed, 321 insertions(+), 146 deletions(-) diff --git a/src/pynecore/cli/commands/run.py b/src/pynecore/cli/commands/run.py index d33a3b0..bd1e140 100644 --- a/src/pynecore/cli/commands/run.py +++ b/src/pynecore/cli/commands/run.py @@ -539,6 +539,12 @@ def run( sys.path.insert(0, str(lib_dir)) lib_path_added = True + # Set live mode flags before ScriptRunner creation + if live: + from pynecore import lib as _lib + _lib._is_live = True + _lib._strategy_suppressed = True + # Show loading spinner while importing with Progress( SpinnerColumn(finished_text="[green]✓"), diff --git a/src/pynecore/core/csv_file.py b/src/pynecore/core/csv_file.py index 433f4fa..b04f257 100644 --- a/src/pynecore/core/csv_file.py +++ b/src/pynecore/core/csv_file.py @@ -266,6 +266,15 @@ def write_ohlcv(self, candle: OHLCV, timeout: Optional[float] = None) -> bool: except queue.Full: return False + def flush(self, timeout: Optional[float] = None): + """ + Wait for all pending writes to be processed by the worker 
thread. + + :param timeout: Optional timeout in seconds + """ + if self._is_open and self._queue is not None: + self._queue.join() + def close(self, timeout: Optional[float] = None): """ Close the CSV file and stop the worker thread. diff --git a/src/pynecore/core/live_runner.py b/src/pynecore/core/live_runner.py index a524e7c..8657aa8 100644 --- a/src/pynecore/core/live_runner.py +++ b/src/pynecore/core/live_runner.py @@ -2,7 +2,7 @@ Async/sync bridge for live data streaming. Runs a LiveProviderPlugin's async watch_ohlcv() in a background thread -and yields OHLCV objects to the synchronous ScriptRunner via queue.Queue. +and yields BarUpdate objects to the synchronous ScriptRunner via queue.Queue. """ from __future__ import annotations @@ -14,7 +14,6 @@ from queue import Queue, Empty from pynecore.core.plugin.live_provider import LiveProviderPlugin, BarUpdate -from pynecore.types.ohlcv import OHLCV __all__ = ['live_ohlcv_generator'] @@ -33,19 +32,19 @@ def live_ohlcv_generator( *, last_historical_timestamp: int | None = None, shutdown_timeout: float = 120.0, -) -> Iterator[OHLCV]: +) -> Iterator[BarUpdate]: """ - Bridge async watch_ohlcv() to a sync Iterator[OHLCV]. + Bridge async watch_ohlcv() to a sync Iterator[BarUpdate]. Spawns a background thread running asyncio, collects BarUpdate objects - via queue.Queue, filters for closed bars, and yields OHLCV. + via queue.Queue, and yields them including intra-bar updates. :param provider: A LiveProviderPlugin instance (already configured). :param symbol: Symbol in provider-specific format. :param timeframe: Timeframe in TradingView format. :param last_historical_timestamp: Timestamp of the last historical bar to avoid duplicates. :param shutdown_timeout: Max seconds to wait for graceful shutdown. 0 = wait forever. - :return: Iterator yielding OHLCV objects as bars close. + :return: Iterator yielding BarUpdate objects (both closed and intra-bar). 
""" bar_queue: Queue[BarUpdate | BaseException] = Queue(maxsize=100) stop_event = threading.Event() @@ -96,12 +95,13 @@ async def _async_loop(): ) reconnect_attempts = 0 - if not bar_update.is_closed: - continue - - if (last_historical_timestamp is not None - and bar_update.ohlcv.timestamp <= last_historical_timestamp): - continue + # Filter duplicates from the historical phase + if last_historical_timestamp is not None: + ts = bar_update.ohlcv.timestamp + if bar_update.is_closed and ts <= last_historical_timestamp: + continue + if not bar_update.is_closed and ts < last_historical_timestamp: + continue bar_queue.put(bar_update) @@ -159,7 +159,7 @@ def _thread_target(): if isinstance(item, BaseException): raise item - yield item.ohlcv + yield item except KeyboardInterrupt: logger.info("Live streaming interrupted by user") diff --git a/src/pynecore/core/script_runner.py b/src/pynecore/core/script_runner.py index 57f6bc4..c1bc427 100644 --- a/src/pynecore/core/script_runner.py +++ b/src/pynecore/core/script_runner.py @@ -166,9 +166,15 @@ def _reset_lib_vars(lib: ModuleType): lib.extra_fields = {} lib._lib_semaphore = False + lib._is_live = False + lib._strategy_suppressed = False lib.barstate.isfirst = True lib.barstate.islast = False + lib.barstate._is_live_phase = False + lib.barstate._is_confirmed = True + lib.barstate._is_new_bar = False + lib.barstate._is_last_confirmed_history = False from ..lib import request request._reset_request_state() @@ -504,27 +510,119 @@ def _lazy_spawn(sid: str): magnifier = BarMagnifier(self._magnifier_iter, chart_tf, tz=self.tz) self.ohlcv_iter = (w.aggregated for w in magnifier) - # Initialize calc_on_order_fills snapshot (only for strategies with COOF) + # Initialize calc_on_order_fills snapshot (for COOF or live mode) var_snapshot = None + is_live = lib._is_live if is_strat and self.script.calc_on_order_fills: from .var_snapshot import VarSnapshot var_snapshot = VarSnapshot(self.script_module, script._registered_libraries) + elif 
is_live: + from .var_snapshot import VarSnapshot + var_snapshot = VarSnapshot(self.script_module, script._registered_libraries) + + # --- Helper closures for DRY --- + registered_libraries = script._registered_libraries + + def _run_libs_and_main(): + lib._lib_semaphore = True + for _title, main_func in registered_libraries: + main_func() + lib._lib_semaphore = False + r = self.script_module.main() + if r is not None: + assert isinstance(r, dict), "The 'main' function must return a dictionary!" + lib._plot_data.update(r) + + def _write_bar_output(bar_candle): + nonlocal trade_num + if self.plot_writer and lib._plot_data: + ef = {} if bar_candle.extra_fields is None else dict(bar_candle.extra_fields) + ef.update(lib._plot_data) + self.plot_writer.write_ohlcv(bar_candle._replace(extra_fields=ef)) + + if is_strat and self.trades_writer and position: + for t in position.new_closed_trades: + trade_num += 1 + self.trades_writer.write( + trade_num, t.entry_bar_index, + "Entry long" if t.size > 0 else "Entry short", + t.entry_comment if t.entry_comment else t.entry_id, + string.format_time(t.entry_time), # type: ignore + t.entry_price, abs(t.size), t.profit, + f"{t.profit_percent:.2f}", t.cum_profit, + f"{t.cum_profit_percent:.2f}", t.max_runup, + f"{t.max_runup_percent:.2f}", t.max_drawdown, + f"{t.max_drawdown_percent:.2f}", + ) + self.trades_writer.write( + trade_num, t.exit_bar_index, + "Exit long" if t.size > 0 else "Exit short", + t.exit_comment if t.exit_comment else t.exit_id, + string.format_time(t.exit_time), # type: ignore + t.exit_price, abs(t.size), t.profit, + f"{t.profit_percent:.2f}", t.cum_profit, + f"{t.cum_profit_percent:.2f}", t.max_runup, + f"{t.max_runup_percent:.2f}", t.max_drawdown, + f"{t.max_drawdown_percent:.2f}", + ) + + def _coof_loop(): + """COOF re-execution loop: process orders, re-execute on fills.""" + old_fills = position._fill_counter + position.process_orders() + new_fills = position._fill_counter + while new_fills > old_fills: + if 
var_snapshot.has_vars: + var_snapshot.restore() + function_isolation.reset() + _run_libs_and_main() + old_fills = new_fills + position.process_orders() + new_fills = position._fill_counter + + def _coof_magnified_loop(sub_bars_list, aggregated_candle): + """COOF re-execution loop with magnified order processing.""" + old_fills = position._fill_counter + position.process_orders_magnified(sub_bars_list, aggregated_candle) + new_fills = position._fill_counter + while new_fills > old_fills: + if var_snapshot.has_vars: + var_snapshot.restore() + function_isolation.reset() + _run_libs_and_main() + old_fills = new_fills + position.process_orders_magnified(sub_bars_list, aggregated_candle) + new_fills = position._fill_counter + + # --- Peek-ahead pattern: historical bars --- + from pynecore.core.plugin.live_provider import BarUpdate - # Peek-ahead pattern: look one step ahead to detect the last bar accurately ohlcv_iterator = iter(self.ohlcv_iter) - next_candle = next(ohlcv_iterator, None) + next_item = next(ohlcv_iterator, None) + first_live_update = None # Will hold the first BarUpdate if we transition + + while next_item is not None: + # If a BarUpdate arrives, we transition to live mode + if isinstance(next_item, BarUpdate): + first_live_update = next_item + break - while next_candle is not None: - candle = next_candle - next_candle = next(ohlcv_iterator, None) + candle = next_item + next_item = next(ohlcv_iterator, None) - # Update syminfo lib properties if needed, other ScriptRunner instances may have changed them + # Update syminfo lib properties if needed if self.update_syminfo_every_run: _set_lib_syminfo_properties(self.syminfo, lib) self.tz = _parse_timezone(lib.syminfo.timezone) - # Accurate last bar detection - no more estimation needed - barstate.islast = (next_candle is None) + # Last bar detection + if is_live: + barstate.islast = False + barstate._is_last_confirmed_history = ( + next_item is None or isinstance(next_item, BarUpdate) + ) + else: + 
barstate.islast = (next_item is None) # Update lib properties _set_lib_properties(candle, self.bar_index, self.tz, lib) @@ -532,129 +630,163 @@ def _lazy_spawn(sid: str): # Store first price for buy & hold calculation if self.first_price is None: self.first_price = lib.close # type: ignore - - # Update last price self.last_price = lib.close # type: ignore # calc_on_order_fills path: snapshot, process, re-execute on fills - if var_snapshot and position: + if var_snapshot and position and not lib._strategy_suppressed: if var_snapshot.has_vars: var_snapshot.save() - - old_fills = position._fill_counter - position.process_orders() - new_fills = position._fill_counter - - while new_fills > old_fills: - if var_snapshot.has_vars: - var_snapshot.restore() - function_isolation.reset() - lib._lib_semaphore = True - for library_title, main_func in script._registered_libraries: - main_func() - lib._lib_semaphore = False - self.script_module.main() - old_fills = new_fills - position.process_orders() - new_fills = position._fill_counter - + _coof_loop() if var_snapshot.has_vars: var_snapshot.restore() - else: - # Standard path (no COOF) - if is_strat and position: - position.process_orders() - - # Execute registered library main functions before main script - lib._lib_semaphore = True - for library_title, main_func in script._registered_libraries: - main_func() - lib._lib_semaphore = False + elif is_strat and position and not lib._strategy_suppressed: + position.process_orders() - # Run the script - res = self.script_module.main() + # Execute libraries + script + _run_libs_and_main() - # Process deferred margin calls (after script runs, before results) - if is_strat and position: + # Process deferred margin calls + if is_strat and position and not lib._strategy_suppressed: position.process_deferred_margin_call() - # Update plot data with the results - if res is not None: - assert isinstance(res, dict), "The 'main' function must return a dictionary!" 
- lib._plot_data.update(res) + # Write output + _write_bar_output(candle) - # Write plot data to CSV if we have a writer - if self.plot_writer and lib._plot_data: - # Create a new dictionary combining extra_fields (if any) with plot data - extra_fields = {} if candle.extra_fields is None else dict(candle.extra_fields) - extra_fields.update(lib._plot_data) - # Create a new OHLCV instance with updated extra_fields - updated_candle = candle._replace(extra_fields=extra_fields) - self.plot_writer.write_ohlcv(updated_candle) - - # Yield plot data to be able to process in a subclass + # Yield if not is_strat: yield candle, lib._plot_data elif position: yield candle, lib._plot_data, position.new_closed_trades - # Save trade data if we have a writer - if is_strat and self.trades_writer and position: - for trade in position.new_closed_trades: - trade_num += 1 # Start from 1 - self.trades_writer.write( - trade_num, - trade.entry_bar_index, - "Entry long" if trade.size > 0 else "Entry short", - trade.entry_comment if trade.entry_comment else trade.entry_id, - string.format_time(trade.entry_time), # type: ignore - trade.entry_price, - abs(trade.size), - trade.profit, - f"{trade.profit_percent:.2f}", - trade.cum_profit, - f"{trade.cum_profit_percent:.2f}", - trade.max_runup, - f"{trade.max_runup_percent:.2f}", - trade.max_drawdown, - f"{trade.max_drawdown_percent:.2f}", - ) - self.trades_writer.write( - trade_num, - trade.exit_bar_index, - "Exit long" if trade.size > 0 else "Exit short", - trade.exit_comment if trade.exit_comment else trade.exit_id, - string.format_time(trade.exit_time), # type: ignore - trade.exit_price, - abs(trade.size), - trade.profit, - f"{trade.profit_percent:.2f}", - trade.cum_profit, - f"{trade.cum_profit_percent:.2f}", - trade.max_runup, - f"{trade.max_runup_percent:.2f}", - trade.max_drawdown, - f"{trade.max_drawdown_percent:.2f}", - ) - - # Clear plot data lib._plot_data.clear() - # Track equity curve for strategies if is_strat and position: - 
current_equity = float(position.equity) if position.equity else self.script.initial_capital + current_equity = float(position.equity) if position.equity \ + else self.script.initial_capital self.equity_curve.append(current_equity) - # Call the progress callback if on_progress and lib._datetime is not None: on_progress(lib._datetime.replace(tzinfo=None)) - # Update bar index self.bar_index += 1 - # It is no longer the first bar barstate.isfirst = False - if on_progress: + # --- Live mode: transition and intra-bar loop --- + if first_live_update is not None: + import itertools + + # Transition: historical → live + barstate._is_live_phase = True + barstate._is_last_confirmed_history = False + lib._strategy_suppressed = False + + # Flush output at transition point + if self.plot_writer: + self.plot_writer.flush() + if self.trades_writer: + self.trades_writer.flush() + + last_bar_timestamp: int | None = None + sub_bars: list[OHLCV] = [] + + live_stream = itertools.chain([first_live_update], ohlcv_iterator) + for bar_update in live_stream: + if not isinstance(bar_update, BarUpdate): + continue + + candle = bar_update.ohlcv + is_new_bar = (candle.timestamp != last_bar_timestamp) + + barstate.islast = True + barstate._is_confirmed = bar_update.is_closed + barstate._is_new_bar = is_new_bar + + _set_lib_properties(candle, self.bar_index, self.tz, lib) + + if self.first_price is None: + self.first_price = lib.close # type: ignore + self.last_price = lib.close # type: ignore + + if is_new_bar and not bar_update.is_closed: + # ── Bar open (first intra-bar tick) ── + sub_bars = [candle] + if var_snapshot and var_snapshot.has_vars: + var_snapshot.save() + _run_libs_and_main() + last_bar_timestamp = candle.timestamp + + elif not bar_update.is_closed: + # ── Subsequent intra-bar tick ── + sub_bars.append(candle) + if var_snapshot and var_snapshot.has_vars: + var_snapshot.restore() + function_isolation.reset() + _run_libs_and_main() + + elif bar_update.is_closed: + # ── Bar close ── 
+ if is_new_bar: + sub_bars = [] + if var_snapshot and var_snapshot.has_vars: + var_snapshot.save() + else: + sub_bars.append(candle) + if var_snapshot and var_snapshot.has_vars: + var_snapshot.restore() + function_isolation.reset() + + # Order processing: magnified if sub_bars available + if is_strat and position: + if sub_bars: + if var_snapshot and var_snapshot.has_vars: + _coof_magnified_loop(sub_bars, candle) + var_snapshot.restore() + else: + position.process_orders_magnified(sub_bars, candle) + else: + if var_snapshot and var_snapshot.has_vars: + _coof_loop() + var_snapshot.restore() + else: + position.process_orders() + + # Final script execution for the closed bar + _run_libs_and_main() + + if is_strat and position: + position.process_deferred_margin_call() + + # Commit state for next bar + if var_snapshot and var_snapshot.has_vars: + var_snapshot.save() + + # Output (only on closed bars) + _write_bar_output(candle) + + if not is_strat: + yield candle, lib._plot_data + elif position: + yield candle, lib._plot_data, position.new_closed_trades + + lib._plot_data.clear() + + if is_strat and position: + current_equity = float(position.equity) if position.equity \ + else self.script.initial_capital + self.equity_curve.append(current_equity) + + last_bar_timestamp = candle.timestamp + self.bar_index += 1 + barstate.isfirst = False + + # Live strategy stats: rewrite stats file after each bar + if is_strat and self.strat_writer and position: + self._write_live_strategy_stats(position) + + if on_progress and lib._datetime is not None: + on_progress(lib._datetime.replace(tzinfo=None)) + + elif on_progress: on_progress(datetime.max) except GeneratorExit: @@ -974,6 +1106,24 @@ def _ensure_ohlcv_ext(path: str | Path) -> str: return str(ohlcv_path) return str(path) + def _write_live_strategy_stats(self, position): + """Rewrite strategy stats file with current state (live mode, after each bar).""" + from .strategy_stats import calculate_strategy_statistics, 
write_strategy_statistics_csv + try: + self.strat_writer.open() + stats = calculate_strategy_statistics( + position, self.script.initial_capital, + self.equity_curve if self.equity_curve else None, + self.first_price, self.last_price, + ) + write_strategy_statistics_csv(stats, self.strat_writer) + self.strat_writer.close() + except Exception: + try: + self.strat_writer.close() + except Exception: + pass + def run(self, on_progress: Callable[[datetime], None] | None = None): """ Run the script on the data diff --git a/src/pynecore/lib/__init__.py b/src/pynecore/lib/__init__.py index 2d2af21..383a632 100644 --- a/src/pynecore/lib/__init__.py +++ b/src/pynecore/lib/__init__.py @@ -102,6 +102,12 @@ # Lib semaphore - to prevent lib`s main function to do things it must not (plot, strategy things, etc.) _lib_semaphore = False +# Live trading mode flag — set by run.py when --live is specified +_is_live = False + +# Strategy suppression — prevents strategy order placement during historical phase in live mode +_strategy_suppressed = False + # # Callable modules # diff --git a/src/pynecore/lib/barstate.py b/src/pynecore/lib/barstate.py index c5fd41a..c82999b 100644 --- a/src/pynecore/lib/barstate.py +++ b/src/pynecore/lib/barstate.py @@ -2,7 +2,7 @@ __all__ = [ 'isfirst', - 'islast', + 'islast', 'isconfirmed', 'ishistory', 'islastconfirmedhistory', @@ -10,7 +10,11 @@ 'isrealtime' ] -# TODO: support live trading +# Dynamic state variables set by ScriptRunner during live mode +_is_live_phase = False +_is_confirmed = True +_is_new_bar = False +_is_last_confirmed_history = False isfirst = True """ Returns true if current bar is first bar in barset, false otherwise.""" @@ -26,8 +30,7 @@ def isconfirmed() -> bool: :return: True if the script is calculating the last (closing) update of the current bar """ - # TODO: now it is always true, but if we implement bar magnifier, it should be calculated - return True + return _is_confirmed @module_property @@ -37,8 +40,7 @@ def ishistory() 
-> bool: :return: True if script is calculating on historical bars, false otherwise """ - # TODO: now it is always true, but for live trading it should be implemented - return True + return not _is_live_phase @module_property @@ -50,8 +52,7 @@ def islastconfirmedhistory() -> bool: :return: True if script is executing on the dataset's last bar when market is closed, or script is executing on the bar immediately preceding the real-time bar, if market is open """ - # TODO: now is always false, but for live trading it should be implemented - return False + return _is_last_confirmed_history @module_property @@ -61,8 +62,7 @@ def isnew() -> bool: :return: True if script is currently calculating on new bar, false otherwise """ - # TODO: now it is always false, but if we implement bar magnifier, it should be calculated - return False + return _is_new_bar @module_property @@ -72,5 +72,4 @@ def isrealtime() -> bool: :return: True if script is calculating on real-time bars, false otherwise """ - # TODO: now it is always false, but for live trading it should be implemented - return False + return _is_live_phase diff --git a/src/pynecore/lib/strategy/__init__.py b/src/pynecore/lib/strategy/__init__.py index 6763f58..585ff76 100644 --- a/src/pynecore/lib/strategy/__init__.py +++ b/src/pynecore/lib/strategy/__init__.py @@ -1807,7 +1807,7 @@ def cancel(id: str): :param id: The identifier of the order to cancel """ - if lib._lib_semaphore: + if lib._lib_semaphore or lib._strategy_suppressed: return position = lib._script.position @@ -1819,7 +1819,7 @@ def cancel_all(): """ Cancels all pending or unfilled orders """ - if lib._lib_semaphore: + if lib._lib_semaphore or lib._strategy_suppressed: return position = lib._script.position position.entry_orders.clear() @@ -1842,7 +1842,7 @@ def close(id: str, comment: PyneStr = na_str, qty: PyneFloat = na_float, :param alert_message: Custom text for the alert that fires when an order fills. 
:param immediately: If true, the closing order executes on the same tick when the strategy places it """ - if lib._lib_semaphore: + if lib._lib_semaphore or lib._strategy_suppressed: return position = lib._script.position @@ -1885,7 +1885,7 @@ def close_all(comment: PyneStr = na_str, alert_message: PyneStr = na_str, immedi :param alert_message: Custom text for the alert that fires when an order fills :param immediately: If true, the closing order executes on the same tick when the strategy places it """ - if lib._lib_semaphore: + if lib._lib_semaphore or lib._strategy_suppressed: return position = lib._script.position @@ -1921,7 +1921,7 @@ def entry(id: str, direction: direction.Direction, qty: int | PyneFloat = na_flo :param comment: Additional notes on the filled order :param alert_message: Custom text for the alert that fires when an order fills """ - if lib._lib_semaphore: + if lib._lib_semaphore or lib._strategy_suppressed: return script = lib._script @@ -2079,7 +2079,7 @@ def exit(id: str, from_entry: str = "", :param alert_trailing: Custom text for the alert that fires when an order fills :param disable_alert: If true, the alert will not fire when the order fills """ - if lib._lib_semaphore: + if lib._lib_semaphore or lib._strategy_suppressed: return script = lib._script @@ -2226,7 +2226,7 @@ def order(id: str, direction: direction.Direction, qty: int | PyneFloat = na_flo :param alert_message: Custom text for the alert that fires when an order fills :param disable_alert: If true, the strategy does not trigger an alert when the order fills """ - if lib._lib_semaphore: + if lib._lib_semaphore or lib._strategy_suppressed: return script = lib._script diff --git a/tests/t00_pynecore/core/test_018_live_runner.py b/tests/t00_pynecore/core/test_018_live_runner.py index e07b513..fa9d352 100644 --- a/tests/t00_pynecore/core/test_018_live_runner.py +++ b/tests/t00_pynecore/core/test_018_live_runner.py @@ -57,8 +57,8 @@ async def can_shutdown(self): return True -def 
__test_live_generator_yields_closed_bars__(): - """live_ohlcv_generator only yields bars where is_closed=True""" +def __test_live_generator_yields_all_bar_updates__(): + """live_ohlcv_generator yields both intra-bar and closed bar updates""" updates = [ _make_bar_update(1000, is_closed=False, close=100.0), _make_bar_update(1000, is_closed=True, close=101.0), @@ -69,9 +69,13 @@ def __test_live_generator_yields_closed_bars__(): provider = MockLiveProvider(updates) bars = list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) - assert len(bars) == 2 - assert bars[0].close == 101.0 - assert bars[1].close == 103.0 + assert len(bars) == 4 + assert not bars[0].is_closed + assert bars[0].ohlcv.close == 100.0 + assert bars[1].is_closed + assert bars[1].ohlcv.close == 101.0 + assert not bars[2].is_closed + assert bars[3].is_closed def __test_live_generator_filters_old_bars__(): @@ -87,12 +91,12 @@ def __test_live_generator_filters_old_bars__(): last_historical_timestamp=2000)) assert len(bars) == 1 - assert bars[0].timestamp == 3000 - assert bars[0].close == 300.0 + assert bars[0].ohlcv.timestamp == 3000 + assert bars[0].ohlcv.close == 300.0 -def __test_live_generator_yields_ohlcv_objects__(): - """live_ohlcv_generator yields OHLCV, not BarUpdate""" +def __test_live_generator_yields_bar_update_objects__(): + """live_ohlcv_generator yields BarUpdate objects (not raw OHLCV)""" updates = [ _make_bar_update(1000, is_closed=True), ] @@ -101,7 +105,8 @@ def __test_live_generator_yields_ohlcv_objects__(): bars = list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) assert len(bars) == 1 - assert isinstance(bars[0], OHLCV) + assert isinstance(bars[0], BarUpdate) + assert isinstance(bars[0].ohlcv, OHLCV) def __test_live_generator_connects_and_disconnects__(): From 074177b0a575a66ce47c893952036253bc28774a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Wed, 8 Apr 2026 01:19:28 +0200 Subject: [PATCH 18/64] test: use 24/7 session helper in strategy tests 
Replace get_opening_hours_and_sessions() with _create_24_7_sessions() in the bar magnifier and calc_on_order_fills test fixtures. --- tests/t01_lib/t30_strategy/test_020_bar_magnifier.py | 2 +- tests/t01_lib/t30_strategy/test_030_calc_on_order_fills.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/t01_lib/t30_strategy/test_020_bar_magnifier.py b/tests/t01_lib/t30_strategy/test_020_bar_magnifier.py index 28113f0..62ec121 100644 --- a/tests/t01_lib/t30_strategy/test_020_bar_magnifier.py +++ b/tests/t01_lib/t30_strategy/test_020_bar_magnifier.py @@ -51,7 +51,7 @@ def _make_syminfo(period: str = '5'): """Create a minimal SymInfo for testing.""" from pynecore.core.syminfo import SymInfo from pynecore.providers.ccxt import CCXTProvider - opening_hours, session_starts, session_ends = CCXTProvider.get_opening_hours_and_sessions() + opening_hours, session_starts, session_ends = CCXTProvider._create_24_7_sessions() return SymInfo( prefix="TEST", description="Test", ticker="TEST", currency="USD", period=period, type="crypto", mintick=0.01, pricescale=100, diff --git a/tests/t01_lib/t30_strategy/test_030_calc_on_order_fills.py b/tests/t01_lib/t30_strategy/test_030_calc_on_order_fills.py index 6b2596e..054afc4 100644 --- a/tests/t01_lib/t30_strategy/test_030_calc_on_order_fills.py +++ b/tests/t01_lib/t30_strategy/test_030_calc_on_order_fills.py @@ -38,7 +38,7 @@ def _make_syminfo(period: str = '5'): """Create a minimal SymInfo for testing.""" from pynecore.core.syminfo import SymInfo from pynecore.providers.ccxt import CCXTProvider - opening_hours, session_starts, session_ends = CCXTProvider.get_opening_hours_and_sessions() + opening_hours, session_starts, session_ends = CCXTProvider._create_24_7_sessions() return SymInfo( prefix="TEST", description="Test", ticker="TEST", currency="USD", period=period, type="crypto", mintick=0.01, pricescale=100, From 21b16ab20848f0a3625aa4f2bffb971202dde91d Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Wed, 8 Apr 2026 02:17:35 +0200 Subject: [PATCH 19/64] refactor(barstate): convert module properties to plain variables MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit barstate properties (isconfirmed, ishistory, isrealtime, isnew, islastconfirmedhistory) were incorrectly declared as @module_property functions — they are plain variables on TradingView. ScriptRunner now sets them directly. Updated module_properties.json accordingly. Added 6 unit tests for live mode: barstate transitions, var rollback, varip persistence across intra-bar, yield-only-on-closed, and islastconfirmedhistory detection. --- src/pynecore/core/script_runner.py | 20 +- src/pynecore/lib/barstate.py | 65 +----- .../transformers/module_properties.json | 10 +- tests/t00_pynecore/core/test_019_live_mode.py | 195 ++++++++++++++++++ .../t00_pynecore/core/test_019_live_mode.toml | 38 ++++ 5 files changed, 259 insertions(+), 69 deletions(-) create mode 100644 tests/t00_pynecore/core/test_019_live_mode.py create mode 100644 tests/t00_pynecore/core/test_019_live_mode.toml diff --git a/src/pynecore/core/script_runner.py b/src/pynecore/core/script_runner.py index c1bc427..ffdabca 100644 --- a/src/pynecore/core/script_runner.py +++ b/src/pynecore/core/script_runner.py @@ -171,10 +171,11 @@ def _reset_lib_vars(lib: ModuleType): lib.barstate.isfirst = True lib.barstate.islast = False - lib.barstate._is_live_phase = False - lib.barstate._is_confirmed = True - lib.barstate._is_new_bar = False - lib.barstate._is_last_confirmed_history = False + lib.barstate.isconfirmed = True + lib.barstate.ishistory = True + lib.barstate.isrealtime = False + lib.barstate.isnew = False + lib.barstate.islastconfirmedhistory = False from ..lib import request request._reset_request_state() @@ -618,7 +619,7 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): # Last bar detection if is_live: barstate.islast = False - 
barstate._is_last_confirmed_history = ( + barstate.islastconfirmedhistory = ( next_item is None or isinstance(next_item, BarUpdate) ) else: @@ -676,8 +677,9 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): import itertools # Transition: historical → live - barstate._is_live_phase = True - barstate._is_last_confirmed_history = False + barstate.ishistory = False + barstate.isrealtime = True + barstate.islastconfirmedhistory = False lib._strategy_suppressed = False # Flush output at transition point @@ -698,8 +700,8 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): is_new_bar = (candle.timestamp != last_bar_timestamp) barstate.islast = True - barstate._is_confirmed = bar_update.is_closed - barstate._is_new_bar = is_new_bar + barstate.isconfirmed = bar_update.is_closed + barstate.isnew = is_new_bar _set_lib_properties(candle, self.bar_index, self.tz, lib) diff --git a/src/pynecore/lib/barstate.py b/src/pynecore/lib/barstate.py index c82999b..ad5a5bb 100644 --- a/src/pynecore/lib/barstate.py +++ b/src/pynecore/lib/barstate.py @@ -1,5 +1,3 @@ -from ..core.module_property import module_property - __all__ = [ 'isfirst', 'islast', @@ -10,66 +8,23 @@ 'isrealtime' ] -# Dynamic state variables set by ScriptRunner during live mode -_is_live_phase = False -_is_confirmed = True -_is_new_bar = False -_is_last_confirmed_history = False - isfirst = True """ Returns true if current bar is first bar in barset, false otherwise.""" islast = False """ Returns true if current bar is the last bar in barset, false otherwise. """ +isconfirmed = True +""" Returns true if the script is calculating the last (closing) update of the current bar. 
""" -@module_property -def isconfirmed() -> bool: - """ - Returns true if the script is calculating the last (closing) update of the current bar - - :return: True if the script is calculating the last (closing) update of the current bar - """ - return _is_confirmed - - -@module_property -def ishistory() -> bool: - """ - Returns true if script is calculating on historical bars, false otherwise. - - :return: True if script is calculating on historical bars, false otherwise - """ - return not _is_live_phase - - -@module_property -def islastconfirmedhistory() -> bool: - """ - Returns true if script is executing on the dataset's last bar when market is closed, or script - is executing on the bar immediately preceding the real-time bar, if market is open. - - :return: True if script is executing on the dataset's last bar when market is closed, or script - is executing on the bar immediately preceding the real-time bar, if market is open - """ - return _is_last_confirmed_history - - -@module_property -def isnew() -> bool: - """ - Returns true if script is currently calculating on new bar, false otherwise. - - :return: True if script is currently calculating on new bar, false otherwise - """ - return _is_new_bar +ishistory = True +""" Returns true if script is calculating on historical bars, false otherwise. """ +islastconfirmedhistory = False +""" Returns true on the last historical bar before real-time bars begin. """ -@module_property -def isrealtime() -> bool: - """ - Returns true if script is calculating on real-time bars, false otherwise. +isnew = False +""" Returns true if script is currently calculating on new bar, false otherwise. """ - :return: True if script is calculating on real-time bars, false otherwise - """ - return _is_live_phase +isrealtime = False +""" Returns true if script is calculating on real-time bars, false otherwise. 
""" diff --git a/src/pynecore/transformers/module_properties.json b/src/pynecore/transformers/module_properties.json index 93b4622..975a188 100644 --- a/src/pynecore/transformers/module_properties.json +++ b/src/pynecore/transformers/module_properties.json @@ -103,25 +103,25 @@ }, "lib.barstate": { "isconfirmed": { - "type": "property" + "type": "variable" }, "isfirst": { "type": "variable" }, "ishistory": { - "type": "property" + "type": "variable" }, "islast": { "type": "variable" }, "islastconfirmedhistory": { - "type": "property" + "type": "variable" }, "isnew": { - "type": "property" + "type": "variable" }, "isrealtime": { - "type": "property" + "type": "variable" } }, "lib.box": { diff --git a/tests/t00_pynecore/core/test_019_live_mode.py b/tests/t00_pynecore/core/test_019_live_mode.py new file mode 100644 index 0000000..ccf10a5 --- /dev/null +++ b/tests/t00_pynecore/core/test_019_live_mode.py @@ -0,0 +1,195 @@ +""" +@pyne +""" +import sys +import itertools + +from pynecore.lib import script, barstate +from pynecore.types import Persistent, IBPersistent + + +@script.indicator(title="Live Mode Test") +def main(): + var_count: Persistent[int] = 0 + varip_count: IBPersistent[int] = 0 + var_count += 1 + varip_count += 1 + return { + "var": var_count, + "varip": varip_count, + "rt": 1 if barstate.isrealtime else 0, + "hist": 1 if barstate.ishistory else 0, + "conf": 1 if barstate.isconfirmed else 0, + "new": 1 if barstate.isnew else 0, + "lch": 1 if barstate.islastconfirmedhistory else 0, + } + + +def _make_ohlcv(ts, close=100.0): + from pynecore.types.ohlcv import OHLCV + return OHLCV(timestamp=ts, open=close, high=close + 1, low=close - 1, + close=close, volume=1000.0) + + +def _make_bar_update(ts, is_closed=True, close=100.0): + from pynecore.core.plugin.live_provider import BarUpdate + return BarUpdate(ohlcv=_make_ohlcv(ts, close), is_closed=is_closed) + + +def _create_live_runner(script_path, module_key, syminfo, ohlcv_iter): + """Helper: set live mode flags, 
clean module cache, create ScriptRunner.""" + from pynecore.core.script_runner import ScriptRunner + from pynecore import lib + + for key in [module_key, script_path.stem]: + sys.modules.pop(key, None) + + # Use setattr to avoid FunctionIsolationTransformer mangling the assignment + setattr(lib, '_is_live', True) + setattr(lib, '_strategy_suppressed', True) + return ScriptRunner(script_path, ohlcv_iter, syminfo) + + +def __test_barstate_historical_then_live__(script_path, module_key, syminfo): + """barstate transitions from ishistory=True to isrealtime=True at live phase""" + historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(3)] + live = [_make_bar_update(3 * 60, is_closed=True, close=104.0)] + + runner = _create_live_runner( + script_path, module_key, syminfo, + itertools.chain(historical, live), + ) + + results = [(c, dict(p)) for c, p in runner.run_iter()] + + assert len(results) == 4 + + # Historical bars + for i in range(3): + _, plot_data = results[i] + assert plot_data["hist"] == 1, f"Bar {i} should be historical" + assert plot_data["rt"] == 0, f"Bar {i} should not be realtime" + + # Live bar + _, plot_data = results[3] + assert plot_data["hist"] == 0, "Live bar should not be historical" + assert plot_data["rt"] == 1, "Live bar should be realtime" + assert plot_data["conf"] == 1, "Closed live bar should be confirmed" + + +def __test_islastconfirmedhistory__(script_path, module_key, syminfo): + """islastconfirmedhistory is True only on the final historical bar before live""" + historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(3)] + live = [_make_bar_update(3 * 60, is_closed=True, close=104.0)] + + runner = _create_live_runner( + script_path, module_key, syminfo, + itertools.chain(historical, live), + ) + + results = [(c, dict(p)) for c, p in runner.run_iter()] + + # Only bar 2 (last historical) should have islastconfirmedhistory=True + assert results[0][1]["lch"] == 0 + assert results[1][1]["lch"] == 0 + assert results[2][1]["lch"] == 1, 
"Last historical bar should have islastconfirmedhistory" + assert results[3][1]["lch"] == 0, "Live bar should not have islastconfirmedhistory" + + +def __test_intrabar_barstate__(script_path, module_key, syminfo): + """Intra-bar ticks have isconfirmed=False, isnew=True on first tick""" + historical = [_make_ohlcv(0, 100.0)] + live = [ + _make_bar_update(60, is_closed=False, close=101.0), # bar open + _make_bar_update(60, is_closed=False, close=101.5), # intra-bar + _make_bar_update(60, is_closed=True, close=102.0), # bar close + ] + + runner = _create_live_runner( + script_path, module_key, syminfo, + itertools.chain(historical, live), + ) + + results = [(c, dict(p)) for c, p in runner.run_iter()] + + # 1 historical + 1 closed live = 2 yielded results (intra-bar not yielded) + assert len(results) == 2 + + # The live bar's final values (from the bar-close execution) + _, plot_data = results[1] + assert plot_data["conf"] == 1, "Closed bar should be confirmed" + assert plot_data["rt"] == 1, "Should be realtime" + + +def __test_var_rollback_varip_persist__(script_path, module_key, syminfo): + """var rolls back on intra-bar, varip persists across all executions""" + historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(3)] + live = [ + # Bar with 3 executions: open tick, intra-bar tick, close + _make_bar_update(3 * 60, is_closed=False, close=104.0), + _make_bar_update(3 * 60, is_closed=False, close=104.5), + _make_bar_update(3 * 60, is_closed=True, close=105.0), + ] + + runner = _create_live_runner( + script_path, module_key, syminfo, + itertools.chain(historical, live), + ) + + results = [(c, dict(p)) for c, p in runner.run_iter()] + assert len(results) == 4 # 3 historical + 1 live close + + # Historical bars: var and varip both increment by 1 per bar + assert results[0][1]["var"] == 1 + assert results[0][1]["varip"] == 1 + assert results[1][1]["var"] == 2 + assert results[1][1]["varip"] == 2 + assert results[2][1]["var"] == 3 + assert results[2][1]["varip"] == 3 
+ + # Live bar: var should be 4 (3+1, rolled back each time, final = 4) + # varip should be 6 (3 + 3 executions: open, intra, close) + _, live_plot = results[3] + assert live_plot["var"] == 4, f"var should be 4, got {live_plot['var']}" + assert live_plot["varip"] == 6, f"varip should be 6, got {live_plot['varip']}" + + +def __test_yield_only_on_closed_bars__(script_path, module_key, syminfo): + """run_iter only yields for closed bars, not intra-bar ticks""" + historical = [_make_ohlcv(0, 100.0)] + live = [ + _make_bar_update(60, is_closed=False, close=101.0), + _make_bar_update(60, is_closed=False, close=101.5), + _make_bar_update(60, is_closed=False, close=101.8), + _make_bar_update(60, is_closed=True, close=102.0), + _make_bar_update(120, is_closed=True, close=103.0), + ] + + runner = _create_live_runner( + script_path, module_key, syminfo, + itertools.chain(historical, live), + ) + + results = [(c, dict(p)) for c, p in runner.run_iter()] + + # 1 historical + 2 closed live bars = 3 yields + assert len(results) == 3 + + +def __test_no_live_bars_unchanged_behavior__(script_path, module_key, syminfo): + """When is_live=True but no BarUpdate arrives, behaves like normal backtest""" + historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(5)] + + runner = _create_live_runner( + script_path, module_key, syminfo, + iter(historical), + ) + + results = [(c, dict(p)) for c, p in runner.run_iter()] + + assert len(results) == 5 + for i, (_, plot_data) in enumerate(results): + assert plot_data["hist"] == 1 + assert plot_data["rt"] == 0 + assert plot_data["var"] == i + 1 + assert plot_data["varip"] == i + 1 diff --git a/tests/t00_pynecore/core/test_019_live_mode.toml b/tests/t00_pynecore/core/test_019_live_mode.toml new file mode 100644 index 0000000..3babd5c --- /dev/null +++ b/tests/t00_pynecore/core/test_019_live_mode.toml @@ -0,0 +1,38 @@ +# Indicator / Strategy / Library Settings + +[script] +#overlay = false +#format = "inherit" +#precision = +#scale = +#pyramiding 
= 1 +#calc_on_order_fills = false +#calc_on_every_tick = false +#max_bars_back = 0 +#timeframe = +#timeframe_gaps = true +#explicit_plot_zorder = false +#max_lines_count = 50 +#max_labels_count = 50 +#max_boxes_count = 50 +#calc_bars_count = 0 +#max_polylines_count = 50 +#dynamic_requests = false +#behind_chart = true +#backtest_fill_limits_assumption = 0 +#default_qty_type = "cash" +#default_qty_value = 1 +#initial_capital = 1000000 +#currency = "NONE" +#slippage = 0 +#commission_type = "percent" +#commission_value = 0.0 +#process_orders_on_close = false +#close_entries_rule = "FIFO" +#margin_long = 100.0 +#margin_short = 100.0 +#risk_free_rate = 2.0 +#use_bar_magnifier = true +#fill_orders_on_standard_ohlc = false + +# Input Settings From 8ff9ba42990d086f8dce96b58221dfc868b7e2d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Wed, 8 Apr 2026 09:28:21 +0200 Subject: [PATCH 20/64] docs: add live mode documentation and default 500-bar prefetch Add docs/advanced/live-mode.md covering the full live streaming architecture: phases, barstate, var/varip, order processing, provider string format, and CLI options. Update cli/run.md with provider mode section and --live flag. Update plugin-system.md hierarchy (LiveProviderPlugin no longer planned). Default -f to -500 bars in provider mode instead of requiring it. 
--- docs/advanced/README.md | 1 + docs/advanced/live-mode.md | 226 ++++++++++++++++++++++++++++++ docs/cli/run.md | 47 ++++++- docs/development/plugin-system.md | 11 +- src/pynecore/cli/commands/run.py | 11 +- 5 files changed, 284 insertions(+), 12 deletions(-) create mode 100644 docs/advanced/live-mode.md diff --git a/docs/advanced/README.md b/docs/advanced/README.md index 2071e65..c9643d0 100644 --- a/docs/advanced/README.md +++ b/docs/advanced/README.md @@ -26,3 +26,4 @@ Advanced topics and features of PyneCore - [Extra Fields](./extra-fields.md) - Custom CSV columns beyond OHLCV in scripts - [request.security() Internals](./request-security-internals.md) - Multiprocessing architecture, AST transformation, shared memory - [Bar Magnifier](./bar-magnifier.md) - Accurate intrabar order fills using lower-timeframe data +- [Live Mode](./live-mode.md) - Real-time streaming with intra-bar updates and paper trading diff --git a/docs/advanced/live-mode.md b/docs/advanced/live-mode.md new file mode 100644 index 0000000..50e9d39 --- /dev/null +++ b/docs/advanced/live-mode.md @@ -0,0 +1,226 @@ + + +# Live Mode + +Live mode extends PyneCore beyond backtesting: after replaying historical data the script +seamlessly transitions to real-time streaming from a `LiveProviderPlugin`. Indicators update +on every tick; strategies run in paper-trading mode with tick-level order fill accuracy. + +## Quick Start + +```bash +# Stream BTC/USDT on 1-minute bars from Bybit via CCXT, prefetching 500 historical bars +pyne run my_strategy.py ccxt:BYBIT:BTC/USDT:USDT@1 --live -f -500 +``` + +The `--live` flag requires a **provider string** as the data source — it does not work with +local OHLCV files. + +## Historical Bar Count + +The `-f` / `--from` parameter controls how many historical bars the script processes before +going live. **Default: 500 bars** — enough for most scripts out of the box. 
+ +Indicators need a warm-up period: `ta.sma(close, 200)` requires 200 bars before producing its +first value, and the initial values are still distorted by limited lookback. A good rule of +thumb is **2× the largest `length` parameter** in your script: + +```bash +# Script uses ta.ema(close, 50) and ta.atr(14) → largest length is 50 → -f -100 is enough +pyne run my_strategy.py ccxt:BYBIT:BTC/USDT:USDT@1 --live -f -100 + +# Script uses ta.sma(close, 200) → use -f -400 +pyne run my_strategy.py ccxt:BYBIT:BTC/USDT:USDT@1 --live -f -400 + +# No -f specified → default 500 bars, sufficient for most scripts +pyne run my_strategy.py ccxt:BYBIT:BTC/USDT:USDT@1 --live +``` + +Too few bars → indicators produce `NaN` or unreliable values, leading to missed or false +signals. Too many → longer startup, but no harm beyond that. When in doubt, err on the side +of more. + +## How It Works + +A live session has two phases: + +| Phase | Data source | `barstate.ishistory` | `barstate.isrealtime` | Strategy | +|------------|--------------------|----------------------|-----------------------|----------| +| Historical | Provider download | `True` | `False` | Suppressed | +| Live | WebSocket streaming | `False` | `True` | Active | + +### Historical Phase + +The provider downloads OHLCV data (controlled by `-f` / `--from`). The script runs on each +bar exactly like a normal backtest — indicators build up their series, `ta.sma()` warms up, +etc. **Strategy functions are suppressed**: calls to `strategy.entry()`, `strategy.exit()`, +and friends are silently ignored. This prevents phantom trades on historical bars that the +script sees for the first time. + +### Transition + +The ScriptRunner detects the transition automatically when the iterator yields its first +`BarUpdate` object (instead of a plain `OHLCV`). 
At this point: + +- `barstate.islastconfirmedhistory` becomes `True` on the final historical bar +- Output writers flush to disk (plot CSV, trade CSV) +- Strategy suppression is lifted — orders are now active + +### Live Phase + +The provider streams `BarUpdate` objects via WebSocket. Each update carries an OHLCV snapshot +and an `is_closed` flag: + +``` +BarUpdate(ohlcv=OHLCV(...), is_closed=False) # intra-bar tick +BarUpdate(ohlcv=OHLCV(...), is_closed=True) # bar closed +``` + +The script executes on **every update** — both intra-bar ticks and bar closes. + +## Intra-Bar Updates + +On TradingView, a real-time script re-executes on every tick within a bar. PyneCore replicates +this behavior in live mode. + +### barstate Values + +| Event | `isconfirmed` | `isnew` | `islast` | `isrealtime` | +|--------------------|---------------|---------|----------|--------------| +| First tick of bar | `False` | `True` | `True` | `True` | +| Later intra-bar | `False` | `False` | `True` | `True` | +| Bar close | `True` | `False` | `True` | `True` | + +### var vs varip in Live Mode + +The distinction between `Persistent` (Pine `var`) and `IBPersistent` (Pine `varip`) becomes +meaningful during intra-bar re-executions — the same mechanism used by +[calc_on_order_fills](./bar-magnifier.md#calc_on_order_fills): + +- **`Persistent` (var)**: rolled back to the bar-open snapshot before each intra-bar tick. + Every tick starts from the same baseline. +- **`IBPersistent` (varip)**: **not** rolled back — accumulates across all ticks within the bar. + +```python +var_counter: Persistent[int] = 0 +varip_counter: IBPersistent[int] = 0 + +var_counter += 1 # always == bar_index + 1 (rolled back each tick) +varip_counter += 1 # bar_index + 1 + total intra-bar ticks across all bars +``` + +This uses the same `VarSnapshot` mechanism as the bar magnifier's COOF loop. 
+ +## Order Processing + +Strategies use **magnifier-style order processing** in live mode: intra-bar ticks are +accumulated as `sub_bars`. When the bar closes, `process_orders_magnified(sub_bars, final_bar)` +runs — checking limit, stop, and trailing stop orders against each tick's OHLCV in chronological +order. This gives tick-level fill accuracy even in paper trading. + +If `calc_on_order_fills=True`, the COOF re-execution loop runs on bar close as well — exactly +as it does in backtesting with the bar magnifier. + +### Strategy Suppression + +During the historical phase, all 7 strategy functions (`entry`, `exit`, `close`, `close_all`, +`cancel`, `cancel_all`, `order`) are no-ops. This is controlled by the internal +`lib._strategy_suppressed` flag — the same pattern as `lib._lib_semaphore`. + +## Output + +### Plot CSV + +Written only on **closed bars**. Intra-bar ticks do not produce plot output. This matches +TradingView behavior where plot values are committed only at bar close. + +### Strategy Stats CSV + +In live mode, the strategy statistics file is **rewritten after every closed bar** — not +appended. This means opening the file at any time shows the complete, up-to-date statistics +aggregated over the entire run (historical + live). + +### Trade CSV + +Trade entries and exits are recorded on the bar where the fill occurs, as in backtesting. 
+ +## Provider String Format + +``` +provider:EXCHANGE:SYMBOL:SETTLE@TIMEFRAME +``` + +| Part | Example | Description | +|--------------|----------------|------------------------------------| +| `provider` | `ccxt` | Plugin name (entry point) | +| `EXCHANGE` | `BYBIT` | Exchange identifier | +| `SYMBOL` | `BTC/USDT` | Trading pair | +| `SETTLE` | `USDT` | Settlement currency (optional) | +| `TIMEFRAME` | `1` | TradingView timeframe format | + +The `-f` / `--from` option accepts a negative integer for relative bar count: + +```bash +# Prefetch last 500 bars before going live +pyne run script.py ccxt:BYBIT:BTC/USDT:USDT@1 --live -f -500 +``` + +## CLI Options + +| Flag | Description | +|-----------------------|------------------------------------------------------| +| `--live`, `-l` | Enable live streaming after historical phase | +| `--shutdown-timeout` | Max seconds for graceful shutdown (default: 120) | + +Press `Ctrl+C` to stop live streaming. The provider goes through a graceful shutdown sequence: +`can_shutdown()` is polled every second, then `disconnect()` is called. + +## Architecture + +``` +┌─────────────┐ provider.download() ┌──────────────┐ +│ run.py │ ───────────────────────── │ OHLCV file │ +│ (CLI) │ └──────┬───────┘ +│ │ │ OHLCVReader +│ │ itertools.chain() │ +│ │ ◄────────────────────────────────┤ +│ │ │ +│ │ live_ohlcv_generator() ┌──────┴───────┐ +│ │ ◄──── Queue ◄──── async ──│ WebSocket │ +└──────┬──────┘ └──────────────┘ + │ Iterator[OHLCV | BarUpdate] + │ +┌──────▼───────┐ +│ ScriptRunner │ +│ │ isinstance() detects BarUpdate → live transition +│ historical │ OHLCV bars → normal backtest loop +│ live loop │ BarUpdate → intra-bar + bar close processing +└──────────────┘ +``` + +The `live_ohlcv_generator` bridges the async WebSocket world to synchronous iteration via a +background thread and `queue.Queue`. The ScriptRunner is completely data-source agnostic — it +only cares whether it receives `OHLCV` or `BarUpdate` objects. 
+ +## Limitations + +- **Paper trading only** — no real order execution. `BrokerPlugin` for live order routing is + planned but not yet implemented. +- **Single timeframe** — `request.security()` with live providers (multi-timeframe live) is not + yet supported. +- **Provider required** — `--live` only works with provider strings, not local data files. +- **No replay** — there is no mechanism to replay missed ticks if the connection drops mid-bar. + The provider reconnects and resumes from the next available update. \ No newline at end of file diff --git a/docs/cli/run.md b/docs/cli/run.md index ed0654d..f19141b 100644 --- a/docs/cli/run.md +++ b/docs/cli/run.md @@ -138,6 +138,40 @@ The `run` command has two required arguments: Note: you don't need to write the file extensions in the command. +## Provider Mode + +Instead of a local data file, you can pass a **provider string** as the `DATA` argument. +The provider plugin downloads historical data and (with `--live`) streams real-time updates: + +```bash +# Download historical data from CCXT/Bybit and run the script +pyne run my_strategy.py ccxt:BYBIT:BTC/USDT:USDT@1 + +# Same, but continue with live streaming after the historical phase +pyne run my_strategy.py ccxt:BYBIT:BTC/USDT:USDT@1 --live -f -500 +``` + +Provider string format: `provider:EXCHANGE:SYMBOL:SETTLE@TIMEFRAME` + +| Part | Example | Description | +|-------------|-------------|--------------------------------| +| `provider` | `ccxt` | Plugin name (entry point) | +| `EXCHANGE` | `BYBIT` | Exchange identifier | +| `SYMBOL` | `BTC/USDT` | Trading pair | +| `SETTLE` | `USDT` | Settlement currency (optional) | +| `TIMEFRAME` | `1` | TradingView timeframe format | + +The `-f` / `--from` option accepts a **negative integer** for relative bar count when using +a provider string: + +```bash +# Prefetch the last 500 bars +pyne run script.py ccxt:BYBIT:ETH/USDT:USDT@5 -f -500 +``` + +See [Live Mode](../advanced/live-mode.md) for details on real-time streaming, 
intra-bar +updates, strategy suppression, and paper trading. + ## Command Options The `run` command supports several options to customize the execution: @@ -148,7 +182,7 @@ The `run` command supports several options to customize the execution: ### Date Range Options -- `--from`, `-f`: Start date (UTC) in 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS' format. If not specified, it will use the first date in the data +- `--from`, `-f`: Start date (UTC) in 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS' format. If not specified, it will use the first date in the data. In provider mode, also accepts a negative integer for relative bar count (e.g. `-f -500`); defaults to `-500` bars if omitted. - `--to`, `-t`: End date (UTC) in 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS' format. If not specified, it will use the last date in the data. Example: @@ -157,6 +191,17 @@ Example: pyne run my_strategy.py eurusd_data.ohlcv --from "2023-01-01" --to "2023-12-31" ``` +### Live Mode Options + +- `--live`, `-l`: Continue with real-time data streaming after the historical phase. Only available in provider mode (provider string as data source). See [Live Mode](../advanced/live-mode.md). +- `--shutdown-timeout`: Maximum seconds to wait for graceful provider shutdown when stopping (default: 120). + +Example: +```bash +# Stream live 1-minute BTC/USDT bars after 500 historical bars +pyne run my_strategy.py ccxt:BYBIT:BTC/USDT:USDT@1 --live -f -500 +``` + ### Output Path Options - `--plot`, `-pp`: Path to save the plot data (CSV format). If not specified, it will be saved as `.csv` in the `workdir/output/` directory. 
diff --git a/docs/development/plugin-system.md b/docs/development/plugin-system.md index 691a4f3..17af8b0 100644 --- a/docs/development/plugin-system.md +++ b/docs/development/plugin-system.md @@ -26,12 +26,15 @@ class hierarchy determines what a plugin can do: ``` Plugin (base) -├── ProviderPlugin — Offline OHLCV data provider -├── CLIPlugin — CLI subcommands and parameter hooks -├── ExtensionPlugin — Hook-based script extension (planned) -└── LiveProviderPlugin — WebSocket/streaming data (planned) +├── ProviderPlugin — Offline OHLCV data provider +│ └── LiveProviderPlugin — WebSocket/streaming data (extends ProviderPlugin) +├── CLIPlugin — CLI subcommands and parameter hooks +└── ExtensionPlugin — Hook-based script extension (planned) ``` +`LiveProviderPlugin` inherits from `ProviderPlugin` — every live provider can also download +historical data. See [Live Mode](../advanced/live-mode.md) for usage details. + Multiple inheritance combines capabilities: ```python diff --git a/src/pynecore/cli/commands/run.py b/src/pynecore/cli/commands/run.py index bd1e140..7a3bb2f 100644 --- a/src/pynecore/cli/commands/run.py +++ b/src/pynecore/cli/commands/run.py @@ -135,12 +135,9 @@ def _download_provider_data(provider_str: str, time_from_str: str | None) -> _Pr secho(f"Plugin '{ps.provider}' is not a data provider.", err=True, fg=colors.RED) raise Exit(1) - # Parse --from (required in provider mode) + # Default to -500 bars if --from not specified in provider mode if not time_from_str: - secho("Error: --from / -f is required in provider mode.\n" - " Examples: -f 30 (30 days back), -f -500 (500 bars back), -f 2025-01-01", - err=True, fg=colors.RED) - raise Exit(1) + time_from_str = "-500" time_from_value = _parse_time_value(time_from_str, allow_bars=True) time_to_dt = datetime.now(UTC).replace(second=0, microsecond=0) @@ -215,7 +212,7 @@ def run( time_from: str | None = Option(None, '--from', '-f', metavar="[DATE|DAYS|-BARS]", help="Start: date (2025-01-01), days back (30), " - 
"or -N bars back (-500). Required in provider mode."), + "or -N bars back (-500). Default: -500 bars in provider mode."), time_to: str | None = Option(None, '--to', '-t', metavar="[DATE|DAYS]", help="End: date or days from start (default: end of data or now)"), @@ -269,7 +266,7 @@ def run( Provider mode: pyne run script.py ccxt:BYBIT:BTC/USDT:USDT@1D -f -500 In provider mode, historical data is downloaded automatically. The --from/-f parameter - is required and accepts: date (2025-01-01), days back (30), or -N bars back (-500). + accepts: date (2025-01-01), days back (30), or -N bars back (-500). Default: -500 bars. [bold]Pine Script Support:[/bold] Pine Script (.pine) files are automatically compiled to Python (.py) before execution. From d74a30c8627ea3c89570a5c9a45d072602780cda Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Wed, 8 Apr 2026 12:35:36 +0200 Subject: [PATCH 21/64] fix(live): correct strategy tick execution and varip handling Run live strategies on intra-bar ticks only when calc_on_every_tick is enabled, keep barstate.isnew true for close-only execution, and clear plot data before the final closed-bar run. Add live strategy tests covering var and varip behavior for default and calc_on_every_tick modes. 
--- src/pynecore/core/script_runner.py | 32 ++-- .../core/test_020_live_strategy_varip.py | 149 ++++++++++++++++++ .../core/test_020_live_strategy_varip.toml | 38 +++++ 3 files changed, 208 insertions(+), 11 deletions(-) create mode 100644 tests/t00_pynecore/core/test_020_live_strategy_varip.py create mode 100644 tests/t00_pynecore/core/test_020_live_strategy_varip.toml diff --git a/src/pynecore/core/script_runner.py b/src/pynecore/core/script_runner.py index ffdabca..f8eff70 100644 --- a/src/pynecore/core/script_runner.py +++ b/src/pynecore/core/script_runner.py @@ -514,10 +514,12 @@ def _lazy_spawn(sid: str): # Initialize calc_on_order_fills snapshot (for COOF or live mode) var_snapshot = None is_live = lib._is_live + # Indicators always run on every tick; strategies only if calc_on_every_tick + run_on_every_tick = not is_strat or self.script.calc_on_every_tick if is_strat and self.script.calc_on_order_fills: from .var_snapshot import VarSnapshot var_snapshot = VarSnapshot(self.script_module, script._registered_libraries) - elif is_live: + elif is_live and run_on_every_tick: from .var_snapshot import VarSnapshot var_snapshot = VarSnapshot(self.script_module, script._registered_libraries) @@ -712,18 +714,20 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): if is_new_bar and not bar_update.is_closed: # ── Bar open (first intra-bar tick) ── sub_bars = [candle] - if var_snapshot and var_snapshot.has_vars: - var_snapshot.save() - _run_libs_and_main() + if run_on_every_tick: + if var_snapshot and var_snapshot.has_vars: + var_snapshot.save() + _run_libs_and_main() last_bar_timestamp = candle.timestamp elif not bar_update.is_closed: # ── Subsequent intra-bar tick ── sub_bars.append(candle) - if var_snapshot and var_snapshot.has_vars: - var_snapshot.restore() - function_isolation.reset() - _run_libs_and_main() + if run_on_every_tick: + if var_snapshot and var_snapshot.has_vars: + var_snapshot.restore() + function_isolation.reset() + _run_libs_and_main() elif 
bar_update.is_closed: # ── Bar close ── @@ -733,9 +737,14 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): var_snapshot.save() else: sub_bars.append(candle) - if var_snapshot and var_snapshot.has_vars: - var_snapshot.restore() - function_isolation.reset() + if run_on_every_tick: + if var_snapshot and var_snapshot.has_vars: + var_snapshot.restore() + function_isolation.reset() + + # Strategy not running on ticks: bar close is first execution + if not run_on_every_tick: + barstate.isnew = True # Order processing: magnified if sub_bars available if is_strat and position: @@ -753,6 +762,7 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): position.process_orders() # Final script execution for the closed bar + lib._plot_data.clear() _run_libs_and_main() if is_strat and position: diff --git a/tests/t00_pynecore/core/test_020_live_strategy_varip.py b/tests/t00_pynecore/core/test_020_live_strategy_varip.py new file mode 100644 index 0000000..8072811 --- /dev/null +++ b/tests/t00_pynecore/core/test_020_live_strategy_varip.py @@ -0,0 +1,149 @@ +""" +@pyne + +Tests that strategies handle varip correctly in live mode: +- Default (calc_on_every_tick=False): script runs only on bar close, var == varip +- calc_on_every_tick=True: script runs every tick, var rolls back, varip persists +""" +import sys +import itertools + +from pynecore.lib import barstate, plot, script, strategy +from pynecore.types import Persistent, IBPersistent + + +@script.strategy( + "Live Strategy varip Test", + overlay=True, + initial_capital=100000, + default_qty_type=strategy.fixed, + default_qty_value=1, +) +def main(): + var_count: Persistent[int] = 0 + varip_count: IBPersistent[int] = 0 + var_count += 1 + varip_count += 1 + plot(var_count, 'var') + plot(varip_count, 'varip') + plot(1 if barstate.isnew else 0, 'isnew') + + +def _make_ohlcv(ts, close=100.0): + from pynecore.types.ohlcv import OHLCV + return OHLCV(timestamp=ts, open=close, high=close + 1, low=close - 1, + 
close=close, volume=1000.0) + + +def _make_bar_update(ts, is_closed=True, close=100.0): + from pynecore.core.plugin.live_provider import BarUpdate + return BarUpdate(ohlcv=_make_ohlcv(ts, close), is_closed=is_closed) + + +def _create_live_runner(script_path, module_key, syminfo, ohlcv_iter): + """Helper: set live mode flags, clean module cache, create ScriptRunner.""" + from pynecore.core.script_runner import ScriptRunner + from pynecore import lib + + for key in [module_key, script_path.stem]: + sys.modules.pop(key, None) + + setattr(lib, '_is_live', True) + setattr(lib, '_strategy_suppressed', True) + return ScriptRunner(script_path, ohlcv_iter, syminfo) + + +def __test_strategy_default_var_equals_varip__(script_path, module_key, syminfo): + """Default strategy: script runs only at bar close, var and varip are identical.""" + historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(3)] + live = [ + _make_bar_update(3 * 60, is_closed=False, close=104.0), # bar open + _make_bar_update(3 * 60, is_closed=False, close=104.5), # intra-bar + _make_bar_update(3 * 60, is_closed=True, close=105.0), # bar close + ] + + runner = _create_live_runner( + script_path, module_key, syminfo, + itertools.chain(historical, live), + ) + + results = [] + for candle, plot_data, _trades in runner.run_iter(): + results.append(dict(plot_data)) + + assert len(results) == 4, f"Expected 4 bars, got {len(results)}" + + # Historical: var and varip both increment by 1 + for i in range(3): + assert results[i]['var'] == i + 1 + assert results[i]['varip'] == i + 1 + + # Live bar: strategy runs ONCE (bar close only), so var == varip == 4 + assert results[3]['var'] == 4, f"var should be 4, got {results[3]['var']}" + assert results[3]['varip'] == 4, f"varip should be 4, got {results[3]['varip']}" + assert results[3]['isnew'] == 1, "barstate.isnew should be True at bar close" + + +def __test_strategy_calc_on_every_tick__(script_path, module_key, syminfo): + """Strategy with calc_on_every_tick: var 
rolls back, varip persists like indicator.""" + historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(3)] + live = [ + _make_bar_update(3 * 60, is_closed=False, close=104.0), # bar open + _make_bar_update(3 * 60, is_closed=False, close=104.5), # intra-bar + _make_bar_update(3 * 60, is_closed=True, close=105.0), # bar close + ] + + runner = _create_live_runner( + script_path, module_key, syminfo, + itertools.chain(historical, live), + ) + runner.script.calc_on_every_tick = True + + results = [] + for candle, plot_data, _trades in runner.run_iter(): + results.append(dict(plot_data)) + + assert len(results) == 4, f"Expected 4 bars, got {len(results)}" + + # Historical: identical + for i in range(3): + assert results[i]['var'] == i + 1 + assert results[i]['varip'] == i + 1 + + # Live bar: 3 executions (open, intra, close) + # var: rolled back each time, final = 3 + 1 = 4 + # varip: accumulated across 3 ticks = 3 + 3 = 6 + assert results[3]['var'] == 4, f"var should be 4, got {results[3]['var']}" + assert results[3]['varip'] == 6, f"varip should be 6, got {results[3]['varip']}" + + +def __test_strategy_default_barstate_isnew_on_every_live_bar__(script_path, module_key, syminfo): + """Default strategy: barstate.isnew is True on every live bar close (strategy runs once).""" + historical = [_make_ohlcv(0, 100.0)] + live = [ + # First live bar: intra-bar ticks then close + _make_bar_update(60, is_closed=False, close=101.0), + _make_bar_update(60, is_closed=True, close=102.0), + # Second live bar: direct close (no intra-bar) + _make_bar_update(120, is_closed=True, close=103.0), + # Third live bar: with intra-bar ticks + _make_bar_update(180, is_closed=False, close=104.0), + _make_bar_update(180, is_closed=False, close=104.5), + _make_bar_update(180, is_closed=True, close=105.0), + ] + + runner = _create_live_runner( + script_path, module_key, syminfo, + itertools.chain(historical, live), + ) + + results = [] + for candle, plot_data, _trades in runner.run_iter(): + 
results.append(dict(plot_data)) + + assert len(results) == 4, f"Expected 4 bars, got {len(results)}" + + # Live bars (1-3): strategy runs once per bar, so isnew should always be True + for i in range(1, 4): + assert results[i]['isnew'] == 1, \ + f"Bar {i}: barstate.isnew should be True, got {results[i]['isnew']}" diff --git a/tests/t00_pynecore/core/test_020_live_strategy_varip.toml b/tests/t00_pynecore/core/test_020_live_strategy_varip.toml new file mode 100644 index 0000000..e87dbc5 --- /dev/null +++ b/tests/t00_pynecore/core/test_020_live_strategy_varip.toml @@ -0,0 +1,38 @@ +# Indicator / Strategy / Library Settings + +[script] +#overlay = true +#format = "inherit" +#precision = +#scale = +#pyramiding = 1 +#calc_on_order_fills = false +#calc_on_every_tick = false +#max_bars_back = 0 +#timeframe = +#timeframe_gaps = true +#explicit_plot_zorder = false +#max_lines_count = 50 +#max_labels_count = 50 +#max_boxes_count = 50 +#calc_bars_count = 0 +#max_polylines_count = 50 +#dynamic_requests = false +#behind_chart = true +#backtest_fill_limits_assumption = 0 +#default_qty_type = "fixed" +#default_qty_value = 1 +#initial_capital = 100000 +#currency = "NONE" +#slippage = 0 +#commission_type = "percent" +#commission_value = 0.0 +#process_orders_on_close = false +#close_entries_rule = "FIFO" +#margin_long = 100.0 +#margin_short = 100.0 +#risk_free_rate = 2.0 +#use_bar_magnifier = true +#fill_orders_on_standard_ohlc = false + +# Input Settings From 5c96cc09d717290e7be1183e01a6b25e4b9d9607 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Wed, 8 Apr 2026 22:44:42 +0200 Subject: [PATCH 22/64] feat(core): persist OHLCV extra fields in sidecar CSV Store candle extra_fields in a .extra.csv sidecar file, including gap-filled rows, reopen handling, truncate cleanup, and file rebuilds. Update CCXT provider tests to use plugin config paths and assert live updates return BarUpdate objects. 
--- src/pynecore/core/ohlcv_file.py | 98 ++++++++++++++++++- .../data/test_002_ccxt_provider.py | 11 +-- .../data/test_003_ccxt_live_provider.py | 13 +-- 3 files changed, 108 insertions(+), 14 deletions(-) diff --git a/src/pynecore/core/ohlcv_file.py b/src/pynecore/core/ohlcv_file.py index 44ab608..66ca690 100644 --- a/src/pynecore/core/ohlcv_file.py +++ b/src/pynecore/core/ohlcv_file.py @@ -194,7 +194,8 @@ class OHLCVWriter: '_price_changes', '_price_decimals', '_last_close', '_analyzed_tick_size', '_analyzed_price_scale', '_analyzed_min_move', '_confidence', '_trading_hours', '_analyzed_opening_hours', '_timestamp_offsets', '_analyzed_timezone', - '_truncate') + '_truncate', + '_extra_file', '_extra_writer', '_extra_headers', '_extra_row_count') def __init__(self, path: str | Path, truncate: bool = False): self.path: str = str(path) @@ -216,6 +217,11 @@ def __init__(self, path: str | Path, truncate: bool = False): # Trading hours analysis self._trading_hours: dict[tuple[int, int], int] = {} # (weekday, hour) -> count self._analyzed_opening_hours: list | None = None + # Extra fields sidecar CSV + self._extra_file = None + self._extra_writer = None + self._extra_headers: list[str] | None = None + self._extra_row_count: int = 0 def __enter__(self): self.open() @@ -359,6 +365,16 @@ def open(self) -> 'OHLCVWriter': if self._size > 0 and not self._truncate: self._collect_existing_trading_hours() + # Check for existing extra fields sidecar + extra_path = Path(self.path).with_suffix('.extra.csv') + if extra_path.exists(): + with open(extra_path, 'r', newline='') as ef: + reader = csv.reader(ef) + headers = next(reader, None) + if headers: + self._extra_headers = headers + self._extra_row_count = sum(1 for _ in reader) + return self def write(self, candle: OHLCV) -> None: @@ -418,6 +434,7 @@ def write(self, candle: OHLCV) -> None: prev_close, prev_close, -1.0) self._file.seek(self._current_pos * RECORD_SIZE) self._file.write(gap_data) + self._write_extra_gap() 
self._current_pos += 1 self._size = max(self._size, self._current_pos) expected_ts += self._interval @@ -430,6 +447,9 @@ def write(self, candle: OHLCV) -> None: self._file.write(data) self._file.flush() + # Write extra fields to sidecar CSV + self._write_extra_data(candle.extra_fields) + # Collect data for tick size analysis self._collect_price_data(candle) @@ -486,6 +506,15 @@ def truncate(self) -> None: if self._size == 0: self._start_timestamp = None + # Clean up extra fields sidecar on full truncate + if self._current_pos == 0: + self._close_extra_csv() + extra_path = Path(self.path).with_suffix('.extra.csv') + if extra_path.exists(): + extra_path.unlink() + self._extra_headers = None + self._extra_row_count = 0 + def close(self): """ Close the file @@ -493,6 +522,65 @@ def close(self): if self._file: self._file.close() self._file = None + self._close_extra_csv() + + def _close_extra_csv(self) -> None: + """Close the extra fields sidecar CSV if open.""" + if self._extra_file: + self._extra_file.close() + self._extra_file = None + self._extra_writer = None + + def _open_extra_csv(self, headers: list[str]) -> None: + """Open the extra fields sidecar CSV for writing.""" + extra_path = Path(self.path).with_suffix('.extra.csv') + + if (self._extra_headers == headers + and self._extra_row_count <= self._current_pos): + # Compatible header exists, append and pad if needed + self._extra_file = open(extra_path, 'a', newline='') + self._extra_writer = csv.writer(self._extra_file) + empty = [''] * len(headers) + for _ in range(self._current_pos - self._extra_row_count): + self._extra_writer.writerow(empty) + self._extra_row_count = self._current_pos + else: + # New file or incompatible header + self._extra_file = open(extra_path, 'w', newline='') + self._extra_writer = csv.writer(self._extra_file) + self._extra_headers = headers + self._extra_writer.writerow(headers) + empty = [''] * len(headers) + for _ in range(self._current_pos): + self._extra_writer.writerow(empty) + 
self._extra_row_count = self._current_pos + + def _write_extra_gap(self) -> None: + """Write an empty row to the extra CSV for a gap-fill position.""" + if self._extra_writer is not None: + self._extra_writer.writerow([''] * len(self._extra_headers)) + self._extra_row_count += 1 + + def _write_extra_data(self, extra_fields: dict | None) -> None: + """Write extra fields data row to the sidecar CSV.""" + if extra_fields and self._extra_writer is None: + self._open_extra_csv(list(extra_fields.keys())) + if self._extra_writer is not None: + if extra_fields: + row = [] + for h in self._extra_headers: + v = extra_fields.get(h) + if v is None: + row.append('') + elif isinstance(v, float): + row.append(_format_float(v)) + else: + row.append(str(v)) + self._extra_writer.writerow(row) + else: + self._extra_writer.writerow([''] * len(self._extra_headers)) + self._extra_row_count += 1 + self._extra_file.flush() def _collect_price_data(self, candle: OHLCV) -> None: """ @@ -995,8 +1083,14 @@ def _rebuild_with_correct_interval(self, new_interval: int) -> None: for record in current_records: temp_writer.write(record) - # Close current file + # Close current file and clean up extra CSV (will be rebuilt) self._file.close() + self._close_extra_csv() + extra_path = Path(self.path).with_suffix('.extra.csv') + if extra_path.exists(): + extra_path.unlink() + self._extra_headers = None + self._extra_row_count = 0 # Replace original with rebuilt file shutil.move(temp_path, self.path) diff --git a/tests/t00_pynecore/data/test_002_ccxt_provider.py b/tests/t00_pynecore/data/test_002_ccxt_provider.py index 1e484d0..f6d2552 100644 --- a/tests/t00_pynecore/data/test_002_ccxt_provider.py +++ b/tests/t00_pynecore/data/test_002_ccxt_provider.py @@ -108,11 +108,10 @@ def __test_ccxt_real_data_download__(tmp_path): if not config_dir.exists() or not config_dir.is_dir(): pytest.skip("No config directory found in workdir") - # Check if ccxt config exists (new per-plugin config or legacy providers.toml) 
- ccxt_toml = config_dir / "ccxt.toml" - providers_toml = config_dir / "providers.toml" - if not ccxt_toml.exists() and not providers_toml.exists(): - pytest.skip("No ccxt.toml or providers.toml found in config directory") + # Check if ccxt config exists + ccxt_toml = config_dir / "plugins" / "ccxt.toml" + if not ccxt_toml.exists(): + pytest.skip("No config/plugins/ccxt.toml found") # Create temporary data directory for the test data_dir = tmp_path / "data" @@ -149,7 +148,7 @@ def __test_ccxt_real_data_download__(tmp_path): # Load config via plugin config system from pynecore.core.config import ensure_config from pynecore.providers.ccxt import CCXTConfig - config = ensure_config(CCXTConfig, config_dir / 'ccxt.toml') + config = ensure_config(CCXTConfig, ccxt_toml) # Create provider instance provider = CCXTProvider( diff --git a/tests/t00_pynecore/data/test_003_ccxt_live_provider.py b/tests/t00_pynecore/data/test_003_ccxt_live_provider.py index 9480018..2bfa668 100644 --- a/tests/t00_pynecore/data/test_003_ccxt_live_provider.py +++ b/tests/t00_pynecore/data/test_003_ccxt_live_provider.py @@ -137,12 +137,13 @@ def __test_ccxt_live_generator_integration__(): ) received = [] - for ohlcv in live_ohlcv_generator(provider, "BTC/USDT:USDT", "1", - shutdown_timeout=5.0): - received.append(ohlcv) - assert isinstance(ohlcv, OHLCV) - assert ohlcv.timestamp > 0 - assert ohlcv.close > 0 + for update in live_ohlcv_generator(provider, "BTC/USDT:USDT", "1", + shutdown_timeout=5.0): + received.append(update) + assert isinstance(update, BarUpdate) + assert isinstance(update.ohlcv, OHLCV) + assert update.ohlcv.timestamp > 0 + assert update.ohlcv.close > 0 if len(received) >= 1: break From 4dc3f55ce5b1edcba3bbb49da4f5f5f4df00b265 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Thu, 9 Apr 2026 02:41:10 +0200 Subject: [PATCH 23/64] refactor: return OHLCV directly from live providers Remove BarUpdate and carry live bar state on OHLCV.is_closed instead. 
Add a live transition sentinel between historical and live streams so ScriptRunner can detect the mode switch without wrapper objects. Tighten provider download signatures and type assertions, and clean up related tests, imports, and minor lint issues. --- src/pynecore/cli/commands/data.py | 47 ++++++------- src/pynecore/cli/commands/run.py | 14 ++-- src/pynecore/core/csv_file.py | 4 +- src/pynecore/core/live_runner.py | 17 ++--- src/pynecore/core/plugin/__init__.py | 4 +- src/pynecore/core/plugin/live_provider.py | 17 +---- src/pynecore/core/plugin/provider.py | 6 +- src/pynecore/core/script_runner.py | 66 +++++++++---------- src/pynecore/providers/ccxt.py | 18 ++--- src/pynecore/types/ohlcv.py | 1 + .../core/test_016_live_provider.py | 20 ++---- .../t00_pynecore/core/test_018_live_runner.py | 55 +++++++--------- tests/t00_pynecore/core/test_019_live_mode.py | 53 +++++++-------- .../core/test_020_live_strategy_varip.py | 45 ++++++------- .../data/test_003_ccxt_live_provider.py | 31 ++++----- 15 files changed, 189 insertions(+), 209 deletions(-) diff --git a/src/pynecore/cli/commands/data.py b/src/pynecore/cli/commands/data.py index b802ad3..3f687f6 100644 --- a/src/pynecore/cli/commands/data.py +++ b/src/pynecore/cli/commands/data.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, TypeAlias +from typing import TYPE_CHECKING, TypeAlias, cast from pathlib import Path from enum import Enum from datetime import datetime, timedelta, UTC @@ -45,7 +45,7 @@ class AvailableProvidersEnum(Enum): _cls = _ep.load() if isinstance(_cls, type) and issubclass(_cls, ProviderPlugin): _provider_names.append(_name) - except Exception: + except Exception: # noqa pass AvailableProvidersEnum = Enum('Provider', { name.upper(): name.lower() for name in sorted(_provider_names) @@ -137,15 +137,16 @@ def download( Download historical OHLCV data """ # Load provider class via plugin system - provider_class = load_plugin(provider.value) + provider_class = cast(type[ProviderPlugin], 
load_plugin(provider.value)) try: # If list_symbols is True, we show the available symbols then exit if list_symbols: from ...core.config import ensure_config config = None - if hasattr(provider_class, 'Config') and provider_class.Config is not None: - config = ensure_config(provider_class.Config, + config_cls: type | None = getattr(provider_class, 'Config', None) + if config_cls is not None: + config = ensure_config(config_cls, app_state.config_dir / 'plugins' / f'{provider.value}.toml') with Progress(SpinnerColumn(), TextColumn("{task.description}"), transient=True) as progress: progress.add_task(description="Fetching market data...", total=None) @@ -163,11 +164,12 @@ def download( # Create provider instance with config from ...core.config import ensure_config config = None - if hasattr(provider_class, 'Config') and provider_class.Config is not None: - config = ensure_config(provider_class.Config, + config_cls: type | None = getattr(provider_class, 'Config', None) + if config_cls is not None: + config = ensure_config(config_cls, app_state.config_dir / 'plugins' / f'{provider.value}.toml') provider_instance: ProviderPlugin = provider_class(symbol=symbol, timeframe=timeframe, - ohlv_dir=app_state.data_dir, config=config) + ohlv_dir=app_state.data_dir, config=config) # Download symbol info if not exists if force_save_info or not provider_instance.is_symbol_info_exists(): @@ -195,7 +197,8 @@ def download( ohlcv_writer.truncate() # If the start date is "continue" (default), we resume from the last download - resolved_from: datetime | None = time_from + resolved_from: datetime = time_from + fetch_all = False if time_from == "continue": end_ts = ohlcv_writer.end_timestamp interval = ohlcv_writer.interval @@ -204,26 +207,26 @@ def download( # We need to add one interval to the start date to avoid downloading the same data resolved_from += timedelta(seconds=interval) elif getattr(provider_class, 'fetch_all_by_default', False): - resolved_from = None + resolved_from = 
datetime.fromtimestamp(0, UTC) + fetch_all = True else: # No data, download one year as default resolved_from = datetime.now(UTC) - timedelta(days=365) # We need to remove timezone info - if resolved_from is not None: - resolved_from = resolved_from.replace(tzinfo=None) + resolved_from = resolved_from.replace(tzinfo=None) time_to = time_to.replace(tzinfo=None) # We cannot download data from the future otherwise it would take very long if time_to > datetime.now(UTC).replace(tzinfo=None): time_to = datetime.now(UTC).replace(tzinfo=None) - # Check time range (skip for TV provider when resolved_from is None) - if resolved_from is not None and time_to < resolved_from: + # Check time range (skip for fetch_all providers) + if not fetch_all and time_to < resolved_from: secho("Error: End date (to) must be greater than start date (from)!", err=True, fg=colors.RED) raise Exit(1) # If the start date is before the start of the existing file, we truncate the file - if ohlcv_writer.start_timestamp and resolved_from is not None: + if ohlcv_writer.start_timestamp and not fetch_all: if resolved_from < ohlcv_writer.start_datetime.replace(tzinfo=None): secho(f"The start date (from: {resolved_from}) is before the start of the " f"existing file ({ohlcv_writer.start_datetime.replace(tzinfo=None)}).\n" @@ -234,8 +237,8 @@ def download( ohlcv_writer.seek(0) ohlcv_writer.truncate() - # TV provider with no resolved_from: use spinner-only progress (no time-based progress bar) - if resolved_from is None: + # fetch_all provider: use spinner-only progress (no time-based progress bar) + if fetch_all: with Progress( SpinnerColumn(finished_text="[green]✓"), TextColumn("{task.description}"), @@ -245,17 +248,15 @@ def download( description="Downloading all available OHLCV data...", total=None, ) - # Start downloading (no progress callback - TV provider shows its own progress) provider_instance.download_ohlcv(resolved_from, time_to, on_progress=None, limit=chunk_size) else: - start_from = 
resolved_from # narrowed to datetime - total_seconds = int((time_to - start_from).total_seconds()) + total_seconds = int((time_to - resolved_from).total_seconds()) # Get OHLCV data with Progress( SpinnerColumn(finished_text="[green]✓"), TextColumn("{task.description}"), - DateColumn(start_from), + DateColumn(resolved_from), BarColumn(), TimeElapsedColumn(), "/", @@ -268,11 +269,11 @@ def download( def cb_progress(current_time: datetime): """ Callback to update progress """ - elapsed_seconds = int((current_time - start_from).total_seconds()) + elapsed_seconds = int((current_time - resolved_from).total_seconds()) progress.update(task, completed=elapsed_seconds) # Start downloading - provider_instance.download_ohlcv(start_from, time_to, on_progress=cb_progress, limit=chunk_size) + provider_instance.download_ohlcv(resolved_from, time_to, on_progress=cb_progress, limit=chunk_size) except (ImportError, ValueError) as e: secho(str(e), err=True, fg=colors.RED) diff --git a/src/pynecore/cli/commands/run.py b/src/pynecore/cli/commands/run.py index 7a3bb2f..b0eb3f1 100644 --- a/src/pynecore/cli/commands/run.py +++ b/src/pynecore/cli/commands/run.py @@ -75,7 +75,7 @@ def _parse_time_value(value: str | None, *, allow_bars: bool = False) -> datetim """ if value is None: return None - value = value.strip() + value: str = value.strip() # Negative number = bar count (only for --from in provider mode) if allow_bars and value.startswith('-'): @@ -148,12 +148,14 @@ def _download_provider_data(provider_str: str, time_from_str: str | None) -> _Pr tf_seconds = in_seconds(ps.timeframe) time_from_dt = time_to_dt - timedelta(seconds=tf_seconds * bar_count) else: + assert isinstance(time_from_value, datetime) time_from_dt = time_from_value # Load config config = None - if hasattr(provider_class, 'Config') and provider_class.Config is not None: - config = ensure_config(provider_class.Config, + config_cls: type | None = getattr(provider_class, 'Config', None) + if config_cls is not None: + 
config = ensure_config(config_cls, app_state.config_dir / 'plugins' / f'{ps.provider}.toml') # Create provider instance @@ -195,6 +197,7 @@ def cb_progress(current_time: datetime): provider_instance.download_ohlcv(time_from_dl, time_to_dl, on_progress=cb_progress) + assert provider_instance.ohlcv_path is not None return _ProviderData( ohlcv_path=provider_instance.ohlcv_path, syminfo=syminfo, @@ -462,6 +465,7 @@ def run( if not time_to_dt: time_to_dt = reader.end_datetime + assert isinstance(time_from_dt, datetime) and isinstance(time_to_dt, datetime) time_from_ts = int(time_from_dt.timestamp()) time_to_ts = int(time_to_dt.timestamp()) @@ -491,6 +495,8 @@ def run( err=True, fg=colors.RED) raise Exit(1) + from pynecore.core.script_runner import LIVE_TRANSITION + live_iter = live_ohlcv_generator( provider=provider_data.provider_instance, symbol=provider_data.parsed_string.symbol, @@ -498,7 +504,7 @@ def run( last_historical_timestamp=time_to_ts, shutdown_timeout=shutdown_timeout, ) - ohlcv_iter = itertools.chain(ohlcv_iter, live_iter) + ohlcv_iter = itertools.chain(ohlcv_iter, [LIVE_TRANSITION], live_iter) size = 0 # Parse security data mappings diff --git a/src/pynecore/core/csv_file.py b/src/pynecore/core/csv_file.py index b04f257..cb80b9a 100644 --- a/src/pynecore/core/csv_file.py +++ b/src/pynecore/core/csv_file.py @@ -266,11 +266,9 @@ def write_ohlcv(self, candle: OHLCV, timeout: Optional[float] = None) -> bool: except queue.Full: return False - def flush(self, timeout: Optional[float] = None): + def flush(self): """ Wait for all pending writes to be processed by the worker thread. - - :param timeout: Optional timeout in seconds """ if self._is_open and self._queue is not None: self._queue.join() diff --git a/src/pynecore/core/live_runner.py b/src/pynecore/core/live_runner.py index 8657aa8..fdb17f2 100644 --- a/src/pynecore/core/live_runner.py +++ b/src/pynecore/core/live_runner.py @@ -2,7 +2,7 @@ Async/sync bridge for live data streaming. 
Runs a LiveProviderPlugin's async watch_ohlcv() in a background thread -and yields BarUpdate objects to the synchronous ScriptRunner via queue.Queue. +and yields OHLCV objects to the synchronous ScriptRunner via queue.Queue. """ from __future__ import annotations @@ -13,7 +13,8 @@ from collections.abc import Iterator from queue import Queue, Empty -from pynecore.core.plugin.live_provider import LiveProviderPlugin, BarUpdate +from pynecore.types.ohlcv import OHLCV +from pynecore.core.plugin.live_provider import LiveProviderPlugin __all__ = ['live_ohlcv_generator'] @@ -32,11 +33,11 @@ def live_ohlcv_generator( *, last_historical_timestamp: int | None = None, shutdown_timeout: float = 120.0, -) -> Iterator[BarUpdate]: +) -> Iterator[OHLCV]: """ - Bridge async watch_ohlcv() to a sync Iterator[BarUpdate]. + Bridge async watch_ohlcv() to a sync Iterator[OHLCV]. - Spawns a background thread running asyncio, collects BarUpdate objects + Spawns a background thread running asyncio, collects OHLCV objects via queue.Queue, and yields them including intra-bar updates. :param provider: A LiveProviderPlugin instance (already configured). @@ -44,9 +45,9 @@ def live_ohlcv_generator( :param timeframe: Timeframe in TradingView format. :param last_historical_timestamp: Timestamp of the last historical bar to avoid duplicates. :param shutdown_timeout: Max seconds to wait for graceful shutdown. 0 = wait forever. - :return: Iterator yielding BarUpdate objects (both closed and intra-bar). + :return: Iterator yielding OHLCV objects (both closed and intra-bar). 
""" - bar_queue: Queue[BarUpdate | BaseException] = Queue(maxsize=100) + bar_queue: Queue[OHLCV | BaseException] = Queue(maxsize=100) stop_event = threading.Event() async def _graceful_shutdown(): @@ -97,7 +98,7 @@ async def _async_loop(): # Filter duplicates from the historical phase if last_historical_timestamp is not None: - ts = bar_update.ohlcv.timestamp + ts = bar_update.timestamp if bar_update.is_closed and ts <= last_historical_timestamp: continue if not bar_update.is_closed and ts < last_historical_timestamp: diff --git a/src/pynecore/core/plugin/__init__.py b/src/pynecore/core/plugin/__init__.py index f511d53..6bf73ce 100644 --- a/src/pynecore/core/plugin/__init__.py +++ b/src/pynecore/core/plugin/__init__.py @@ -28,8 +28,6 @@ class BinancePlugin(LiveProviderPlugin, CLIPlugin): ... # offline + live cls = load_plugin("capitalcom") """ -from __future__ import annotations - import re import sys from typing import TypeVar, Generic @@ -119,6 +117,7 @@ def get_plugin_metadata(ep: EntryPoint) -> dict[str, str]: :param ep: The entry point of the plugin. :return: Dict with ``name``, ``version``, ``description``, ``min_pynecore``. """ + assert ep.dist is not None meta = ep.dist.metadata return { 'name': ep.name, @@ -138,6 +137,7 @@ def _parse_min_pynecore(ep: EntryPoint) -> str: :param ep: The entry point of the plugin. :return: Version string (e.g. ``"6.5"``) or ``""`` if not found. 
""" + assert ep.dist is not None requires = ep.dist.requires if not requires: return '' diff --git a/src/pynecore/core/plugin/live_provider.py b/src/pynecore/core/plugin/live_provider.py index 2cd55c6..5808508 100644 --- a/src/pynecore/core/plugin/live_provider.py +++ b/src/pynecore/core/plugin/live_provider.py @@ -1,7 +1,6 @@ from __future__ import annotations from abc import abstractmethod, ABCMeta -from dataclasses import dataclass from pynecore.types.ohlcv import OHLCV @@ -9,17 +8,6 @@ from .provider import ProviderPlugin -@dataclass -class BarUpdate: - """A single bar update from a live data source.""" - - ohlcv: OHLCV - """The OHLCV data for this update.""" - - is_closed: bool - """True if the bar is final (closed), False for intra-bar updates.""" - - class LiveProviderPlugin(ProviderPlugin[ConfigT], metaclass=ABCMeta): """ WebSocket/streaming data provider extending :class:`ProviderPlugin`. @@ -56,7 +44,7 @@ def is_connected(self) -> bool: # --- Data streaming --- @abstractmethod - async def watch_ohlcv(self, symbol: str, timeframe: str) -> BarUpdate: + async def watch_ohlcv(self, symbol: str, timeframe: str) -> OHLCV: """ Wait for and return the next OHLCV update. @@ -65,7 +53,7 @@ async def watch_ohlcv(self, symbol: str, timeframe: str) -> BarUpdate: :param symbol: The symbol in provider-specific format. :param timeframe: Timeframe in TradingView format (e.g. ``"1D"``, ``"1"``, ``"4H"``). - :return: A :class:`BarUpdate` with the OHLCV data and closed/open status. + :return: An :class:`OHLCV` with ``is_closed=True`` for a final bar, ``False`` for intra-bar updates. """ # --- Reconnection hooks (override for custom behavior) --- @@ -78,6 +66,7 @@ async def on_reconnect(self) -> None: # --- Shutdown hooks --- + # noinspection PyMethodMayBeStatic async def can_shutdown(self) -> bool: """ Whether the provider is ready to shut down. 
diff --git a/src/pynecore/core/plugin/provider.py b/src/pynecore/core/plugin/provider.py index 66b25a4..ed5c8ad 100644 --- a/src/pynecore/core/plugin/provider.py +++ b/src/pynecore/core/plugin/provider.py @@ -156,7 +156,7 @@ def save_ohlcv_data(self, data: OHLCV | list[OHLCV]): self.ohlcv_file.write(candle) @abstractmethod - def download_ohlcv(self, time_from: datetime | None, time_to: datetime | None, + def download_ohlcv(self, time_from: datetime, time_to: datetime, on_progress: Callable[[datetime], None] | None = None, limit: int | None = None): """ @@ -164,8 +164,8 @@ def download_ohlcv(self, time_from: datetime | None, time_to: datetime | None, Use :meth:`save_ohlcv_data` to write records to the data file. - :param time_from: The start time (None to fetch all available data). - :param time_to: The end time (None to fetch up to the latest). + :param time_from: The start time. Use ``datetime.fromtimestamp(0)`` to fetch all available data. + :param time_to: The end time. :param on_progress: Optional progress callback. :param limit: Override the automatic chunk size (number of bars per API request). 
""" diff --git a/src/pynecore/core/script_runner.py b/src/pynecore/core/script_runner.py index f8eff70..0eed940 100644 --- a/src/pynecore/core/script_runner.py +++ b/src/pynecore/core/script_runner.py @@ -8,19 +8,24 @@ from pynecore.core.syminfo import SymInfo from pynecore.core.csv_file import CSVWriter from pynecore.core.strategy_stats import calculate_strategy_statistics, write_strategy_statistics_csv +from pynecore.core.var_snapshot import VarSnapshot from pynecore.types import script_type if TYPE_CHECKING: from zoneinfo import ZoneInfo # noqa from pynecore.core.script import script - from pynecore.lib.strategy import Trade # noqa + from pynecore.lib.strategy import Trade, Position # noqa __all__ = [ 'import_script', 'ScriptRunner', + 'LIVE_TRANSITION', ] +LIVE_TRANSITION = OHLCV(timestamp=-1, open=-1, high=-1, low=-1, close=-1, volume=-1) +"""Sentinel inserted between historical and live OHLCV data in the iterator.""" + def import_script(script_path: Path) -> ModuleType: """ @@ -249,16 +254,13 @@ def __init__(self, script_path: Path, ohlcv_iter: Iterable[OHLCV], syminfo: SymI self.script: script = self.script_module.main.script - # noinspection PyProtectedMember - from ..lib import _parse_timezone - self.ohlcv_iter = ohlcv_iter self.syminfo = syminfo self.update_syminfo_every_run = update_syminfo_every_run self.last_bar_index = last_bar_index self.bar_index = 0 - self.tz = _parse_timezone(syminfo.timezone) + self.tz = lib._parse_timezone(syminfo.timezone) # Initialize tracking variables for statistics self.equity_curve: list[float] = [] @@ -512,20 +514,19 @@ def _lazy_spawn(sid: str): self.ohlcv_iter = (w.aggregated for w in magnifier) # Initialize calc_on_order_fills snapshot (for COOF or live mode) - var_snapshot = None + var_snapshot: VarSnapshot | None = None is_live = lib._is_live # Indicators always run on every tick; strategies only if calc_on_every_tick run_on_every_tick = not is_strat or self.script.calc_on_every_tick if is_strat and 
self.script.calc_on_order_fills: - from .var_snapshot import VarSnapshot var_snapshot = VarSnapshot(self.script_module, script._registered_libraries) elif is_live and run_on_every_tick: - from .var_snapshot import VarSnapshot var_snapshot = VarSnapshot(self.script_module, script._registered_libraries) # --- Helper closures for DRY --- registered_libraries = script._registered_libraries + # noinspection PyProtectedMember def _run_libs_and_main(): lib._lib_semaphore = True for _title, main_func in registered_libraries: @@ -536,6 +537,7 @@ def _run_libs_and_main(): assert isinstance(r, dict), "The 'main' function must return a dictionary!" lib._plot_data.update(r) + # noinspection PyProtectedMember def _write_bar_output(bar_candle): nonlocal trade_num if self.plot_writer and lib._plot_data: @@ -569,28 +571,30 @@ def _write_bar_output(bar_candle): f"{t.max_drawdown_percent:.2f}", ) + # noinspection PyProtectedMember def _coof_loop(): """COOF re-execution loop: process orders, re-execute on fills.""" old_fills = position._fill_counter position.process_orders() new_fills = position._fill_counter while new_fills > old_fills: - if var_snapshot.has_vars: - var_snapshot.restore() + if var_snapshot.has_vars: # type: ignore + var_snapshot.restore() # type: ignore function_isolation.reset() _run_libs_and_main() old_fills = new_fills position.process_orders() new_fills = position._fill_counter + # noinspection PyProtectedMember def _coof_magnified_loop(sub_bars_list, aggregated_candle): """COOF re-execution loop with magnified order processing.""" old_fills = position._fill_counter position.process_orders_magnified(sub_bars_list, aggregated_candle) new_fills = position._fill_counter while new_fills > old_fills: - if var_snapshot.has_vars: - var_snapshot.restore() + if var_snapshot.has_vars: # type: ignore + var_snapshot.restore() # type: ignore function_isolation.reset() _run_libs_and_main() old_fills = new_fills @@ -598,20 +602,14 @@ def _coof_magnified_loop(sub_bars_list, 
aggregated_candle): new_fills = position._fill_counter # --- Peek-ahead pattern: historical bars --- - from pynecore.core.plugin.live_provider import BarUpdate - + # LIVE_TRANSITION doubles as end-of-data sentinel → next() always returns OHLCV ohlcv_iterator = iter(self.ohlcv_iter) - next_item = next(ohlcv_iterator, None) - first_live_update = None # Will hold the first BarUpdate if we transition - - while next_item is not None: - # If a BarUpdate arrives, we transition to live mode - if isinstance(next_item, BarUpdate): - first_live_update = next_item - break + next_item = next(ohlcv_iterator, LIVE_TRANSITION) + first_live_update: OHLCV | None = None + while next_item is not LIVE_TRANSITION: candle = next_item - next_item = next(ohlcv_iterator, None) + next_item = next(ohlcv_iterator, LIVE_TRANSITION) # Update syminfo lib properties if needed if self.update_syminfo_every_run: @@ -621,11 +619,9 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): # Last bar detection if is_live: barstate.islast = False - barstate.islastconfirmedhistory = ( - next_item is None or isinstance(next_item, BarUpdate) - ) + barstate.islastconfirmedhistory = (next_item is LIVE_TRANSITION) else: - barstate.islast = (next_item is None) + barstate.islast = (next_item is LIVE_TRANSITION) # Update lib properties _set_lib_properties(candle, self.bar_index, self.tz, lib) @@ -675,6 +671,10 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): barstate.isfirst = False # --- Live mode: transition and intra-bar loop --- + # After the historical loop, if LIVE_TRANSITION was hit, get the first live bar + if next_item is LIVE_TRANSITION and is_live: + first_live_update = next(ohlcv_iterator, None) + if first_live_update is not None: import itertools @@ -695,10 +695,7 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): live_stream = itertools.chain([first_live_update], ohlcv_iterator) for bar_update in live_stream: - if not isinstance(bar_update, BarUpdate): - continue - - 
candle = bar_update.ohlcv + candle = bar_update is_new_bar = (candle.timestamp != last_bar_timestamp) barstate.islast = True @@ -927,9 +924,8 @@ def _run_iter_magnified(self, lib, barstate, position, script_mod, is_strat, on_ trade_num = 0 # Initialize calc_on_order_fills snapshot for magnified path - var_snapshot = None + var_snapshot: VarSnapshot | None = None if is_strat and self.script.calc_on_order_fills: - from .var_snapshot import VarSnapshot var_snapshot = VarSnapshot(self.script_module, script_mod._registered_libraries) for window in magnifier: @@ -1120,7 +1116,10 @@ def _ensure_ohlcv_ext(path: str | Path) -> str: def _write_live_strategy_stats(self, position): """Rewrite strategy stats file with current state (live mode, after each bar).""" + if self.strat_writer is None: + return from .strategy_stats import calculate_strategy_statistics, write_strategy_statistics_csv + # noinspection PyBroadException try: self.strat_writer.open() stats = calculate_strategy_statistics( @@ -1131,6 +1130,7 @@ def _write_live_strategy_stats(self, position): write_strategy_statistics_csv(stats, self.strat_writer) self.strat_writer.close() except Exception: + # noinspection PyBroadException try: self.strat_writer.close() except Exception: diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index 8bc44f2..509d84c 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -6,8 +6,6 @@ import tomllib from pynecore.core.plugin import LiveProviderPlugin, override -from pynecore.core.plugin.live_provider import BarUpdate - from pynecore.core.syminfo import SymInfo, SymInfoInterval, SymInfoSession from ..types.ohlcv import OHLCV @@ -269,7 +267,7 @@ def update_symbol_info(self) -> SymInfo: ) @override - def download_ohlcv(self, time_from: datetime | None, time_to: datetime | None, + def download_ohlcv(self, time_from: datetime, time_to: datetime, on_progress: Callable[[datetime], None] | None = None, limit: int | None = None): """ @@ 
-284,7 +282,7 @@ def download_ohlcv(self, time_from: datetime | None, time_to: datetime | None, assert self.xchg_timeframe is not None tf: datetime = time_from.replace(tzinfo=None) - tt: datetime = (time_to if time_to is not None else datetime.now(UTC)).replace(tzinfo=None) + tt: datetime = time_to.replace(tzinfo=None) if limit is None: assert self._client.id @@ -370,7 +368,7 @@ def is_connected(self) -> bool: return hasattr(self, '_async_client') and self._async_client is not None @override - async def watch_ohlcv(self, symbol: str, timeframe: str) -> BarUpdate: + async def watch_ohlcv(self, symbol: str, timeframe: str) -> OHLCV: """ Wait for the next OHLCV update from the exchange websocket. @@ -380,7 +378,7 @@ async def watch_ohlcv(self, symbol: str, timeframe: str) -> BarUpdate: :param symbol: Symbol in CCXT format (e.g. "BTC/USDT:USDT"). :param timeframe: Timeframe in TradingView format (e.g. "1D", "1", "4H"). - :return: BarUpdate with OHLCV data and closed/open status. + :return: OHLCV with ``is_closed=True`` for a final bar, ``False`` for intra-bar updates. 
""" xchg_tf = self.to_exchange_timeframe(timeframe) @@ -396,15 +394,17 @@ async def watch_ohlcv(self, symbol: str, timeframe: str) -> BarUpdate: low=float(last[3]), close=float(last[4]), volume=float(last[5]), + is_closed=False, ) if (self._last_bar_timestamp is not None and timestamp != self._last_bar_timestamp): - closed_bar = self._last_bar_ohlcv + assert self._last_bar_ohlcv is not None + closed_bar = self._last_bar_ohlcv._replace(is_closed=True) self._last_bar_timestamp = timestamp self._last_bar_ohlcv = current_ohlcv - return BarUpdate(ohlcv=closed_bar, is_closed=True) + return closed_bar self._last_bar_timestamp = timestamp self._last_bar_ohlcv = current_ohlcv - return BarUpdate(ohlcv=current_ohlcv, is_closed=False) + return current_ohlcv diff --git a/src/pynecore/types/ohlcv.py b/src/pynecore/types/ohlcv.py index 7ca34ce..46c9163 100644 --- a/src/pynecore/types/ohlcv.py +++ b/src/pynecore/types/ohlcv.py @@ -9,3 +9,4 @@ class OHLCV(NamedTuple): close: float volume: float extra_fields: dict[str, Any] | None = None + is_closed: bool = True diff --git a/tests/t00_pynecore/core/test_016_live_provider.py b/tests/t00_pynecore/core/test_016_live_provider.py index 5c74a7b..1f70c82 100644 --- a/tests/t00_pynecore/core/test_016_live_provider.py +++ b/tests/t00_pynecore/core/test_016_live_provider.py @@ -1,12 +1,10 @@ """ -Tests for LiveProviderPlugin and BarUpdate. +Tests for LiveProviderPlugin and OHLCV is_closed field. 
""" from abc import ABCMeta -from dataclasses import fields from pynecore.core.plugin import Plugin, ProviderPlugin, LiveProviderPlugin -from pynecore.core.plugin.live_provider import BarUpdate from pynecore.types.ohlcv import OHLCV @@ -46,19 +44,13 @@ def __test_live_provider_default_reconnect_values__(): assert LiveProviderPlugin.max_reconnect_attempts == 10 -def __test_bar_update_fields__(): - """BarUpdate has ohlcv and is_closed fields""" - field_names = {f.name for f in fields(BarUpdate)} - assert field_names == {'ohlcv', 'is_closed'} - - -def __test_bar_update_creation__(): - """BarUpdate can be created with OHLCV and is_closed flag""" +def __test_ohlcv_is_closed_field__(): + """OHLCV has is_closed field with default True""" ohlcv = OHLCV(timestamp=1000, open=100.0, high=105.0, low=95.0, close=102.0, volume=1000.0) + assert ohlcv.is_closed is True - closed = BarUpdate(ohlcv=ohlcv, is_closed=True) - assert closed.ohlcv is ohlcv + closed = ohlcv._replace(is_closed=True) assert closed.is_closed is True - update = BarUpdate(ohlcv=ohlcv, is_closed=False) + update = ohlcv._replace(is_closed=False) assert update.is_closed is False diff --git a/tests/t00_pynecore/core/test_018_live_runner.py b/tests/t00_pynecore/core/test_018_live_runner.py index fa9d352..37cc87d 100644 --- a/tests/t00_pynecore/core/test_018_live_runner.py +++ b/tests/t00_pynecore/core/test_018_live_runner.py @@ -5,23 +5,18 @@ import time from pynecore.core.live_runner import live_ohlcv_generator -from pynecore.core.plugin.live_provider import BarUpdate from pynecore.types.ohlcv import OHLCV -def _make_ohlcv(timestamp: int, close: float = 100.0) -> OHLCV: +def _make_ohlcv(timestamp: int, close: float = 100.0, is_closed: bool = True) -> OHLCV: return OHLCV(timestamp=timestamp, open=close, high=close + 1, - low=close - 1, close=close, volume=1000.0) - - -def _make_bar_update(timestamp: int, is_closed: bool = True, close: float = 100.0) -> BarUpdate: - return BarUpdate(ohlcv=_make_ohlcv(timestamp, 
close), is_closed=is_closed) + low=close - 1, close=close, volume=1000.0, is_closed=is_closed) class MockLiveProvider: """Mock LiveProviderPlugin for testing the bridge.""" - def __init__(self, bar_updates: list[BarUpdate]): + def __init__(self, bar_updates: list[OHLCV]): self._bar_updates = bar_updates self._index = 0 self._connected = False @@ -38,7 +33,7 @@ async def disconnect(self): def is_connected(self): return self._connected - async def watch_ohlcv(self, symbol: str, timeframe: str) -> BarUpdate: + async def watch_ohlcv(self, symbol: str, timeframe: str) -> OHLCV: if self._index >= len(self._bar_updates): raise asyncio.CancelledError() @@ -60,10 +55,10 @@ async def can_shutdown(self): def __test_live_generator_yields_all_bar_updates__(): """live_ohlcv_generator yields both intra-bar and closed bar updates""" updates = [ - _make_bar_update(1000, is_closed=False, close=100.0), - _make_bar_update(1000, is_closed=True, close=101.0), - _make_bar_update(2000, is_closed=False, close=102.0), - _make_bar_update(2000, is_closed=True, close=103.0), + _make_ohlcv(1000, is_closed=False, close=100.0), + _make_ohlcv(1000, is_closed=True, close=101.0), + _make_ohlcv(2000, is_closed=False, close=102.0), + _make_ohlcv(2000, is_closed=True, close=103.0), ] provider = MockLiveProvider(updates) @@ -71,9 +66,9 @@ def __test_live_generator_yields_all_bar_updates__(): assert len(bars) == 4 assert not bars[0].is_closed - assert bars[0].ohlcv.close == 100.0 + assert bars[0].close == 100.0 assert bars[1].is_closed - assert bars[1].ohlcv.close == 101.0 + assert bars[1].close == 101.0 assert not bars[2].is_closed assert bars[3].is_closed @@ -81,9 +76,9 @@ def __test_live_generator_yields_all_bar_updates__(): def __test_live_generator_filters_old_bars__(): """live_ohlcv_generator skips bars older than last_historical_timestamp""" updates = [ - _make_bar_update(1000, is_closed=True, close=100.0), - _make_bar_update(2000, is_closed=True, close=200.0), - _make_bar_update(3000, 
is_closed=True, close=300.0), + _make_ohlcv(1000, is_closed=True, close=100.0), + _make_ohlcv(2000, is_closed=True, close=200.0), + _make_ohlcv(3000, is_closed=True, close=300.0), ] provider = MockLiveProvider(updates) @@ -91,28 +86,28 @@ def __test_live_generator_filters_old_bars__(): last_historical_timestamp=2000)) assert len(bars) == 1 - assert bars[0].ohlcv.timestamp == 3000 - assert bars[0].ohlcv.close == 300.0 + assert bars[0].timestamp == 3000 + assert bars[0].close == 300.0 -def __test_live_generator_yields_bar_update_objects__(): - """live_ohlcv_generator yields BarUpdate objects (not raw OHLCV)""" +def __test_live_generator_yields_ohlcv_objects__(): + """live_ohlcv_generator yields OHLCV objects directly""" updates = [ - _make_bar_update(1000, is_closed=True), + _make_ohlcv(1000, is_closed=True), ] provider = MockLiveProvider(updates) bars = list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) assert len(bars) == 1 - assert isinstance(bars[0], BarUpdate) - assert isinstance(bars[0].ohlcv, OHLCV) + assert isinstance(bars[0], OHLCV) + assert bars[0].is_closed is True def __test_live_generator_connects_and_disconnects__(): """live_ohlcv_generator calls connect on start and disconnect on finish""" updates = [ - _make_bar_update(1000, is_closed=True), + _make_ohlcv(1000, is_closed=True), ] provider = MockLiveProvider(updates) @@ -131,7 +126,7 @@ def __test_live_generator_empty_stream__(): class DelayedShutdownProvider(MockLiveProvider): """Provider that delays shutdown for a number of can_shutdown() calls.""" - def __init__(self, bar_updates: list[BarUpdate], deny_count: int = 2): + def __init__(self, bar_updates: list[OHLCV], deny_count: int = 2): super().__init__(bar_updates) self._deny_count = deny_count self._shutdown_calls = 0 @@ -145,7 +140,7 @@ async def can_shutdown(self): def __test_graceful_shutdown_waits_for_can_shutdown__(): """Shutdown waits until can_shutdown() returns True""" - updates = [_make_bar_update(1000, is_closed=True)] + updates = 
[_make_ohlcv(1000, is_closed=True)] provider = DelayedShutdownProvider(updates, deny_count=2) list(live_ohlcv_generator(provider, "BTC/USDT", "1D", shutdown_timeout=10.0)) @@ -166,7 +161,7 @@ async def can_shutdown(self): self._shutdown_calls += 1 return False - updates = [_make_bar_update(1000, is_closed=True)] + updates = [_make_ohlcv(1000, is_closed=True)] provider = NeverReadyProvider(updates) start = time.monotonic() @@ -180,7 +175,7 @@ async def can_shutdown(self): def __test_graceful_shutdown_zero_timeout_waits_until_ready__(): """shutdown_timeout=0 waits indefinitely until can_shutdown() returns True""" - updates = [_make_bar_update(1000, is_closed=True)] + updates = [_make_ohlcv(1000, is_closed=True)] provider = DelayedShutdownProvider(updates, deny_count=3) list(live_ohlcv_generator(provider, "BTC/USDT", "1D", shutdown_timeout=0)) diff --git a/tests/t00_pynecore/core/test_019_live_mode.py b/tests/t00_pynecore/core/test_019_live_mode.py index ccf10a5..ec1d333 100644 --- a/tests/t00_pynecore/core/test_019_live_mode.py +++ b/tests/t00_pynecore/core/test_019_live_mode.py @@ -25,15 +25,10 @@ def main(): } -def _make_ohlcv(ts, close=100.0): +def _make_ohlcv(ts, close=100.0, is_closed=True): from pynecore.types.ohlcv import OHLCV return OHLCV(timestamp=ts, open=close, high=close + 1, low=close - 1, - close=close, volume=1000.0) - - -def _make_bar_update(ts, is_closed=True, close=100.0): - from pynecore.core.plugin.live_provider import BarUpdate - return BarUpdate(ohlcv=_make_ohlcv(ts, close), is_closed=is_closed) + close=close, volume=1000.0, is_closed=is_closed) def _create_live_runner(script_path, module_key, syminfo, ohlcv_iter): @@ -50,14 +45,20 @@ def _create_live_runner(script_path, module_key, syminfo, ohlcv_iter): return ScriptRunner(script_path, ohlcv_iter, syminfo) +def _chain_live(historical, live): + """Chain historical OHLCV with LIVE_TRANSITION sentinel and live OHLCV.""" + from pynecore.core.script_runner import LIVE_TRANSITION + return 
itertools.chain(historical, [LIVE_TRANSITION], live) + + def __test_barstate_historical_then_live__(script_path, module_key, syminfo): """barstate transitions from ishistory=True to isrealtime=True at live phase""" historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(3)] - live = [_make_bar_update(3 * 60, is_closed=True, close=104.0)] + live = [_make_ohlcv(3 * 60, is_closed=True, close=104.0)] runner = _create_live_runner( script_path, module_key, syminfo, - itertools.chain(historical, live), + _chain_live(historical, live), ) results = [(c, dict(p)) for c, p in runner.run_iter()] @@ -80,11 +81,11 @@ def __test_barstate_historical_then_live__(script_path, module_key, syminfo): def __test_islastconfirmedhistory__(script_path, module_key, syminfo): """islastconfirmedhistory is True only on the final historical bar before live""" historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(3)] - live = [_make_bar_update(3 * 60, is_closed=True, close=104.0)] + live = [_make_ohlcv(3 * 60, is_closed=True, close=104.0)] runner = _create_live_runner( script_path, module_key, syminfo, - itertools.chain(historical, live), + _chain_live(historical, live), ) results = [(c, dict(p)) for c, p in runner.run_iter()] @@ -100,14 +101,14 @@ def __test_intrabar_barstate__(script_path, module_key, syminfo): """Intra-bar ticks have isconfirmed=False, isnew=True on first tick""" historical = [_make_ohlcv(0, 100.0)] live = [ - _make_bar_update(60, is_closed=False, close=101.0), # bar open - _make_bar_update(60, is_closed=False, close=101.5), # intra-bar - _make_bar_update(60, is_closed=True, close=102.0), # bar close + _make_ohlcv(60, is_closed=False, close=101.0), # bar open + _make_ohlcv(60, is_closed=False, close=101.5), # intra-bar + _make_ohlcv(60, is_closed=True, close=102.0), # bar close ] runner = _create_live_runner( script_path, module_key, syminfo, - itertools.chain(historical, live), + _chain_live(historical, live), ) results = [(c, dict(p)) for c, p in 
runner.run_iter()] @@ -126,14 +127,14 @@ def __test_var_rollback_varip_persist__(script_path, module_key, syminfo): historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(3)] live = [ # Bar with 3 executions: open tick, intra-bar tick, close - _make_bar_update(3 * 60, is_closed=False, close=104.0), - _make_bar_update(3 * 60, is_closed=False, close=104.5), - _make_bar_update(3 * 60, is_closed=True, close=105.0), + _make_ohlcv(3 * 60, is_closed=False, close=104.0), + _make_ohlcv(3 * 60, is_closed=False, close=104.5), + _make_ohlcv(3 * 60, is_closed=True, close=105.0), ] runner = _create_live_runner( script_path, module_key, syminfo, - itertools.chain(historical, live), + _chain_live(historical, live), ) results = [(c, dict(p)) for c, p in runner.run_iter()] @@ -158,16 +159,16 @@ def __test_yield_only_on_closed_bars__(script_path, module_key, syminfo): """run_iter only yields for closed bars, not intra-bar ticks""" historical = [_make_ohlcv(0, 100.0)] live = [ - _make_bar_update(60, is_closed=False, close=101.0), - _make_bar_update(60, is_closed=False, close=101.5), - _make_bar_update(60, is_closed=False, close=101.8), - _make_bar_update(60, is_closed=True, close=102.0), - _make_bar_update(120, is_closed=True, close=103.0), + _make_ohlcv(60, is_closed=False, close=101.0), + _make_ohlcv(60, is_closed=False, close=101.5), + _make_ohlcv(60, is_closed=False, close=101.8), + _make_ohlcv(60, is_closed=True, close=102.0), + _make_ohlcv(120, is_closed=True, close=103.0), ] runner = _create_live_runner( script_path, module_key, syminfo, - itertools.chain(historical, live), + _chain_live(historical, live), ) results = [(c, dict(p)) for c, p in runner.run_iter()] @@ -177,7 +178,7 @@ def __test_yield_only_on_closed_bars__(script_path, module_key, syminfo): def __test_no_live_bars_unchanged_behavior__(script_path, module_key, syminfo): - """When is_live=True but no BarUpdate arrives, behaves like normal backtest""" + """When is_live=True but no live OHLCV arrives, behaves 
like normal backtest""" historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(5)] runner = _create_live_runner( diff --git a/tests/t00_pynecore/core/test_020_live_strategy_varip.py b/tests/t00_pynecore/core/test_020_live_strategy_varip.py index 8072811..55819e9 100644 --- a/tests/t00_pynecore/core/test_020_live_strategy_varip.py +++ b/tests/t00_pynecore/core/test_020_live_strategy_varip.py @@ -29,15 +29,10 @@ def main(): plot(1 if barstate.isnew else 0, 'isnew') -def _make_ohlcv(ts, close=100.0): +def _make_ohlcv(ts, close=100.0, is_closed=True): from pynecore.types.ohlcv import OHLCV return OHLCV(timestamp=ts, open=close, high=close + 1, low=close - 1, - close=close, volume=1000.0) - - -def _make_bar_update(ts, is_closed=True, close=100.0): - from pynecore.core.plugin.live_provider import BarUpdate - return BarUpdate(ohlcv=_make_ohlcv(ts, close), is_closed=is_closed) + close=close, volume=1000.0, is_closed=is_closed) def _create_live_runner(script_path, module_key, syminfo, ohlcv_iter): @@ -53,18 +48,24 @@ def _create_live_runner(script_path, module_key, syminfo, ohlcv_iter): return ScriptRunner(script_path, ohlcv_iter, syminfo) +def _chain_live(historical, live): + """Chain historical OHLCV with LIVE_TRANSITION sentinel and live OHLCV.""" + from pynecore.core.script_runner import LIVE_TRANSITION + return itertools.chain(historical, [LIVE_TRANSITION], live) + + def __test_strategy_default_var_equals_varip__(script_path, module_key, syminfo): """Default strategy: script runs only at bar close, var and varip are identical.""" historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(3)] live = [ - _make_bar_update(3 * 60, is_closed=False, close=104.0), # bar open - _make_bar_update(3 * 60, is_closed=False, close=104.5), # intra-bar - _make_bar_update(3 * 60, is_closed=True, close=105.0), # bar close + _make_ohlcv(3 * 60, is_closed=False, close=104.0), # bar open + _make_ohlcv(3 * 60, is_closed=False, close=104.5), # intra-bar + _make_ohlcv(3 * 60, 
is_closed=True, close=105.0), # bar close ] runner = _create_live_runner( script_path, module_key, syminfo, - itertools.chain(historical, live), + _chain_live(historical, live), ) results = [] @@ -88,14 +89,14 @@ def __test_strategy_calc_on_every_tick__(script_path, module_key, syminfo): """Strategy with calc_on_every_tick: var rolls back, varip persists like indicator.""" historical = [_make_ohlcv(i * 60, 100.0 + i) for i in range(3)] live = [ - _make_bar_update(3 * 60, is_closed=False, close=104.0), # bar open - _make_bar_update(3 * 60, is_closed=False, close=104.5), # intra-bar - _make_bar_update(3 * 60, is_closed=True, close=105.0), # bar close + _make_ohlcv(3 * 60, is_closed=False, close=104.0), # bar open + _make_ohlcv(3 * 60, is_closed=False, close=104.5), # intra-bar + _make_ohlcv(3 * 60, is_closed=True, close=105.0), # bar close ] runner = _create_live_runner( script_path, module_key, syminfo, - itertools.chain(historical, live), + _chain_live(historical, live), ) runner.script.calc_on_every_tick = True @@ -122,19 +123,19 @@ def __test_strategy_default_barstate_isnew_on_every_live_bar__(script_path, modu historical = [_make_ohlcv(0, 100.0)] live = [ # First live bar: intra-bar ticks then close - _make_bar_update(60, is_closed=False, close=101.0), - _make_bar_update(60, is_closed=True, close=102.0), + _make_ohlcv(60, is_closed=False, close=101.0), + _make_ohlcv(60, is_closed=True, close=102.0), # Second live bar: direct close (no intra-bar) - _make_bar_update(120, is_closed=True, close=103.0), + _make_ohlcv(120, is_closed=True, close=103.0), # Third live bar: with intra-bar ticks - _make_bar_update(180, is_closed=False, close=104.0), - _make_bar_update(180, is_closed=False, close=104.5), - _make_bar_update(180, is_closed=True, close=105.0), + _make_ohlcv(180, is_closed=False, close=104.0), + _make_ohlcv(180, is_closed=False, close=104.5), + _make_ohlcv(180, is_closed=True, close=105.0), ] runner = _create_live_runner( script_path, module_key, syminfo, - 
itertools.chain(historical, live), + _chain_live(historical, live), ) results = [] diff --git a/tests/t00_pynecore/data/test_003_ccxt_live_provider.py b/tests/t00_pynecore/data/test_003_ccxt_live_provider.py index 2bfa668..75ffcec 100644 --- a/tests/t00_pynecore/data/test_003_ccxt_live_provider.py +++ b/tests/t00_pynecore/data/test_003_ccxt_live_provider.py @@ -10,7 +10,6 @@ import pytest from pynecore.providers.ccxt import CCXTProvider -from pynecore.core.plugin.live_provider import BarUpdate from pynecore.types.ohlcv import OHLCV logging.getLogger("ccxt").setLevel(logging.WARNING) @@ -58,16 +57,13 @@ def __test_ccxt_live_watch_ohlcv__(): async def _run(): await provider.connect() try: - bar_update = await asyncio.wait_for( + ohlcv = await asyncio.wait_for( provider.watch_ohlcv("BTC/USDT:USDT", "1"), timeout=30.0, ) - assert isinstance(bar_update, BarUpdate) - assert isinstance(bar_update.ohlcv, OHLCV) - assert isinstance(bar_update.is_closed, bool) - - ohlcv = bar_update.ohlcv + assert isinstance(ohlcv, OHLCV) + assert isinstance(ohlcv.is_closed, bool) assert ohlcv.timestamp > 0 assert ohlcv.open > 0 assert ohlcv.high >= ohlcv.low @@ -94,16 +90,16 @@ async def _run(): try: updates = [] for _ in range(3): - bar_update = await asyncio.wait_for( + ohlcv = await asyncio.wait_for( provider.watch_ohlcv("BTC/USDT:USDT", "1"), timeout=30.0, ) - updates.append(bar_update) + updates.append(ohlcv) assert len(updates) == 3 for u in updates: - assert isinstance(u, BarUpdate) - assert u.ohlcv.timestamp > 0 + assert isinstance(u, OHLCV) + assert u.timestamp > 0 finally: await provider.disconnect() @@ -137,13 +133,12 @@ def __test_ccxt_live_generator_integration__(): ) received = [] - for update in live_ohlcv_generator(provider, "BTC/USDT:USDT", "1", - shutdown_timeout=5.0): - received.append(update) - assert isinstance(update, BarUpdate) - assert isinstance(update.ohlcv, OHLCV) - assert update.ohlcv.timestamp > 0 - assert update.ohlcv.close > 0 + for ohlcv in 
live_ohlcv_generator(provider, "BTC/USDT:USDT", "1", + shutdown_timeout=5.0): + received.append(ohlcv) + assert isinstance(ohlcv, OHLCV) + assert ohlcv.timestamp > 0 + assert ohlcv.close > 0 if len(received) >= 1: break From 5243335eba431a971c608cea193f015b13f42519 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Thu, 9 Apr 2026 07:59:40 +0200 Subject: [PATCH 24/64] fix(provider): rename ohlv_dir to ohlcv_dir across providers and tests --- src/pynecore/core/plugin/provider.py | 8 ++++---- src/pynecore/providers/ccxt.py | 6 +++--- tests/t00_pynecore/data/test_002_ccxt_provider.py | 5 ++--- .../t00_pynecore/data/test_003_ccxt_live_provider.py | 12 ++++++------ 4 files changed, 15 insertions(+), 16 deletions(-) diff --git a/src/pynecore/core/plugin/provider.py b/src/pynecore/core/plugin/provider.py index ed5c8ad..363b839 100644 --- a/src/pynecore/core/plugin/provider.py +++ b/src/pynecore/core/plugin/provider.py @@ -73,19 +73,19 @@ def get_ohlcv_path(cls, symbol: str, timeframe: str, ohlv_dir: Path, f"_{timeframe}.ohlcv") def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, - ohlv_dir: Path | None = None, config: ConfigT | None = None): + ohlcv_dir: Path | None = None, config: ConfigT | None = None): """ :param symbol: The symbol to get data for. :param timeframe: The timeframe to get data for in TradingView format. - :param ohlv_dir: The directory to save OHLCV data. + :param ohlcv_dir: The directory to save OHLCV data. :param config: Pre-loaded config dataclass instance. 
""" self.symbol = symbol self.timeframe = timeframe self.xchg_timeframe = self.to_exchange_timeframe(timeframe) if timeframe else None - if ohlv_dir: + if ohlcv_dir: assert symbol and timeframe - self.ohlcv_path = self.get_ohlcv_path(symbol, timeframe, ohlv_dir) + self.ohlcv_path = self.get_ohlcv_path(symbol, timeframe, ohlcv_dir) else: self.ohlcv_path = None self.ohlcv_file = OHLCVWriter(self.ohlcv_path) if self.ohlcv_path else None diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index 509d84c..4f9e3b1 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -125,11 +125,11 @@ def to_exchange_timeframe(cls, timeframe: str) -> str: @override def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, - ohlv_dir: Path | None = None, config: object | None = None): + ohlcv_dir: Path | None = None, config: object | None = None): """ :param symbol: The symbol to get data for (e.g. "binance:BTC/USDT") :param timeframe: The timeframe to get data for in TradingView fmt - :param ohlv_dir: The directory to save OHLCV data + :param ohlcv_dir: The directory to save OHLCV data :param config: Pre-loaded CCXTConfig instance """ try: @@ -137,7 +137,7 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, except ImportError: raise ImportError("CCXT is not installed. 
Please install it using `pip install ccxt`.") - super().__init__(symbol=symbol, timeframe=timeframe, ohlv_dir=ohlv_dir, config=config) + super().__init__(symbol=symbol, timeframe=timeframe, ohlcv_dir=ohlcv_dir, config=config) # Check symbol fmt try: diff --git a/tests/t00_pynecore/data/test_002_ccxt_provider.py b/tests/t00_pynecore/data/test_002_ccxt_provider.py index f6d2552..1185924 100644 --- a/tests/t00_pynecore/data/test_002_ccxt_provider.py +++ b/tests/t00_pynecore/data/test_002_ccxt_provider.py @@ -83,7 +83,6 @@ def __test_ccxt_real_data_download__(tmp_path): import os import json import logging - import tomllib import tempfile from pathlib import Path from datetime import datetime, UTC, timedelta @@ -154,7 +153,7 @@ def __test_ccxt_real_data_download__(tmp_path): provider = CCXTProvider( symbol=symbol, timeframe=timeframe, - ohlv_dir=data_dir, + ohlcv_dir=data_dir, config=config ) @@ -232,7 +231,7 @@ def download_ohlcv_data(): # We have reference data, compare the downloaded data with it # Compare the actual results with the expected data # We'll check the first 5 candles as they should be stable for this historical period - check_count = min(5, len(candles), len(expected_data)) + check_count = min(5, len(candles), len(expected_data)) # type: ignore print(f"Comparing first {check_count} candles with expected data") for i in range(check_count): diff --git a/tests/t00_pynecore/data/test_003_ccxt_live_provider.py b/tests/t00_pynecore/data/test_003_ccxt_live_provider.py index 75ffcec..ee24728 100644 --- a/tests/t00_pynecore/data/test_003_ccxt_live_provider.py +++ b/tests/t00_pynecore/data/test_003_ccxt_live_provider.py @@ -32,7 +32,7 @@ def __test_ccxt_live_connect_disconnect__(): provider = CCXTProvider( symbol="BYBIT:BTC/USDT:USDT", timeframe="1", - ohlv_dir=None, + ohlcv_dir=None, ) async def _run(): @@ -51,7 +51,7 @@ def __test_ccxt_live_watch_ohlcv__(): provider = CCXTProvider( symbol="BYBIT:BTC/USDT:USDT", timeframe="1", - ohlv_dir=None, + ohlcv_dir=None, 
) async def _run(): @@ -82,7 +82,7 @@ def __test_ccxt_live_multiple_updates__(): provider = CCXTProvider( symbol="BYBIT:BTC/USDT:USDT", timeframe="1", - ohlv_dir=None, + ohlcv_dir=None, ) async def _run(): @@ -113,7 +113,7 @@ def __test_ccxt_live_can_shutdown_default__(): provider = CCXTProvider( symbol="BYBIT:BTC/USDT:USDT", timeframe="1", - ohlv_dir=None, + ohlcv_dir=None, ) result = asyncio.run(provider.can_shutdown()) @@ -129,12 +129,12 @@ def __test_ccxt_live_generator_integration__(): provider = CCXTProvider( symbol="BYBIT:BTC/USDT:USDT", timeframe="1", - ohlv_dir=None, + ohlcv_dir=None, ) received = [] for ohlcv in live_ohlcv_generator(provider, "BTC/USDT:USDT", "1", - shutdown_timeout=5.0): + shutdown_timeout=5.0): received.append(ohlcv) assert isinstance(ohlcv, OHLCV) assert ohlcv.timestamp > 0 From 25f89696d2c07fa0f9d7763441cb412c5f945f3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Thu, 9 Apr 2026 11:54:32 +0200 Subject: [PATCH 25/64] fix: improve live runner reconnect handling and symbol normalization Prevent queue overflow from dropping closed bars during live updates. Normalize provider symbols before watch calls and fix ohlcv_dir naming. Add reconnect coverage tests and warn when plugin loading fails. 
--- .gitignore | 5 + src/pynecore/cli/commands/__init__.py | 15 +- src/pynecore/cli/commands/data.py | 2 +- src/pynecore/cli/commands/run.py | 2 +- src/pynecore/core/live_runner.py | 30 ++- src/pynecore/core/plugin/provider.py | 28 ++- src/pynecore/providers/ccxt.py | 8 +- .../t00_pynecore/core/test_018_live_runner.py | 175 ++++++++++++++++++ 8 files changed, 247 insertions(+), 18 deletions(-) diff --git a/.gitignore b/.gitignore index f46fc36..21fb259 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,8 @@ +# AI review context (AGENTS.md local, CLAUDE.md + MEMORY.md symlinked from parent) +AGENTS.md +CLAUDE.md +MEMORY.md + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/src/pynecore/cli/commands/__init__.py b/src/pynecore/cli/commands/__init__.py index f6097c0..214b694 100644 --- a/src/pynecore/cli/commands/__init__.py +++ b/src/pynecore/cli/commands/__init__.py @@ -1,5 +1,7 @@ +import logging from pathlib import Path +import click import typer from ..app import app, app_state @@ -10,6 +12,7 @@ __all__ = ['run', 'data', 'compile', 'benchmark', 'debug', 'plugin'] +logger = logging.getLogger(__name__) @app.callback() @@ -234,8 +237,8 @@ def main( plugin_cls = ep.load() if hasattr(plugin_cls, 'Config') and plugin_cls.Config is not None: ensure_config(plugin_cls.Config, config_path) - except Exception: - pass # Don't crash CLI if a plugin is broken + except Exception as e: + logger.warning("Failed to load plugin config '%s': %s", name, e) # Create api.toml file for PyneSys API (if not exists) api_file = config_dir / 'api.toml' @@ -290,7 +293,9 @@ def _register_cli_plugins(): if not params: continue - click_cmd = typer.main.get_command(app).commands.get(cmd_name) + group = typer.main.get_command(app) + assert isinstance(group, click.Group) + click_cmd = group.commands.get(cmd_name) if not isinstance(click_cmd, PluggableCommand): continue @@ -302,8 +307,8 @@ def _register_cli_plugins(): fg="yellow", err=True, ) - except Exception: - pass + 
except Exception as e: + logger.warning("Failed to load CLI plugin '%s': %s", name, e) _register_cli_plugins() diff --git a/src/pynecore/cli/commands/data.py b/src/pynecore/cli/commands/data.py index 3f687f6..29bc3c0 100644 --- a/src/pynecore/cli/commands/data.py +++ b/src/pynecore/cli/commands/data.py @@ -169,7 +169,7 @@ def download( config = ensure_config(config_cls, app_state.config_dir / 'plugins' / f'{provider.value}.toml') provider_instance: ProviderPlugin = provider_class(symbol=symbol, timeframe=timeframe, - ohlv_dir=app_state.data_dir, config=config) + ohlcv_dir=app_state.data_dir, config=config) # Download symbol info if not exists if force_save_info or not provider_instance.is_symbol_info_exists(): diff --git a/src/pynecore/cli/commands/run.py b/src/pynecore/cli/commands/run.py index b0eb3f1..c3edf95 100644 --- a/src/pynecore/cli/commands/run.py +++ b/src/pynecore/cli/commands/run.py @@ -161,7 +161,7 @@ def _download_provider_data(provider_str: str, time_from_str: str | None) -> _Pr # Create provider instance provider_instance: ProviderPlugin = provider_class( symbol=ps.symbol, timeframe=ps.timeframe, - ohlv_dir=app_state.data_dir, config=config + ohlcv_dir=app_state.data_dir, config=config ) # Fetch symbol info diff --git a/src/pynecore/core/live_runner.py b/src/pynecore/core/live_runner.py index fdb17f2..9a63e1b 100644 --- a/src/pynecore/core/live_runner.py +++ b/src/pynecore/core/live_runner.py @@ -11,7 +11,7 @@ import time import threading from collections.abc import Iterator -from queue import Queue, Empty +from queue import Queue, Empty, Full from pynecore.types.ohlcv import OHLCV from pynecore.core.plugin.live_provider import LiveProviderPlugin @@ -83,6 +83,7 @@ async def _graceful_shutdown(): async def _async_loop(): try: await provider.connect() + watch_symbol = provider.normalize_symbol(symbol) logger.info("Live provider connected: %s %s@%s", type(provider).__name__, symbol, timeframe) @@ -91,7 +92,7 @@ async def _async_loop(): while not 
stop_event.is_set(): try: bar_update = await asyncio.wait_for( - provider.watch_ohlcv(symbol, timeframe), + provider.watch_ohlcv(watch_symbol, timeframe), timeout=2.0, ) reconnect_attempts = 0 @@ -104,7 +105,13 @@ async def _async_loop(): if not bar_update.is_closed and ts < last_historical_timestamp: continue - bar_queue.put(bar_update) + if bar_update.is_closed: + bar_queue.put(bar_update) + else: + try: + bar_queue.put_nowait(bar_update) + except Full: + pass except asyncio.TimeoutError: continue @@ -124,14 +131,27 @@ async def _async_loop(): await provider.on_disconnect() delay = provider.reconnect_delay * (2 ** (reconnect_attempts - 1)) - await asyncio.sleep(delay) + slept = 0.0 + while slept < delay and not stop_event.is_set(): + await asyncio.sleep(min(0.5, delay - slept)) + slept += 0.5 + if stop_event.is_set(): + break + + try: + await provider.disconnect() + except Exception as disc_err: + logger.debug("disconnect() before reconnect raised: %s", disc_err) try: await provider.connect() await provider.on_reconnect() logger.info("Reconnected successfully") except Exception as reconn_err: - logger.error("Reconnect failed: %s", reconn_err) + logger.error("Reconnect failed (attempt %d/%d): %s", + reconnect_attempts, + provider.max_reconnect_attempts, reconn_err) + continue except Exception as e: bar_queue.put(e) diff --git a/src/pynecore/core/plugin/provider.py b/src/pynecore/core/plugin/provider.py index 363b839..d2a36a0 100644 --- a/src/pynecore/core/plugin/provider.py +++ b/src/pynecore/core/plugin/provider.py @@ -57,20 +57,20 @@ def to_exchange_timeframe(cls, timeframe: str) -> str: """ @classmethod - def get_ohlcv_path(cls, symbol: str, timeframe: str, ohlv_dir: Path, + def get_ohlcv_path(cls, symbol: str, timeframe: str, ohlcv_dir: Path, provider_name: str | None = None) -> Path: """ Get the output path of the OHLCV data file. :param symbol: Symbol name. :param timeframe: Timeframe in TradingView format. - :param ohlv_dir: Directory to save OHLCV data. 
+ :param ohlcv_dir: Directory to save OHLCV data. :param provider_name: Override provider name in filename. :return: Path to the OHLCV file. """ - return ohlv_dir / (f"{provider_name or cls.__name__.lower().replace('provider', '').replace('plugin', '')}" - f"_{symbol.replace('/', '_').replace(':', '_').upper()}" - f"_{timeframe}.ohlcv") + return ohlcv_dir / (f"{provider_name or cls.__name__.lower().replace('provider', '').replace('plugin', '')}" + f"_{symbol.replace('/', '_').replace(':', '_').upper()}" + f"_{timeframe}.ohlcv") def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, ohlcv_dir: Path | None = None, config: ConfigT | None = None): @@ -91,6 +91,24 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, self.ohlcv_file = OHLCVWriter(self.ohlcv_path) if self.ohlcv_path else None self.config: ConfigT | None = config + def normalize_symbol(self, symbol: str) -> str: + """ + Normalize a provider-format symbol to the exchange API format. + + Called by the framework before passing ``symbol`` to :meth:`watch_ohlcv` + in the live runner. For historical methods (:meth:`download_ohlcv`, + :meth:`update_symbol_info`), providers use ``self.symbol`` directly — + handle any needed format conversion in ``__init__`` instead. + + Override when the user-configured symbol includes prefixes or formatting + that the exchange API cannot accept + (e.g. stripping ``"binance:"`` from ``"binance:BTC/USDT"``). + + :param symbol: Symbol as configured by the user. + :return: Symbol in the format the exchange API expects. 
+ """ + return symbol + def __enter__(self) -> OHLCVWriter: assert self.ohlcv_file is not None return self.ohlcv_file.open() diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index 4f9e3b1..fd01d03 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -153,7 +153,8 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, "(or simple exchange, if you want to list symbols)") self.symbol = symbol - exchange_name = xchg.lower() + self._exchange_name = xchg.lower() + exchange_name = self._exchange_name # Build exchange config from the Config dataclass + optional exchange-specific TOML sections exchange_config = {} @@ -188,6 +189,11 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, **exchange_config }) + @override + def normalize_symbol(self, symbol: str) -> str: + """Strip exchange prefix: ``"binance:BTC/USDT"`` → ``"BTC/USDT"``.""" + return self.symbol or symbol + @override def get_list_of_symbols(self, *args, **kwargs) -> list[str]: """ diff --git a/tests/t00_pynecore/core/test_018_live_runner.py b/tests/t00_pynecore/core/test_018_live_runner.py index 37cc87d..4fc276c 100644 --- a/tests/t00_pynecore/core/test_018_live_runner.py +++ b/tests/t00_pynecore/core/test_018_live_runner.py @@ -42,6 +42,9 @@ async def watch_ohlcv(self, symbol: str, timeframe: str) -> OHLCV: await asyncio.sleep(0.001) return bar + def normalize_symbol(self, symbol: str) -> str: + return symbol + async def on_disconnect(self): pass @@ -182,3 +185,175 @@ def __test_graceful_shutdown_zero_timeout_waits_until_ready__(): assert provider._shutdown_calls == 4 assert not provider.is_connected + + +# --- Reconnect behavior tests --- + +class ReconnectTrackingProvider(MockLiveProvider): + """Provider that records connect/disconnect call order and fails once.""" + + def __init__(self, bar_updates: list[OHLCV], fail_at_index: int = 1): + super().__init__(bar_updates) + self._fail_at_index = 
fail_at_index + self._failed = False + self.call_log: list[str] = [] + + async def connect(self): + self.call_log.append('connect') + self._connected = True + + async def disconnect(self): + self.call_log.append('disconnect') + self._connected = False + + async def watch_ohlcv(self, symbol: str, timeframe: str) -> OHLCV: + if not self._failed and self._index == self._fail_at_index: + self._failed = True + raise ConnectionError("Simulated connection loss") + return await super().watch_ohlcv(symbol, timeframe) + + +def __test_reconnect_calls_disconnect_before_connect__(): + """Reconnect path calls disconnect() before connect() to prevent resource leaks""" + updates = [ + _make_ohlcv(1000, is_closed=True, close=100.0), + _make_ohlcv(2000, is_closed=True, close=200.0), + _make_ohlcv(3000, is_closed=True, close=300.0), + ] + + provider = ReconnectTrackingProvider(updates, fail_at_index=1) + bars = list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) + + # Should have: connect, [fail], disconnect, connect, ..., disconnect (shutdown) + assert provider.call_log[0] == 'connect' + + # Find the reconnect sequence: after the first connect, there should be + # disconnect followed by connect before the final shutdown disconnect + post_initial = provider.call_log[1:] + assert 'disconnect' in post_initial + disc_idx = post_initial.index('disconnect') + assert disc_idx + 1 < len(post_initial) + assert post_initial[disc_idx + 1] == 'connect' + + # Data should still come through after reconnect + assert len(bars) >= 1 + + +def __test_reconnect_max_attempts_exceeded__(): + """Generator raises after max reconnect attempts are exhausted""" + + class AlwaysFailProvider(MockLiveProvider): + def __init__(self): + super().__init__([]) + self.max_reconnect_attempts = 2 + self.reconnect_delay = 0.01 + + async def watch_ohlcv(self, symbol: str, timeframe: str) -> OHLCV: + raise ConnectionError("Permanent failure") + + provider = AlwaysFailProvider() + try: + 
list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) + assert False, "Should have raised" + except ConnectionError: + pass + + +# --- Queue overflow tests --- + +class FloodProvider(MockLiveProvider): + """Provider that generates a burst of intra-bar updates, then closed bars.""" + + def __init__(self, intra_bar_count: int, closed_bars: list[OHLCV]): + all_updates: list[OHLCV] = [] + # Generate many intra-bar updates (same timestamp, is_closed=False) + for i in range(intra_bar_count): + all_updates.append(_make_ohlcv(1000, is_closed=False, close=100.0 + i * 0.01)) + # Then the actual closed bars + all_updates.extend(closed_bars) + super().__init__(all_updates) + + +def __test_queue_overflow_preserves_closed_bars__(): + """When queue is full, intra-bar updates may be dropped but closed bars are never lost""" + closed_bars = [ + _make_ohlcv(1000, is_closed=True, close=150.0), + _make_ohlcv(2000, is_closed=True, close=250.0), + ] + + # 200 intra-bar updates will overflow the 100-item queue + provider = FloodProvider(intra_bar_count=200, closed_bars=closed_bars) + bars = list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) + + # All closed bars must be present + closed_received = [b for b in bars if b.is_closed] + assert len(closed_received) == 2 + assert closed_received[0].close == 150.0 + assert closed_received[1].close == 250.0 + + +# --- normalize_symbol tests --- + +class NormalizingProvider(MockLiveProvider): + """Provider that tracks which symbol was passed to watch_ohlcv.""" + + def __init__(self, bar_updates: list[OHLCV]): + super().__init__(bar_updates) + self.received_symbols: list[str] = [] + + def normalize_symbol(self, symbol: str) -> str: + # Strip "exchange:" prefix + if ':' in symbol: + return symbol.split(':', 1)[1] + return symbol + + async def watch_ohlcv(self, symbol: str, timeframe: str) -> OHLCV: + self.received_symbols.append(symbol) + return await super().watch_ohlcv(symbol, timeframe) + + +def 
__test_normalize_symbol_applied_to_watch_ohlcv__(): + """Framework calls normalize_symbol() before passing symbol to watch_ohlcv""" + updates = [_make_ohlcv(1000, is_closed=True)] + provider = NormalizingProvider(updates) + + list(live_ohlcv_generator(provider, "binance:BTC/USDT", "1D")) + + assert all(s == "BTC/USDT" for s in provider.received_symbols) + + +# --- Connection error from listener death tests --- + +class ListenerDeathProvider(MockLiveProvider): + """Provider that simulates WebSocket listener dying mid-stream.""" + + def __init__(self, bar_updates: list[OHLCV], die_at_index: int = 2): + super().__init__(bar_updates) + self._die_at_index = die_at_index + self._died = False + self._reconnected = False + + async def watch_ohlcv(self, symbol: str, timeframe: str) -> OHLCV: + if not self._died and self._index == self._die_at_index: + self._died = True + raise ConnectionError("WebSocket listener disconnected") + if self._died and not self._reconnected: + self._reconnected = True + return await super().watch_ohlcv(symbol, timeframe) + + +def __test_connection_error_triggers_reconnect__(): + """ConnectionError from watch_ohlcv triggers reconnect and resumes streaming""" + updates = [ + _make_ohlcv(1000, is_closed=True, close=100.0), + _make_ohlcv(2000, is_closed=True, close=200.0), + _make_ohlcv(3000, is_closed=True, close=300.0), + _make_ohlcv(4000, is_closed=True, close=400.0), + ] + + provider = ListenerDeathProvider(updates, die_at_index=2) + bars = list(live_ohlcv_generator(provider, "BTC/USDT", "1D")) + + # Should get bars from before and after the simulated death + assert len(bars) >= 2 + assert provider._reconnected From b8b766e146e170905ea8d869bb4747aeb9a05b34 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Thu, 9 Apr 2026 20:27:16 +0200 Subject: [PATCH 26/64] fix: raise on provider disconnect after timeout Detect provider disconnects during message receive timeouts and raise ConnectionError instead of continuing the loop. 
--- src/pynecore/core/live_runner.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/pynecore/core/live_runner.py b/src/pynecore/core/live_runner.py index 9a63e1b..59ef770 100644 --- a/src/pynecore/core/live_runner.py +++ b/src/pynecore/core/live_runner.py @@ -114,6 +114,10 @@ async def _async_loop(): pass except asyncio.TimeoutError: + if not provider.is_connected: + raise ConnectionError( + "Provider reports disconnected state" + ) continue except asyncio.CancelledError: break From 042f57b5aba06efa969bea4ba00c796751668379 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Tue, 14 Apr 2026 14:39:45 +0200 Subject: [PATCH 27/64] docs: update compatibility status for runtime features Mark calc_on_order_fills, calc_on_every_tick, and varip as supported. Remove no-longer-applicable runtime items from the unsupported features list. --- docs/overview/compatibility.md | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/docs/overview/compatibility.md b/docs/overview/compatibility.md index ba176b6..6b898cc 100644 --- a/docs/overview/compatibility.md +++ b/docs/overview/compatibility.md @@ -5,7 +5,7 @@ title: "Pine Script Compatibility" description: "Implementation status of Pine Script v6 features in PyneCore" icon: "checklist" date: "2026-03-28" -lastmod: "2026-03-28" +lastmod: "2026-04-13" draft: false toc: true categories: ["Overview", "Compatibility"] @@ -62,6 +62,8 @@ implementation status of all major Pine Script features. 
| `strategy.close_all()` | full | | | `strategy.cancel_all()` | full | | | Risk management | full | `strategy.risk.*` functions | +| `calc_on_order_fills` | full | Re-execution after fills, var rollback / varip persist | +| `calc_on_every_tick` | full | Live mode only — no effect on historical bars | ## Request Module @@ -178,7 +180,7 @@ All Pine Script v6 enum constants are implemented: | `if`/`else`/`switch` | full | Via PyneComp compilation | | `for`/`while` loops | full | | | `var` (persistent) | full | `Persistent[T]` annotation | -| `varip` (intrabar persist) | — | Not applicable in offline mode | +| `varip` (intrabar persist) | full | Persists across re-executions (COOF and live mode) | | Methods on types | full | `.get()`, `.set()`, `.size()`, etc. | | User-defined types (UDT) | full | Via PyneComp compilation | | Enums | full | Via PyneComp compilation | @@ -193,17 +195,13 @@ All Pine Script v6 enum constants are implemented: ## Not Applicable to PyneCore -These Pine Script features exist only in TradingView's live charting environment and are not -applicable to offline backtesting: - -| Feature | Reason | -|----------------------|---------------------------------------------------| -| `varip` | Intrabar persistence — offline bars are confirmed | -| Realtime bar updates | All bars are historical in offline mode | -| `alert()` triggers | No broker/notification integration | -| Chart rendering | No visual chart — output is CSV | -| `input()` UI widgets | Inputs are function parameters or TOML config | -| Order execution | Strategy simulator, not live trading | +These Pine Script features exist only in TradingView's live charting environment and have no +equivalent in PyneCore: + +| Feature | Reason | +|----------------------|--------------------------------------------------| +| Chart rendering | No visual chart — output is CSV | +| `input()` UI widgets | Inputs are function parameters or TOML config | ## Precision From 
548fb42c9811451089a80c5ed037c8f5f5a02a68 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Tue, 14 Apr 2026 14:40:55 +0200 Subject: [PATCH 28/64] feat: add broker plugin foundation Add broker models, errors, validation, plugin interface, and live position tracking. Split strategy position handling into a shared base class and simulator implementation. Detect script broker requirements during AST transformation and attach them to strategy scripts. Add unit coverage for broker models, live position accounting, and requirement detection. --- src/pynecore/core/broker/__init__.py | 58 ++++ src/pynecore/core/broker/exceptions.py | 78 +++++ src/pynecore/core/broker/models.py | 277 ++++++++++++++++++ src/pynecore/core/broker/position.py | 272 +++++++++++++++++ src/pynecore/core/broker/validation.py | 49 ++++ src/pynecore/core/import_hook.py | 2 + src/pynecore/core/plugin/broker.py | 182 ++++++++++++ src/pynecore/core/script.py | 12 +- src/pynecore/core/strategy_stats.py | 4 +- src/pynecore/lib/strategy/__init__.py | 70 ++++- .../transformers/script_requirements.py | 236 +++++++++++++++ .../core/test_021_broker_models.py | 79 +++++ .../core/test_022_broker_position.py | 166 +++++++++++ .../core/test_023_script_requirements.py | 208 +++++++++++++ .../t30_strategy/test_001_barupdn.toml | 48 +++ 15 files changed, 1728 insertions(+), 13 deletions(-) create mode 100644 src/pynecore/core/broker/__init__.py create mode 100644 src/pynecore/core/broker/exceptions.py create mode 100644 src/pynecore/core/broker/models.py create mode 100644 src/pynecore/core/broker/position.py create mode 100644 src/pynecore/core/broker/validation.py create mode 100644 src/pynecore/core/plugin/broker.py create mode 100644 src/pynecore/transformers/script_requirements.py create mode 100644 tests/t00_pynecore/core/test_021_broker_models.py create mode 100644 tests/t00_pynecore/core/test_022_broker_position.py create mode 100644 
tests/t00_pynecore/core/test_023_script_requirements.py create mode 100644 tests/t01_lib/t30_strategy/test_001_barupdn.toml diff --git a/src/pynecore/core/broker/__init__.py b/src/pynecore/core/broker/__init__.py new file mode 100644 index 0000000..840ecb5 --- /dev/null +++ b/src/pynecore/core/broker/__init__.py @@ -0,0 +1,58 @@ +""" +Broker plugin runtime support. + +- :mod:`pynecore.core.broker.models` — intent, event, exchange-state, + capability and requirement dataclasses. +- :mod:`pynecore.core.broker.exceptions` — broker error hierarchy. +- :mod:`pynecore.core.broker.position` — :class:`BrokerPosition` live + position tracker (no simulation). +""" +from pynecore.core.broker.exceptions import ( + BrokerError, + ExchangeCapabilityError, + ExchangeConnectionError, + ExchangeOrderRejectedError, + ExchangeRateLimitError, + OrderSyncError, + UnexpectedCancelError, +) +from pynecore.core.broker.models import ( + OrderStatus, + OrderType, + LegType, + ExchangeOrder, + OrderEvent, + ExchangePosition, + ExchangeCapabilities, + EntryIntent, + ExitIntent, + CloseIntent, + CancelIntent, + ScriptRequirements, + InterceptorResult, +) +from pynecore.core.broker.position import BrokerPosition + +__all__ = [ + 'BrokerError', + 'ExchangeCapabilityError', + 'ExchangeConnectionError', + 'ExchangeOrderRejectedError', + 'ExchangeRateLimitError', + 'OrderSyncError', + 'UnexpectedCancelError', + 'OrderStatus', + 'OrderType', + 'LegType', + 'ExchangeOrder', + 'OrderEvent', + 'ExchangePosition', + 'ExchangeCapabilities', + 'EntryIntent', + 'ExitIntent', + 'CloseIntent', + 'CancelIntent', + 'ScriptRequirements', + 'InterceptorResult', + 'BrokerPosition', +] diff --git a/src/pynecore/core/broker/exceptions.py b/src/pynecore/core/broker/exceptions.py new file mode 100644 index 0000000..7799b19 --- /dev/null +++ b/src/pynecore/core/broker/exceptions.py @@ -0,0 +1,78 @@ +""" +Broker-related exception hierarchy. + +All broker plugin and order-sync errors derive from :class:`BrokerError`. 
+""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pynecore.core.broker.models import ExchangeOrder + +__all__ = [ + 'BrokerError', + 'ExchangeCapabilityError', + 'ExchangeConnectionError', + 'ExchangeOrderRejectedError', + 'ExchangeRateLimitError', + 'OrderSyncError', + 'UnexpectedCancelError', +] + + +class BrokerError(RuntimeError): + """Base class for all broker-related errors.""" + + +class ExchangeCapabilityError(BrokerError): + """The exchange does not support a required feature. + + Raised by a BrokerPlugin when asked to do something its exchange cannot do + (e.g. a TP+SL bracket with OCA reduce semantics on an exchange without + native support for it). Treated as a graceful-stop condition at startup. + """ + + +class ExchangeConnectionError(BrokerError): + """Connection to the exchange was lost. + + The Order Sync Engine is expected to reconnect and then reconcile state + before resuming normal operation. + """ + + +class ExchangeOrderRejectedError(BrokerError): + """The exchange rejected an order. + + :ivar order: The rejected order as it is known locally, or ``None`` if the + order never made it far enough to have an exchange representation. + """ + + def __init__(self, message: str, order: 'ExchangeOrder | None' = None) -> None: + super().__init__(message) + self.order = order + + +class ExchangeRateLimitError(BrokerError): + """Exchange rate limit was hit. + + :ivar retry_after: Seconds the caller should wait before retrying. + """ + + def __init__(self, message: str, retry_after: float) -> None: + super().__init__(message) + self.retry_after = retry_after + + +class OrderSyncError(BrokerError): + """Exchange state diverged from the expected internal state.""" + + +class UnexpectedCancelError(BrokerError): + """A bot-owned order disappeared without the bot having cancelled it. + + Indicates external interference (manual user action, exchange-side + maintenance, margin-induced cancel, etc.). 
The default policy is a + graceful stop. + """ diff --git a/src/pynecore/core/broker/models.py b/src/pynecore/core/broker/models.py new file mode 100644 index 0000000..dce1d42 --- /dev/null +++ b/src/pynecore/core/broker/models.py @@ -0,0 +1,277 @@ +""" +Data models for the broker plugin system. + +These classes form the protocol between Pine Script, the Order Sync Engine, +and a concrete :class:`~pynecore.core.plugin.broker.BrokerPlugin`. Intent +objects describe what the script wants; Event objects describe what the +exchange actually did; Exchange* objects are snapshots of exchange state. + +See ``docs/pynecore/plugin-system/broker-plugin-plan.md`` for the full design. +""" +from __future__ import annotations + +from dataclasses import dataclass +from enum import StrEnum + +__all__ = [ + 'OrderStatus', + 'OrderType', + 'LegType', + 'ExchangeOrder', + 'OrderEvent', + 'ExchangePosition', + 'ExchangeCapabilities', + 'EntryIntent', + 'ExitIntent', + 'CloseIntent', + 'CancelIntent', + 'ScriptRequirements', + 'InterceptorResult', +] + + +class OrderStatus(StrEnum): + PENDING = "pending" + OPEN = "open" + PARTIALLY_FILLED = "partial" + FILLED = "filled" + CANCELLED = "cancelled" + REJECTED = "rejected" + EXPIRED = "expired" + + +class OrderType(StrEnum): + MARKET = "market" + LIMIT = "limit" + STOP = "stop" + STOP_LIMIT = "stop_limit" + TRAILING_STOP = "trailing_stop" + + +class LegType(StrEnum): + ENTRY = "entry" + TAKE_PROFIT = "tp" + STOP_LOSS = "sl" + TRAILING_STOP = "trail" + CLOSE = "close" + + +# === Exchange state snapshots === + +@dataclass +class ExchangeOrder: + """An order as it exists on the exchange.""" + id: str + symbol: str + side: str # "buy" | "sell" + order_type: OrderType + qty: float + filled_qty: float + remaining_qty: float + price: float | None # Limit price + stop_price: float | None # Trigger price + average_fill_price: float | None + status: OrderStatus + timestamp: float # Creation time (unix seconds) + fee: float + fee_currency: str + 
reduce_only: bool = False + + +@dataclass +class OrderEvent: + """ + A normalized fill/status event reported by a BrokerPlugin. + + The plugin is responsible for mapping exchange-level events back to + Pine-level identity. A single Pine exit intent may become multiple + exchange orders (e.g. Bybit Partial TP/SL pairs), and only the plugin + knows the mapping. + """ + order: ExchangeOrder + event_type: str # "created" | "filled" | "partial" | "cancelled" | "rejected" + fill_price: float | None + fill_qty: float | None + timestamp: float + # Pine-level identity (filled by the plugin, used by sync engine + BrokerPosition) + pine_id: str | None = None # Pine order ID (entry id or exit id) + from_entry: str | None = None # Which entry this fill belongs to (for exits) + leg_type: LegType | None = None # Which leg of a bracket filled + fee: float = 0.0 + fee_currency: str = "" + + +@dataclass +class ExchangePosition: + """Current position on the exchange (futures/margin).""" + symbol: str + side: str # "long" | "short" | "flat" + size: float + entry_price: float + unrealized_pnl: float + liquidation_price: float | None + leverage: float + margin_mode: str # "cross" | "isolated" + + +@dataclass +class ExchangeCapabilities: + """What the exchange supports. Declared once at startup by the plugin.""" + # Order types + stop_order: bool = False + stop_limit_order: bool = False + trailing_stop: bool = False + # Exit bracket (TP+SL with OCA reduce semantics) + tp_sl_bracket: bool = False + # Order management + amend_order: bool = False + cancel_all: bool = False + reduce_only: bool = False + # Streaming & position + watch_orders: bool = False + fetch_position: bool = False + + +# === Pine Script intents === + +@dataclass(frozen=True) +class EntryIntent: + """What the script wants: open or add to a position.""" + pine_id: str # strategy.entry(id=...) or strategy.order(id=...) 
+ symbol: str + side: str # "buy" | "sell" + qty: float + order_type: OrderType + limit: float | None = None # Limit price + stop: float | None = None # Trigger/activation price + oca_name: str | None = None # OCA group name (strategy.entry/order oca_name param) + oca_type: str | None = None # "reduce" | "cancel" | "none" (strategy.oca.*) + comment: str | None = None + alert_message: str | None = None + is_strategy_order: bool = False # True if from strategy.order() (no pyramiding limit) + + @property + def intent_key(self) -> str: + """Stable diff key for the sync engine.""" + return self.pine_id + + +@dataclass(frozen=True) +class ExitIntent: + """What the script wants: reduce/close a position via TP/SL bracket.""" + pine_id: str # strategy.exit(id=...) + from_entry: str # strategy.exit(from_entry=...) + symbol: str + side: str # Exit side ("sell" for long TP/SL, "buy" for short TP/SL) + qty: float + tp_price: float | None = None # Take-profit limit price + sl_price: float | None = None # Stop-loss trigger price + trail_price: float | None = None # Trailing stop activation price + trail_offset: float | None = None # Trailing stop offset (price units) + # Raw tick values — used when exit is against a pending (unfilled) entry, + # so absolute prices cannot be calculated yet. The Order Sync Engine + # converts these to tp_price/sl_price after the entry fills. + profit_ticks: float | None = None + loss_ticks: float | None = None + trail_points_ticks: float | None = None + oca_name: str | None = None # OCA group name (auto: __exit_{id}_{from_entry}_oca__) + oca_type: str | None = None # "reduce" | "cancel" | "none" (default: reduce for exits) + comment: str | None = None + comment_profit: str | None = None + comment_loss: str | None = None + comment_trailing: str | None = None + alert_message: str | None = None + + @property + def intent_key(self) -> str: + """ + Stable diff key for the sync engine. 
+ + Cannot be just pine_id — strategy.exit(id="TP") can create separate + exit orders for different from_entry values (e.g. "Long" and "Short"). + The (pine_id, from_entry) tuple is the unique key. + """ + return f"{self.pine_id}\0{self.from_entry}" + + @property + def has_unresolved_ticks(self) -> bool: + """True if tick-based prices need entry fill price to resolve.""" + return (self.profit_ticks is not None or self.loss_ticks is not None + or self.trail_points_ticks is not None) + + +@dataclass(frozen=True) +class CloseIntent: + """ + What the script wants: close position with market order. + + The ``immediately`` flag mirrors TradingView backtest semantics: without + it, a close waits for the next bar's open. With ``calc_on_every_tick`` + a non-immediate close can delay execution by an entire bar in live + trading, which is why the flag exists. + """ + pine_id: str # strategy.close(id=...) or strategy.close_all() + symbol: str + side: str # "sell" to close long, "buy" to close short + qty: float + immediately: bool = False + comment: str | None = None + alert_message: str | None = None + + @property + def intent_key(self) -> str: + return self.pine_id + + +@dataclass(frozen=True) +class CancelIntent: + """ + What the script wants: cancel a pending order. + + ``strategy.cancel(id)`` cancels ALL orders matching that id. For exits + this means every (pine_id, from_entry) pair with that pine_id. The + Order Sync Engine resolves the affected intent_keys and may send + multiple CancelIntents (one per from_entry), or a single one with + ``from_entry=None`` meaning "cancel all exits with this pine_id". + """ + pine_id: str + symbol: str + from_entry: str | None = None + + @property + def intent_key(self) -> str: + if self.from_entry is not None: + return f"{self.pine_id}\0{self.from_entry}" + return self.pine_id + + +# === Compile-time detected script requirements === + +@dataclass +class ScriptRequirements: + """Broker capabilities needed by this script. 
Detected via AST analysis.""" + market_orders: bool = False + limit_orders: bool = False + stop_orders: bool = False + stop_limit_orders: bool = False + tp_sl_bracket: bool = False # strategy.exit() with BOTH limit+stop or profit+loss + trailing_stop: bool = False + strategy_order: bool = False # strategy.order() — no pyramiding limit + + +# === Interceptor (Order Sync Engine extension point) === + +@dataclass +class InterceptorResult: + """ + Interceptor decision on an intent — modifiable before execution. + + Intent objects are frozen, so modifications are expressed as override + fields on this result rather than by mutating the intent in place. + """ + intent: EntryIntent | ExitIntent | CloseIntent | CancelIntent + rejected: bool = False + reject_reason: str = "" + modified_qty: float | None = None + modified_limit: float | None = None + modified_stop: float | None = None diff --git a/src/pynecore/core/broker/position.py b/src/pynecore/core/broker/position.py new file mode 100644 index 0000000..9df3ae3 --- /dev/null +++ b/src/pynecore/core/broker/position.py @@ -0,0 +1,272 @@ +""" +Position tracking for live broker trading. + +:class:`BrokerPosition` extends :class:`~pynecore.lib.strategy.PositionBase` +with no simulation logic — the exchange is the source of truth for fills, +prices, fees, and margin state. +""" +from __future__ import annotations + +from collections import deque +from typing import TYPE_CHECKING + +from pynecore import lib +from pynecore.lib.strategy import PositionBase, Trade +from pynecore.types.na import na_float + +if TYPE_CHECKING: + from pynecore.lib.strategy import Order + from pynecore.core.broker.models import OrderEvent + +__all__ = ['BrokerPosition'] + + +class BrokerPosition(PositionBase): + """ + Position state tracker for live broker trading. 
+ + The exchange determines fills, prices, fees, and margin state; + :meth:`record_fill` consumes :class:`OrderEvent` objects emitted by a + :class:`~pynecore.core.plugin.broker.BrokerPlugin` and updates the + local view of the position accordingly. + + Trades are tracked FIFO: the first entry filled is the first closed + when the position is reduced, matching TradingView default semantics. + + Note: margin, liquidation price, and fee currency conversion are all + handled by the exchange. This class only records what the exchange + tells it. + """ + + __slots__ = ( + 'size', 'sign', 'avg_price', + 'netprofit', 'openprofit', 'grossprofit', 'grossloss', + 'open_commission', + 'eventrades', 'wintrades', 'losstrades', + 'max_drawdown', 'max_runup', + 'open_trades', 'closed_trades', + 'entry_orders', 'exit_orders', + '_current_price', + ) + + def __init__(self) -> None: + self.size: float = 0.0 + self.sign: float = 0.0 + self.avg_price = na_float + + self.netprofit: float = 0.0 + self.openprofit: float = 0.0 + self.grossprofit: float = 0.0 + self.grossloss: float = 0.0 + self.open_commission: float = 0.0 + + self.eventrades: int = 0 + self.wintrades: int = 0 + self.losstrades: int = 0 + self.max_drawdown: float = 0.0 + self.max_runup: float = 0.0 + + self.open_trades: list[Trade] = [] + self.closed_trades: deque[Trade] = deque(maxlen=9000) + + self.entry_orders: dict[str | None, 'Order'] = {} + self.exit_orders: dict[str | None, 'Order'] = {} + + self._current_price: float = 0.0 + + # === Pine-side order book === + + def _add_order(self, order: 'Order') -> None: + """Register an order locally (the sync engine forwards it to the exchange).""" + order.bar_index = int(lib.bar_index) + from pynecore.lib.strategy import _order_type_close # local import avoids cycle + if order.order_type == _order_type_close: + self.exit_orders[order.order_id] = order + else: + self.entry_orders[order.order_id] = order + + def _remove_order(self, order: 'Order') -> None: + """Cancel an order 
locally.""" + order.cancelled = True + from pynecore.lib.strategy import _order_type_close + if order.order_type == _order_type_close: + self.exit_orders.pop(order.order_id, None) + else: + self.entry_orders.pop(order.order_id, None) + + def _remove_order_by_id(self, order_id: str) -> None: + order = self.exit_orders.get(order_id) or self.entry_orders.get(order_id) + if order is not None: + self._remove_order(order) + + # === Exchange-side state updates === + + def record_fill(self, event: 'OrderEvent') -> bool: + """ + Record an exchange fill. + + :param event: An :class:`OrderEvent` with ``fill_qty`` and + ``fill_price`` populated, plus Pine identity fields + (``pine_id``, ``from_entry``, ``leg_type``) filled by the plugin. + :return: ``True`` if the position side changed as a result of this fill. + """ + fill_qty = event.fill_qty or 0.0 + fill_price = event.fill_price or 0.0 + if fill_qty <= 0.0 or fill_price <= 0.0: + return False + + signed_delta = fill_qty if event.order.side == "buy" else -fill_qty + old_sign = self.sign + new_size = self.size + signed_delta + + # Commission bookkeeping — realized fee becomes part of net P&L at close + fee = event.fee + + if self.size == 0.0 or (old_sign * signed_delta) > 0.0: + # Opening or adding to an existing position (same direction) + new_abs = abs(new_size) + old_abs = abs(self.size) + if old_abs == 0.0 or self.avg_price is na_float: + self.avg_price = fill_price + else: + self.avg_price = (self.avg_price * old_abs + fill_price * fill_qty) / new_abs + self.size = new_size + self.sign = 1.0 if new_size > 0.0 else (-1.0 if new_size < 0.0 else 0.0) + + trade = Trade( + size=signed_delta, + entry_id=event.pine_id, + entry_bar_index=int(getattr(lib, 'bar_index', 0)), + entry_time=int(event.timestamp * 1000.0), + entry_price=fill_price, + commission=fee, + entry_comment=None, + entry_equity=self.equity, + ) + self.open_trades.append(trade) + self.open_commission += fee + return False + + # Reducing or flipping — FIFO close 
of existing trades + remaining = fill_qty + closed_profit = 0.0 + closed_fee = 0.0 + while remaining > 0.0 and self.open_trades: + trade = self.open_trades[0] + trade_abs = abs(trade.size) + if trade_abs <= remaining + 1e-12: + # Close this trade fully + self._close_trade(trade, fill_price, event, fee_share=fee * (trade_abs / fill_qty)) + closed_profit += trade.profit + closed_fee += trade.commission + remaining -= trade_abs + else: + # Partial close: split the trade + closed_piece = Trade( + size=trade.sign * remaining, + entry_id=trade.entry_id, + entry_bar_index=trade.entry_bar_index, + entry_time=trade.entry_time, + entry_price=trade.entry_price, + commission=trade.commission * (remaining / trade_abs), + entry_comment=trade.entry_comment, + entry_equity=trade.entry_equity, + ) + self._close_trade(closed_piece, fill_price, event, fee_share=fee) + closed_profit += closed_piece.profit + # Shrink the remaining open trade + trade.size -= closed_piece.size + trade.commission -= closed_piece.commission + remaining = 0.0 + + self.size += signed_delta + # Clamp tiny residuals to zero + if abs(self.size) < 1e-12: + self.size = 0.0 + self.sign = 0.0 + self.avg_price = na_float + else: + self.sign = 1.0 if self.size > 0.0 else -1.0 + + # If there is leftover qty after closing all open_trades → side flip + if remaining > 0.0: + new_size = self.sign * remaining if self.sign != 0.0 else signed_delta + self.size = new_size + self.sign = 1.0 if new_size > 0.0 else (-1.0 if new_size < 0.0 else 0.0) + self.avg_price = fill_price + flipped = Trade( + size=new_size, + entry_id=event.pine_id, + entry_bar_index=int(getattr(lib, 'bar_index', 0)), + entry_time=int(event.timestamp * 1000.0), + entry_price=fill_price, + commission=0.0, + entry_comment=None, + entry_equity=self.equity, + ) + self.open_trades.append(flipped) + + # Update running stats + self.netprofit += closed_profit + if closed_profit > 0.0: + self.grossprofit += closed_profit + self.wintrades += 1 + elif closed_profit < 
0.0: + self.grossloss += closed_profit + self.losstrades += 1 + else: + self.eventrades += 1 + + self.open_commission = sum(t.commission for t in self.open_trades) + + return self.sign != old_sign + + def update_unrealized_pnl(self, current_price: float) -> None: + """Mark-to-market: recompute :attr:`openprofit` at the given price.""" + self._current_price = current_price + if not self.open_trades or current_price <= 0.0: + self.openprofit = 0.0 + return + total = 0.0 + for trade in self.open_trades: + total += (current_price - trade.entry_price) * trade.size + self.openprofit = total + + def record_liquidation(self, event: 'OrderEvent') -> None: + """Record an exchange-initiated liquidation — close all open trades.""" + if not self.open_trades: + return + fill_price = event.fill_price or 0.0 + for trade in list(self.open_trades): + self._close_trade(trade, fill_price, event, fee_share=event.fee / max(len(self.open_trades), 1)) + self.netprofit += trade.profit + if trade.profit > 0.0: + self.grossprofit += trade.profit + self.wintrades += 1 + elif trade.profit < 0.0: + self.grossloss += trade.profit + self.losstrades += 1 + else: + self.eventrades += 1 + self.size = 0.0 + self.sign = 0.0 + self.avg_price = na_float + self.openprofit = 0.0 + self.open_commission = 0.0 + + # === Internals === + + def _close_trade(self, trade: Trade, fill_price: float, + event: 'OrderEvent', fee_share: float) -> None: + """Move a (possibly split) Trade from open_trades to closed_trades.""" + trade.exit_id = event.pine_id or "" + trade.exit_bar_index = int(getattr(lib, 'bar_index', 0)) + trade.exit_time = int(event.timestamp * 1000.0) + trade.exit_price = fill_price + trade.exit_comment = '' + trade.commission += fee_share + trade.profit = (fill_price - trade.entry_price) * trade.size - trade.commission + trade.exit_equity = self.equity + trade.profit + if trade in self.open_trades: + self.open_trades.remove(trade) + self.closed_trades.append(trade) diff --git 
a/src/pynecore/core/broker/validation.py b/src/pynecore/core/broker/validation.py new file mode 100644 index 0000000..4592421 --- /dev/null +++ b/src/pynecore/core/broker/validation.py @@ -0,0 +1,49 @@ +""" +Startup-time validation of script :class:`ScriptRequirements` against a +plugin's :class:`ExchangeCapabilities`. + +Pure function — the Script Runner calls this at broker-mode startup (future +phase) and, on a non-empty error list, refuses to start trading. +""" +from __future__ import annotations + +from pynecore.core.broker.models import ScriptRequirements, ExchangeCapabilities + +__all__ = ['validate_at_startup'] + + +def validate_at_startup( + reqs: ScriptRequirements, + caps: ExchangeCapabilities, +) -> list[str]: + """ + Return a list of human-readable error strings — empty if all requirements + are satisfied by the exchange capabilities. + + The rule is simple: if the script uses a Pine parameter, the exchange + must support the corresponding capability. No runtime "softening" — a + syntactically-present ``stop=`` keyword means stop orders are required, + even if the runtime value would end up being ``na`` on every bar. + Safety-first: better to refuse to start than to fail on the first + unexpected bar in live trading. + """ + errors: list[str] = [] + if reqs.stop_orders and not caps.stop_order: + errors.append( + "Script uses stop orders, but the exchange doesn't support them." + ) + if reqs.stop_limit_orders and not caps.stop_limit_order: + errors.append( + "Script uses stop-limit orders, but the exchange doesn't support them." + ) + if reqs.tp_sl_bracket and not caps.tp_sl_bracket: + errors.append( + "Script uses TP+SL exit brackets (OCA reduce), but the exchange " + "plugin doesn't support them. Use a plugin that emulates this, " + "or modify the script." + ) + if reqs.trailing_stop and not caps.trailing_stop: + errors.append( + "Script uses trailing stops, but the exchange doesn't support them." 
+ ) + return errors diff --git a/src/pynecore/core/import_hook.py b/src/pynecore/core/import_hook.py index c9e2b46..02f0fe2 100644 --- a/src/pynecore/core/import_hook.py +++ b/src/pynecore/core/import_hook.py @@ -73,6 +73,7 @@ def source_to_code(self, data: bytes | str, path: str, *, _optimize: int = -1): from pynecore.transformers.function_isolation import FunctionIsolationTransformer from pynecore.transformers.module_property import ModulePropertyTransformer from pynecore.transformers.series import SeriesTransformer + from pynecore.transformers.script_requirements import ScriptRequirementsTransformer from pynecore.transformers.unused_series_detector import UnusedSeriesDetectorTransformer from pynecore.transformers.persistent import PersistentTransformer from pynecore.transformers.input_transformer import InputTransformer @@ -88,6 +89,7 @@ def source_to_code(self, data: bytes | str, path: str, *, _optimize: int = -1): transformed = ModulePropertyTransformer().visit(transformed) transformed = ClosureArgumentsTransformer().visit(transformed) transformed = FunctionIsolationTransformer().visit(transformed) + transformed = ScriptRequirementsTransformer().visit(transformed) transformed = UnusedSeriesDetectorTransformer().optimize(transformed) transformed = SeriesTransformer().visit(transformed) transformed = PersistentTransformer().visit(transformed) diff --git a/src/pynecore/core/plugin/broker.py b/src/pynecore/core/plugin/broker.py new file mode 100644 index 0000000..a79c271 --- /dev/null +++ b/src/pynecore/core/plugin/broker.py @@ -0,0 +1,182 @@ +""" +:class:`BrokerPlugin` — high-level order execution layer. + +A broker plugin receives Pine Script *intents* (entry, exit bracket, close, +cancel) and translates them to exchange-specific orders. The plugin author +decides HOW: native brackets, separate orders with software monitoring, +``reduce_only`` flags, editOrder vs cancel-and-replace, etc. 
+ +Intents carry full Pine Script identity (``pine_id``, ``from_entry``, +``oca_name``) so the plugin can track order lifecycle and the sync engine +can route :class:`OrderEvent` fills back to the correct Pine trade. + +See ``docs/pynecore/plugin-system/broker-plugin-plan.md`` for the full +design, in particular the rationale for the high-level intent API. +""" +from __future__ import annotations + +from abc import ABC, abstractmethod +from collections.abc import AsyncIterator +from typing import TYPE_CHECKING + +from pynecore.core.plugin import ConfigT +from pynecore.core.plugin.live_provider import LiveProviderPlugin +from pynecore.core.broker.exceptions import ExchangeCapabilityError +from pynecore.core.broker.models import CancelIntent + +if TYPE_CHECKING: + from pynecore.core.broker.models import ( + EntryIntent, + ExitIntent, + CloseIntent, + ExchangeOrder, + ExchangePosition, + ExchangeCapabilities, + OrderEvent, + ) + +__all__ = ['BrokerPlugin'] + + +class BrokerPlugin(LiveProviderPlugin[ConfigT], ABC): + """ + High-level order execution layer. + + Subclasses implement the ``execute_*`` methods in whatever way their + exchange supports. The Order Sync Engine only calls these methods and + routes back the :class:`OrderEvent` objects they produce — it never + reaches into exchange-specific APIs itself. + """ + + on_unexpected_cancel: str = "stop" + """ + Policy for bot-owned orders that disappear without the bot cancelling them. + + One of ``"stop"`` (graceful stop, default), ``"stop_and_cancel"`` + (stop + cancel remaining bot orders), ``"re_place"`` (auto-replace + protective orders), or ``"ignore"`` (continue). Plugin authors may + override the default; users may further override via plugin config. + """ + + # === High-level order intents === + + @abstractmethod + async def execute_entry(self, intent: 'EntryIntent') -> list['ExchangeOrder']: + """ + Open or add to a position. + + Maps to ``strategy.entry()`` and ``strategy.order()``. 
+ + | Pine params | order_type | limit | stop | + |---------------------|--------------|----------|----------| + | no limit, no stop | MARKET | None | None | + | limit only | LIMIT | price | None | + | stop only | STOP | None | trigger | + | limit + stop | STOP_LIMIT | price | trigger | + """ + + @abstractmethod + async def execute_exit(self, intent: 'ExitIntent') -> list['ExchangeOrder']: + """ + Exit (reduce) a position. OCA REDUCE semantics expected. + + Maps to ``strategy.exit()``. + + The plugin decides HOW to implement the TP+SL bracket on its exchange: + native bracket orders, separate orders with monitoring, etc. + + MUST handle: partial fill on one leg adjusts the other. If the + exchange cannot support a required combination, raise + :class:`ExchangeCapabilityError`. + """ + + @abstractmethod + async def execute_close(self, intent: 'CloseIntent') -> 'ExchangeOrder': + """ + Close a position with a market order. + + Maps to ``strategy.close()`` / ``strategy.close_all()``. + """ + + @abstractmethod + async def execute_cancel(self, intent: 'CancelIntent') -> bool: + """ + Cancel pending order(s). Returns ``True`` if cancelled. + """ + + # noinspection PyMethodMayBeStatic,PyUnusedLocal + async def execute_cancel_all(self, symbol: str | None = None) -> int: + """Cancel all open orders. Returns the count cancelled.""" + raise ExchangeCapabilityError("Bulk cancel not supported") + + # === Modify (upsert/replace) === + + async def modify_entry( + self, old_intent: 'EntryIntent', new_intent: 'EntryIntent', + ) -> list['ExchangeOrder']: + """ + Modify an existing entry order (price/qty changed). + + Default implementation: cancel + execute. Plugin authors SHOULD + override with an atomic amend when the exchange supports it. 
+ """ + await self.execute_cancel(CancelIntent( + pine_id=old_intent.pine_id, + symbol=old_intent.symbol, + )) + return await self.execute_entry(new_intent) + + async def modify_exit( + self, old_intent: 'ExitIntent', new_intent: 'ExitIntent', + ) -> list['ExchangeOrder']: + """ + Modify an existing exit bracket (TP/SL price changed). + + Default: cancel + new. This opens a window without protection — + plugin authors SHOULD override with an atomic amend when the + exchange supports it (``editOrder``, Bybit amend, etc.). + """ + await self.execute_cancel(CancelIntent( + pine_id=old_intent.pine_id, + symbol=old_intent.symbol, + from_entry=old_intent.from_entry, + )) + return await self.execute_exit(new_intent) + + # === State queries === + + @abstractmethod + async def get_open_orders(self, symbol: str | None = None) -> list['ExchangeOrder']: + """Fetch all open orders from the exchange.""" + + @abstractmethod + async def get_position(self, symbol: str) -> 'ExchangePosition | None': + """Fetch current position. Returns ``None`` for spot markets.""" + + @abstractmethod + async def get_balance(self) -> dict[str, float]: + """Get available balance per currency.""" + + # === Live order stream === + + def watch_orders(self) -> AsyncIterator['OrderEvent']: + """ + Stream order status updates via WebSocket. + + If not implemented, the framework polls :meth:`get_open_orders` on + each bar. Return an async iterator of :class:`OrderEvent` objects; + the plugin is responsible for filling in the Pine identity fields + (``pine_id``, ``from_entry``, ``leg_type``) on each event. + """ + raise NotImplementedError + + # === Capabilities === + + @abstractmethod + def get_capabilities(self) -> 'ExchangeCapabilities': + """ + Declare what the exchange supports. + + Called once at startup for validation against script requirements + (see :func:`~pynecore.core.broker.validation.validate_at_startup`). 
+ """ diff --git a/src/pynecore/core/script.py b/src/pynecore/core/script.py index f3960df..f1fbded 100644 --- a/src/pynecore/core/script.py +++ b/src/pynecore/core/script.py @@ -13,6 +13,8 @@ import pynecore.lib.currency as _currency import pynecore.lib.display as _display +from pynecore.core.broker.models import ScriptRequirements + from pynecore.types import script_type as _script_type from pynecore.types.color import Color from pynecore.types import PyneFloat, PyneInt @@ -104,7 +106,9 @@ class Script: use_bar_magnifier: bool = True fill_orders_on_standard_ohlc: bool = False - position: _strategy.Position = None # type: ignore[assignment] + position: _strategy.PositionBase = None # type: ignore[assignment] + + _broker_requirements: ScriptRequirements | None = None _modified: set[str] = field(default_factory=set) @@ -379,6 +383,8 @@ def strategy( behind_chart=True, + _broker_requirements: ScriptRequirements | None = None, + *_, **__ ) -> Callable[..., Any]: """ @@ -472,7 +478,9 @@ def strategy( script.dynamic_requests = dynamic_requests script.behind_chart = behind_chart - script.position = _strategy.Position() + script.position = _strategy.SimPosition() + + script._broker_requirements = _broker_requirements return script._decorate() diff --git a/src/pynecore/core/strategy_stats.py b/src/pynecore/core/strategy_stats.py index 8de297b..bf8280f 100644 --- a/src/pynecore/core/strategy_stats.py +++ b/src/pynecore/core/strategy_stats.py @@ -11,7 +11,7 @@ from ..types.na import NA from ..lib.strategy import Trade from .csv_file import CSVWriter -from ..lib.strategy import Position +from ..lib.strategy import PositionBase @dataclass @@ -185,7 +185,7 @@ def to_dict(self) -> dict[str, float | int]: def calculate_strategy_statistics( - position: Position, + position: PositionBase, initial_capital: float, equity_curve: list[float] | None = None, first_price: float | None = None, diff --git a/src/pynecore/lib/strategy/__init__.py b/src/pynecore/lib/strategy/__init__.py index 
585ff76..c161dcf 100644 --- a/src/pynecore/lib/strategy/__init__.py +++ b/src/pynecore/lib/strategy/__init__.py @@ -1,6 +1,7 @@ from typing import TYPE_CHECKING, Literal, overload import math +from abc import ABC, abstractmethod from datetime import datetime, UTC from collections import deque, defaultdict from copy import copy @@ -25,7 +26,7 @@ "fixed", "cash", "percent_of_equity", "long", "short", 'direction', - 'Trade', 'Order', 'Position', + 'Trade', 'Order', 'PositionBase', 'SimPosition', "cancel", "cancel_all", "close", "close_all", "entry", "exit", "order", "closedtrades", "opentrades", @@ -401,12 +402,68 @@ def clear(self): self.order_prices.clear() +class PositionBase(ABC): + """ + Abstract base class for position tracking. + + Both backtest simulation (:class:`SimPosition`) and live broker trading + (:class:`pynecore.core.broker.position.BrokerPosition`) subclass this. + The Pine Script API surface — ``strategy.position_size``, + ``strategy.opentrades``, ``strategy.netprofit``, ``strategy.equity``, + etc. — reads the attributes declared here, so concrete subclasses MUST + initialize all of them in ``__init__``. + """ + __slots__ = () + + # Attribute surface (declared for documentation and type-checking only — + # concrete subclasses declare these in ``__slots__`` and initialize them). 
+ size: float + sign: float + avg_price: PyneFloat + netprofit: PyneFloat + openprofit: PyneFloat + grossprofit: PyneFloat + grossloss: PyneFloat + open_commission: float + eventrades: int + wintrades: int + losstrades: int + max_drawdown: float + max_runup: float + open_trades: list['Trade'] + closed_trades: 'deque[Trade]' + entry_orders: dict[str | None, 'Order'] + exit_orders: dict[str | None, 'Order'] + + @property + def equity(self) -> PyneFloat: + """The current equity (initial capital + realized + unrealized P&L).""" + return lib._script.initial_capital + self.netprofit + self.openprofit + + @abstractmethod + def _add_order(self, order: 'Order') -> None: + """Register an order with this position.""" + + @abstractmethod + def _remove_order(self, order: 'Order') -> None: + """Cancel/remove an order from this position.""" + + @abstractmethod + def _remove_order_by_id(self, order_id: str) -> None: + """Remove an order by its id (searches both exit and entry books).""" + + # noinspection PyProtectedMember,PyShadowingNames,DuplicatedCode -class Position: +class SimPosition(PositionBase): """ - This holds data about positions and trades + Backtest simulation of position and trade state. + + Reproduces TradingView's strategy simulator faithfully: OHLC-based fill + detection, synthetic slippage, margin-call emulation, gap-through logic, + OCA reduce/cancel handling, trailing-stop tracking, etc. - This is the main class for strategies + Live broker trading uses :class:`BrokerPosition` instead — exchange fills + override all of the simulator logic below. 
""" __slots__ = ( @@ -498,11 +555,6 @@ def __init__(self): self._deferred_margin_call: tuple[float, bool] | None = None self._fill_counter: int = 0 - @property - def equity(self) -> PyneFloat: - """ The current equity """ - return lib._script.initial_capital + self.netprofit + self.openprofit - def _add_order(self, order: Order): """ Add an order to the strategy """ # Set the bar_index when the order is placed diff --git a/src/pynecore/transformers/script_requirements.py b/src/pynecore/transformers/script_requirements.py new file mode 100644 index 0000000..40538ad --- /dev/null +++ b/src/pynecore/transformers/script_requirements.py @@ -0,0 +1,236 @@ +""" +Detect broker capability requirements of a strategy script at compile time. + +Scans the module AST for calls to ``strategy.entry``, ``strategy.exit``, +``strategy.order``, ``strategy.close``, and ``strategy.close_all``, and +from the keyword arguments present at each call site deduces which +:class:`~pynecore.core.broker.models.ScriptRequirements` flags the script +needs. + +The detected :class:`ScriptRequirements` is injected as the +``_broker_requirements`` keyword of the ``@script.strategy(...)`` decorator +call on the script's ``main`` function, so the :class:`Script` object +carries the requirements at runtime — no need for a second AST pass or +metadata side channel. + +Detection is **conservative**: if the keyword is syntactically present +(even with an ``na`` value), the requirement is taken to be needed. Better +to refuse to start against an under-capable exchange than to fail on the +first unexpected bar in live trading. +""" +from __future__ import annotations + +import ast + +__all__ = ['ScriptRequirementsTransformer'] + +# Flag names on ScriptRequirements — kept in sync with the dataclass in +# pynecore.core.broker.models. 
+_FLAG_MARKET = 'market_orders' +_FLAG_LIMIT = 'limit_orders' +_FLAG_STOP = 'stop_orders' +_FLAG_STOP_LIMIT = 'stop_limit_orders' +_FLAG_BRACKET = 'tp_sl_bracket' +_FLAG_TRAIL = 'trailing_stop' +_FLAG_STRATEGY_ORDER = 'strategy_order' + + +def _strategy_call_name(node: ast.Call) -> str | None: + """ + Return ``"entry"`` / ``"exit"`` / ``"order"`` / ``"close"`` / ``"close_all"`` + if ``node`` is a call to ``(lib.)strategy.``, else ``None``. + + Matches both ``strategy.entry(...)`` (when the script imported + ``strategy`` directly) and ``lib.strategy.entry(...)`` (the form that + earlier transformers like ``ImportNormalizer`` may produce). + """ + func = node.func + if not isinstance(func, ast.Attribute): + return None + method = func.attr + parent = func.value + # strategy. + if isinstance(parent, ast.Name) and parent.id == 'strategy': + return method + # lib.strategy. + if isinstance(parent, ast.Attribute) and parent.attr == 'strategy': + grandparent = parent.value + if isinstance(grandparent, ast.Name) and grandparent.id == 'lib': + return method + return None + + +def _kw_names(node: ast.Call) -> set[str]: + """Keyword argument names syntactically present on the call.""" + return {kw.arg for kw in node.keywords if kw.arg is not None} + + +def _is_script_strategy_decorator(node: ast.expr) -> bool: + """ + True if ``node`` is a ``@script.strategy(...)`` call — matches both the + raw form and the ``@lib.script.strategy(...)`` form produced by + :class:`ImportNormalizerTransformer`. 
+ """ + if not isinstance(node, ast.Call): + return False + func = node.func + if not (isinstance(func, ast.Attribute) and func.attr == 'strategy'): + return False + parent = func.value + # script.strategy + if isinstance(parent, ast.Name) and parent.id == 'script': + return True + # lib.script.strategy + if (isinstance(parent, ast.Attribute) and parent.attr == 'script' + and isinstance(parent.value, ast.Name) and parent.value.id == 'lib'): + return True + return False + + +class ScriptRequirementsTransformer(ast.NodeTransformer): + """ + Compute :class:`ScriptRequirements` for a strategy script and inject it + into the ``@script.strategy(...)`` decorator as the + ``_broker_requirements`` keyword argument. + + No-op on scripts that have no ``@script.strategy(...)`` decorator + (indicator scripts). + """ + + def __init__(self) -> None: + self._reqs: dict[str, bool] = { + _FLAG_MARKET: False, + _FLAG_LIMIT: False, + _FLAG_STOP: False, + _FLAG_STOP_LIMIT: False, + _FLAG_BRACKET: False, + _FLAG_TRAIL: False, + _FLAG_STRATEGY_ORDER: False, + } + self._strategy_decorator: ast.Call | None = None + + def visit_Module(self, node: ast.Module) -> ast.Module: + self.generic_visit(node) + if self._strategy_decorator is None: + return node + self._inject_requirements(node, self._strategy_decorator) + ast.fix_missing_locations(node) + return node + + def visit_FunctionDef(self, node: ast.FunctionDef) -> ast.FunctionDef: + for dec in node.decorator_list: + if _is_script_strategy_decorator(dec): + self._strategy_decorator = dec # type: ignore[assignment] + break + self.generic_visit(node) + return node + + def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AsyncFunctionDef: + for dec in node.decorator_list: + if _is_script_strategy_decorator(dec): + self._strategy_decorator = dec # type: ignore[assignment] + break + self.generic_visit(node) + return node + + def visit_Call(self, node: ast.Call) -> ast.Call: + self.generic_visit(node) + name = 
_strategy_call_name(node) + if name is None: + return node + kws = _kw_names(node) + if name == 'entry': + self._apply_entry_or_order(kws, is_strategy_order=False) + elif name == 'order': + self._apply_entry_or_order(kws, is_strategy_order=True) + elif name == 'exit': + self._apply_exit(kws) + elif name in ('close', 'close_all'): + self._reqs[_FLAG_MARKET] = True + return node + + # === Detection rules (see design doc, "Detektálható Minták" table) === + + def _apply_entry_or_order(self, kws: set[str], *, is_strategy_order: bool) -> None: + has_limit = 'limit' in kws + has_stop = 'stop' in kws + if is_strategy_order: + self._reqs[_FLAG_STRATEGY_ORDER] = True + if has_limit and has_stop: + self._reqs[_FLAG_STOP_LIMIT] = True + self._reqs[_FLAG_LIMIT] = True + self._reqs[_FLAG_STOP] = True + elif has_limit: + self._reqs[_FLAG_LIMIT] = True + elif has_stop: + self._reqs[_FLAG_STOP] = True + else: + self._reqs[_FLAG_MARKET] = True + + def _apply_exit(self, kws: set[str]) -> None: + has_limit = 'limit' in kws + has_stop = 'stop' in kws + has_profit_ticks = 'profit' in kws + has_loss_ticks = 'loss' in kws + has_trail = ( + 'trail_offset' in kws or 'trail_price' in kws or 'trail_points' in kws + ) + + # Full OCA-reduce bracket (both TP and SL) + if (has_limit and has_stop) or (has_profit_ticks and has_loss_ticks): + self._reqs[_FLAG_BRACKET] = True + self._reqs[_FLAG_LIMIT] = True + self._reqs[_FLAG_STOP] = True + else: + if has_limit or has_profit_ticks: + self._reqs[_FLAG_LIMIT] = True + if has_stop or has_loss_ticks: + self._reqs[_FLAG_STOP] = True + if has_trail: + self._reqs[_FLAG_TRAIL] = True + + # === AST injection === + + def _inject_requirements(self, module: ast.Module, decorator: ast.Call) -> None: + """Append ``_broker_requirements=ScriptRequirements(...)`` and add an import.""" + # Build: ScriptRequirements(flag=True, ...) 
+ req_call = ast.Call( + func=ast.Name(id='ScriptRequirements', ctx=ast.Load()), + args=[], + keywords=[ + ast.keyword(arg=flag, value=ast.Constant(value=value)) + for flag, value in self._reqs.items() if value + ], + ) + # Remove any existing _broker_requirements keyword (idempotency) + decorator.keywords = [kw for kw in decorator.keywords + if kw.arg != '_broker_requirements'] + decorator.keywords.append( + ast.keyword(arg='_broker_requirements', value=req_call) + ) + + # Add the import if the module does not already have it. We insert + # as the first statement; ``ImportLifter`` runs before us, so any + # docstring is still the zeroth statement. + if not self._has_script_requirements_import(module): + import_node = ast.ImportFrom( + module='pynecore.core.broker.models', + names=[ast.alias(name='ScriptRequirements', asname=None)], + level=0, + ) + # Insert after the module docstring (if any) to keep it valid + insert_at = 0 + if (module.body and isinstance(module.body[0], ast.Expr) + and isinstance(module.body[0].value, ast.Constant) + and isinstance(module.body[0].value.value, str)): + insert_at = 1 + module.body.insert(insert_at, import_node) + + @staticmethod + def _has_script_requirements_import(module: ast.Module) -> bool: + for stmt in module.body: + if isinstance(stmt, ast.ImportFrom) and stmt.module == 'pynecore.core.broker.models': + for alias in stmt.names: + if alias.name == 'ScriptRequirements': + return True + return False diff --git a/tests/t00_pynecore/core/test_021_broker_models.py b/tests/t00_pynecore/core/test_021_broker_models.py new file mode 100644 index 0000000..22a4b8a --- /dev/null +++ b/tests/t00_pynecore/core/test_021_broker_models.py @@ -0,0 +1,79 @@ +""" +Unit tests for the broker plugin data models. + +Focus on the stable diff keys (:attr:`intent_key`) used by the Order Sync +Engine and the tick-unresolved flag on :class:`ExitIntent`. 
+""" +from pynecore.core.broker.models import ( + OrderType, + EntryIntent, + ExitIntent, + CloseIntent, + CancelIntent, + ScriptRequirements, +) + + +def __test_entry_intent_key_is_pine_id__(): + intent = EntryIntent( + pine_id="Long", + symbol="BTCUSDT", + side="buy", + qty=1.0, + order_type=OrderType.MARKET, + ) + assert intent.intent_key == "Long" + + +def __test_exit_intent_key_combines_pine_id_and_from_entry__(): + """ + Two strategy.exit(id="TP") calls with different from_entry values must + produce different intent_keys — otherwise the sync engine would treat + them as the same intent and lose one. + """ + a = ExitIntent(pine_id="TP", from_entry="Long", symbol="BTCUSDT", + side="sell", qty=1.0) + b = ExitIntent(pine_id="TP", from_entry="Short", symbol="BTCUSDT", + side="buy", qty=1.0) + assert a.intent_key != b.intent_key + assert a.intent_key == "TP\0Long" + assert b.intent_key == "TP\0Short" + + +def __test_exit_intent_has_unresolved_ticks__(): + """ + Tick-based exit against an unfilled entry cannot compute absolute + prices yet; the sync engine must defer until the entry fills. 
+ """ + resolved = ExitIntent(pine_id="TP", from_entry="L", symbol="S", + side="sell", qty=1.0, tp_price=50000.0) + unresolved = ExitIntent(pine_id="TP", from_entry="L", symbol="S", + side="sell", qty=1.0, profit_ticks=100.0) + assert resolved.has_unresolved_ticks is False + assert unresolved.has_unresolved_ticks is True + + +def __test_close_intent_key_is_pine_id__(): + intent = CloseIntent(pine_id="Long", symbol="BTCUSDT", + side="sell", qty=1.0) + assert intent.intent_key == "Long" + + +def __test_cancel_intent_key_with_and_without_from_entry__(): + bare = CancelIntent(pine_id="TP", symbol="BTCUSDT") + scoped = CancelIntent(pine_id="TP", symbol="BTCUSDT", from_entry="Long") + assert bare.intent_key == "TP" + assert scoped.intent_key == "TP\0Long" + + +def __test_script_requirements_defaults_all_false__(): + """ + The AST detector should only set flags for features the script uses — + the default must start with everything off. + """ + reqs = ScriptRequirements() + assert not any([ + reqs.market_orders, reqs.limit_orders, reqs.stop_orders, + reqs.stop_limit_orders, reqs.tp_sl_bracket, + reqs.trailing_stop, reqs.strategy_order, + ]) diff --git a/tests/t00_pynecore/core/test_022_broker_position.py b/tests/t00_pynecore/core/test_022_broker_position.py new file mode 100644 index 0000000..991c891 --- /dev/null +++ b/tests/t00_pynecore/core/test_022_broker_position.py @@ -0,0 +1,166 @@ +""" +Unit tests for :class:`BrokerPosition`. + +Covers entry accounting (open, add), exit accounting (full close, partial +close, FIFO close across multiple entries), side flip in a single fill, +mark-to-market, and liquidation. 
+""" +from types import SimpleNamespace + +import pytest + +from pynecore import lib +from pynecore.core.broker.position import BrokerPosition +from pynecore.core.broker.models import ( + ExchangeOrder, + OrderEvent, + OrderStatus, + OrderType, + LegType, +) + + +@pytest.fixture(autouse=True) +def _stub_script(): + """Give :attr:`lib._script.initial_capital` a stable value for equity.""" + prev = lib._script + lib._script = SimpleNamespace(initial_capital=1_000_000.0) + try: + yield + finally: + lib._script = prev + + +def _fill(side: str, qty: float, price: float, *, + pine_id: str = "Long", leg: LegType = LegType.ENTRY, + fee: float = 0.0) -> OrderEvent: + """Build an OrderEvent as a plugin would emit it.""" + order = ExchangeOrder( + id=f"xchg-{pine_id}-{side}-{qty}", + symbol="BTCUSDT", + side=side, + order_type=OrderType.MARKET, + qty=qty, + filled_qty=qty, + remaining_qty=0.0, + price=None, + stop_price=None, + average_fill_price=price, + status=OrderStatus.FILLED, + timestamp=0.0, + fee=fee, + fee_currency="USDT", + ) + return OrderEvent( + order=order, + event_type="filled", + fill_price=price, + fill_qty=qty, + timestamp=0.0, + pine_id=pine_id, + from_entry=None, + leg_type=leg, + fee=fee, + fee_currency="USDT", + ) + + +def __test_record_fill_opens_long__(): + p = BrokerPosition() + assert not p.record_fill(_fill("buy", 2.0, 50_000.0, fee=1.0)) + assert p.size == 2.0 + assert p.sign == 1.0 + assert p.avg_price == 50_000.0 + assert len(p.open_trades) == 1 + assert p.open_trades[0].entry_price == 50_000.0 + assert p.open_commission == 1.0 + + +def __test_record_fill_adds_to_long_updates_avg_price__(): + p = BrokerPosition() + p.record_fill(_fill("buy", 1.0, 40_000.0)) + p.record_fill(_fill("buy", 3.0, 48_000.0)) + assert p.size == 4.0 + # weighted average: (1*40000 + 3*48000) / 4 = 46000 + assert p.avg_price == pytest.approx(46_000.0) + assert len(p.open_trades) == 2 + + +def __test_record_fill_full_close_realizes_profit__(): + p = BrokerPosition() + 
p.record_fill(_fill("buy", 1.0, 40_000.0)) + flipped = p.record_fill(_fill("sell", 1.0, 42_000.0, pine_id="TP", + leg=LegType.TAKE_PROFIT)) + assert p.size == 0.0 + assert flipped is True + assert len(p.open_trades) == 0 + assert len(p.closed_trades) == 1 + # profit = (42000 - 40000) * 1 - 0 commission + assert p.netprofit == pytest.approx(2_000.0) + assert p.wintrades == 1 + + +def __test_record_fill_partial_close_splits_trade__(): + """Closing half a long-only trade must split it — remaining stays open.""" + p = BrokerPosition() + p.record_fill(_fill("buy", 4.0, 50_000.0)) + p.record_fill(_fill("sell", 1.0, 52_000.0, pine_id="TP", + leg=LegType.TAKE_PROFIT)) + assert p.size == 3.0 + assert len(p.open_trades) == 1 + assert p.open_trades[0].size == 3.0 + assert len(p.closed_trades) == 1 + assert p.closed_trades[0].size == 1.0 + assert p.netprofit == pytest.approx(2_000.0) + + +def __test_record_fill_fifo_closes_oldest_first__(): + """With two entries, a partial close consumes the oldest first (FIFO).""" + p = BrokerPosition() + p.record_fill(_fill("buy", 1.0, 40_000.0, pine_id="E1")) + p.record_fill(_fill("buy", 1.0, 50_000.0, pine_id="E2")) + p.record_fill(_fill("sell", 1.0, 60_000.0, pine_id="TP", + leg=LegType.TAKE_PROFIT)) + assert p.size == 1.0 + assert len(p.open_trades) == 1 + assert p.open_trades[0].entry_price == 50_000.0 + assert p.closed_trades[0].entry_price == 40_000.0 + assert p.netprofit == pytest.approx(20_000.0) + + +def __test_record_fill_side_flip_in_single_event__(): + """Selling more than the open long flips the position to short.""" + p = BrokerPosition() + p.record_fill(_fill("buy", 1.0, 50_000.0)) + flipped = p.record_fill(_fill("sell", 3.0, 52_000.0, pine_id="Flip", + leg=LegType.ENTRY)) + assert flipped is True + assert p.size == pytest.approx(-2.0) + assert p.sign == -1.0 + assert p.avg_price == 52_000.0 + assert len(p.open_trades) == 1 + assert p.closed_trades[0].exit_price == 52_000.0 + + +def 
__test_update_unrealized_pnl_marks_to_market__(): + p = BrokerPosition() + p.record_fill(_fill("buy", 2.0, 40_000.0)) + p.update_unrealized_pnl(45_000.0) + # (45000 - 40000) * 2 = 10000 + assert p.openprofit == pytest.approx(10_000.0) + # equity = initial + net + open + assert p.equity == pytest.approx(1_010_000.0) + + +def __test_record_liquidation_closes_everything__(): + p = BrokerPosition() + p.record_fill(_fill("buy", 2.0, 50_000.0)) + liq = _fill("sell", 2.0, 45_000.0, pine_id="LIQ", leg=LegType.CLOSE) + p.record_liquidation(liq) + assert p.size == 0.0 + assert p.sign == 0.0 + assert p.openprofit == 0.0 + assert len(p.open_trades) == 0 + # Liquidated at loss → netprofit negative + assert p.netprofit == pytest.approx(-10_000.0) + assert p.losstrades == 1 diff --git a/tests/t00_pynecore/core/test_023_script_requirements.py b/tests/t00_pynecore/core/test_023_script_requirements.py new file mode 100644 index 0000000..f3a0013 --- /dev/null +++ b/tests/t00_pynecore/core/test_023_script_requirements.py @@ -0,0 +1,208 @@ +""" +Tests for :class:`ScriptRequirementsTransformer` detection and for the +startup-time :func:`validate_at_startup` pure function. + +The transformer tests run the transformer directly on synthetic AST modules +and assert on the injected ``_broker_requirements`` keyword of the +``@script.strategy(...)`` decorator — no ScriptRunner, no actual execution. 
+""" +from __future__ import annotations + +import ast +import textwrap + +from pynecore.core.broker.models import ScriptRequirements, ExchangeCapabilities +from pynecore.core.broker.validation import validate_at_startup +from pynecore.transformers.script_requirements import ScriptRequirementsTransformer + + +def _transform(src: str) -> ast.Module: + tree = ast.parse(textwrap.dedent(src)) + return ScriptRequirementsTransformer().visit(tree) + + +def _get_requirements_keyword(tree: ast.Module) -> dict[str, bool] | None: + """Return the flags dict injected into @script.strategy's call, or None.""" + for node in ast.walk(tree): + if isinstance(node, ast.FunctionDef): + for dec in node.decorator_list: + if (isinstance(dec, ast.Call) + and isinstance(dec.func, ast.Attribute) + and dec.func.attr == 'strategy'): + for kw in dec.keywords: + if kw.arg == '_broker_requirements' and isinstance(kw.value, ast.Call): + return { + k.arg: k.value.value # type: ignore[attr-defined] + for k in kw.value.keywords + if k.arg is not None + } + return None + + +def __test_indicator_script_has_no_injection__(): + """A script without @script.strategy must be left alone.""" + tree = _transform(""" + @script.indicator('Foo') + def main(): + pass + """) + assert _get_requirements_keyword(tree) is None + + +def __test_market_only_entry_detects_market_orders__(): + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.entry('Long', strategy.long, qty=1) + """) + assert _get_requirements_keyword(tree) == {'market_orders': True} + + +def __test_entry_with_limit_detects_limit_orders__(): + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.entry('Long', strategy.long, qty=1, limit=50000.0) + """) + assert _get_requirements_keyword(tree) == {'limit_orders': True} + + +def __test_entry_with_stop_detects_stop_orders__(): + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.entry('Long', strategy.long, qty=1, stop=45000.0) + """) + assert 
_get_requirements_keyword(tree) == {'stop_orders': True} + + +def __test_entry_with_limit_and_stop_detects_stop_limit__(): + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.entry('Long', strategy.long, qty=1, limit=50000.0, stop=49500.0) + """) + flags = _get_requirements_keyword(tree) + assert flags == { + 'limit_orders': True, 'stop_orders': True, 'stop_limit_orders': True, + } + + +def __test_exit_price_bracket_detects_tp_sl__(): + """strategy.exit with both limit and stop → OCA reduce bracket.""" + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.exit('TP', from_entry='Long', limit=60000.0, stop=45000.0) + """) + flags = _get_requirements_keyword(tree) + assert flags == { + 'limit_orders': True, 'stop_orders': True, 'tp_sl_bracket': True, + } + + +def __test_exit_tick_bracket_detects_tp_sl__(): + """strategy.exit with profit+loss ticks also requires the bracket capability.""" + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.exit('TP', from_entry='Long', profit=100, loss=50) + """) + flags = _get_requirements_keyword(tree) + assert flags == { + 'limit_orders': True, 'stop_orders': True, 'tp_sl_bracket': True, + } + + +def __test_exit_trail_offset_detects_trailing_stop__(): + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.exit('TR', from_entry='Long', trail_offset=50, trail_points=100) + """) + flags = _get_requirements_keyword(tree) + assert flags == {'trailing_stop': True} + + +def __test_strategy_order_detects_strategy_order_flag__(): + """strategy.order() bypasses pyramiding → needs its own capability flag.""" + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.order('X', strategy.long, qty=1) + """) + flags = _get_requirements_keyword(tree) + assert flags == {'market_orders': True, 'strategy_order': True} + + +def __test_close_detects_market_orders__(): + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.close('Long') + """) 
+ assert _get_requirements_keyword(tree) == {'market_orders': True} + + +def __test_import_is_injected_when_requirements_present__(): + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.entry('Long', strategy.long, qty=1, limit=50000.0) + """) + imports = [stmt for stmt in tree.body if isinstance(stmt, ast.ImportFrom)] + assert any( + imp.module == 'pynecore.core.broker.models' + and any(a.name == 'ScriptRequirements' for a in imp.names) + for imp in imports + ) + + +def __test_lib_strategy_prefix_is_also_detected__(): + """After ImportNormalizer rewrites, calls appear as lib.strategy.entry(...).""" + tree = _transform(""" + @script.strategy('S') + def main(): + lib.strategy.entry('Long', lib.strategy.long, qty=1, limit=50000.0) + """) + assert _get_requirements_keyword(tree) == {'limit_orders': True} + + +def __test_lib_script_strategy_decorator_is_detected__(): + """ + The ``@script.strategy(...)`` decorator is rewritten to + ``@lib.script.strategy(...)`` by ``ImportNormalizer``; injection must + still reach it. 
+ """ + tree = _transform(""" + @lib.script.strategy('S') + def main(): + lib.strategy.entry('Long', lib.strategy.long, qty=1) + """) + assert _get_requirements_keyword(tree) == {'market_orders': True} + + +# === validate_at_startup === + +def __test_validate_empty_when_requirements_satisfied__(): + reqs = ScriptRequirements(tp_sl_bracket=True) + caps = ExchangeCapabilities(tp_sl_bracket=True) + assert validate_at_startup(reqs, caps) == [] + + +def __test_validate_reports_missing_bracket__(): + reqs = ScriptRequirements(tp_sl_bracket=True) + caps = ExchangeCapabilities() + errors = validate_at_startup(reqs, caps) + assert len(errors) == 1 + assert 'TP+SL' in errors[0] + + +def __test_validate_collects_all_missing_capabilities__(): + reqs = ScriptRequirements( + stop_orders=True, stop_limit_orders=True, + tp_sl_bracket=True, trailing_stop=True, + ) + caps = ExchangeCapabilities() + errors = validate_at_startup(reqs, caps) + assert len(errors) == 4 diff --git a/tests/t01_lib/t30_strategy/test_001_barupdn.toml b/tests/t01_lib/t30_strategy/test_001_barupdn.toml new file mode 100644 index 0000000..ed9a90f --- /dev/null +++ b/tests/t01_lib/t30_strategy/test_001_barupdn.toml @@ -0,0 +1,48 @@ +# Indicator / Strategy / Library Settings + +[script] +#overlay = true +#format = "inherit" +#precision = +#scale = +#pyramiding = 1 +#calc_on_order_fills = false +#calc_on_every_tick = false +#max_bars_back = 0 +#timeframe = +#timeframe_gaps = true +#explicit_plot_zorder = false +#max_lines_count = 50 +#max_labels_count = 50 +#max_boxes_count = 50 +#calc_bars_count = 0 +#max_polylines_count = 50 +#dynamic_requests = false +#behind_chart = true +#backtest_fill_limits_assumption = 0 +#default_qty_type = "percent_of_equity" +#default_qty_value = 10 +#initial_capital = 1000000 +#currency = "NONE" +#slippage = 0 +#commission_type = "percent" +#commission_value = 0.0 +#process_orders_on_close = false +#close_entries_rule = "FIFO" +#margin_long = 100.0 +#margin_short = 100.0 
+#risk_free_rate = 2.0 +#use_bar_magnifier = true +#fill_orders_on_standard_ohlc = false + +# Input Settings + +[inputs.maxIdLossPcnt] +# Input metadata, cannot be modified +# input_type: "float" +# defval: 1 +# title: "Max intraday loss (%)" +# inline: false +# confirm: false +# Change here to modify the input value +#value = From 21ec06ca7fc72cbb088247a6ad2d838fc08f615b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Tue, 14 Apr 2026 15:35:18 +0200 Subject: [PATCH 29/64] feat(broker): add order intent sync engine Add pure intent building for Pine strategy orders and broker dispatch. Introduce an order sync engine that handles intent diffing, interceptor updates, tick-based exit deferral, fill event routing, and reconciliation. Add tests covering intent translation, dispatch changes, deferred exits, stream handling, interceptors, and exchange position reconciliation. --- src/pynecore/core/broker/intent_builder.py | 208 +++++++++ src/pynecore/core/broker/sync_engine.py | 437 ++++++++++++++++++ .../core/test_024_intent_builder.py | 205 ++++++++ .../core/test_025_order_sync_engine.py | 421 +++++++++++++++++ 4 files changed, 1271 insertions(+) create mode 100644 src/pynecore/core/broker/intent_builder.py create mode 100644 src/pynecore/core/broker/sync_engine.py create mode 100644 tests/t00_pynecore/core/test_024_intent_builder.py create mode 100644 tests/t00_pynecore/core/test_025_order_sync_engine.py diff --git a/src/pynecore/core/broker/intent_builder.py b/src/pynecore/core/broker/intent_builder.py new file mode 100644 index 0000000..1c20d78 --- /dev/null +++ b/src/pynecore/core/broker/intent_builder.py @@ -0,0 +1,208 @@ +""" +Translate the Pine ``strategy.*`` order book into broker intent objects. 
+ +The :class:`~pynecore.lib.strategy.SimPosition` maintains two dictionaries +of pending Pine orders created by ``strategy.entry``, ``strategy.order``, +``strategy.exit``, ``strategy.close`` and ``strategy.close_all``: + +- ``position.entry_orders[pine_id]`` — entries and strategy-order adds +- ``position.exit_orders[from_entry]`` — exits, closes and close-all + +For live trading the :class:`~pynecore.core.broker.sync_engine.OrderSyncEngine` +consumes these via :func:`build_intents`, which returns a flat list of +:class:`EntryIntent`, :class:`ExitIntent` and :class:`CloseIntent` +dataclasses the broker plugin can act on. The translation is **pure**: +no access to ``lib._script`` or ``syminfo``; the caller supplies the +symbol and keeps tick/mintick resolution inside the sync engine. +""" +from __future__ import annotations + +from typing import Iterable + +from pynecore.core.broker.models import ( + EntryIntent, + ExitIntent, + CloseIntent, + OrderType, +) +from pynecore.lib.strategy import ( + Order, + _order_type_normal, + _order_type_close, +) +from pynecore.types.na import NA + +__all__ = [ + 'build_intents', + 'build_entry_intent', + 'build_exit_intent', + 'build_close_intent', +] + +# Prefixes set by ``strategy.close`` / ``strategy.close_all`` in +# ``lib/strategy/__init__.py``. Used to distinguish a CloseIntent from an +# ExitIntent when both share ``_order_type_close``. +_CLOSE_EXIT_ID_PREFIX = "Close entry(s) order " +_CLOSE_ALL_EXIT_ID = "Close position order" + + +def _side_from_size(size: float) -> str: + """Signed Pine size → ``"buy"``/``"sell"``. + + Pine uses signed sizes: positive for long, negative for short. Exit/close + orders always carry the **opposite** sign of the position, so a long close + has a negative size → ``"sell"``, matching the exchange-side semantics. 
+ """ + return "buy" if size > 0 else "sell" + + +def _infer_order_type(limit: float | None, stop: float | None) -> OrderType: + if limit is not None and stop is not None: + return OrderType.STOP_LIMIT + if limit is not None: + return OrderType.LIMIT + if stop is not None: + return OrderType.STOP + return OrderType.MARKET + + +def _na_to_none(value): + """Strip Pine :class:`NA` markers; leave concrete values alone.""" + return None if isinstance(value, NA) else value + + +def _coerce_oca(order: Order) -> tuple[str | None, str | None]: + """Return a protocol-friendly ``(oca_name, oca_type)`` pair. + + ``Order`` always stores a non-None ``oca_type`` (defaults to :data:`oca.none`) + even when no OCA participation is requested. The intent layer uses + ``None`` to mean "not in an OCA group" — so only emit ``oca_type`` when + the order actually names a group. + """ + if order.oca_name is None: + return None, None + return order.oca_name, str(order.oca_type) if order.oca_type is not None else None + + +def build_entry_intent(order: Order, symbol: str) -> EntryIntent: + """Translate a ``strategy.entry`` / ``strategy.order`` Pine order.""" + oca_name, oca_type_str = _coerce_oca(order) + return EntryIntent( + pine_id=order.order_id or "", + symbol=symbol, + side=_side_from_size(order.size), + qty=abs(order.size), + order_type=_infer_order_type(order.limit, order.stop), + limit=order.limit, + stop=order.stop, + oca_name=oca_name, + oca_type=oca_type_str, + comment=_na_to_none(order.comment), + alert_message=_na_to_none(order.alert_message), + is_strategy_order=(order.order_type == _order_type_normal), + ) + + +def build_exit_intent(order: Order, symbol: str) -> ExitIntent: + """Translate a ``strategy.exit`` Pine order. + + Tick-based exits (``profit=``/``loss=``/``trail_points=``) carry unresolved + distances: the plugin cannot place absolute TP/SL prices until the entry + fill price is known. 
The intent preserves the tick values + **alongside** empty ``tp_price``/``sl_price`` fields; the sync engine + resolves them on the corresponding entry fill event. + """ + oca_name, oca_type_str = _coerce_oca(order) + # Tick values take priority over explicit prices — mirrors Pine's + # exit() fill-time logic where profit_ticks overwrites order.limit. + tp_price = order.limit if order.profit_ticks is None else None + sl_price = order.stop if order.loss_ticks is None else None + trail_price = order.trail_price if order.trail_points_ticks is None else None + has_trail = ( + order.trail_price is not None or order.trail_points_ticks is not None + ) + trail_offset = order.trail_offset if has_trail else None + return ExitIntent( + pine_id=order.exit_id or "", + from_entry=order.order_id or "", + symbol=symbol, + side=_side_from_size(order.size), + qty=abs(order.size), + tp_price=tp_price, + sl_price=sl_price, + trail_price=trail_price, + trail_offset=trail_offset, + profit_ticks=order.profit_ticks, + loss_ticks=order.loss_ticks, + trail_points_ticks=order.trail_points_ticks, + oca_name=oca_name, + oca_type=oca_type_str, + comment=_na_to_none(order.comment), + comment_profit=_na_to_none(order.comment_profit), + comment_loss=_na_to_none(order.comment_loss), + comment_trailing=_na_to_none(order.comment_trailing), + alert_message=_na_to_none(order.alert_message), + ) + + +def build_close_intent(order: Order, symbol: str, *, is_close_all: bool) -> CloseIntent: + """Translate ``strategy.close(id)`` or ``strategy.close_all()``. + + ``strategy.close_all()`` uses an empty ``pine_id`` — the Pine order itself + has ``order_id=None`` because the close targets the whole position rather + than a specific entry identifier. 
+ """ + pine_id = "" if is_close_all else (order.order_id or "") + return CloseIntent( + pine_id=pine_id, + symbol=symbol, + side=_side_from_size(order.size), + qty=abs(order.size), + immediately=False, + comment=_na_to_none(order.comment), + alert_message=_na_to_none(order.alert_message), + ) + + +def _classify_exit_side(order: Order) -> str: + """``'close_all' | 'close' | 'exit'`` — the exit_orders dict is polymorphic.""" + if order.exit_id == _CLOSE_ALL_EXIT_ID: + return 'close_all' + if order.exit_id and order.exit_id.startswith(_CLOSE_EXIT_ID_PREFIX): + return 'close' + return 'exit' + + +def build_intents( + entry_orders: dict, + exit_orders: dict, + symbol: str, +) -> list[EntryIntent | ExitIntent | CloseIntent]: + """Flatten a position's pending orders into intent objects. + + ``entry_orders`` and ``exit_orders`` are the ``dict``s that + :class:`~pynecore.lib.strategy.SimPosition` exposes. Orders already + marked ``cancelled`` (e.g. via OCA or :func:`strategy.cancel`) are + filtered out — the caller treats their absence as an implicit cancel. 
+ """ + intents: list[EntryIntent | ExitIntent | CloseIntent] = [] + + for order in _active(entry_orders.values()): + intents.append(build_entry_intent(order, symbol)) + + for order in _active(exit_orders.values()): + kind = _classify_exit_side(order) + if kind == 'close_all': + intents.append(build_close_intent(order, symbol, is_close_all=True)) + elif kind == 'close': + intents.append(build_close_intent(order, symbol, is_close_all=False)) + else: + intents.append(build_exit_intent(order, symbol)) + + return intents + + +def _active(orders: Iterable[Order]) -> Iterable[Order]: + for order in orders: + if not order.cancelled: + yield order diff --git a/src/pynecore/core/broker/sync_engine.py b/src/pynecore/core/broker/sync_engine.py new file mode 100644 index 0000000..5c2e95e --- /dev/null +++ b/src/pynecore/core/broker/sync_engine.py @@ -0,0 +1,437 @@ +""" +:class:`OrderSyncEngine` — the bridge between the Pine Script order book and +a :class:`~pynecore.core.plugin.broker.BrokerPlugin`. + +On each bar the engine: + +1. Drains any :class:`OrderEvent` objects that the broker posted + asynchronously (via :meth:`on_order_event`), routing fills to the + :class:`~pynecore.core.broker.position.BrokerPosition` and unfreezing + tick-based exits once their entry fill price is known. +2. Builds intents from the position's pending order dicts. +3. Runs the interceptor chain to let extensions reject or amend intents. +4. Diffs the resulting intent set against the previously-active one and + dispatches the **new**, **modified** and **removed** intents to the + plugin — tick-deferred exits are held back until the referenced entry + has filled. +5. Every ``reconcile_every_n_syncs`` calls (optional) performs a read-side + state reconciliation with the exchange. + +The engine is synchronous; the broker plugin is async. :meth:`_run_async` +bridges the two, using ``run_coroutine_threadsafe`` on a background event +loop in live mode and ``asyncio.run`` for single-shot unit tests. 
+""" +from __future__ import annotations + +import asyncio +import dataclasses +import logging +import queue +from collections.abc import Callable +from typing import TYPE_CHECKING, Any + +from pynecore.core.broker.intent_builder import build_intents +from pynecore.core.broker.models import ( + CancelIntent, + CloseIntent, + EntryIntent, + ExitIntent, + InterceptorResult, + LegType, + OrderEvent, +) + +if TYPE_CHECKING: + from pynecore.core.broker.position import BrokerPosition + from pynecore.core.plugin.broker import BrokerPlugin + +__all__ = ['OrderSyncEngine'] + +_log = logging.getLogger(__name__) + +Intent = EntryIntent | ExitIntent | CloseIntent + + +class OrderSyncEngine: + """Translate Pine orders to broker calls and route fills back. + + :param broker: The concrete :class:`BrokerPlugin` instance to drive. + :param position: The live :class:`BrokerPosition` this engine updates. + :param symbol: The trading symbol (as the plugin expects it). + :param event_loop: A running ``asyncio`` loop on which to execute the + broker's coroutines. Pass ``None`` for unit tests — each broker call + will then spin up a transient loop via ``asyncio.run``. + :param execute_timeout: Seconds to wait for any single ``execute_*`` + coroutine when bridging from a background loop. + :param reconcile_every_n_syncs: If non-zero, perform a read-side + reconciliation every N :meth:`sync` calls. + :param mintick: Symbol minimum tick — used to resolve tick-based exits + (``profit=`` / ``loss=`` / ``trail_points=``) into absolute prices. 
+ """ + + def __init__( + self, + broker: 'BrokerPlugin', + position: 'BrokerPosition', + symbol: str, + *, + event_loop: asyncio.AbstractEventLoop | None = None, + execute_timeout: float = 30.0, + reconcile_every_n_syncs: int = 0, + mintick: float = 0.01, + ) -> None: + self._broker = broker + self._position = position + self._symbol = symbol + self._loop = event_loop + self._timeout = execute_timeout + self._reconcile_every = reconcile_every_n_syncs + self._mintick = mintick + + self._active_intents: dict[str, Intent] = {} + self._order_mapping: dict[str, list[str]] = {} + self._deferred_exits: dict[str, ExitIntent] = {} + self._event_queue: queue.Queue[OrderEvent] = queue.Queue() + self._interceptors: list[Callable[[Intent], InterceptorResult]] = [] + self._sync_count = 0 + + # === Public API === + + @property + def active_intents(self) -> dict[str, Intent]: + return self._active_intents + + @property + def deferred_exits(self) -> dict[str, ExitIntent]: + return self._deferred_exits + + @property + def order_mapping(self) -> dict[str, list[str]]: + return self._order_mapping + + def register_interceptor( + self, fn: Callable[[Intent], InterceptorResult], + ) -> None: + """Add an interceptor that may reject or amend intents before dispatch.""" + self._interceptors.append(fn) + + def on_order_event(self, event: OrderEvent) -> None: + """Queue a broker :class:`OrderEvent` for processing on the next sync. + + Called from the :meth:`run_event_stream` background task or by + tests injecting synthetic events. + """ + self._event_queue.put(event) + + async def run_event_stream(self) -> None: + """Drain :meth:`BrokerPlugin.watch_orders` into the event queue. + + Meant to run as a long-lived task on the shared live-provider event + loop. If the plugin does not implement WebSocket streaming, the + method logs and returns — the engine then relies on + :meth:`reconcile` for fill detection. 
+ """ + try: + stream = self._broker.watch_orders() + except NotImplementedError: + _log.info( + "broker does not implement watch_orders; " + "reconcile() will poll for fills instead", + ) + return + try: + async for event in stream: + self._event_queue.put(event) + except NotImplementedError: + _log.info( + "broker does not implement watch_orders; " + "reconcile() will poll for fills instead", + ) + return + except asyncio.CancelledError: + raise + except Exception: # pragma: no cover — defensive + _log.exception("watch_orders stream terminated with an error") + raise + + def sync(self) -> None: + """Run one diff/dispatch cycle. + + Reads the Pine order book from ``position.entry_orders`` and + ``position.exit_orders``, resolves tick-deferred exits where the + referenced entry price is now known, and dispatches whatever + changed to the broker plugin. + """ + self._drain_events() + + raw = build_intents( + self._position.entry_orders, + self._position.exit_orders, + self._symbol, + ) + resolved = [self._resolve_ticks(i) for i in raw] + final = self._apply_interceptors(resolved) + + dispatchable: list[Intent] = [] + new_deferred: dict[str, ExitIntent] = {} + for i in final: + if isinstance(i, ExitIntent) and i.has_unresolved_ticks: + new_deferred[i.from_entry] = i + else: + dispatchable.append(i) + self._deferred_exits = new_deferred + + self._diff_and_dispatch(dispatchable) + + self._sync_count += 1 + if self._reconcile_every and self._sync_count % self._reconcile_every == 0: + self.reconcile() + + def reconcile(self) -> None: + """Read-side state reconciliation with the exchange. + + The exchange is authoritative for state. Any mismatch between our + tracking and ``get_open_orders`` / ``get_position`` is logged and + the local tracking is overwritten. No orders are ever **sent** + from a reconciliation pass — that would risk duplicate entries. 
+ """ + orders = self._run_async(self._broker.get_open_orders(self._symbol)) + tracked_ids: set[str] = set() + for ids in self._order_mapping.values(): + tracked_ids.update(ids) + exchange_ids = {o.id for o in orders} + stale = tracked_ids - exchange_ids + if stale: + _log.warning( + "tracked orders missing from exchange: %s", stale, + ) + untracked = exchange_ids - tracked_ids + if untracked: + _log.info( + "unknown orders on exchange (not bot-owned): %s", untracked, + ) + + exch_pos = self._run_async(self._broker.get_position(self._symbol)) + if exch_pos is not None and exch_pos.size != self._position.size: + _log.warning( + "position size mismatch (exchange=%s, internal=%s) — " + "adopting exchange", + exch_pos.size, self._position.size, + ) + self._position.size = exch_pos.size + self._position.sign = ( + 1.0 if exch_pos.size > 0.0 + else (-1.0 if exch_pos.size < 0.0 else 0.0) + ) + self._position.avg_price = exch_pos.entry_price + + # === Event routing === + + def _drain_events(self) -> None: + while True: + try: + event = self._event_queue.get_nowait() + except queue.Empty: + return + self._route_event(event) + + def _route_event(self, event: OrderEvent) -> None: + t = event.event_type + if t in ('filled', 'partial'): + self._position.record_fill(event) + if event.leg_type == LegType.ENTRY and event.pine_id: + self._resolve_deferred_for_entry(event.pine_id) + elif t == 'cancelled': + key = self._find_key_for_order_id(event.order.id) + if key is not None: + _log.error( + "unexpected cancel for intent %s (exchange order %s)", + key, event.order.id, + ) + self._order_mapping.pop(key, None) + self._active_intents.pop(key, None) + elif t == 'rejected': + key = self._find_key_for_order_id(event.order.id) + if key is not None: + _log.warning( + "order rejected for intent %s (exchange order %s)", + key, event.order.id, + ) + self._order_mapping.pop(key, None) + self._active_intents.pop(key, None) + + def _find_key_for_order_id(self, order_id: str) -> str | None: + 
for key, ids in self._order_mapping.items(): + if order_id in ids: + return key + return None + + def _resolve_deferred_for_entry(self, entry_id: str) -> None: + """An entry fill unblocks any exit that references it via ticks.""" + deferred = self._deferred_exits.pop(entry_id, None) + if deferred is None: + return + resolved = self._resolve_ticks(deferred) + if resolved.has_unresolved_ticks: + self._deferred_exits[entry_id] = deferred + return + self._dispatch_new(resolved) + self._active_intents[resolved.intent_key] = resolved + + # === Tick resolution === + + def _resolve_ticks(self, intent: Intent) -> Intent: + if not isinstance(intent, ExitIntent) or not intent.has_unresolved_ticks: + return intent + entry_price, entry_sign = self._find_entry_reference(intent.from_entry) + if entry_price is None: + return intent + return self._ticks_to_prices(intent, entry_price, entry_sign) + + def _find_entry_reference( + self, from_entry: str, + ) -> tuple[float | None, float]: + for trade in self._position.open_trades: + if trade.entry_id == from_entry: + return trade.entry_price, trade.sign + return None, 0.0 + + def _ticks_to_prices( + self, intent: ExitIntent, entry_price: float, entry_sign: float, + ) -> ExitIntent: + tp_price = intent.tp_price + sl_price = intent.sl_price + trail_price = intent.trail_price + if intent.profit_ticks is not None: + tp_price = entry_price + entry_sign * intent.profit_ticks * self._mintick + if intent.loss_ticks is not None: + sl_price = entry_price - entry_sign * intent.loss_ticks * self._mintick + if intent.trail_points_ticks is not None: + trail_price = ( + entry_price + entry_sign * intent.trail_points_ticks * self._mintick + ) + return dataclasses.replace( + intent, + tp_price=tp_price, + sl_price=sl_price, + trail_price=trail_price, + profit_ticks=None, + loss_ticks=None, + trail_points_ticks=None, + ) + + # === Interceptor chain === + + def _apply_interceptors(self, intents: list[Intent]) -> list[Intent]: + if not self._interceptors: 
+ return intents + out: list[Intent] = [] + for intent in intents: + current = intent + rejected = False + for fn in self._interceptors: + result = fn(current) + if result.rejected: + rejected = True + _log.info( + "intent %s rejected by interceptor: %s", + current.intent_key, result.reject_reason, + ) + break + current = self._apply_modifications(current, result) + if not rejected: + out.append(current) + return out + + @staticmethod + def _apply_modifications( + intent: Intent, result: InterceptorResult, + ) -> Intent: + mods: dict[str, Any] = {} + if result.modified_qty is not None: + mods['qty'] = result.modified_qty + if result.modified_limit is not None: + if isinstance(intent, ExitIntent): + mods['tp_price'] = result.modified_limit + elif isinstance(intent, EntryIntent): + mods['limit'] = result.modified_limit + if result.modified_stop is not None: + if isinstance(intent, ExitIntent): + mods['sl_price'] = result.modified_stop + elif isinstance(intent, EntryIntent): + mods['stop'] = result.modified_stop + return dataclasses.replace(intent, **mods) if mods else intent + + # === Diff + dispatch === + + def _diff_and_dispatch(self, intents: list[Intent]) -> None: + new_map: dict[str, Intent] = {i.intent_key: i for i in intents} + + for key in list(self._active_intents): + if key not in new_map: + old = self._active_intents.pop(key) + self._dispatch_cancel(old) + + for key, intent in new_map.items(): + if key not in self._active_intents: + self._dispatch_new(intent) + self._active_intents[key] = intent + elif intent != self._active_intents[key]: + self._dispatch_modify(self._active_intents[key], intent) + self._active_intents[key] = intent + # else: unchanged — skip + + def _dispatch_new(self, intent: Intent) -> None: + if isinstance(intent, EntryIntent): + orders = self._run_async(self._broker.execute_entry(intent)) + self._order_mapping[intent.intent_key] = [o.id for o in orders] + elif isinstance(intent, ExitIntent): + orders = 
self._run_async(self._broker.execute_exit(intent)) + self._order_mapping[intent.intent_key] = [o.id for o in orders] + elif isinstance(intent, CloseIntent): + order = self._run_async(self._broker.execute_close(intent)) + self._order_mapping[intent.intent_key] = [order.id] + + def _dispatch_modify(self, old: Intent, new: Intent) -> None: + if isinstance(new, EntryIntent) and isinstance(old, EntryIntent): + orders = self._run_async(self._broker.modify_entry(old, new)) + self._order_mapping[new.intent_key] = [o.id for o in orders] + elif isinstance(new, ExitIntent) and isinstance(old, ExitIntent): + orders = self._run_async(self._broker.modify_exit(old, new)) + self._order_mapping[new.intent_key] = [o.id for o in orders] + else: + # CloseIntent or mismatched kinds — cancel + re-execute. + self._dispatch_cancel(old) + self._dispatch_new(new) + + def _dispatch_cancel(self, old: Intent) -> None: + if isinstance(old, EntryIntent): + cancel = CancelIntent(pine_id=old.pine_id, symbol=self._symbol) + elif isinstance(old, ExitIntent): + cancel = CancelIntent( + pine_id=old.pine_id, + symbol=self._symbol, + from_entry=old.from_entry, + ) + else: + # CloseIntent is immediate market — nothing to cancel. + self._order_mapping.pop(old.intent_key, None) + return + self._run_async(self._broker.execute_cancel(cancel)) + self._order_mapping.pop(old.intent_key, None) + + # === Async bridge === + + def _run_async(self, coro): + """Run a broker coroutine synchronously from the engine's thread. + + In production the engine shares an event loop with the live + provider; calls hop to that loop via ``run_coroutine_threadsafe``. + In unit tests no loop is supplied — the coroutine is driven to + completion by a transient ``asyncio.run``. 
+ """ + if self._loop is None: + return asyncio.run(coro) + return asyncio.run_coroutine_threadsafe(coro, self._loop).result( + timeout=self._timeout, + ) diff --git a/tests/t00_pynecore/core/test_024_intent_builder.py b/tests/t00_pynecore/core/test_024_intent_builder.py new file mode 100644 index 0000000..41f5ec1 --- /dev/null +++ b/tests/t00_pynecore/core/test_024_intent_builder.py @@ -0,0 +1,205 @@ +""" +Tests for :func:`pynecore.core.broker.intent_builder.build_intents`. + +The builder is a pure translation of the Pine ``Order`` objects that +``strategy.entry/exit/order/close/close_all`` create into the broker +:class:`EntryIntent` / :class:`ExitIntent` / :class:`CloseIntent` dataclasses. +Tests construct ``Order`` instances directly — no ScriptRunner, no Pine +function call — and assert the resulting intent fields. +""" +from __future__ import annotations + +from pynecore.core.broker.intent_builder import build_intents +from pynecore.core.broker.models import ( + EntryIntent, + ExitIntent, + CloseIntent, + OrderType, +) +from pynecore.lib.strategy import ( + Order, + _order_type_normal, + _order_type_entry, + _order_type_close, +) +from pynecore.lib.strategy import oca as _oca + +SYMBOL = "BTCUSDT" + + +def _entry(order_id, size, **kw) -> Order: + return Order(order_id, size, order_type=_order_type_entry, **kw) + + +def _normal(order_id, size, **kw) -> Order: + return Order(order_id, size, order_type=_order_type_normal, **kw) + + +def _exit(from_entry, size, exit_id, **kw) -> Order: + return Order(from_entry, size, order_type=_order_type_close, + exit_id=exit_id, **kw) + + +def _close(id_, size) -> Order: + return Order(id_, size, order_type=_order_type_close, + exit_id=f"Close entry(s) order {id_}") + + +def _close_all(size) -> Order: + return Order(None, size, order_type=_order_type_close, + exit_id="Close position order") + + +# === Entry / Order === + +def __test_market_entry_produces_market_intent__(): + intents = build_intents({"L": _entry("L", 1.0)}, 
{}, SYMBOL) + assert intents == [EntryIntent( + pine_id="L", symbol=SYMBOL, side="buy", qty=1.0, + order_type=OrderType.MARKET, + )] + + +def __test_limit_entry__(): + intents = build_intents({"L": _entry("L", 1.0, limit=50_000.0)}, {}, SYMBOL) + i = intents[0] + assert isinstance(i, EntryIntent) + assert i.order_type is OrderType.LIMIT + assert i.limit == 50_000.0 and i.stop is None + + +def __test_stop_entry__(): + intents = build_intents({"L": _entry("L", 1.0, stop=49_000.0)}, {}, SYMBOL) + i = intents[0] + assert i.order_type is OrderType.STOP + assert i.stop == 49_000.0 and i.limit is None + + +def __test_stop_limit_entry__(): + intents = build_intents( + {"L": _entry("L", 1.0, limit=50_000.0, stop=49_500.0)}, {}, SYMBOL, + ) + i = intents[0] + assert i.order_type is OrderType.STOP_LIMIT + assert i.limit == 50_000.0 and i.stop == 49_500.0 + + +def __test_short_entry_side_is_sell__(): + intents = build_intents({"S": _entry("S", -1.0)}, {}, SYMBOL) + assert intents[0].side == "sell" + + +def __test_qty_is_absolute__(): + intents = build_intents({"S": _entry("S", -2.5)}, {}, SYMBOL) + assert intents[0].qty == 2.5 + + +def __test_strategy_order_sets_is_strategy_order__(): + intents = build_intents({"X": _normal("X", 1.0)}, {}, SYMBOL) + assert intents[0].is_strategy_order is True + + +def __test_strategy_entry_is_not_strategy_order__(): + intents = build_intents({"L": _entry("L", 1.0)}, {}, SYMBOL) + assert intents[0].is_strategy_order is False + + +# === OCA propagation === + +def __test_oca_group_is_propagated__(): + e = _entry("L", 1.0, limit=50_000.0, oca_name="grp", oca_type=_oca.cancel) + i = build_intents({"L": e}, {}, SYMBOL)[0] + assert i.oca_name == "grp" + assert i.oca_type == "cancel" + + +def __test_no_oca_means_both_none__(): + i = build_intents({"L": _entry("L", 1.0, limit=50_000.0)}, {}, SYMBOL)[0] + assert i.oca_name is None and i.oca_type is None + + +# === Exit === + +def __test_exit_with_prices_maps_tp_sl__(): + e = _exit("L", -1.0, "TP", 
limit=60_000.0, stop=45_000.0) + i = build_intents({}, {"L": e}, SYMBOL)[0] + assert isinstance(i, ExitIntent) + assert i.pine_id == "TP" and i.from_entry == "L" + assert i.tp_price == 60_000.0 and i.sl_price == 45_000.0 + assert i.profit_ticks is None and i.loss_ticks is None + assert i.has_unresolved_ticks is False + assert i.intent_key == "TP\0L" + + +def __test_exit_with_ticks_defers_resolution__(): + e = _exit("L", -1.0, "TP", profit_ticks=100.0, loss_ticks=50.0) + i = build_intents({}, {"L": e}, SYMBOL)[0] + assert isinstance(i, ExitIntent) + assert i.tp_price is None and i.sl_price is None + assert i.profit_ticks == 100.0 and i.loss_ticks == 50.0 + assert i.has_unresolved_ticks is True + + +def __test_exit_ticks_override_explicit_prices__(): + # If both are syntactically present, Pine uses ticks at fill time. + e = _exit("L", -1.0, "TP", + limit=60_000.0, stop=45_000.0, + profit_ticks=100.0, loss_ticks=50.0) + i = build_intents({}, {"L": e}, SYMBOL)[0] + assert i.tp_price is None and i.sl_price is None + assert i.profit_ticks == 100.0 and i.loss_ticks == 50.0 + + +def __test_exit_with_trailing__(): + e = _exit("L", -1.0, "TR", trail_price=55_000.0, trail_offset=50) + i = build_intents({}, {"L": e}, SYMBOL)[0] + assert i.trail_price == 55_000.0 + assert i.trail_offset == 50 + + +def __test_exit_without_trailing_has_null_trail_offset__(): + # Order.__init__ defaults trail_offset to 0; the intent should expose + # None when no trailing context exists, so a plugin doesn't mistake + # a plain TP/SL for a zero-offset trailing stop. 
+ e = _exit("L", -1.0, "TP", limit=60_000.0) + i = build_intents({}, {"L": e}, SYMBOL)[0] + assert i.trail_offset is None and i.trail_price is None + + +def __test_exit_tick_trailing__(): + e = _exit("L", -1.0, "TR", trail_points_ticks=100.0, trail_offset=25) + i = build_intents({}, {"L": e}, SYMBOL)[0] + assert i.trail_price is None + assert i.trail_points_ticks == 100.0 + assert i.trail_offset == 25 + assert i.has_unresolved_ticks is True + + +# === Close / close_all === + +def __test_close_produces_close_intent__(): + i = build_intents({}, {"L": _close("L", -1.0)}, SYMBOL)[0] + assert isinstance(i, CloseIntent) + assert i.pine_id == "L" and i.side == "sell" + + +def __test_close_all_has_empty_pine_id__(): + i = build_intents({}, {None: _close_all(-1.0)}, SYMBOL)[0] + assert isinstance(i, CloseIntent) + assert i.pine_id == "" + + +# === Cancellation / filtering === + +def __test_cancelled_orders_are_skipped__(): + o = _entry("L", 1.0) + o.cancelled = True + assert build_intents({"L": o}, {}, SYMBOL) == [] + + +def __test_mixed_entry_and_exit_produce_both_intents__(): + e = _entry("L", 1.0, limit=50_000.0) + x = _exit("L", -1.0, "TP", limit=60_000.0, stop=45_000.0) + intents = build_intents({"L": e}, {"L": x}, SYMBOL) + kinds = [type(i).__name__ for i in intents] + assert kinds == ["EntryIntent", "ExitIntent"] diff --git a/tests/t00_pynecore/core/test_025_order_sync_engine.py b/tests/t00_pynecore/core/test_025_order_sync_engine.py new file mode 100644 index 0000000..11fed29 --- /dev/null +++ b/tests/t00_pynecore/core/test_025_order_sync_engine.py @@ -0,0 +1,421 @@ +""" +Tests for :class:`OrderSyncEngine` — the diff/dispatch/event-routing core. + +A :class:`MockBroker` implements just the async surface the engine uses, +recording every call so assertions can check which intent ended up where. +A stubbed :attr:`lib._script.initial_capital` keeps +:class:`BrokerPosition.equity` well-defined. 
+""" +from __future__ import annotations + +import asyncio +from dataclasses import dataclass, field +from types import SimpleNamespace +from typing import Any + +import pytest + +from pynecore import lib +from pynecore.core.broker.position import BrokerPosition +from pynecore.core.broker.sync_engine import OrderSyncEngine +from pynecore.core.broker.models import ( + CancelIntent, + EntryIntent, + ExitIntent, + CloseIntent, + ExchangeOrder, + ExchangePosition, + ExchangeCapabilities, + OrderEvent, + OrderStatus, + OrderType, + LegType, + InterceptorResult, +) +from pynecore.lib.strategy import ( + Order, + _order_type_entry, + _order_type_close, +) + + +SYMBOL = "BTCUSDT" + + +@pytest.fixture(autouse=True) +def _stub_script(): + prev = lib._script + lib._script = SimpleNamespace(initial_capital=1_000_000.0) + try: + yield + finally: + lib._script = prev + + +# === Mock broker === + + +@dataclass +class MockBroker: + """Duck-typed stand-in for :class:`BrokerPlugin`. Records all calls.""" + entry_calls: list[EntryIntent] = field(default_factory=list) + exit_calls: list[ExitIntent] = field(default_factory=list) + close_calls: list[CloseIntent] = field(default_factory=list) + cancel_calls: list[CancelIntent] = field(default_factory=list) + modify_entry_calls: list[tuple[EntryIntent, EntryIntent]] = field(default_factory=list) + modify_exit_calls: list[tuple[ExitIntent, ExitIntent]] = field(default_factory=list) + open_orders: list[ExchangeOrder] = field(default_factory=list) + position: ExchangePosition | None = None + streamed_events: list[OrderEvent] = field(default_factory=list) + watch_orders_impl: str = "generator" # "generator" | "not_implemented" + _next_id: int = 0 + + def _mk_order(self, intent) -> ExchangeOrder: + self._next_id += 1 + return ExchangeOrder( + id=f"xchg-{self._next_id}", + symbol=getattr(intent, 'symbol', SYMBOL), + side=getattr(intent, 'side', 'buy'), + order_type=OrderType.MARKET, + qty=getattr(intent, 'qty', 0.0), + filled_qty=0.0, + 
remaining_qty=getattr(intent, 'qty', 0.0), + price=None, + stop_price=None, + average_fill_price=None, + status=OrderStatus.OPEN, + timestamp=0.0, + fee=0.0, + fee_currency="", + ) + + async def execute_entry(self, intent): + self.entry_calls.append(intent) + return [self._mk_order(intent)] + + async def execute_exit(self, intent): + self.exit_calls.append(intent) + return [self._mk_order(intent)] + + async def execute_close(self, intent): + self.close_calls.append(intent) + return self._mk_order(intent) + + async def execute_cancel(self, intent): + self.cancel_calls.append(intent) + return True + + async def modify_entry(self, old, new): + self.modify_entry_calls.append((old, new)) + return [self._mk_order(new)] + + async def modify_exit(self, old, new): + self.modify_exit_calls.append((old, new)) + return [self._mk_order(new)] + + async def get_open_orders(self, symbol=None): + return list(self.open_orders) + + async def get_position(self, symbol): + return self.position + + def watch_orders(self): + if self.watch_orders_impl == "not_implemented": + raise NotImplementedError + + async def _gen(): + for event in self.streamed_events: + yield event + + return _gen() + + +# === Helpers === + + +def _entry_order(order_id, size, **kw) -> Order: + return Order(order_id, size, order_type=_order_type_entry, **kw) + + +def _exit_order(from_entry, size, exit_id, **kw) -> Order: + return Order(from_entry, size, order_type=_order_type_close, exit_id=exit_id, **kw) + + +def _mk_engine(broker, mintick: float = 1.0) -> tuple[OrderSyncEngine, BrokerPosition]: + pos = BrokerPosition() + engine = OrderSyncEngine( + broker=broker, # type: ignore[arg-type] + position=pos, + symbol=SYMBOL, + mintick=mintick, + ) + return engine, pos + + +def _fill_event(side: str, qty: float, price: float, *, + pine_id: str, leg: LegType = LegType.ENTRY, + xchg_id: str = "xchg-1") -> OrderEvent: + exch = ExchangeOrder( + id=xchg_id, symbol=SYMBOL, side=side, + order_type=OrderType.MARKET, qty=qty, 
filled_qty=qty, + remaining_qty=0.0, price=None, stop_price=None, + average_fill_price=price, status=OrderStatus.FILLED, + timestamp=0.0, fee=0.0, fee_currency="", + ) + return OrderEvent( + order=exch, event_type='filled', fill_price=price, + fill_qty=qty, timestamp=0.0, pine_id=pine_id, leg_type=leg, + ) + + +# === Diff / dispatch === + + +def __test_new_entry_dispatches_execute_entry__(): + b = MockBroker() + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + + engine.sync() + + assert len(b.entry_calls) == 1 + assert b.entry_calls[0].pine_id == "L" + assert b.entry_calls[0].limit == 50_000.0 + assert engine.active_intents.keys() == {"L"} + assert engine.order_mapping["L"] == ["xchg-1"] + + +def __test_unchanged_entry_is_not_redispatched__(): + b = MockBroker() + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + + engine.sync() + engine.sync() + + assert len(b.entry_calls) == 1 # only once + + +def __test_modified_entry_dispatches_modify_entry__(): + b = MockBroker() + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + engine.sync() + + # Replace with a different limit price + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=49_500.0) + engine.sync() + + assert len(b.modify_entry_calls) == 1 + old, new = b.modify_entry_calls[0] + assert old.limit == 50_000.0 and new.limit == 49_500.0 + + +def __test_removed_entry_dispatches_cancel__(): + b = MockBroker() + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + engine.sync() + + del pos.entry_orders["L"] + engine.sync() + + assert len(b.cancel_calls) == 1 + assert b.cancel_calls[0].pine_id == "L" + assert b.cancel_calls[0].from_entry is None + assert "L" not in engine.active_intents + + +def __test_close_intent_dispatches_execute_close__(): + b = MockBroker() + engine, pos = _mk_engine(b) + pos.exit_orders["L"] = Order( + "L", -1.0, 
order_type=_order_type_close, + exit_id="Close entry(s) order L", + ) + + engine.sync() + + assert len(b.close_calls) == 1 + assert b.close_calls[0].pine_id == "L" + assert b.close_calls[0].side == "sell" + + +def __test_exit_with_prices_dispatches_execute_exit__(): + b = MockBroker() + engine, pos = _mk_engine(b) + pos.exit_orders["L"] = _exit_order( + "L", -1.0, "TP", limit=60_000.0, stop=45_000.0, + ) + + engine.sync() + + assert len(b.exit_calls) == 1 + assert b.exit_calls[0].tp_price == 60_000.0 + assert b.exit_calls[0].sl_price == 45_000.0 + + +# === Tick deferral + resolution === + + +def __test_exit_with_ticks_without_entry_is_deferred__(): + b = MockBroker() + engine, pos = _mk_engine(b, mintick=1.0) + pos.exit_orders["L"] = _exit_order( + "L", -1.0, "TP", profit_ticks=100.0, loss_ticks=50.0, + ) + + engine.sync() + + # Exit never reaches the plugin while ticks are unresolved. + assert b.exit_calls == [] + assert "L" in engine.deferred_exits + assert "TP\0L" not in engine.active_intents + + +def __test_entry_fill_resolves_deferred_exit__(): + b = MockBroker() + engine, pos = _mk_engine(b, mintick=1.0) + pos.exit_orders["L"] = _exit_order( + "L", -1.0, "TP", profit_ticks=100.0, loss_ticks=50.0, + ) + engine.sync() # defers it + + engine.on_order_event(_fill_event( + "buy", qty=1.0, price=50_000.0, pine_id="L", leg=LegType.ENTRY, + )) + engine.sync() # drains the event, resolves ticks, dispatches + + assert len(b.exit_calls) == 1 + resolved = b.exit_calls[0] + # Long entry (sign=+1): TP above, SL below. 
+ assert resolved.tp_price == 50_100.0 + assert resolved.sl_price == 49_950.0 + assert resolved.profit_ticks is None + assert resolved.loss_ticks is None + assert "L" not in engine.deferred_exits + + +def __test_short_entry_fill_reverses_tick_direction__(): + b = MockBroker() + engine, pos = _mk_engine(b, mintick=1.0) + pos.exit_orders["S"] = _exit_order( + "S", 1.0, "TP", profit_ticks=100.0, loss_ticks=50.0, + ) + engine.sync() + + engine.on_order_event(_fill_event( + "sell", qty=1.0, price=50_000.0, pine_id="S", leg=LegType.ENTRY, + )) + engine.sync() + + resolved = b.exit_calls[0] + # Short (sign=-1): TP below entry, SL above entry. + assert resolved.tp_price == 49_900.0 + assert resolved.sl_price == 50_050.0 + + +# === Interceptor === + + +def __test_interceptor_rejects_intent__(): + b = MockBroker() + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + + def veto(_intent) -> InterceptorResult: + return InterceptorResult(intent=_intent, rejected=True, reject_reason="no") + + engine.register_interceptor(veto) + engine.sync() + + assert b.entry_calls == [] + assert engine.active_intents == {} + + +def __test_interceptor_modifies_qty__(): + b = MockBroker() + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + + def half(_intent): + return InterceptorResult(intent=_intent, modified_qty=_intent.qty * 0.5) + + engine.register_interceptor(half) + engine.sync() + + assert b.entry_calls[0].qty == 0.5 + + +# === Reconciliation === + + +# === run_event_stream (async bridge) === + + +def __test_run_event_stream_queues_all_events__(): + b = MockBroker() + b.streamed_events = [ + _fill_event("buy", qty=1.0, price=50_000.0, + pine_id="L", leg=LegType.ENTRY, xchg_id="x1"), + _fill_event("sell", qty=1.0, price=50_500.0, + pine_id="L", leg=LegType.CLOSE, xchg_id="x2"), + ] + engine, pos = _mk_engine(b) + + asyncio.run(engine.run_event_stream()) + + # Drain via the public path (sync) — verifies 
integration with record_fill. + pos.avg_price = 50_000.0 # make equity finite for Trade bookkeeping + engine.sync() + + assert len(pos.closed_trades) == 0 or len(pos.closed_trades) == 1 + # We at least confirm the events flowed end-to-end by checking records + assert len(pos.open_trades) + len(pos.closed_trades) >= 1 + + +def __test_run_event_stream_handles_not_implemented__(): + b = MockBroker() + b.watch_orders_impl = "not_implemented" + engine, pos = _mk_engine(b) + + # Should return cleanly, not raise. + asyncio.run(engine.run_event_stream()) + + +def __test_run_event_stream_handles_async_gen_not_implemented__(): + """A plugin's ``watch_orders`` may raise NotImplementedError from the + generator body rather than from the outer call — the engine must treat + both the same way.""" + b = MockBroker() + + def _raise_in_body(): + async def _gen(): + raise NotImplementedError + yield # pragma: no cover — unreachable + + return _gen() + + b.watch_orders = _raise_in_body # type: ignore[method-assign] + engine, pos = _mk_engine(b) + + asyncio.run(engine.run_event_stream()) + + +# === Reconciliation === + + +def __test_reconcile_adopts_exchange_position_size__(): + b = MockBroker() + b.position = ExchangePosition( + symbol=SYMBOL, side="long", size=2.0, entry_price=50_000.0, + unrealized_pnl=0.0, liquidation_price=None, + leverage=1.0, margin_mode="isolated", + ) + engine, pos = _mk_engine(b) + pos.size = 1.0 # local tracking disagrees + + engine.reconcile() + + assert pos.size == 2.0 + assert pos.avg_price == 50_000.0 From fb7c1a89662e13af0ee1eed86b4b8dbc1413517c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Tue, 14 Apr 2026 16:18:11 +0200 Subject: [PATCH 30/64] feat: add broker mode order routing Add broker mode setup to the script runner, swapping in broker positions and routing orders through the order sync engine. 
Add CLI support for --broker, broker-aware live event loop handling, startup capability validation, and broker runner integration tests. --- src/pynecore/cli/commands/run.py | 33 +- src/pynecore/core/broker/position.py | 46 +- src/pynecore/core/live_runner.py | 33 +- src/pynecore/core/script_runner.py | 108 ++++- src/pynecore/lib/strategy/__init__.py | 2 + .../core/test_026_broker_runner.py | 396 ++++++++++++++++++ 6 files changed, 608 insertions(+), 10 deletions(-) create mode 100644 tests/t00_pynecore/core/test_026_broker_runner.py diff --git a/src/pynecore/cli/commands/run.py b/src/pynecore/cli/commands/run.py index c3edf95..56c2974 100644 --- a/src/pynecore/cli/commands/run.py +++ b/src/pynecore/cli/commands/run.py @@ -236,6 +236,10 @@ def run( live: bool = Option(False, "--live", "-l", help="Continue with live data after historical phase " "(provider mode only)"), + broker: bool = Option(False, "--broker", + help="Enable live broker trading — requires a provider plugin that " + "subclasses BrokerPlugin. Implies --live.", + rich_help_panel="Live Options"), shutdown_timeout: float = Option(120.0, "--shutdown-timeout", help="Max seconds to wait for graceful shutdown " "(0 = wait forever)", @@ -485,6 +489,30 @@ def run( else: ohlcv_iter = reader.read_from(time_from_ts, time_to_ts) + # --broker implies --live. + if broker: + live = True + + # Broker mode: verify plugin capability up front. 
+ broker_plugin = None + broker_event_loop = None + if broker: + if not provider_data: + secho("--broker requires a provider string (ccxt:EXCHANGE:SYMBOL@TIMEFRAME).", + err=True, fg=colors.RED) + raise Exit(1) + from pynecore.core.plugin.broker import BrokerPlugin + if not isinstance(provider_data.provider_instance, BrokerPlugin): + secho( + f"Plugin '{provider_data.parsed_string.provider}' is not a BrokerPlugin " + f"— broker mode requires an exchange-backed plugin.", + err=True, fg=colors.RED, + ) + raise Exit(1) + broker_plugin = provider_data.provider_instance + import asyncio as _asyncio + broker_event_loop = _asyncio.new_event_loop() + # Chain live iterator after historical if --live if live and provider_data: import itertools @@ -503,6 +531,7 @@ def run( timeframe=provider_data.parsed_string.timeframe, last_historical_timestamp=time_to_ts, shutdown_timeout=shutdown_timeout, + event_loop=broker_event_loop, ) ohlcv_iter = itertools.chain(ohlcv_iter, [LIVE_TRANSITION], live_iter) size = 0 @@ -560,7 +589,9 @@ def run( runner = ScriptRunner(script, ohlcv_iter, syminfo, last_bar_index=size - 1, plot_path=plot_path, strat_path=strat_path, trade_path=trade_path, security_data=security_data, - magnifier_iter=magnifier_iter) + magnifier_iter=magnifier_iter, + broker_plugin=broker_plugin, + broker_event_loop=broker_event_loop) finally: # Remove lib directory from Python path if lib_path_added: diff --git a/src/pynecore/core/broker/position.py b/src/pynecore/core/broker/position.py index 9df3ae3..703f9bb 100644 --- a/src/pynecore/core/broker/position.py +++ b/src/pynecore/core/broker/position.py @@ -44,8 +44,9 @@ class BrokerPosition(PositionBase): 'open_commission', 'eventrades', 'wintrades', 'losstrades', 'max_drawdown', 'max_runup', - 'open_trades', 'closed_trades', + 'open_trades', 'closed_trades', 'new_closed_trades', 'entry_orders', 'exit_orders', + 'risk_halt_trading', '_current_price', ) @@ -68,12 +69,54 @@ def __init__(self) -> None: self.open_trades: 
list[Trade] = [] self.closed_trades: deque[Trade] = deque(maxlen=9000) + self.new_closed_trades: list[Trade] = [] self.entry_orders: dict[str | None, 'Order'] = {} self.exit_orders: dict[str | None, 'Order'] = {} + self.risk_halt_trading: bool = False + self._current_price: float = 0.0 + # === Pine API compatibility shims ====================================== + # Pine strategy.* functions read ``position.c`` / ``.o`` / ``.h`` / ``.l`` + # for the simulator's creation-time margin check. In broker mode those + # attributes are served from the live OHLCV module; the exchange enforces + # margin for real, so the Pine-level check still acts as a safety net + # on script-side state without a separate simulator update path. + + @property + def c(self) -> float: + try: + v = lib.close + except AttributeError: + return self._current_price or 0.0 + try: + return float(v) if v is not None else self._current_price or 0.0 + except (TypeError, ValueError): + return self._current_price or 0.0 + + @property + def o(self) -> float: + try: + return float(lib.open) + except (AttributeError, TypeError, ValueError): + return self.c + + @property + def h(self) -> float: + try: + return float(lib.high) + except (AttributeError, TypeError, ValueError): + return self.c + + @property + def l(self) -> float: # noqa: E743 — mirrors the Pine attribute name + try: + return float(lib.low) + except (AttributeError, TypeError, ValueError): + return self.c + # === Pine-side order book === def _add_order(self, order: 'Order') -> None: @@ -270,3 +313,4 @@ def _close_trade(self, trade: Trade, fill_price: float, if trade in self.open_trades: self.open_trades.remove(trade) self.closed_trades.append(trade) + self.new_closed_trades.append(trade) diff --git a/src/pynecore/core/live_runner.py b/src/pynecore/core/live_runner.py index 59ef770..4a56c13 100644 --- a/src/pynecore/core/live_runner.py +++ b/src/pynecore/core/live_runner.py @@ -33,6 +33,8 @@ def live_ohlcv_generator( *, last_historical_timestamp: 
int | None = None, shutdown_timeout: float = 120.0, + event_loop: asyncio.AbstractEventLoop | None = None, + engine_event_stream: asyncio.coroutines | None = None, ) -> Iterator[OHLCV]: """ Bridge async watch_ohlcv() to a sync Iterator[OHLCV]. @@ -45,6 +47,14 @@ def live_ohlcv_generator( :param timeframe: Timeframe in TradingView format. :param last_historical_timestamp: Timestamp of the last historical bar to avoid duplicates. :param shutdown_timeout: Max seconds to wait for graceful shutdown. 0 = wait forever. + :param event_loop: Optional externally-owned event loop. When supplied, the background + thread runs the async loop on it via ``run_until_complete`` instead + of ``asyncio.run``. Required for broker mode so that the Order Sync + Engine can submit coroutines to the same loop. + :param engine_event_stream: Optional coroutine (typically + ``OrderSyncEngine.run_event_stream()``) to run as a + long-lived task alongside the OHLCV watcher. The engine + receives its :class:`OrderEvent` stream this way. :return: Iterator yielding OHLCV objects (both closed and intra-bar). """ bar_queue: Queue[OHLCV | BaseException] = Queue(maxsize=100) @@ -87,6 +97,13 @@ async def _async_loop(): logger.info("Live provider connected: %s %s@%s", type(provider).__name__, symbol, timeframe) + # Broker mode: attach the Order Sync Engine's event stream as + # a background task so OrderEvents land in its queue without + # blocking the OHLCV reader. 
+ engine_task: asyncio.Task | None = None + if engine_event_stream is not None: + engine_task = asyncio.create_task(engine_event_stream) + reconnect_attempts = 0 while not stop_event.is_set(): @@ -160,11 +177,25 @@ async def _async_loop(): except Exception as e: bar_queue.put(e) finally: + if engine_task is not None and not engine_task.done(): + engine_task.cancel() + try: + await engine_task + except (asyncio.CancelledError, Exception): # noqa: BLE001 + pass await _graceful_shutdown() bar_queue.put(_SENTINEL) def _thread_target(): - asyncio.run(_async_loop()) + if event_loop is not None: + asyncio.set_event_loop(event_loop) + try: + event_loop.run_until_complete(_async_loop()) + finally: + # The caller owns the loop; don't close it here. + pass + else: + asyncio.run(_async_loop()) thread = threading.Thread(target=_thread_target, daemon=True, name="live-provider") thread.start() diff --git a/src/pynecore/core/script_runner.py b/src/pynecore/core/script_runner.py index 0c1a774..b704ff4 100644 --- a/src/pynecore/core/script_runner.py +++ b/src/pynecore/core/script_runner.py @@ -16,6 +16,8 @@ from zoneinfo import ZoneInfo # noqa from pynecore.core.script import script from pynecore.lib.strategy import Trade, Position # noqa + from pynecore.core.plugin.broker import BrokerPlugin + from pynecore.core.broker.sync_engine import OrderSyncEngine __all__ = [ 'import_script', @@ -192,7 +194,8 @@ class ScriptRunner: __slots__ = ('script_module', 'script', 'ohlcv_iter', 'syminfo', 'update_syminfo_every_run', 'bar_index', 'tz', 'plot_writer', 'strat_writer', 'trades_writer', 'last_bar_index', 'equity_curve', 'first_price', 'last_price', - '_script_path', '_security_data', '_magnifier_iter') + '_script_path', '_security_data', '_magnifier_iter', + '_broker_plugin', '_order_sync_engine', '_broker_event_loop') # noinspection PyProtectedMember def __init__(self, script_path: Path, ohlcv_iter: Iterable[OHLCV], syminfo: SymInfo, *, @@ -201,7 +204,9 @@ def __init__(self, script_path: 
Path, ohlcv_iter: Iterable[OHLCV], syminfo: SymI update_syminfo_every_run: bool = False, last_bar_index=0, inputs: dict[str, Any] | None = None, security_data: dict[str, str | Path] | None = None, - magnifier_iter: Iterable[OHLCV] | None = None): + magnifier_iter: Iterable[OHLCV] | None = None, + broker_plugin: 'BrokerPlugin | None' = None, + broker_event_loop: Any = None): """ Initialize the script runner @@ -223,6 +228,16 @@ def __init__(self, script_path: Path, ohlcv_iter: Iterable[OHLCV], syminfo: SymI :param magnifier_iter: Optional sub-timeframe OHLCV iterator for bar magnifier mode. When provided with use_bar_magnifier=true, order fills are checked against each sub-bar for more accurate backtesting. + :param broker_plugin: If set, the runner operates in **broker (live trading) mode**: + ``script.position`` is replaced by a :class:`BrokerPosition`, + ``strategy.*`` orders are dispatched through an + :class:`OrderSyncEngine`, and the simulator's order processing + is bypassed. The plugin also drives the OHLCV stream + (a :class:`BrokerPlugin` extends :class:`LiveProviderPlugin`). + :param broker_event_loop: The shared ``asyncio`` event loop on which the broker plugin + runs. Passed to the :class:`OrderSyncEngine` so that + broker coroutines can be awaited from the runner thread + via ``run_coroutine_threadsafe``. :raises ImportError: If the script does not have a 'main' function :raises ImportError: If the 'main' function is not decorated with @script.[indicator|strategy|library] :raises OSError: If the plot file could not be opened @@ -252,6 +267,27 @@ def __init__(self, script_path: Path, ohlcv_iter: Iterable[OHLCV], syminfo: SymI self.script: script = self.script_module.main.script + # Broker (live trading) mode setup. + # Done before ohlcv_iter is consumed so the engine is ready before run_iter. 
+ self._broker_plugin: 'BrokerPlugin | None' = broker_plugin + self._broker_event_loop = broker_event_loop + self._order_sync_engine: 'OrderSyncEngine | None' = None + if broker_plugin is not None: + from pynecore.core.broker.position import BrokerPosition + from pynecore.core.broker.sync_engine import OrderSyncEngine + # Swap the simulator position for a live tracker. The + # @script.strategy(...) decorator already attached a SimPosition; + # in live broker mode the exchange is authoritative, so the + # simulator is dropped entirely. + self.script.position = BrokerPosition() + self._order_sync_engine = OrderSyncEngine( + broker=broker_plugin, + position=self.script.position, # type: ignore[arg-type] + symbol=str(syminfo.ticker), + event_loop=broker_event_loop, + mintick=float(syminfo.mintick) if syminfo.mintick else 0.01, + ) + self.ohlcv_iter = ohlcv_iter self.syminfo = syminfo self.update_syminfo_every_run = update_syminfo_every_run @@ -281,6 +317,41 @@ def __init__(self, script_path: Path, ohlcv_iter: Iterable[OHLCV], syminfo: SymI "Drawdown %", )) if trade_path else None + # === Order-processing dispatch ========================================= + + def _process_orders(self, position) -> None: + """Run one order-processing step. + + In backtest mode this invokes the :class:`SimPosition` simulator + (OHLC fill detection, slippage, OCA, margin). In broker mode it + hands the pending Pine order book to the :class:`OrderSyncEngine`, + which dispatches real exchange calls and routes any fills that + arrived asynchronously through :meth:`BrokerPosition.record_fill`. 
+ """ + if self._order_sync_engine is not None: + self._order_sync_engine.sync() + else: + position.process_orders() + + def _process_orders_magnified(self, position, sub_bars, candle) -> None: + """Backtest sub-bar order processing; in broker mode, the exchange + is the source of truth — magnification is irrelevant and the engine + runs a plain sync.""" + if self._order_sync_engine is not None: + self._order_sync_engine.sync() + else: + position.process_orders_magnified(sub_bars, candle) + + def _process_deferred_margin_call(self, position) -> None: + """Simulator-only. The exchange handles margin in broker mode, so + any deferred margin handling is a no-op there.""" + if self._order_sync_engine is None: + position.process_deferred_margin_call() + + @property + def _broker_mode(self) -> bool: + return self._order_sync_engine is not None + # noinspection PyProtectedMember def run_iter(self, on_progress: Callable[[datetime], None] | None = None) \ -> Iterator[tuple[OHLCV, dict[str, Any]] | tuple[OHLCV, dict[str, Any], list['Trade']]]: @@ -306,6 +377,21 @@ def run_iter(self, on_progress: Callable[[datetime], None] | None = None) \ # Set script data lib._script = self.script # Store script object in lib + # Broker mode: refuse to start if the script needs capabilities the + # exchange doesn't offer. Fail fast — never on the first bar. 
+ if self._broker_plugin is not None: + from pynecore.core.broker.validation import validate_at_startup + from pynecore.core.broker.exceptions import ExchangeCapabilityError + caps = self._broker_plugin.get_capabilities() + reqs = getattr(self.script, '_broker_requirements', None) + if reqs is not None: + errors = validate_at_startup(reqs, caps) + if errors: + raise ExchangeCapabilityError( + "Script requirements not met by exchange:\n" + + "\n".join(f" - {e}" for e in errors) + ) + # Update syminfo lib properties if needed if not self.update_syminfo_every_run: _set_lib_syminfo_properties(self.syminfo, lib) @@ -572,6 +658,11 @@ def _write_bar_output(bar_candle): # noinspection PyProtectedMember def _coof_loop(): """COOF re-execution loop: process orders, re-execute on fills.""" + # Broker mode: no synchronous fill-driven re-execution — exchange + # fills arrive asynchronously and are routed on the next sync. + if self._broker_mode: + self._process_orders(position) + return old_fills = position._fill_counter position.process_orders() new_fills = position._fill_counter @@ -587,6 +678,9 @@ def _coof_loop(): # noinspection PyProtectedMember def _coof_magnified_loop(sub_bars_list, aggregated_candle): """COOF re-execution loop with magnified order processing.""" + if self._broker_mode: + self._process_orders(position) + return old_fills = position._fill_counter position.process_orders_magnified(sub_bars_list, aggregated_candle) new_fills = position._fill_counter @@ -637,14 +731,14 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): if var_snapshot.has_vars: var_snapshot.restore() elif is_strat and position and not lib._strategy_suppressed: - position.process_orders() + self._process_orders(position) # Execute libraries + script _run_libs_and_main() # Process deferred margin calls if is_strat and position and not lib._strategy_suppressed: - position.process_deferred_margin_call() + self._process_deferred_margin_call(position) # Write output 
_write_bar_output(candle) @@ -748,20 +842,20 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): _coof_magnified_loop(sub_bars, candle) var_snapshot.restore() else: - position.process_orders_magnified(sub_bars, candle) + self._process_orders_magnified(position, sub_bars, candle) else: if var_snapshot and var_snapshot.has_vars: _coof_loop() var_snapshot.restore() else: - position.process_orders() + self._process_orders(position) # Final script execution for the closed bar lib._plot_data.clear() _run_libs_and_main() if is_strat and position: - position.process_deferred_margin_call() + self._process_deferred_margin_call(position) # Commit state for next bar if var_snapshot and var_snapshot.has_vars: diff --git a/src/pynecore/lib/strategy/__init__.py b/src/pynecore/lib/strategy/__init__.py index c161dcf..4e8b5ea 100644 --- a/src/pynecore/lib/strategy/__init__.py +++ b/src/pynecore/lib/strategy/__init__.py @@ -432,8 +432,10 @@ class PositionBase(ABC): max_runup: float open_trades: list['Trade'] closed_trades: 'deque[Trade]' + new_closed_trades: list['Trade'] entry_orders: dict[str | None, 'Order'] exit_orders: dict[str | None, 'Order'] + risk_halt_trading: bool @property def equity(self) -> PyneFloat: diff --git a/tests/t00_pynecore/core/test_026_broker_runner.py b/tests/t00_pynecore/core/test_026_broker_runner.py new file mode 100644 index 0000000..29fcb55 --- /dev/null +++ b/tests/t00_pynecore/core/test_026_broker_runner.py @@ -0,0 +1,396 @@ +""" +Integration tests for :class:`ScriptRunner` in broker (live trading) mode. + +These tests construct a ScriptRunner with a :class:`MockBrokerPlugin` and +drive a minimal ``@pyne`` strategy through it, verifying the contract between +the Pine order book, the :class:`OrderSyncEngine` and the position tracker: + +- Startup capability validation rejects incompatible scripts. +- ``script.position`` is swapped to a :class:`BrokerPosition`. +- ``strategy.entry`` / ``strategy.close`` route to ``execute_*`` calls. 
+- :class:`OrderEvent` fills update :class:`BrokerPosition`. + +No live thread, no real exchange — the engine runs with +``event_loop=None`` (one-shot ``asyncio.run`` per broker call). +""" +from __future__ import annotations + +import textwrap +from dataclasses import dataclass, field +from pathlib import Path + +import pytest + +from pynecore.core.broker.models import ( + CancelIntent, + CloseIntent, + EntryIntent, + ExchangeCapabilities, + ExchangeOrder, + ExchangePosition, + ExitIntent, + OrderEvent, + OrderStatus, + OrderType, + LegType, +) +from pynecore.core.broker.exceptions import ExchangeCapabilityError +from pynecore.core.broker.position import BrokerPosition +from pynecore.core.script_runner import ScriptRunner +from pynecore.core.syminfo import SymInfo +from pynecore.types.ohlcv import OHLCV + + +# === Synthetic bars + syminfo === + + +def _make_syminfo() -> SymInfo: + """Minimal SymInfo for a crypto-like instrument.""" + return SymInfo( + prefix="TEST", + ticker="BTCUSDT", + description="Test", + currency="USDT", + basecurrency="BTC", + period="1D", + type="crypto", + mintick=0.01, + pricescale=100.0, + pointvalue=1.0, + timezone="UTC", + volumetype="base", + opening_hours=[], + session_starts=[], + session_ends=[], + ) + + +def _make_bars(count: int, start_price: float = 50_000.0) -> list[OHLCV]: + """A monotonically increasing, closed-bar sequence.""" + bars: list[OHLCV] = [] + for i in range(count): + price = start_price + i * 100.0 + bars.append(OHLCV( + timestamp=1_700_000_000 + i * 86_400, + open=price, high=price + 50.0, low=price - 50.0, + close=price, volume=1.0, + )) + return bars + + +# === Mock BrokerPlugin === + + +@dataclass +class MockBrokerPlugin: + """Duck-typed stand-in for :class:`BrokerPlugin` — matches only the + methods :class:`ScriptRunner` and :class:`OrderSyncEngine` call.""" + capabilities: ExchangeCapabilities = field(default_factory=ExchangeCapabilities) + entry_calls: list[EntryIntent] = field(default_factory=list) + 
exit_calls: list[ExitIntent] = field(default_factory=list) + close_calls: list[CloseIntent] = field(default_factory=list) + cancel_calls: list[CancelIntent] = field(default_factory=list) + _next_id: int = 0 + + def get_capabilities(self) -> ExchangeCapabilities: + return self.capabilities + + def _mk_order(self, intent) -> ExchangeOrder: + self._next_id += 1 + return ExchangeOrder( + id=f"xchg-{self._next_id}", + symbol=getattr(intent, 'symbol', 'BTCUSDT'), + side=getattr(intent, 'side', 'buy'), + order_type=OrderType.MARKET, + qty=getattr(intent, 'qty', 0.0), + filled_qty=0.0, + remaining_qty=getattr(intent, 'qty', 0.0), + price=None, stop_price=None, average_fill_price=None, + status=OrderStatus.OPEN, + timestamp=0.0, fee=0.0, fee_currency="", + ) + + async def execute_entry(self, intent): + self.entry_calls.append(intent) + return [self._mk_order(intent)] + + async def execute_exit(self, intent): + self.exit_calls.append(intent) + return [self._mk_order(intent)] + + async def execute_close(self, intent): + self.close_calls.append(intent) + return self._mk_order(intent) + + async def execute_cancel(self, intent): + self.cancel_calls.append(intent) + return True + + async def modify_entry(self, old, new): + return [self._mk_order(new)] + + async def modify_exit(self, old, new): + return [self._mk_order(new)] + + async def get_open_orders(self, symbol=None): + return [] + + async def get_position(self, symbol): + return None + + +# === Script templates === + + +_MARKET_ENTRY_SCRIPT = '''\ +""" +@pyne +""" +from pynecore.lib import script, strategy, bar_index + +@script.strategy("MarketEntry") +def main(): + if bar_index == 0: + strategy.entry("L", strategy.long, qty=1.0) +''' + + +_LIMIT_EXIT_BRACKET_SCRIPT = '''\ +""" +@pyne +""" +from pynecore.lib import script, strategy, bar_index + +@script.strategy("LimitBracket") +def main(): + if bar_index == 0: + strategy.entry("L", strategy.long, qty=1.0) + strategy.exit("TP", from_entry="L", limit=51_000.0, stop=49_000.0) 
+''' + + +_ENTRY_AND_CLOSE_SCRIPT = '''\ +""" +@pyne +""" +from pynecore.lib import script, strategy, bar_index + +@script.strategy("EntryClose") +def main(): + if bar_index == 0: + strategy.entry("L", strategy.long, qty=1.0) + if bar_index == 1: + strategy.close("L") +''' + + +_script_counter = [0] + + +def _write_script(tmp_path: Path, code: str) -> Path: + """Write the script to a uniquely-named file. + + Each test gets a fresh filename so Python's module cache doesn't + serve a sibling test's script when the same tmp_path is recycled. + """ + _script_counter[0] += 1 + p = tmp_path / f"strategy_test_{_script_counter[0]}.py" + p.write_text(code) + return p + + +# === Tests === + + +def __test_broker_mode_swaps_position_to_broker_position__(tmp_path): + plugin = MockBrokerPlugin(capabilities=ExchangeCapabilities()) + script_path = _write_script(tmp_path, _MARKET_ENTRY_SCRIPT) + + runner = ScriptRunner( + script_path=script_path, + ohlcv_iter=_make_bars(1), + syminfo=_make_syminfo(), + broker_plugin=plugin, # type: ignore[arg-type] + ) + + assert isinstance(runner.script.position, BrokerPosition) + assert runner._order_sync_engine is not None + + +def __test_startup_validation_rejects_incompatible_script__(tmp_path): + """Script uses TP+SL bracket, plugin declares no tp_sl_bracket.""" + plugin = MockBrokerPlugin(capabilities=ExchangeCapabilities()) + script_path = _write_script(tmp_path, _LIMIT_EXIT_BRACKET_SCRIPT) + + runner = ScriptRunner( + script_path=script_path, + ohlcv_iter=_make_bars(2), + syminfo=_make_syminfo(), + broker_plugin=plugin, # type: ignore[arg-type] + ) + with pytest.raises(ExchangeCapabilityError) as excinfo: + list(runner.run_iter()) + assert "TP+SL" in str(excinfo.value) + + +def __test_startup_validation_accepts_compatible_script__(tmp_path): + # The bracket script needs tp_sl_bracket AND stop_orders from its + # syntactic keywords; the plugin must advertise both. 
+ plugin = MockBrokerPlugin( + capabilities=ExchangeCapabilities( + tp_sl_bracket=True, stop_order=True, + ), + ) + script_path = _write_script(tmp_path, _LIMIT_EXIT_BRACKET_SCRIPT) + + runner = ScriptRunner( + script_path=script_path, + ohlcv_iter=_make_bars(2), + syminfo=_make_syminfo(), + broker_plugin=plugin, # type: ignore[arg-type] + ) + # Must not raise. + list(runner.run_iter()) + + +def __test_market_entry_dispatches_execute_entry__(tmp_path): + plugin = MockBrokerPlugin(capabilities=ExchangeCapabilities()) + script_path = _write_script(tmp_path, _MARKET_ENTRY_SCRIPT) + + runner = ScriptRunner( + script_path=script_path, + ohlcv_iter=_make_bars(2), + syminfo=_make_syminfo(), + broker_plugin=plugin, # type: ignore[arg-type] + ) + list(runner.run_iter()) + + assert len(plugin.entry_calls) == 1 + call = plugin.entry_calls[0] + assert call.pine_id == "L" + assert call.side == "buy" + assert call.qty == 1.0 + assert call.order_type is OrderType.MARKET + + +def __test_close_dispatches_execute_close__(tmp_path): + """``strategy.close`` only emits an order when there is an open position; + in broker mode that requires a real exchange fill first.""" + plugin = MockBrokerPlugin(capabilities=ExchangeCapabilities()) + # 3-bar script: enter on bar 0, hold, close on bar 2 once filled. 
+ script_path = _write_script(tmp_path, textwrap.dedent('''\ + """ + @pyne + """ + from pynecore.lib import script, strategy, bar_index + + @script.strategy("EntryClose") + def main(): + if bar_index == 0: + strategy.entry("L", strategy.long, qty=1.0) + if bar_index == 2: + strategy.close("L") + ''')) + + runner = ScriptRunner( + script_path=script_path, + ohlcv_iter=_make_bars(5), + syminfo=_make_syminfo(), + broker_plugin=plugin, # type: ignore[arg-type] + ) + + it = iter(runner.run_iter()) + next(it) # bar 0 — entry created (Pine), not yet dispatched + next(it) # bar 1 — sync dispatches entry; we now inject a fill + + assert len(plugin.entry_calls) == 1 + exch_id = runner._order_sync_engine._order_mapping["L"][0] + runner._order_sync_engine.on_order_event(OrderEvent( + order=ExchangeOrder( + id=exch_id, symbol="BTCUSDT", side="buy", + order_type=OrderType.MARKET, qty=1.0, filled_qty=1.0, + remaining_qty=0.0, price=None, stop_price=None, + average_fill_price=50_000.0, status=OrderStatus.FILLED, + timestamp=0.0, fee=0.0, fee_currency="", + ), + event_type='filled', fill_price=50_000.0, fill_qty=1.0, + timestamp=0.0, pine_id="L", leg_type=LegType.ENTRY, + )) + + next(it) # bar 2 — sync drains fill, script calls close (now allowed) + next(it) # bar 3 — sync dispatches the close order + + assert len(plugin.close_calls) == 1 + assert plugin.close_calls[0].pine_id == "L" + assert plugin.close_calls[0].side == "sell" + + +def __test_order_event_fill_updates_broker_position__(tmp_path): + plugin = MockBrokerPlugin(capabilities=ExchangeCapabilities()) + script_path = _write_script(tmp_path, _MARKET_ENTRY_SCRIPT) + + runner = ScriptRunner( + script_path=script_path, + ohlcv_iter=_make_bars(5), + syminfo=_make_syminfo(), + broker_plugin=plugin, # type: ignore[arg-type] + ) + + # Bar-by-bar drive. Sync runs BEFORE the script on each bar, so the + # entry created on bar 0 is dispatched only on bar 1's sync. 
+ it = iter(runner.run_iter()) + next(it) # bar 0 — script creates the entry order + next(it) # bar 1 — sync dispatches execute_entry + + assert len(plugin.entry_calls) == 1 + + # Simulate the exchange filling the order. + exch_id = runner._order_sync_engine._order_mapping["L"][0] + fill = OrderEvent( + order=ExchangeOrder( + id=exch_id, symbol="BTCUSDT", side="buy", + order_type=OrderType.MARKET, qty=1.0, filled_qty=1.0, + remaining_qty=0.0, price=None, stop_price=None, + average_fill_price=50_000.0, status=OrderStatus.FILLED, + timestamp=0.0, fee=0.0, fee_currency="", + ), + event_type='filled', fill_price=50_000.0, fill_qty=1.0, + timestamp=0.0, pine_id="L", leg_type=LegType.ENTRY, + ) + runner._order_sync_engine.on_order_event(fill) + + next(it) # bar 2 — sync drains the fill, updates BrokerPosition + + pos = runner.script.position + assert isinstance(pos, BrokerPosition) + assert pos.size == 1.0 + assert len(pos.open_trades) == 1 + assert pos.open_trades[0].entry_price == 50_000.0 + + +def __test_unchanged_intent_not_redispatched__(tmp_path): + """A pending limit entry that Pine re-emits bar-after-bar must not + trigger repeated execute_entry calls.""" + plugin = MockBrokerPlugin(capabilities=ExchangeCapabilities()) + script_path = _write_script(tmp_path, textwrap.dedent('''\ + """ + @pyne + """ + from pynecore.lib import script, strategy, bar_index + + @script.strategy("PendingLimit") + def main(): + strategy.entry("L", strategy.long, qty=1.0, limit=48_000.0) + ''')) + + runner = ScriptRunner( + script_path=script_path, + ohlcv_iter=_make_bars(5), + syminfo=_make_syminfo(), + broker_plugin=plugin, # type: ignore[arg-type] + ) + list(runner.run_iter()) + + # Pine re-creates the same order every bar; the engine must see it as + # unchanged from bar 2 onwards. 
+ assert len(plugin.entry_calls) == 1 From 622f49bd5d56f5643ce67ba891719118d90afd02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Tue, 14 Apr 2026 18:40:26 +0200 Subject: [PATCH 31/64] fix(strategy): skip local market margin check in broker mode Only run the creation-time margin check for simulated positions so broker mode can rely on the exchange as the authoritative margin source. --- src/pynecore/lib/strategy/__init__.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/pynecore/lib/strategy/__init__.py b/src/pynecore/lib/strategy/__init__.py index 4e8b5ea..0e6ef52 100644 --- a/src/pynecore/lib/strategy/__init__.py +++ b/src/pynecore/lib/strategy/__init__.py @@ -2056,9 +2056,11 @@ def entry(id: str, direction: direction.Direction, qty: int | PyneFloat = na_flo elif stop is not None: stop = _price_round(stop, direction_sign) - # Creation-time margin check for market entry orders (TradingView behavior) - # TV checks _size_round(qty) × (close + slippage) > equity at strategy.entry() call time - if limit is None and stop is None: + # Creation-time margin check for market entry orders (TradingView backtest behavior). + # Skip in broker mode: the exchange enforces margin authoritatively, and the script's + # equity view can drift from the exchange (funding, fees, transfers) — making the + # local check a source of silent false positives rather than a safety net. + if limit is None and stop is None and isinstance(position, SimPosition): margin_percent = (script.margin_short if direction_sign < 0 else script.margin_long) if margin_percent > 0: From 231a4f8aa2ee627e7767f936ec7233023fcf0c2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Thu, 16 Apr 2026 09:45:46 +0200 Subject: [PATCH 32/64] feat: add broker observability and CCXT exchange options Add structured broker events for bracket lifecycle tracking, repair failures, reconstruction, and protection degradation. 
Extend CCXT provider config with sandbox and default market type support, while keeping PyneCore-only settings out of CCXT constructor kwargs. Document that live order routing belongs to dedicated per-exchange broker plugins instead of live data providers. --- docs/advanced/live-mode.md | 4 +- docs/development/plugin-system.md | 13 +++-- src/pynecore/core/broker/models.py | 88 +++++++++++++++++++++++++++++- src/pynecore/providers/ccxt.py | 82 +++++++++++++++++++++------- 4 files changed, 159 insertions(+), 28 deletions(-) diff --git a/docs/advanced/live-mode.md b/docs/advanced/live-mode.md index 50e9d39..0928c88 100644 --- a/docs/advanced/live-mode.md +++ b/docs/advanced/live-mode.md @@ -217,8 +217,8 @@ only cares whether it receives `OHLCV` or `BarUpdate` objects. ## Limitations -- **Paper trading only** — no real order execution. `BrokerPlugin` for live order routing is - planned but not yet implemented. +- **Paper trading only** — no real order execution. Live order routing is provided by + dedicated per-exchange broker plugins (`pynecore-bybit`, `pynecore-binance`, etc.). - **Single timeframe** — `request.security()` with live providers (multi-timeframe live) is not yet supported. - **Provider required** — `--live` only works with provider strings, not local data files. 
diff --git a/docs/development/plugin-system.md b/docs/development/plugin-system.md index 17af8b0..c6a7394 100644 --- a/docs/development/plugin-system.md +++ b/docs/development/plugin-system.md @@ -26,14 +26,17 @@ class hierarchy determines what a plugin can do: ``` Plugin (base) -├── ProviderPlugin — Offline OHLCV data provider -│ └── LiveProviderPlugin — WebSocket/streaming data (extends ProviderPlugin) -├── CLIPlugin — CLI subcommands and parameter hooks -└── ExtensionPlugin — Hook-based script extension (planned) +├── ProviderPlugin — Offline OHLCV data provider +│ └── LiveProviderPlugin — WebSocket/streaming data (extends ProviderPlugin) +├── CLIPlugin — CLI subcommands and parameter hooks +└── ExtensionPlugin — Hook-based script extension (planned) ``` `LiveProviderPlugin` inherits from `ProviderPlugin` — every live provider can also download -historical data. See [Live Mode](../advanced/live-mode.md) for usage details. +historical data. See [Live Mode](../advanced/live-mode.md) for data-side details. + +Order execution is handled by dedicated per-exchange broker plugins +(`pynecore-bybit`, `pynecore-binance`, etc.) — not by the data provider. Multiple inheritance combines capabilities: diff --git a/src/pynecore/core/broker/models.py b/src/pynecore/core/broker/models.py index dce1d42..5a1e4a8 100644 --- a/src/pynecore/core/broker/models.py +++ b/src/pynecore/core/broker/models.py @@ -27,6 +27,12 @@ 'CancelIntent', 'ScriptRequirements', 'InterceptorResult', + 'BrokerEvent', + 'BracketRegisteredEvent', + 'LegPartialRepairedEvent', + 'LegRepairFailedEvent', + 'BracketReconstructedEvent', + 'ProtectionDegradedEvent', ] @@ -76,6 +82,10 @@ class ExchangeOrder: fee: float fee_currency: str reduce_only: bool = False + # Exchange-side clientOrderId (our allocation, echoed back by the exchange). + # Required for post-restart bracket reconstruction — without it, open TP/SL + # legs left on the exchange cannot be mapped back to Pine identity. 
+ client_order_id: str | None = None @dataclass @@ -116,13 +126,27 @@ class ExchangePosition: @dataclass class ExchangeCapabilities: - """What the exchange supports. Declared once at startup by the plugin.""" + """ + What the plugin can deliver end-to-end for the script, not raw exchange + support. Declared once at startup. A capability is ``True`` when the + plugin can uphold its semantics on this exchange — natively (one atomic + exchange call) or in software (e.g. two reduce-only orders + stream-driven + repair with OCA reduce semantics). If neither path can uphold the + required semantics, declare ``False`` and + :func:`~pynecore.core.broker.validation.validate_at_startup` rejects the + script. + """ # Order types stop_order: bool = False stop_limit_order: bool = False trailing_stop: bool = False - # Exit bracket (TP+SL with OCA reduce semantics) + # Exit bracket (TP+SL with OCA reduce semantics). ``tp_sl_bracket=True`` + # means the plugin delivers the bracket on this exchange; it does NOT + # imply native support. ``tp_sl_bracket_native=True`` additionally + # promises a single atomic exchange call — useful for diagnostics, + # latency budgeting, and per-exchange reconcile strategy. tp_sl_bracket: bool = False + tp_sl_bracket_native: bool = False # Order management amend_order: bool = False cancel_all: bool = False @@ -261,6 +285,66 @@ class ScriptRequirements: # === Interceptor (Order Sync Engine extension point) === +# === Broker events (observability) ======================================= + +@dataclass +class BrokerEvent: + """Base class for structured broker-side events. + + The plugin emits these via an injected callback so the runner can + surface them in logs, metrics, and the user-facing event stream + without the plugin coupling to any specific sink. 
+ """ + + +@dataclass +class BracketRegisteredEvent(BrokerEvent): + pine_id: str + from_entry: str + tp_order_id: str | None + sl_order_id: str | None + + +@dataclass +class LegPartialRepairedEvent(BrokerEvent): + pine_id: str + from_entry: str + leg: str # "tp" | "sl" + generation: int + old_qty: float + new_qty: float + + +@dataclass +class LegRepairFailedEvent(BrokerEvent): + pine_id: str + from_entry: str + leg: str # "tp" | "sl" + reason: str + action_taken: str # "degraded" | "retry" | ... + + +@dataclass +class BracketReconstructedEvent(BrokerEvent): + pine_id: str + from_entry: str + source: str # "open_orders" | "position_snapshot" | ... + + +@dataclass +class ProtectionDegradedEvent(BrokerEvent): + """The bracket can no longer be maintained with OCA reduce semantics. + + ``reason`` is human-readable / diagnostic; ``policy_action`` names the + manager's chosen follow-up (``"degraded"`` → bracket left in place but + unsupervised; ``"terminal"`` → bracket closed out). + """ + pine_id: str + from_entry: str + reason: str + policy_action: str + + @dataclass class InterceptorResult: """ diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index fd01d03..49f4e51 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -1,17 +1,18 @@ -from typing import Callable +from typing import Any, Callable from dataclasses import dataclass import re from datetime import datetime, UTC, timedelta, time from pathlib import Path import tomllib -from pynecore.core.plugin import LiveProviderPlugin, override +from pynecore.core.plugin import override +from pynecore.core.plugin.live_provider import LiveProviderPlugin from pynecore.core.syminfo import SymInfo, SymInfoInterval, SymInfoSession from ..types.ohlcv import OHLCV __all__ = ['CCXTProvider'] -known_limits = { +_KNOWN_LIMITS = { 'binance': 1000, 'bitget': { '1w': 12, @@ -28,6 +29,11 @@ 'huobi': 2000, } +_PYNECORE_ONLY_CONFIG_KEYS: frozenset[str] = frozenset({ + 'sandbox', + 
'default_type', +}) + def add_space_before_uppercase(s): return re.sub(r'(? str: @@ -196,9 +225,7 @@ def normalize_symbol(self, symbol: str) -> str: @override def get_list_of_symbols(self, *args, **kwargs) -> list[str]: - """ - Get list of symbols. - """ + """Get list of symbols.""" self._client.load_markets() return self._client.symbols or [] @@ -224,9 +251,7 @@ def _create_24_7_sessions() -> tuple[ @override def update_symbol_info(self) -> SymInfo: - """ - Update symbol info from the exchange. - """ + """Update symbol info from the exchange.""" self._client.load_markets() assert self._client.markets market_details = self._client.markets[self.symbol] @@ -292,7 +317,7 @@ def download_ohlcv(self, time_from: datetime, time_to: datetime, if limit is None: assert self._client.id - limit_config = known_limits.get(self._client.id, 100) + limit_config = _KNOWN_LIMITS.get(self._client.id, 100) if isinstance(limit_config, dict): limit = limit_config.get(self.xchg_timeframe, limit_config.get('default', 100)) @@ -342,7 +367,7 @@ def download_ohlcv(self, time_from: datetime, time_to: datetime, @override async def connect(self) -> None: - """Establish async CCXT connection for live data streaming.""" + """Establish async CCXT Pro connection for live OHLCV streaming.""" try: import ccxt.pro as ccxtpro except ImportError: @@ -352,14 +377,33 @@ async def connect(self) -> None: exchange_name = self._client.id - exchange_config = { - 'enableRateLimit': True, - } + exchange_config: dict[str, Any] = {'enableRateLimit': True} + exchange_config.update(self._exchange_config) + if self.config: - exchange_config.update({k: v for k, v in vars(self.config).items() if v}) + exchange_config.update({ + k: v for k, v in vars(self.config).items() + if v and k not in _PYNECORE_ONLY_CONFIG_KEYS + }) self._async_client = getattr(ccxtpro, exchange_name)(exchange_config) + if self.config and self.config.sandbox: + try: + self._async_client.set_sandbox_mode(True) + except Exception as exc: # noqa: 
BLE001 + import logging + logging.getLogger(__name__).warning( + "Exchange %r does not support sandbox mode: %s", + exchange_name, exc, + ) + + if self.config and self.config.default_type: + self._async_client.options = { + **getattr(self._async_client, 'options', {}), + 'defaultType': self.config.default_type, + } + @override async def disconnect(self) -> None: """Close the async CCXT connection.""" From 5547a187ee41d085136741e95203d9d771172360 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Thu, 16 Apr 2026 19:22:03 +0200 Subject: [PATCH 33/64] fix(core): sync broker orders before live intra-bar script runs Dispatch queued broker orders before each live tick script execution when calc_on_every_tick is enabled. This lets orders from the previous tick reach the broker on the next intra-bar update and makes async fills visible before the script runs. Add coverage for live intra-bar broker sync dispatch timing. --- src/pynecore/core/script_runner.py | 9 ++ .../core/test_026_broker_runner.py | 87 +++++++++++++++++++ 2 files changed, 96 insertions(+) diff --git a/src/pynecore/core/script_runner.py b/src/pynecore/core/script_runner.py index b704ff4..e548ad6 100644 --- a/src/pynecore/core/script_runner.py +++ b/src/pynecore/core/script_runner.py @@ -806,6 +806,12 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): if run_on_every_tick: if var_snapshot and var_snapshot.has_vars: var_snapshot.save() + # Broker sync runs before the script so orders queued by the + # previous tick dispatch now, and async fills from watch_orders + # become visible to this script run via record_fill. 
+ if is_strat and position and self._broker_mode \ + and not lib._strategy_suppressed: + self._process_orders(position) _run_libs_and_main() last_bar_timestamp = candle.timestamp @@ -816,6 +822,9 @@ def _coof_magnified_loop(sub_bars_list, aggregated_candle): if var_snapshot and var_snapshot.has_vars: var_snapshot.restore() function_isolation.reset() + if is_strat and position and self._broker_mode \ + and not lib._strategy_suppressed: + self._process_orders(position) _run_libs_and_main() elif bar_update.is_closed: diff --git a/tests/t00_pynecore/core/test_026_broker_runner.py b/tests/t00_pynecore/core/test_026_broker_runner.py index 29fcb55..568f59f 100644 --- a/tests/t00_pynecore/core/test_026_broker_runner.py +++ b/tests/t00_pynecore/core/test_026_broker_runner.py @@ -394,3 +394,90 @@ def main(): # Pine re-creates the same order every bar; the engine must see it as # unchanged from bar 2 onwards. assert len(plugin.entry_calls) == 1 + + +def __test_live_intra_bar_sync_dispatches_on_next_tick__(tmp_path): + """With ``calc_on_every_tick=True`` in broker+live mode, an order queued + on intra-bar tick N must dispatch via the sync engine on tick N+1, not + only at bar close. + + Without the intra-bar sync, broker ``execute_entry`` would only be called + at bar-close, leaving the exchange blind to the order mid-bar. This also + implies async fills from ``watch_orders`` become visible to the script on + the very next tick instead of one bar late. 
+ """ + import sys + from pynecore import lib as _lib + from pynecore.core.script_runner import LIVE_TRANSITION + + plugin = MockBrokerPlugin(capabilities=ExchangeCapabilities()) + script_path = _write_script(tmp_path, textwrap.dedent('''\ + """ + @pyne + """ + from pynecore.lib import script, strategy + + @script.strategy("IntraBarEntry") + def main(): + strategy.entry("L", strategy.long, qty=1.0, limit=49_000.0) + ''')) + + # One historical bar (strategy suppressed, warmup only), then three live + # updates for the same bar timestamp: open tick, intra-bar tick, close. + historical = _make_bars(1) + live_ts = historical[-1].timestamp + 86_400 + live = [ + OHLCV(timestamp=live_ts, open=50_000.0, high=50_050.0, low=49_950.0, + close=50_000.0, volume=1.0, is_closed=False), # tick 1 (open) + OHLCV(timestamp=live_ts, open=50_000.0, high=50_050.0, low=49_800.0, + close=49_900.0, volume=1.0, is_closed=False), # tick 2 (intra) + OHLCV(timestamp=live_ts, open=50_000.0, high=50_100.0, low=49_800.0, + close=50_050.0, volume=1.0, is_closed=True), # tick 3 (close) + ] + + observations: list[tuple[str, int]] = [] + + def observing_iter(): + # Historical phase. + for h in historical: + yield h + yield LIVE_TRANSITION + # Live tick 1: runner will process it on the next advance. + yield live[0] + # Resumes AFTER tick 1 was processed. + observations.append(('after_tick_1', len(plugin.entry_calls))) + yield live[1] + observations.append(('after_tick_2', len(plugin.entry_calls))) + yield live[2] + observations.append(('after_tick_3', len(plugin.entry_calls))) + + # Set live flags before instantiation so the historical suppression + # and live transition paths run as in production. 
+ for key in [script_path.stem]: + sys.modules.pop(key, None) + setattr(_lib, '_is_live', True) + setattr(_lib, '_strategy_suppressed', True) + + runner = ScriptRunner( + script_path=script_path, + ohlcv_iter=observing_iter(), + syminfo=_make_syminfo(), + broker_plugin=plugin, # type: ignore[arg-type] + ) + runner.script.calc_on_every_tick = True + + # Drain the generator. + list(runner.run_iter()) + + obs = dict(observations) + # Tick 1 opens the bar; script queues the limit entry. Sync ran before + # the script, so no dispatch yet. + assert obs['after_tick_1'] == 0, \ + f"entry should not be dispatched on bar-open tick, got {obs['after_tick_1']}" + # Tick 2 is the decisive assertion: sync must run before the script on + # each intra-bar tick, so the entry queued on tick 1 now dispatches. + assert obs['after_tick_2'] == 1, \ + f"entry must dispatch on next intra-bar tick, got {obs['after_tick_2']}" + # Bar close keeps the dispatch idempotent. + assert obs['after_tick_3'] == 1, \ + f"entry should not re-dispatch at bar close, got {obs['after_tick_3']}" From 82e0699f24123a3d90abf6d866d1ffe9c425a798 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Fri, 17 Apr 2026 22:20:54 +0200 Subject: [PATCH 34/64] feat(broker): production-grade BrokerPlugin roadmap WS1-WS5 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit WS1 — idempotency + unknown-disposition recovery * Deterministic 30-char base36 client_order_id formula (run_tag + pine_id hash + bar_ts_ms + kind + retry_seq) * DispatchEnvelope wraps every intent; execute_*/modify_* take envelope * OrderDispositionUnknownError + pending_verification queue match parked dispatches against get_open_orders each sync * ExchangeCapabilities.client_id_echo / idempotency_native flags * OrderSyncEngine.__init__ requires run_tag, sync() takes bar_ts_ms WS2 — explicit reduce_only on ExitIntent / CloseIntent * Frozen-dataclass invariant: reduce_only is True, False rejected 
at construction — hedge mode deferred to future HedgeBrokerPlugin WS3 — error taxonomy refinement * AuthenticationError(BrokerError) terminal, carries reason * InsufficientMarginError(ExchangeOrderRejectedError) typed reject * AuthenticationFailedEvent for observability * BrokerPlugin._map_exception helper hook (stdlib default) * Startup get_balance() auth probe in script_runner WS4 — OCA cross-intent cascade cancel * OcaType StrEnum canonical + OcaPartialFillPolicy (FILL_CANCELS default) * ExchangeCapabilities.oca_cancel_native suppresses cascade * _cascade_oca_cancel in _route_event fill/partial branch * _cancelled_oca_groups_this_sync per-sync dedup * _remove_pine_order_for_intent mirrors SimPosition._cancel_oca_group * intent_builder validates oca_type against OcaType (unknown -> ValueError) WS5 — partial entry fill -> bracket qty amend (Option A) * _amend_bracket_qty_for_entry_fill reads cumulative filled_qty, dispatches modify_exit(new_qty=filled_qty) on every partial * Over-fill guard caps at EntryIntent.qty with LegRepairFailedEvent * Suppressed when tp_sl_bracket_native=True or no paired exit * _sync_pine_exit_qty keeps pos.exit_orders in sync with amended qty * Optional broker_event_sink callback for structured BrokerEvent emission Live-vs-backtest parity invariant: every event-driven intent mutation also mutates the matching Pine-side Order in BrokerPosition dicts — otherwise the diff engine reverts the mutation on the next sync. 
--- src/pynecore/core/broker/__init__.py | 4 + src/pynecore/core/broker/exceptions.py | 61 ++ src/pynecore/core/broker/idempotency.py | 197 +++++ src/pynecore/core/broker/intent_builder.py | 18 +- src/pynecore/core/broker/models.py | 138 ++++ src/pynecore/core/broker/sync_engine.py | 421 ++++++++++- src/pynecore/core/broker/validation.py | 7 + src/pynecore/core/plugin/broker.py | 103 ++- src/pynecore/core/script_runner.py | 35 +- src/pynecore/lib/strategy/oca.py | 4 + .../transformers/script_requirements.py | 4 + .../core/test_023_script_requirements.py | 49 +- .../core/test_024_intent_builder.py | 39 + .../core/test_025_order_sync_engine.py | 682 ++++++++++++++++-- .../core/test_026_broker_runner.py | 78 +- .../core/test_027_broker_idempotency.py | 303 ++++++++ .../core/test_028_broker_exceptions.py | 104 +++ 17 files changed, 2120 insertions(+), 127 deletions(-) create mode 100644 src/pynecore/core/broker/idempotency.py create mode 100644 tests/t00_pynecore/core/test_027_broker_idempotency.py create mode 100644 tests/t00_pynecore/core/test_028_broker_exceptions.py diff --git a/src/pynecore/core/broker/__init__.py b/src/pynecore/core/broker/__init__.py index 840ecb5..463f91f 100644 --- a/src/pynecore/core/broker/__init__.py +++ b/src/pynecore/core/broker/__init__.py @@ -8,11 +8,13 @@ position tracker (no simulation). 
""" from pynecore.core.broker.exceptions import ( + AuthenticationError, BrokerError, ExchangeCapabilityError, ExchangeConnectionError, ExchangeOrderRejectedError, ExchangeRateLimitError, + InsufficientMarginError, OrderSyncError, UnexpectedCancelError, ) @@ -34,11 +36,13 @@ from pynecore.core.broker.position import BrokerPosition __all__ = [ + 'AuthenticationError', 'BrokerError', 'ExchangeCapabilityError', 'ExchangeConnectionError', 'ExchangeOrderRejectedError', 'ExchangeRateLimitError', + 'InsufficientMarginError', 'OrderSyncError', 'UnexpectedCancelError', 'OrderStatus', diff --git a/src/pynecore/core/broker/exceptions.py b/src/pynecore/core/broker/exceptions.py index 7799b19..92b2b0b 100644 --- a/src/pynecore/core/broker/exceptions.py +++ b/src/pynecore/core/broker/exceptions.py @@ -11,11 +11,14 @@ from pynecore.core.broker.models import ExchangeOrder __all__ = [ + 'AuthenticationError', 'BrokerError', 'ExchangeCapabilityError', 'ExchangeConnectionError', 'ExchangeOrderRejectedError', 'ExchangeRateLimitError', + 'InsufficientMarginError', + 'OrderDispositionUnknownError', 'OrderSyncError', 'UnexpectedCancelError', ] @@ -25,6 +28,24 @@ class BrokerError(RuntimeError): """Base class for all broker-related errors.""" +class AuthenticationError(BrokerError): + """The exchange rejected the plugin's credentials. + + Raised when the exchange returns 401 / 403, reports an invalid API key, + or bans the source IP. Semantics are **terminal** — reconnect cannot + recover, the user must fix the credentials. The Script Runner treats + this as a graceful-stop condition at startup and surfaces an + :class:`~pynecore.core.broker.models.AuthenticationFailedEvent` so the + observability layer can page. + + :ivar reason: Short human-readable cause (echoed on the event). 
+ """ + + def __init__(self, message: str, reason: str = "") -> None: + super().__init__(message) + self.reason = reason or message + + class ExchangeCapabilityError(BrokerError): """The exchange does not support a required feature. @@ -54,6 +75,16 @@ def __init__(self, message: str, order: 'ExchangeOrder | None' = None) -> None: self.order = order +class InsufficientMarginError(ExchangeOrderRejectedError): + """The exchange rejected an order for insufficient margin / balance. + + A typed sub-class of :class:`ExchangeOrderRejectedError` so the risk + layer can pattern-match the reason without string-parsing the exchange + message. Intent-level reject — non-terminal, the runner keeps going and + the strategy can respond (e.g. shrink size, back off). + """ + + class ExchangeRateLimitError(BrokerError): """Exchange rate limit was hit. @@ -65,6 +96,36 @@ def __init__(self, message: str, retry_after: float) -> None: self.retry_after = retry_after +class OrderDispositionUnknownError(BrokerError): + """A dispatch completed without a definitive accept-or-reject from the exchange. + + Raised by a BrokerPlugin when a submission times out mid-flight or the + connection drops before the exchange acknowledges the order — the plugin + genuinely does not know whether the order landed. Semantics are + deliberately distinct from :class:`ExchangeConnectionError` (recoverable + via reconnect) and :class:`ExchangeOrderRejectedError` (the order is known + not to exist): the Order Sync Engine reacts by holding the dispatch in a + pending-verification queue and matching against + :meth:`~pynecore.core.plugin.broker.BrokerPlugin.get_open_orders` on the + next sync, keyed by ``client_order_id``. + + :ivar client_order_id: The id the plugin attempted to submit with. The + sync engine uses it to match the open-orders query back to the + originating dispatch. + :ivar cause: The underlying raw exception, if any, preserved for logging. 
+ """ + + def __init__( + self, + message: str, + client_order_id: str, + cause: Exception | None = None, + ) -> None: + super().__init__(message) + self.client_order_id = client_order_id + self.cause = cause + + class OrderSyncError(BrokerError): """Exchange state diverged from the expected internal state.""" diff --git a/src/pynecore/core/broker/idempotency.py b/src/pynecore/core/broker/idempotency.py new file mode 100644 index 0000000..bcfb60f --- /dev/null +++ b/src/pynecore/core/broker/idempotency.py @@ -0,0 +1,197 @@ +""" +Canonical ``client_order_id`` formula for BrokerPlugin dispatches. + +The broker layer uses a deterministic identifier so that retries, reconnects +and even full process restarts converge on the same exchange-side client +order id — idempotent by construction. + +The format is a 30-character budget, chosen as the lowest common denominator +across supported exchanges: Capital.com ``dealReference`` is ≤ 30 chars; +Binance ``newClientOrderId``, Bybit ``orderLinkId``, OKX ``clOrdId``, +Interactive Brokers ``orderRef`` and Deribit ``label`` all accept at least +that many characters. A single string therefore fits every plugin without +per-exchange branching. + +Format:: + + {run}-{pid}-{bar}-{k}{r} + +===== ========== ============================================================ +Field Width Content +===== ========== ============================================================ +run 4 base36 Session-stable hash of the script source / config. +pid 8 base36 Hash of the Pine-level order id (``pine_id``). +bar 9 base36 Bar open timestamp in milliseconds since the Unix epoch. +k 1 Single-character kind code (entry / TP / SL / close / cancel). +r 1–2 base36 Retry sequence — normally ``0``; bumped only when a prior + attempt is deliberately abandoned. +===== ========== ============================================================ + +Every field but ``r`` is fixed-width, keeping the result deterministic across +processes and Python versions. 
The formula is **pure**: two dispatches for the +same logical order on the same bar always produce identical ids. Exchanges +that enforce client-id uniqueness reject the duplicate outright; exchanges +that do not (Interactive Brokers, Deribit) dedup inside the plugin via a +``get_open_orders`` match on the same id. +""" +from __future__ import annotations + +import hashlib +from typing import Final + +__all__ = [ + 'KIND_ENTRY', + 'KIND_EXIT_TP', + 'KIND_EXIT_SL', + 'KIND_CLOSE', + 'KIND_CANCEL', + 'VALID_KINDS', + 'CLIENT_ORDER_ID_MAX_LEN', + 'RUN_TAG_WIDTH', + 'PINE_ID_HASH_WIDTH', + 'BAR_TS_WIDTH', + 'build_client_order_id', + 'hash_pine_id', + 'make_run_tag', +] + +# === Kind codes (single-character) ======================================= + +KIND_ENTRY: Final[str] = 'e' +KIND_EXIT_TP: Final[str] = 't' +KIND_EXIT_SL: Final[str] = 's' +KIND_CLOSE: Final[str] = 'c' +KIND_CANCEL: Final[str] = 'x' + +VALID_KINDS: Final[frozenset[str]] = frozenset({ + KIND_ENTRY, KIND_EXIT_TP, KIND_EXIT_SL, KIND_CLOSE, KIND_CANCEL, +}) + +# === Width constants ===================================================== + +CLIENT_ORDER_ID_MAX_LEN: Final[int] = 30 +RUN_TAG_WIDTH: Final[int] = 4 +PINE_ID_HASH_WIDTH: Final[int] = 8 +BAR_TS_WIDTH: Final[int] = 9 + +# === Base36 encoding ===================================================== + +_BASE36_DIGITS: Final[str] = '0123456789abcdefghijklmnopqrstuvwxyz' + + +def _to_base36(value: int, *, width: int | None = None) -> str: + """Encode a non-negative integer in lower-case base36. + + :param value: Non-negative integer to encode. + :param width: When given, left-pad with ``'0'`` to this width. The + function never truncates — an encoded value wider than ``width`` + raises :class:`ValueError`, because silent truncation would break + determinism on overflow. + :raises ValueError: On negative input or width overflow. 
+ """ + if value < 0: + raise ValueError(f"value must be non-negative, got {value}") + if value == 0: + encoded = '0' + else: + digits: list[str] = [] + n = value + while n: + digits.append(_BASE36_DIGITS[n % 36]) + n //= 36 + encoded = ''.join(reversed(digits)) + if width is not None: + if len(encoded) > width: + raise ValueError( + f"encoded value {encoded!r} exceeds requested width {width}", + ) + encoded = encoded.rjust(width, '0') + return encoded + + +# === Public helpers ====================================================== + +def hash_pine_id(pine_id: str) -> str: + """Return an 8-character base36 hash of a Pine-level order id. + + Pine ids can contain arbitrary characters (spaces, slashes, unicode) and + arbitrary lengths; hashing fits the budget and neutralises odd input. + 40 bits of sha256 output are encoded, yielding ~1.1e12 distinct slots + before a birthday collision becomes plausible — orders of magnitude above + any realistic per-strategy id count. + + :param pine_id: The Pine order identifier (e.g. ``"Long"``, ``"TP/Long"``). + :return: Exactly 8 lower-case base36 characters. + """ + digest = hashlib.sha256(pine_id.encode('utf-8')).digest() + # 40 bits → ceil(log36(2**40)) == 8 chars. A 41st bit would overflow 8. + value = int.from_bytes(digest[:5], 'big') + return _to_base36(value, width=PINE_ID_HASH_WIDTH) + + +def make_run_tag(script_source: str) -> str: + """Return a 4-character base36 session tag for a script source. + + Two runs of the same script under the same config yield the same tag, so + a restarted process reconstructs the same ``client_order_id`` for every + logical intent — the exchange then dedups the retry as a duplicate. + + :param script_source: The Pine script source text (or any string the + caller wants to participate in session identity — add a config hash + to the input if config changes should invalidate the tag). + :return: Exactly 4 lower-case base36 characters. 
+ """ + digest = hashlib.sha256(script_source.encode('utf-8')).digest() + # 20 bits → ceil(log36(2**20)) == 4 chars. Keeps ~1M distinct tags. + value = int.from_bytes(digest[:3], 'big') & 0x0FFFFF + return _to_base36(value, width=RUN_TAG_WIDTH) + + +def build_client_order_id( + *, + run_tag: str, + pine_id: str, + bar_ts_ms: int, + kind: str, + retry_seq: int = 0, +) -> str: + """Build the canonical client-order-id for a broker dispatch. + + :param run_tag: 4-char base36 session tag (see :func:`make_run_tag`). + :param pine_id: Pine-level order identifier; hashed internally. + :param bar_ts_ms: Bar open timestamp in milliseconds since the Unix epoch. + Must be non-negative. + :param kind: One of :data:`KIND_ENTRY`, :data:`KIND_EXIT_TP`, + :data:`KIND_EXIT_SL`, :data:`KIND_CLOSE`, :data:`KIND_CANCEL`. + :param retry_seq: Bumped only when the sync engine deliberately abandons + a prior attempt (e.g. the exchange never acknowledged the original + dispatch and the recovery timeout expired). ``0`` by default. + :raises ValueError: On malformed ``run_tag`` / ``kind``, negative ``bar_ts_ms`` + / ``retry_seq``, or when the formatted id would exceed + :data:`CLIENT_ORDER_ID_MAX_LEN` (indicates ``retry_seq`` overflow). 
+ """ + if len(run_tag) != RUN_TAG_WIDTH or not run_tag.isascii() or not run_tag.isalnum(): + raise ValueError( + f"run_tag must be {RUN_TAG_WIDTH} alphanumeric ASCII chars, " + f"got {run_tag!r}", + ) + if kind not in VALID_KINDS: + raise ValueError( + f"kind must be one of {sorted(VALID_KINDS)}, got {kind!r}", + ) + if bar_ts_ms < 0: + raise ValueError(f"bar_ts_ms must be non-negative, got {bar_ts_ms}") + if retry_seq < 0: + raise ValueError(f"retry_seq must be non-negative, got {retry_seq}") + + pid = hash_pine_id(pine_id) + bar = _to_base36(bar_ts_ms, width=BAR_TS_WIDTH) + retry = _to_base36(retry_seq) + + result = f"{run_tag}-{pid}-{bar}-{kind}{retry}" + if len(result) > CLIENT_ORDER_ID_MAX_LEN: + raise ValueError( + f"client_order_id exceeds {CLIENT_ORDER_ID_MAX_LEN} chars " + f"(got {len(result)}); retry_seq={retry_seq} overflows the budget", + ) + return result diff --git a/src/pynecore/core/broker/intent_builder.py b/src/pynecore/core/broker/intent_builder.py index 1c20d78..4be5212 100644 --- a/src/pynecore/core/broker/intent_builder.py +++ b/src/pynecore/core/broker/intent_builder.py @@ -23,6 +23,7 @@ EntryIntent, ExitIntent, CloseIntent, + OcaType, OrderType, ) from pynecore.lib.strategy import ( @@ -78,10 +79,25 @@ def _coerce_oca(order: Order) -> tuple[str | None, str | None]: even when no OCA participation is requested. The intent layer uses ``None`` to mean "not in an OCA group" — so only emit ``oca_type`` when the order actually names a group. + + Unknown ``oca_type`` strings are rejected here rather than silently passed + through to the sync engine, where a typo would disable cascade cancel + without any diagnostic. The accepted values are exactly the members of + :class:`OcaType`. 
""" if order.oca_name is None: return None, None - return order.oca_name, str(order.oca_type) if order.oca_type is not None else None + if order.oca_type is None: + return order.oca_name, None + oca_type_str = str(order.oca_type) + try: + OcaType(oca_type_str) + except ValueError as exc: + raise ValueError( + f"unknown oca_type {oca_type_str!r}; expected one of " + f"{[m.value for m in OcaType]}", + ) from exc + return order.oca_name, oca_type_str def build_entry_intent(order: Order, symbol: str) -> EntryIntent: diff --git a/src/pynecore/core/broker/models.py b/src/pynecore/core/broker/models.py index 5a1e4a8..7ae520d 100644 --- a/src/pynecore/core/broker/models.py +++ b/src/pynecore/core/broker/models.py @@ -13,10 +13,14 @@ from dataclasses import dataclass from enum import StrEnum +from pynecore.core.broker.idempotency import build_client_order_id + __all__ = [ 'OrderStatus', 'OrderType', 'LegType', + 'OcaType', + 'OcaPartialFillPolicy', 'ExchangeOrder', 'OrderEvent', 'ExchangePosition', @@ -25,9 +29,11 @@ 'ExitIntent', 'CloseIntent', 'CancelIntent', + 'DispatchEnvelope', 'ScriptRequirements', 'InterceptorResult', 'BrokerEvent', + 'AuthenticationFailedEvent', 'BracketRegisteredEvent', 'LegPartialRepairedEvent', 'LegRepairFailedEvent', @@ -62,6 +68,40 @@ class LegType(StrEnum): CLOSE = "close" +class OcaType(StrEnum): + """Canonical OCA semantics for the sync engine. + + The Pine-level literal values (``strategy.oca.cancel`` / ``.reduce`` / + ``.none``) are plain strings for script compatibility; this enum is the + single authority the sync engine and intent builder match against. Adding + a new OCA semantic therefore requires exactly one source edit, not a + scattered grep across the intent-builder / sync-engine / validator. + """ + CANCEL = "cancel" + REDUCE = "reduce" + NONE = "none" + + +class OcaPartialFillPolicy(StrEnum): + """How the sync engine treats a *partial* fill for OCA-cancel cascading. 
+ + On a full fill the behaviour is unambiguous: sibling orders in the same + OCA-cancel group must be cancelled. Partial fills are the grey zone — some + exchanges re-fill the remainder at a better price (risking sibling fills + too), others do not. The policy lets the user pick: + + - :data:`FILL_CANCELS` (default): a partial fill already commits the script + to this side, so sibling cancel triggers immediately. Matches the + Pine backtester, where the first touch on any leg wins. + - :data:`FULL_FILL_ONLY`: wait until the leg is fully filled. Useful when + the user prefers siblings to stay live in case the first leg partial is + followed by a same-bar reversal that would otherwise lock in a + sub-optimal entry. + """ + FILL_CANCELS = "fill_cancels" + FULL_FILL_ONLY = "full_fill_only" + + # === Exchange state snapshots === @dataclass @@ -147,6 +187,14 @@ class ExchangeCapabilities: # latency budgeting, and per-exchange reconcile strategy. tp_sl_bracket: bool = False tp_sl_bracket_native: bool = False + # Native OCA-cancel groups: the plugin has registered the OCA group with + # the exchange such that the exchange itself cancels sibling orders when + # one fills (Bybit bracket, OKX algo orders, ...). When True the sync + # engine SUPPRESSES its own cascade-cancel logic — the exchange is + # authoritative. When False (the default, and the case for the vast + # majority of exchanges), the engine synthesises CancelIntent dispatches + # for surviving siblings the moment a fill event lands. + oca_cancel_native: bool = False # Order management amend_order: bool = False cancel_all: bool = False @@ -154,6 +202,17 @@ class ExchangeCapabilities: # Streaming & position watch_orders: bool = False fetch_position: bool = False + # Idempotency. 
``client_id_echo`` means the plugin can attach a client-side + # order id that the exchange returns verbatim on ``get_open_orders`` — the + # foundation of the restart-safe recovery path in + # :class:`~pynecore.core.broker.sync_engine.OrderSyncEngine`. Without it the + # engine cannot re-associate orders after a timeout or restart and live + # scripts are rejected at startup. ``idempotency_native`` additionally + # promises that the exchange itself rejects duplicates of the same id + # (Binance/Bybit/OKX/Capital.com); ``False`` means the plugin must dedup + # client-side before each dispatch (Interactive Brokers, Deribit). + client_id_echo: bool = False + idempotency_native: bool = False # === Pine Script intents === @@ -205,6 +264,21 @@ class ExitIntent: comment_loss: str | None = None comment_trailing: str | None = None alert_message: str | None = None + # One-way Pine semantics: every strategy.exit is reduce-only by definition. + # A manual position close while the exit is pending must not flip the + # book back to the other side. The plugin must pass this to the exchange + # (Binance/Bybit/OKX ``reduceOnly``, Capital.com force-close, etc.). + # ``False`` is rejected at construction — a future ``HedgeBrokerPlugin`` + # subclass will introduce a separate hedge-aware intent rather than flip + # this flag. + reduce_only: bool = True + + def __post_init__(self) -> None: + if self.reduce_only is not True: + raise ValueError( + "ExitIntent.reduce_only must be True — one-way Pine semantics. " + "Hedge-mode intents belong on a future HedgeBrokerPlugin subclass." + ) @property def intent_key(self) -> str: @@ -241,6 +315,15 @@ class CloseIntent: immediately: bool = False comment: str | None = None alert_message: str | None = None + # Same invariant as :attr:`ExitIntent.reduce_only` — a close can never + # flip the book to the other side in one-way Pine mode. 
+ reduce_only: bool = True + + def __post_init__(self) -> None: + if self.reduce_only is not True: + raise ValueError( + "CloseIntent.reduce_only must be True — one-way Pine semantics." + ) @property def intent_key(self) -> str: @@ -269,6 +352,44 @@ def intent_key(self) -> str: return self.pine_id +# === Dispatch envelope === + +@dataclass(frozen=True) +class DispatchEnvelope: + """Broker dispatch envelope — an intent plus idempotency metadata. + + The :class:`~pynecore.core.broker.sync_engine.OrderSyncEngine` wraps every + intent in a fresh envelope before handing it to the :class:`BrokerPlugin`. + Plugins call :meth:`client_order_id` for each exchange order they place; + the result is deterministic, so a retry or restart regenerates the same id + and the exchange dedups the duplicate. + + :ivar intent: The Pine-level intent this dispatch carries. + :ivar run_tag: 4-char base36 session tag (see :func:`make_run_tag`). + :ivar bar_ts_ms: Bar open timestamp (ms since Unix epoch). + :ivar retry_seq: Bumped by the recovery path only when a prior attempt is + deliberately abandoned — defaults to ``0``. + """ + intent: 'EntryIntent | ExitIntent | CloseIntent | CancelIntent' + run_tag: str + bar_ts_ms: int + retry_seq: int = 0 + + def client_order_id(self, kind: str) -> str: + """Allocate the canonical client-order-id for a given leg kind. + + :param kind: One of the ``KIND_*`` constants from + :mod:`pynecore.core.broker.idempotency`. 
+ """ + return build_client_order_id( + run_tag=self.run_tag, + pine_id=self.intent.pine_id, + bar_ts_ms=self.bar_ts_ms, + kind=kind, + retry_seq=self.retry_seq, + ) + + # === Compile-time detected script requirements === @dataclass @@ -281,6 +402,12 @@ class ScriptRequirements: tp_sl_bracket: bool = False # strategy.exit() with BOTH limit+stop or profit+loss trailing_stop: bool = False strategy_order: bool = False # strategy.order() — no pyramiding limit + # True if the script calls any of ``strategy.exit`` / ``strategy.close`` / + # ``strategy.close_all``. Every such call requires the exchange to honour + # reduce-only semantics — a manual position close otherwise lets the + # still-pending exit flip the book the other way. The validator turns + # this into a hard reject when ``caps.reduce_only=False``. + exit_orders: bool = False # === Interceptor (Order Sync Engine extension point) === @@ -297,6 +424,17 @@ class BrokerEvent: """ +@dataclass +class AuthenticationFailedEvent(BrokerEvent): + """Emitted when the plugin's credentials are rejected by the exchange. + + ``reason`` is the short human-readable cause (``AuthenticationError.reason``); + the runner surfaces the event to observability sinks and then performs a + graceful stop — reconnect cannot gain access with wrong credentials. 
+ """ + reason: str + + @dataclass class BracketRegisteredEvent(BrokerEvent): pine_id: str diff --git a/src/pynecore/core/broker/sync_engine.py b/src/pynecore/core/broker/sync_engine.py index 5c2e95e..ec43f9f 100644 --- a/src/pynecore/core/broker/sync_engine.py +++ b/src/pynecore/core/broker/sync_engine.py @@ -30,14 +30,21 @@ from collections.abc import Callable from typing import TYPE_CHECKING, Any +from pynecore.core.broker.exceptions import OrderDispositionUnknownError from pynecore.core.broker.intent_builder import build_intents from pynecore.core.broker.models import ( + BrokerEvent, CancelIntent, CloseIntent, + DispatchEnvelope, EntryIntent, ExitIntent, InterceptorResult, + LegPartialRepairedEvent, + LegRepairFailedEvent, LegType, + OcaPartialFillPolicy, + OcaType, OrderEvent, ) @@ -58,6 +65,10 @@ class OrderSyncEngine: :param broker: The concrete :class:`BrokerPlugin` instance to drive. :param position: The live :class:`BrokerPosition` this engine updates. :param symbol: The trading symbol (as the plugin expects it). + :param run_tag: 4-char base36 session tag (see :func:`make_run_tag`) — seeds + every :class:`DispatchEnvelope` this engine builds, so restarting the + same script under the same config regenerates the same + ``client_order_id`` values and the exchange dedups duplicates. :param event_loop: A running ``asyncio`` loop on which to execute the broker's coroutines. Pass ``None`` for unit tests — each broker call will then spin up a transient loop via ``asyncio.run``. @@ -67,6 +78,15 @@ class OrderSyncEngine: reconciliation every N :meth:`sync` calls. :param mintick: Symbol minimum tick — used to resolve tick-based exits (``profit=`` / ``loss=`` / ``trail_points=``) into absolute prices. + :param oca_partial_fill_policy: How OCA-cancel groups react to partial + fills (see :class:`OcaPartialFillPolicy`). Defaults to + :data:`OcaPartialFillPolicy.FILL_CANCELS` — matches the Pine + backtester, which treats the first touch as the winning leg. 
+ :param broker_event_sink: Optional callable invoked for structured + broker-side :class:`BrokerEvent` objects (bracket repairs, overfill + guards, ...). ``None`` disables emission — useful in tests and + single-shot backtests; production wires the runner's observability + bus here. """ def __init__( @@ -75,25 +95,46 @@ def __init__( position: 'BrokerPosition', symbol: str, *, + run_tag: str, event_loop: asyncio.AbstractEventLoop | None = None, execute_timeout: float = 30.0, reconcile_every_n_syncs: int = 0, mintick: float = 0.01, + oca_partial_fill_policy: OcaPartialFillPolicy = OcaPartialFillPolicy.FILL_CANCELS, + broker_event_sink: Callable[[BrokerEvent], None] | None = None, ) -> None: self._broker = broker self._position = position self._symbol = symbol + self._run_tag = run_tag self._loop = event_loop self._timeout = execute_timeout self._reconcile_every = reconcile_every_n_syncs self._mintick = mintick + self._oca_partial_policy = oca_partial_fill_policy + self._broker_event_sink = broker_event_sink + # Capabilities are declared once at plugin startup — cache the lookup + # so the cascade-cancel fast path does not pay a method call per event. + caps = broker.get_capabilities() + self._oca_cancel_native = bool(getattr(caps, 'oca_cancel_native', False)) + self._tp_sl_bracket_native = bool( + getattr(caps, 'tp_sl_bracket_native', False), + ) self._active_intents: dict[str, Intent] = {} self._order_mapping: dict[str, list[str]] = {} + self._envelopes: dict[str, DispatchEnvelope] = {} + self._pending_verification: dict[str, DispatchEnvelope] = {} self._deferred_exits: dict[str, ExitIntent] = {} self._event_queue: queue.Queue[OrderEvent] = queue.Queue() self._interceptors: list[Callable[[Intent], InterceptorResult]] = [] self._sync_count = 0 + self._current_bar_ts_ms: int = 0 + # OCA groups already processed inside the current :meth:`sync` pass. 
+        # Cleared at the start of every sync so a fresh bar re-enables cascade,
+        # but kept stable within the pass so two fills in the same group do
+        # not emit duplicate CancelIntents.
+        self._cancelled_oca_groups_this_sync: set[str] = set()
 
     # === Public API ===
 
@@ -109,6 +150,11 @@ def deferred_exits(self) -> dict[str, ExitIntent]:
     def order_mapping(self) -> dict[str, list[str]]:
         return self._order_mapping
 
+    @property
+    def pending_verification(self) -> dict[str, DispatchEnvelope]:
+        """Envelopes whose exchange-side disposition is still unknown."""
+        return self._pending_verification
+
     def register_interceptor(
         self, fn: Callable[[Intent], InterceptorResult],
     ) -> None:
@@ -154,15 +200,22 @@ async def run_event_stream(self) -> None:
                 _log.exception("watch_orders stream terminated with an error")
                 raise
 
-    def sync(self) -> None:
+    def sync(self, bar_ts_ms: int) -> None:
         """Run one diff/dispatch cycle.
 
         Reads the Pine order book from ``position.entry_orders`` and
         ``position.exit_orders``, resolves tick-deferred exits where the
         referenced entry price is now known, and dispatches whatever changed
         to the broker plugin.
+
+        :param bar_ts_ms: Current bar open timestamp in milliseconds — seeds
+            every :class:`DispatchEnvelope` built in this cycle. The caller
+            (typically the script runner) sources this from ``lib.last_bar_time``.
         """
+        self._current_bar_ts_ms = bar_ts_ms
+        self._cancelled_oca_groups_this_sync.clear()
         self._drain_events()
+        self._verify_pending_dispatches()
 
         raw = build_intents(
             self._position.entry_orders,
@@ -187,6 +240,39 @@ def sync(self) -> None:
         if self._reconcile_every and self._sync_count % self._reconcile_every == 0:
             self.reconcile()
 
+    def _verify_pending_dispatches(self) -> None:
+        """Match parked timeouts against the exchange's open-orders view.
+
+        When a plugin raises :class:`OrderDispositionUnknownError` the sync
+        engine cannot tell whether the order landed on the exchange; it parks
+        the envelope here. Every subsequent :meth:`sync` calls this method
+        first: it queries ``get_open_orders`` and, for each pending
+        ``client_order_id`` that now appears on the exchange, promotes the
+        envelope back into ``_order_mapping`` without re-dispatching.
+
+        A pending entry that does *not* show up stays parked — the engine
+        deliberately does not re-dispatch because the original may still land
+        (slow network round-trip). The user can inspect
+        :attr:`pending_verification` to surface stuck entries.
+        """
+        if not self._pending_verification:
+            return
+        orders = self._run_async(self._broker.get_open_orders(self._symbol))
+        by_coid = {o.client_order_id: o for o in orders if o.client_order_id}
+        for coid in list(self._pending_verification):
+            order = by_coid.get(coid)
+            if order is None:
+                continue
+            envelope = self._pending_verification.pop(coid)
+            key = envelope.intent.intent_key
+            current = self._order_mapping.setdefault(key, [])
+            if order.id not in current:
+                current.append(order.id)
+            _log.info(
+                "recovered pending dispatch %s -> exchange order %s "
+                "for intent %s", coid, order.id, key,
+            )
+
     def reconcile(self) -> None:
         """Read-side state reconciliation with the exchange.
@@ -241,6 +327,8 @@ def _route_event(self, event: OrderEvent) -> None: self._position.record_fill(event) if event.leg_type == LegType.ENTRY and event.pine_id: self._resolve_deferred_for_entry(event.pine_id) + self._amend_bracket_qty_for_entry_fill(event) + self._cascade_oca_cancel(event) elif t == 'cancelled': key = self._find_key_for_order_id(event.order.id) if key is not None: @@ -250,6 +338,7 @@ def _route_event(self, event: OrderEvent) -> None: ) self._order_mapping.pop(key, None) self._active_intents.pop(key, None) + self._envelopes.pop(key, None) elif t == 'rejected': key = self._find_key_for_order_id(event.order.id) if key is not None: @@ -259,6 +348,7 @@ def _route_event(self, event: OrderEvent) -> None: ) self._order_mapping.pop(key, None) self._active_intents.pop(key, None) + self._envelopes.pop(key, None) def _find_key_for_order_id(self, order_id: str) -> str | None: for key, ids in self._order_mapping.items(): @@ -278,6 +368,222 @@ def _resolve_deferred_for_entry(self, entry_id: str) -> None: self._dispatch_new(resolved) self._active_intents[resolved.intent_key] = resolved + # === OCA cascade cancel === + + def _cascade_oca_cancel(self, event: OrderEvent) -> None: + """Cancel OCA-cancel siblings of a freshly filled intent. + + Pine semantics: an ``oca_type='cancel'`` group keeps exactly one live + leg at a time. The Pine backtester enforces this at fill time; this + method is the live-trading equivalent — without it, a fill on leg A + leaves leg B open until the next bar's diff pass, and a same-bar + reversal may fill B too. + + The cascade is **suppressed** when: + + - The plugin declared ``oca_cancel_native=True`` — the exchange + registers and cancels the group natively. + - The filled intent has no OCA group, or its type is not ``cancel``. + (``reduce`` groups amend quantities on fill; that belongs to the + partial-fill qty-amend workstream, not here.) 
+ - The partial-fill policy is :data:`OcaPartialFillPolicy.FULL_FILL_ONLY` + and the event is ``partial``. + - The group was already processed in this sync — prevents a + double-fill (e.g. TP and entry both filling on the same bar) from + emitting duplicate cancels. + """ + if self._oca_cancel_native: + return + if event.event_type == 'partial' and ( + self._oca_partial_policy is OcaPartialFillPolicy.FULL_FILL_ONLY + ): + return + + filled_key = self._filled_intent_key(event) + if filled_key is None: + return + filled_intent = self._active_intents.get(filled_key) + if filled_intent is None: + return + oca_name = getattr(filled_intent, 'oca_name', None) + oca_type = getattr(filled_intent, 'oca_type', None) + if not oca_name or oca_type != OcaType.CANCEL.value: + return + if oca_name in self._cancelled_oca_groups_this_sync: + return + self._cancelled_oca_groups_this_sync.add(oca_name) + + siblings = [ + (key, intent) + for key, intent in list(self._active_intents.items()) + if key != filled_key + and getattr(intent, 'oca_name', None) == oca_name + and getattr(intent, 'oca_type', None) == OcaType.CANCEL.value + ] + for key, intent in siblings: + _log.info( + "OCA cascade cancel: fill on %s cancels sibling %s in group %r", + filled_key, key, oca_name, + ) + self._active_intents.pop(key, None) + self._remove_pine_order_for_intent(intent) + self._dispatch_cancel(intent) + + def _remove_pine_order_for_intent(self, intent: Intent) -> None: + """Delete the Pine-side :class:`Order` backing ``intent``. + + Mirrors :meth:`SimPosition._cancel_oca_group` for the live path: once + an OCA-cancel sibling is cancelled exchange-side, the Pine-level order + book must drop it too — otherwise the next :meth:`sync` rebuilds an + intent from the stale entry and re-dispatches onto the now-cancelled + exchange state. 
+ """ + entry_orders = getattr(self._position, 'entry_orders', None) + exit_orders = getattr(self._position, 'exit_orders', None) + if isinstance(intent, EntryIntent) and entry_orders is not None: + entry_orders.pop(intent.pine_id, None) + elif isinstance(intent, ExitIntent) and exit_orders is not None: + exit_orders.pop(intent.from_entry, None) + + def _filled_intent_key(self, event: OrderEvent) -> str | None: + """Resolve a fill event to the ``intent_key`` of the owning intent. + + Exits track identity as ``(pine_id, from_entry)``; entries / closes + as just ``pine_id``. An event coming from a plugin that did not tag + the Pine identity cannot be routed and the method returns ``None``. + """ + if event.pine_id is None: + return None + if event.leg_type in (LegType.TAKE_PROFIT, LegType.STOP_LOSS): + if event.from_entry is None: + return None + return f"{event.pine_id}\0{event.from_entry}" + return event.pine_id + + # === Partial entry fill → bracket qty amend (WS5, Option A) === + + def _amend_bracket_qty_for_entry_fill(self, event: OrderEvent) -> None: + """Track partial entry fills with an incremental bracket qty amend. + + Canonical semantics (Option A): the bracket's qty follows the entry's + cumulative ``filled_qty`` — every partial fill dispatches a + :meth:`BrokerPlugin.modify_exit` with ``new_qty = filled_qty``. This + mirrors the Pine backtester, where exits exist against the actually + filled entry portion; it also guarantees that if the entry ends with + unfilled remainder (cancel/expire), the bracket is not over-sized. + + Suppressed when: + + - The plugin declared ``tp_sl_bracket_native=True`` — the exchange + tracks partial entry fills natively (Bybit V5 attached TP/SL). + - No bracket is active for ``event.pine_id`` (plain entry, no exit). + - The current ExitIntent already matches the target qty — avoids + redundant dispatch churn. 
+ + Over-fill guard: if ``event.order.filled_qty`` exceeds the entry + intent's intended qty (exchange rounding or adversarial event), the + amend is capped at the intended qty and a + :class:`LegRepairFailedEvent` is emitted so the runner can surface + the anomaly. + """ + if self._tp_sl_bracket_native: + return + pine_id = event.pine_id + if pine_id is None: + return + + filled_qty = event.order.filled_qty + if filled_qty <= 0.0: + return + + bracket_key: str | None = None + bracket_intent: ExitIntent | None = None + for key, intent in self._active_intents.items(): + if isinstance(intent, ExitIntent) and intent.from_entry == pine_id: + bracket_key = key + bracket_intent = intent + break + if bracket_key is None or bracket_intent is None: + return + + entry_intent = self._active_intents.get(pine_id) + target_qty = filled_qty + overfill = False + if isinstance(entry_intent, EntryIntent) and filled_qty > entry_intent.qty: + target_qty = entry_intent.qty + overfill = True + + if target_qty == bracket_intent.qty: + if overfill: + self._emit_overfill_event( + bracket_intent, entry_intent, filled_qty, + ) + return + + old_qty = bracket_intent.qty + new_intent = dataclasses.replace(bracket_intent, qty=target_qty) + self._dispatch_modify(bracket_intent, new_intent) + self._active_intents[bracket_key] = new_intent + self._sync_pine_exit_qty(new_intent, target_qty) + + self._emit_broker_event(LegPartialRepairedEvent( + pine_id=new_intent.pine_id, + from_entry=new_intent.from_entry, + leg='bracket', + generation=0, + old_qty=old_qty, + new_qty=target_qty, + )) + if overfill: + self._emit_overfill_event(new_intent, entry_intent, filled_qty) + + def _sync_pine_exit_qty(self, bracket: ExitIntent, new_qty: float) -> None: + """Mutate the Pine-side exit :class:`Order` to match the amended qty. 
+ + Without this, the next :meth:`sync` rebuilds the ExitIntent from the + unchanged ``pos.exit_orders[from_entry]`` (whose ``size`` still equals + the original full qty), the diff engine sees a mismatch against the + amended active intent, and emits a *second* ``modify_exit`` back to + the original qty — undoing the partial-fill cascade we just did. + """ + exit_orders = getattr(self._position, 'exit_orders', None) + if exit_orders is None: + return + order = exit_orders.get(bracket.from_entry) + if order is None: + return + sign = 1.0 if order.size >= 0.0 else -1.0 + order.size = sign * new_qty + order.sign = sign if new_qty > 0.0 else 0.0 + + def _emit_overfill_event( + self, + bracket: ExitIntent, + entry: 'Intent | None', + filled_qty: float, + ) -> None: + entry_qty = entry.qty if isinstance(entry, EntryIntent) else None + self._emit_broker_event(LegRepairFailedEvent( + pine_id=bracket.pine_id, + from_entry=bracket.from_entry, + leg='bracket', + reason=( + f"overfill detected: filled_qty={filled_qty} exceeds " + f"entry qty={entry_qty}" + ), + action_taken='capped', + )) + + def _emit_broker_event(self, event: BrokerEvent) -> None: + """Forward a structured broker event to the registered sink, if any.""" + if self._broker_event_sink is None: + _log.info("broker event (no sink): %r", event) + return + try: + self._broker_event_sink(event) + except Exception: # pragma: no cover — defensive + _log.exception("broker_event_sink raised for event %r", event) + # === Tick resolution === def _resolve_ticks(self, intent: Intent) -> Intent: @@ -381,28 +687,87 @@ def _diff_and_dispatch(self, intents: list[Intent]) -> None: self._active_intents[key] = intent # else: unchanged — skip + def _build_envelope(self, intent: Intent) -> DispatchEnvelope: + """Wrap an intent in a :class:`DispatchEnvelope`. + + The first envelope for a given ``intent_key`` is pinned on creation + (bar_ts_ms, retry_seq frozen). 
Subsequent modifies re-use the same + anchor so the ``client_order_id`` stays stable across amend cycles — + that stability is what lets the exchange recognise a retry as a + duplicate rather than a new order. + """ + existing = self._envelopes.get(intent.intent_key) + if existing is not None: + return DispatchEnvelope( + intent=intent, + run_tag=existing.run_tag, + bar_ts_ms=existing.bar_ts_ms, + retry_seq=existing.retry_seq, + ) + envelope = DispatchEnvelope( + intent=intent, + run_tag=self._run_tag, + bar_ts_ms=self._current_bar_ts_ms, + retry_seq=0, + ) + self._envelopes[intent.intent_key] = envelope + return envelope + + def _build_cancel_envelope(self, cancel: CancelIntent) -> DispatchEnvelope: + return DispatchEnvelope( + intent=cancel, + run_tag=self._run_tag, + bar_ts_ms=self._current_bar_ts_ms, + retry_seq=0, + ) + + def _park_pending( + self, envelope: DispatchEnvelope, error: OrderDispositionUnknownError, + ) -> None: + """Stash a dispatch whose exchange disposition the plugin could not confirm. + + :meth:`_verify_pending_dispatches` reruns ``get_open_orders`` on each + subsequent sync and promotes the envelope back to + ``_order_mapping`` once the order shows up. 
+ """ + self._pending_verification[error.client_order_id] = envelope + _log.warning( + "dispatch for %s ended with unknown disposition " + "(client_order_id=%s); will verify on next sync: %s", + envelope.intent.intent_key, error.client_order_id, error, + ) + def _dispatch_new(self, intent: Intent) -> None: - if isinstance(intent, EntryIntent): - orders = self._run_async(self._broker.execute_entry(intent)) - self._order_mapping[intent.intent_key] = [o.id for o in orders] - elif isinstance(intent, ExitIntent): - orders = self._run_async(self._broker.execute_exit(intent)) - self._order_mapping[intent.intent_key] = [o.id for o in orders] - elif isinstance(intent, CloseIntent): - order = self._run_async(self._broker.execute_close(intent)) - self._order_mapping[intent.intent_key] = [order.id] + envelope = self._build_envelope(intent) + try: + if isinstance(intent, EntryIntent): + orders = self._run_async(self._broker.execute_entry(envelope)) + self._order_mapping[intent.intent_key] = [o.id for o in orders] + elif isinstance(intent, ExitIntent): + orders = self._run_async(self._broker.execute_exit(envelope)) + self._order_mapping[intent.intent_key] = [o.id for o in orders] + elif isinstance(intent, CloseIntent): + order = self._run_async(self._broker.execute_close(envelope)) + self._order_mapping[intent.intent_key] = [order.id] + except OrderDispositionUnknownError as e: + self._park_pending(envelope, e) def _dispatch_modify(self, old: Intent, new: Intent) -> None: - if isinstance(new, EntryIntent) and isinstance(old, EntryIntent): - orders = self._run_async(self._broker.modify_entry(old, new)) - self._order_mapping[new.intent_key] = [o.id for o in orders] - elif isinstance(new, ExitIntent) and isinstance(old, ExitIntent): - orders = self._run_async(self._broker.modify_exit(old, new)) - self._order_mapping[new.intent_key] = [o.id for o in orders] - else: - # CloseIntent or mismatched kinds — cancel + re-execute. 
- self._dispatch_cancel(old) - self._dispatch_new(new) + old_env = self._build_envelope(old) + new_env = self._build_envelope(new) + try: + if isinstance(new, EntryIntent) and isinstance(old, EntryIntent): + orders = self._run_async(self._broker.modify_entry(old_env, new_env)) + self._order_mapping[new.intent_key] = [o.id for o in orders] + elif isinstance(new, ExitIntent) and isinstance(old, ExitIntent): + orders = self._run_async(self._broker.modify_exit(old_env, new_env)) + self._order_mapping[new.intent_key] = [o.id for o in orders] + else: + # CloseIntent or mismatched kinds — cancel + re-execute. + self._dispatch_cancel(old) + self._dispatch_new(new) + except OrderDispositionUnknownError as e: + self._park_pending(new_env, e) def _dispatch_cancel(self, old: Intent) -> None: if isinstance(old, EntryIntent): @@ -416,9 +781,23 @@ def _dispatch_cancel(self, old: Intent) -> None: else: # CloseIntent is immediate market — nothing to cancel. self._order_mapping.pop(old.intent_key, None) + self._envelopes.pop(old.intent_key, None) return - self._run_async(self._broker.execute_cancel(cancel)) + cancel_envelope = self._build_cancel_envelope(cancel) + try: + self._run_async(self._broker.execute_cancel(cancel_envelope)) + except OrderDispositionUnknownError as e: + # A timed-out cancel leaves the exchange-side order in ambiguous + # state. The next reconcile() pass observes whether the order is + # still live; if so, a subsequent cancel attempt hits the same + # deterministic id and the exchange treats it idempotently. 
+ _log.warning( + "cancel dispatch for %s timed out " + "(client_order_id=%s); next reconcile will verify: %s", + old.intent_key, e.client_order_id, e, + ) self._order_mapping.pop(old.intent_key, None) + self._envelopes.pop(old.intent_key, None) # === Async bridge === diff --git a/src/pynecore/core/broker/validation.py b/src/pynecore/core/broker/validation.py index 4592421..4d87860 100644 --- a/src/pynecore/core/broker/validation.py +++ b/src/pynecore/core/broker/validation.py @@ -46,4 +46,11 @@ def validate_at_startup( errors.append( "Script uses trailing stops, but the exchange doesn't support them." ) + if reqs.exit_orders and not caps.reduce_only: + errors.append( + "Script uses strategy.exit / strategy.close, but the exchange " + "doesn't support reduce-only orders. A later-arriving exit " + "could flip the book to the other side once the position is " + "already closed — refuse to start." + ) return errors diff --git a/src/pynecore/core/plugin/broker.py b/src/pynecore/core/plugin/broker.py index a79c271..ab0e9ab 100644 --- a/src/pynecore/core/plugin/broker.py +++ b/src/pynecore/core/plugin/broker.py @@ -21,14 +21,15 @@ from pynecore.core.plugin import ConfigT from pynecore.core.plugin.live_provider import LiveProviderPlugin -from pynecore.core.broker.exceptions import ExchangeCapabilityError -from pynecore.core.broker.models import CancelIntent +from pynecore.core.broker.exceptions import ( + BrokerError, + ExchangeCapabilityError, + ExchangeConnectionError, +) +from pynecore.core.broker.models import CancelIntent, DispatchEnvelope if TYPE_CHECKING: from pynecore.core.broker.models import ( - EntryIntent, - ExitIntent, - CloseIntent, ExchangeOrder, ExchangePosition, ExchangeCapabilities, @@ -59,13 +60,21 @@ class BrokerPlugin(LiveProviderPlugin[ConfigT], ABC): """ # === High-level order intents === + # + # Every execute_* method takes a :class:`DispatchEnvelope` rather than a + # bare intent. 
The envelope carries the idempotency metadata the plugin + # needs to allocate stable ``client_order_id`` values via + # :meth:`DispatchEnvelope.client_order_id`. The wrapped intent is on + # ``envelope.intent`` with its original Pine-level fields intact. @abstractmethod - async def execute_entry(self, intent: 'EntryIntent') -> list['ExchangeOrder']: + async def execute_entry(self, envelope: 'DispatchEnvelope') -> list['ExchangeOrder']: """ Open or add to a position. - Maps to ``strategy.entry()`` and ``strategy.order()``. + Maps to ``strategy.entry()`` and ``strategy.order()``. ``envelope.intent`` + is the :class:`EntryIntent`. Use ``envelope.client_order_id(KIND_ENTRY)`` + for the exchange-side client id. | Pine params | order_type | limit | stop | |---------------------|--------------|----------|----------| @@ -76,11 +85,14 @@ async def execute_entry(self, intent: 'EntryIntent') -> list['ExchangeOrder']: """ @abstractmethod - async def execute_exit(self, intent: 'ExitIntent') -> list['ExchangeOrder']: + async def execute_exit(self, envelope: 'DispatchEnvelope') -> list['ExchangeOrder']: """ Exit (reduce) a position. OCA REDUCE semantics expected. - Maps to ``strategy.exit()``. + Maps to ``strategy.exit()``. ``envelope.intent`` is the + :class:`ExitIntent`. Allocate per-leg client ids via + ``envelope.client_order_id(KIND_EXIT_TP)`` and + ``envelope.client_order_id(KIND_EXIT_SL)``. The plugin decides HOW to implement the TP+SL bracket on its exchange: native bracket orders, separate orders with monitoring, etc. @@ -91,17 +103,23 @@ async def execute_exit(self, intent: 'ExitIntent') -> list['ExchangeOrder']: """ @abstractmethod - async def execute_close(self, intent: 'CloseIntent') -> 'ExchangeOrder': + async def execute_close(self, envelope: 'DispatchEnvelope') -> 'ExchangeOrder': """ Close a position with a market order. - Maps to ``strategy.close()`` / ``strategy.close_all()``. + Maps to ``strategy.close()`` / ``strategy.close_all()``. 
Use + ``envelope.client_order_id(KIND_CLOSE)`` for the exchange-side id. """ @abstractmethod - async def execute_cancel(self, intent: 'CancelIntent') -> bool: + async def execute_cancel(self, envelope: 'DispatchEnvelope') -> bool: """ Cancel pending order(s). Returns ``True`` if cancelled. + + ``envelope.intent`` is the :class:`CancelIntent`. The canonical cancel + id (``envelope.client_order_id(KIND_CANCEL)``) is primarily useful for + audit and retry correlation — the actual exchange call typically + references the existing order by its exchange-side id. """ # noinspection PyMethodMayBeStatic,PyUnusedLocal @@ -112,7 +130,7 @@ async def execute_cancel_all(self, symbol: str | None = None) -> int: # === Modify (upsert/replace) === async def modify_entry( - self, old_intent: 'EntryIntent', new_intent: 'EntryIntent', + self, old: 'DispatchEnvelope', new: 'DispatchEnvelope', ) -> list['ExchangeOrder']: """ Modify an existing entry order (price/qty changed). @@ -120,14 +138,20 @@ async def modify_entry( Default implementation: cancel + execute. Plugin authors SHOULD override with an atomic amend when the exchange supports it. """ - await self.execute_cancel(CancelIntent( - pine_id=old_intent.pine_id, - symbol=old_intent.symbol, - )) - return await self.execute_entry(new_intent) + cancel_envelope = DispatchEnvelope( + intent=CancelIntent( + pine_id=old.intent.pine_id, + symbol=old.intent.symbol, + ), + run_tag=new.run_tag, + bar_ts_ms=new.bar_ts_ms, + retry_seq=new.retry_seq, + ) + await self.execute_cancel(cancel_envelope) + return await self.execute_entry(new) async def modify_exit( - self, old_intent: 'ExitIntent', new_intent: 'ExitIntent', + self, old: 'DispatchEnvelope', new: 'DispatchEnvelope', ) -> list['ExchangeOrder']: """ Modify an existing exit bracket (TP/SL price changed). @@ -136,12 +160,19 @@ async def modify_exit( plugin authors SHOULD override with an atomic amend when the exchange supports it (``editOrder``, Bybit amend, etc.). 
""" - await self.execute_cancel(CancelIntent( - pine_id=old_intent.pine_id, - symbol=old_intent.symbol, - from_entry=old_intent.from_entry, - )) - return await self.execute_exit(new_intent) + old_exit = old.intent + cancel_envelope = DispatchEnvelope( + intent=CancelIntent( + pine_id=old_exit.pine_id, + symbol=old_exit.symbol, + from_entry=getattr(old_exit, 'from_entry', None), + ), + run_tag=new.run_tag, + bar_ts_ms=new.bar_ts_ms, + retry_seq=new.retry_seq, + ) + await self.execute_cancel(cancel_envelope) + return await self.execute_exit(new) # === State queries === @@ -170,6 +201,28 @@ def watch_orders(self) -> AsyncIterator['OrderEvent']: """ raise NotImplementedError + # === Exception mapping === + + # noinspection PyMethodMayBeStatic + def _map_exception(self, raw: Exception) -> BrokerError | None: + """Translate a raw exchange-SDK exception into the broker taxonomy. + + Utility hook for plugin authors — **not** called by the sync engine + directly. Plugin ``execute_*`` implementations wrap their SDK calls in + try/except and delegate classification here so exchange-specific + knowledge stays in one place. Default implementation only handles + stdlib exceptions common to every plugin: a concrete plugin should + override to layer in its SDK's error types (``ccxt.AuthenticationError``, + IB ``errorCode``, etc.) and return ``None`` for anything it doesn't + recognise so the caller re-raises the original. + + :returns: A :class:`BrokerError` subclass instance if ``raw`` can be + classified, or ``None`` if the plugin should re-raise as-is. 
+ """ + if isinstance(raw, ConnectionError): + return ExchangeConnectionError(str(raw) or "Connection lost") + return None + # === Capabilities === @abstractmethod diff --git a/src/pynecore/core/script_runner.py b/src/pynecore/core/script_runner.py index e548ad6..4cd183d 100644 --- a/src/pynecore/core/script_runner.py +++ b/src/pynecore/core/script_runner.py @@ -4,6 +4,7 @@ from pathlib import Path from datetime import datetime, UTC +from pynecore import lib from pynecore.types.ohlcv import OHLCV from pynecore.core.syminfo import SymInfo from pynecore.core.csv_file import CSVWriter @@ -273,6 +274,7 @@ def __init__(self, script_path: Path, ohlcv_iter: Iterable[OHLCV], syminfo: SymI self._broker_event_loop = broker_event_loop self._order_sync_engine: 'OrderSyncEngine | None' = None if broker_plugin is not None: + from pynecore.core.broker.idempotency import make_run_tag from pynecore.core.broker.position import BrokerPosition from pynecore.core.broker.sync_engine import OrderSyncEngine # Swap the simulator position for a live tracker. The @@ -280,10 +282,14 @@ def __init__(self, script_path: Path, ohlcv_iter: Iterable[OHLCV], syminfo: SymI # in live broker mode the exchange is authoritative, so the # simulator is dropped entirely. self.script.position = BrokerPosition() + run_tag = make_run_tag( + f"{script_path.read_text(encoding='utf-8')}\n{syminfo.ticker}", + ) self._order_sync_engine = OrderSyncEngine( broker=broker_plugin, position=self.script.position, # type: ignore[arg-type] symbol=str(syminfo.ticker), + run_tag=run_tag, event_loop=broker_event_loop, mintick=float(syminfo.mintick) if syminfo.mintick else 0.01, ) @@ -329,7 +335,7 @@ def _process_orders(self, position) -> None: arrived asynchronously through :meth:`BrokerPosition.record_fill`. 
""" if self._order_sync_engine is not None: - self._order_sync_engine.sync() + self._order_sync_engine.sync(int(lib.last_bar_time)) else: position.process_orders() @@ -338,7 +344,7 @@ def _process_orders_magnified(self, position, sub_bars, candle) -> None: is the source of truth — magnification is irrelevant and the engine runs a plain sync.""" if self._order_sync_engine is not None: - self._order_sync_engine.sync() + self._order_sync_engine.sync(int(lib.last_bar_time)) else: position.process_orders_magnified(sub_bars, candle) @@ -380,8 +386,12 @@ def run_iter(self, on_progress: Callable[[datetime], None] | None = None) \ # Broker mode: refuse to start if the script needs capabilities the # exchange doesn't offer. Fail fast — never on the first bar. if self._broker_plugin is not None: + import asyncio from pynecore.core.broker.validation import validate_at_startup - from pynecore.core.broker.exceptions import ExchangeCapabilityError + from pynecore.core.broker.exceptions import ( + AuthenticationError, + ExchangeCapabilityError, + ) caps = self._broker_plugin.get_capabilities() reqs = getattr(self.script, '_broker_requirements', None) if reqs is not None: @@ -392,6 +402,25 @@ def run_iter(self, on_progress: Callable[[datetime], None] | None = None) \ + "\n".join(f" - {e}" for e in errors) ) + # Auth check: fail fast on bad credentials rather than on the + # first order attempt. A single get_balance() call is cheap and + # every exchange supports it. An AuthenticationError here is + # terminal — reconnect can never recover wrong keys. 
+ coro = self._broker_plugin.get_balance() + try: + if self._broker_event_loop is None: + asyncio.run(coro) + else: + asyncio.run_coroutine_threadsafe( + coro, self._broker_event_loop, + ).result(timeout=30.0) + except AuthenticationError as exc: + raise AuthenticationError( + "Broker authentication failed at startup — cannot begin " + f"trading: {exc.reason}", + reason=exc.reason, + ) from exc + # Update syminfo lib properties if needed if not self.update_syminfo_every_run: _set_lib_syminfo_properties(self.syminfo, lib) diff --git a/src/pynecore/lib/strategy/oca.py b/src/pynecore/lib/strategy/oca.py index 83adfc3..693b81e 100644 --- a/src/pynecore/lib/strategy/oca.py +++ b/src/pynecore/lib/strategy/oca.py @@ -3,6 +3,10 @@ # # Constants # +# The string values here are the Pine-side literals; the broker layer treats +# them as the authoritative enum members of +# :class:`pynecore.core.broker.models.OcaType`. A new OCA semantic must be +# added in both places or the sync engine will reject the intent. 
cancel = Oca("cancel") reduce = Oca("reduce") diff --git a/src/pynecore/transformers/script_requirements.py b/src/pynecore/transformers/script_requirements.py index 40538ad..fcf7ea9 100644 --- a/src/pynecore/transformers/script_requirements.py +++ b/src/pynecore/transformers/script_requirements.py @@ -33,6 +33,7 @@ _FLAG_BRACKET = 'tp_sl_bracket' _FLAG_TRAIL = 'trailing_stop' _FLAG_STRATEGY_ORDER = 'strategy_order' +_FLAG_EXIT_ORDERS = 'exit_orders' def _strategy_call_name(node: ast.Call) -> str | None: @@ -106,6 +107,7 @@ def __init__(self) -> None: _FLAG_BRACKET: False, _FLAG_TRAIL: False, _FLAG_STRATEGY_ORDER: False, + _FLAG_EXIT_ORDERS: False, } self._strategy_decorator: ast.Call | None = None @@ -145,8 +147,10 @@ def visit_Call(self, node: ast.Call) -> ast.Call: self._apply_entry_or_order(kws, is_strategy_order=True) elif name == 'exit': self._apply_exit(kws) + self._reqs[_FLAG_EXIT_ORDERS] = True elif name in ('close', 'close_all'): self._reqs[_FLAG_MARKET] = True + self._reqs[_FLAG_EXIT_ORDERS] = True return node # === Detection rules (see design doc, "Detektálható Minták" table) === diff --git a/tests/t00_pynecore/core/test_023_script_requirements.py b/tests/t00_pynecore/core/test_023_script_requirements.py index f3a0013..428e4a2 100644 --- a/tests/t00_pynecore/core/test_023_script_requirements.py +++ b/tests/t00_pynecore/core/test_023_script_requirements.py @@ -98,6 +98,7 @@ def main(): flags = _get_requirements_keyword(tree) assert flags == { 'limit_orders': True, 'stop_orders': True, 'tp_sl_bracket': True, + 'exit_orders': True, } @@ -111,6 +112,7 @@ def main(): flags = _get_requirements_keyword(tree) assert flags == { 'limit_orders': True, 'stop_orders': True, 'tp_sl_bracket': True, + 'exit_orders': True, } @@ -121,7 +123,7 @@ def main(): strategy.exit('TR', from_entry='Long', trail_offset=50, trail_points=100) """) flags = _get_requirements_keyword(tree) - assert flags == {'trailing_stop': True} + assert flags == {'trailing_stop': True, 'exit_orders': 
True} def __test_strategy_order_detects_strategy_order_flag__(): @@ -141,7 +143,30 @@ def __test_close_detects_market_orders__(): def main(): strategy.close('Long') """) - assert _get_requirements_keyword(tree) == {'market_orders': True} + assert _get_requirements_keyword(tree) == { + 'market_orders': True, 'exit_orders': True, + } + + +def __test_close_all_detects_exit_orders__(): + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.close_all() + """) + assert _get_requirements_keyword(tree) == { + 'market_orders': True, 'exit_orders': True, + } + + +def __test_plain_exit_detects_exit_orders__(): + """A bracket-less strategy.exit still requires reduce-only semantics.""" + tree = _transform(""" + @script.strategy('S') + def main(): + strategy.exit('X', from_entry='Long') + """) + assert _get_requirements_keyword(tree) == {'exit_orders': True} def __test_import_is_injected_when_requirements_present__(): @@ -202,7 +227,25 @@ def __test_validate_collects_all_missing_capabilities__(): reqs = ScriptRequirements( stop_orders=True, stop_limit_orders=True, tp_sl_bracket=True, trailing_stop=True, + exit_orders=True, ) caps = ExchangeCapabilities() errors = validate_at_startup(reqs, caps) - assert len(errors) == 4 + assert len(errors) == 5 + + +def __test_validate_rejects_exit_without_reduce_only_capability__(): + """A script that uses strategy.exit/close must refuse to start on an + exchange that doesn't honour reduce-only semantics — otherwise a + later-arriving exit can flip the book to the other side.""" + reqs = ScriptRequirements(exit_orders=True) + caps = ExchangeCapabilities() # reduce_only=False + errors = validate_at_startup(reqs, caps) + assert len(errors) == 1 + assert 'reduce-only' in errors[0] + + +def __test_validate_accepts_exit_with_reduce_only_capability__(): + reqs = ScriptRequirements(exit_orders=True) + caps = ExchangeCapabilities(reduce_only=True) + assert validate_at_startup(reqs, caps) == [] diff --git 
a/tests/t00_pynecore/core/test_024_intent_builder.py b/tests/t00_pynecore/core/test_024_intent_builder.py index 41f5ec1..a05192b 100644 --- a/tests/t00_pynecore/core/test_024_intent_builder.py +++ b/tests/t00_pynecore/core/test_024_intent_builder.py @@ -203,3 +203,42 @@ def __test_mixed_entry_and_exit_produce_both_intents__(): intents = build_intents({"L": e}, {"L": x}, SYMBOL) kinds = [type(i).__name__ for i in intents] assert kinds == ["EntryIntent", "ExitIntent"] + + +# === reduce_only invariant (WS2) === + +def __test_exit_intent_defaults_to_reduce_only_true__(): + x = _exit("L", -1.0, "TP", limit=60_000.0, stop=45_000.0) + i = build_intents({}, {"L": x}, SYMBOL)[0] + assert isinstance(i, ExitIntent) + assert i.reduce_only is True + + +def __test_close_intent_defaults_to_reduce_only_true__(): + i = build_intents({}, {"L": _close("L", -1.0)}, SYMBOL)[0] + assert isinstance(i, CloseIntent) + assert i.reduce_only is True + + +def __test_close_all_intent_defaults_to_reduce_only_true__(): + i = build_intents({}, {None: _close_all(-1.0)}, SYMBOL)[0] + assert isinstance(i, CloseIntent) + assert i.reduce_only is True + + +def __test_exit_intent_rejects_reduce_only_false__(): + import pytest + with pytest.raises(ValueError, match="reduce_only must be True"): + ExitIntent( + pine_id="TP", from_entry="L", symbol=SYMBOL, + side="sell", qty=1.0, reduce_only=False, + ) + + +def __test_close_intent_rejects_reduce_only_false__(): + import pytest + with pytest.raises(ValueError, match="reduce_only must be True"): + CloseIntent( + pine_id="L", symbol=SYMBOL, side="sell", qty=1.0, + reduce_only=False, + ) diff --git a/tests/t00_pynecore/core/test_025_order_sync_engine.py b/tests/t00_pynecore/core/test_025_order_sync_engine.py index 11fed29..f54e11d 100644 --- a/tests/t00_pynecore/core/test_025_order_sync_engine.py +++ b/tests/t00_pynecore/core/test_025_order_sync_engine.py @@ -16,16 +16,18 @@ import pytest from pynecore import lib +from pynecore.core.broker.exceptions import 
OrderDispositionUnknownError from pynecore.core.broker.position import BrokerPosition from pynecore.core.broker.sync_engine import OrderSyncEngine from pynecore.core.broker.models import ( - CancelIntent, - EntryIntent, - ExitIntent, - CloseIntent, + BrokerEvent, + DispatchEnvelope, ExchangeOrder, ExchangePosition, ExchangeCapabilities, + LegPartialRepairedEvent, + LegRepairFailedEvent, + OcaPartialFillPolicy, OrderEvent, OrderStatus, OrderType, @@ -36,10 +38,13 @@ Order, _order_type_entry, _order_type_close, + oca as _oca, ) SYMBOL = "BTCUSDT" +RUN_TAG = "test" +BAR_TS = 1_700_000_000_000 @pytest.fixture(autouse=True) @@ -57,21 +62,38 @@ def _stub_script(): @dataclass class MockBroker: - """Duck-typed stand-in for :class:`BrokerPlugin`. Records all calls.""" - entry_calls: list[EntryIntent] = field(default_factory=list) - exit_calls: list[ExitIntent] = field(default_factory=list) - close_calls: list[CloseIntent] = field(default_factory=list) - cancel_calls: list[CancelIntent] = field(default_factory=list) - modify_entry_calls: list[tuple[EntryIntent, EntryIntent]] = field(default_factory=list) - modify_exit_calls: list[tuple[ExitIntent, ExitIntent]] = field(default_factory=list) + """Duck-typed stand-in for :class:`BrokerPlugin`. Records all calls. + + Each call captures the full :class:`DispatchEnvelope` the sync engine + sends so tests can inspect both the wrapped intent and the allocated + ``client_order_id``. 
+ """ + entry_calls: list[DispatchEnvelope] = field(default_factory=list) + exit_calls: list[DispatchEnvelope] = field(default_factory=list) + close_calls: list[DispatchEnvelope] = field(default_factory=list) + cancel_calls: list[DispatchEnvelope] = field(default_factory=list) + modify_entry_calls: list[tuple[DispatchEnvelope, DispatchEnvelope]] = field( + default_factory=list, + ) + modify_exit_calls: list[tuple[DispatchEnvelope, DispatchEnvelope]] = field( + default_factory=list, + ) open_orders: list[ExchangeOrder] = field(default_factory=list) position: ExchangePosition | None = None streamed_events: list[OrderEvent] = field(default_factory=list) watch_orders_impl: str = "generator" # "generator" | "not_implemented" + raise_on_next_entry: Exception | None = None + raise_on_next_exit: Exception | None = None + raise_on_next_cancel: Exception | None = None + capabilities: ExchangeCapabilities = field(default_factory=ExchangeCapabilities) _next_id: int = 0 - def _mk_order(self, intent) -> ExchangeOrder: + def get_capabilities(self) -> ExchangeCapabilities: + return self.capabilities + + def _mk_order(self, envelope: DispatchEnvelope, kind: str) -> ExchangeOrder: self._next_id += 1 + intent = envelope.intent return ExchangeOrder( id=f"xchg-{self._next_id}", symbol=getattr(intent, 'symbol', SYMBOL), @@ -87,31 +109,44 @@ def _mk_order(self, intent) -> ExchangeOrder: timestamp=0.0, fee=0.0, fee_currency="", + client_order_id=envelope.client_order_id(kind), ) - async def execute_entry(self, intent): - self.entry_calls.append(intent) - return [self._mk_order(intent)] - - async def execute_exit(self, intent): - self.exit_calls.append(intent) - return [self._mk_order(intent)] - - async def execute_close(self, intent): - self.close_calls.append(intent) - return self._mk_order(intent) - - async def execute_cancel(self, intent): - self.cancel_calls.append(intent) + async def execute_entry(self, envelope): + self.entry_calls.append(envelope) + if self.raise_on_next_entry is 
not None: + err = self.raise_on_next_entry + self.raise_on_next_entry = None + raise err + return [self._mk_order(envelope, 'e')] + + async def execute_exit(self, envelope): + self.exit_calls.append(envelope) + if self.raise_on_next_exit is not None: + err = self.raise_on_next_exit + self.raise_on_next_exit = None + raise err + return [self._mk_order(envelope, 't')] + + async def execute_close(self, envelope): + self.close_calls.append(envelope) + return self._mk_order(envelope, 'c') + + async def execute_cancel(self, envelope): + self.cancel_calls.append(envelope) + if self.raise_on_next_cancel is not None: + err = self.raise_on_next_cancel + self.raise_on_next_cancel = None + raise err return True async def modify_entry(self, old, new): self.modify_entry_calls.append((old, new)) - return [self._mk_order(new)] + return [self._mk_order(new, 'e')] async def modify_exit(self, old, new): self.modify_exit_calls.append((old, new)) - return [self._mk_order(new)] + return [self._mk_order(new, 't')] async def get_open_orders(self, symbol=None): return list(self.open_orders) @@ -147,11 +182,16 @@ def _mk_engine(broker, mintick: float = 1.0) -> tuple[OrderSyncEngine, BrokerPos broker=broker, # type: ignore[arg-type] position=pos, symbol=SYMBOL, + run_tag=RUN_TAG, mintick=mintick, ) return engine, pos +def _sync(engine: OrderSyncEngine, *, bar_ts: int = BAR_TS) -> None: + engine.sync(bar_ts) + + def _fill_event(side: str, qty: float, price: float, *, pine_id: str, leg: LegType = LegType.ENTRY, xchg_id: str = "xchg-1") -> OrderEvent: @@ -176,11 +216,11 @@ def __test_new_entry_dispatches_execute_entry__(): engine, pos = _mk_engine(b) pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) - engine.sync() + engine.sync(BAR_TS) assert len(b.entry_calls) == 1 - assert b.entry_calls[0].pine_id == "L" - assert b.entry_calls[0].limit == 50_000.0 + assert b.entry_calls[0].intent.pine_id == "L" + assert b.entry_calls[0].intent.limit == 50_000.0 assert 
engine.active_intents.keys() == {"L"} assert engine.order_mapping["L"] == ["xchg-1"] @@ -190,8 +230,8 @@ def __test_unchanged_entry_is_not_redispatched__(): engine, pos = _mk_engine(b) pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) - engine.sync() - engine.sync() + engine.sync(BAR_TS) + engine.sync(BAR_TS) assert len(b.entry_calls) == 1 # only once @@ -200,29 +240,33 @@ def __test_modified_entry_dispatches_modify_entry__(): b = MockBroker() engine, pos = _mk_engine(b) pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) - engine.sync() + engine.sync(BAR_TS) # Replace with a different limit price pos.entry_orders["L"] = _entry_order("L", 1.0, limit=49_500.0) - engine.sync() + engine.sync(BAR_TS) assert len(b.modify_entry_calls) == 1 old, new = b.modify_entry_calls[0] - assert old.limit == 50_000.0 and new.limit == 49_500.0 + assert old.intent.limit == 50_000.0 and new.intent.limit == 49_500.0 + # Envelope identity is pinned on first dispatch and preserved on modify — + # that is what makes the exchange treat the amend as idempotent. 
+ assert old.bar_ts_ms == new.bar_ts_ms == BAR_TS + assert old.run_tag == new.run_tag == RUN_TAG def __test_removed_entry_dispatches_cancel__(): b = MockBroker() engine, pos = _mk_engine(b) pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) - engine.sync() + engine.sync(BAR_TS) del pos.entry_orders["L"] - engine.sync() + engine.sync(BAR_TS) assert len(b.cancel_calls) == 1 - assert b.cancel_calls[0].pine_id == "L" - assert b.cancel_calls[0].from_entry is None + assert b.cancel_calls[0].intent.pine_id == "L" + assert b.cancel_calls[0].intent.from_entry is None assert "L" not in engine.active_intents @@ -234,11 +278,11 @@ def __test_close_intent_dispatches_execute_close__(): exit_id="Close entry(s) order L", ) - engine.sync() + engine.sync(BAR_TS) assert len(b.close_calls) == 1 - assert b.close_calls[0].pine_id == "L" - assert b.close_calls[0].side == "sell" + assert b.close_calls[0].intent.pine_id == "L" + assert b.close_calls[0].intent.side == "sell" def __test_exit_with_prices_dispatches_execute_exit__(): @@ -248,11 +292,11 @@ def __test_exit_with_prices_dispatches_execute_exit__(): "L", -1.0, "TP", limit=60_000.0, stop=45_000.0, ) - engine.sync() + engine.sync(BAR_TS) assert len(b.exit_calls) == 1 - assert b.exit_calls[0].tp_price == 60_000.0 - assert b.exit_calls[0].sl_price == 45_000.0 + assert b.exit_calls[0].intent.tp_price == 60_000.0 + assert b.exit_calls[0].intent.sl_price == 45_000.0 # === Tick deferral + resolution === @@ -265,7 +309,7 @@ def __test_exit_with_ticks_without_entry_is_deferred__(): "L", -1.0, "TP", profit_ticks=100.0, loss_ticks=50.0, ) - engine.sync() + engine.sync(BAR_TS) # Exit never reaches the plugin while ticks are unresolved. 
assert b.exit_calls == [] @@ -279,15 +323,15 @@ def __test_entry_fill_resolves_deferred_exit__(): pos.exit_orders["L"] = _exit_order( "L", -1.0, "TP", profit_ticks=100.0, loss_ticks=50.0, ) - engine.sync() # defers it + engine.sync(BAR_TS) # defers it engine.on_order_event(_fill_event( "buy", qty=1.0, price=50_000.0, pine_id="L", leg=LegType.ENTRY, )) - engine.sync() # drains the event, resolves ticks, dispatches + engine.sync(BAR_TS) # drains the event, resolves ticks, dispatches assert len(b.exit_calls) == 1 - resolved = b.exit_calls[0] + resolved = b.exit_calls[0].intent # Long entry (sign=+1): TP above, SL below. assert resolved.tp_price == 50_100.0 assert resolved.sl_price == 49_950.0 @@ -302,14 +346,14 @@ def __test_short_entry_fill_reverses_tick_direction__(): pos.exit_orders["S"] = _exit_order( "S", 1.0, "TP", profit_ticks=100.0, loss_ticks=50.0, ) - engine.sync() + engine.sync(BAR_TS) engine.on_order_event(_fill_event( "sell", qty=1.0, price=50_000.0, pine_id="S", leg=LegType.ENTRY, )) - engine.sync() + engine.sync(BAR_TS) - resolved = b.exit_calls[0] + resolved = b.exit_calls[0].intent # Short (sign=-1): TP below entry, SL above entry. assert resolved.tp_price == 49_900.0 assert resolved.sl_price == 50_050.0 @@ -327,7 +371,7 @@ def veto(_intent) -> InterceptorResult: return InterceptorResult(intent=_intent, rejected=True, reject_reason="no") engine.register_interceptor(veto) - engine.sync() + engine.sync(BAR_TS) assert b.entry_calls == [] assert engine.active_intents == {} @@ -342,9 +386,9 @@ def half(_intent): return InterceptorResult(intent=_intent, modified_qty=_intent.qty * 0.5) engine.register_interceptor(half) - engine.sync() + engine.sync(BAR_TS) - assert b.entry_calls[0].qty == 0.5 + assert b.entry_calls[0].intent.qty == 0.5 # === Reconciliation === @@ -367,7 +411,7 @@ def __test_run_event_stream_queues_all_events__(): # Drain via the public path (sync) — verifies integration with record_fill. 
pos.avg_price = 50_000.0 # make equity finite for Trade bookkeeping - engine.sync() + engine.sync(BAR_TS) assert len(pos.closed_trades) == 0 or len(pos.closed_trades) == 1 # We at least confirm the events flowed end-to-end by checking records @@ -405,6 +449,122 @@ async def _gen(): # === Reconciliation === +# === Idempotency: client_order_id allocation + unknown-disposition recovery === + + +def __test_dispatch_passes_deterministic_client_order_id__(): + """Plugins receive a canonical ``client_order_id`` via the envelope.""" + b = MockBroker() + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + + engine.sync(BAR_TS) + + env = b.entry_calls[0] + coid = env.client_order_id('e') + # Deterministic prefix built from RUN_TAG + hash(pine_id="L") + BAR_TS. + assert coid.startswith(RUN_TAG + "-") + assert coid.endswith("-e0") + assert len(coid) <= 30 + + +def __test_retry_within_same_bar_reuses_client_order_id__(): + """A second dispatch attempt in the same bar yields the same CO-ID so the + exchange can dedup the duplicate.""" + b = MockBroker() + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + + engine.sync(BAR_TS) + coid_first = b.entry_calls[0].client_order_id('e') + + # Simulate a second engine building the same envelope for the same logical + # intent on the same bar — same inputs must produce the same CO-ID. 
+ engine2, pos2 = _mk_engine(MockBroker()) + pos2.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + engine2.sync(BAR_TS) + coid_second = engine2._envelopes["L"].client_order_id('e') # type: ignore[attr-defined] + + assert coid_first == coid_second + + +def _preview_entry_coid(pine_id: str, *, limit: float, bar_ts: int = BAR_TS) -> str: + """Learn the ``client_order_id`` the engine will allocate for a given entry.""" + noop = MockBroker() + engine, pos = _mk_engine(noop) + pos.entry_orders[pine_id] = _entry_order(pine_id, 1.0, limit=limit) + engine.sync(bar_ts) + return noop.entry_calls[0].client_order_id('e') + + +def __test_unknown_disposition_parks_pending__(): + """A timed-out dispatch is parked on ``pending_verification``, not retried.""" + expected_coid = _preview_entry_coid("L", limit=50_000.0) + + b = MockBroker() + b.raise_on_next_entry = OrderDispositionUnknownError( + "simulated timeout", client_order_id=expected_coid, + ) + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + + engine.sync(BAR_TS) + + assert expected_coid in engine.pending_verification + # The engine did call execute_entry exactly once — no auto-retry. + assert len(b.entry_calls) == 1 + + +def __test_verify_pending_promotes_matched_order__(): + """``_verify_pending_dispatches`` matches a pending CO-ID against open orders.""" + expected_coid = _preview_entry_coid("L", limit=50_000.0) + + b = MockBroker() + b.raise_on_next_entry = OrderDispositionUnknownError( + "simulated timeout", client_order_id=expected_coid, + ) + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + + engine.sync(BAR_TS) + assert expected_coid in engine.pending_verification + + # The order actually did land; surface it on get_open_orders. 
+ b.open_orders = [ + ExchangeOrder( + id="xchg-42", symbol=SYMBOL, side="buy", + order_type=OrderType.LIMIT, qty=1.0, filled_qty=0.0, + remaining_qty=1.0, price=50_000.0, stop_price=None, + average_fill_price=None, status=OrderStatus.OPEN, + timestamp=0.0, fee=0.0, fee_currency="", + client_order_id=expected_coid, + ), + ] + + engine.sync(BAR_TS) + + assert expected_coid not in engine.pending_verification + assert engine.order_mapping["L"] == ["xchg-42"] + + +def __test_verify_pending_keeps_pending_when_not_found__(): + """If ``get_open_orders`` does not surface the CO-ID, the pending stays.""" + expected_coid = _preview_entry_coid("L", limit=50_000.0) + + b = MockBroker() + b.raise_on_next_entry = OrderDispositionUnknownError( + "simulated timeout", client_order_id=expected_coid, + ) + engine, pos = _mk_engine(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + + engine.sync(BAR_TS) + # Second sync: exchange has no matching order; pending stays parked. + engine.sync(BAR_TS) + + assert expected_coid in engine.pending_verification + + def __test_reconcile_adopts_exchange_position_size__(): b = MockBroker() b.position = ExchangePosition( @@ -419,3 +579,411 @@ def __test_reconcile_adopts_exchange_position_size__(): assert pos.size == 2.0 assert pos.avg_price == 50_000.0 + + +# === OCA cascade cancel === +# +# The engine must cancel OCA-cancel siblings the moment a fill event arrives, +# not wait for the next bar's diff pass. These tests exercise the full event +# → sync → cascade path with both entry-side and exit-side fills. 
+ + +def _mk_engine_with_policy( + broker: MockBroker, + *, + policy: OcaPartialFillPolicy = OcaPartialFillPolicy.FILL_CANCELS, +) -> tuple[OrderSyncEngine, BrokerPosition]: + pos = BrokerPosition() + engine = OrderSyncEngine( + broker=broker, # type: ignore[arg-type] + position=pos, + symbol=SYMBOL, + run_tag=RUN_TAG, + oca_partial_fill_policy=policy, + ) + return engine, pos + + +def _oca_entry(order_id: str, size: float, *, oca_name: str, + oca_type, limit: float | None = None) -> Order: + return Order( + order_id, size, order_type=_order_type_entry, + limit=limit, oca_name=oca_name, oca_type=oca_type, + ) + + +def __test_fill_cascades_cancel_to_oca_siblings__(): + """Full fill on A triggers an immediate cancel dispatch for sibling B.""" + b = MockBroker() + engine, pos = _mk_engine_with_policy(b) + pos.entry_orders["A"] = _oca_entry( + "A", 1.0, oca_name="G", oca_type=_oca.cancel, limit=50_000.0, + ) + pos.entry_orders["B"] = _oca_entry( + "B", 1.0, oca_name="G", oca_type=_oca.cancel, limit=49_000.0, + ) + engine.sync(BAR_TS) + assert len(b.entry_calls) == 2 + assert set(engine.active_intents) == {"A", "B"} + + # A fills — must emit a cancel for B on the next sync's drain. + engine.on_order_event(_fill_event( + "buy", 1.0, 50_000.0, pine_id="A", leg=LegType.ENTRY, + )) + engine.sync(BAR_TS + 60_000) + + assert len(b.cancel_calls) == 1 + assert b.cancel_calls[0].intent.pine_id == "B" + assert "B" not in engine.active_intents + # Pine-side cleanup mirrors SimPosition._cancel_oca_group. 
+ assert "B" not in pos.entry_orders + + +def __test_partial_fill_cascades_under_fill_cancels_policy__(): + """Default policy treats a partial fill as a committed win for the leg.""" + b = MockBroker() + engine, pos = _mk_engine_with_policy(b) + pos.entry_orders["A"] = _oca_entry( + "A", 1.0, oca_name="G", oca_type=_oca.cancel, limit=50_000.0, + ) + pos.entry_orders["B"] = _oca_entry( + "B", 1.0, oca_name="G", oca_type=_oca.cancel, limit=49_000.0, + ) + engine.sync(BAR_TS) + + partial = _fill_event("buy", 0.4, 50_000.0, pine_id="A", leg=LegType.ENTRY) + partial.event_type = 'partial' + engine.on_order_event(partial) + engine.sync(BAR_TS + 60_000) + + assert len(b.cancel_calls) == 1 + assert b.cancel_calls[0].intent.pine_id == "B" + + +def __test_partial_fill_does_not_cascade_under_full_fill_only_policy__(): + """FULL_FILL_ONLY keeps siblings live until the leg is fully filled.""" + b = MockBroker() + engine, pos = _mk_engine_with_policy( + b, policy=OcaPartialFillPolicy.FULL_FILL_ONLY, + ) + pos.entry_orders["A"] = _oca_entry( + "A", 1.0, oca_name="G", oca_type=_oca.cancel, limit=50_000.0, + ) + pos.entry_orders["B"] = _oca_entry( + "B", 1.0, oca_name="G", oca_type=_oca.cancel, limit=49_000.0, + ) + engine.sync(BAR_TS) + + partial = _fill_event("buy", 0.4, 50_000.0, pine_id="A", leg=LegType.ENTRY) + partial.event_type = 'partial' + engine.on_order_event(partial) + engine.sync(BAR_TS + 60_000) + + assert b.cancel_calls == [] + assert "B" in engine.active_intents + + # Full fill then arrives — cascade must trigger now. 
+ engine.on_order_event(_fill_event( + "buy", 0.6, 50_000.0, pine_id="A", leg=LegType.ENTRY, + )) + engine.sync(BAR_TS + 120_000) + + assert len(b.cancel_calls) == 1 + assert b.cancel_calls[0].intent.pine_id == "B" + + +def __test_native_oca_cancel_suppresses_cascade__(): + """When the exchange owns the OCA group, the sync engine stays hands-off.""" + b = MockBroker(capabilities=ExchangeCapabilities(oca_cancel_native=True)) + engine, pos = _mk_engine_with_policy(b) + pos.entry_orders["A"] = _oca_entry( + "A", 1.0, oca_name="G", oca_type=_oca.cancel, limit=50_000.0, + ) + pos.entry_orders["B"] = _oca_entry( + "B", 1.0, oca_name="G", oca_type=_oca.cancel, limit=49_000.0, + ) + engine.sync(BAR_TS) + + engine.on_order_event(_fill_event( + "buy", 1.0, 50_000.0, pine_id="A", leg=LegType.ENTRY, + )) + engine.sync(BAR_TS + 60_000) + + assert b.cancel_calls == [] + # Exchange takes care of B; engine's active_intents still reflect both + # until the plugin surfaces a separate cancelled event for B. + assert "B" in engine.active_intents + + +def __test_two_fills_same_group_same_sync_emit_one_cancel__(): + """Per-group dedup inside a single sync pass prevents duplicate cancels.""" + b = MockBroker() + engine, pos = _mk_engine_with_policy(b) + pos.entry_orders["A"] = _oca_entry( + "A", 1.0, oca_name="G", oca_type=_oca.cancel, limit=50_000.0, + ) + pos.entry_orders["B"] = _oca_entry( + "B", 1.0, oca_name="G", oca_type=_oca.cancel, limit=49_000.0, + ) + pos.entry_orders["C"] = _oca_entry( + "C", 1.0, oca_name="G", oca_type=_oca.cancel, limit=48_000.0, + ) + engine.sync(BAR_TS) + + engine.on_order_event(_fill_event( + "buy", 1.0, 50_000.0, pine_id="A", leg=LegType.ENTRY, + )) + # A second spurious fill on the same group (e.g. a partial followed by a + # full fill reported separately) must not re-trigger the cascade. 
+ engine.on_order_event(_fill_event( + "buy", 1.0, 50_000.0, pine_id="A", leg=LegType.ENTRY, + )) + engine.sync(BAR_TS + 60_000) + + # Two siblings cancelled, but only on the first fill — the second is no-op. + assert len(b.cancel_calls) == 2 + assert {c.intent.pine_id for c in b.cancel_calls} == {"B", "C"} + + +def __test_non_cancel_oca_does_not_cascade__(): + """OCA-reduce groups stay alive on fill (partial-fill qty-amend is WS5).""" + b = MockBroker() + engine, pos = _mk_engine_with_policy(b) + pos.entry_orders["A"] = _oca_entry( + "A", 1.0, oca_name="G", oca_type=_oca.reduce, limit=50_000.0, + ) + pos.entry_orders["B"] = _oca_entry( + "B", 1.0, oca_name="G", oca_type=_oca.reduce, limit=49_000.0, + ) + engine.sync(BAR_TS) + + engine.on_order_event(_fill_event( + "buy", 1.0, 50_000.0, pine_id="A", leg=LegType.ENTRY, + )) + engine.sync(BAR_TS + 60_000) + + assert b.cancel_calls == [] + assert "B" in engine.active_intents + + +def __test_standalone_fill_without_oca_group_is_quiet__(): + """Fills on non-OCA intents never touch the cascade path.""" + b = MockBroker() + engine, pos = _mk_engine_with_policy(b) + pos.entry_orders["A"] = _entry_order("A", 1.0, limit=50_000.0) + engine.sync(BAR_TS) + + engine.on_order_event(_fill_event( + "buy", 1.0, 50_000.0, pine_id="A", leg=LegType.ENTRY, + )) + engine.sync(BAR_TS + 60_000) + + assert b.cancel_calls == [] + + +# === Partial entry fill → bracket qty amend (WS5, Option A) === + + +def _partial_entry_event(*, pine_id: str, fill_delta: float, + cumulative_filled: float, order_qty: float, + price: float, xchg_id: str = "xchg-1") -> OrderEvent: + """Build an ``event_type='partial'`` entry fill with cumulative tracking. + + ``fill_delta`` is what the plugin reports this tick; ``cumulative_filled`` + is the running total on the exchange-side order (what the sync engine + reads via ``event.order.filled_qty``). 
+ """ + exch = ExchangeOrder( + id=xchg_id, symbol=SYMBOL, side="buy", + order_type=OrderType.LIMIT, qty=order_qty, + filled_qty=cumulative_filled, + remaining_qty=order_qty - cumulative_filled, + price=price, stop_price=None, average_fill_price=price, + status=OrderStatus.PARTIALLY_FILLED, + timestamp=0.0, fee=0.0, fee_currency="", + ) + return OrderEvent( + order=exch, event_type='partial', fill_price=price, + fill_qty=fill_delta, timestamp=0.0, + pine_id=pine_id, leg_type=LegType.ENTRY, + ) + + +def _mk_engine_with_sink( + broker: MockBroker, sink: list[BrokerEvent], +) -> tuple[OrderSyncEngine, BrokerPosition]: + pos = BrokerPosition() + engine = OrderSyncEngine( + broker=broker, # type: ignore[arg-type] + position=pos, + symbol=SYMBOL, + run_tag=RUN_TAG, + broker_event_sink=sink.append, + ) + return engine, pos + + +def __test_partial_entry_fill_amends_bracket_qty__(): + """A 40% partial entry fill scales the bracket down to 0.4.""" + b = MockBroker() + events: list[BrokerEvent] = [] + engine, pos = _mk_engine_with_sink(b, events) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + pos.exit_orders["L"] = _exit_order( + "L", -1.0, "TP", limit=60_000.0, stop=45_000.0, + ) + engine.sync(BAR_TS) + assert len(b.exit_calls) == 1 + assert b.exit_calls[0].intent.qty == 1.0 + + engine.on_order_event(_partial_entry_event( + pine_id="L", fill_delta=0.4, cumulative_filled=0.4, + order_qty=1.0, price=50_000.0, + )) + engine.sync(BAR_TS + 60_000) + + assert len(b.modify_exit_calls) == 1 + old, new = b.modify_exit_calls[0] + assert old.intent.qty == 1.0 + assert new.intent.qty == 0.4 + assert engine.active_intents["TP\0L"].qty == 0.4 + + repair_events = [e for e in events if isinstance(e, LegPartialRepairedEvent)] + assert len(repair_events) == 1 + assert repair_events[0].old_qty == 1.0 + assert repair_events[0].new_qty == 0.4 + + +def __test_subsequent_partial_fill_emits_another_amend__(): + """Each partial fill with a new cumulative qty triggers a fresh 
amend.""" + b = MockBroker() + events: list[BrokerEvent] = [] + engine, pos = _mk_engine_with_sink(b, events) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + pos.exit_orders["L"] = _exit_order( + "L", -1.0, "TP", limit=60_000.0, stop=45_000.0, + ) + engine.sync(BAR_TS) + + engine.on_order_event(_partial_entry_event( + pine_id="L", fill_delta=0.3, cumulative_filled=0.3, + order_qty=1.0, price=50_000.0, + )) + engine.sync(BAR_TS + 60_000) + engine.on_order_event(_partial_entry_event( + pine_id="L", fill_delta=0.4, cumulative_filled=0.7, + order_qty=1.0, price=50_000.0, + )) + engine.sync(BAR_TS + 120_000) + + assert len(b.modify_exit_calls) == 2 + assert b.modify_exit_calls[0][1].intent.qty == 0.3 + assert b.modify_exit_calls[1][1].intent.qty == 0.7 + + +def __test_native_bracket_skips_partial_amend__(): + """tp_sl_bracket_native=True — the plugin/exchange tracks partial fills.""" + b = MockBroker( + capabilities=ExchangeCapabilities(tp_sl_bracket_native=True), + ) + engine, pos = _mk_engine_with_policy(b) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + pos.exit_orders["L"] = _exit_order( + "L", -1.0, "TP", limit=60_000.0, stop=45_000.0, + ) + engine.sync(BAR_TS) + + engine.on_order_event(_partial_entry_event( + pine_id="L", fill_delta=0.4, cumulative_filled=0.4, + order_qty=1.0, price=50_000.0, + )) + engine.sync(BAR_TS + 60_000) + + assert b.modify_exit_calls == [] + # Bracket intent untouched: still the original 1.0 qty. 
+ assert engine.active_intents["TP\0L"].qty == 1.0 + + +def __test_partial_fill_without_bracket_is_quiet__(): + """Entry without a paired exit → no amend, no event.""" + b = MockBroker() + events: list[BrokerEvent] = [] + engine, pos = _mk_engine_with_sink(b, events) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + engine.sync(BAR_TS) + + engine.on_order_event(_partial_entry_event( + pine_id="L", fill_delta=0.5, cumulative_filled=0.5, + order_qty=1.0, price=50_000.0, + )) + engine.sync(BAR_TS + 60_000) + + assert b.modify_exit_calls == [] + assert events == [] + + +def __test_overfill_is_capped_and_emits_leg_repair_failed__(): + """filled_qty > entry_intent.qty → cap at entry qty + LegRepairFailedEvent. + + The bracket was originally dispatched at 1.0; the cap lands it at 1.0 + again, so no second modify_exit is needed — the critical outcome is the + :class:`LegRepairFailedEvent` surfacing the exchange anomaly. + """ + b = MockBroker() + events: list[BrokerEvent] = [] + engine, pos = _mk_engine_with_sink(b, events) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + pos.exit_orders["L"] = _exit_order( + "L", -1.0, "TP", limit=60_000.0, stop=45_000.0, + ) + engine.sync(BAR_TS) + + engine.on_order_event(_partial_entry_event( + pine_id="L", fill_delta=1.2, cumulative_filled=1.2, + order_qty=1.0, price=50_000.0, + )) + engine.sync(BAR_TS + 60_000) + + # Bracket qty stays at entry qty (cap), no redundant modify_exit. 
+ assert engine.active_intents["TP\0L"].qty == 1.0 + assert b.modify_exit_calls == [] + + overfill = [e for e in events if isinstance(e, LegRepairFailedEvent)] + assert len(overfill) == 1 + assert "overfill" in overfill[0].reason.lower() + assert overfill[0].action_taken == 'capped' + + +def __test_overfill_after_partial_caps_at_entry_qty__(): + """0.4 partial amends to 0.4; follow-up 1.2 cumulative caps back at 1.0.""" + b = MockBroker() + events: list[BrokerEvent] = [] + engine, pos = _mk_engine_with_sink(b, events) + pos.entry_orders["L"] = _entry_order("L", 1.0, limit=50_000.0) + pos.exit_orders["L"] = _exit_order( + "L", -1.0, "TP", limit=60_000.0, stop=45_000.0, + ) + engine.sync(BAR_TS) + + engine.on_order_event(_partial_entry_event( + pine_id="L", fill_delta=0.4, cumulative_filled=0.4, + order_qty=1.0, price=50_000.0, + )) + engine.sync(BAR_TS + 60_000) + + # Second event over-reports 1.2 cumulative — cap at 1.0. + engine.on_order_event(_partial_entry_event( + pine_id="L", fill_delta=0.8, cumulative_filled=1.2, + order_qty=1.0, price=50_000.0, + )) + engine.sync(BAR_TS + 120_000) + + # Two amends: 1.0 → 0.4 (first partial), 0.4 → 1.0 (second, capped). + assert len(b.modify_exit_calls) == 2 + assert b.modify_exit_calls[0][1].intent.qty == 0.4 + assert b.modify_exit_calls[1][1].intent.qty == 1.0 + assert engine.active_intents["TP\0L"].qty == 1.0 + # The second amend carries the overfill flag. 
+ overfill = [e for e in events if isinstance(e, LegRepairFailedEvent)] + assert len(overfill) == 1 + assert overfill[0].action_taken == 'capped' diff --git a/tests/t00_pynecore/core/test_026_broker_runner.py b/tests/t00_pynecore/core/test_026_broker_runner.py index 568f59f..06f9f3b 100644 --- a/tests/t00_pynecore/core/test_026_broker_runner.py +++ b/tests/t00_pynecore/core/test_026_broker_runner.py @@ -34,7 +34,10 @@ OrderType, LegType, ) -from pynecore.core.broker.exceptions import ExchangeCapabilityError +from pynecore.core.broker.exceptions import ( + AuthenticationError, + ExchangeCapabilityError, +) from pynecore.core.broker.position import BrokerPosition from pynecore.core.script_runner import ScriptRunner from pynecore.core.syminfo import SymInfo @@ -90,13 +93,15 @@ class MockBrokerPlugin: exit_calls: list[ExitIntent] = field(default_factory=list) close_calls: list[CloseIntent] = field(default_factory=list) cancel_calls: list[CancelIntent] = field(default_factory=list) + auth_error: AuthenticationError | None = None _next_id: int = 0 def get_capabilities(self) -> ExchangeCapabilities: return self.capabilities - def _mk_order(self, intent) -> ExchangeOrder: + def _mk_order(self, envelope) -> ExchangeOrder: self._next_id += 1 + intent = getattr(envelope, 'intent', envelope) return ExchangeOrder( id=f"xchg-{self._next_id}", symbol=getattr(intent, 'symbol', 'BTCUSDT'), @@ -108,22 +113,24 @@ def _mk_order(self, intent) -> ExchangeOrder: price=None, stop_price=None, average_fill_price=None, status=OrderStatus.OPEN, timestamp=0.0, fee=0.0, fee_currency="", + client_order_id=envelope.client_order_id('e') + if hasattr(envelope, 'client_order_id') else None, ) - async def execute_entry(self, intent): - self.entry_calls.append(intent) - return [self._mk_order(intent)] + async def execute_entry(self, envelope): + self.entry_calls.append(envelope.intent) + return [self._mk_order(envelope)] - async def execute_exit(self, intent): - self.exit_calls.append(intent) - return 
[self._mk_order(intent)] + async def execute_exit(self, envelope): + self.exit_calls.append(envelope.intent) + return [self._mk_order(envelope)] - async def execute_close(self, intent): - self.close_calls.append(intent) - return self._mk_order(intent) + async def execute_close(self, envelope): + self.close_calls.append(envelope.intent) + return self._mk_order(envelope) - async def execute_cancel(self, intent): - self.cancel_calls.append(intent) + async def execute_cancel(self, envelope): + self.cancel_calls.append(envelope.intent) return True async def modify_entry(self, old, new): @@ -138,6 +145,11 @@ async def get_open_orders(self, symbol=None): async def get_position(self, symbol): return None + async def get_balance(self): + if self.auth_error is not None: + raise self.auth_error + return {"USDT": 1000.0} + # === Script templates === @@ -234,11 +246,11 @@ def __test_startup_validation_rejects_incompatible_script__(tmp_path): def __test_startup_validation_accepts_compatible_script__(tmp_path): - # The bracket script needs tp_sl_bracket AND stop_orders from its - # syntactic keywords; the plugin must advertise both. + # The bracket script needs tp_sl_bracket, stop_order AND reduce_only — + # the last one because strategy.exit implies reduce-only semantics. plugin = MockBrokerPlugin( capabilities=ExchangeCapabilities( - tp_sl_bracket=True, stop_order=True, + tp_sl_bracket=True, stop_order=True, reduce_only=True, ), ) script_path = _write_script(tmp_path, _LIMIT_EXIT_BRACKET_SCRIPT) @@ -276,7 +288,11 @@ def __test_market_entry_dispatches_execute_entry__(tmp_path): def __test_close_dispatches_execute_close__(tmp_path): """``strategy.close`` only emits an order when there is an open position; in broker mode that requires a real exchange fill first.""" - plugin = MockBrokerPlugin(capabilities=ExchangeCapabilities()) + # ``strategy.close`` triggers the ``exit_orders`` requirement, which the + # validator pairs with ``caps.reduce_only``. 
+ plugin = MockBrokerPlugin( + capabilities=ExchangeCapabilities(reduce_only=True), + ) # 3-bar script: enter on bar 0, hold, close on bar 2 once filled. script_path = _write_script(tmp_path, textwrap.dedent('''\ """ @@ -481,3 +497,31 @@ def observing_iter(): # Bar close keeps the dispatch idempotent. assert obs['after_tick_3'] == 1, \ f"entry should not re-dispatch at bar close, got {obs['after_tick_3']}" + + +# === Startup authentication check (WS3) === + + +def __test_startup_rejects_script_on_authentication_failure__(tmp_path): + """Bad credentials on get_balance() must abort startup with + AuthenticationError — before any order is sent.""" + plugin = MockBrokerPlugin( + capabilities=ExchangeCapabilities(), + auth_error=AuthenticationError("Invalid API key", reason="invalid key"), + ) + script_path = _write_script(tmp_path, _MARKET_ENTRY_SCRIPT) + + runner = ScriptRunner( + script_path=script_path, + ohlcv_iter=_make_bars(2), + syminfo=_make_syminfo(), + broker_plugin=plugin, # type: ignore[arg-type] + ) + with pytest.raises(AuthenticationError) as excinfo: + list(runner.run_iter()) + # The runner wraps with its own descriptive message but preserves the + # original reason. + assert excinfo.value.reason == "invalid key" + assert "authentication failed at startup" in str(excinfo.value).lower() + # No orders should have been dispatched. + assert plugin.entry_calls == [] diff --git a/tests/t00_pynecore/core/test_027_broker_idempotency.py b/tests/t00_pynecore/core/test_027_broker_idempotency.py new file mode 100644 index 0000000..d740c4f --- /dev/null +++ b/tests/t00_pynecore/core/test_027_broker_idempotency.py @@ -0,0 +1,303 @@ +""" +Unit tests for :mod:`pynecore.core.broker.idempotency`. + +The ``client_order_id`` formula underpins every broker dispatch: retries, +reconnects and full process restarts all rely on the same inputs producing +byte-identical ids. These tests pin down the determinism, the 30-character +budget, collision resistance, and the error paths. 
+""" +import random +import string + +import pytest + +from pynecore.core.broker.idempotency import ( + BAR_TS_WIDTH, + CLIENT_ORDER_ID_MAX_LEN, + KIND_CANCEL, + KIND_CLOSE, + KIND_ENTRY, + KIND_EXIT_SL, + KIND_EXIT_TP, + PINE_ID_HASH_WIDTH, + RUN_TAG_WIDTH, + VALID_KINDS, + build_client_order_id, + hash_pine_id, + make_run_tag, +) + +# === Determinism ========================================================= + + +def __test_build_is_deterministic_across_calls__(): + """Same inputs must always produce the same id — across calls and time.""" + kwargs = dict( + run_tag='ab12', + pine_id='Long', + bar_ts_ms=1_700_000_000_000, + kind=KIND_ENTRY, + ) + first = build_client_order_id(**kwargs) + for _ in range(10): + assert build_client_order_id(**kwargs) == first + + +def __test_hash_pine_id_is_deterministic__(): + assert hash_pine_id('Long') == hash_pine_id('Long') + assert hash_pine_id('TP/Long') == hash_pine_id('TP/Long') + + +def __test_make_run_tag_is_deterministic__(): + assert make_run_tag('strategy("x")\nplot(close)') == make_run_tag('strategy("x")\nplot(close)') + + +# === Length budget ======================================================= + + +def __test_result_fits_30_char_budget__(): + """Common-case inputs must stay within the Capital.com ``dealReference`` limit.""" + for kind in VALID_KINDS: + result = build_client_order_id( + run_tag='abcd', + pine_id='SomeVeryLongPineIdentifierThatDoesNotMatter', + bar_ts_ms=9_999_999_999_999, # Year 2286 + kind=kind, + retry_seq=0, + ) + assert len(result) <= CLIENT_ORDER_ID_MAX_LEN + + +def __test_hash_pine_id_is_exactly_expected_width__(): + assert len(hash_pine_id('')) == PINE_ID_HASH_WIDTH + assert len(hash_pine_id('Long')) == PINE_ID_HASH_WIDTH + assert len(hash_pine_id('x' * 10_000)) == PINE_ID_HASH_WIDTH + + +def __test_make_run_tag_is_exactly_expected_width__(): + assert len(make_run_tag('')) == RUN_TAG_WIDTH + assert len(make_run_tag('strategy("x")')) == RUN_TAG_WIDTH + assert len(make_run_tag('y' * 100_000)) 
== RUN_TAG_WIDTH + + +def __test_bar_ts_is_exactly_expected_width__(): + """The bar segment is 9 chars regardless of ts magnitude.""" + for bar_ts in [0, 1, 1_700_000_000_000]: + out = build_client_order_id( + run_tag='abcd', pine_id='L', bar_ts_ms=bar_ts, kind=KIND_ENTRY, + ) + # Segments: run(4) - pid(8) - bar(N) - k(1) + r(1) -> 4+1+8+1+N+1+1+1 + bar_segment = out.split('-')[2] + assert len(bar_segment) == BAR_TS_WIDTH + + +# === Distinctness ======================================================== + + +def __test_different_kind_different_id__(): + base = dict(run_tag='abcd', pine_id='Long', bar_ts_ms=1_700_000_000_000) + ids = {build_client_order_id(**base, kind=k) for k in VALID_KINDS} + assert len(ids) == len(VALID_KINDS) + + +def __test_different_pine_id_different_id__(): + kwargs = dict(run_tag='abcd', bar_ts_ms=1_700_000_000_000, kind=KIND_ENTRY) + a = build_client_order_id(**kwargs, pine_id='Long') + b = build_client_order_id(**kwargs, pine_id='Short') + assert a != b + + +def __test_different_bar_ts_different_id__(): + kwargs = dict(run_tag='abcd', pine_id='Long', kind=KIND_ENTRY) + a = build_client_order_id(**kwargs, bar_ts_ms=1_700_000_000_000) + b = build_client_order_id(**kwargs, bar_ts_ms=1_700_000_060_000) + assert a != b + + +def __test_different_run_tag_different_id__(): + kwargs = dict(pine_id='Long', bar_ts_ms=1_700_000_000_000, kind=KIND_ENTRY) + a = build_client_order_id(run_tag='abcd', **kwargs) + b = build_client_order_id(run_tag='efgh', **kwargs) + assert a != b + + +def __test_retry_seq_changes_id__(): + kwargs = dict( + run_tag='abcd', pine_id='Long', bar_ts_ms=1_700_000_000_000, kind=KIND_ENTRY, + ) + a = build_client_order_id(**kwargs, retry_seq=0) + b = build_client_order_id(**kwargs, retry_seq=1) + assert a != b + assert a.endswith('0') + assert b.endswith('1') + + +# === Collision resistance ================================================ + + +def __test_no_collision_across_1000_random_pine_ids__(): + """A birthday attack on 40 
bits would take ~1M ids; 1000 must all be unique.""" + rng = random.Random(0xDEADBEEF) + charset = string.ascii_letters + string.digits + '/_ ' + pine_ids: set[str] = set() + while len(pine_ids) < 1000: + pine_ids.add( + ''.join(rng.choice(charset) for _ in range(rng.randint(1, 40))), + ) + seen = {hash_pine_id(pid) for pid in pine_ids} + assert len(seen) == 1000 + + +def __test_no_collision_across_1000_bar_combinations__(): + """Different bars + kinds produce distinct ids under the same pine_id / run_tag.""" + rng = random.Random(42) + seen: set[str] = set() + for _ in range(1000): + bar_ts = rng.randint(1_000_000_000_000, 2_000_000_000_000) + kind = rng.choice(list(VALID_KINDS)) + seen.add( + build_client_order_id( + run_tag='abcd', pine_id='Long', bar_ts_ms=bar_ts, kind=kind, + ), + ) + assert len(seen) == 1000 + + +# === Validation errors =================================================== + + +@pytest.mark.parametrize('bad_run_tag', [ + '', # too short + 'ab', # too short + 'abcde', # too long + 'ab c', # contains space + 'ab-d', # contains hyphen + 'ábcd', # non-ascii + 'ab_d', # contains underscore +]) +def __test_build_rejects_bad_run_tag__(bad_run_tag): + with pytest.raises(ValueError, match='run_tag'): + build_client_order_id( + run_tag=bad_run_tag, + pine_id='Long', + bar_ts_ms=1_700_000_000_000, + kind=KIND_ENTRY, + ) + + +@pytest.mark.parametrize('bad_kind', ['', 'entry', 'E', 'exit', 'z']) +def __test_build_rejects_bad_kind__(bad_kind): + with pytest.raises(ValueError, match='kind'): + build_client_order_id( + run_tag='abcd', + pine_id='Long', + bar_ts_ms=1_700_000_000_000, + kind=bad_kind, + ) + + +def __test_build_rejects_negative_bar_ts__(): + with pytest.raises(ValueError, match='bar_ts_ms'): + build_client_order_id( + run_tag='abcd', + pine_id='Long', + bar_ts_ms=-1, + kind=KIND_ENTRY, + ) + + +def __test_build_rejects_negative_retry_seq__(): + with pytest.raises(ValueError, match='retry_seq'): + build_client_order_id( + run_tag='abcd', + 
pine_id='Long', + bar_ts_ms=1_700_000_000_000, + kind=KIND_ENTRY, + retry_seq=-1, + ) + + +def __test_build_rejects_retry_seq_overflowing_30_char_budget__(): + """A retry_seq that would push the id past 30 chars must raise. + + The fixed part is 25 chars (``{4}-{8}-{9}-{1}``), leaving 5 chars for + ``retry_seq``. The first 6-char base36 value is ``36**5 == 60_466_176``. + """ + with pytest.raises(ValueError, match='exceeds'): + build_client_order_id( + run_tag='abcd', + pine_id='Long', + bar_ts_ms=1_700_000_000_000, + kind=KIND_ENTRY, + retry_seq=36 ** 5, + ) + + +def __test_build_accepts_maximum_retry_seq_at_30_char_budget__(): + """The largest 5-char base36 retry (``36**5 - 1``) must still fit.""" + out = build_client_order_id( + run_tag='abcd', + pine_id='Long', + bar_ts_ms=1_700_000_000_000, + kind=KIND_ENTRY, + retry_seq=36 ** 5 - 1, + ) + assert len(out) == CLIENT_ORDER_ID_MAX_LEN + + +# === Edge cases ========================================================== + + +def __test_empty_pine_id_is_accepted__(): + """``strategy.close_all()`` has an empty pine_id — must still produce a valid id.""" + out = build_client_order_id( + run_tag='abcd', pine_id='', bar_ts_ms=1_700_000_000_000, kind=KIND_CLOSE, + ) + assert len(out) <= CLIENT_ORDER_ID_MAX_LEN + + +def __test_unicode_pine_id_is_accepted__(): + """The hash layer neutralises whatever bytes the user put in the id.""" + out = build_client_order_id( + run_tag='abcd', + pine_id='Belépő stratégia', + bar_ts_ms=1_700_000_000_000, + kind=KIND_ENTRY, + ) + assert len(out) <= CLIENT_ORDER_ID_MAX_LEN + + +def __test_bar_ts_zero_is_accepted__(): + out = build_client_order_id( + run_tag='abcd', pine_id='Long', bar_ts_ms=0, kind=KIND_ENTRY, + ) + # Zero timestamp encodes as '000000000' (9 zeros). 
+ assert out == 'abcd-' + hash_pine_id('Long') + '-000000000-e0' + + +def __test_format_is_lowercase_ascii__(): + """Exchanges routinely lowercase client ids; staying lowercase avoids surprises.""" + out = build_client_order_id( + run_tag='abcd', + pine_id='SomeId', + bar_ts_ms=1_700_000_000_000, + kind=KIND_EXIT_TP, + ) + assert out == out.lower() + + +# === Kind-specific smoke ================================================= + + +@pytest.mark.parametrize('kind,expected_suffix', [ + (KIND_ENTRY, 'e0'), + (KIND_EXIT_TP, 't0'), + (KIND_EXIT_SL, 's0'), + (KIND_CLOSE, 'c0'), + (KIND_CANCEL, 'x0'), +]) +def __test_kind_appears_literally_in_result__(kind, expected_suffix): + out = build_client_order_id( + run_tag='abcd', pine_id='L', bar_ts_ms=1, kind=kind, + ) + assert out.endswith(expected_suffix) diff --git a/tests/t00_pynecore/core/test_028_broker_exceptions.py b/tests/t00_pynecore/core/test_028_broker_exceptions.py new file mode 100644 index 0000000..e66eb90 --- /dev/null +++ b/tests/t00_pynecore/core/test_028_broker_exceptions.py @@ -0,0 +1,104 @@ +""" +Tests for the WS3 broker-exception taxonomy additions. + +Covers: +- ``AuthenticationError`` construction and reason echo. +- ``InsufficientMarginError`` as a typed ``ExchangeOrderRejectedError``. +- ``AuthenticationFailedEvent`` dataclass. +- ``BrokerPlugin._map_exception`` default stdlib mapping. 
+""" +from __future__ import annotations + +from pynecore.core.broker.exceptions import ( + AuthenticationError, + BrokerError, + ExchangeConnectionError, + ExchangeOrderRejectedError, + InsufficientMarginError, +) +from pynecore.core.broker.models import ( + AuthenticationFailedEvent, + BrokerEvent, + ExchangeOrder, + OrderStatus, + OrderType, +) + + +# === AuthenticationError === + +def __test_authentication_error_is_broker_error__(): + exc = AuthenticationError("Invalid API key") + assert isinstance(exc, BrokerError) + + +def __test_authentication_error_reason_defaults_to_message__(): + exc = AuthenticationError("API key revoked") + assert exc.reason == "API key revoked" + + +def __test_authentication_error_reason_can_be_distinct__(): + exc = AuthenticationError( + "Broker authentication failed at startup — cannot begin trading: bad key", + reason="bad key", + ) + assert exc.reason == "bad key" + assert "bad key" in str(exc) + + +# === InsufficientMarginError === + +def __test_insufficient_margin_is_rejected_error__(): + order = ExchangeOrder( + id="1", symbol="BTCUSDT", side="buy", order_type=OrderType.MARKET, + qty=10.0, filled_qty=0.0, remaining_qty=10.0, + price=None, stop_price=None, average_fill_price=None, + status=OrderStatus.REJECTED, timestamp=0.0, fee=0.0, fee_currency="", + ) + exc = InsufficientMarginError("Not enough margin", order=order) + assert isinstance(exc, ExchangeOrderRejectedError) + assert isinstance(exc, BrokerError) + assert exc.order is order + + +def __test_insufficient_margin_typed_match__(): + """Risk engine pattern-match on type instead of string.""" + try: + raise InsufficientMarginError("Balance too low") + except ExchangeOrderRejectedError as exc: + assert isinstance(exc, InsufficientMarginError) + + +# === AuthenticationFailedEvent === + +def __test_authentication_failed_event_is_broker_event__(): + evt = AuthenticationFailedEvent(reason="Invalid API key") + assert isinstance(evt, BrokerEvent) + assert evt.reason == "Invalid 
API key" + + +# === BrokerPlugin._map_exception default behaviour === +# +# The base implementation does not touch ``self``, so the tests exercise it +# as an unbound method — avoiding the full abstract-method + LiveProvider +# implementation surface a real subclass would require. + +def _map(raw: Exception): + from pynecore.core.plugin.broker import BrokerPlugin + return BrokerPlugin._map_exception(None, raw) # type: ignore[arg-type] + + +def __test_map_exception_maps_connection_error__(): + mapped = _map(ConnectionError("peer closed")) + assert isinstance(mapped, ExchangeConnectionError) + assert "peer closed" in str(mapped) + + +def __test_map_exception_returns_none_for_unknown__(): + assert _map(ValueError("no idea")) is None + + +def __test_map_exception_connection_error_without_message__(): + mapped = _map(ConnectionError()) + assert isinstance(mapped, ExchangeConnectionError) + assert str(mapped) == "Connection lost" From c0f30c3c1cd3c54811faab96ee013262c7ae49db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Sat, 18 Apr 2026 14:13:09 +0200 Subject: [PATCH 35/64] feat(broker): cross-restart recovery via append-only state store (WS1.5) Add StateStore: append-only JSONL persistence for the OrderSyncEngine's envelope identity and parked-verification queue, so a process restart regenerates the same client_order_id for live intents and the exchange dedups in-flight retries. Operations: envelope / park / unpark / complete (self-compaction marker that drops the envelope and any pending entries attached to the same key). A torn final line on crash is silently ignored on replay. OrderSyncEngine: - New optional state_store kwarg; replay populates _persisted_envelope_anchors and _persisted_pending_anchors on construction. - _build_envelope consults the persisted anchor before allocating a new one, so post-restart dispatches re-use the original bar_ts_ms / retry_seq. 
- _verify_pending_dispatches now matches both in-memory parked envelopes and persisted pending anchors against get_open_orders. - _diff_and_dispatch adopts a recovered intent (key in _order_mapping but not in _active_intents) without re-dispatching. - _drop_envelope helper unifies the four pop sites with record_complete. Tests: 10 new in test_029_broker_state_store.py covering JSONL round-trip, torn-line tolerance, complete-as-compaction, restart with same store regenerating identical CO-IDs, parked-dispatch recovery via get_open_orders, and the no-store backwards-compat path. Full broker suite (139 tests) green. --- src/pynecore/core/broker/state_store.py | 251 ++++++++++++ src/pynecore/core/broker/sync_engine.py | 115 +++++- .../core/test_029_broker_state_store.py | 379 ++++++++++++++++++ 3 files changed, 738 insertions(+), 7 deletions(-) create mode 100644 src/pynecore/core/broker/state_store.py create mode 100644 tests/t00_pynecore/core/test_029_broker_state_store.py diff --git a/src/pynecore/core/broker/state_store.py b/src/pynecore/core/broker/state_store.py new file mode 100644 index 0000000..7911a5b --- /dev/null +++ b/src/pynecore/core/broker/state_store.py @@ -0,0 +1,251 @@ +""" +Append-only JSONL state store for cross-restart broker recovery. + +The :class:`~pynecore.core.broker.sync_engine.OrderSyncEngine` keeps two pieces +of state that are required for the canonical ``client_order_id`` scheme to +survive a process restart: + +- ``_envelopes`` — the **first** :class:`DispatchEnvelope` ever built for a + given ``intent_key``. The sync engine pins ``bar_ts_ms`` and ``retry_seq`` + on this envelope and re-uses them on every modify, which is what gives the + exchange a stable id to dedup against. Lose this map and a post-restart + amend re-emits a *different* ``client_order_id`` — the exchange treats the + modify as a brand-new order. +- ``_pending_verification`` — envelopes whose dispatch raised + :class:`OrderDispositionUnknownError`. 
The next sync calls ``get_open_orders`` + and matches by ``client_order_id``; lose the parked envelope and the engine + cannot tell which intent the exchange-side order belongs to. + +The store is a single append-only JSONL file. Every mutation is one short line, +fsync'd by the OS in the usual way; a torn last line is silently dropped on +replay (the prior lines remain valid). The replayer reduces the event log into +the same pair of dicts above. + +JSON schema (one object per line):: + + {"op": "envelope", "key": "Long", "bar_ts_ms": 1700000000000, "retry_seq": 0} + {"op": "park", "key": "Long", "coid": "abcd-...-e0"} + {"op": "unpark", "coid": "abcd-...-e0"} + {"op": "complete", "key": "Long"} + +``complete`` removes both the envelope and any park entry for that key — the +sync engine emits it whenever an intent is cancelled or the position closes. +The file is therefore self-compacting: replay only retains entries whose +``complete`` has not yet arrived. + +The store is **not** a transaction log of every dispatch — only the envelope +*identity* (``bar_ts_ms``, ``retry_seq``) and the parked-verification queue. +The actual exchange-side order list is recovered on the next sync via +``get_open_orders`` matching, exactly as the in-process recovery path does. +""" +from __future__ import annotations + +import io +import json +import logging +import os +from dataclasses import dataclass +from pathlib import Path +from typing import IO + +__all__ = [ + 'StateStore', + 'EnvelopeRecord', + 'PendingRecord', +] + +_log = logging.getLogger(__name__) + + +@dataclass(frozen=True) +class EnvelopeRecord: + """Replay output for a single live envelope. + + The sync engine reconstructs a :class:`DispatchEnvelope` from this by + pairing it with the freshly-built intent (the intent itself is *not* + persisted — it is always rebuilt from the Pine order book on the first + sync after restart). 
+ """ + key: str + bar_ts_ms: int + retry_seq: int + + +@dataclass(frozen=True) +class PendingRecord: + """Replay output for a single parked verification. + + ``key`` lets the sync engine route the recovered exchange order back into + ``_order_mapping`` once :meth:`_verify_pending_dispatches` matches the + ``client_order_id``. + """ + key: str + coid: str + + +class StateStore: + """Append-only JSONL persistence for sync-engine envelopes. + + The store opens its file in line-buffered append mode on construction and + keeps the handle for the lifetime of the engine. Each ``record_*`` method + writes a single short JSON line. The OS buffer flush boundary is the line + terminator — a process kill mid-write at worst loses the trailing line, + and :meth:`replay` skips a malformed final entry rather than aborting. + + :param path: JSONL file path. The parent directory is created on demand. + :param fsync_each_write: When ``True`` (the live-trading default), call + ``os.fsync`` after every record so the kernel page cache cannot lose + a write across a host crash. Off for unit tests where the cost is + not justified — the line buffer alone is sufficient against process + crashes (only host crashes need fsync). + """ + + def __init__(self, path: Path | str, *, fsync_each_write: bool = False) -> None: + self._path = Path(path) + self._fsync = fsync_each_write + self._path.parent.mkdir(parents=True, exist_ok=True) + # Line-buffered append. Opening in append mode is atomic on POSIX with + # respect to concurrent appenders (a single process is the realistic + # case here, but the guarantee removes a class of foot-guns). + self._fp: IO[str] = open(self._path, 'a', buffering=1, encoding='utf-8') + + # === Lifecycle === + + def close(self) -> None: + """Flush and close the underlying file handle. 
Safe to call twice.""" + if self._fp is None: + return + try: + self._fp.flush() + try: + os.fsync(self._fp.fileno()) + except (OSError, ValueError): # pragma: no cover — best effort + pass + finally: + self._fp.close() + self._fp = None # type: ignore[assignment] + + def __enter__(self) -> 'StateStore': + return self + + def __exit__(self, *_exc) -> None: + self.close() + + # === Writers === + + def record_envelope(self, key: str, bar_ts_ms: int, retry_seq: int) -> None: + """Persist the first envelope for an ``intent_key``. + + Subsequent modifies do **not** call this — the sync engine pins the + envelope on first build, so the persisted ``bar_ts_ms`` and + ``retry_seq`` already match every later dispatch. + """ + self._append({ + 'op': 'envelope', + 'key': key, + 'bar_ts_ms': bar_ts_ms, + 'retry_seq': retry_seq, + }) + + def record_park(self, coid: str, key: str) -> None: + """Persist a parked dispatch awaiting verification.""" + self._append({'op': 'park', 'coid': coid, 'key': key}) + + def record_unpark(self, coid: str) -> None: + """Persist that a parked dispatch was matched to an exchange order.""" + self._append({'op': 'unpark', 'coid': coid}) + + def record_complete(self, key: str) -> None: + """Persist that an envelope is no longer needed (cancelled / closed). + + Replay treats ``complete`` as the terminator for the whole ``key``: + any earlier ``envelope`` and any still-open ``park`` for the key are + dropped. This is what keeps the file self-compacting under steady-state + churn. + """ + self._append({'op': 'complete', 'key': key}) + + def _append(self, payload: dict) -> None: + line = json.dumps(payload, separators=(',', ':')) + # The newline is what the line buffer flushes on; write+newline together + # to keep the boundary atomic for the buffer. 
+ self._fp.write(line + '\n') + if self._fsync: + try: + os.fsync(self._fp.fileno()) + except (OSError, ValueError): # pragma: no cover — best effort + pass + + # === Replay === + + def replay(self) -> tuple[dict[str, EnvelopeRecord], dict[str, PendingRecord]]: + """Read the JSONL file and reduce to the live state. + + :returns: ``(envelopes_by_key, pending_by_coid)``. ``envelopes_by_key`` + holds every key whose ``complete`` has not yet been recorded; + ``pending_by_coid`` holds every parked dispatch whose ``unpark`` + (or whose key's ``complete``) has not yet been recorded. + """ + envelopes: dict[str, EnvelopeRecord] = {} + pending: dict[str, PendingRecord] = {} + coid_to_key: dict[str, str] = {} + if not self._path.exists(): + return envelopes, pending + # Read from a fresh handle — the writer's buffer may not yet have hit + # the disk, but that's fine: replay only runs at startup, before any + # writes happen on this engine instance. + with open(self._path, 'r', encoding='utf-8') as fh: + for lineno, raw in enumerate(fh, start=1): + raw = raw.rstrip('\n') + if not raw: + continue + try: + rec = json.loads(raw) + except json.JSONDecodeError: + # Torn final line — log and stop. Anything after a torn + # line is suspect, but the engine's behaviour with a + # truncated tail is the same as if the truncated events + # had never been recorded (they will simply replay as + # in-memory state on the new run). 
+ _log.warning( + 'broker state store: dropping malformed line %d in %s', + lineno, self._path, + ) + break + op = rec.get('op') + if op == 'envelope': + key = rec['key'] + envelopes[key] = EnvelopeRecord( + key=key, + bar_ts_ms=int(rec['bar_ts_ms']), + retry_seq=int(rec['retry_seq']), + ) + elif op == 'park': + coid = rec['coid'] + key = rec['key'] + pending[coid] = PendingRecord(key=key, coid=coid) + coid_to_key[coid] = key + elif op == 'unpark': + coid = rec['coid'] + pending.pop(coid, None) + coid_to_key.pop(coid, None) + elif op == 'complete': + key = rec['key'] + envelopes.pop(key, None) + # Drop any pending verifications still attached to this key. + stale = [c for c, k in coid_to_key.items() if k == key] + for c in stale: + pending.pop(c, None) + coid_to_key.pop(c, None) + else: + _log.warning( + 'broker state store: unknown op %r at line %d in %s', + op, lineno, self._path, + ) + return envelopes, pending + + # === Inspection helpers === + + @property + def path(self) -> Path: + return self._path diff --git a/src/pynecore/core/broker/sync_engine.py b/src/pynecore/core/broker/sync_engine.py index ec43f9f..d7265e2 100644 --- a/src/pynecore/core/broker/sync_engine.py +++ b/src/pynecore/core/broker/sync_engine.py @@ -47,6 +47,11 @@ OcaType, OrderEvent, ) +from pynecore.core.broker.state_store import ( + EnvelopeRecord, + PendingRecord, + StateStore, +) if TYPE_CHECKING: from pynecore.core.broker.position import BrokerPosition @@ -87,6 +92,13 @@ class OrderSyncEngine: guards, ...). ``None`` disables emission — useful in tests and single-shot backtests; production wires the runner's observability bus here. + :param state_store: Optional :class:`StateStore` for cross-restart + recovery. 
When provided the engine persists envelope identity and + parked-verification entries; on construction it replays the file so + a restarted process re-uses the same ``client_order_id`` for every + live intent and matches up parked dispatches against + ``get_open_orders`` on the next sync. Pass ``None`` for unit tests + and single-shot backtests where restart safety is not required. """ def __init__( @@ -102,6 +114,7 @@ def __init__( mintick: float = 0.01, oca_partial_fill_policy: OcaPartialFillPolicy = OcaPartialFillPolicy.FILL_CANCELS, broker_event_sink: Callable[[BrokerEvent], None] | None = None, + state_store: StateStore | None = None, ) -> None: self._broker = broker self._position = position @@ -113,6 +126,7 @@ def __init__( self._mintick = mintick self._oca_partial_policy = oca_partial_fill_policy self._broker_event_sink = broker_event_sink + self._state_store = state_store # Capabilities are declared once at plugin startup — cache the lookup # so the cascade-cancel fast path does not pay a method call per event. caps = broker.get_capabilities() @@ -136,6 +150,24 @@ def __init__( # not emit duplicate CancelIntents. self._cancelled_oca_groups_this_sync: set[str] = set() + # Cross-restart recovery anchors. The state store persists envelope + # identity and parked-verification entries; replay rebuilds these + # *anchor* dicts (intent objects are not persisted — they are rebuilt + # from the Pine order book on the first post-restart sync). The first + # _build_envelope / _verify_pending_dispatches call for an anchored key + # promotes the anchor into the live in-memory state and clears it. 
+ self._persisted_envelope_anchors: dict[str, EnvelopeRecord] = {} + self._persisted_pending_anchors: dict[str, PendingRecord] = {} + if state_store is not None: + envelopes, pending = state_store.replay() + self._persisted_envelope_anchors = dict(envelopes) + self._persisted_pending_anchors = dict(pending) + if envelopes or pending: + _log.info( + "broker state replay: %d envelope(s), %d pending verification(s)", + len(envelopes), len(pending), + ) + # === Public API === @property @@ -250,12 +282,17 @@ def _verify_pending_dispatches(self) -> None: ``client_order_id`` that now appears on the exchange, promotes the envelope back into ``_order_mapping`` without re-dispatching. + After a restart the persisted parked entries are also matched here — + the in-memory envelope is gone, but the persisted ``key`` is enough to + attach the recovered exchange order to the right ``_order_mapping`` + slot. + A pending entry that does *not* show up stays parked — the engine deliberately does not re-dispatch because the original may still land (slow network round-trip). The user can inspect :attr:`pending_verification` to surface stuck entries. 
""" - if not self._pending_verification: + if not self._pending_verification and not self._persisted_pending_anchors: return orders = self._run_async(self._broker.get_open_orders(self._symbol)) by_coid = {o.client_order_id: o for o in orders if o.client_order_id} @@ -268,10 +305,26 @@ def _verify_pending_dispatches(self) -> None: current = self._order_mapping.setdefault(key, []) if order.id not in current: current.append(order.id) + if self._state_store is not None: + self._state_store.record_unpark(coid) _log.info( "recovered pending dispatch %s -> exchange order %s " "for intent %s", coid, order.id, key, ) + for coid in list(self._persisted_pending_anchors): + order = by_coid.get(coid) + if order is None: + continue + anchor = self._persisted_pending_anchors.pop(coid) + current = self._order_mapping.setdefault(anchor.key, []) + if order.id not in current: + current.append(order.id) + if self._state_store is not None: + self._state_store.record_unpark(coid) + _log.info( + "recovered persisted pending dispatch %s -> exchange order %s " + "for intent %s", coid, order.id, anchor.key, + ) def reconcile(self) -> None: """Read-side state reconciliation with the exchange. @@ -338,7 +391,7 @@ def _route_event(self, event: OrderEvent) -> None: ) self._order_mapping.pop(key, None) self._active_intents.pop(key, None) - self._envelopes.pop(key, None) + self._drop_envelope(key) elif t == 'rejected': key = self._find_key_for_order_id(event.order.id) if key is not None: @@ -348,7 +401,19 @@ def _route_event(self, event: OrderEvent) -> None: ) self._order_mapping.pop(key, None) self._active_intents.pop(key, None) - self._envelopes.pop(key, None) + self._drop_envelope(key) + + def _drop_envelope(self, key: str) -> None: + """Remove envelope state for ``key`` and persist a ``complete`` marker. + + Called from every site that retires an intent (cancel dispatch, + unexpected cancel event, reject event). 
The persisted marker lets the + replay path skip the envelope and any still-pending verifications + attached to the same ``key`` — keeping the JSONL self-compacting. + """ + self._envelopes.pop(key, None) + if self._state_store is not None: + self._state_store.record_complete(key) def _find_key_for_order_id(self, order_id: str) -> str | None: for key, ids in self._order_mapping.items(): @@ -680,8 +745,18 @@ def _diff_and_dispatch(self, intents: list[Intent]) -> None: for key, intent in new_map.items(): if key not in self._active_intents: - self._dispatch_new(intent) - self._active_intents[key] = intent + if key in self._order_mapping: + # Cross-restart adoption: the persisted state recovered an + # exchange-side order for this intent (via + # _verify_pending_dispatches). Re-dispatching here would + # duplicate the order — instead, adopt the existing + # mapping and pin the envelope from the persisted anchor + # so subsequent modifies emit the same client_order_id. + self._build_envelope(intent) + self._active_intents[key] = intent + else: + self._dispatch_new(intent) + self._active_intents[key] = intent elif intent != self._active_intents[key]: self._dispatch_modify(self._active_intents[key], intent) self._active_intents[key] = intent @@ -695,6 +770,11 @@ def _build_envelope(self, intent: Intent) -> DispatchEnvelope: anchor so the ``client_order_id`` stays stable across amend cycles — that stability is what lets the exchange recognise a retry as a duplicate rather than a new order. + + After a restart, the anchor for an existing ``intent_key`` is + reconstructed from the persisted :class:`StateStore` instead of being + recomputed from ``_current_bar_ts_ms`` — the latter would yield a new + ``client_order_id`` and break exchange-side dedup. 
""" existing = self._envelopes.get(intent.intent_key) if existing is not None: @@ -704,6 +784,16 @@ def _build_envelope(self, intent: Intent) -> DispatchEnvelope: bar_ts_ms=existing.bar_ts_ms, retry_seq=existing.retry_seq, ) + anchor = self._persisted_envelope_anchors.pop(intent.intent_key, None) + if anchor is not None: + envelope = DispatchEnvelope( + intent=intent, + run_tag=self._run_tag, + bar_ts_ms=anchor.bar_ts_ms, + retry_seq=anchor.retry_seq, + ) + self._envelopes[intent.intent_key] = envelope + return envelope envelope = DispatchEnvelope( intent=intent, run_tag=self._run_tag, @@ -711,6 +801,12 @@ def _build_envelope(self, intent: Intent) -> DispatchEnvelope: retry_seq=0, ) self._envelopes[intent.intent_key] = envelope + if self._state_store is not None: + self._state_store.record_envelope( + key=intent.intent_key, + bar_ts_ms=envelope.bar_ts_ms, + retry_seq=envelope.retry_seq, + ) return envelope def _build_cancel_envelope(self, cancel: CancelIntent) -> DispatchEnvelope: @@ -731,6 +827,11 @@ def _park_pending( ``_order_mapping`` once the order shows up. """ self._pending_verification[error.client_order_id] = envelope + if self._state_store is not None: + self._state_store.record_park( + coid=error.client_order_id, + key=envelope.intent.intent_key, + ) _log.warning( "dispatch for %s ended with unknown disposition " "(client_order_id=%s); will verify on next sync: %s", @@ -781,7 +882,7 @@ def _dispatch_cancel(self, old: Intent) -> None: else: # CloseIntent is immediate market — nothing to cancel. 
self._order_mapping.pop(old.intent_key, None) - self._envelopes.pop(old.intent_key, None) + self._drop_envelope(old.intent_key) return cancel_envelope = self._build_cancel_envelope(cancel) try: @@ -797,7 +898,7 @@ def _dispatch_cancel(self, old: Intent) -> None: old.intent_key, e.client_order_id, e, ) self._order_mapping.pop(old.intent_key, None) - self._envelopes.pop(old.intent_key, None) + self._drop_envelope(old.intent_key) # === Async bridge === diff --git a/tests/t00_pynecore/core/test_029_broker_state_store.py b/tests/t00_pynecore/core/test_029_broker_state_store.py new file mode 100644 index 0000000..f5bd4df --- /dev/null +++ b/tests/t00_pynecore/core/test_029_broker_state_store.py @@ -0,0 +1,379 @@ +""" +Tests for the cross-restart broker recovery layer (WS1.5). + +Two layers under test: + +- :class:`pynecore.core.broker.state_store.StateStore` — the append-only JSONL + format itself. Round-trip writes / replay; torn-line tolerance; the + ``complete`` self-compaction rule that keeps the file small under churn. + +- :class:`OrderSyncEngine` integration — restarting the engine with the same + store regenerates identical ``client_order_id``s for live intents and + recovers parked-verification entries via the ``get_open_orders`` matching + path that already existed for in-process recovery. 
+""" +from __future__ import annotations + +from dataclasses import dataclass, field +from pathlib import Path +from types import SimpleNamespace + +import pytest + +from pynecore import lib +from pynecore.core.broker.exceptions import OrderDispositionUnknownError +from pynecore.core.broker.position import BrokerPosition +from pynecore.core.broker.state_store import ( + EnvelopeRecord, + PendingRecord, + StateStore, +) +from pynecore.core.broker.sync_engine import OrderSyncEngine +from pynecore.core.broker.models import ( + DispatchEnvelope, + ExchangeCapabilities, + ExchangeOrder, + OrderStatus, + OrderType, +) +from pynecore.lib.strategy import Order, _order_type_entry + + +SYMBOL = "BTCUSDT" +RUN_TAG = "test" +BAR_TS = 1_700_000_000_000 + + +@pytest.fixture(autouse=True) +def _stub_script(): + prev = lib._script + lib._script = SimpleNamespace(initial_capital=1_000_000.0) + try: + yield + finally: + lib._script = prev + + +# === StateStore unit tests === + + +def __test_state_store_round_trip_envelope_and_pending__(tmp_path: Path) -> None: + path = tmp_path / "state.jsonl" + with StateStore(path) as store: + store.record_envelope(key="Long", bar_ts_ms=BAR_TS, retry_seq=0) + store.record_envelope(key="TP\0Long", bar_ts_ms=BAR_TS, retry_seq=0) + store.record_park(coid="abcd-pid12345-0jw3qkz00-e0", key="Long") + + with StateStore(path) as store: + envelopes, pending = store.replay() + + assert envelopes == { + "Long": EnvelopeRecord(key="Long", bar_ts_ms=BAR_TS, retry_seq=0), + "TP\0Long": EnvelopeRecord(key="TP\0Long", bar_ts_ms=BAR_TS, retry_seq=0), + } + assert pending == { + "abcd-pid12345-0jw3qkz00-e0": PendingRecord( + key="Long", coid="abcd-pid12345-0jw3qkz00-e0", + ), + } + + +def __test_state_store_complete_drops_envelope_and_pending_for_key__( + tmp_path: Path, +) -> None: + path = tmp_path / "state.jsonl" + with StateStore(path) as store: + store.record_envelope(key="Long", bar_ts_ms=BAR_TS, retry_seq=0) + store.record_park(coid="coid-1", key="Long") + 
store.record_complete(key="Long") + + envelopes, pending = StateStore(path).replay() + assert envelopes == {} + assert pending == {}, "complete must drop pending entries attached to the key" + + +def __test_state_store_unpark_drops_only_that_coid__(tmp_path: Path) -> None: + path = tmp_path / "state.jsonl" + with StateStore(path) as store: + store.record_envelope(key="Long", bar_ts_ms=BAR_TS, retry_seq=0) + store.record_park(coid="coid-1", key="Long") + store.record_park(coid="coid-2", key="Long") + store.record_unpark(coid="coid-1") + + envelopes, pending = StateStore(path).replay() + assert "Long" in envelopes + assert list(pending) == ["coid-2"] + + +def __test_state_store_torn_final_line_is_ignored__(tmp_path: Path) -> None: + """Crash mid-write must not lose previously-flushed records.""" + path = tmp_path / "state.jsonl" + with StateStore(path) as store: + store.record_envelope(key="Long", bar_ts_ms=BAR_TS, retry_seq=0) + store.record_envelope(key="Short", bar_ts_ms=BAR_TS, retry_seq=0) + # Simulate a torn append: the writer crashed before the newline / mid-JSON. 
+ with open(path, "a", encoding="utf-8") as fh: + fh.write('{"op":"envelope","key":"Tor') + + envelopes, pending = StateStore(path).replay() + assert set(envelopes) == {"Long", "Short"} + assert pending == {} + + +def __test_state_store_replay_on_missing_file_is_empty__(tmp_path: Path) -> None: + envelopes, pending = StateStore(tmp_path / "missing.jsonl").replay() + assert envelopes == {} + assert pending == {} + + +def __test_state_store_unknown_op_is_skipped__(tmp_path: Path) -> None: + path = tmp_path / "state.jsonl" + with StateStore(path) as store: + store.record_envelope(key="Long", bar_ts_ms=BAR_TS, retry_seq=0) + with open(path, "a", encoding="utf-8") as fh: + fh.write('{"op":"unknown","key":"X"}\n') + + envelopes, _ = StateStore(path).replay() + assert "Long" in envelopes + + +# === Integration: restart recovery via the sync engine === + + +@dataclass +class _MockBroker: + """Minimal broker for restart-recovery scenarios.""" + entry_calls: list[DispatchEnvelope] = field(default_factory=list) + cancel_calls: list[DispatchEnvelope] = field(default_factory=list) + modify_entry_calls: list[tuple[DispatchEnvelope, DispatchEnvelope]] = field( + default_factory=list, + ) + open_orders: list[ExchangeOrder] = field(default_factory=list) + raise_on_next_entry: Exception | None = None + capabilities: ExchangeCapabilities = field(default_factory=ExchangeCapabilities) + _next_id: int = 0 + + def get_capabilities(self) -> ExchangeCapabilities: + return self.capabilities + + def _mk_order(self, envelope: DispatchEnvelope) -> ExchangeOrder: + self._next_id += 1 + intent = envelope.intent + return ExchangeOrder( + id=f"xchg-{self._next_id}", + symbol=getattr(intent, 'symbol', SYMBOL), + side=getattr(intent, 'side', 'buy'), + order_type=OrderType.LIMIT, + qty=getattr(intent, 'qty', 0.0), + filled_qty=0.0, + remaining_qty=getattr(intent, 'qty', 0.0), + price=None, + stop_price=None, + average_fill_price=None, + status=OrderStatus.OPEN, + timestamp=0.0, + fee=0.0, + 
fee_currency="", + client_order_id=envelope.client_order_id('e'), + ) + + async def execute_entry(self, envelope): + self.entry_calls.append(envelope) + if self.raise_on_next_entry is not None: + err = self.raise_on_next_entry + self.raise_on_next_entry = None + raise err + return [self._mk_order(envelope)] + + async def execute_exit(self, envelope): # pragma: no cover — unused here + return [self._mk_order(envelope)] + + async def execute_close(self, envelope): # pragma: no cover — unused here + return self._mk_order(envelope) + + async def execute_cancel(self, envelope): + self.cancel_calls.append(envelope) + return True + + async def modify_entry(self, old, new): + self.modify_entry_calls.append((old, new)) + return [self._mk_order(new)] + + async def modify_exit(self, old, new): # pragma: no cover — unused here + return [self._mk_order(new)] + + async def get_open_orders(self, symbol=None): + return list(self.open_orders) + + async def get_position(self, symbol): # pragma: no cover — unused here + return None + + def watch_orders(self): # pragma: no cover — unused here + raise NotImplementedError + + +def _mk_engine( + broker: _MockBroker, store: StateStore | None, +) -> tuple[OrderSyncEngine, BrokerPosition]: + pos = BrokerPosition() + engine = OrderSyncEngine( + broker=broker, # type: ignore[arg-type] + position=pos, + symbol=SYMBOL, + run_tag=RUN_TAG, + mintick=1.0, + state_store=store, + ) + return engine, pos + + +def __test_restart_regenerates_same_client_order_id_for_live_intent__( + tmp_path: Path, +) -> None: + """Restart with same store → first dispatch produces the *same* CO-ID. + + Without persistence the post-restart engine would re-anchor the envelope to + the new ``bar_ts_ms`` and emit a brand-new ``client_order_id`` — which the + exchange would treat as a new order, not a duplicate of the in-flight one. 
+ """ + state_path = tmp_path / "state.jsonl" + + broker_a = _MockBroker() + store_a = StateStore(state_path) + engine_a, pos_a = _mk_engine(broker_a, store_a) + pos_a.entry_orders["L"] = Order("L", 1.0, order_type=_order_type_entry, limit=50_000.0) + engine_a.sync(BAR_TS) + coid_first = broker_a.entry_calls[0].client_order_id('e') + store_a.close() + + # Process restart — fresh broker, fresh engine, fresh position; same store path + # and same Pine order book (the script reproduces it on every run). + broker_b = _MockBroker() + store_b = StateStore(state_path) + engine_b, pos_b = _mk_engine(broker_b, store_b) + pos_b.entry_orders["L"] = Order("L", 1.0, order_type=_order_type_entry, limit=50_000.0) + engine_b.sync(BAR_TS + 60_000) # later bar — would differ without persistence + coid_second = broker_b.entry_calls[0].client_order_id('e') + + assert coid_first == coid_second, ( + "post-restart dispatch must use the persisted bar_ts_ms anchor" + ) + + +def __test_restart_completed_intent_does_not_replay__(tmp_path: Path) -> None: + """An intent that was cancelled before restart must NOT resurrect. + + The ``complete`` marker dropped during cancel removes the envelope from the + persisted state, so a fresh engine that no longer sees the Pine order does + not anchor a stale ``bar_ts_ms``. + """ + state_path = tmp_path / "state.jsonl" + + broker_a = _MockBroker() + store_a = StateStore(state_path) + engine_a, pos_a = _mk_engine(broker_a, store_a) + pos_a.entry_orders["L"] = Order("L", 1.0, order_type=_order_type_entry, limit=50_000.0) + engine_a.sync(BAR_TS) + # Pine cancels the order — diff engine emits cancel + complete. + pos_a.entry_orders.clear() + engine_a.sync(BAR_TS + 1) + assert len(broker_a.cancel_calls) == 1 + store_a.close() + + # Restart with no Pine order — replay must be empty. 
+ envelopes, pending = StateStore(state_path).replay() + assert envelopes == {} + assert pending == {} + + +def __test_restart_recovers_parked_dispatch_via_get_open_orders__( + tmp_path: Path, +) -> None: + """A pre-restart parked dispatch is matched on the next sync's open-orders view. + + Sequence: + + 1. Engine A dispatches an entry → broker raises ``OrderDispositionUnknownError``. + 2. Engine A persists the parked CO-ID and key. + 3. Process restarts. Engine B replays the store, but its in-memory dict is empty. + 4. The exchange did receive the order. Engine B's first sync sees it via + ``get_open_orders`` matched by the persisted CO-ID and registers the + exchange order under the right ``intent_key`` — without re-dispatching. + """ + state_path = tmp_path / "state.jsonl" + + broker_a = _MockBroker() + + # Compute the deterministic CO-ID the engine will allocate for this entry. + expected_envelope = DispatchEnvelope( + intent=SimpleNamespace(pine_id="L"), # type: ignore[arg-type] + run_tag=RUN_TAG, + bar_ts_ms=BAR_TS, + retry_seq=0, + ) + expected_coid = expected_envelope.client_order_id('e') + + broker_a.raise_on_next_entry = OrderDispositionUnknownError( + "simulated network timeout", + client_order_id=expected_coid, + cause=TimeoutError("simulated network timeout"), + ) + + store_a = StateStore(state_path) + engine_a, pos_a = _mk_engine(broker_a, store_a) + pos_a.entry_orders["L"] = Order("L", 1.0, order_type=_order_type_entry, limit=50_000.0) + engine_a.sync(BAR_TS) + assert expected_coid in engine_a.pending_verification, "park did not happen" + store_a.close() + + # Restart. The exchange happens to have the order — broker_b returns it from + # get_open_orders with a matching client_order_id. 
+ broker_b = _MockBroker() + broker_b.open_orders = [ + ExchangeOrder( + id="xchg-from-restart", + symbol=SYMBOL, + side="buy", + order_type=OrderType.LIMIT, + qty=1.0, + filled_qty=0.0, + remaining_qty=1.0, + price=50_000.0, + stop_price=None, + average_fill_price=None, + status=OrderStatus.OPEN, + timestamp=0.0, + fee=0.0, + fee_currency="", + client_order_id=expected_coid, + ), + ] + store_b = StateStore(state_path) + engine_b, pos_b = _mk_engine(broker_b, store_b) + pos_b.entry_orders["L"] = Order("L", 1.0, order_type=_order_type_entry, limit=50_000.0) + engine_b.sync(BAR_TS + 60_000) + + # No re-dispatch: the engine reused the in-flight order via the persisted park. + assert len(broker_b.entry_calls) == 0, ( + "post-restart engine must NOT re-dispatch a parked entry that the " + "exchange already has" + ) + assert "xchg-from-restart" in engine_b.order_mapping["L"] + # The persisted park is consumed. + envelopes, pending = StateStore(state_path).replay() + assert pending == {} + assert "L" in envelopes # envelope still live (entry not yet filled / cancelled) + + +def __test_no_state_store_means_no_persistence__(tmp_path: Path) -> None: + """Backwards-compat: omitting ``state_store`` keeps behaviour pre-WS1.5. + + No file should be created and a fresh engine must be free to anchor the + envelope to its own ``bar_ts_ms``. + """ + broker = _MockBroker() + engine, pos = _mk_engine(broker, store=None) + pos.entry_orders["L"] = Order("L", 1.0, order_type=_order_type_entry, limit=50_000.0) + engine.sync(BAR_TS) + # No file in tmp_path because the engine was given store=None. + assert list(tmp_path.iterdir()) == [] From ae68e6afe78059eb3f93905f761b1ebf860c0438 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 20 Apr 2026 14:13:31 +0200 Subject: [PATCH 36/64] feat(plugin): show plugin descriptions in CLI output Add helpers to extract normalized plugin docstrings for summaries and detailed descriptions. 
Display plugin summaries in the list command and render full descriptions in the info command. Include broker capability detection for BrokerPlugin implementations. Expand the CCXT provider docstring with supported features and limitations, and narrow sandbox setup error handling to NotSupported. --- src/pynecore/cli/commands/plugin.py | 23 ++++++++++++++++----- src/pynecore/core/plugin/__init__.py | 30 ++++++++++++++++++++++++++++ src/pynecore/providers/ccxt.py | 30 +++++++++++++++++++++------- 3 files changed, 71 insertions(+), 12 deletions(-) diff --git a/src/pynecore/cli/commands/plugin.py b/src/pynecore/cli/commands/plugin.py index 8e28ca1..f4f2335 100644 --- a/src/pynecore/cli/commands/plugin.py +++ b/src/pynecore/cli/commands/plugin.py @@ -11,11 +11,13 @@ def _get_capabilities(cls: type) -> list[str]: """Determine plugin capabilities from its class hierarchy.""" from ...core.plugin import ProviderPlugin, CLIPlugin + from ...core.plugin.broker import BrokerPlugin caps = [] if isinstance(cls, type) and issubclass(cls, ProviderPlugin): caps.append('provider') - # Future: ExtensionPlugin, LiveProviderPlugin checks will go here + if isinstance(cls, type) and issubclass(cls, BrokerPlugin): + caps.append('broker') if isinstance(cls, type) and issubclass(cls, CLIPlugin): caps.append('cli') return caps @@ -31,7 +33,7 @@ def list_plugins( """ List all installed PyneCore plugins. 
""" - from ...core.plugin import discover_plugins, get_plugin_metadata + from ...core.plugin import discover_plugins, get_plugin_metadata, get_plugin_summary plugins = discover_plugins() if not plugins: @@ -54,7 +56,8 @@ def list_plugins( display_name = getattr(cls, 'plugin_name', '') or name version = f"v{meta['version']}" if meta['version'] else '' caps_str = ', '.join(caps) if caps else 'library' - rows.append((name, display_name, version, caps_str)) + summary = get_plugin_summary(cls) or meta['description'] + rows.append((name, display_name, version, caps_str, summary)) except Exception as e: errors.append((name, str(e))) @@ -70,8 +73,11 @@ def list_plugins( secho(f"\n Installed plugins:\n", fg=colors.BRIGHT_WHITE, bold=True) - for name, display_name, version, caps_str in rows: + indent = 4 + w_name + 3 + for name, display_name, version, caps_str, summary in rows: secho(f" {name:<{w_name}} {display_name:<{w_disp}} {version:<{w_ver}} [{caps_str}]") + if summary: + secho(f"{' ' * indent}└─ {summary}", dim=True) for name, error in errors: secho(f" {name:<{w_name}} (failed to load: {error})", fg=colors.RED) @@ -88,7 +94,9 @@ def plugin_info( """ Show detailed information about an installed plugin. 
""" - from ...core.plugin import discover_plugins, get_plugin_metadata + from ...core.plugin import discover_plugins, get_plugin_metadata, get_plugin_description + from rich.console import Console + from rich.markdown import Markdown import dataclasses plugins = discover_plugins() @@ -116,6 +124,11 @@ def plugin_info( secho(f" Capabilities: {', '.join(caps) if caps else 'library'}") secho(f" Entry point: {ep.value}") + description = get_plugin_description(cls) + if description: + secho("\n Details:", fg=colors.BRIGHT_WHITE, bold=True) + Console().print(Markdown(description)) + config_cls = getattr(cls, 'Config', None) if config_cls and dataclasses.is_dataclass(config_cls): fields = dataclasses.fields(config_cls) diff --git a/src/pynecore/core/plugin/__init__.py b/src/pynecore/core/plugin/__init__.py index 6bf73ce..1f38a62 100644 --- a/src/pynecore/core/plugin/__init__.py +++ b/src/pynecore/core/plugin/__init__.py @@ -28,6 +28,7 @@ class BinancePlugin(LiveProviderPlugin, CLIPlugin): ... # offline + live cls = load_plugin("capitalcom") """ +import inspect import re import sys from typing import TypeVar, Generic @@ -128,6 +129,35 @@ def get_plugin_metadata(ep: EntryPoint) -> dict[str, str]: } +def get_plugin_summary(cls: type) -> str: + """ + Return the first paragraph of the plugin class docstring. + + The first paragraph is the text up to the first blank line, with + internal newlines collapsed to single spaces — suitable for a + one-line summary in listings. + + :param cls: The plugin class. + :return: First-paragraph summary, or ``""`` if no docstring. + """ + doc = inspect.getdoc(cls) or "" + first_para = doc.split("\n\n", 1)[0].strip() + return " ".join(first_para.split()) + + +def get_plugin_description(cls: type) -> str: + """ + Return the full normalized plugin class docstring. + + Uses :func:`inspect.getdoc`, which strips the uniform leading + indentation per PEP 257. + + :param cls: The plugin class. 
+ :return: Normalized docstring, or ``""`` if no docstring. + """ + return inspect.getdoc(cls) or "" + + def _parse_min_pynecore(ep: EntryPoint) -> str: """ Extract the minimum PyneCore version from the package dependencies. diff --git a/src/pynecore/providers/ccxt.py b/src/pynecore/providers/ccxt.py index 49f4e51..b39803b 100644 --- a/src/pynecore/providers/ccxt.py +++ b/src/pynecore/providers/ccxt.py @@ -66,13 +66,29 @@ class CCXTConfig: class CCXTProvider(LiveProviderPlugin[CCXTConfig]): - """ - CCXT-based provider for live OHLCV data and market metadata. + """CCXT-based market-data provider for ~100 crypto exchanges. + + Wraps the CCXT library for symbol discovery and historical candle + downloads, and CCXT Pro for real-time WebSocket OHLCV streaming. + One plugin instance serves every CCXT-registered exchange — the + exchange is selected from the symbol prefix (`BYBIT:BTCUSDT` etc.). + + **Supported** + + - Symbol discovery and market metadata across all CCXT exchanges + - Historical candle downloads with per-exchange bar-limit hints + - Real-time OHLCV WebSocket streaming (CCXT Pro) + - Sandbox / testnet endpoints via the `sandbox` config flag + - Forwards any extra field on `CCXTConfig` as a CCXT constructor + kwarg — add exchange-specific settings without touching the plugin + + **Limitations** - Uses CCXT for symbol discovery, historical candle downloads, and - CCXT Pro for real-time websocket OHLCV streaming. Order execution - is NOT provided — use a dedicated exchange broker plugin - (``pynecore-bybit``, ``pynecore-binance``, etc.) for that. + - **No order execution** — this plugin is market-data only. For live + trading use a dedicated exchange `BrokerPlugin` (Capital.com, + Interactive Brokers, Bybit, Binance). + - API credentials are per-exchange; the defaults on `CCXTConfig` + apply to every exchange unless overridden via CCXT's own routing. 
""" plugin_name = "CCXT" @@ -215,7 +231,7 @@ def __init__(self, *, symbol: str | None = None, timeframe: str | None = None, if self.config and getattr(self.config, 'sandbox', False): try: self._client.set_sandbox_mode(True) - except Exception: # noqa: BLE001 + except ccxt.NotSupported: pass @override From 20e65d4f84237bbd039d808400017ae68542055e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Mon, 20 Apr 2026 15:10:51 +0200 Subject: [PATCH 37/64] fix(plugin): indent plugin details output Pad rendered plugin descriptions so markdown content aligns with the rest of the command output. Tighten Config lookup typing and make the dataclass check explicit. --- src/pynecore/cli/commands/plugin.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/pynecore/cli/commands/plugin.py b/src/pynecore/cli/commands/plugin.py index f4f2335..4d6ce73 100644 --- a/src/pynecore/cli/commands/plugin.py +++ b/src/pynecore/cli/commands/plugin.py @@ -97,6 +97,7 @@ def plugin_info( from ...core.plugin import discover_plugins, get_plugin_metadata, get_plugin_description from rich.console import Console from rich.markdown import Markdown + from rich.padding import Padding import dataclasses plugins = discover_plugins() @@ -127,10 +128,11 @@ def plugin_info( description = get_plugin_description(cls) if description: secho("\n Details:", fg=colors.BRIGHT_WHITE, bold=True) - Console().print(Markdown(description)) + Console().print(Padding(Markdown(description), (0, 0, 0, 2))) - config_cls = getattr(cls, 'Config', None) - if config_cls and dataclasses.is_dataclass(config_cls): + config_cls: type | None = getattr(cls, 'Config', None) + if config_cls is not None and dataclasses.is_dataclass(config_cls): + # noinspection PyDataclass fields = dataclasses.fields(config_cls) if fields: secho(f"\n Config fields (defaults):") From a06834035381572b30db3e9a3b838a550ee851ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wallner=20=C3=81d=C3=A1m?= Date: Tue, 21 Apr 
2026 09:50:01 +0200 Subject: [PATCH 38/64] feat: add unified broker SQLite storage Replace JSONL broker state persistence with BrokerStore backed by SQLite. Add run identity based run tags, persisted run contexts, heartbeat cleanup, order refs, audit events, and restart recovery through RunContext replay. Wire broker runs through the CLI and ScriptRunner, add run labels, expose store_ctx on broker plugins, and update broker plugin documentation. Add unit and sync-engine integration coverage for storage and recovery. --- docs/development/plugin-system.md | 136 ++- src/pynecore/cli/commands/run.py | 369 ++++--- src/pynecore/core/broker/idempotency.py | 22 +- src/pynecore/core/broker/models.py | 3 +- src/pynecore/core/broker/run_identity.py | 121 +++ src/pynecore/core/broker/state_store.py | 251 ----- src/pynecore/core/broker/storage.py | 970 ++++++++++++++++++ src/pynecore/core/broker/sync_engine.py | 58 +- src/pynecore/core/plugin/broker.py | 45 + src/pynecore/core/script_runner.py | 58 +- .../core/test_026_broker_runner.py | 1 + .../core/test_027_broker_idempotency.py | 21 +- .../core/test_029_broker_state_store.py | 379 ------- .../core/test_029_broker_store.py | 612 +++++++++++ .../core/test_030_broker_store_sync_engine.py | 348 +++++++ 15 files changed, 2544 insertions(+), 850 deletions(-) create mode 100644 src/pynecore/core/broker/run_identity.py delete mode 100644 src/pynecore/core/broker/state_store.py create mode 100644 src/pynecore/core/broker/storage.py delete mode 100644 tests/t00_pynecore/core/test_029_broker_state_store.py create mode 100644 tests/t00_pynecore/core/test_029_broker_store.py create mode 100644 tests/t00_pynecore/core/test_030_broker_store_sync_engine.py diff --git a/docs/development/plugin-system.md b/docs/development/plugin-system.md index c6a7394..88a4498 100644 --- a/docs/development/plugin-system.md +++ b/docs/development/plugin-system.md @@ -28,6 +28,7 @@ class hierarchy determines what a plugin can do: Plugin (base) ├── 
ProviderPlugin — Offline OHLCV data provider │ └── LiveProviderPlugin — WebSocket/streaming data (extends ProviderPlugin) +│ └── BrokerPlugin — Order execution (extends LiveProviderPlugin) ├── CLIPlugin — CLI subcommands and parameter hooks └── ExtensionPlugin — Hook-based script extension (planned) ``` @@ -35,8 +36,10 @@ Plugin (base) `LiveProviderPlugin` inherits from `ProviderPlugin` — every live provider can also download historical data. See [Live Mode](../advanced/live-mode.md) for data-side details. -Order execution is handled by dedicated per-exchange broker plugins -(`pynecore-bybit`, `pynecore-binance`, etc.) — not by the data provider. +`BrokerPlugin` inherits from `LiveProviderPlugin` — an exchange that routes orders can +also deliver the live market data those orders trade against. Order execution is handled +by dedicated per-exchange broker plugins (`pynecore-bybit`, `pynecore-binance`, +`pynecore-capitalcom`, etc.) — not by standalone data providers. Multiple inheritance combines capabilities: @@ -240,6 +243,135 @@ out, users uncomment and edit what they need: The Generic type parameter (`ProviderPlugin[FooConfig]`) gives your IDE full type information on `self.config` — no more `object | None` warnings. +### BrokerPlugin — Order Execution + +A `BrokerPlugin` is a `LiveProviderPlugin` that can also **route orders** to an +exchange. It receives high-level intents from the engine (`execute_entry`, +`execute_exit`, `execute_close`, `execute_cancel`) and translates them into +exchange-specific calls. The engine handles idempotency, retry, and reconcile +— the plugin focuses on the actual REST/WebSocket wiring. 
+ +```python +from dataclasses import dataclass +from pynecore.core.plugin import BrokerPlugin, override +from pynecore.core.broker.models import ( + DispatchEnvelope, ExchangeCapabilities, ExchangeOrder, ExchangePosition, +) + + +@dataclass +class FooBrokerConfig: + """Foo exchange credentials.""" + api_key: str = "" + api_secret: str = "" + demo: bool = True + + +class FooBroker(BrokerPlugin[FooBrokerConfig]): + Config = FooBrokerConfig + + @override + async def connect(self) -> None: + # Authenticate and populate self._account_id. + # The account_id property later reads it back as a sync value. + await self._authenticate() + self._account_id = f"foo-{'demo' if self.config.demo else 'live'}-{self._login}" + + @override + def get_capabilities(self) -> ExchangeCapabilities: + return ExchangeCapabilities( + stop_order=True, + tp_sl_bracket=True, + reduce_only=True, + # ... see pynecore.core.broker.models for the full struct + ) + + @override + async def execute_entry(self, envelope: DispatchEnvelope) -> ExchangeOrder: + ... + + @override + async def get_position(self, symbol: str) -> ExchangePosition | None: + ... +``` + +#### Storage — `self.store_ctx` + +Every broker plugin gets a `RunContext` wired in by `ScriptRunner` at startup +(`self.store_ctx`). This is the single entry point for persistence — you do +**not** write your own JSONL, SQLite, or in-memory bookkeeping. The +`RunContext` is backed by a shared `BrokerStore` (SQLite, WAL mode) at +`workdir/output/logs/broker.sqlite`, and it gives you: + +- **Generic alias lookup.** Exchange IDs that arrive later in the lifecycle + (Capital.com `dealId`, IB `permId`, Bybit `orderLinkId`) are stored in the + `order_refs` table. Reverse lookup is a single indexed SELECT: + + ```python + # When the exchange returns a durable ID, stash it as an alias. 
+ self.store_ctx.add_ref(client_order_id, 'exchange_order_id', exchange_id) + + # Later, when a fill event arrives with only the exchange ID, resolve it: + row = self.store_ctx.find_by_ref('exchange_order_id', exchange_id) + if row is not None: + client_order_id = row.client_order_id + ``` + +- **Audit log.** Plugin-specific events (rate-limit hits, degraded protection, + reconcile outcomes) go through `log_event`: + + ```python + self.store_ctx.log_event( + 'rate_limit_hit', + client_order_id=coid, + payload={'retry_after_s': 1.5}, + ) + ``` + +- **Order state writes.** The sync engine handles the canonical order + lifecycle automatically. Only touch `upsert_order` / `set_exchange_id` / + `set_risk` if your plugin needs to record extra state the engine doesn't + know about. + +**Authentication and `account_id`.** `BrokerPlugin.account_id` is a sync +property that returns `self._account_id`. Your `connect()` (or the first +authenticating call) must populate `self._account_id` as a +**plugin-qualified** string, e.g. `"foo-demo-1234567"`. The `ScriptRunner` +reads it once during startup to build the `run_id` — if the bot later +switches accounts on the broker UI, the stored `run_id` won't silently drift. + +**Restart recovery.** If the process is `SIGKILL`-ed or the host restarts, +the `runs` row is left with `ended_ts_ms IS NULL` but its heartbeat goes +stale. The next startup's `open_run()` automatically closes stale rows +(heartbeat > 5 min) and logs a `stale_run_cleaned` event. There is nothing +for the plugin to do here — recovery is built into the store. + +#### `BrokerStore` schema — what gets stored where + +A single SQLite file at `workdir/output/logs/broker.sqlite` is shared by +every bot process in the same workdir (WAL mode; one writer at a time, no +blocked readers). Two identity keys share the tables: + +- **`run_id`** — logical stream, the humanly recognizable identifier of + a bot: `"{strategy}@{account}:{symbol}:{tf}[#label]"`. 
Stable across + restarts. +- **`run_instance_id`** — physical autoincrement integer, unique per + process-level run. Historical isolation. + +| Table | Keyed by | What it holds | +|-------------------------|----------------------|--------------------------------------------------------| +| `runs` | `run_instance_id` | Per-run metadata, heartbeat, lifecycle timestamps. | +| `envelopes` | `run_id` | Sync engine envelope identity (cross-restart). | +| `pending_verifications` | `run_id` | Parked dispatches awaiting confirmation. | +| `orders` | `run_instance_id` | Live order snapshot (+ plugin-specific `extras` JSON). | +| `order_refs` | `run_instance_id` | Generic alias lookup (broker IDs → `client_order_id`). | +| `events` | `run_instance_id` | Audit log (dispatch, fill, reconcile, stale-cleanup). | + +The `envelopes` and `pending_verifications` tables key on the **logical** +`run_id`, so a restarted bot picks up the same idempotency anchors. +Everything else keys on the **physical** `run_instance_id`, so historical +runs stay isolated. + ## Combining Capabilities A plugin can combine multiple capabilities via multiple inheritance. The diff --git a/src/pynecore/cli/commands/run.py b/src/pynecore/cli/commands/run.py index 56c2974..ad525ba 100644 --- a/src/pynecore/cli/commands/run.py +++ b/src/pynecore/cli/commands/run.py @@ -240,6 +240,11 @@ def run( help="Enable live broker trading — requires a provider plugin that " "subclasses BrokerPlugin. Implies --live.", rich_help_panel="Live Options"), + run_label: str | None = Option(None, "--run-label", + help="Optional label to distinguish parallel instances of the " + "same strategy+account+symbol+timeframe. Stored in the " + "broker run_id as ``...#