diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 1495c7362..cc25056e4 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -65,6 +65,7 @@ jobs:
- packages/commons
- packages/evaluators
- packages/node
+ - packages/flow
- packages/services
- packages/tentacles_manager
- packages/trading
@@ -101,7 +102,7 @@ jobs:
fi
- name: Install tentacles
- if: matrix.package == 'octobot'
+ if: matrix.package == 'octobot' || matrix.package == 'packages/node' || matrix.package == 'packages/flow'
run: |
mkdir -p output
OctoBot tentacles -d packages/tentacles -p any_platform.zip
@@ -113,11 +114,16 @@ jobs:
pytest tests -n auto --dist loadfile
pytest --ignore=tentacles/Trading/Exchange tentacles -n auto --dist loadfile
else
- cd ${{ matrix.package }}
- if [ "${{ matrix.package }}" = "packages/tentacles_manager" ] || [ "${{ matrix.package }}" = "packages/node" ]; then
- pytest tests
+ if [ "${{ matrix.package }}" = "packages/node" ] || [ "${{ matrix.package }}" = "packages/flow" ]; then
+ echo "Running tests from root dir to allow tentacles import"
+ PYTHONPATH=.:$PYTHONPATH pytest ${{ matrix.package }}/tests -n auto --dist loadfile
else
- pytest tests -n auto --dist loadfile
+ cd ${{ matrix.package }}
+ if [ "${{ matrix.package }}" = "packages/tentacles_manager" ]; then
+ pytest tests
+ else
+ pytest tests -n auto --dist loadfile
+ fi
fi
fi
env:
diff --git a/BUILD b/BUILD
index 869ae38ac..847f377c6 100644
--- a/BUILD
+++ b/BUILD
@@ -32,6 +32,7 @@ PACKAGE_SOURCES = [
"packages/commons:octobot_commons",
"packages/evaluators:octobot_evaluators",
"packages/node:octobot_node",
+ "packages/flow:octobot_flow",
"packages/services:octobot_services",
"packages/tentacles_manager:octobot_tentacles_manager",
"packages/trading:octobot_trading",
diff --git a/additional_tests/exchanges_tests/abstract_authenticated_exchange_tester.py b/additional_tests/exchanges_tests/abstract_authenticated_exchange_tester.py
index 8346b29fd..91057485e 100644
--- a/additional_tests/exchanges_tests/abstract_authenticated_exchange_tester.py
+++ b/additional_tests/exchanges_tests/abstract_authenticated_exchange_tester.py
@@ -35,7 +35,7 @@
import octobot_trading.personal_data as personal_data
import octobot_trading.personal_data.orders as personal_data_orders
import octobot_trading.util.test_tools.exchanges_test_tools as exchanges_test_tools
-import octobot_trading.util.test_tools.exchange_data as exchange_data_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
import trading_backend.enums
import octobot_tentacles_manager.api as tentacles_manager_api
from additional_tests.exchanges_tests import get_authenticated_exchange_manager, NoProvidedCredentialsError
diff --git a/octobot/backtesting/minimal_data_importer.py b/octobot/backtesting/minimal_data_importer.py
index 27a2d9968..2811ef1a7 100644
--- a/octobot/backtesting/minimal_data_importer.py
+++ b/octobot/backtesting/minimal_data_importer.py
@@ -7,7 +7,7 @@
import octobot_backtesting.importers
import octobot_backtesting.enums
-import octobot_trading.util.test_tools.exchange_data as exchange_data_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
class MinimalDataImporter(octobot_backtesting.importers.ExchangeDataImporter):
diff --git a/packages/commons/octobot_commons/asyncio_tools.py b/packages/commons/octobot_commons/asyncio_tools.py
index 84abc8d71..09783c4e4 100644
--- a/packages/commons/octobot_commons/asyncio_tools.py
+++ b/packages/commons/octobot_commons/asyncio_tools.py
@@ -14,8 +14,11 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import asyncio
+import contextlib
+import time
import traceback
import concurrent.futures
+import typing
import octobot_commons.constants as constants
import octobot_commons.logging as logging_util
@@ -117,6 +120,28 @@ async def gather_waiting_for_all_before_raising(*coros):
    return maybe_exceptions
+@contextlib.contextmanager
+def logged_waiter(self, name: str, sleep_time: float = 30) -> typing.Generator[None, None, None]:
+    """
+    Periodically log the time elapsed since the start of the waiter
+    """
+    async def _waiter() -> None:
+        t0 = time.time()
+        try:
+            while True:
+                await asyncio.sleep(sleep_time)
+                self.logger.info(f"{name} is still processing [{time.time() - t0:.2f} seconds] ...")
+        except asyncio.CancelledError:
+            pass
+ task = None
+ try:
+ task = asyncio.create_task(_waiter())
+ yield
+ finally:
+ if task is not None and not task.done():
+ task.cancel()
+
+
class RLock(asyncio.Lock):
"""
Async Lock implementing reentrancy
diff --git a/packages/commons/octobot_commons/constants.py b/packages/commons/octobot_commons/constants.py
index 0c51cd3c8..aa4d016d3 100644
--- a/packages/commons/octobot_commons/constants.py
+++ b/packages/commons/octobot_commons/constants.py
@@ -249,6 +249,7 @@ def parse_boolean_environment_var(env_key: str, default_value: str) -> bool:
# DSL interpreter
BASE_OPERATORS_LIBRARY = "base"
CONTEXTUAL_OPERATORS_LIBRARY = "contextual"
+UNRESOLVED_PARAMETER_PLACEHOLDER = "UNRESOLVED_PARAMETER"
# Logging
EXCEPTION_DESC = "exception_desc"
diff --git a/packages/commons/octobot_commons/dataclasses/flexible_dataclass.py b/packages/commons/octobot_commons/dataclasses/flexible_dataclass.py
index 93496ca4e..e4f9a5c06 100644
--- a/packages/commons/octobot_commons/dataclasses/flexible_dataclass.py
+++ b/packages/commons/octobot_commons/dataclasses/flexible_dataclass.py
@@ -25,7 +25,7 @@
@dataclasses.dataclass
class FlexibleDataclass:
- _class_field_cache: typing.ClassVar[dict] = {}
+ _class_field_cache: typing.ClassVar[dict] = dataclasses.field(default={}, repr=False)
"""
Implements from_dict which can be called to instantiate a new instance of this class from a dict. Using from_dict
ignores any additional key from the given dict that is not defined as a dataclass field.
diff --git a/packages/commons/octobot_commons/dsl_interpreter/__init__.py b/packages/commons/octobot_commons/dsl_interpreter/__init__.py
index 83b863d91..ab9bdd809 100644
--- a/packages/commons/octobot_commons/dsl_interpreter/__init__.py
+++ b/packages/commons/octobot_commons/dsl_interpreter/__init__.py
@@ -24,7 +24,10 @@
get_all_operators,
clear_get_all_operators_cache,
)
-from octobot_commons.dsl_interpreter.operator_parameter import OperatorParameter
+from octobot_commons.dsl_interpreter.operator_parameter import (
+ OperatorParameter,
+ UNINITIALIZED_VALUE,
+)
from octobot_commons.dsl_interpreter.operator_docs import OperatorDocs
from octobot_commons.dsl_interpreter.operators import (
BinaryOperator,
@@ -34,8 +37,24 @@
CallOperator,
NameOperator,
ExpressionOperator,
+ PreComputingCallOperator,
+ ReCallableOperatorMixin,
+ ReCallingOperatorResult,
+ ReCallingOperatorResultKeys,
+)
+from octobot_commons.dsl_interpreter.interpreter_dependency import (
+ InterpreterDependency,
+)
+from octobot_commons.dsl_interpreter.parameters_util import (
+ format_parameter_value,
+ resove_operator_params,
+ apply_resolved_parameter_value,
+ add_resolved_parameter_value,
+ has_unresolved_parameters,
+)
+from octobot_commons.dsl_interpreter.dsl_call_result import (
+ DSLCallResult,
)
-from octobot_commons.dsl_interpreter.interpreter_dependency import InterpreterDependency
__all__ = [
"get_all_operators",
@@ -43,6 +62,7 @@
"Interpreter",
"Operator",
"OperatorParameter",
+ "UNINITIALIZED_VALUE",
"OperatorDocs",
"BinaryOperator",
"UnaryOperator",
@@ -51,5 +71,15 @@
"CallOperator",
"NameOperator",
"ExpressionOperator",
+ "PreComputingCallOperator",
+ "ReCallableOperatorMixin",
"InterpreterDependency",
+ "format_parameter_value",
+ "resove_operator_params",
+ "apply_resolved_parameter_value",
+ "add_resolved_parameter_value",
+ "DSLCallResult",
+ "has_unresolved_parameters",
+ "ReCallingOperatorResult",
+ "ReCallingOperatorResultKeys",
]
diff --git a/packages/commons/octobot_commons/dsl_interpreter/dsl_call_result.py b/packages/commons/octobot_commons/dsl_interpreter/dsl_call_result.py
new file mode 100644
index 000000000..aadaca7d2
--- /dev/null
+++ b/packages/commons/octobot_commons/dsl_interpreter/dsl_call_result.py
@@ -0,0 +1,37 @@
+# Drakkar-Software OctoBot
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import dataclasses
+import typing
+
+import octobot_commons.dataclasses
+
+
+@dataclasses.dataclass
+class DSLCallResult(octobot_commons.dataclasses.FlexibleDataclass):
+ """
+ Stores a DSL call result alongside its statement (and error if any)
+ """
+ statement: str
+ result: typing.Optional[typing.Any] = None
+ error: typing.Optional[str] = None
+
+ def succeeded(self) -> bool:
+ """
+ Check if the DSL call succeeded
+ :return: True if the DSL call succeeded, False otherwise
+ """
+ return self.error is None
diff --git a/packages/commons/octobot_commons/dsl_interpreter/interpreter.py b/packages/commons/octobot_commons/dsl_interpreter/interpreter.py
index 454f898a7..4e87aaa1d 100644
--- a/packages/commons/octobot_commons/dsl_interpreter/interpreter.py
+++ b/packages/commons/octobot_commons/dsl_interpreter/interpreter.py
@@ -19,6 +19,8 @@
import octobot_commons.errors
import octobot_commons.dsl_interpreter.operator as dsl_interpreter_operator
import octobot_commons.dsl_interpreter.interpreter_dependency as dsl_interpreter_dependency
+import octobot_commons.dsl_interpreter.parameters_util as parameters_util
+import octobot_commons.dsl_interpreter.dsl_call_result as dsl_call_result
class Interpreter:
@@ -45,6 +47,7 @@ def __init__(
dsl_interpreter_operator.Operator,
dsl_interpreter_operator.ComputedOperatorParameterType,
] = None
+ self._parsed_expression: typing.Optional[str] = None
def extend(
self, operators: typing.List[typing.Type[dsl_interpreter_operator.Operator]]
@@ -73,7 +76,7 @@ async def interprete(
def get_dependencies(
self,
- ) -> typing.List[dsl_interpreter_dependency.InterpreterDependency]:
+ ) -> list[dsl_interpreter_dependency.InterpreterDependency]:
"""
Get the dependencies of the interpreter's parsed expression.
"""
@@ -109,10 +112,17 @@ def _parse_expression(self, expression: str):
# it consists of a single expression, or 'single' if it consists of a single
# interactive statement.
# docs: https://docs.python.org/3/library/functions.html#compile
- tree = ast.parse(expression, mode="eval")
-
- # Visit the AST and convert nodes to Operator instances
- self._operator_tree_or_constant = self._visit_node(tree.body)
+ self._parsed_expression = expression
+ try:
+ tree = ast.parse(expression, mode="eval")
+ self._operator_tree_or_constant = self._visit_node(tree.body)
+ except SyntaxError:
+ tree = ast.parse(expression, mode="single")
+ if len(tree.body) != 1:
+ raise octobot_commons.errors.DSLInterpreterError(
+ "Single statement required when using statement mode"
+ )
+ self._operator_tree_or_constant = self._visit_node(tree.body[0])
async def compute_expression(
self,
@@ -129,6 +139,25 @@ async def compute_expression(
return self._operator_tree_or_constant.compute()
return self._operator_tree_or_constant
+ async def compute_expression_with_result(
+ self,
+ ) -> dsl_call_result.DSLCallResult:
+ """
+ Compute the result of the expression stored in self._operator_tree_or_constant.
+ If the expression is a constant, return it directly.
+ If the expression is an operator, pre_compute and compute its result.
+ """
+ try:
+ return dsl_call_result.DSLCallResult(
+ statement=self._parsed_expression,
+ result=await self.compute_expression(),
+ )
+ except octobot_commons.errors.ErrorStatementEncountered as err:
+ return dsl_call_result.DSLCallResult(
+ statement=self._parsed_expression,
+ error=err.args[0] if err.args else ""
+ )
+
def _visit_node(self, node: typing.Optional[ast.AST]) -> typing.Union[
dsl_interpreter_operator.Operator,
dsl_interpreter_operator.ComputedOperatorParameterType,
@@ -159,7 +188,26 @@ def _visit_node(self, node: typing.Optional[ast.AST]) -> typing.Union[
)
for arg in node.args
]
- return operator_class(*args)
+ kwargs = {}
+ for kw in node.keywords:
+ value = (
+ self._get_value_from_constant_node(kw.value)
+ if isinstance(kw.value, ast.Constant)
+ else self._visit_node(kw.value)
+ )
+ if kw.arg is not None:
+ kwargs[kw.arg] = value
+ else:
+ if isinstance(value, dict):
+ kwargs.update(value)
+ else:
+ raise octobot_commons.errors.UnsupportedOperatorError(
+ f"**kwargs must unpack a dict, got {type(value).__name__}"
+ )
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, args, kwargs
+ )
+ return operator_class(*args, **kwargs)
raise octobot_commons.errors.UnsupportedOperatorError(
f"Unknown operator: {func_name}"
)
@@ -259,6 +307,23 @@ def _visit_node(self, node: typing.Optional[ast.AST]) -> typing.Union[
operands = [self._visit_node(operand) for operand in node.elts]
return operator_class(*operands)
+ if isinstance(node, ast.Dict):
+ # Dict: {"a": 1, "b": 2} or {"a": 1, **other}
+ op_name = ast.Dict.__name__
+ result = {}
+ for key, value in zip(node.keys, node.values):
+ if key is not None:
+ result[self._visit_node(key)] = self._visit_node(value)
+ else:
+ unpacked = self._visit_node(value)
+ if isinstance(unpacked, dict):
+ result.update(unpacked)
+ else:
+ raise octobot_commons.errors.UnsupportedOperatorError(
+ f"** unpacking in dict requires a dict, got {type(unpacked).__name__}"
+ )
+ return result
+
if isinstance(node, ast.Slice):
# Slice: slice(1, 2, 3)
op_name = ast.Slice.__name__
@@ -269,6 +334,32 @@ def _visit_node(self, node: typing.Optional[ast.AST]) -> typing.Union[
step = self._visit_node(node.step)
return operator_class(lower, upper, step)
+ if isinstance(node, ast.Raise):
+ # Raise statement: raise exc [from cause] - maps to RaiseOperator
+ op_name = "raise"
+ if op_name in self.operators_by_name:
+ operator_class = self.operators_by_name[op_name]
+ args = []
+ if node.exc is not None:
+ args.append(
+ self._get_value_from_constant_node(node.exc)
+ if isinstance(node.exc, ast.Constant)
+ else self._visit_node(node.exc)
+ )
+ if node.cause is not None:
+ args.append(
+ self._get_value_from_constant_node(node.cause)
+ if isinstance(node.cause, ast.Constant)
+ else self._visit_node(node.cause)
+ )
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, args, {}
+ )
+ return operator_class(*args, **kwargs)
+ raise octobot_commons.errors.UnsupportedOperatorError(
+ f"Unknown operator: {op_name}"
+ )
+
raise octobot_commons.errors.UnsupportedOperatorError(
f"Unsupported AST node type: {type(node).__name__}"
)
@@ -289,7 +380,7 @@ def _get_value_from_constant_node(
"""Extract a literal value from an AST constant node."""
value = node.value
# Filter out unsupported types like complex numbers or Ellipsis
- if isinstance(value, (str, int, float, bool, type(None))):
+ if isinstance(value, (str, int, float, bool, type(None), dict)):
return value
raise octobot_commons.errors.UnsupportedOperatorError(
f"Unsupported constant type: {type(value).__name__}"
diff --git a/packages/commons/octobot_commons/dsl_interpreter/interpreter_dependency.py b/packages/commons/octobot_commons/dsl_interpreter/interpreter_dependency.py
index dc268ba36..e1dec8751 100644
--- a/packages/commons/octobot_commons/dsl_interpreter/interpreter_dependency.py
+++ b/packages/commons/octobot_commons/dsl_interpreter/interpreter_dependency.py
@@ -1,4 +1,3 @@
-# pylint: disable=too-many-branches,too-many-return-statements
# Drakkar-Software OctoBot-Commons
# Copyright (c) Drakkar-Software, All rights reserved.
#
diff --git a/packages/commons/octobot_commons/dsl_interpreter/operator.py b/packages/commons/octobot_commons/dsl_interpreter/operator.py
index ce7b79cab..877eef793 100644
--- a/packages/commons/octobot_commons/dsl_interpreter/operator.py
+++ b/packages/commons/octobot_commons/dsl_interpreter/operator.py
@@ -14,6 +14,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import typing
+import collections
import numpy as np
import octobot_commons.errors
@@ -22,11 +23,11 @@
import octobot_commons.dsl_interpreter.operator_parameter as dsl_interpreter_operator_parameter
import octobot_commons.dsl_interpreter.operator_docs as dsl_interpreter_operator_docs
-OperatorParameterType = typing.Union[
- str, int, float, bool, None, list, np.ndarray, "Operator"
-]
ComputedOperatorParameterType = typing.Union[
- str, int, float, bool, None, list, np.ndarray
+ str, int, float, bool, None, list, dict, np.ndarray
+]
+OperatorParameterType = typing.Union[
+ str, int, float, bool, None, list, dict, np.ndarray, "Operator"
]
@@ -49,7 +50,7 @@ class Operator:
EXAMPLE: str = "" # example of the operator in the DSL
def __init__(self, *parameters: OperatorParameterType, **kwargs: typing.Any):
- self._validate_parameters(parameters)
+ self._validate_parameters(parameters, kwargs)
self.parameters = parameters
self.kwargs = kwargs
@@ -68,7 +69,7 @@ def get_library() -> str:
return octobot_commons.constants.BASE_OPERATORS_LIBRARY
def _validate_parameters(
- self, parameters: typing.List[OperatorParameterType]
+ self, parameters: list[OperatorParameterType], kwargs: dict[str, OperatorParameterType]
) -> None:
"""
Validate the parameters of the operator.
@@ -84,12 +85,13 @@ def _validate_parameters(
if expected_parameters := self.get_parameters():
min_params = len(tuple(p for p in expected_parameters if p.required))
max_params = len(tuple(p for p in expected_parameters))
- if len(parameters) < min_params:
+ total_params = len(parameters) + len(kwargs)
+ if total_params < min_params:
raise octobot_commons.errors.InvalidParametersError(
f"{self.get_name()} requires at least {min_params} "
f"parameter(s): {self.get_parameters_description()}"
)
- if max_params is not None and len(parameters) > max_params:
+ if max_params is not None and total_params > max_params:
raise octobot_commons.errors.InvalidParametersError(
f"{self.get_name()} supports up to {max_params} "
f"parameters: {self.get_parameters_description()}"
@@ -117,8 +119,8 @@ def get_docs(cls) -> dsl_interpreter_operator_docs.OperatorDocs:
parameters=cls.get_parameters(),
)
- @staticmethod
- def get_parameters() -> list[dsl_interpreter_operator_parameter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter_operator_parameter.OperatorParameter]:
"""
return: the description of the parameters of the operator.
"""
@@ -139,19 +141,93 @@ def compute(self) -> ComputedOperatorParameterType:
"""
raise NotImplementedError("compute is not implemented")
- def get_computed_parameters(self) -> typing.List[ComputedOperatorParameterType]:
+ def get_computed_parameters(self) -> list[ComputedOperatorParameterType]:
"""
Get the computed parameters of the operator.
Here computed means that any nested operator has already been computed.
"""
- return [
- parameter.compute() if isinstance(parameter, Operator) else parameter
- for parameter in self.parameters
- ]
+ return [self._get_computed_parameter(parameter) for parameter in self.parameters]
+
+ def _get_computed_parameter(self, parameter: OperatorParameterType) -> ComputedOperatorParameterType:
+ if isinstance(parameter, Operator):
+ return parameter.compute()
+ if isinstance(parameter, dict):
+ return {self._get_computed_parameter(k): self._get_computed_parameter(v) for k, v in parameter.items()}
+ if isinstance(parameter, list):
+ return [self._get_computed_parameter(v) for v in parameter]
+ return parameter
+
+ def get_computed_kwargs(self) -> dict[str, ComputedOperatorParameterType]:
+ """
+ Get the computed kwargs of the operator.
+ """
+ return {
+ kw: value.compute() if isinstance(value, Operator) else value
+ for kw, value in self.kwargs.items()
+ }
+
+ def get_computed_value_by_parameter(self) -> dict[str, ComputedOperatorParameterType]:
+ """
+ Get the COMPUTED value of each parameter by its name.
+ """
+ computed_parameters_queue = collections.deque(self.get_computed_parameters())
+ computed_kwargs = self.get_computed_kwargs()
+ return self._get_value_by_parameter(computed_parameters_queue, computed_kwargs) # type: ignore
+
+ def get_input_value_by_parameter(self) -> dict[str, OperatorParameterType]:
+ """
+ Get the raw input (uncomputed) value of each parameter by its name.
+ """
+ return self._get_value_by_parameter(
+ collections.deque(self.parameters), dict(self.kwargs)
+ )
+
+ def _get_value_by_parameter(
+ self,
+ args: collections.deque[OperatorParameterType],
+ kwargs: dict[str, OperatorParameterType]
+ ) -> dict[str, OperatorParameterType]:
+ """
+ Get the value of each parameter by its name.
+ If a value is not provided, the default value will be used if available,
+ otherwise the parameter will be skipped.
+ """
+ value_by_parameter = {}
+ for parameter in self.get_parameters():
+ # 1. non kw parameters are first
+ if args:
+ value_by_parameter[parameter.name] = args.popleft()
+ else:
+ # 2. no more non kw parameters, explore kw parameters
+ if parameter.name in kwargs:
+ if parameter.name in value_by_parameter:
+ raise octobot_commons.errors.InvalidParametersError(
+ f"Parameter {parameter.name} has multiple values"
+ )
+ value_by_parameter[parameter.name] = kwargs[parameter.name]
+ kwargs.pop(parameter.name)
+ else:
+ # 3. try to get the default value if set
+ if parameter.default is not dsl_interpreter_operator_parameter.UNSET_VALUE:
+ value_by_parameter[parameter.name] = parameter.default
+ if kwargs:
+ parameter_names = [p.name for p in self.get_parameters()] # use a list to preserve order
+ if unknown_parameters := {
+ k: v for k, v in kwargs.items() if k not in parameter_names
+ }:
+ raise octobot_commons.errors.InvalidParametersError(
+                    f"Parameter(s) {', '.join(repr(k) for k in unknown_parameters)} "
+ f"are unknown. Supported parameters: {', '.join(parameter_names)}"
+ )
+ raise octobot_commons.errors.InvalidParametersError(
+                f"Parameter(s) {', '.join(repr(k) for k in kwargs)} "
+ f"have multiple values"
+ )
+ return value_by_parameter
def get_dependencies(
self,
- ) -> typing.List[dsl_interpreter_dependency.InterpreterDependency]:
+ ) -> list[dsl_interpreter_dependency.InterpreterDependency]:
"""
Get the dependencies of the operator.
"""
diff --git a/packages/commons/octobot_commons/dsl_interpreter/operator_parameter.py b/packages/commons/octobot_commons/dsl_interpreter/operator_parameter.py
index 30319c191..7293f0f2d 100644
--- a/packages/commons/octobot_commons/dsl_interpreter/operator_parameter.py
+++ b/packages/commons/octobot_commons/dsl_interpreter/operator_parameter.py
@@ -18,15 +18,24 @@
import typing
+UNSET_VALUE = "UNSET_VALUE"
+UNINITIALIZED_VALUE = object()
+
+
@dataclasses.dataclass
class OperatorParameter:
name: str
description: str
required: bool
type: typing.Type[typing.Any]
+ default: typing.Any = UNSET_VALUE
def __repr__(self) -> str:
- return f"{self.name}{' (required)' if self.required else ''}[{self.type.__name__}] - {self.description}"
+ default_str = f' (default: {self.default})' if self.default is not UNSET_VALUE else ''
+ return (
+ f"{self.name}{' (required)' if self.required else default_str}"
+ f"[{self.type.__name__}] - {self.description}"
+ )
def to_json(self) -> dict:
"""
@@ -37,4 +46,5 @@ def to_json(self) -> dict:
"description": self.description,
"required": self.required,
"type": self.type.__name__,
+ "default": self.default,
}
diff --git a/packages/commons/octobot_commons/dsl_interpreter/operators/__init__.py b/packages/commons/octobot_commons/dsl_interpreter/operators/__init__.py
index cb7fcc15e..7591c1913 100644
--- a/packages/commons/octobot_commons/dsl_interpreter/operators/__init__.py
+++ b/packages/commons/octobot_commons/dsl_interpreter/operators/__init__.py
@@ -42,6 +42,14 @@
from octobot_commons.dsl_interpreter.operators.iterable_operator import (
IterableOperator,
)
+from octobot_commons.dsl_interpreter.operators.pre_computing_call_operator import (
+ PreComputingCallOperator,
+)
+from octobot_commons.dsl_interpreter.operators.re_callable_operator_mixin import (
+ ReCallableOperatorMixin,
+ ReCallingOperatorResult,
+ ReCallingOperatorResultKeys,
+)
__all__ = [
"BinaryOperator",
@@ -53,4 +61,8 @@
"ExpressionOperator",
"SubscriptingOperator",
"IterableOperator",
+ "PreComputingCallOperator",
+ "ReCallableOperatorMixin",
+ "ReCallingOperatorResult",
+ "ReCallingOperatorResultKeys",
]
diff --git a/packages/commons/octobot_commons/dsl_interpreter/operators/pre_computing_call_operator.py b/packages/commons/octobot_commons/dsl_interpreter/operators/pre_computing_call_operator.py
new file mode 100644
index 000000000..e6ba30f20
--- /dev/null
+++ b/packages/commons/octobot_commons/dsl_interpreter/operators/pre_computing_call_operator.py
@@ -0,0 +1,46 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter.operator as dsl_interpreter_operator
+import octobot_commons.dsl_interpreter.operator_parameter as dsl_interpreter_operator_parameter
+import octobot_commons.dsl_interpreter.operators.call_operator as dsl_interpreter_call_operator
+
+
+class PreComputingCallOperator(
+ dsl_interpreter_call_operator.CallOperator
+): # pylint: disable=abstract-method
+ """
+ Base class for pre-computing call operators (function calls).
+ Pre-computing call operators are call operators that must be
+ pre-computed before being computed.
+ """
+ def __init__(self, *parameters: dsl_interpreter_operator.OperatorParameterType, **kwargs: typing.Any):
+ super().__init__(*parameters, **kwargs)
+ self.value: dsl_interpreter_operator.ComputedOperatorParameterType = (
+ dsl_interpreter_operator_parameter.UNINITIALIZED_VALUE
+ ) # type: ignore
+
+ async def pre_compute(self) -> None:
+ await super().pre_compute()
+ self.value = dsl_interpreter_operator_parameter.UNINITIALIZED_VALUE # type: ignore
+
+ def compute(self) -> dsl_interpreter_operator.ComputedOperatorParameterType:
+ if self.value is dsl_interpreter_operator_parameter.UNINITIALIZED_VALUE:
+ raise octobot_commons.errors.DSLInterpreterError(
+            f"{self.__class__.__name__} has not been pre_computed"
+ )
+ return self.value
diff --git a/packages/commons/octobot_commons/dsl_interpreter/operators/re_callable_operator_mixin.py b/packages/commons/octobot_commons/dsl_interpreter/operators/re_callable_operator_mixin.py
new file mode 100644
index 000000000..5dc0f156c
--- /dev/null
+++ b/packages/commons/octobot_commons/dsl_interpreter/operators/re_callable_operator_mixin.py
@@ -0,0 +1,115 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import dataclasses
+import typing
+import time
+import enum
+
+import octobot_commons.dataclasses
+import octobot_commons.dsl_interpreter.operator_parameter as operator_parameter
+
+
+class ReCallingOperatorResultKeys(str, enum.Enum):
+ WAITING_TIME = "waiting_time"
+ LAST_EXECUTION_TIME = "last_execution_time"
+
+
+@dataclasses.dataclass
+class ReCallingOperatorResult(octobot_commons.dataclasses.MinimizableDataclass):
+ reset_to_id: typing.Optional[str] = None
+ last_execution_result: typing.Optional[dict] = None
+
+ @staticmethod
+ def is_re_calling_operator_result(result: typing.Any) -> bool:
+ """
+ Check if the result is a re-calling operator result.
+ """
+ return isinstance(result, dict) and (
+ ReCallingOperatorResult.__name__ in result
+ )
+
+ def get_next_call_time(self) -> typing.Optional[float]:
+ """
+ Returns the next call time based on the last execution result's
+ waiting time and last execution time.
+ """
+ if (
+ self.last_execution_result
+ and (waiting_time := self.last_execution_result.get(ReCallingOperatorResultKeys.WAITING_TIME.value))
+ ):
+ last_execution_time = self.last_execution_result.get(
+ ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value
+ ) or time.time()
+ return last_execution_time + waiting_time
+ return None
+
+
+class ReCallableOperatorMixin:
+ """
+ Mixin for re-callable operators.
+ """
+ LAST_EXECUTION_RESULT_KEY = "last_execution_result"
+
+ @classmethod
+ def get_re_callable_parameters(cls) -> list[operator_parameter.OperatorParameter]:
+ """
+ Returns the parameters for the re-callable operator.
+ """
+ return [
+ operator_parameter.OperatorParameter(
+ name=cls.LAST_EXECUTION_RESULT_KEY,
+ description="the return value of the previous call",
+ required=False,
+ type=dict,
+ default=None,
+ ),
+ ]
+
+ def get_last_execution_result(
+ self, param_by_name: dict[str, typing.Any]
+ ) -> typing.Optional[dict]:
+ """
+ Returns the potential last execution result from param_by_name.
+ """
+ if (
+ (result_dict := param_by_name.get(self.LAST_EXECUTION_RESULT_KEY, None))
+ and ReCallingOperatorResult.is_re_calling_operator_result(result_dict)
+ ):
+ return ReCallingOperatorResult.from_dict(result_dict[
+ ReCallingOperatorResult.__name__
+ ]).last_execution_result
+ return None
+
+ def build_re_callable_result(
+ self,
+ reset_to_id: typing.Optional[str] = None,
+ waiting_time: typing.Optional[float] = None,
+ last_execution_time: typing.Optional[float] = None,
+ **kwargs: typing.Any,
+ ) -> dict:
+ """
+ Builds a dict formatted re-callable result from the given parameters.
+ """
+ return {
+ ReCallingOperatorResult.__name__: ReCallingOperatorResult(
+ reset_to_id=reset_to_id,
+ last_execution_result={
+ ReCallingOperatorResultKeys.WAITING_TIME.value: waiting_time,
+ ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value: last_execution_time,
+ **kwargs,
+ },
+ ).to_dict(include_default_values=False)
+ }
diff --git a/packages/commons/octobot_commons/dsl_interpreter/parameters_util.py b/packages/commons/octobot_commons/dsl_interpreter/parameters_util.py
new file mode 100644
index 000000000..a16486fb7
--- /dev/null
+++ b/packages/commons/octobot_commons/dsl_interpreter/parameters_util.py
@@ -0,0 +1,163 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import re
+import typing
+import json
+
+import octobot_commons.dsl_interpreter.operator as dsl_interpreter_operator
+import octobot_commons.errors
+import octobot_commons.constants
+
+
+def format_parameter_value(value: typing.Any) -> str: # pylint: disable=too-many-return-statements
+ """
+ Formats a parameter value to a string usable in a DSL expression.
+ Handles special cases for some values (ex: lists, dicts, ...).
+ """
+ if value is None:
+ return "None"
+ if isinstance(value, bool):
+ return "True" if value else "False"
+ if isinstance(value, (int, float)):
+ return repr(value)
+ if isinstance(value, str):
+ try:
+ parsed = json.loads(value)
+ if isinstance(parsed, list):
+ return repr(parsed)
+ if isinstance(parsed, dict):
+ return repr(parsed)
+ except (json.JSONDecodeError, TypeError):
+ return repr(value)
+ if isinstance(value, list):
+ return repr(value)
+ if isinstance(value, dict):
+ return repr(value)
+ return repr(value)
+
+
+def resove_operator_params(  # NOTE(review): name looks like a typo for "resolve_operator_params" — rename together with callers
+    operator_class: dsl_interpreter_operator.Operator,
+    param_value_by_name: dict[str, typing.Any]
+) -> list[str]:
+    """
+    Resolves operator parameters to a list of positional and keyword arguments.
+    Returns a list of formatted strings usable in a DSL expression.
+    """
+    operator_params = operator_class.get_parameters()
+    required_params = [p for p in operator_params if p.required]
+    optional_params = [p for p in operator_params if not p.required]
+    positional_parts = []
+    keyword_parts = []
+    for param_def in required_params:
+        name = param_def.name
+        if name in param_value_by_name:  # NOTE(review): missing required params are silently skipped — confirm intended
+            value = param_value_by_name[name]
+            positional_parts.append(
+                format_parameter_value(value)
+            )
+    for param_def in optional_params:
+        name = param_def.name
+        if name in param_value_by_name:
+            value = param_value_by_name[name]
+            keyword_parts.append(f"{name}={format_parameter_value(value)}")
+    return positional_parts + keyword_parts
+
+
+def resolve_operator_args_and_kwargs(
+ operator_class: typing.Type[dsl_interpreter_operator.Operator],
+ args: typing.List,
+ kwargs: typing.Dict[str, typing.Any],
+) -> typing.Tuple[typing.List, typing.Dict[str, typing.Any]]:
+ """
+ For operators with get_parameters(), merge positional args and kwargs
+ into a single args tuple in parameter order. This ensures validation
+ passes when using named parameters (e.g. xyz(1, p2=2) where p2 is a required parameter).
+ """
+ expected_params = operator_class.get_parameters()
+ if not expected_params:
+ return args, kwargs
+
+ max_params = len(expected_params)
+ merged_args = []
+ args_index = 0
+ remaining_kwargs = dict(kwargs)
+
+ for param in expected_params:
+ if args_index < len(args):
+ merged_args.append(args[args_index])
+ args_index += 1
+ elif param.name in remaining_kwargs:
+ merged_args.append(remaining_kwargs.pop(param.name))
+ else:
+ # Parameter not provided - leave for Operator's default handling
+ break
+
+ if args_index < len(args):
+ raise octobot_commons.errors.InvalidParametersError(
+ f"{operator_class.get_name()} supports up to {max_params} "
+ f"parameters: {operator_class.get_parameters_description()}"
+ )
+
+ return merged_args, remaining_kwargs
+
+
+def apply_resolved_parameter_value(script: str, parameter: str, value: typing.Any):
+ """
+ Apply a resolved parameter value to a DSL script.
+ """
+ to_replace = f"{parameter}={octobot_commons.constants.UNRESOLVED_PARAMETER_PLACEHOLDER}"
+ if to_replace not in script:
+ raise octobot_commons.errors.ResolvedParameterNotFoundError(
+ f"Parameter {parameter} not found in script: {script}"
+ )
+ new_value = f"{parameter}={format_parameter_value(value)}"
+ return script.replace(to_replace, new_value)
+
+
+def add_resolved_parameter_value(script: str, parameter: str, value: typing.Any):
+    """
+    Append a resolved parameter value to the end of a DSL script.
+    Supports:
+    - Calls with no parenthesis (e.g. op -> op(x='a'))
+    - Calls with no existing params (e.g. op() -> op(x='a'))
+    - Calls with existing params (e.g. op(1) -> op(1, x='a'))
+    Raises InvalidParametersError if the parameter is already in the operator keyword args.
+    """
+    param_str = f"{parameter}={format_parameter_value(value)}"
+    if script.endswith(")"):  # endswith() is safe on an empty script (script[-1] would raise IndexError)
+        # script ends with ")": insert the parameter into the existing call
+        if re.search(rf"(?:\(|,)\s*{re.escape(parameter)}\s*=", script):
+            raise octobot_commons.errors.InvalidParametersError(
+                f"Parameter {parameter} is already in operator keyword args: {script}"
+            )
+        inner = script[:-1]
+        is_empty_call = inner.rstrip().endswith("(")  # e.g. "op(" -> the call has no params yet
+        if is_empty_call:
+            return f"{inner}{param_str})"
+        return f"{inner}, {param_str})"
+    if "(" in script:
+        raise octobot_commons.errors.InvalidParametersError(
+            f"Script {script} has unclosed parenthesis"
+        )
+    return f"{script}({param_str})"
+
+
+def has_unresolved_parameters(script: str) -> bool:
+ """
+ Check if a DSL script has unresolved parameters.
+ """
+ return octobot_commons.constants.UNRESOLVED_PARAMETER_PLACEHOLDER in script
diff --git a/packages/commons/octobot_commons/errors.py b/packages/commons/octobot_commons/errors.py
index 5b0f0484a..c8be287fc 100644
--- a/packages/commons/octobot_commons/errors.py
+++ b/packages/commons/octobot_commons/errors.py
@@ -151,3 +151,27 @@ class InvalidParametersError(DSLInterpreterError):
"""
Raised when the parameters of an operator are invalid
"""
+
+
+class MissingDefaultValueError(InvalidParametersError):
+ """
+ Raised when a parameter has no default value
+ """
+
+
+class InvalidParameterFormatError(InvalidParametersError):
+ """
+ Raised when the format of a parameter is invalid
+ """
+
+
+class ResolvedParameterNotFoundError(DSLInterpreterError):
+ """
+ Raised when a resolved parameter is not found in the script
+ """
+
+
+class ErrorStatementEncountered(DSLInterpreterError):
+ """
+    Raised when an error statement is encountered while executing a script
+ """
diff --git a/packages/commons/octobot_commons/logging/__init__.py b/packages/commons/octobot_commons/logging/__init__.py
index 4b78b5161..01acf6ef5 100644
--- a/packages/commons/octobot_commons/logging/__init__.py
+++ b/packages/commons/octobot_commons/logging/__init__.py
@@ -39,6 +39,11 @@
register_log_callback,
set_enable_web_interface_logs,
)
+from octobot_commons.logging.context_based_file_handler import (
+ add_context_based_file_handler,
+ ContextBasedFileHandler,
+)
+
__all__ = [
"BotLogger",
@@ -63,4 +68,6 @@
"register_error_notifier",
"register_log_callback",
"set_enable_web_interface_logs",
+ "add_context_based_file_handler",
+ "ContextBasedFileHandler",
]
diff --git a/packages/commons/octobot_commons/logging/context_based_file_handler.py b/packages/commons/octobot_commons/logging/context_based_file_handler.py
new file mode 100644
index 000000000..3fb3fa41d
--- /dev/null
+++ b/packages/commons/octobot_commons/logging/context_based_file_handler.py
@@ -0,0 +1,83 @@
+# pylint: disable=C0415, W0603, W1508, R0913, C0103
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import logging
+import os
+import typing
+
+
+MAX_CONTEXT_BASED_FILE_HANDLERS_PER_CATEGORY = 30
+DEFAULT_CONTEXT_BASED_FILE_FORMATTER = "%(asctime)s %(levelname)-8s %(name)-20s %(message)s"
+
+
+def add_context_based_file_handler(
+ logs_folder: str,
+ file_name_provider: typing.Callable[[], typing.Optional[str]]
+) -> None:
+ """
+ Add the ContextBasedFileHandler to the root logger. Logs will
+ additionally be written to a file named after the file name provided by the file_name_provider.
+ """
+ logging.getLogger().addHandler(
+ ContextBasedFileHandler(logs_folder, file_name_provider)
+ )
+
+
+class ContextBasedFileHandler(logging.Handler):
+ """
+ Logging handler that writes logs to specific files when the
+ context is set. The log file name is the file name provided by the file_name_provider.
+ """
+ def __init__(
+ self,
+ logs_folder: str,
+ file_name_provider: typing.Callable[[], typing.Optional[str]],
+ ):
+ super().__init__()
+ self._custom_handlers: dict[str, logging.FileHandler] = {}
+ self._file_name_provider = file_name_provider
+ self._logs_folder = logs_folder
+ os.makedirs(self._logs_folder, exist_ok=True)
+
+ def emit(self, record: logging.LogRecord) -> None:
+ if file_name := self._file_name_provider():
+ if file_name not in self._custom_handlers:
+ if len(self._custom_handlers) >= MAX_CONTEXT_BASED_FILE_HANDLERS_PER_CATEGORY:
+ self._remove_oldest_handler()
+ self._custom_handlers[file_name] = self._create_file_handler(file_name)
+ self._custom_handlers[file_name].emit(record)
+
+    def _remove_oldest_handler(self) -> None:
+        # Evict the least recently created handler (dicts preserve insertion order).
+        oldest_handler = self._custom_handlers.pop(next(iter(self._custom_handlers)))
+        # These per-file handlers are never attached to a logger: closing is enough.
+        oldest_handler.close()
+
+    def _create_file_handler(self, file_name: str) -> logging.FileHandler:
+        log_path = os.path.join(self._logs_folder, f"{file_name}.log")
+        file_handler = logging.FileHandler(log_path, mode="a", encoding="utf-8")
+        file_handler.setLevel(self.level)
+        root_logger = logging.getLogger()
+        for handler in root_logger.handlers:
+            if isinstance(handler, logging.FileHandler) and handler.formatter:
+                # reuse the formatter of the first user configured FileHandler
+                # (no print/log statement here: this runs inside emit())
+                file_handler.setFormatter(handler.formatter)
+                break
+        else:
+            # default formatter
+            file_handler.setFormatter(logging.Formatter(DEFAULT_CONTEXT_BASED_FILE_FORMATTER))
+        return file_handler
diff --git a/packages/commons/octobot_commons/time_frame_manager.py b/packages/commons/octobot_commons/time_frame_manager.py
index 158a46d2a..9cb4fe973 100644
--- a/packages/commons/octobot_commons/time_frame_manager.py
+++ b/packages/commons/octobot_commons/time_frame_manager.py
@@ -13,6 +13,8 @@
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
+import typing
+
import octobot_commons.constants as constants
import octobot_commons.logging as logging_util
import octobot_commons.enums as enums
@@ -99,7 +101,10 @@ def get_previous_time_frame(config_time_frames, time_frame, origin_time_frame):
return origin_time_frame
-def find_min_time_frame(time_frames, min_time_frame=None):
+def find_min_time_frame(
+ time_frames: list[typing.Union[str, enums.TimeFrames]],
+ min_time_frame: typing.Optional[str] = None
+) -> enums.TimeFrames:
"""
Find the minimum time frame
:param time_frames: the time frame list
diff --git a/packages/commons/tests/dsl_interpreter/operators/test_re_callable_operator_mixin.py b/packages/commons/tests/dsl_interpreter/operators/test_re_callable_operator_mixin.py
new file mode 100644
index 000000000..04eb57a96
--- /dev/null
+++ b/packages/commons/tests/dsl_interpreter/operators/test_re_callable_operator_mixin.py
@@ -0,0 +1,185 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import time
+import mock
+
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.dsl_interpreter.operators.re_callable_operator_mixin as re_callable_operator_mixin
+
+
+class TestReCallingOperatorResult:
+ def test_is_re_calling_operator_result_with_reset_to_id(self):
+ assert re_callable_operator_mixin.ReCallingOperatorResult.is_re_calling_operator_result(
+ {re_callable_operator_mixin.ReCallingOperatorResult.__name__: {"reset_to_id": "some_id"}}
+ ) is True
+
+ def test_is_re_calling_operator_result_with_last_execution_result(self):
+ assert re_callable_operator_mixin.ReCallingOperatorResult.is_re_calling_operator_result(
+ {
+ re_callable_operator_mixin.ReCallingOperatorResult.__name__: {
+ "last_execution_result": {"waiting_time": 5, "last_execution_time": 1000.0},
+ }
+ }
+ ) is True
+
+ def test_is_re_calling_operator_result_false_for_non_dict(self):
+ assert re_callable_operator_mixin.ReCallingOperatorResult.is_re_calling_operator_result(None) is False
+ assert re_callable_operator_mixin.ReCallingOperatorResult.is_re_calling_operator_result([]) is False
+ assert re_callable_operator_mixin.ReCallingOperatorResult.is_re_calling_operator_result("str") is False
+
+ def test_is_re_calling_operator_result_false_for_dict_without_keys(self):
+ assert re_callable_operator_mixin.ReCallingOperatorResult.is_re_calling_operator_result({}) is False
+ assert re_callable_operator_mixin.ReCallingOperatorResult.is_re_calling_operator_result(
+ {"other_key": "value"}
+ ) is False
+
+ def test_get_next_call_time_with_full_data(self):
+ with mock.patch.object(time, "time", return_value=1000.0):
+ result = re_callable_operator_mixin.ReCallingOperatorResult(
+ reset_to_id=None,
+ last_execution_result={
+ re_callable_operator_mixin.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value: 1000.0,
+ re_callable_operator_mixin.ReCallingOperatorResultKeys.WAITING_TIME.value: 5.0,
+ },
+ )
+ assert result.get_next_call_time() == 1005.0
+
+ def test_get_next_call_time_with_missing_last_execution_time_uses_current_time(self):
+ with mock.patch.object(time, "time", return_value=2000.0):
+ result = re_callable_operator_mixin.ReCallingOperatorResult(
+ reset_to_id=None,
+ last_execution_result={
+ re_callable_operator_mixin.ReCallingOperatorResultKeys.WAITING_TIME.value: 10.0,
+ },
+ )
+ assert result.get_next_call_time() == 2010.0
+
+ def test_get_next_call_time_returns_none_when_no_last_execution_result(self):
+ result = re_callable_operator_mixin.ReCallingOperatorResult(
+ reset_to_id=None,
+ last_execution_result=None,
+ )
+ assert result.get_next_call_time() is None
+
+ def test_get_next_call_time_returns_none_when_waiting_time_is_zero(self):
+ result = re_callable_operator_mixin.ReCallingOperatorResult(
+ reset_to_id=None,
+ last_execution_result={
+ re_callable_operator_mixin.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value: 1000.0,
+ re_callable_operator_mixin.ReCallingOperatorResultKeys.WAITING_TIME.value: 0,
+ },
+ )
+ assert result.get_next_call_time() is None
+
+
+class _TestReCallableOperator(dsl_interpreter.ReCallableOperatorMixin):
+ """Minimal operator using the mixin for testing."""
+
+ def __init__(self):
+ pass
+
+
+class TestReCallableOperatorMixin:
+ def test_last_execution_result_key(self):
+ assert dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY == "last_execution_result"
+
+ def test_get_re_callable_parameters(self):
+ params = dsl_interpreter.ReCallableOperatorMixin.get_re_callable_parameters()
+ assert len(params) == 1
+ assert params[0].name == "last_execution_result"
+ assert params[0].required is False
+ assert params[0].default is None
+
+ def test_get_last_execution_result_returns_none_when_param_missing(self):
+ operator = _TestReCallableOperator()
+ assert operator.get_last_execution_result({}) is None
+ assert operator.get_last_execution_result({"other": "value"}) is None
+
+ def test_get_last_execution_result_returns_none_when_param_is_none(self):
+ operator = _TestReCallableOperator()
+ assert operator.get_last_execution_result({
+ dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY: None,
+ }) is None
+
+ def test_get_last_execution_result_returns_none_when_not_re_calling_format(self):
+ operator = _TestReCallableOperator()
+ assert operator.get_last_execution_result({
+ dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY: {"wrong": "structure"},
+ }) is None
+
+ def test_get_last_execution_result_returns_inner_dict_for_valid_format(self):
+ operator = _TestReCallableOperator()
+ inner = {
+ re_callable_operator_mixin.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value: 1000.0,
+ re_callable_operator_mixin.ReCallingOperatorResultKeys.WAITING_TIME.value: 5.0,
+ }
+ result = operator.get_last_execution_result({
+ dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY: {
+ re_callable_operator_mixin.ReCallingOperatorResult.__name__: {
+ "last_execution_result": inner,
+ },
+ },
+ })
+ assert result == inner
+
+ def test_get_last_execution_result_with_reset_to_id_format(self):
+ operator = _TestReCallableOperator()
+ inner = {"waiting_time": 3.0, "last_execution_time": 500.0}
+ result = operator.get_last_execution_result({
+ dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY: {
+ re_callable_operator_mixin.ReCallingOperatorResult.__name__: {
+ "reset_to_id": "abc",
+ "last_execution_result": inner,
+ },
+ },
+ })
+ assert result == inner
+
+ def test_build_re_callable_result(self):
+ operator = _TestReCallableOperator()
+ result = operator.build_re_callable_result(
+ last_execution_time=1000.0,
+ waiting_time=5.0,
+ )
+ inner = result[re_callable_operator_mixin.ReCallingOperatorResult.__name__]
+ assert "last_execution_result" in inner
+ assert inner["last_execution_result"][
+ re_callable_operator_mixin.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value
+ ] == 1000.0
+ assert inner["last_execution_result"][
+ re_callable_operator_mixin.ReCallingOperatorResultKeys.WAITING_TIME.value
+ ] == 5.0
+
+ def test_build_re_callable_result_with_reset_to_id(self):
+ operator = _TestReCallableOperator()
+ result = operator.build_re_callable_result(
+ reset_to_id="target_123",
+ last_execution_time=1000.0,
+ waiting_time=5.0,
+ )
+ inner = result[re_callable_operator_mixin.ReCallingOperatorResult.__name__]
+ assert inner["reset_to_id"] == "target_123"
+ assert "last_execution_result" in inner
+
+ def test_build_re_callable_result_with_extra_kwargs(self):
+ operator = _TestReCallableOperator()
+ result = operator.build_re_callable_result(
+ last_execution_time=1000.0,
+ waiting_time=5.0,
+ extra_field=42,
+ )
+ inner = result[re_callable_operator_mixin.ReCallingOperatorResult.__name__]
+ assert inner["last_execution_result"]["extra_field"] == 42
diff --git a/packages/commons/tests/dsl_interpreter/test_custom_operators.py b/packages/commons/tests/dsl_interpreter/test_custom_operators.py
index a22d31dde..14b823c34 100644
--- a/packages/commons/tests/dsl_interpreter/test_custom_operators.py
+++ b/packages/commons/tests/dsl_interpreter/test_custom_operators.py
@@ -16,6 +16,7 @@
import typing
import pytest
import ast
+import re
import octobot_commons.dsl_interpreter as dsl_interpreter
import octobot_commons.enums as commons_enums
@@ -50,8 +51,8 @@ def __init__(self, *parameters: dsl_interpreter.OperatorParameterType, **kwargs:
def get_name() -> str:
return "plus_x"
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
return [
dsl_interpreter.OperatorParameter(name="data", description="the data to compute the sum of", required=True, type=int),
dsl_interpreter.OperatorParameter(name="data2", description="the data to compute the sum of", required=False, type=int),
@@ -96,8 +97,8 @@ class Add2Operator(dsl_interpreter.CallOperator):
def get_name() -> str:
return "add2"
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
return [
dsl_interpreter.OperatorParameter(name="left", description="the left operand", required=True, type=int),
dsl_interpreter.OperatorParameter(name="right", description="the right operand", required=True, type=int),
@@ -107,12 +108,92 @@ def compute(self) -> dsl_interpreter.ComputedOperatorParameterType:
left, right = self.get_computed_left_and_right_parameters()
return left + right
+class PreComputeSumOperator(dsl_interpreter.PreComputingCallOperator):
+ @staticmethod
+ def get_name() -> str:
+ return "pre_compute_sum"
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="a", description="first value", required=True, type=int),
+ dsl_interpreter.OperatorParameter(name="b", description="second value", required=True, type=int),
+ ]
+
+ async def pre_compute(self) -> None:
+ await super().pre_compute()
+ value_by_parameter = self.get_computed_value_by_parameter()
+ self.value = value_by_parameter["a"] + value_by_parameter["b"]
+
+
+class CallWithDefaultParametersOperator(dsl_interpreter.CallOperator):
+ @staticmethod
+ def get_name() -> str:
+ return "call_with_default_parameters"
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="value1", description="the first value", required=True, type=int),
+ dsl_interpreter.OperatorParameter(name="value2", description="the second value", required=False, type=int, default=0),
+ dsl_interpreter.OperatorParameter(name="added_extra_value", description="value to add to the result", required=False, type=int, default=0),
+ dsl_interpreter.OperatorParameter(name="substracted_extra_value", description="value to substract from the result", required=False, type=int, default=0),
+ ]
+
+ def compute(self) -> dsl_interpreter.ComputedOperatorParameterType:
+ value_by_parameter = self.get_computed_value_by_parameter()
+ return (
+ value_by_parameter["value1"]
+ + value_by_parameter["value2"]
+ + value_by_parameter["added_extra_value"]
+ - value_by_parameter["substracted_extra_value"]
+ )
+
+
+class ParamMerger(dsl_interpreter.CallOperator):
+ @staticmethod
+ def get_name() -> str:
+ return "param_merger"
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="p1", description="the first value", required=True, type=int),
+ dsl_interpreter.OperatorParameter(name="p2", description="the second value", required=True, type=int),
+ ]
+
+ def compute(self) -> dsl_interpreter.ComputedOperatorParameterType:
+ value_by_parameter = self.get_computed_value_by_parameter()
+ return str(value_by_parameter)
+
+
+class NestedDictSumOperator(dsl_interpreter.CallOperator):
+ @staticmethod
+ def get_name() -> str:
+ return "nested_dict_sum"
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="values", description="the dictionary to sum the values of", required=True, type=dict),
+ ]
+
+ def nested_sum(self, values: dict) -> float:
+ return sum(
+ self.nested_sum(value) if isinstance(value, dict) else float(value)
+ for value in values.values()
+ )
+
+ def compute(self) -> dsl_interpreter.ComputedOperatorParameterType:
+ value_by_parameter = self.get_computed_value_by_parameter()
+ return self.nested_sum(value_by_parameter["values"])
+
@pytest.fixture
def interpreter():
return dsl_interpreter.Interpreter(
dsl_interpreter.get_all_operators() + [
- SumPlusXOperatorWithoutInit, SumPlusXOperatorWithPreCompute, TimeFrameToSecondsOperator, AddOperator, Add2Operator
+ SumPlusXOperatorWithoutInit, SumPlusXOperatorWithPreCompute, TimeFrameToSecondsOperator, AddOperator, Add2Operator, PreComputeSumOperator, CallWithDefaultParametersOperator, NestedDictSumOperator, ParamMerger
]
)
@@ -130,23 +211,75 @@ async def test_interpreter_basic_operations(interpreter):
assert await interpreter.interprete("time_frame_to_seconds('1'+'h')") == 3600
+@pytest.mark.asyncio
+async def test_interpreter_basic_operations_with_named_parameters(interpreter):
+ assert await interpreter.interprete("param_merger(1, 2)") == "{'p1': 1, 'p2': 2}"
+ assert await interpreter.interprete("param_merger(1, p2=2)") == "{'p1': 1, 'p2': 2}"
+ assert await interpreter.interprete("param_merger(p1=1, p2=2)") == "{'p1': 1, 'p2': 2}"
+ assert await interpreter.interprete("param_merger(p2=1, p1=2)") == "{'p1': 2, 'p2': 1}"
+
+
+@pytest.mark.asyncio
+async def test_pre_computing_call_operator(interpreter):
+ assert await interpreter.interprete("pre_compute_sum(1, 2)") == 3
+ assert await interpreter.interprete("pre_compute_sum(10, 20)") == 30
+ assert await interpreter.interprete("pre_compute_sum(1 + 1, 2 + 2)") == 6
+ with pytest.raises(commons_errors.DSLInterpreterError, match="has not been pre_computed"):
+ operator = PreComputeSumOperator(1, 2)
+ operator.compute()
+
+
+@pytest.mark.asyncio
+async def test_interpreter_call_with_default_parameters(interpreter):
+ assert await interpreter.interprete("call_with_default_parameters(1)") == 1
+ assert await interpreter.interprete("call_with_default_parameters(1, 2)") == 3
+ assert await interpreter.interprete("call_with_default_parameters(1, 2, 3)") == 6
+ assert await interpreter.interprete("call_with_default_parameters(1, 2, 3, 4)") == 2
+ assert await interpreter.interprete("call_with_default_parameters(1, 2, added_extra_value=3)") == 6
+ assert await interpreter.interprete("call_with_default_parameters(1, 2, 3, substracted_extra_value=4)") == 2
+ assert await interpreter.interprete("call_with_default_parameters(1, 2, substracted_extra_value=3)") == 0
+ assert await interpreter.interprete("call_with_default_parameters(1, 2, added_extra_value=4, substracted_extra_value=5)") == 2
+ with pytest.raises(commons_errors.InvalidParametersError, match="call_with_default_parameters requires at least 1 parameter"):
+ await interpreter.interprete("call_with_default_parameters()")
+ with pytest.raises(commons_errors.InvalidParametersError, match="call_with_default_parameters supports up to 4 parameters:"):
+ await interpreter.interprete("call_with_default_parameters(1, 2, 3, 4, 5)")
+ with pytest.raises(commons_errors.InvalidParametersError, match=re.escape("Parameter(s) 'added_extra_value' have multiple values")):
+ await interpreter.interprete("call_with_default_parameters(1, 2, 3, added_extra_value=4)")
+ with pytest.raises(commons_errors.InvalidParametersError, match=re.escape("call_with_default_parameters supports up to 4 parameters:")):
+ await interpreter.interprete("call_with_default_parameters(1, 2, 3, 4, added_extra_value=5)")
+
+
+@pytest.mark.asyncio
+async def test_interpreter_nested_dict_sum(interpreter):
+ assert await interpreter.interprete("nested_dict_sum({})") == 0
+ assert await interpreter.interprete("nested_dict_sum({'a': 1})") == 1
+ assert await interpreter.interprete("nested_dict_sum({'a': 1 + 1})") == 2
+ assert await interpreter.interprete("nested_dict_sum({'a': 1, 'b': 2})") == 3
+ assert await interpreter.interprete("nested_dict_sum({'a': 1, 'b': {'c': 2, 'd': 3}})") == 6
+ assert await interpreter.interprete("nested_dict_sum({'a': 1, 'b': {'c': 2, 'd': {'e': 3}}})") == 6
+ assert await interpreter.interprete("nested_dict_sum({'a': 1, 'b': {'c': 2, 'd': {'e': 3, 'f': {'g': 4}}}})") == 10
+ assert await interpreter.interprete("nested_dict_sum({'a': 1, 'b': {'c': 2, 'd': {'e': 3, 'f': {'g': 4, 'h': {'i': 5}}}}})") == 15
+ assert await interpreter.interprete("nested_dict_sum({'a': 1, 'b': {'c': 2, 'd': {'e': 3, 'f': {'g': 4, 'h': {'i': 5, 'j': {'k': 6}}}}}})") == 21
+ assert await interpreter.interprete("nested_dict_sum({'a': 1, 'b': {'c': 2, 'd': {'e': 3, 'f': {'g': 4, 'h': {'i': 5, 'j': {'k': 6, 'l': {'m': 7}}}}}}})") == 28
+ assert await interpreter.interprete("nested_dict_sum({'a': 1, 'b': {'c': 2, 'd': {'e': 3, 'f': {'g': 4, 'h': {'i': 5, 'j': {'k': 6, 'l': {'m': 7, 'n': {'o': 8}}}}}}, 'p': 9 + 0.1}})") == 45.1
+
@pytest.mark.asyncio
async def test_interpreter_invalid_parameters(interpreter):
- with pytest.raises(commons_errors.InvalidParametersError, match="plus_x requires at least 1 parameter\(s\): 1: data"):
+ with pytest.raises(commons_errors.InvalidParametersError, match=re.escape("plus_x requires at least 1 parameter(s): 1: data")):
interpreter.prepare("plus_x()")
- with pytest.raises(commons_errors.InvalidParametersError, match="plus_x requires at least 1 parameter\(s\): 1: data"):
+ with pytest.raises(commons_errors.InvalidParametersError, match=re.escape("plus_x requires at least 1 parameter(s): 1: data")):
await interpreter.interprete("plus_x()")
- with pytest.raises(commons_errors.InvalidParametersError, match="add2 requires at least 2 parameter\(s\): 1: left"):
+ with pytest.raises(commons_errors.InvalidParametersError, match=re.escape("add2 requires at least 2 parameter(s): 1: left")):
interpreter.prepare("add2()")
- with pytest.raises(commons_errors.InvalidParametersError, match="add2 requires at least 2 parameter\(s\): 1: left"):
+ with pytest.raises(commons_errors.InvalidParametersError, match=re.escape("add2 requires at least 2 parameter(s): 1: left")):
await interpreter.interprete("add2()")
with pytest.raises(commons_errors.InvalidParametersError, match="add2 supports up to 2 parameters:"):
interpreter.prepare("add2(1, 2, 3)")
with pytest.raises(commons_errors.InvalidParametersError, match="add2 supports up to 2 parameters:"):
await interpreter.interprete("add2(1, 2, 3)")
- with pytest.raises(commons_errors.InvalidParametersError, match="time_frame_to_seconds requires at least 1 parameter\(s\)"):
+ with pytest.raises(commons_errors.InvalidParametersError, match=re.escape("time_frame_to_seconds requires at least 1 parameter(s)")):
interpreter.prepare("time_frame_to_seconds()")
- with pytest.raises(commons_errors.InvalidParametersError, match="time_frame_to_seconds requires at least 1 parameter\(s\)"):
+ with pytest.raises(commons_errors.InvalidParametersError, match=re.escape("time_frame_to_seconds requires at least 1 parameter(s)")):
await interpreter.interprete("time_frame_to_seconds()")
with pytest.raises(commons_errors.InvalidParametersError, match="time_frame_to_seconds supports up to 1 parameters"):
interpreter.prepare("time_frame_to_seconds(1, 2, 3)")
@@ -154,6 +287,56 @@ async def test_interpreter_invalid_parameters(interpreter):
await interpreter.interprete("time_frame_to_seconds(1, 2, 3)")
+def test_get_input_value_by_parameter():
+ # Positional arguments
+ operator = ParamMerger(1, 2)
+ assert operator.get_input_value_by_parameter() == {"p1": 1, "p2": 2}
+
+ # Keyword arguments
+ operator = ParamMerger(p1=10, p2=20)
+ assert operator.get_input_value_by_parameter() == {"p1": 10, "p2": 20}
+
+ # Mixed positional and keyword
+ operator = ParamMerger(1, p2=2)
+ assert operator.get_input_value_by_parameter() == {"p1": 1, "p2": 2}
+
+ # Reversed keyword order
+ operator = ParamMerger(p2=100, p1=200)
+ assert operator.get_input_value_by_parameter() == {"p1": 200, "p2": 100}
+
+ # Default values for optional parameters
+ operator = CallWithDefaultParametersOperator(42)
+ assert operator.get_input_value_by_parameter() == {
+ "value1": 42,
+ "value2": 0,
+ "added_extra_value": 0,
+ "substracted_extra_value": 0,
+ }
+
+ # Nested operator as raw (uncomputed) parameter
+ nested_add = AddOperator(1, 2)
+ operator = Add2Operator(nested_add, 3)
+ value_by_param = operator.get_input_value_by_parameter()
+ assert value_by_param["left"] is nested_add
+ assert value_by_param["right"] == 3
+
+ # Dict parameter
+ operator = NestedDictSumOperator({"a": 1, "b": 2})
+ assert operator.get_input_value_by_parameter() == {"values": {"a": 1, "b": 2}}
+
+ # Unknown parameters raise InvalidParametersError
+ with pytest.raises(
+ commons_errors.InvalidParametersError,
+ match=re.escape("Parameter(s) 'unknown_param' are unknown. Supported parameters: p1, p2"),
+ ):
+ ParamMerger(1, unknown_param=3).get_input_value_by_parameter()
+ with pytest.raises(
+ commons_errors.InvalidParametersError,
+ match=re.escape("param_merger supports up to 2 parameters"),
+ ):
+ ParamMerger(p1=1, p2=2, extra=99, another=1).get_input_value_by_parameter()
+
+
class OperatorWithName(dsl_interpreter.Operator):
NAME = "custom_name"
DESCRIPTION = "A custom operator with NAME set"
@@ -188,8 +371,8 @@ class OperatorWithParameters(dsl_interpreter.Operator):
def get_name() -> str:
return "param_op"
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
return [
dsl_interpreter.OperatorParameter(name="x", description="first parameter", required=True, type=int),
dsl_interpreter.OperatorParameter(name="y", description="second parameter", required=False, type=int),
diff --git a/packages/commons/tests/dsl_interpreter/test_parameters_util.py b/packages/commons/tests/dsl_interpreter/test_parameters_util.py
new file mode 100644
index 000000000..411233e15
--- /dev/null
+++ b/packages/commons/tests/dsl_interpreter/test_parameters_util.py
@@ -0,0 +1,408 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import mock
+import pytest
+
+import octobot_commons.constants as constants
+import octobot_commons.dsl_interpreter.parameters_util as parameters_util
+import octobot_commons.dsl_interpreter.operator_parameter as operator_parameter
+import octobot_commons.errors as commons_errors
+
+
+class TestFormatParameterValue:
+ def test_none(self):
+ assert parameters_util.format_parameter_value(None) == "None"
+
+ def test_true(self):
+ assert parameters_util.format_parameter_value(True) == "True"
+
+ def test_false(self):
+ assert parameters_util.format_parameter_value(False) == "False"
+
+ def test_int(self):
+ assert parameters_util.format_parameter_value(42) == "42"
+ assert parameters_util.format_parameter_value(-10) == "-10"
+
+ def test_float(self):
+ assert parameters_util.format_parameter_value(3.14) == "3.14"
+ assert parameters_util.format_parameter_value(1.0) == "1.0"
+
+ def test_plain_string(self):
+ assert parameters_util.format_parameter_value("hello") == "'hello'"
+ assert parameters_util.format_parameter_value("") == "''"
+
+ def test_string_json_list(self):
+ assert parameters_util.format_parameter_value("[1, 2, 3]") == "[1, 2, 3]"
+
+ def test_string_json_dict(self):
+ assert parameters_util.format_parameter_value('{"a": 1}') == "{'a': 1}"
+
+ def test_string_invalid_json(self):
+ assert parameters_util.format_parameter_value("not valid json") == "'not valid json'"
+
+ def test_list(self):
+ assert parameters_util.format_parameter_value([1, 2, 3]) == "[1, 2, 3]"
+ assert parameters_util.format_parameter_value([]) == "[]"
+
+ def test_dict(self):
+ assert parameters_util.format_parameter_value({"a": 1}) == "{'a': 1}"
+ assert parameters_util.format_parameter_value({}) == "{}"
+
+ def test_other_type_uses_repr(self):
+ class Custom:
+ def __repr__(self):
+ return "Custom()"
+ assert parameters_util.format_parameter_value(Custom()) == "Custom()"
+
+
+class TestResoveOperatorParams:
+ def test_empty_params_and_empty_values(self):
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = []
+ result = parameters_util.resove_operator_params(operator_class, {})
+ assert result == []
+
+ def test_required_params_only(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ param_b = operator_parameter.OperatorParameter(
+ name="b", description="second", required=True, type=str
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a, param_b]
+ param_value_by_name = {"a": 1, "b": "hello"}
+ result = parameters_util.resove_operator_params(operator_class, param_value_by_name)
+ assert result == ["1", "'hello'"]
+
+ def test_optional_params_only(self):
+ param_x = operator_parameter.OperatorParameter(
+ name="x", description="optional", required=False, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_x]
+ param_value_by_name = {"x": 99}
+ result = parameters_util.resove_operator_params(operator_class, param_value_by_name)
+ assert result == ["x=99"]
+
+ def test_mixed_required_and_optional(self):
+ param_req = operator_parameter.OperatorParameter(
+ name="req", description="required", required=True, type=int
+ )
+ param_opt = operator_parameter.OperatorParameter(
+ name="opt", description="optional", required=False, type=str
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_req, param_opt]
+ param_value_by_name = {"req": 42, "opt": "value"}
+ result = parameters_util.resove_operator_params(operator_class, param_value_by_name)
+ assert result == ["42", "opt='value'"]
+
+ def test_skips_missing_params(self):
+ param_req = operator_parameter.OperatorParameter(
+ name="req", description="required", required=True, type=int
+ )
+ param_opt = operator_parameter.OperatorParameter(
+ name="opt", description="optional", required=False, type=str
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_req, param_opt]
+ param_value_by_name = {"req": 1}
+ result = parameters_util.resove_operator_params(operator_class, param_value_by_name)
+ assert result == ["1"]
+
+ def test_extra_values_ignored(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a]
+ param_value_by_name = {"a": 1, "unknown": "ignored"}
+ result = parameters_util.resove_operator_params(operator_class, param_value_by_name)
+ assert result == ["1"]
+
+
+class TestResolveOperatorArgsAndKwargs:
+ def test_empty_params_returns_unchanged(self):
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = []
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, [1, 2], {"extra": "val"}
+ )
+ assert args == [1, 2]
+ assert kwargs == {"extra": "val"}
+
+ def test_positional_args_only(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ param_b = operator_parameter.OperatorParameter(
+ name="b", description="second", required=True, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a, param_b]
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, [1, 2], {}
+ )
+ assert args == [1, 2]
+ assert kwargs == {}
+
+ def test_positional_arg_as_keyword_arg(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ param_b = operator_parameter.OperatorParameter(
+ name="b", description="second", required=True, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a, param_b]
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, [1], {"b": 3}
+ )
+ assert args == [1, 3]
+ assert kwargs == {}
+
+ def test_positional_arg_as_keyword_arg_in_a_wrong_order(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ param_b = operator_parameter.OperatorParameter(
+ name="b", description="second", required=True, type=int
+ )
+ param_c = operator_parameter.OperatorParameter(
+ name="c", description="third", required=True, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a, param_b, param_c]
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, [1], {"c": 3, "b": 2}
+ )
+ assert args == [1, 2, 3]
+ assert kwargs == {}
+
+ def test_positional_all_args_as_keywords_in_a_wrong_order(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ param_b = operator_parameter.OperatorParameter(
+ name="b", description="second", required=True, type=int
+ )
+ param_c = operator_parameter.OperatorParameter(
+ name="c", description="third", required=True, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a, param_b, param_c]
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, [], {"b": 2, "a": 1, "c": 3}
+ )
+ assert args == [1, 2, 3]
+ assert kwargs == {}
+
+ def test_kwargs_only(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ param_b = operator_parameter.OperatorParameter(
+ name="b", description="second", required=True, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a, param_b]
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, [], {"a": 1, "b": 2}
+ )
+ assert args == [1, 2]
+ assert kwargs == {}
+
+ def test_mixed_args_and_kwargs(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ param_b = operator_parameter.OperatorParameter(
+ name="b", description="second", required=True, type=int
+ )
+ param_c = operator_parameter.OperatorParameter(
+ name="c", description="optional", required=False, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a, param_b, param_c]
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, [1], {"b": 2, "c": 3}
+ )
+ assert args == [1, 2, 3]
+ assert kwargs == {}
+
+ def test_extra_kwargs_preserved(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a]
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, [1], {"other": "value"}
+ )
+ assert args == [1]
+ assert kwargs == {"other": "value"}
+
+ def test_raises_when_too_many_positional_args(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a]
+ operator_class.get_name.return_value = "test_op"
+ operator_class.get_parameters_description.return_value = "1: a [int] - first"
+ with pytest.raises(commons_errors.InvalidParametersError, match="test_op supports up to 1 parameters"):
+ parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, [1, 2, 3], {}
+ )
+
+ def test_partial_params_allowed(self):
+ param_a = operator_parameter.OperatorParameter(
+ name="a", description="first", required=True, type=int
+ )
+ param_b = operator_parameter.OperatorParameter(
+ name="b", description="second", required=False, type=int
+ )
+ operator_class = mock.Mock()
+ operator_class.get_parameters.return_value = [param_a, param_b]
+ args, kwargs = parameters_util.resolve_operator_args_and_kwargs(
+ operator_class, [1], {}
+ )
+ assert args == [1]
+ assert kwargs == {}
+
+
+class TestApplyResolvedParameterValue:
+ def test_replaces_single_parameter_with_int(self):
+ script = f"op(x=1, y={constants.UNRESOLVED_PARAMETER_PLACEHOLDER})"
+ result = parameters_util.apply_resolved_parameter_value(script, "y", 42)
+ assert result == "op(x=1, y=42)"
+
+ def test_replaces_single_parameter_with_string(self):
+ script = f"op(name={constants.UNRESOLVED_PARAMETER_PLACEHOLDER})"
+ result = parameters_util.apply_resolved_parameter_value(script, "name", "hello")
+ assert result == "op(name='hello')"
+
+ def test_replaces_single_parameter_with_bool(self):
+ script = f"op(flag={constants.UNRESOLVED_PARAMETER_PLACEHOLDER})"
+ result = parameters_util.apply_resolved_parameter_value(script, "flag", True)
+ assert result == "op(flag=True)"
+
+ def test_replaces_single_parameter_with_list(self):
+ script = f"op(items={constants.UNRESOLVED_PARAMETER_PLACEHOLDER})"
+ result = parameters_util.apply_resolved_parameter_value(script, "items", [1, 2])
+ assert result == "op(items=[1, 2])"
+
+ def test_replaces_single_parameter_with_dict(self):
+ script = f"op(config={constants.UNRESOLVED_PARAMETER_PLACEHOLDER})"
+ result = parameters_util.apply_resolved_parameter_value(
+ script, "config", {"a": 1}
+ )
+ assert result == "op(config={'a': 1})"
+
+ def test_replaces_single_parameter_with_none(self):
+ script = f"op(val={constants.UNRESOLVED_PARAMETER_PLACEHOLDER})"
+ result = parameters_util.apply_resolved_parameter_value(script, "val", None)
+ assert result == "op(val=None)"
+
+ def test_raises_when_parameter_not_found(self):
+ script = "op(x=1, y=2)"
+ with pytest.raises(commons_errors.ResolvedParameterNotFoundError, match="Parameter z not found in script"):
+ parameters_util.apply_resolved_parameter_value(script, "z", 42)
+
+ def test_raises_when_placeholder_not_in_script_for_parameter(self):
+ script = f"op(x={constants.UNRESOLVED_PARAMETER_PLACEHOLDER}, y=2)"
+ with pytest.raises(commons_errors.ResolvedParameterNotFoundError, match="Parameter z not found in script"):
+ parameters_util.apply_resolved_parameter_value(script, "z", 42)
+
+ def test_replaces_only_exact_parameter_pattern(self):
+ script = f"op(a=1, b={constants.UNRESOLVED_PARAMETER_PLACEHOLDER})"
+ result = parameters_util.apply_resolved_parameter_value(script, "b", 100)
+ assert result == "op(a=1, b=100)"
+ # Ensure 'a' was not touched
+ assert "a=1" in result
+
+
+class TestAddResolvedParameterValue:
+ def test_adds_to_call_with_no_parenthesis(self):
+ result = parameters_util.add_resolved_parameter_value("op", "x", "a")
+ assert result == "op(x='a')"
+
+ def test_adds_to_empty_params_op(self):
+ result = parameters_util.add_resolved_parameter_value("op()", "x", 42)
+ assert result == "op(x=42)"
+
+ def test_adds_to_empty_params_with_spaces(self):
+ result = parameters_util.add_resolved_parameter_value("op( )", "x", 42)
+ assert result == "op( x=42)"
+
+ def test_adds_after_positional_arg(self):
+ result = parameters_util.add_resolved_parameter_value("op(1)", "x", 42)
+ assert result == "op(1, x=42)"
+
+ def test_adds_after_keyword_arg(self):
+ result = parameters_util.add_resolved_parameter_value("op(a=1)", "x", 42)
+ assert result == "op(a=1, x=42)"
+
+ def test_adds_after_multiple_args(self):
+ result = parameters_util.add_resolved_parameter_value("op(1, b=2)", "x", 42)
+ assert result == "op(1, b=2, x=42)"
+
+ def test_adds_string_value(self):
+ result = parameters_util.add_resolved_parameter_value("op()", "name", "hello")
+ assert result == "op(name='hello')"
+
+ def test_raises_when_parameter_already_in_kwargs(self):
+ with pytest.raises(commons_errors.InvalidParametersError, match="Parameter x is already in operator keyword args"):
+ parameters_util.add_resolved_parameter_value("op(x=1)", "x", 42)
+
+ def test_raises_when_parameter_already_first_kwarg(self):
+ with pytest.raises(commons_errors.InvalidParametersError, match="Parameter a is already"):
+ parameters_util.add_resolved_parameter_value("op(a=1, b=2)", "a", 99)
+
+ def test_raises_when_parameter_already_last_kwarg(self):
+ with pytest.raises(commons_errors.InvalidParametersError, match="Parameter b is already"):
+ parameters_util.add_resolved_parameter_value("op(a=1, b=2)", "b", 99)
+
+ def test_raises_when_script_has_unclosed_parenthesis(self):
+ with pytest.raises(commons_errors.InvalidParametersError, match="has unclosed parenthesis"):
+ parameters_util.add_resolved_parameter_value("op(1", "x", 42)
+
+
+class TestHasUnresolvedParameters:
+ def test_returns_true_when_placeholder_present(self):
+ script = f"op(x={constants.UNRESOLVED_PARAMETER_PLACEHOLDER})"
+ assert parameters_util.has_unresolved_parameters(script) is True
+
+ def test_returns_true_when_multiple_placeholders(self):
+ placeholder = constants.UNRESOLVED_PARAMETER_PLACEHOLDER
+ script = f"op(a={placeholder}, b={placeholder})"
+ assert parameters_util.has_unresolved_parameters(script) is True
+
+ def test_returns_false_when_no_placeholder(self):
+ script = "op(x=1, y=2)"
+ assert parameters_util.has_unresolved_parameters(script) is False
+
+ def test_returns_false_for_empty_script(self):
+ assert parameters_util.has_unresolved_parameters("") is False
+
+ def test_returns_true_when_placeholder_part_of_larger_string(self):
+ script = f"op(x='prefix_{constants.UNRESOLVED_PARAMETER_PLACEHOLDER}_suffix')"
+ assert parameters_util.has_unresolved_parameters(script) is True
+
+ def test_returns_true_when_placeholder_alone(self):
+ script = constants.UNRESOLVED_PARAMETER_PLACEHOLDER
+ assert parameters_util.has_unresolved_parameters(script) is True
diff --git a/packages/commons/tests/logging/test_context_based_file_handler.py b/packages/commons/tests/logging/test_context_based_file_handler.py
new file mode 100644
index 000000000..b98c3bf3a
--- /dev/null
+++ b/packages/commons/tests/logging/test_context_based_file_handler.py
@@ -0,0 +1,178 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import logging
+import os
+import tempfile
+
+import mock
+import pytest
+
+import octobot_commons.logging.context_based_file_handler as context_based_file_handler
+
+
+@pytest.fixture
+def temp_info_logs_and_cleanup_folder():
+ with tempfile.TemporaryDirectory() as tmpdir:
+ root = logging.getLogger()
+ original_level = root.level
+ root.setLevel(logging.INFO)
+ yield tmpdir
+ root.setLevel(original_level)
+ for handler in root.handlers[:]:
+ if isinstance(handler, context_based_file_handler.ContextBasedFileHandler):
+ handler.close()
+ root.removeHandler(handler)
+
+
+def test_context_based_file_handler_writes_to_file_when_provider_returns_name(temp_info_logs_and_cleanup_folder):
+ file_name_provider = mock.Mock(return_value="my_context")
+ handler = context_based_file_handler.ContextBasedFileHandler(
+ temp_info_logs_and_cleanup_folder, file_name_provider
+ )
+ handler.setLevel(logging.INFO)
+ logging.getLogger().addHandler(handler)
+
+ logger = logging.getLogger("test_logger")
+ logger.setLevel(logging.INFO)
+ logger.info("test message")
+
+ handler.flush()
+ log_path = os.path.join(temp_info_logs_and_cleanup_folder, "my_context.log")
+ with open(log_path, encoding="utf-8") as f:
+ content = f.read()
+ assert "test message" in content
+
+
+def test_context_based_file_handler_does_not_write_when_provider_returns_none(temp_info_logs_and_cleanup_folder):
+ file_name_provider = mock.Mock(return_value=None)
+ handler = context_based_file_handler.ContextBasedFileHandler(
+ temp_info_logs_and_cleanup_folder, file_name_provider
+ )
+ handler.setLevel(logging.INFO)
+ logging.getLogger().addHandler(handler)
+
+ logger = logging.getLogger("test_logger")
+ logger.setLevel(logging.INFO)
+ logger.info("test message")
+
+ handler.flush()
+ assert not os.listdir(temp_info_logs_and_cleanup_folder)
+
+
+def test_context_based_file_handler_creates_multiple_files_for_different_contexts(
+ temp_info_logs_and_cleanup_folder,
+):
+ contexts = []
+
+ def rotating_provider():
+ return contexts[0] if contexts else None
+
+ handler = context_based_file_handler.ContextBasedFileHandler(
+ temp_info_logs_and_cleanup_folder, rotating_provider
+ )
+ handler.setLevel(logging.INFO)
+ logging.getLogger().addHandler(handler)
+ logger = logging.getLogger("test_logger")
+ logger.setLevel(logging.INFO)
+
+ contexts.append("ctx_a")
+ logger.info("message a")
+ handler.flush()
+
+ contexts[0] = "ctx_b"
+ logger.info("message b")
+ handler.flush()
+
+ files = sorted(os.listdir(temp_info_logs_and_cleanup_folder))
+ assert files == ["ctx_a.log", "ctx_b.log"]
+
+ with open(f"{temp_info_logs_and_cleanup_folder}/ctx_a.log", encoding="utf-8") as f:
+ assert "message a" in f.read()
+ with open(f"{temp_info_logs_and_cleanup_folder}/ctx_b.log", encoding="utf-8") as f:
+ assert "message b" in f.read()
+
+
+def test_context_based_file_handler_removes_oldest_when_max_handlers_reached(temp_info_logs_and_cleanup_folder):
+ with mock.patch.object(
+ context_based_file_handler,
+ "MAX_CONTEXT_BASED_FILE_HANDLERS_PER_CATEGORY",
+ 3,
+ ):
+ contexts = []
+
+ def rotating_provider():
+ return contexts[0] if contexts else None
+
+ handler = context_based_file_handler.ContextBasedFileHandler(
+ temp_info_logs_and_cleanup_folder, rotating_provider
+ )
+ handler.setLevel(logging.INFO)
+ logging.getLogger().addHandler(handler)
+ logger = logging.getLogger("test_logger")
+ logger.setLevel(logging.INFO)
+
+ contexts.append("ctx_1")
+ logger.info("msg 1")
+ handler.flush()
+
+ contexts[0] = "ctx_2"
+ logger.info("msg 2")
+ handler.flush()
+
+ contexts[0] = "ctx_3"
+ logger.info("msg 3")
+ handler.flush()
+
+ contexts[0] = "ctx_4"
+ logger.info("msg 4")
+ handler.flush()
+
+ assert len(handler._custom_handlers) == 3
+ assert "ctx_1" not in handler._custom_handlers
+ assert "ctx_2" in handler._custom_handlers
+ assert "ctx_3" in handler._custom_handlers
+ assert "ctx_4" in handler._custom_handlers
+
+
+def test_add_context_based_file_handler_adds_handler_to_root_logger(temp_info_logs_and_cleanup_folder):
+ file_name_provider = mock.Mock(return_value=None)
+ root = logging.getLogger()
+ initial_count = len(root.handlers)
+
+ context_based_file_handler.add_context_based_file_handler(
+ temp_info_logs_and_cleanup_folder, file_name_provider
+ )
+
+ assert len(root.handlers) == initial_count + 1
+ added = root.handlers[-1]
+ assert isinstance(added, context_based_file_handler.ContextBasedFileHandler)
+
+
+def test_context_based_file_handler_creates_logs_folder_if_missing(temp_info_logs_and_cleanup_folder):
+ nested = f"{temp_info_logs_and_cleanup_folder}/nested/logs"
+ file_name_provider = mock.Mock(return_value="ctx")
+ handler = context_based_file_handler.ContextBasedFileHandler(
+ nested, file_name_provider
+ )
+ handler.setLevel(logging.INFO)
+ logging.getLogger().addHandler(handler)
+
+ assert os.path.isdir(nested)
+ logger = logging.getLogger("test")
+ logger.setLevel(logging.INFO)
+ logger.info("msg")
+ handler.flush()
+ assert os.path.isfile(f"{nested}/ctx.log")
diff --git a/packages/commons/tests/test_asyncio_tools.py b/packages/commons/tests/test_asyncio_tools.py
index f604b2355..91494eac0 100644
--- a/packages/commons/tests/test_asyncio_tools.py
+++ b/packages/commons/tests/test_asyncio_tools.py
@@ -14,6 +14,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import asyncio
+import mock
import pytest
import octobot_commons.asyncio_tools as asyncio_tools
@@ -255,5 +256,51 @@ async def test_RLock_error_setup_2():
pass
+async def test_logged_waiter_cancels_task_on_quick_exit():
+ mock_self = mock.Mock()
+ mock_self.logger = mock.Mock()
+
+ with asyncio_tools.logged_waiter(mock_self, "quick op", sleep_time=30):
+ await asyncio.sleep(0.001)
+
+ mock_self.logger.info.assert_not_called()
+
+
+async def test_logged_waiter_logs_when_body_runs_long():
+ mock_self = mock.Mock()
+ mock_self.logger = mock.Mock()
+
+ with asyncio_tools.logged_waiter(mock_self, "long op", sleep_time=0.05):
+ await asyncio.sleep(0.15)
+
+ assert mock_self.logger.info.call_count >= 1
+ call_args = mock_self.logger.info.call_args[0][0]
+ assert "long op" in call_args
+ assert "is still processing" in call_args
+
+
+async def test_logged_waiter_cancels_on_exception():
+ mock_self = mock.Mock()
+ mock_self.logger = mock.Mock()
+
+ with pytest.raises(ValueError, match="body failed"):
+ with asyncio_tools.logged_waiter(mock_self, "failing op", sleep_time=30):
+ raise ValueError("body failed")
+
+ mock_self.logger.info.assert_not_called()
+
+
+async def test_logged_waiter_uses_custom_sleep_time():
+ mock_self = mock.Mock()
+ mock_self.logger = mock.Mock()
+
+ with mock.patch.object(asyncio, "sleep", wraps=asyncio.sleep) as mock_sleep:
+ with asyncio_tools.logged_waiter(mock_self, "custom sleep", sleep_time=0.1):
+ await asyncio.sleep(0.2)
+
+ sleep_calls = [c[0][0] for c in mock_sleep.call_args_list]
+ assert 0.1 in sleep_calls
+
+
def _exception_raiser():
raise RuntimeError("error")
diff --git a/packages/flow/BUILD b/packages/flow/BUILD
new file mode 100644
index 000000000..692c6c9db
--- /dev/null
+++ b/packages/flow/BUILD
@@ -0,0 +1,9 @@
+python_sources(name="octobot_flow", sources=["octobot_flow/**/*.py"])
+
+python_tests(
+ name="tests",
+ sources=["tests/**/test_*.py"],
+ dependencies=[
+ ":octobot_flow",
+ ],
+)
\ No newline at end of file
diff --git a/packages/flow/README.md b/packages/flow/README.md
new file mode 100644
index 000000000..8929b22fa
--- /dev/null
+++ b/packages/flow/README.md
@@ -0,0 +1,3 @@
+# Mini OctoBot
+
+OctoBot automations runner
\ No newline at end of file
diff --git a/packages/flow/octobot_flow/__init__.py b/packages/flow/octobot_flow/__init__.py
new file mode 100644
index 000000000..e85b19c38
--- /dev/null
+++ b/packages/flow/octobot_flow/__init__.py
@@ -0,0 +1,28 @@
+import octobot_commons.logging
+
+_import_tentacles = False
+try:
+ import tentacles
+ _import_tentacles = True
+except ImportError:
+ octobot_commons.logging.get_logger("octobot_flow").info(
+ "tentacles is not installed, tentacles operators will not be available"
+ )
+
+if _import_tentacles:
+ from octobot_flow.jobs.automation_job import AutomationJob
+ from octobot_flow.entities import (
+ AbstractActionDetails,
+ parse_action_details,
+ AutomationState,
+ ActionsDAG,
+ )
+
+
+ __all__ = [
+ "AutomationJob",
+ "AbstractActionDetails",
+ "parse_action_details",
+ "ActionsDAG",
+ "AutomationState",
+ ]
diff --git a/packages/flow/octobot_flow/constants.py b/packages/flow/octobot_flow/constants.py
new file mode 100644
index 000000000..31d530772
--- /dev/null
+++ b/packages/flow/octobot_flow/constants.py
@@ -0,0 +1,11 @@
+import octobot_commons.os_util as os_util
+import octobot_commons.enums as commons_enums
+import octobot_commons.constants as commons_constants
+
+
+SAVE_STATE_AFTER_EVERY_ACTION = os_util.parse_boolean_environment_var("SAVE_STATE_AFTER_EVERY_ACTION", "false")
+
+DEFAULT_EXTERNAL_TRIGGER_ONLY_NO_ORDER_TIMEFRAME = commons_enums.TimeFrames.ONE_DAY
+
+# Cache settings
+TICKER_CACHE_TTL = 5 * commons_constants.MINUTE_TO_SECONDS
diff --git a/packages/flow/octobot_flow/encryption/__init__.py b/packages/flow/octobot_flow/encryption/__init__.py
new file mode 100644
index 000000000..ec2ff1a0e
--- /dev/null
+++ b/packages/flow/octobot_flow/encryption/__init__.py
@@ -0,0 +1,8 @@
+from octobot_flow.encryption.decrypt import (
+ decrypted_bots_configurations,
+)
+
+
+__all__ = [
+ "decrypted_bots_configurations",
+]
diff --git a/packages/flow/octobot_flow/encryption/decrypt.py b/packages/flow/octobot_flow/encryption/decrypt.py
new file mode 100644
index 000000000..4cb1c523a
--- /dev/null
+++ b/packages/flow/octobot_flow/encryption/decrypt.py
@@ -0,0 +1,40 @@
+import contextlib
+
+import octobot_flow.entities
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+
+@contextlib.contextmanager
+def decrypted_bots_configurations(
+ automation_state: octobot_flow.entities.AutomationState
+):
+ try:
+ if automation_state.exchange_account_details:
+ _decrypt_exchange_credentials(automation_state.exchange_account_details.auth_details)
+ yield automation_state
+ finally:
+ if automation_state.exchange_account_details:
+ _clear_decrypted_exchange_credentials(automation_state.exchange_account_details.exchange_details)
+
+
+def _decrypt_exchange_credentials(
+ auth_details: exchange_data_import.ExchangeAuthDetails
+): # pylint: disable=undefined-variable
+ if auth_details.encrypted:
+ raise NotImplementedError("_decrypt_exchange_credentials not implemented")
+    # TODO: implement PGP decryption — the sample below is unreachable and references pgpy, base64, json and api_key, which are not imported here
+ message = pgpy.PGPMessage.from_blob(base64.b64decode(auth_details.encrypted))
+ decrypted = json.loads(message.decrypt(api_key).message)
+ auth_details.api_key = decrypted.get("apiKey", "")
+ auth_details.api_secret = decrypted.get("apiSecret", "")
+ auth_details.api_password = decrypted.get("password", "")
+ auth_details.access_token = decrypted.get("accessToken", "")
+
+
+def _clear_decrypted_exchange_credentials(
+ auth_details: exchange_data_import.ExchangeAuthDetails
+):
+ auth_details.api_key = ""
+ auth_details.api_secret = ""
+ auth_details.api_password = ""
+ auth_details.access_token = ""
diff --git a/packages/flow/octobot_flow/entities/__init__.py b/packages/flow/octobot_flow/entities/__init__.py
new file mode 100644
index 000000000..a21915806
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/__init__.py
@@ -0,0 +1,64 @@
+from octobot_flow.entities.accounts import (
+ PortfolioAssetHolding,
+ ReferenceExchangeAccountElements,
+ ClientExchangeAccountElements,
+ ExchangeAccountDetails,
+ ExchangeAccountPortfolio,
+ AccountElements,
+)
+from octobot_flow.entities.actions import (
+ ActionDependency,
+ AbstractActionDetails,
+ DSLScriptActionDetails,
+ ConfiguredActionDetails,
+ parse_action_details,
+ ActionsDAG,
+)
+from octobot_flow.entities.automations import (
+ FetchedExchangeAccountElements,
+ FetchedExchangePublicData,
+ FetchedExchangeData,
+ AutomationMetadata,
+ AutomationDetails,
+ AutomationState,
+ FetchedDependencies,
+ TriggerDetails,
+ DegradedStateDetails,
+ ExecutionDetails,
+ AdditionalActions,
+ RefreshExchangeBotsAuthenticatedDataDetails,
+ NextIterationDetails,
+ PostIterationActionsDetails,
+)
+from octobot_flow.entities.community import (
+ UserAuthentication
+)
+__all__ = [
+ "AccountElements",
+ "ReferenceExchangeAccountElements",
+ "ClientExchangeAccountElements",
+ "PortfolioAssetHolding",
+ "ExchangeAccountDetails",
+ "ExchangeAccountPortfolio",
+ "ActionDependency",
+ "AbstractActionDetails",
+ "DSLScriptActionDetails",
+ "ConfiguredActionDetails",
+ "parse_action_details",
+ "ActionsDAG",
+ "RefreshExchangeBotsAuthenticatedDataDetails",
+ "NextIterationDetails",
+ "PostIterationActionsDetails",
+ "FetchedExchangeAccountElements",
+ "FetchedExchangePublicData",
+ "FetchedExchangeData",
+ "AutomationMetadata",
+ "AutomationDetails",
+ "AutomationState",
+ "FetchedDependencies",
+ "TriggerDetails",
+ "DegradedStateDetails",
+ "ExecutionDetails",
+ "AdditionalActions",
+ "UserAuthentication",
+]
diff --git a/packages/flow/octobot_flow/entities/accounts/__init__.py b/packages/flow/octobot_flow/entities/accounts/__init__.py
new file mode 100644
index 000000000..e9872b8b9
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/accounts/__init__.py
@@ -0,0 +1,14 @@
+from octobot_flow.entities.accounts.portfolio_asset_holdings import PortfolioAssetHolding
+from octobot_flow.entities.accounts.reference_exchange_account_elements import ReferenceExchangeAccountElements
+from octobot_flow.entities.accounts.client_exchange_account_elements import ClientExchangeAccountElements
+from octobot_flow.entities.accounts.exchange_account_details import ExchangeAccountDetails, ExchangeAccountPortfolio
+from octobot_flow.entities.accounts.account_elements import AccountElements
+
+__all__ = [
+ "PortfolioAssetHolding",
+ "ReferenceExchangeAccountElements",
+ "ClientExchangeAccountElements",
+ "ExchangeAccountDetails",
+ "ExchangeAccountPortfolio",
+ "AccountElements",
+]
diff --git a/packages/flow/octobot_flow/entities/accounts/account_elements.py b/packages/flow/octobot_flow/entities/accounts/account_elements.py
new file mode 100644
index 000000000..ba5bf0be3
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/accounts/account_elements.py
@@ -0,0 +1,20 @@
+import dataclasses
+import typing
+
+import octobot_commons.dataclasses
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+
+
+@dataclasses.dataclass
+class AccountElements(octobot_commons.dataclasses.MinimizableDataclass, octobot_commons.dataclasses.UpdatableDataclass):
+ """
+    Base account state of an automation (name, portfolio, transactions). Only contains sharable data
+ """
+ name: typing.Optional[str] = None
+ portfolio: exchange_data_import.PortfolioDetails = dataclasses.field(default_factory=exchange_data_import.PortfolioDetails)
+ transactions: list[dict] = dataclasses.field(default_factory=list)
+
+ def __post_init__(self):
+ if self.portfolio and isinstance(self.portfolio, dict):
+ self.portfolio = exchange_data_import.PortfolioDetails.from_dict(self.portfolio)
diff --git a/packages/flow/octobot_flow/entities/accounts/client_exchange_account_elements.py b/packages/flow/octobot_flow/entities/accounts/client_exchange_account_elements.py
new file mode 100644
index 000000000..f972da620
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/accounts/client_exchange_account_elements.py
@@ -0,0 +1,35 @@
+import dataclasses
+
+import octobot_trading.exchanges
+import octobot_trading.api
+
+import octobot_flow.enums
+import octobot_flow.entities.accounts.reference_exchange_account_elements as reference_exchange_account_elements_import
+
+
+@dataclasses.dataclass
+class ClientExchangeAccountElements(reference_exchange_account_elements_import.ReferenceExchangeAccountElements):
+ """
+ Defines the local exchange account state of an automation. Contains private data specific to this client.
+ """
+ trades: list[dict] = dataclasses.field(default_factory=list)
+
+ def __post_init__(self):
+ super().__post_init__()
+ if self.trades and isinstance(self.trades[0], dict):
+ self.trades = [
+ dict(trade) for trade in self.trades # type: ignore
+ ]
+
+ def sync_from_exchange_manager(
+ self, exchange_manager: octobot_trading.exchanges.ExchangeManager
+ ) -> list[octobot_flow.enums.ChangedElements]:
+ changed_elements = super().sync_from_exchange_manager(exchange_manager)
+ if self._sync_trades_from_exchange_manager(exchange_manager):
+ changed_elements.append(octobot_flow.enums.ChangedElements.TRADES)
+ return changed_elements
+
+ def _sync_trades_from_exchange_manager(self, exchange_manager: octobot_trading.exchanges.ExchangeManager) -> bool:
+ previous_trades = self.trades
+ self.trades = octobot_trading.api.get_trade_history(exchange_manager, as_dict=True)
+ return previous_trades != self.trades
diff --git a/packages/flow/octobot_flow/entities/accounts/exchange_account_details.py b/packages/flow/octobot_flow/entities/accounts/exchange_account_details.py
new file mode 100644
index 000000000..f64731051
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/accounts/exchange_account_details.py
@@ -0,0 +1,51 @@
+import dataclasses
+import typing
+import decimal
+
+import octobot_commons.dataclasses
+import octobot_commons.profiles.profile_data as profile_data_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+import octobot_flow.entities.accounts.portfolio_asset_holdings as portfolio_asset_holdings_import
+
+
+@dataclasses.dataclass
+class ExchangeAccountPortfolio(octobot_commons.dataclasses.MinimizableDataclass, octobot_commons.dataclasses.UpdatableDataclass):
+ content: list[portfolio_asset_holdings_import.PortfolioAssetHolding] = dataclasses.field(default_factory=list)
+ unit: str = ""
+
+
+@dataclasses.dataclass
+class ExchangeAccountDetails(octobot_commons.dataclasses.MinimizableDataclass):
+ exchange_details: profile_data_import.ExchangeData = dataclasses.field(
+ default_factory=profile_data_import.ExchangeData, repr=True
+ )
+ auth_details: exchange_data_import.ExchangeAuthDetails = dataclasses.field(default_factory=exchange_data_import.ExchangeAuthDetails, repr=False)
+ portfolio: ExchangeAccountPortfolio = dataclasses.field(default_factory=ExchangeAccountPortfolio, repr=True)
+
+ def to_minimal_exchange_data(self, portfolio: typing.Optional[dict[str, dict[str, decimal.Decimal]]]) -> exchange_data_import.ExchangeData:
+ exchange_data = exchange_data_import.ExchangeData(
+ exchange_details=exchange_data_import.ExchangeDetails(
+ name=self.exchange_details.internal_name, # type: ignore
+ ),
+ auth_details=self.auth_details,
+ )
+ if portfolio:
+ exchange_data.portfolio_details.content = portfolio # type: ignore
+ return exchange_data
+
+ def is_simulated(self) -> bool:
+ return not (
+ self.auth_details.api_key
+ or self.auth_details.api_secret
+ or self.auth_details.api_password
+ or self.auth_details.access_token
+ or self.auth_details.encrypted
+ )
+
+ def __post_init__(self):
+ if self.portfolio and isinstance(self.portfolio, dict):
+ self.portfolio = ExchangeAccountPortfolio.from_dict(self.portfolio)
+ if self.exchange_details and isinstance(self.exchange_details, dict):
+ self.exchange_details = profile_data_import.ExchangeData.from_dict(self.exchange_details)
+ if self.auth_details and isinstance(self.auth_details, dict):
+ self.auth_details = exchange_data_import.ExchangeAuthDetails.from_dict(self.auth_details)
diff --git a/packages/flow/octobot_flow/entities/accounts/portfolio_asset_holdings.py b/packages/flow/octobot_flow/entities/accounts/portfolio_asset_holdings.py
new file mode 100644
index 000000000..1bdc09b95
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/accounts/portfolio_asset_holdings.py
@@ -0,0 +1,31 @@
+import dataclasses
+import typing
+import decimal
+
+import octobot_commons.dataclasses
+import octobot_commons.constants
+import octobot_trading.constants
+
+@dataclasses.dataclass
+class PortfolioAssetHolding(octobot_commons.dataclasses.FlexibleDataclass):
+ asset: str
+ available: float
+ total: float
+ value: float = 0
+ unlocked_available: typing.Optional[float] = None
+ unlocked_total: typing.Optional[float] = None
+ unlocked_value: typing.Optional[float] = None
+
+ def to_portfolio_asset_dict(self, zeroize_negative_values: bool) -> dict[str, decimal.Decimal]:
+ formatted = {
+ octobot_commons.constants.PORTFOLIO_AVAILABLE: decimal.Decimal(str(self.available)),
+ octobot_commons.constants.PORTFOLIO_TOTAL: decimal.Decimal(str(self.total)),
+ }
+ if zeroize_negative_values:
+ if formatted[octobot_commons.constants.PORTFOLIO_TOTAL] < octobot_trading.constants.ZERO:
+ # total can't be negative
+ formatted[octobot_commons.constants.PORTFOLIO_TOTAL] = octobot_trading.constants.ZERO
+ if formatted[octobot_commons.constants.PORTFOLIO_AVAILABLE] > formatted[octobot_commons.constants.PORTFOLIO_TOTAL]:
+ # available can't be greater than total
+ formatted[octobot_commons.constants.PORTFOLIO_AVAILABLE] = formatted[octobot_commons.constants.PORTFOLIO_TOTAL]
+ return formatted
diff --git a/packages/flow/octobot_flow/entities/accounts/reference_exchange_account_elements.py b/packages/flow/octobot_flow/entities/accounts/reference_exchange_account_elements.py
new file mode 100644
index 000000000..7a6f65b17
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/accounts/reference_exchange_account_elements.py
@@ -0,0 +1,97 @@
+import dataclasses
+
+import octobot_commons.logging
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+import octobot_trading.exchanges
+import octobot_trading.storage.orders_storage
+import octobot_trading.api
+import octobot_trading.enums
+import octobot_trading.constants
+import octobot_trading.personal_data
+
+import octobot_flow.enums
+import octobot_flow.entities.accounts.account_elements as account_elements_import
+
+
+
+
+@dataclasses.dataclass
+class ReferenceExchangeAccountElements(account_elements_import.AccountElements):
+ """
+    Reference (ideal) exchange account state of an automation: portfolio plus orders and positions. Only contains sharable data
+ """
+ orders: exchange_data_import.OrdersDetails = dataclasses.field(default_factory=exchange_data_import.OrdersDetails)
+ positions: list[exchange_data_import.PositionDetails] = dataclasses.field(default_factory=list)
+
+ def __post_init__(self):
+ super().__post_init__()
+ if self.orders and isinstance(self.orders, dict):
+ self.orders = exchange_data_import.OrdersDetails.from_dict(self.orders)
+ if self.positions and isinstance(self.positions[0], dict):
+ self.positions = [
+ exchange_data_import.PositionDetails.from_dict(position) for position in self.positions # type: ignore
+ ]
+
+ def has_pending_chained_orders(self) -> bool:
+ for order in self.orders.missing_orders:
+ if order.get(octobot_trading.constants.STORAGE_ORIGIN_VALUE, {}).get(octobot_trading.enums.StoredOrdersAttr.CHAINED_ORDERS.value):
+ return True
+ return False
+
+ def has_pending_groups(self) -> bool:
+ # TODO
+ return False
+
+ def sync_from_exchange_manager(
+ self, exchange_manager: octobot_trading.exchanges.ExchangeManager
+ ) -> list[octobot_flow.enums.ChangedElements]:
+ changed_elements = []
+ if self.sync_orders_from_exchange_manager(exchange_manager):
+ changed_elements.append(octobot_flow.enums.ChangedElements.ORDERS)
+ if self.sync_portfolio_from_exchange_manager(exchange_manager):
+ changed_elements.append(octobot_flow.enums.ChangedElements.PORTFOLIO)
+ if self.sync_positions_from_exchange_manager(exchange_manager):
+ changed_elements.append(octobot_flow.enums.ChangedElements.POSITIONS)
+ return changed_elements
+
+ def sync_orders_from_exchange_manager(self, exchange_manager: octobot_trading.exchanges.ExchangeManager) -> bool:
+        previous_orders = dataclasses.replace(self.orders)
+ updated_open_orders_exchange_ids = set()
+ updated_open_orders = []
+ updated_missing_orders = []
+ for order in octobot_trading.api.get_open_orders(exchange_manager):
+ if order.is_self_managed():
+ octobot_commons.logging.get_logger(self.__class__.__name__).error(
+ f"Self managed order created. This type of [{exchange_manager.exchange_name}] "
+ f"order is not supported, order is ignored. Order: {order}"
+ )
+ continue
+ updated_open_orders_exchange_ids.add(order.exchange_order_id)
+ updated_open_orders.append(
+ octobot_trading.storage.orders_storage._format_order(order, exchange_manager)
+ )
+ updated_missing_orders = [
+ order
+ for exchange_id, order in octobot_trading.personal_data.get_enriched_orders_by_exchange_id(previous_orders.open_orders).items()
+ if exchange_id not in updated_open_orders_exchange_ids
+ ]
+ self.orders.open_orders = updated_open_orders
+ self.orders.missing_orders = updated_missing_orders
+ return previous_orders != self.orders
+
+ def sync_portfolio_from_exchange_manager(self, exchange_manager: octobot_trading.exchanges.ExchangeManager) -> bool:
+ previous_portfolio = self.portfolio.content
+ self.portfolio.content = {
+ key: values
+ for key, values in octobot_trading.api.get_portfolio(exchange_manager, as_decimal=False).items()
+ if any(value for value in values.values()) # skip 0 value assets
+ }
+ return previous_portfolio != self.portfolio.content
+
+ def sync_positions_from_exchange_manager(self, exchange_manager: octobot_trading.exchanges.ExchangeManager) -> bool:
+ previous_positions = self.positions
+ self.positions = [
+ exchange_data_import.PositionDetails(position.to_dict(), position.symbol_contract.to_dict())
+ for position in octobot_trading.api.get_positions(exchange_manager)
+ ]
+ return previous_positions != self.positions
diff --git a/packages/flow/octobot_flow/entities/actions/__init__.py b/packages/flow/octobot_flow/entities/actions/__init__.py
new file mode 100644
index 000000000..567fcf372
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/actions/__init__.py
@@ -0,0 +1,17 @@
+from octobot_flow.entities.actions.action_details import (
+ ActionDependency,
+ AbstractActionDetails,
+ DSLScriptActionDetails,
+ ConfiguredActionDetails,
+ parse_action_details,
+)
+from octobot_flow.entities.actions.actions_dag import ActionsDAG
+
+__all__ = [
+ "ActionDependency",
+ "AbstractActionDetails",
+ "DSLScriptActionDetails",
+ "ConfiguredActionDetails",
+ "parse_action_details",
+ "ActionsDAG",
+]
diff --git a/packages/flow/octobot_flow/entities/actions/action_details.py b/packages/flow/octobot_flow/entities/actions/action_details.py
new file mode 100644
index 000000000..75c1f4e2f
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/actions/action_details.py
@@ -0,0 +1,131 @@
+import dataclasses
+import typing
+import time
+
+import octobot_commons.dsl_interpreter
+import octobot_commons.dataclasses
+import octobot_flow.enums
+import octobot_flow.errors
+
+@dataclasses.dataclass
+class ActionDependency(octobot_commons.dataclasses.FlexibleDataclass):
+ # id of the action this dependency is on
+ action_id: str = dataclasses.field(repr=True)
+ # value of the dependency result. Used by an action to resolve its own DSL script when it has dependencies
+ parameter: typing.Optional[str] = dataclasses.field(default=None, repr=False)
+
+
+@dataclasses.dataclass
+class AbstractActionDetails(octobot_commons.dataclasses.FlexibleDataclass):
+ # unique id of the action
+ id: str = dataclasses.field(repr=True)
+ # result of the action. Set after the action is executed
+ result: typing.Optional[
+ octobot_commons.dsl_interpreter.ComputedOperatorParameterType
+ ] = dataclasses.field(default=None, repr=True)
+ # error status of the action. Set after the action is executed, in case an error occured
+ error_status: typing.Optional[str] = dataclasses.field(default=None, repr=True) # ActionErrorStatus
+ # time at which the action was executed
+ executed_at: typing.Optional[float] = dataclasses.field(default=None, repr=True)
+ # dependencies of this action. If an action has dependencies, it will not be executed until all its dependencies are completed
+ dependencies: list["ActionDependency"] = dataclasses.field(default_factory=list, repr=True)
+ # id of the action to reset the DAG to. If set, will reset the DAG to this action after this action is completed.
+ reset_target_action_id: typing.Optional[str] = dataclasses.field(default=None, repr=False)
+ # result of the previous execution of this action. Used when the action is reset
+ previous_execution_result: typing.Optional[dict] = dataclasses.field(default=None, repr=False)
+
+ def __post_init__(self):
+ if self.dependencies:
+ self.dependencies = [
+ ActionDependency.from_dict(dependency) if
+ isinstance(dependency, dict) else dependency
+ for dependency in self.dependencies
+ ]
+
+ def complete(
+ self,
+ result: typing.Optional[dict] = None,
+ error_status: typing.Optional[str] = None,
+ ):
+ self.executed_at = time.time()
+ if result:
+ self.result = result
+ if error_status:
+ self.error_status = error_status
+
+ def is_completed(self) -> bool:
+ return self.executed_at is not None
+
+ def update_execution_details(self, action: "AbstractActionDetails"):
+ self.result = action.result
+ self.executed_at = action.executed_at
+ self.error_status = action.error_status
+
+ def should_be_historised_in_database(self) -> bool:
+ return False
+
+ def add_dependency(self, action_id: str, parameter: typing.Optional[str] = None):
+ self.dependencies.append(ActionDependency(action_id, parameter))
+
+ def get_summary(self, minimal: bool = False) -> str:
+ raise NotImplementedError("get_summary is not implemented for this bot action type")
+
+ def get_rescheduled_parameters(self) -> dict:
+ rescheduled_parameters = {}
+ if self.previous_execution_result:
+ if octobot_commons.dsl_interpreter.ReCallingOperatorResult.is_re_calling_operator_result(
+ self.previous_execution_result
+ ):
+ rescheduled_parameters[
+ octobot_commons.dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY
+ ] = self.previous_execution_result
+ return rescheduled_parameters
+
+ def reset(self):
+ self.previous_execution_result = self.result
+ self.result = None
+ self.error_status = None
+ self.executed_at = None
+
+
+@dataclasses.dataclass
+class DSLScriptActionDetails(AbstractActionDetails):
+ # DSL script to execute
+ dsl_script: typing.Optional[str] = dataclasses.field(default=None, repr=True) # should be set to the DSL script
+ # resolved DSL script. self.dsl_script where all the dependencies have been replaced by their actual values
+ resolved_dsl_script: typing.Optional[str] = dataclasses.field(default=None, repr=False) # should be set to the resolved DSL script
+
+ def get_summary(self, minimal: bool = False) -> str:
+ if minimal:
+ # only return the first operator name
+ return str(self.dsl_script).split("(")[0]
+ return str(self.dsl_script)
+
+ def get_resolved_dsl_script(self) -> str:
+ if not self.resolved_dsl_script:
+ raise octobot_flow.errors.UnresolvedDSLScriptError(f"Resolved DSL script is not set: {self.resolved_dsl_script}")
+ if octobot_commons.dsl_interpreter.has_unresolved_parameters(self.resolved_dsl_script):
+ raise octobot_flow.errors.UnresolvedDSLScriptError(f"Resolved DSL script has unresolved parameters: {self.resolved_dsl_script}")
+ return self.resolved_dsl_script
+
+ def clear_resolved_dsl_script(self):
+ self.resolved_dsl_script = None
+
+
+@dataclasses.dataclass
+class ConfiguredActionDetails(AbstractActionDetails):
+ # type of the action. Must be an ActionType
+ action: str = dataclasses.field(default=octobot_flow.enums.ActionType.UNKNOWN.value, repr=True)
+ # configuration of the action. A dict specific to the action type
+ config: typing.Optional[dict] = dataclasses.field(default=None, repr=False)
+
+ def get_summary(self, minimal: bool = False) -> str:
+ return self.action
+
+
+def parse_action_details(action_details: dict) -> AbstractActionDetails:
+ if "dsl_script" in action_details:
+ return DSLScriptActionDetails.from_dict(action_details)
+ elif "action" in action_details:
+ return ConfiguredActionDetails.from_dict(action_details)
+ raise ValueError(f"Invalid action details: {action_details}")
diff --git a/packages/flow/octobot_flow/entities/actions/actions_dag.py b/packages/flow/octobot_flow/entities/actions/actions_dag.py
new file mode 100644
index 000000000..c1b75b472
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/actions/actions_dag.py
@@ -0,0 +1,148 @@
+import dataclasses
+
+import octobot_commons.dsl_interpreter
+import octobot_commons.dataclasses
+
+
+import octobot_flow.entities.actions.action_details as action_details
+import octobot_flow.enums
+import octobot_flow.errors
+
+
+@dataclasses.dataclass
+class ActionsDAG(octobot_commons.dataclasses.FlexibleDataclass):
+ actions: list[action_details.AbstractActionDetails] = dataclasses.field(default_factory=list)
+
+ def __post_init__(self):
+ self.actions = [
+ action_details.parse_action_details(action) if isinstance(action, dict) else action
+ for action in self.actions
+ ]
+
+ def add_action(self, action: action_details.AbstractActionDetails):
+ self.actions.append(action)
+
+ def __bool__(self) -> bool:
+ return bool(self.actions)
+
+ def get_actions_by_id(self) -> dict[str, action_details.AbstractActionDetails]:
+ return {
+ action.id: action for action in self.actions
+ }
+
+ def update_actions_results(self, actions: list[action_details.AbstractActionDetails]):
+ actions_by_id = self.get_actions_by_id()
+ for action in actions:
+ actions_by_id[action.id].update_execution_details(action)
+
+ def get_executable_actions(self) -> list[action_details.AbstractActionDetails]:
+ """Return actions that can be executed: not yet executed, and either have no
+ dependencies or all their dependencies have results (executed_at is set).
+ """
+ return [
+ action
+ for action in self.actions
+ if not action.is_completed() and self.filled_all_dependencies(action)
+ ]
+
+ def completed_all_actions(self) -> bool:
+ return all(action.is_completed() for action in self.actions)
+
+ def get_pending_actions(self) -> list[action_details.AbstractActionDetails]:
+ return [
+ action
+ for action in self.actions
+ if not action.is_completed()
+ ]
+
+ def _get_dependents_map(self) -> dict[str, set[str]]:
+ """Return a map: action_id -> set of action_ids that directly depend on it."""
+ dependents: dict[str, set[str]] = {action.id: set() for action in self.actions}
+ for action in self.actions:
+ for dep in action.dependencies:
+ dependents.setdefault(dep.action_id, set()).add(action.id)
+ return dependents
+
+ def _get_transitive_dependents(self, action_id: str, dependents_map: dict[str, set[str]]) -> set[str]:
+ """Return all action_ids that depend on the given action_id (directly or indirectly)."""
+ result: set[str] = set()
+ to_visit = [action_id]
+ visited: set[str] = set()
+ while to_visit:
+ current = to_visit.pop()
+ if current in visited:
+ continue
+ visited.add(current)
+ for dependent_id in dependents_map.get(current, set()):
+ if dependent_id not in visited:
+ result.add(dependent_id)
+ to_visit.append(dependent_id)
+ return result
+
+ def reset_to(self, action_id: str):
+ """
+ Reset the action identified by action_id and all DAG actions that depend
+ directly or indirectly from this action.
+ """
+ actions_by_id = self.get_actions_by_id()
+ if action_id not in actions_by_id:
+ raise octobot_flow.errors.ActionDependencyNotFoundError(
+ f"Action {action_id} not found in DAG"
+ )
+ dependents_map = self._get_dependents_map()
+ to_reset = self._get_transitive_dependents(action_id, dependents_map) | {action_id}
+ for aid in to_reset:
+ actions_by_id[aid].reset()
+
+ def filled_all_dependencies(self, action: action_details.AbstractActionDetails) -> bool:
+ try:
+ actions_by_id = self.get_actions_by_id()
+ return all(
+ actions_by_id[dep.action_id].is_completed()
+ for dep in action.dependencies
+ )
+ except KeyError as err:
+ raise octobot_flow.errors.ActionDependencyNotFoundError(
+ f"Action {action.id} has dependencies with unknown action IDs: {err}"
+ ) from err
+
+ def resolve_dsl_scripts(
+ self, actions: list[action_details.AbstractActionDetails]
+ ):
+ """
+        Resolve, in place, the DSL script of each given DSLScriptActionDetails
+        action, substituting dependency results into its resolved_dsl_script.
+ """
+ actions_by_id = self.get_actions_by_id()
+ for action in actions:
+ if isinstance(action, action_details.DSLScriptActionDetails):
+ self._resolve_dsl_script(action, actions_by_id)
+
+ def _resolve_dsl_script(
+ self,
+ action: action_details.DSLScriptActionDetails,
+ actions_by_id: dict[str, action_details.AbstractActionDetails]
+ ):
+ resolved_dsl_script = str(action.dsl_script)
+ for dependency in action.dependencies:
+ dependency_action = actions_by_id[dependency.action_id]
+            if dependency_action.error_status and dependency_action.error_status != octobot_flow.enums.ActionErrorStatus.NO_ERROR.value:
+ raise octobot_flow.errors.ActionDependencyError(
+ f"Dependency {dependency.parameter} returned an error: {dependency_action.error_status}"
+ )
+ if not dependency.parameter:
+ # no parameter name: this dependency is not a parameter: it just needs to have been executed
+ continue
+ resolved_dsl_script = octobot_commons.dsl_interpreter.apply_resolved_parameter_value(
+ resolved_dsl_script, dependency.parameter, dependency_action.result
+ )
+ for rescheduled_parameter, rescheduled_value in action.get_rescheduled_parameters().items():
+ resolved_dsl_script = octobot_commons.dsl_interpreter.add_resolved_parameter_value(
+ resolved_dsl_script, rescheduled_parameter, rescheduled_value
+ )
+ action.resolved_dsl_script = resolved_dsl_script
+
+ def __repr__(self) -> str:
+ return (
+ f"ActionsDAG([{len(self.actions)}]: {', '.join([str(action) for action in self.actions])})"
+ )
diff --git a/packages/flow/octobot_flow/entities/automations/__init__.py b/packages/flow/octobot_flow/entities/automations/__init__.py
new file mode 100644
index 000000000..2b45a3253
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/automations/__init__.py
@@ -0,0 +1,38 @@
+from octobot_flow.entities.automations.fetched_exchange_data import (
+ FetchedExchangeAccountElements,
+ FetchedExchangePublicData,
+ FetchedExchangeData,
+)
+from octobot_flow.entities.automations.automation_details import (
+ AutomationMetadata,
+ AutomationDetails,
+)
+from octobot_flow.entities.automations.automation_state import AutomationState
+from octobot_flow.entities.automations.fetched_dependencies import FetchedDependencies
+from octobot_flow.entities.automations.execution_details import (
+ TriggerDetails,
+ DegradedStateDetails,
+ ExecutionDetails,
+)
+from octobot_flow.entities.automations.additional_actions import AdditionalActions
+from octobot_flow.entities.automations.post_iteration_actions_details import (
+ RefreshExchangeBotsAuthenticatedDataDetails,
+ NextIterationDetails,
+ PostIterationActionsDetails,
+)
+__all__ = [
+ "FetchedExchangeAccountElements",
+ "FetchedExchangePublicData",
+ "FetchedExchangeData",
+ "AutomationMetadata",
+ "AutomationDetails",
+ "AutomationState",
+ "FetchedDependencies",
+ "TriggerDetails",
+ "DegradedStateDetails",
+ "ExecutionDetails",
+ "AdditionalActions",
+ "RefreshExchangeBotsAuthenticatedDataDetails",
+ "NextIterationDetails",
+ "PostIterationActionsDetails",
+]
diff --git a/packages/flow/octobot_flow/entities/automations/additional_actions.py b/packages/flow/octobot_flow/entities/automations/additional_actions.py
new file mode 100644
index 000000000..c256e2e52
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/automations/additional_actions.py
@@ -0,0 +1,22 @@
+import dataclasses
+import octobot_commons.dataclasses
+
+
+@dataclasses.dataclass
+class AdditionalActions(octobot_commons.dataclasses.FlexibleDataclass):
+ # todo implement this when necessary
+ check_min_portfolio: bool = False
+ optimize_portfolio: bool = False
+ optimize_portfolio_for_restart: bool = False
+ trigger_initial_orders: bool = False
+ minimum_wait_time_before_next_iteration: float = 0
+
+ @classmethod
+ def default_iteration(cls):
+ return cls(
+ check_min_portfolio=False, optimize_portfolio=False,
+ optimize_portfolio_for_restart=False, trigger_initial_orders=False
+ )
+
+ def has_trading_actions(self) -> bool:
+ return self.optimize_portfolio or self.optimize_portfolio_for_restart
diff --git a/packages/flow/octobot_flow/entities/automations/automation_details.py b/packages/flow/octobot_flow/entities/automations/automation_details.py
new file mode 100644
index 000000000..ebd74c357
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/automations/automation_details.py
@@ -0,0 +1,66 @@
+import dataclasses
+import typing
+
+import octobot_commons.dataclasses
+
+import octobot_flow.entities.automations.execution_details as execution_details_import
+import octobot_flow.entities.accounts.reference_exchange_account_elements as reference_exchange_account_elements_import
+import octobot_flow.entities.accounts.client_exchange_account_elements as client_exchange_account_elements_import
+import octobot_flow.entities.actions.actions_dag as actions_dag_import
+import octobot_flow.entities.accounts.account_elements as account_elements_import
+import octobot_flow.entities.automations.post_iteration_actions_details as post_iteration_actions_details_import
+
+
+@dataclasses.dataclass
+class AutomationMetadata(octobot_commons.dataclasses.MinimizableDataclass):
+ automation_id: str = dataclasses.field(default="", repr=True)
+
+
+@dataclasses.dataclass
+class AutomationDetails(octobot_commons.dataclasses.MinimizableDataclass, octobot_commons.dataclasses.UpdatableDataclass):
+ """
+ Defines an automation made of:
+ - An actions DAG defining the actions to be executed as DSL or configured actions
+ This actions DAG also defines bot strategies in the form of a keyword with parameters
+ - Reference exchange account elements if relevant => ideal strategy exchange state, shared to others
+ - Client's local exchange account elements if relevant ((sub)portfolio, orders, positions, trades, ...)
+ - Extra accounts elements if any (blockchain wallets, etc.)
+ - Current and previous execution details
+ - Post actions if any (local to an iteration)
+ """
+
+ metadata: AutomationMetadata = dataclasses.field(default_factory=AutomationMetadata, repr=True)
+ actions_dag: actions_dag_import.ActionsDAG = dataclasses.field(default_factory=actions_dag_import.ActionsDAG, repr=True)
+ reference_exchange_account_elements: typing.Optional[reference_exchange_account_elements_import.ReferenceExchangeAccountElements] = dataclasses.field(default=None, repr=True)
+ client_exchange_account_elements: typing.Optional[client_exchange_account_elements_import.ClientExchangeAccountElements] = dataclasses.field(default=None, repr=True)
+ extra_accounts: list[account_elements_import.AccountElements] = dataclasses.field(default_factory=list, repr=True)
+ execution: execution_details_import.ExecutionDetails = dataclasses.field(default_factory=execution_details_import.ExecutionDetails, repr=False)
+ post_actions: post_iteration_actions_details_import.PostIterationActionsDetails = dataclasses.field(default_factory=post_iteration_actions_details_import.PostIterationActionsDetails, repr=False)
+
+ def __post_init__(self):
+ if self.metadata and isinstance(self.metadata, dict):
+ self.metadata = AutomationMetadata.from_dict(self.metadata)
+ if self.execution and isinstance(self.execution, dict):
+ self.execution = execution_details_import.ExecutionDetails.from_dict(self.execution)
+ if self.reference_exchange_account_elements and isinstance(self.reference_exchange_account_elements, dict):
+ self.reference_exchange_account_elements = reference_exchange_account_elements_import.ReferenceExchangeAccountElements.from_dict(self.reference_exchange_account_elements)
+ if self.client_exchange_account_elements and isinstance(self.client_exchange_account_elements, dict):
+ self.client_exchange_account_elements = client_exchange_account_elements_import.ClientExchangeAccountElements.from_dict(self.client_exchange_account_elements)
+ if self.extra_accounts and isinstance(self.extra_accounts[0], dict):
+ self.extra_accounts = [
+ account_elements_import.AccountElements.from_dict(account)
+ for account in self.extra_accounts
+ ]
+ if self.post_actions and isinstance(self.post_actions, dict):
+ self.post_actions = post_iteration_actions_details_import.PostIterationActionsDetails.from_dict(self.post_actions)
+
+ def get_exchange_account_elements(self, as_reference_account: bool) -> typing.Union[
+ reference_exchange_account_elements_import.ReferenceExchangeAccountElements, client_exchange_account_elements_import.ClientExchangeAccountElements
+ ]:
+ return (
+ self.reference_exchange_account_elements
+ if as_reference_account else self.client_exchange_account_elements
+ ) # type: ignore
+
+ def runs_on_reference_exchange_account_first(self) -> bool:
+ return False # TODO return True when the automation should run on the reference exchange account first and then copied by the client
diff --git a/packages/flow/octobot_flow/entities/automations/automation_state.py b/packages/flow/octobot_flow/entities/automations/automation_state.py
new file mode 100644
index 000000000..a1b9a657f
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/automations/automation_state.py
@@ -0,0 +1,84 @@
+# This file is part of OctoBot Node (https://github.com/Drakkar-Software/OctoBot-Node)
+# Copyright (c) 2025 Drakkar-Software, All rights reserved.
+#
+# OctoBot is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# OctoBot is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public
+# License along with OctoBot. If not, see <https://www.gnu.org/licenses/>.
+
+import dataclasses
+import typing
+import decimal
+
+import octobot_commons.dataclasses
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+import octobot_flow.entities.accounts.exchange_account_details as exchange_account_details_import
+import octobot_flow.entities.automations.automation_details as automation_details_import
+import octobot_flow.errors
+import octobot_flow.entities.actions.action_details as action_details_import
+import octobot_flow.entities.actions.actions_dag as actions_dag_import
+
+
+def required_exchange_account_details(func: typing.Callable) -> typing.Callable:
+ def required_exchange_account_details_wrapper(self, *args, **kwargs):
+ if not self.exchange_account_details:
+ raise octobot_flow.errors.NoExchangeAccountDetailsError("Exchange account details are required")
+ return func(self, *args, **kwargs)
+ return required_exchange_account_details_wrapper
+
+
+@dataclasses.dataclass
+class AutomationState(octobot_commons.dataclasses.MinimizableDataclass):
+ """
+    Defines the state of a single automation, optionally associated with an exchange account.
+ """
+ automation: automation_details_import.AutomationDetails = dataclasses.field(default_factory=automation_details_import.AutomationDetails, repr=True)
+ exchange_account_details: typing.Optional[exchange_account_details_import.ExchangeAccountDetails] = dataclasses.field(default=None, repr=True)
+ priority_actions: list[action_details_import.AbstractActionDetails] = dataclasses.field(default_factory=list, repr=True)
+
+ def update_automation_actions(self, actions: list[action_details_import.AbstractActionDetails]):
+ existing_actions = self.automation.actions_dag.get_actions_by_id()
+ for action in actions:
+ if action.id not in existing_actions:
+ self.automation.actions_dag.add_action(action)
+
+ def has_exchange(self) -> bool:
+ return bool(
+ self.exchange_account_details is not None
+ and self.exchange_account_details.exchange_details.internal_name
+ )
+
+ @required_exchange_account_details
+ def to_minimal_exchange_data(self, _automation_id: typing.Optional[str] = None) -> exchange_data_import.ExchangeData:
+ return self.exchange_account_details.to_minimal_exchange_data(
+ self._get_automation_portfolio()
+ )
+
+ def _get_automation_portfolio(self) -> dict[str, dict[str, decimal.Decimal]]:
+ elements = self.automation.get_exchange_account_elements(False)
+ return elements.portfolio.content if elements else {} # type: ignore
+
+ def update_priority_actions(self, added_actions: list[action_details_import.AbstractActionDetails]):
+ included_action_ids = set(
+ action.id for action in self.priority_actions
+ )
+ self.priority_actions.extend(
+ action
+ for action in added_actions
+ if action.id not in included_action_ids
+ )
+
+ def __post_init__(self):
+ if self.automation and isinstance(self.automation, dict):
+ self.automation = automation_details_import.AutomationDetails.from_dict(self.automation)
+ if self.exchange_account_details and isinstance(self.exchange_account_details, dict):
+ self.exchange_account_details = exchange_account_details_import.ExchangeAccountDetails.from_dict(self.exchange_account_details)
diff --git a/packages/flow/octobot_flow/entities/automations/execution_details.py b/packages/flow/octobot_flow/entities/automations/execution_details.py
new file mode 100644
index 000000000..06f5ded47
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/automations/execution_details.py
@@ -0,0 +1,74 @@
+import dataclasses
+import time
+import typing
+
+import octobot_commons.dataclasses
+import octobot_flow.enums
+import octobot_flow.entities.automations.additional_actions as additional_actions_import
+
+
+@dataclasses.dataclass
+class TriggerDetails(octobot_commons.dataclasses.MinimizableDataclass):
+ scheduled_to: float = 0
+ triggered_at: float = 0
+ trigger_reason: typing.Optional[str] = None
+ additional_actions: additional_actions_import.AdditionalActions = dataclasses.field(default_factory=additional_actions_import.AdditionalActions)
+ custom_action_ids: list[str] = dataclasses.field(default_factory=list)
+ signal_ids: list[str] = dataclasses.field(default_factory=list)
+ strategy_executed_at: float = 0
+ was_completed: bool = False
+
+ def __post_init__(self):
+ if self.additional_actions and isinstance(self.additional_actions, dict):
+ self.additional_actions = additional_actions_import.AdditionalActions.from_dict(self.additional_actions)
+
+ def has_custom_actions_or_signals_to_fetch(self) -> bool:
+ return bool(
+ self.trigger_reason == octobot_flow.enums.LastTriggerReason.CUSTOM_ACTION.value
+ or self.custom_action_ids
+ or self.trigger_reason == octobot_flow.enums.LastTriggerReason.SIGNAL.value
+ or self.signal_ids
+ )
+
+ def was_interrupted(self) -> bool:
+ return not self.was_completed
+
+
+@dataclasses.dataclass
+class DegradedStateDetails(octobot_commons.dataclasses.MinimizableDataclass):
+ since: float = 0
+ reason: typing.Optional[str] = None
+
+
+@dataclasses.dataclass
+class ExecutionDetails(octobot_commons.dataclasses.MinimizableDataclass):
+ previous_execution: TriggerDetails = dataclasses.field(default_factory=TriggerDetails)
+ current_execution: TriggerDetails = dataclasses.field(default_factory=TriggerDetails)
+ degraded_state: DegradedStateDetails = dataclasses.field(default_factory=DegradedStateDetails)
+ execution_error: typing.Optional[str] = None
+
+ def __post_init__(self):
+ if self.previous_execution and isinstance(self.previous_execution, dict):
+ self.previous_execution = TriggerDetails.from_dict(self.previous_execution)
+ if self.current_execution and isinstance(self.current_execution, dict):
+ self.current_execution = TriggerDetails.from_dict(self.current_execution)
+ if self.degraded_state and isinstance(self.degraded_state, dict):
+ self.degraded_state = DegradedStateDetails.from_dict(self.degraded_state)
+
+ def should_fetch_custom_actions_or_signals(self) -> bool:
+ return (
+ self.current_execution.has_custom_actions_or_signals_to_fetch()
+ or (self.previous_execution.was_interrupted() and self.previous_execution.has_custom_actions_or_signals_to_fetch())
+ )
+
+ def start_execution(self):
+ self.current_execution.triggered_at = time.time()
+
+ def complete_execution(self, next_execution_scheduled_to: float):
+ self.current_execution.was_completed = True
+ self.previous_execution = self.current_execution
+ self.current_execution = TriggerDetails(
+ scheduled_to=next_execution_scheduled_to,
+ trigger_reason=octobot_flow.enums.LastTriggerReason.SCHEDULED.value,
+ additional_actions=additional_actions_import.AdditionalActions.default_iteration(),
+ )
diff --git a/packages/flow/octobot_flow/entities/automations/fetched_dependencies.py b/packages/flow/octobot_flow/entities/automations/fetched_dependencies.py
new file mode 100644
index 000000000..059d3c52e
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/automations/fetched_dependencies.py
@@ -0,0 +1,10 @@
+import dataclasses
+import typing
+
+import octobot_commons.dataclasses
+
+import octobot_flow.entities.automations.fetched_exchange_data as fetched_exchange_data_import
+
+@dataclasses.dataclass
+class FetchedDependencies(octobot_commons.dataclasses.MinimizableDataclass):
+ fetched_exchange_data: typing.Optional[fetched_exchange_data_import.FetchedExchangeData] = None
diff --git a/packages/flow/octobot_flow/entities/automations/fetched_exchange_data.py b/packages/flow/octobot_flow/entities/automations/fetched_exchange_data.py
new file mode 100644
index 000000000..950825b98
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/automations/fetched_exchange_data.py
@@ -0,0 +1,127 @@
+import dataclasses
+import typing
+import decimal
+
+import octobot_commons.dataclasses
+import octobot_commons.logging
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+import octobot_trading.exchanges
+import octobot_trading.storage.orders_storage
+import octobot_trading.api
+import octobot_trading.enums
+import octobot_trading.constants
+import octobot_trading.personal_data
+
+import octobot_flow.enums
+
+
+
+@dataclasses.dataclass
+class FetchedExchangeAccountElements(octobot_commons.dataclasses.MinimizableDataclass, octobot_commons.dataclasses.UpdatableDataclass):
+ portfolio: exchange_data_import.PortfolioDetails = dataclasses.field(default_factory=exchange_data_import.PortfolioDetails)
+ orders: exchange_data_import.OrdersDetails = dataclasses.field(default_factory=exchange_data_import.OrdersDetails)
+ positions: list[exchange_data_import.PositionDetails] = dataclasses.field(default_factory=list)
+ trades: list[dict] = dataclasses.field(default_factory=list)
+
+ def __post_init__(self):
+ if self.portfolio and isinstance(self.portfolio, dict):
+ self.portfolio = exchange_data_import.PortfolioDetails.from_dict(self.portfolio)
+ if self.orders and isinstance(self.orders, dict):
+ self.orders = exchange_data_import.OrdersDetails.from_dict(self.orders)
+ if self.positions and isinstance(self.positions[0], dict):
+ self.positions = [
+ exchange_data_import.PositionDetails.from_dict(position) for position in self.positions
+ ]
+
+ def sync_from_exchange_manager(
+ self, exchange_manager: octobot_trading.exchanges.ExchangeManager
+ ) -> list[octobot_flow.enums.ChangedElements]:
+ changed_elements = []
+ if self.sync_orders_from_exchange_manager(exchange_manager):
+ changed_elements.append(octobot_flow.enums.ChangedElements.ORDERS)
+ if self._sync_trades_from_exchange_manager(exchange_manager):
+ changed_elements.append(octobot_flow.enums.ChangedElements.TRADES)
+ if self.sync_portfolio_from_exchange_manager(exchange_manager):
+ changed_elements.append(octobot_flow.enums.ChangedElements.PORTFOLIO)
+ if self.sync_positions_from_exchange_manager(exchange_manager):
+ changed_elements.append(octobot_flow.enums.ChangedElements.POSITIONS)
+ return changed_elements
+
+ def sync_orders_from_exchange_manager(self, exchange_manager: octobot_trading.exchanges.ExchangeManager) -> bool:
+ previous_orders = self.orders
+ updated_open_orders_exchange_ids = set()
+ updated_open_orders = []
+ updated_missing_orders = []
+ for order in octobot_trading.api.get_open_orders(exchange_manager):
+ if order.is_self_managed():
+ octobot_commons.logging.get_logger(self.__class__.__name__).error(
+ f"Self managed order created. This type of [{exchange_manager.exchange_name}] "
+ f"order is not supported, order is ignored. Order: {order}"
+ )
+ continue
+ updated_open_orders_exchange_ids.add(order.exchange_order_id)
+ updated_open_orders.append(
+ octobot_trading.storage.orders_storage._format_order(order, exchange_manager)
+ )
+ updated_missing_orders = [
+ order
+ for exchange_id, order in octobot_trading.personal_data.get_enriched_orders_by_exchange_id(previous_orders.open_orders).items()
+ if exchange_id not in updated_open_orders_exchange_ids
+ ]
+ self.orders.open_orders = updated_open_orders
+ self.orders.missing_orders = updated_missing_orders
+ return previous_orders != self.orders
+
+ def _sync_trades_from_exchange_manager(self, exchange_manager: octobot_trading.exchanges.ExchangeManager) -> bool:
+ previous_trades = self.trades
+ self.trades = octobot_trading.api.get_trade_history(exchange_manager, as_dict=True)
+ return previous_trades != self.trades
+
+ def sync_portfolio_from_exchange_manager(self, exchange_manager: octobot_trading.exchanges.ExchangeManager) -> bool:
+ previous_portfolio = self.portfolio.content
+ self.portfolio.content = {
+ key: values
+ for key, values in octobot_trading.api.get_portfolio(exchange_manager, as_decimal=False).items()
+ if any(value for value in values.values()) # skip 0 value assets
+ }
+ return previous_portfolio != self.portfolio.content
+
+ def sync_positions_from_exchange_manager(self, exchange_manager: octobot_trading.exchanges.ExchangeManager) -> bool:
+ previous_positions = self.positions
+ self.positions = [
+ exchange_data_import.PositionDetails(position.to_dict(), position.symbol_contract.to_dict())
+ for position in octobot_trading.api.get_positions(exchange_manager)
+ ]
+ return previous_positions != self.positions
+
+
+@dataclasses.dataclass
+class FetchedExchangePublicData(octobot_commons.dataclasses.MinimizableDataclass):
+ markets: list[exchange_data_import.MarketDetails] = dataclasses.field(default_factory=list)
+ tickers: dict[str, dict[str, typing.Any]] = dataclasses.field(default_factory=dict)
+
+
+@dataclasses.dataclass
+class FetchedExchangeData(octobot_commons.dataclasses.MinimizableDataclass):
+ public_data: FetchedExchangePublicData = dataclasses.field(default_factory=FetchedExchangePublicData)
+ authenticated_data: FetchedExchangeAccountElements = dataclasses.field(default_factory=FetchedExchangeAccountElements)
+
+ def __post_init__(self):
+ if self.public_data and isinstance(self.public_data, dict):
+ self.public_data = FetchedExchangePublicData.from_dict(self.public_data)
+ if self.authenticated_data and isinstance(self.authenticated_data, dict):
+ self.authenticated_data = FetchedExchangeAccountElements.from_dict(self.authenticated_data)
+
+ def get_last_price(self, symbol: str) -> decimal.Decimal:
+        # use an "if" check (not try/except) since, in most cases, tickers are not available for all symbols
+ if symbol in self.public_data.tickers:
+ try:
+ return decimal.Decimal(str(
+ self.public_data.tickers[symbol][
+ octobot_trading.enums.ExchangeConstantsTickersColumns.CLOSE.value
+ ]
+ ))
+ except (KeyError, decimal.DecimalException):
+ return octobot_trading.constants.ZERO
+ else:
+ return octobot_trading.constants.ZERO
diff --git a/packages/flow/octobot_flow/entities/automations/post_iteration_actions_details.py b/packages/flow/octobot_flow/entities/automations/post_iteration_actions_details.py
new file mode 100644
index 000000000..6e0266910
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/automations/post_iteration_actions_details.py
@@ -0,0 +1,40 @@
+import dataclasses
+import typing
+
+import octobot_commons.dataclasses
+
+
+@dataclasses.dataclass
+class RefreshExchangeBotsAuthenticatedDataDetails:
+    # TODO update this when global view refresh trigger is implemented
+ exchange_community_internal_name: str
+ exchange_id: str
+ exchange_account_id: typing.Optional[str]
+ to_recall_bot_id: typing.Optional[str] = None
+ update_account_status: bool = False
+ ignored_exchange_account_ids: typing.Optional[set[str]] = None
+
+
+@dataclasses.dataclass
+class NextIterationDetails(octobot_commons.dataclasses.FlexibleDataclass):
+ instant_trigger: bool = False
+ unclearable_trade_exchange_order_ids: list[str] = dataclasses.field(default_factory=list)
+
+
+@dataclasses.dataclass
+class PostIterationActionsDetails(octobot_commons.dataclasses.MinimizableDataclass):
+ stop_automation: bool = False
+ postpone_execution: bool = False
+ postpone_reason: typing.Optional[str] = None
+ raisable_error: typing.Optional[str] = None
+ trigger_global_view_refresh: bool = False
+ trigger_global_view_refresh_args: typing.Optional[RefreshExchangeBotsAuthenticatedDataDetails] = None
+ next_iteration_details: typing.Optional[NextIterationDetails] = None
+
+ def has_automation_actions(self) -> bool:
+ return bool(self.stop_automation)
+
+ def should_cancel_iteration(self) -> bool:
+        # the iteration is cancelled when a global view refresh is triggered;
+        # otherwise proceed with the next iteration's required steps
+ return self.trigger_global_view_refresh
\ No newline at end of file
diff --git a/packages/flow/octobot_flow/entities/community/__init__.py b/packages/flow/octobot_flow/entities/community/__init__.py
new file mode 100644
index 000000000..d1bce7642
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/community/__init__.py
@@ -0,0 +1,5 @@
+from octobot_flow.entities.community.user_authentication import UserAuthentication
+
+__all__ = [
+ "UserAuthentication",
+]
diff --git a/packages/flow/octobot_flow/entities/community/user_authentication.py b/packages/flow/octobot_flow/entities/community/user_authentication.py
new file mode 100644
index 000000000..cd5fb84c4
--- /dev/null
+++ b/packages/flow/octobot_flow/entities/community/user_authentication.py
@@ -0,0 +1,16 @@
+import dataclasses
+import typing
+import octobot_commons.dataclasses
+
+
+@dataclasses.dataclass
+class UserAuthentication(octobot_commons.dataclasses.FlexibleDataclass):
+ email: typing.Optional[str] = None
+ password: typing.Optional[str] = None
+ hidden: bool = False
+ user_id: typing.Optional[str] = None
+ auth_key: typing.Optional[str] = None
+ encrypted_keys_by_exchange: dict[str, str] = dataclasses.field(default_factory=dict)
+
+ def has_auth_details(self) -> bool:
+ return bool(self.password or self.auth_key)
\ No newline at end of file
diff --git a/packages/flow/octobot_flow/enums.py b/packages/flow/octobot_flow/enums.py
new file mode 100644
index 000000000..1f53eb527
--- /dev/null
+++ b/packages/flow/octobot_flow/enums.py
@@ -0,0 +1,47 @@
+import enum
+
+
+class LastTriggerReason(enum.Enum):
+ SCHEDULED = "scheduled"
+ CUSTOM_ACTION = "custom_action"
+ SIGNAL = "signal"
+ CONFIGURATION_UPDATE = "configuration_update"
+ UNDEFINED = None
+
+
+class DegradedStateReasons(enum.Enum):
+ INVALID_EXCHANGE_CREDENTIALS = "invalid_exchange_credentials"
+ MISSING_API_KEY_TRADING_RIGHTS = "missing_api_key_trading_rights"
+ MISSING_STRATEGY_MINIMAL_FUNDS = "missing_strategy_minimal_funds"
+ WORKFLOW_INIT_ERROR = "workflow_init_error"
+ UNDEFINED = None
+
+
+class ChangedElements(enum.Enum):
+ ORDERS = "orders"
+ TRADES = "trades"
+ PORTFOLIO = "portfolio"
+ POSITIONS = "positions"
+
+
+class ActionType(enum.Enum):
+ APPLY_CONFIGURATION = "apply_configuration"
+ UNKNOWN = "unknown"
+
+
+class ActionErrorStatus(enum.Enum):
+ NO_ERROR = None
+ NOT_ENOUGH_FUNDS = "not_enough_funds"
+ MISSING_SYMBOL = "missing_symbol"
+ SYMBOL_INCOMPATIBLE_WITH_ACCOUNT = "symbol_incompatible_with_account"
+ ORDER_NOT_FOUND = "order_not_found"
+ INVALID_ORDER = "invalid_order"
+ INVALID_CONFIG = "invalid_config"
+ INVALID_SIGNAL_FORMAT = "invalid_signal_format"
+ UNSUPPORTED_STOP_ORDER = "unsupported_stop_order"
+ INCOMPATIBLE_TRADING_TYPE = "incompatible_trading_type"
+ UNSUPPORTED_HEDGE_POSITION = "unsupported_hedge_position"
+ INTERNAL_ERROR = "internal_error"
+ BLOCKCHAIN_WALLET_ERROR = "blockchain_wallet_error"
+ DISABLED_FUNDS_TRANSFER_ERROR = "disabled_funds_transfer_error"
+ UNSUPPORTED_ACTION_TYPE = "unsupported_action_type"
diff --git a/packages/flow/octobot_flow/environment.py b/packages/flow/octobot_flow/environment.py
new file mode 100644
index 000000000..e32bacb11
--- /dev/null
+++ b/packages/flow/octobot_flow/environment.py
@@ -0,0 +1,10 @@
+import octobot.constants # will load .env file and init constants
+
+import octobot_flow.repositories.community
+import octobot_trading.constants
+
+
+def initialize_environment(allow_funds_transfer: bool = False) -> None:
+ octobot_flow.repositories.community.initialize_community_authentication()
+ if allow_funds_transfer:
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = True
diff --git a/packages/flow/octobot_flow/errors.py b/packages/flow/octobot_flow/errors.py
new file mode 100644
index 000000000..32e1062f9
--- /dev/null
+++ b/packages/flow/octobot_flow/errors.py
@@ -0,0 +1,81 @@
+class OctobotFlowError(Exception):
+ """parent class for all octobot flow errors"""
+
+class ConfigurationError(OctobotFlowError):
+ """an error related to the configuration of the bot"""
+
+class ExchangeError(OctobotFlowError):
+ """an error related to the bot's communication with the exchange"""
+
+class AutomationActionError(OctobotFlowError):
+ """an error related to an automation action execution"""
+
+class DSLExecutorError(OctobotFlowError):
+ """raise when a DSL executor error occurs"""
+
+class ExchangeAccountInitializationError(ExchangeError):
+ """raise when an exchange account initialization fails"""
+
+class InitializationRunFailedError(ConfigurationError):
+ """raise when an initialization run fails"""
+
+
+class NoExchangeAccountDetailsError(ConfigurationError):
+ """raise when no exchange account details are available"""
+
+
+class AutomationValidationError(ConfigurationError):
+ """raise when an automation configuration or state is invalid"""
+
+
+class UnsupportedActionTypeError(AutomationActionError):
+ """raise when an unsupported action type is encountered"""
+
+
+class UnsupportedConfiguredActionTypeError(UnsupportedActionTypeError):
+ """raise when an unsupported configured action type is encountered"""
+
+
+
+class InvalidAutomationActionError(ConfigurationError):
+ """raise when an automation action is invalid"""
+
+
+class InvalidConfigurationActionError(ConfigurationError):
+ """raise when a configuration action is invalid"""
+
+
+class NoProfileDataError(ConfigurationError):
+ """raise when no profile data is available"""
+
+
+class NoAutomationError(ConfigurationError):
+ """raise when a automations state does not contain any automation"""
+
+
+class CommunityError(ConfigurationError):
+ """an error related to the community authentication of the bot"""
+
+
+class CommunityAuthenticationRequiredError(CommunityError):
+ """raise when community authentication is required"""
+
+
+class UnresolvedDSLScriptError(AutomationActionError):
+ """raise when a DSL script is not resolved"""
+
+
+class ActionDependencyError(AutomationActionError):
+ """raise when an action dependency is invalid"""
+
+
+class AutomationDAGResetError(AutomationActionError):
+ """raise when a DAG reset fails"""
+
+
+class ActionDependencyNotFoundError(ActionDependencyError):
+ """raise when an action dependency is not found"""
+
+
+class MissingDSLExecutorDependencyError(DSLExecutorError):
+ """raise when a DSL executor dependency is missing"""
diff --git a/packages/flow/octobot_flow/jobs/__init__.py b/packages/flow/octobot_flow/jobs/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/packages/flow/octobot_flow/jobs/automation_job.py b/packages/flow/octobot_flow/jobs/automation_job.py
new file mode 100644
index 000000000..6d6ebed41
--- /dev/null
+++ b/packages/flow/octobot_flow/jobs/automation_job.py
@@ -0,0 +1,321 @@
+import contextlib
+import time
+import typing
+
+import octobot_commons.logging as common_logging
+import octobot.community
+
+from octobot_flow.entities.actions.action_details import AbstractActionDetails
+import octobot_flow.entities
+import octobot_flow.enums
+import octobot_flow.errors
+import octobot_flow.logic.configuration
+import octobot_flow.parsers.sanitizer
+import octobot_flow.logic.dsl
+import octobot_flow.repositories.community
+import octobot_flow.encryption
+import octobot_flow.jobs.exchange_account_job as exchange_account_job_import
+import octobot_flow.jobs.automation_runner_job as automation_runner_job_import
+
+
+class AutomationJob:
+ """
+ Configures the automation environment and runs it:
+ 1. Parse the automation, initialize if necessary, resolve dependencies and DAG actions to prepare the automation environment.
+ 2. Use the AutomationRunner to run the automation itself.
+ 3. Execute pending priority actions if any, otherwise execute the DAG's executable actions.
+ """
+ def __init__(
+ self,
+ automation_state: dict[str, typing.Any],
+ added_priority_actions: list[octobot_flow.entities.AbstractActionDetails],
+ auth_details: typing.Union[octobot_flow.entities.UserAuthentication, dict],
+ ):
+ self.automation_state: octobot_flow.entities.AutomationState = (
+ octobot_flow.entities.AutomationState.from_dict(automation_state)
+ )
+ if added_priority_actions:
+ # Include added priority actions in the automation state.
+ # All pending priority actions will be executed before any other actions.
+ self.automation_state.update_priority_actions(added_priority_actions)
+ self._validate_input()
+ self.auth_details: octobot_flow.entities.UserAuthentication = octobot_flow.entities.UserAuthentication.from_dict(auth_details) if isinstance(auth_details, dict) else auth_details
+ self.is_initialization_run = self._requires_initialization_run()
+ self.fetched_actions: list[octobot_flow.entities.AbstractActionDetails] = []
+ self._logger: common_logging.BotLogger = common_logging.get_logger(self.__class__.__name__)
+
+ async def run(self) -> list[octobot_flow.entities.AbstractActionDetails]:
+ if self.is_initialization_run:
+ # Configure the automation
+ return await self.execute_initialization_run()
+ t0 = time.time()
+ executed_actions = []
+ async with self._maybe_authenticator() as maybe_authenticator:
+ maybe_community_repository = (
+ octobot_flow.repositories.community.CommunityRepository(maybe_authenticator)
+ if maybe_authenticator else None
+ )
+ with octobot_flow.encryption.decrypted_bots_configurations(self.automation_state):
+ to_execute_actions, are_priority_actions = self._get_actions_to_execute()
+ if are_priority_actions:
+ self._logger.info(f"Running {len(to_execute_actions)} priority actions: {to_execute_actions}")
+ self._resolve_dsl_scripts(to_execute_actions, True)
+ else:
+ # fetch the actions and signals if any
+ await self._fetch_actions(maybe_authenticator)
+ # resolve the DSL scripts in case it has dependencies on other actions
+ self._resolve_dsl_scripts(
+ self.automation_state.automation.actions_dag.get_executable_actions(),
+ True
+ )
+ # fetch the dependencies of the automation environment
+ fetched_dependencies = await self._fetch_dependencies(maybe_community_repository, to_execute_actions)
+ # Align on the previous scheduled time when possible when running priority actions
+ # to keep sleep cycles consistency when a priority action is processed.
+ default_next_execution_scheduled_to = (
+ self.automation_state.automation.execution.current_execution.scheduled_to
+ if are_priority_actions else 0
+ )
+ # execute the automation
+ executed_actions = await self._execute_automation_actions(
+ maybe_community_repository, fetched_dependencies, to_execute_actions,
+ default_next_execution_scheduled_to
+ )
+ # don't keep resolved DSL scripts after execution to avoid side effects
+ self._clear_resolved_dsl_scripts(executed_actions)
+ self._logger.info(f"Automation updated successfully in {round(time.time() - t0, 2)} seconds")
+ return executed_actions
+
+ @contextlib.asynccontextmanager
+ async def _maybe_authenticator(self) -> typing.AsyncGenerator[typing.Optional[octobot.community.CommunityAuthentication], None]:
+ authenticator_factory = octobot_flow.repositories.community.CommunityAuthenticatorFactory(
+ self.auth_details
+ )
+ if authenticator_factory.enable_community_authentication():
+ if self.auth_details.has_auth_details():
+ async with authenticator_factory.local_authenticator() as authenticator:
+ yield authenticator
+ else:
+ async with authenticator_factory.local_anon_authenticator() as authenticator:
+ yield authenticator
+ else:
+ yield None
+
+ async def execute_initialization_run(self) -> list[octobot_flow.entities.AbstractActionDetails]:
+ executed_actions = []
+ async with self._maybe_authenticator() as maybe_authenticator:
+ await self._fetch_actions(maybe_authenticator)
+ executed_actions = await self._initialize_exchange_account_details_from_actions()
+ if self._requires_initialization_run():
+ raise octobot_flow.errors.InitializationRunFailedError(
+ "Initialization run is still required after running the initialization run"
+ )
+ self._logger.info(
+ f"Initialization run completed, automation initialized on "
+ f"{self.automation_state.exchange_account_details.exchange_details.internal_name}"
+ )
+ return executed_actions
+
+ async def _initialize_exchange_account_details_from_actions(self) -> list[octobot_flow.entities.AbstractActionDetails]:
+ already_applied_config = False
+ actions, _ = self._get_actions_to_execute()
+ for action in actions:
+ if isinstance(action, octobot_flow.entities.ConfiguredActionDetails) and action.action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value:
+ if already_applied_config:
+ raise octobot_flow.errors.InitializationRunFailedError(
+ "Only one configuration action is allowed"
+ )
+ await self._apply_configuration_from_action(action)
+ already_applied_config = True
+ else:
+ self._logger.info(f"Ignoring non configuration action before initialization: {action}")
+ return actions
+
+ async def _apply_configuration_from_action(
+ self, action: octobot_flow.entities.ConfiguredActionDetails
+ ):
+ if self.automation_state.exchange_account_details is None:
+ self.automation_state.exchange_account_details = octobot_flow.entities.ExchangeAccountDetails()
+ action_configuration_updater = octobot_flow.logic.configuration.AutomationConfigurationUpdater(
+ self.automation_state, action
+ )
+ await action_configuration_updater.update()
+
+ async def _fetch_actions(
+ self, maybe_authenticator: typing.Optional[octobot.community.CommunityAuthentication]
+ ):
+ automation = self.automation_state.automation
+ if automation.execution.should_fetch_custom_actions_or_signals():
+ user_actions_to_fetch = automation.execution.current_execution.custom_action_ids
+ signals_to_fetch = automation.execution.current_execution.signal_ids
+ if user_actions_to_fetch or signals_to_fetch:
+ authenticator = octobot_flow.repositories.community.ensure_is_authenticated(maybe_authenticator)
+ t0 = time.time()
+ all_actions: list[octobot_flow.entities.AbstractActionDetails] = []
+ repository = octobot_flow.repositories.community.CustomActionsRepository(authenticator)
+ if user_actions_to_fetch:
+ all_actions.extend(await repository.fetch_custom_actions(
+ user_actions_to_fetch, select_pending_user_actions_only=True
+ ))
+ if signals_to_fetch:
+ all_actions.extend(await repository.fetch_signals(
+ signals_to_fetch, select_pending_signals_only=True
+ ))
+ self._logger.info(
+ f"Fetched {len(all_actions)} custom actions/signals for automation "
+ f"{automation.metadata.automation_id} in {round(time.time() - t0, 2)} seconds"
+ )
+ self.fetched_actions.extend(all_actions)
+
+ def _requires_initialization_run(self) -> bool:
+ return (
+ self.automation_state.automation.execution.previous_execution.triggered_at == 0
+ and (
+ not self.automation_state.exchange_account_details
+ or not self.automation_state.exchange_account_details.exchange_details.internal_name
+ )
+ )
+
+ async def _fetch_dependencies(
+ self,
+ maybe_community_repository: typing.Optional[octobot_flow.repositories.community.CommunityRepository],
+ to_execute_actions: list[octobot_flow.entities.AbstractActionDetails]
+ ) -> octobot_flow.entities.FetchedDependencies:
+ self._logger.info("Fetching automation dependencies.")
+ fetched_exchange_data = (
+ await self._init_all_required_exchange_data(
+ self.automation_state.exchange_account_details, maybe_community_repository, to_execute_actions
+ )
+ if self.automation_state.has_exchange() else None
+ )
+ return octobot_flow.entities.FetchedDependencies(
+ fetched_exchange_data=fetched_exchange_data
+ )
+
+    async def _init_all_required_exchange_data(
+        self,
+        exchange_account_details: octobot_flow.entities.ExchangeAccountDetails,
+        maybe_community_repository: typing.Optional[octobot_flow.repositories.community.CommunityRepository],
+        to_execute_actions: list[octobot_flow.entities.AbstractActionDetails]
+    ) -> octobot_flow.entities.FetchedExchangeData:
+        """
+        Fetch all exchange data (public + authenticated) required to run the
+        automation, using an ExchangeAccountJob inside an exchange context built
+        from a profile covering every symbol the actions depend on.
+
+        NOTE(review): maybe_community_repository is accepted but never used in
+        this body — confirm whether it is still needed in the signature.
+        """
+        t0 = time.time()
+        # NOTE(review): reads as "[name]account with id: ..." — missing space
+        # between the bracket and "account" in the log summary (cosmetic).
+        exchange_summary = (
+            f"[{exchange_account_details.exchange_details.internal_name}]"
+            f"account with id: {exchange_account_details.exchange_details.exchange_account_id}"
+        )
+        self._logger.info(f"Initializing all required data for {exchange_summary}.")
+        exchange_account_job = exchange_account_job_import.ExchangeAccountJob(
+            self.automation_state, self.fetched_actions
+        )
+        # Union of symbols from already-fetched actions and from the DSL
+        # dependencies of the actions about to be executed.
+        # NOTE(review): variable holds a set of symbols — singular name "symbol"
+        # is misleading (cosmetic).
+        symbol = set(
+            exchange_account_job.get_all_actions_symbols()
+            + octobot_flow.logic.dsl.get_actions_symbol_dependencies(to_execute_actions)
+        )
+        async with exchange_account_job.account_exchange_context(
+            octobot_flow.logic.configuration.create_profile_data(
+                self.automation_state.exchange_account_details,
+                self.automation_state.automation.metadata.automation_id,
+                symbol
+            )
+        ):
+            # Public data first (OHLCV, tickers), then authenticated data
+            # (orders, portfolio, positions) — each step is timed and logged.
+            await exchange_account_job.update_public_data()
+            self._logger.info(
+                f"Public data updated for {exchange_account_details.exchange_details.internal_name} in {round(time.time() - t0, 2)} seconds"
+            )
+            t1 = time.time()
+            await exchange_account_job.update_authenticated_data()
+            self._logger.info(
+                f"Authenticated data updated for {exchange_account_details.exchange_details.internal_name} in {round(time.time() - t1, 2)} seconds"
+            )
+        self._logger.info(
+            f"Initialized all required data for {exchange_summary} in {round(time.time() - t0, 2)} seconds."
+        )
+        return exchange_account_job.fetched_dependencies.fetched_exchange_data # type: ignore
+
+    async def _execute_automation_actions(
+        self,
+        maybe_community_repository: typing.Optional[octobot_flow.repositories.community.CommunityRepository],
+        fetched_dependencies: octobot_flow.entities.FetchedDependencies,
+        to_execute_actions: list[octobot_flow.entities.AbstractActionDetails],
+        default_next_execution_scheduled_to: float
+    ) -> list[octobot_flow.entities.AbstractActionDetails]:
+        """
+        Run the automation's actions through an AutomationRunnerJob.
+
+        All errors (validation or unexpected) are logged and swallowed: this
+        method always returns to_execute_actions, even on failure.
+        """
+        automation_runner_job = automation_runner_job_import.AutomationRunnerJob(
+            self.automation_state, fetched_dependencies, maybe_community_repository,
+            default_next_execution_scheduled_to
+        )
+        automation = self.automation_state.automation
+        # NOTE(review): exchange_account_details is dereferenced without a
+        # has_exchange() guard — confirm callers only reach this with an
+        # exchange account set.
+        exchange_account_desc = (
+            'simulated exchange account' if self.automation_state.exchange_account_details.is_simulated()
+            else 'real exchange account'
+        )
+        automation_signature = f"{exchange_account_desc} automation {automation.metadata.automation_id}"
+        try:
+            self._logger.info(f"Updating {automation_signature}")
+            automation_runner_job.validate(automation)
+            start_time = time.time()
+            run_as_reference_account_first = automation.runs_on_reference_exchange_account_first()
+            async with automation_runner_job.actions_context(
+                to_execute_actions,
+                run_as_reference_account_first
+            ):
+                await automation_runner_job.run()
+            # NOTE(review): this NotImplementedError is caught by the broad
+            # except below and therefore only logged, never propagated.
+            if run_as_reference_account_first:
+                raise NotImplementedError("TODO: implement copy from reference account to client account")
+            self._logger.info(
+                f"{automation_signature} successfully updated in {round(time.time() - start_time, 2)} seconds"
+            )
+        except octobot_flow.errors.AutomationValidationError as err:
+            # Invalid configuration: skip this automation, keep the process alive.
+            self._logger.exception(
+                err, True, f"Skipped {automation_signature} update: automation configuration is invalid: {err}"
+            )
+        except Exception as err:
+            # Catch-all boundary: automation failures must not crash the job.
+            self._logger.exception(
+                err,
+                True,
+                f"Unexpected error when updating {automation_signature}: {err.__class__.__name__}: {err}"
+            )
+        return to_execute_actions
+
+    def _get_actions_to_execute(self) -> tuple[list[octobot_flow.entities.AbstractActionDetails], bool]:
+        # Pending priority actions preempt everything else: when any exist,
+        # only those are returned (flag True). Otherwise return the DAG's
+        # currently executable actions plus any fetched actions (flag False).
+        if pending_priority_actions := self._get_pending_priority_actions():
+            return pending_priority_actions, True
+        executable_actions = self.automation_state.automation.actions_dag.get_executable_actions()
+        return executable_actions + self.fetched_actions, False
+
+    def _get_pending_priority_actions(self) -> list[octobot_flow.entities.AbstractActionDetails]:
+        # Priority actions that have not yet completed.
+        return [
+            action for action in self.automation_state.priority_actions if not action.is_completed()
+        ]
+
+    def _resolve_dsl_scripts(
+        self, actions: list[octobot_flow.entities.AbstractActionDetails],
+        from_actions_dag: bool
+    ):
+        # Resolve the actions' DSL scripts either through the automation's own
+        # DAG (from_actions_dag=True) or through a throwaway local DAG built
+        # from just these actions — presumably so DAG-wide dependencies are
+        # only consulted in the first case; confirm against resolve_dsl_scripts.
+        if from_actions_dag:
+            self.automation_state.automation.actions_dag.resolve_dsl_scripts(
+                actions
+            )
+        else:
+            local_dag = octobot_flow.entities.ActionsDAG(actions=actions)
+            local_dag.resolve_dsl_scripts(actions)
+
+    def _clear_resolved_dsl_scripts(self, actions: list[octobot_flow.entities.AbstractActionDetails]):
+        # Drop resolved script state from DSL actions (other action types are
+        # left untouched).
+        for action in actions:
+            if isinstance(action, octobot_flow.entities.DSLScriptActionDetails):
+                action.clear_resolved_dsl_script()
+
+    def dump(self) -> dict:
+        """Return the automation state as a sanitized dict, omitting default values."""
+        return octobot_flow.parsers.sanitizer.sanitize(
+            self.automation_state.to_dict(include_default_values=False)
+        ) # type: ignore
+
+    async def __aenter__(self) -> "AutomationJob":
+        # Async context manager entry; no setup currently required.
+        return self
+
+    async def __aexit__(self, exc_type, exc_value, traceback) -> None:
+        # nothing to do for now
+        pass
+
+    def _validate_input(self):
+        # Fail fast when the automation has no id.
+        if not self.automation_state.automation.metadata.automation_id:
+            raise octobot_flow.errors.NoAutomationError("Automation is required")
diff --git a/packages/flow/octobot_flow/jobs/automation_runner_job.py b/packages/flow/octobot_flow/jobs/automation_runner_job.py
new file mode 100644
index 000000000..4c245c884
--- /dev/null
+++ b/packages/flow/octobot_flow/jobs/automation_runner_job.py
@@ -0,0 +1,146 @@
+import contextlib
+import typing
+
+import octobot_commons.profiles as commons_profiles
+import octobot_commons.context_util as context_util
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+import octobot_flow.entities
+import octobot_flow.enums
+import octobot_flow.errors
+import octobot_flow.logic.configuration
+import octobot_flow.logic.dsl
+import octobot_flow.repositories.exchange
+import octobot_flow.repositories.community
+import octobot_flow.logic.actions
+
+
+class AutomationRunnerJob(octobot_flow.repositories.exchange.ExchangeContextMixin):
+    """
+    Runs the automation from the configured environment.
+    Sequentially executes the automation pre-actions, actions and post-actions.
+    Finally, completes the current execution and register the next execution scheduled time.
+    """
+    WILL_EXECUTE_STRATEGY: bool = True
+
+    def __init__(
+        self,
+        automation_state: octobot_flow.entities.AutomationState,
+        fetched_dependencies: octobot_flow.entities.FetchedDependencies,
+        maybe_community_repository: typing.Optional[octobot_flow.repositories.community.CommunityRepository],
+        default_next_execution_scheduled_to: float,
+    ):
+        super().__init__(automation_state, fetched_dependencies)
+
+        self._maybe_community_repository: typing.Optional[
+            octobot_flow.repositories.community.CommunityRepository
+        ] = maybe_community_repository
+        # _as_reference_account and _to_execute_actions are only meaningful
+        # inside actions_context(), which sets them on entry and clears the
+        # actions on exit.
+        self._as_reference_account: bool = False
+        self._to_execute_actions: list[octobot_flow.entities.AbstractActionDetails] = None # type: ignore
+        # Fallback next-execution time used when no action schedules one.
+        self._default_next_execution_scheduled_to: float = default_next_execution_scheduled_to
+
+    def validate(self, automation: octobot_flow.entities.AutomationDetails):
+        # Minimal precondition: an automation without an id cannot be run.
+        if not automation.metadata.automation_id:
+            raise octobot_flow.errors.AutomationValidationError(
+                f"automation_id is required. Found: {automation.metadata.automation_id}"
+            )
+
+    async def run(self):
+        """Execute one automation cycle: actions, post-actions, then completion."""
+        self.automation_state.automation.execution.start_execution()
+        # TODO implement to remove after POC 4
+        # # 1. for each automation, process additional actions if necessary (ex: portfolio optimization)
+        # if self.automation_state.automation.execution.current_execution.additional_actions.has_trading_actions():
+        #     await self._process_additional_actions()
+        # TODO implement to remove after POC 4
+        # # 2. process on filled and cancelled orders actions if necessary
+        # await self._process_on_filled_and_cancelled_orders_actions()
+        # # 3. update strategy if necessary
+        changed_elements, next_execution_scheduled_to = await self._execute_actions()
+        # if octobot_flow.enums.ChangedElements.ORDERS in changed_elements:
+        # TODO implement to remove after POC 4
+        #     # 4. process on filled and cancelled orders actions again if necessary
+        #     await self._process_on_filled_and_cancelled_orders_actions()
+        # 5. execute post actions if necessary
+        if self.automation_state.automation.post_actions.has_automation_actions():
+            await self._execute_post_actions()
+        # 6. register execution completion
+        self.automation_state.automation.execution.complete_execution(next_execution_scheduled_to)
+
+    async def _execute_actions(self) -> tuple[list[octobot_flow.enums.ChangedElements], float]:
+        # Delegate to ActionsExecutor, then report what changed and when the
+        # next execution should happen.
+        actions_executor = octobot_flow.logic.actions.ActionsExecutor(
+            self._maybe_community_repository, self._exchange_manager,
+            self.automation_state.automation, self._to_execute_actions,
+            self._as_reference_account
+        )
+        await actions_executor.execute()
+        return actions_executor.changed_elements, (
+            # use self._default_next_execution_scheduled_to if set when no next_execution_scheduled_to
+            # is configured
+            actions_executor.next_execution_scheduled_to or self._default_next_execution_scheduled_to
+        )
+
+    async def _process_on_filled_and_cancelled_orders_actions(self):
+        # update chained orders, groups and other mechanics if necessary
+        if not self.automation_state.has_exchange():
+            return
+        exchange_account_elements = self.automation_state.automation.get_exchange_account_elements(self._as_reference_account)
+        if exchange_account_elements.has_pending_chained_orders():
+            await self._update_chained_orders()
+        if exchange_account_elements.has_pending_groups():
+            await self._update_groups()
+
+    async def _update_chained_orders(self):
+        raise NotImplementedError("_update_chained_orders not implemented")
+
+    async def _update_groups(self):
+        raise NotImplementedError("_update_groups not implemented")
+
+    async def _process_additional_actions(self):
+        raise NotImplementedError("_process_additional_actions not implemented")
+
+    async def _update_stopped_automation_sub_portfolio_if_necessary(self):
+        # TODO implement when supporting sub portfolios: unregister automation sub portfolio
+        pass
+
+    async def _execute_post_actions(self):
+        # Currently the only post action with an effect is stopping the automation.
+        if self.automation_state.automation.post_actions.stop_automation:
+            await self._update_stopped_automation_sub_portfolio_if_necessary()
+
+    def init_strategy_exchange_data(self, exchange_data: exchange_data_import.ExchangeData):
+        """Populate exchange_data from fetched markets and current account elements."""
+        exchange_account_elements = self.automation_state.automation.get_exchange_account_elements(self._as_reference_account)
+        exchange_data.markets = self.fetched_dependencies.fetched_exchange_data.public_data.markets
+        exchange_data.portfolio_details.content = exchange_account_elements.portfolio.content
+        exchange_data.orders_details.open_orders = exchange_account_elements.orders.open_orders
+
+    def _get_profile_data(self) -> commons_profiles.ProfileData:
+        # Profile covering every symbol the to-execute actions depend on.
+        return octobot_flow.logic.configuration.create_profile_data(
+            self.automation_state.exchange_account_details,
+            self.automation_state.automation.metadata.automation_id,
+            set(octobot_flow.logic.dsl.get_actions_symbol_dependencies(
+                self._to_execute_actions
+            ))
+        )
+
+    @contextlib.asynccontextmanager
+    async def actions_context(
+        self,
+        actions: list[octobot_flow.entities.AbstractActionDetails],
+        as_reference_account: bool
+    ):
+        """
+        Set up the execution environment for the given actions: community
+        automation context (when a repository is available), profile data
+        context (a bot_id is mandatory) and the exchange manager context.
+        _to_execute_actions is always cleared on exit.
+        """
+        try:
+            self._as_reference_account = as_reference_account
+            self._to_execute_actions = actions
+            # NOTE(review): parenthesized multi-manager `with` requires a
+            # recent Python (3.10+ officially) — confirm the target runtime.
+            with (
+                self._maybe_community_repository.automation_context(
+                    self.automation_state.automation
+                ) if self._maybe_community_repository else context_util.EmptyContextManager(),
+                self.profile_data_provider.profile_data_context(self._get_profile_data())
+            ):
+                if not self.profile_data_provider.get_profile_data().profile_details.bot_id:
+                    raise octobot_flow.errors.AutomationValidationError(
+                        f"A bot_id is required to run a bot. Found: {self.profile_data_provider.get_profile_data().profile_details.bot_id}"
+                    )
+                async with self.exchange_manager_context(as_reference_account=self._as_reference_account):
+                    yield self
+        finally:
+            self._to_execute_actions = None # type: ignore
diff --git a/packages/flow/octobot_flow/jobs/exchange_account_job.py b/packages/flow/octobot_flow/jobs/exchange_account_job.py
new file mode 100644
index 000000000..1bcf3a639
--- /dev/null
+++ b/packages/flow/octobot_flow/jobs/exchange_account_job.py
@@ -0,0 +1,164 @@
+import asyncio
+import contextlib
+
+import octobot_commons.profiles as commons_profiles
+import octobot_commons.constants as common_constants
+import octobot_commons.symbols as symbol_util
+import octobot_commons.list_util as list_util
+import octobot_commons.logging as common_logging
+import octobot_trading.constants as trading_constants
+import octobot_trading.enums
+import octobot_trading.personal_data as personal_data
+import tentacles.Meta.Keywords.scripting_library as scripting_library
+import octobot_flow.repositories.exchange
+import octobot_flow.entities
+
+import octobot_flow.logic.exchange
+import octobot_flow.logic.dsl
+
+
+class ExchangeAccountJob(octobot_flow.repositories.exchange.ExchangeContextMixin):
+    """
+    Fetches all exchange data (public and authenticated) required by the
+    automation's actions and stores it into self.fetched_dependencies.
+    """
+    def __init__(
+        self,
+        automation_state: octobot_flow.entities.AutomationState,
+        actions: list[octobot_flow.entities.AbstractActionDetails],
+    ):
+        super().__init__(automation_state, octobot_flow.entities.FetchedDependencies())
+        # Actions whose symbol dependencies extend the set of traded symbols.
+        self.actions: list[octobot_flow.entities.AbstractActionDetails] = actions
+
+        self._logger: common_logging.BotLogger = common_logging.get_logger(self.__class__.__name__)
+
+    async def update_public_data(self):
+        """
+        Fetches all public data that might be required for any bot from the exchange
+        """
+        self._ensure_exchange_dependencies()
+        # OHLCV and tickers are independent: fetch them concurrently.
+        await asyncio.gather(
+            self._fetch_ohlcvs(),
+            self._fetch_tickers()
+        )
+
+    async def update_authenticated_data(self):
+        """Fetch account data from the exchange, then derive bot-side state from it."""
+        self._ensure_exchange_dependencies()
+        await self._fetch_authenticated_data()
+        await self._update_bot_authenticated_data()
+
+    async def _fetch_authenticated_data(self):
+        # Orders and portfolio always; positions only on futures exchanges.
+        coros = [
+            self._fetch_open_orders(),
+            self._fetch_portfolio(),
+        ]
+        if self._exchange_manager.is_future:
+            coros.append(self._fetch_positions())
+        await asyncio.gather(*coros)
+
+    async def _update_bot_authenticated_data(self):
+        # Resolve sub portfolios from the freshly fetched account data.
+        sub_portfolio_resolver = octobot_flow.logic.exchange.SubPortfolioResolver(
+            self.automation_state
+        )
+        await sub_portfolio_resolver.resolve_sub_portfolios()
+
+    @contextlib.asynccontextmanager
+    async def account_exchange_context(self, global_profile_data: commons_profiles.ProfileData):
+        """Enter the profile-data then exchange-manager contexts for this account."""
+        with self.profile_data_provider.profile_data_context(global_profile_data):
+            async with self.exchange_manager_context(as_reference_account=False):
+                yield
+
+    async def _fetch_and_save_ohlcv(
+        self, repository: octobot_flow.repositories.exchange.OhlcvRepository,
+        symbol: str, time_frame: str, limit: int
+    ):
+        # Fetch one (symbol, time_frame) market and append it to public data.
+        market = await repository.fetch_ohlcv(symbol, time_frame, limit)
+        self._logger.info(
+            f"Fetched [{self._exchange_manager.exchange_name}] OHLCV for {symbol} {time_frame}: ({len(market.close)} candles)"
+        )
+        self.fetched_dependencies.fetched_exchange_data.public_data.markets.append(market)
+
+    async def _fetch_ohlcvs(self):
+        # Fetch every (symbol, time_frame) pair concurrently with the required
+        # candle history size from the profile.
+        repository = self.get_exchange_repository_factory().get_ohlcv_repository()
+        history_size = scripting_library.get_required_candles_count(
+            self.profile_data_provider.get_profile_data(), trading_constants.MIN_CANDLES_HISTORY_SIZE
+        )
+        await asyncio.gather(*[
+            self._fetch_and_save_ohlcv(repository, symbol, time_frame, history_size)
+            for symbol in self._get_traded_symbols()
+            for time_frame in self._get_time_frames()
+        ])
+
+    async def _fetch_tickers(self):
+        repository = self.get_exchange_repository_factory().get_tickers_repository()
+        self.fetched_dependencies.fetched_exchange_data.public_data.tickers = await repository.fetch_tickers(
+            self._get_traded_symbols()
+        )
+        # Log only close prices to keep the line readable.
+        ticker_close_by_symbols = {
+            symbol: ticker[octobot_trading.enums.ExchangeConstantsTickersColumns.CLOSE.value]
+            for symbol, ticker in self.fetched_dependencies.fetched_exchange_data.public_data.tickers.items()
+        }
+        self._logger.info(
+            f"Fetched [{self._exchange_manager.exchange_name}] {len(self.fetched_dependencies.fetched_exchange_data.public_data.tickers)} "
+            f"tickers: {ticker_close_by_symbols}"
+        )
+
+    async def _fetch_positions(self):
+        repository = self.get_exchange_repository_factory().get_positions_repository()
+        self.fetched_dependencies.fetched_exchange_data.authenticated_data.positions = await repository.fetch_positions(self._get_traded_symbols())
+        self._logger.info(
+            f"Fetched [{self._exchange_manager.exchange_name}] {len(self.fetched_dependencies.fetched_exchange_data.authenticated_data.positions)} positions: "
+            f"{[position.position for position in self.fetched_dependencies.fetched_exchange_data.authenticated_data.positions]}"
+        )
+
+    async def _fetch_open_orders(self):
+        repository = self.get_exchange_repository_factory().get_orders_repository()
+        symbols = self._get_traded_symbols()
+        self.fetched_dependencies.fetched_exchange_data.authenticated_data.orders.open_orders = await repository.fetch_open_orders(symbols)
+        # NOTE(review): nested double quotes inside this f-string require
+        # Python >= 3.12 (PEP 701) — confirm the target runtime version.
+        self._logger.info(
+            f"Fetched [{self._exchange_manager.exchange_name}] "
+            f"{personal_data.get_symbol_count(self.fetched_dependencies.fetched_exchange_data.authenticated_data.orders.open_orders) or "0"} "
+            f"open orders for {symbols}"
+        )
+
+
+    async def _fetch_portfolio(self):
+        repository = self.get_exchange_repository_factory().get_portfolio_repository()
+        self.fetched_dependencies.fetched_exchange_data.authenticated_data.portfolio.full_content = await repository.fetch_portfolio() # type: ignore
+        self._logger.info(
+            f"Fetched [{self._exchange_manager.exchange_name}] full portfolio: "
+            f"{personal_data.get_balance_summary(self.fetched_dependencies.fetched_exchange_data.authenticated_data.portfolio.full_content, use_exchange_format=False)}"
+        )
+        self._update_exchange_account_portfolio()
+
+    def _update_exchange_account_portfolio(self):
+        # Convert the fetched raw portfolio into PortfolioAssetHolding entries,
+        # valuing each asset in the exchange's default reference market (via
+        # last price, or 1 when the asset IS the reference market).
+        unit = scripting_library.get_default_exchange_reference_market(self._exchange_manager.exchange_name)
+        self.automation_state.exchange_account_details.portfolio.content = [
+            octobot_flow.entities.PortfolioAssetHolding(
+                asset,
+                float(values[common_constants.PORTFOLIO_AVAILABLE]),
+                float(values[common_constants.PORTFOLIO_TOTAL]),
+                value=float(
+                    (
+                        self.fetched_dependencies.fetched_exchange_data.get_last_price(
+                            symbol_util.merge_currencies(asset, unit)
+                        ) if asset != unit else trading_constants.ONE
+                    ) * values[common_constants.PORTFOLIO_TOTAL] # type: ignore
+                ),
+            )
+            for asset, values in self.fetched_dependencies.fetched_exchange_data.authenticated_data.portfolio.full_content.items()
+        ]
+
+    def _get_traded_symbols(self) -> list[str]:
+        # Profile-configured symbols plus every symbol referenced by actions,
+        # deduplicated while keeping order.
+        profile_data = self.profile_data_provider.get_profile_data()
+        config_symbols = scripting_library.get_traded_symbols(profile_data)
+        return list_util.deduplicate(
+            config_symbols + self.get_all_actions_symbols()
+        )
+
+    def get_all_actions_symbols(self) -> list[str]:
+        """Return the symbols the given actions' DSL scripts depend on."""
+        return octobot_flow.logic.dsl.get_actions_symbol_dependencies(self.actions)
+
+    def _get_time_frames(self) -> list[str]:
+        return scripting_library.get_time_frames(self.profile_data_provider.get_profile_data())
+
+    def _ensure_exchange_dependencies(self):
+        # Lazily create the exchange-data container on first use.
+        if not self.fetched_dependencies.fetched_exchange_data:
+            self.fetched_dependencies.fetched_exchange_data = octobot_flow.entities.FetchedExchangeData()
+
diff --git a/packages/flow/octobot_flow/logic/__init__.py b/packages/flow/octobot_flow/logic/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/packages/flow/octobot_flow/logic/actions/__init__.py b/packages/flow/octobot_flow/logic/actions/__init__.py
new file mode 100644
index 000000000..d40a0157a
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/actions/__init__.py
@@ -0,0 +1,5 @@
+from octobot_flow.logic.actions.actions_executor import ActionsExecutor
+
+__all__ = [
+ "ActionsExecutor",
+]
diff --git a/packages/flow/octobot_flow/logic/actions/abstract_action_executor.py b/packages/flow/octobot_flow/logic/actions/abstract_action_executor.py
new file mode 100644
index 000000000..8097c1cb1
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/actions/abstract_action_executor.py
@@ -0,0 +1,20 @@
+import typing
+
+import octobot_commons.logging
+import octobot.community
+
+import octobot_flow.entities
+
+
+class AbstractActionExecutor:
+    """
+    Base class for action executors: subclasses implement execute_action for
+    their supported action type(s).
+    """
+    def __init__(
+        self,
+    ):
+        # Pending bot log entries; presumably uploaded by the caller after
+        # execution — confirm against the calling executor.
+        self.pending_bot_logs: list[octobot.community.BotLogData] = []
+
+    async def execute_action(self, action: octobot_flow.entities.AbstractActionDetails) -> typing.Any:
+        raise NotImplementedError("execute_action is not implemented for this action type")
+
+
+    def get_logger(self) -> octobot_commons.logging.BotLogger:
+        return octobot_commons.logging.get_logger(self.__class__.__name__)
diff --git a/packages/flow/octobot_flow/logic/actions/actions_executor.py b/packages/flow/octobot_flow/logic/actions/actions_executor.py
new file mode 100644
index 000000000..4fad7ffad
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/actions/actions_executor.py
@@ -0,0 +1,208 @@
+import typing
+import time
+
+import octobot_commons.logging
+import octobot_commons.dsl_interpreter
+import octobot_trading.exchanges
+
+import octobot.community
+
+import octobot_flow.entities
+import octobot_flow.repositories.community
+import octobot_flow.logic.dsl
+import octobot_flow.enums
+import octobot_flow.errors
+
+import tentacles.Meta.DSL_operators.exchange_operators as exchange_operators
+import tentacles.Meta.DSL_operators.blockchain_wallet_operators as blockchain_wallet_operators
+
+
+class ActionsExecutor:
+    """
+    Executes a list of automation actions through the DSL executor, handles
+    DAG reset ("re-calling") results, syncs resulting state back into the
+    automation and computes the next execution schedule.
+    """
+    def __init__(
+        self,
+        maybe_community_repository: typing.Optional[octobot_flow.repositories.community.CommunityRepository],
+        exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager],
+        automation: octobot_flow.entities.AutomationDetails,
+        actions: list[octobot_flow.entities.AbstractActionDetails],
+        as_reference_account: bool,
+    ):
+        # Outputs of execute(): what changed, and when to run next (0 = immediately).
+        self.changed_elements: list[octobot_flow.enums.ChangedElements] = []
+        self.next_execution_scheduled_to: float = 0
+
+        self._maybe_community_repository: typing.Optional[
+            octobot_flow.repositories.community.CommunityRepository
+        ] = maybe_community_repository
+        self._exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager] = exchange_manager
+        self._automation: octobot_flow.entities.AutomationDetails = automation
+        self._actions: list[octobot_flow.entities.AbstractActionDetails] = actions
+        self._as_reference_account: bool = as_reference_account
+
+    async def execute(self):
+        """Run every action in order, then sync state, history, logs and schedule."""
+        dsl_executor = octobot_flow.logic.dsl.DSLExecutor(
+            self._exchange_manager, None
+        )
+        if self._exchange_manager:
+            await octobot_trading.exchanges.create_exchange_channels(self._exchange_manager)
+        recall_dag_details: typing.Optional[octobot_commons.dsl_interpreter.ReCallingOperatorResult] = None
+        async with dsl_executor.dependencies_context(self._actions):
+            for index, action in enumerate(self._actions):
+                await self._execute_action(dsl_executor, action)
+                # NOTE(review): recall_dag_details is overwritten on every
+                # iteration — a recall result with should_stop False (reset to
+                # the current action) is lost if a later action returns no
+                # recall. Confirm whether only the last result should count.
+                recall_dag_details, should_stop_processing = self._handle_execution_result(action, index)
+                if should_stop_processing:
+                    break
+        self._sync_after_execution()
+        await self._update_actions_history()
+        await self._insert_execution_bot_logs(dsl_executor.pending_bot_logs)
+        if recall_dag_details:
+            self._reset_dag_to(recall_dag_details)
+            # next execution is scheduled to the time configured by the reset operator
+            self.next_execution_scheduled_to = self._compute_next_execution_scheduled_to(
+                recall_dag_details
+            )
+        else:
+            # no reset: schedule immediately
+            self.next_execution_scheduled_to = 0
+
+    def _handle_execution_result(
+        self, action: octobot_flow.entities.AbstractActionDetails, index: int
+    ) -> tuple[typing.Optional[octobot_commons.dsl_interpreter.ReCallingOperatorResult], bool]:
+        # Returns (recall details or None, stop processing remaining actions?).
+        if not isinstance(action.result, dict):
+            return None, False
+        # Post-iteration result: may request stopping the whole automation.
+        if octobot_flow.entities.PostIterationActionsDetails.__name__ in action.result:
+            post_iteration_actions_details = octobot_flow.entities.PostIterationActionsDetails.from_dict(
+                action.result[octobot_flow.entities.PostIterationActionsDetails.__name__]
+            )
+            if post_iteration_actions_details.stop_automation:
+                self._get_logger().info(f"Stopping automation: {self._automation.metadata.automation_id}")
+                self._automation.post_actions.stop_automation = True
+                # todo cancel open orders and sell assets if required in action config
+                return None, True
+            return None, False
+        if octobot_commons.dsl_interpreter.ReCallingOperatorResult.is_re_calling_operator_result(action.result):
+            recall_dag_details = octobot_commons.dsl_interpreter.ReCallingOperatorResult.from_dict(
+                action.result[octobot_commons.dsl_interpreter.ReCallingOperatorResult.__name__]
+            )
+            if not recall_dag_details.reset_to_id:
+                # reset to the current action if no specific id is provided (loop on this action)
+                recall_dag_details.reset_to_id = action.id
+            if recall_dag_details.reset_to_id == action.id:
+                # Keep executing other selected actions if any: those are not affected by the reset
+                # as they don't depend on the reset action
+                return recall_dag_details, False
+            # Reset to a past action: interrupt execution of the following actions
+            # as they might depend on the reset action
+            if index < len(self._actions) - 1:
+                interrupted_action = self._actions[index + 1: ]
+                self._get_logger().info(
+                    f"DAG reset required. Interrupting execution of "
+                    f"{len(interrupted_action)} actions: "
+                    f"{', '.join([action.id for action in interrupted_action])}"
+                )
+            return recall_dag_details, True
+        return None, False
+
+    async def _execute_action(
+        self,
+        dsl_executor: "octobot_flow.logic.dsl.DSLExecutor",
+        action: octobot_flow.entities.AbstractActionDetails
+    ):
+        # Only DSL script actions are supported for now.
+        if isinstance(action, octobot_flow.entities.DSLScriptActionDetails):
+            return await dsl_executor.execute_action(action)
+        raise octobot_flow.errors.UnsupportedActionTypeError(
+            f"{self.__class__.__name__} does not support action type: {type(action)}"
+        ) from None
+
+    def _reset_dag_to(
+        self, recall_dag_details: octobot_commons.dsl_interpreter.ReCallingOperatorResult
+    ):
+        if not recall_dag_details.reset_to_id:
+            raise octobot_flow.errors.AutomationDAGResetError(
+                f"Reset to id is required to reset the DAG. got: {recall_dag_details}"
+            )
+        self._automation.actions_dag.reset_to(recall_dag_details.reset_to_id)
+
+    def _compute_next_execution_scheduled_to(
+        self, recall_dag_details: octobot_commons.dsl_interpreter.ReCallingOperatorResult
+    ) -> float:
+        # 0 means "run again immediately".
+        return recall_dag_details.get_next_call_time() or 0
+
+    async def _update_actions_history(self):
+        # NOTE(review): raises NotImplementedError as soon as any action must
+        # be historised — confirm this cannot be hit in the current POC.
+        if to_update_actions := [
+            action
+            for action in self._actions
+            if action.should_be_historised_in_database()
+        ]:
+            raise NotImplementedError("_update_actions_history is not implemented yet")
+
+    async def _insert_execution_bot_logs(self, log_data: list[octobot.community.BotLogData]):
+        # Best-effort upload: skipped (with a log line) when no authenticated
+        # community repository is available.
+        try:
+            community_repository = octobot_flow.repositories.community.ensure_authenticated_community_repository(
+                self._maybe_community_repository
+            )
+            await community_repository.insert_bot_logs(log_data)
+        except octobot_flow.errors.CommunityAuthenticationRequiredError:
+            # no available community repository: skip bot logs to insert
+            self._get_logger().info(
+                "No available community repository: bot logs upload is skipped"
+            )
+
+    # def _get_or_compute_actions_next_execution_scheduled_to(
+    #     self
+    # ) -> float:    #todo
+    #     for action in self._actions:
+    #         if action.next_schedule:
+    #             next_schedule_details = octobot_flow.entities.NextScheduleParams.from_dict(action.next_schedule)
+    #             return next_schedule_details.get_next_schedule_time()
+    #     return self._compute_next_execution_scheduled_to(
+    #         octobot_flow.constants.DEFAULT_EXTERNAL_TRIGGER_ONLY_NO_ORDER_TIMEFRAME
+    #     )
+
+    # def _compute_next_execution_scheduled_to(
+    #     self,
+    #     time_frame: octobot_commons.enums.TimeFrames
+    # ) -> float:
+    #     # if this was scheduled, use it as a basis to always start at the same time,
+    #     # otherwise use triggered at
+    #     current_schedule_time = (
+    #         self._automation.execution.current_execution.scheduled_to
+    #         or self._automation.execution.current_execution.triggered_at
+    #     )
+    #     return current_schedule_time + (
+    #         octobot_commons.enums.TimeFramesMinutes[time_frame] * octobot_commons.constants.MINUTE_TO_SECONDS
+    #     )
+
+    def _sync_after_execution(self):
+        # Propagate action results and exchange-manager state into the
+        # automation's exchange account elements.
+        exchange_account_elements = self._automation.get_exchange_account_elements(
+            self._as_reference_account
+        )
+        self._sync_automation_from_actions_results(exchange_account_elements)
+        self._sync_exchange_account_elements(exchange_account_elements)
+
+    def _sync_automation_from_actions_results(
+        self,
+        exchange_account_elements: typing.Union[
+            octobot_flow.entities.ReferenceExchangeAccountElements,
+            octobot_flow.entities.ClientExchangeAccountElements
+        ]
+    ):
+        # Collect withdrawals / blockchain transactions created by completed
+        # actions into the account's transaction list.
+        for action in self._actions:
+            if not action.is_completed() or not isinstance(action.result, dict):
+                continue
+            if created_transactions := (
+                action.result.get(exchange_operators.CREATED_WITHDRAWALS_KEY, [])
+                + action.result.get(blockchain_wallet_operators.CREATED_TRANSACTIONS_KEY, [])
+            ):
+                exchange_account_elements.transactions.extend(created_transactions)
+
+    def _sync_exchange_account_elements(
+        self,
+        exchange_account_elements: typing.Union[
+            octobot_flow.entities.ReferenceExchangeAccountElements,
+            octobot_flow.entities.ClientExchangeAccountElements
+        ]
+    ):
+        # Only meaningful when an exchange manager exists; records what changed.
+        if self._exchange_manager:
+            self.changed_elements = exchange_account_elements.sync_from_exchange_manager(self._exchange_manager)
+
+    def _get_logger(self) -> octobot_commons.logging.BotLogger:
+        return octobot_commons.logging.get_logger(self.__class__.__name__)
diff --git a/packages/flow/octobot_flow/logic/configuration/__init__.py b/packages/flow/octobot_flow/logic/configuration/__init__.py
new file mode 100644
index 000000000..f056acaa3
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/configuration/__init__.py
@@ -0,0 +1,9 @@
+from octobot_flow.logic.configuration.profile_data_provider import ProfileDataProvider
+from octobot_flow.logic.configuration.automation_configuration_updater import AutomationConfigurationUpdater
+from octobot_flow.logic.configuration.profile_data_factory import create_profile_data
+
+__all__ = [
+ "ProfileDataProvider",
+ "AutomationConfigurationUpdater",
+ "create_profile_data",
+]
diff --git a/packages/flow/octobot_flow/logic/configuration/automation_configuration_updater.py b/packages/flow/octobot_flow/logic/configuration/automation_configuration_updater.py
new file mode 100644
index 000000000..c83443fd2
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/configuration/automation_configuration_updater.py
@@ -0,0 +1,116 @@
+# This file is part of OctoBot Node (https://github.com/Drakkar-Software/OctoBot-Node)
+# Copyright (c) 2025 Drakkar-Software, All rights reserved.
+#
+# OctoBot is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# OctoBot is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public
+# License along with OctoBot. If not, see <https://www.gnu.org/licenses/>.
+
+import time
+import copy
+import typing
+
+import octobot_commons.logging as common_logging
+import octobot_commons.profiles.profile_data as profiles_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+import octobot_flow.entities
+import octobot_flow.errors
+
+
+class AutomationConfigurationUpdater:
+ def __init__(
+ self,
+ automation_state: octobot_flow.entities.AutomationState,
+ action: octobot_flow.entities.ConfiguredActionDetails,
+ ):
+ self.automation_state: octobot_flow.entities.AutomationState = automation_state
+ self.action: octobot_flow.entities.ConfiguredActionDetails = action
+ self._logger: common_logging.BotLogger = common_logging.get_logger(self.__class__.__name__)
+
+ async def update(self):
+ start_time = time.time()
+ try:
+ automation_state_update = octobot_flow.entities.AutomationState.from_dict(
+ self.action.config
+ )
+ except TypeError as err:
+ raise octobot_flow.errors.InvalidConfigurationActionError(
+ f"Invalid configuration update format: {err}. "
+ f"A octobot_flow.entities.AutomationState parsable dict is expected."
+ ) from err
+ self._apply_automation_state_configuration_update(automation_state_update)
+ self._register_execution_time(start_time)
+ self._complete_execution_and_register_next_schedule_time()
+ self.action.complete()
+
+ def _apply_automation_state_configuration_update(
+ self, automation_state_update: octobot_flow.entities.AutomationState
+ ):
+ if automation_state_update.exchange_account_details:
+ updating_exchange_account_id = self._update_exchange_details(
+ automation_state_update.exchange_account_details
+ )
+ if updating_exchange_account_id:
+ self._logger.info("Resetting exchange auth details as the exchange account id has changed")
+ self.automation_state.exchange_account_details.auth_details = exchange_data_import.ExchangeAuthDetails()
+ else:
+ self._update_auth_details(automation_state_update.exchange_account_details)
+ self._update_portfolio(automation_state_update.exchange_account_details)
+ self._update_automation(automation_state_update)
+
+ def _update_exchange_details(
+ self, configuration_update: octobot_flow.entities.ExchangeAccountDetails
+ ) -> bool:
+ exchange_data_update = profiles_import.ExchangeData().get_update(
+ configuration_update.exchange_details
+ )
+ updating_exchange_account_id = bool(
+ exchange_data_update.exchange_account_id
+ and exchange_data_update.exchange_account_id != self.automation_state.exchange_account_details.exchange_details.exchange_account_id
+ )
+ self.automation_state.exchange_account_details.exchange_details.update(exchange_data_update)
+ return updating_exchange_account_id
+
+ def _update_auth_details(
+ self, configuration_update: octobot_flow.entities.ExchangeAccountDetails
+ ):
+ local_auth_details = copy.deepcopy(configuration_update.auth_details)
+ base_auth_details = exchange_data_import.ExchangeAuthDetails()
+ local_auth_details.exchange_credential_id = None
+ auth_details_update = base_auth_details.get_update(local_auth_details)
+ self.automation_state.exchange_account_details.auth_details.update(auth_details_update)
+
+ def _update_portfolio(
+ self, configuration_update: octobot_flow.entities.ExchangeAccountDetails
+ ):
+ if self.automation_state.exchange_account_details.is_simulated():
+ portfolio_update = octobot_flow.entities.ExchangeAccountPortfolio().get_update(configuration_update.portfolio)
+ self.automation_state.exchange_account_details.portfolio.update(portfolio_update)
+
+ def _update_automation(
+ self, automation_state_update: octobot_flow.entities.AutomationState
+ ):
+ automation_update = automation_state_update.automation
+ base_automation = octobot_flow.entities.AutomationDetails()
+ update_result = base_automation.get_update(automation_update)
+ self.automation_state.automation.update(update_result)
+
+ def _register_execution_time(self, start_time: float):
+ automation = self.automation_state.automation
+ if automation.execution.previous_execution.triggered_at:
+ automation.execution.current_execution.triggered_at = automation.execution.previous_execution.triggered_at
+ else:
+ automation.execution.current_execution.triggered_at = start_time
+
+    def _complete_execution_and_register_next_schedule_time(self):
+        self.automation_state.automation.execution.complete_execution(0)
+        self._logger.info("Next action will trigger immediately")
diff --git a/packages/flow/octobot_flow/logic/configuration/profile_data_factory.py b/packages/flow/octobot_flow/logic/configuration/profile_data_factory.py
new file mode 100644
index 000000000..0785771e1
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/configuration/profile_data_factory.py
@@ -0,0 +1,52 @@
+import typing
+
+import octobot_commons.profiles.profile_data as profile_data_import
+import octobot_commons.constants
+import octobot_trading.enums as trading_enums
+
+import octobot_flow.entities
+import octobot_flow.logic.dsl
+
+import tentacles.Meta.Keywords.scripting_library as scripting_library
+
+
+def create_profile_data(
+ exchange_account_details: typing.Optional[octobot_flow.entities.ExchangeAccountDetails],
+ automation_id: str,
+ symbols: set[str]
+) -> profile_data_import.ProfileData:
+ crypto_currencies = _get_crypto_currencies(symbols)
+ return profile_data_import.ProfileData(
+ profile_details=profile_data_import.ProfileDetailsData(
+ bot_id=automation_id
+ ),
+ crypto_currencies=crypto_currencies,
+ exchanges=[exchange_account_details.exchange_details] if exchange_account_details else [],
+ trading=profile_data_import.TradingData(
+ reference_market=_infer_reference_market(exchange_account_details, crypto_currencies)
+ ),
+ trader_simulator=profile_data_import.TraderSimulatorData(
+ enabled=exchange_account_details.is_simulated() if exchange_account_details else True,
+ ),
+ tentacles=[], # no tentacles: only the generic dsl executor will be used
+ )
+
+def _infer_reference_market(
+    exchange_account_details: typing.Optional[octobot_flow.entities.ExchangeAccountDetails],
+    crypto_currencies: list[profile_data_import.CryptoCurrencyData]) -> str:
+    import octobot_commons.symbols  # submodule is not imported at module level: import locally before use
+    if (exchange_account_details
+            and exchange_account_details.exchange_details.exchange_type == trading_enums.ExchangeTypes.FUTURE):
+        return octobot_commons.constants.DEFAULT_REFERENCE_MARKET
+    if crypto_currencies:
+        return octobot_commons.symbols.parse_symbol(crypto_currencies[0].trading_pairs[0]).quote  # type: ignore
+    elif exchange_account_details:
+        return scripting_library.get_default_exchange_reference_market(exchange_account_details.exchange_details.internal_name)
+    return octobot_commons.constants.DEFAULT_REFERENCE_MARKET
+
+def _get_crypto_currencies(symbols: set[str]) -> list[profile_data_import.CryptoCurrencyData]:
+ return [
+ profile_data_import.CryptoCurrencyData(trading_pairs=[symbol], name=symbol)
+ for symbol in symbols
+ ]
\ No newline at end of file
diff --git a/packages/flow/octobot_flow/logic/configuration/profile_data_provider.py b/packages/flow/octobot_flow/logic/configuration/profile_data_provider.py
new file mode 100644
index 000000000..99d747a00
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/configuration/profile_data_provider.py
@@ -0,0 +1,25 @@
+import contextlib
+import typing
+import octobot_commons.profiles as commons_profiles
+
+import octobot_flow.errors
+
+
+class ProfileDataProvider:
+ def __init__(self):
+ self.profile_data: typing.Optional[commons_profiles.ProfileData] = None
+
+ @contextlib.contextmanager
+ def profile_data_context(self, profile_data: commons_profiles.ProfileData):
+ try:
+ self.profile_data = profile_data
+ yield
+ finally:
+ self.profile_data = None
+
+ def get_profile_data(self) -> commons_profiles.ProfileData:
+ if self.profile_data is None:
+ raise octobot_flow.errors.NoProfileDataError(
+ f"{self.__class__.__name__} is not in a profile data context"
+ )
+ return self.profile_data
diff --git a/packages/flow/octobot_flow/logic/dsl/__init__.py b/packages/flow/octobot_flow/logic/dsl/__init__.py
new file mode 100644
index 000000000..510e84e74
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/dsl/__init__.py
@@ -0,0 +1,14 @@
+from octobot_flow.logic.dsl.dsl_dependencies import (
+ get_actions_symbol_dependencies,
+ get_actions_time_frames_dependencies,
+)
+from octobot_flow.logic.dsl.dsl_executor import DSLExecutor
+
+from octobot_flow.logic.dsl.dsl_action_execution_context import dsl_action_execution
+
+__all__ = [
+ "get_actions_symbol_dependencies",
+ "get_actions_time_frames_dependencies",
+ "DSLExecutor",
+ "dsl_action_execution",
+]
\ No newline at end of file
diff --git a/packages/flow/octobot_flow/logic/dsl/dsl_action_execution_context.py b/packages/flow/octobot_flow/logic/dsl/dsl_action_execution_context.py
new file mode 100644
index 000000000..d1cf1a4cc
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/dsl/dsl_action_execution_context.py
@@ -0,0 +1,43 @@
+import octobot_commons.dsl_interpreter
+import octobot_commons.errors
+import octobot_commons.logging
+import octobot_trading.errors
+import octobot_trading.enums
+
+import octobot_flow.entities
+import octobot_flow.enums
+
+
+def dsl_action_execution(func):
+ async def _action_execution_error_handler_wrapper(
+ self, action: octobot_flow.entities.DSLScriptActionDetails
+ ):
+ try:
+ call_result: octobot_commons.dsl_interpreter.DSLCallResult = await func(self, action)
+ if call_result.succeeded():
+ action.complete(result=call_result.result)
+ else:
+ action.complete(error_status=call_result.error)
+ except octobot_trading.errors.DisabledFundsTransferError as err:
+ action.complete(error_status=octobot_flow.enums.ActionErrorStatus.DISABLED_FUNDS_TRANSFER_ERROR.value)
+ except octobot_trading.errors.MissingMinimalExchangeTradeVolume as err:
+ action.complete(error_status=octobot_flow.enums.ActionErrorStatus.INVALID_ORDER.value)
+ except (octobot_trading.errors.UnsupportedHedgeContractError, octobot_trading.errors.InvalidPositionSide) as err:
+ action.complete(error_status=octobot_flow.enums.ActionErrorStatus.UNSUPPORTED_HEDGE_POSITION.value)
+ except octobot_trading.errors.ExchangeAccountSymbolPermissionError as err:
+ action.complete(error_status=octobot_flow.enums.ActionErrorStatus.SYMBOL_INCOMPATIBLE_WITH_ACCOUNT.value)
+ except octobot_commons.errors.InvalidParameterFormatError as err:
+ action.complete(error_status=octobot_flow.enums.ActionErrorStatus.INVALID_SIGNAL_FORMAT.value)
+ except octobot_trading.errors.NotSupportedOrderTypeError as err:
+ if err.order_type == octobot_trading.enums.TraderOrderType.STOP_LOSS:
+ action.complete(error_status=octobot_flow.enums.ActionErrorStatus.UNSUPPORTED_STOP_ORDER.value)
+ else:
+ action.complete(error_status=octobot_flow.enums.ActionErrorStatus.INVALID_ORDER.value)
+ except octobot_trading.errors.BlockchainWalletError as err:
+ action.complete(error_status=octobot_flow.enums.ActionErrorStatus.BLOCKCHAIN_WALLET_ERROR.value)
+ except Exception as err:
+ octobot_commons.logging.get_logger("action_execution").exception(
+ err, True, f"Failed to interpret DSL script '{action.get_summary()}' for action: {action.id}: {err}"
+ )
+ action.complete(error_status=octobot_flow.enums.ActionErrorStatus.INTERNAL_ERROR.value)
+ return _action_execution_error_handler_wrapper
diff --git a/packages/flow/octobot_flow/logic/dsl/dsl_dependencies.py b/packages/flow/octobot_flow/logic/dsl/dsl_dependencies.py
new file mode 100644
index 000000000..12ac53a5b
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/dsl/dsl_dependencies.py
@@ -0,0 +1,44 @@
+import octobot_trading.dsl
+import octobot_commons.enums
+import octobot_flow.entities
+import octobot_flow.logic.dsl.dsl_executor as dsl_executor
+
+
+def get_actions_symbol_dependencies(
+ actions: list[octobot_flow.entities.AbstractActionDetails]
+) -> list[str]:
+ all_symbol_dependencies = [
+ _get_symbol_dependencies(action.get_resolved_dsl_script())
+ for action in actions
+ if isinstance(action, octobot_flow.entities.DSLScriptActionDetails)
+ ]
+ return list(set(
+ symbol_dependency.symbol
+ for symbol_dependencies in all_symbol_dependencies
+ for symbol_dependency in symbol_dependencies
+ ))
+
+
+def get_actions_time_frames_dependencies(
+ actions: list[octobot_flow.entities.AbstractActionDetails]
+) -> list[octobot_commons.enums.TimeFrames]:
+ all_symbol_dependencies = [
+ _get_symbol_dependencies(action.get_resolved_dsl_script())
+ for action in actions
+ if isinstance(action, octobot_flow.entities.DSLScriptActionDetails)
+ ]
+ return list(set(
+ octobot_commons.enums.TimeFrames(symbol_dependency.time_frame)
+ for symbol_dependencies in all_symbol_dependencies
+ for symbol_dependency in symbol_dependencies
+ if symbol_dependency.time_frame
+ ))
+
+
+def _get_symbol_dependencies(dsl_script: str) -> list[octobot_trading.dsl.SymbolDependency]:
+ dependencies_only_executor = dsl_executor.DSLExecutor(None, dsl_script)
+ return [
+ symbol_dependency
+ for symbol_dependency in dependencies_only_executor.get_dependencies()
+ if isinstance(symbol_dependency, octobot_trading.dsl.SymbolDependency)
+ ]
diff --git a/packages/flow/octobot_flow/logic/dsl/dsl_executor.py b/packages/flow/octobot_flow/logic/dsl/dsl_executor.py
new file mode 100644
index 000000000..ab30173ae
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/dsl/dsl_executor.py
@@ -0,0 +1,106 @@
+import typing
+import contextlib
+
+import octobot_commons.dsl_interpreter
+import octobot_commons.signals
+import octobot_commons.errors
+import octobot_trading.exchanges
+import octobot_trading.dsl
+
+import tentacles.Meta.DSL_operators as dsl_operators
+
+import octobot_flow.entities
+import octobot_flow.errors
+
+# avoid circular import
+from octobot_flow.logic.dsl.dsl_action_execution_context import dsl_action_execution
+from octobot_flow.logic.actions.abstract_action_executor import AbstractActionExecutor
+
+
+
+class DSLExecutor(AbstractActionExecutor):
+ def __init__(
+ self,
+ exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager],
+ dsl_script: typing.Optional[str],
+ dependencies: typing.Optional[octobot_commons.signals.SignalDependencies] = None,
+ ):
+ super().__init__()
+
+ self._exchange_manager = exchange_manager
+ self._dependencies = dependencies
+ self._interpreter: octobot_commons.dsl_interpreter.Interpreter = self._create_interpreter(None)
+ if dsl_script:
+ self._interpreter.prepare(dsl_script)
+
+ def _create_interpreter(
+ self, previous_execution_result: typing.Optional[dict]
+ ):
+ return octobot_commons.dsl_interpreter.Interpreter(
+ octobot_commons.dsl_interpreter.get_all_operators()
+ + dsl_operators.create_ohlcv_operators(self._exchange_manager, None, None)
+ + dsl_operators.create_portfolio_operators(self._exchange_manager)
+ + dsl_operators.create_create_order_operators(
+ self._exchange_manager, trading_mode=None, dependencies=self._dependencies
+ )
+ + dsl_operators.create_cancel_order_operators(
+ self._exchange_manager, trading_mode=None, dependencies=self._dependencies
+ )
+ + dsl_operators.create_blockchain_wallet_operators(self._exchange_manager)
+ )
+
+ def get_dependencies(self) -> list[
+ octobot_commons.dsl_interpreter.InterpreterDependency
+ ]:
+ return self._interpreter.get_dependencies()
+
+ @dsl_action_execution
+ async def execute_action(self, action: octobot_flow.entities.DSLScriptActionDetails) -> typing.Any:
+ self._interpreter = self._create_interpreter(
+ action.previous_execution_result
+ )
+ expression = action.get_resolved_dsl_script()
+ try:
+ return octobot_commons.dsl_interpreter.DSLCallResult(
+ statement=expression,
+ result=await self._interpreter.interprete(expression),
+ )
+ except octobot_commons.errors.ErrorStatementEncountered as err:
+ return octobot_commons.dsl_interpreter.DSLCallResult(
+ statement=expression,
+ error=err.args[0] if err.args else ""
+ )
+
+ @contextlib.asynccontextmanager
+ async def dependencies_context(
+ self, actions: list[octobot_flow.entities.AbstractActionDetails]
+ ) -> typing.AsyncGenerator[None, None]:
+ try:
+ all_dependencies = self._get_all_dependencies(actions) if actions else []
+ # 1. validate static dependencies
+ self._validate_dependencies(all_dependencies)
+ # 2. instanciate dynamic dependencies
+ # todo initialize dynamic dependencies when implemented
+ yield
+ finally:
+ # todo clean up dynamic dependencies when required
+ pass
+
+ def _validate_dependencies(self, dependencies: list[octobot_commons.dsl_interpreter.InterpreterDependency]):
+ if any(
+ isinstance(dependency, octobot_trading.dsl.SymbolDependency) for dependency in dependencies
+ ) and not self._exchange_manager:
+ raise octobot_flow.errors.MissingDSLExecutorDependencyError(
+ "Exchange manager is required when using symbol dependencies"
+ )
+
+ def _get_all_dependencies(
+ self, actions: list[octobot_flow.entities.AbstractActionDetails]
+ ) -> list[octobot_commons.dsl_interpreter.InterpreterDependency]:
+ dependencies = []
+ for action in actions:
+ if isinstance(action, octobot_flow.entities.DSLScriptActionDetails):
+ dsl_script = action.get_resolved_dsl_script()
+ self._interpreter.prepare(dsl_script)
+ dependencies.extend(self._interpreter.get_dependencies())
+ return dependencies
diff --git a/packages/flow/octobot_flow/logic/exchange/__init__.py b/packages/flow/octobot_flow/logic/exchange/__init__.py
new file mode 100644
index 000000000..63f01bb8a
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/exchange/__init__.py
@@ -0,0 +1,5 @@
+from octobot_flow.logic.exchange.sub_portfolio_resolver import SubPortfolioResolver
+
+__all__ = [
+ "SubPortfolioResolver",
+]
diff --git a/packages/flow/octobot_flow/logic/exchange/sub_portfolio_resolver.py b/packages/flow/octobot_flow/logic/exchange/sub_portfolio_resolver.py
new file mode 100644
index 000000000..259d13c7f
--- /dev/null
+++ b/packages/flow/octobot_flow/logic/exchange/sub_portfolio_resolver.py
@@ -0,0 +1,31 @@
+import octobot_flow.entities
+import octobot_commons.constants as common_constants
+
+
+class SubPortfolioResolver:
+ def __init__(self, automation_state: octobot_flow.entities.AutomationState):
+ self._automation_state = automation_state
+
+ async def resolve_sub_portfolios(self):
+ # equivalent to serverless global view update
+ # 1. identify missing orders #TODO
+ # 2. resolve missing orders #TODO
+ # 3. resolve (sub)portfolios
+ if not self._automation_state.exchange_account_details.is_simulated():
+ await self._resolve_full_portfolio(self._automation_state.automation)
+ # await self._resolve_sub_portfolio(bot_details)
+
+ async def _resolve_sub_portfolio(self, automation: octobot_flow.entities.AutomationDetails):
+ # TODO: implement to support sub portfolios
+ # for now only uses the global portfolio content
+ raise NotImplementedError("SubPortfolioResolver._resolve_sub_portfolio is not implemented")
+
+ async def _resolve_full_portfolio(self, automation: octobot_flow.entities.AutomationDetails):
+ automation.client_exchange_account_elements.portfolio.content = {
+ asset.asset: {
+ common_constants.PORTFOLIO_AVAILABLE: asset.available,
+ common_constants.PORTFOLIO_TOTAL: asset.total,
+ }
+ for asset in self._automation_state.exchange_account_details.portfolio.content
+ if asset.total > 0
+ }
diff --git a/packages/flow/octobot_flow/parsers/__init__.py b/packages/flow/octobot_flow/parsers/__init__.py
new file mode 100644
index 000000000..8c8116ef3
--- /dev/null
+++ b/packages/flow/octobot_flow/parsers/__init__.py
@@ -0,0 +1,9 @@
+from octobot_flow.parsers.actions_dag_parser import (
+ ActionsDAGParser,
+ key_val_to_dict,
+)
+
+__all__ = [
+ "ActionsDAGParser",
+ "key_val_to_dict",
+]
\ No newline at end of file
diff --git a/packages/flow/octobot_flow/parsers/actions_dag_parser.py b/packages/flow/octobot_flow/parsers/actions_dag_parser.py
new file mode 100644
index 000000000..3d9c83ccb
--- /dev/null
+++ b/packages/flow/octobot_flow/parsers/actions_dag_parser.py
@@ -0,0 +1,457 @@
+import typing
+import dataclasses
+import enum
+import uuid
+
+import octobot_commons.constants as commons_constants
+import octobot_commons.symbols
+import octobot_commons.profiles.profile_data as profiles_import
+import octobot_commons.dataclasses
+import octobot_trading.blockchain_wallets as blockchain_wallets
+import octobot_trading.blockchain_wallets.simulator.blockchain_wallet_simulator as blockchain_wallets_simulator
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+import octobot_flow.errors
+import octobot_flow.entities
+import octobot_flow.enums
+
+import tentacles.Trading.Mode.trading_view_signals_trading_mode.actions_params as actions_params
+import tentacles.Trading.Mode.trading_view_signals_trading_mode.trading_view_signals_trading as trading_view_signals_trading
+import tentacles.Trading.Mode.trading_view_signals_trading_mode.tradingview_signal_to_dsl_translator as tradingview_signal_to_dsl_translator
+
+def key_val_to_dict(key_val: str) -> dict:
+ return trading_view_signals_trading.TradingViewSignalsTradingMode.parse_signal_data(key_val, None, None, None, [])
+
+
+class ActionType(enum.Enum):
+ WAIT = "wait"
+ TRADE = "trade"
+ CANCEL = "cancel"
+ WITHDRAW = "withdraw"
+ DEPOSIT = "deposit"
+ TRANSFER = "transfer"
+
+
+@dataclasses.dataclass
+class ActionsDAGParserParams(octobot_commons.dataclasses.FlexibleDataclass):
+ ACTIONS: list[str] = dataclasses.field(default_factory=list)
+ AUTOMATION_ID: str = dataclasses.field(default_factory=lambda: str(uuid.uuid4()))
+ EXCHANGE_TO: typing.Optional[str] = None
+ API_KEY: typing.Optional[str] = None
+ API_SECRET: typing.Optional[str] = None
+ SIMULATED_PORTFOLIO: typing.Optional[dict[str, float]] = None
+ ORDER_SIDE: typing.Optional[str] = None
+ ORDER_SYMBOL: typing.Optional[str] = None
+ ORDER_AMOUNT: typing.Optional[float] = None
+ ORDER_PRICE: typing.Optional[float] = None
+ ORDER_STOP_PRICE: typing.Optional[float] = None
+ ORDER_TAG: typing.Optional[str] = None
+ ORDER_REDUCE_ONLY: typing.Optional[bool] = None
+ ORDER_TYPE: typing.Optional[str] = None
+ EXCHANGE_FROM: typing.Optional[str] = None
+ MIN_DELAY: typing.Optional[float] = None
+ MAX_DELAY: typing.Optional[float] = None
+ BLOCKCHAIN_FROM: typing.Optional[str] = None
+ BLOCKCHAIN_FROM_AMOUNT: typing.Optional[float] = None
+ BLOCKCHAIN_FROM_ASSET: typing.Optional[str] = None
+ BLOCKCHAIN_FROM_ADDRESS: typing.Optional[str] = None
+ BLOCKCHAIN_FROM_MNEMONIC_SEED: typing.Optional[str] = None
+ BLOCKCHAIN_FROM_BLOCK_HEIGHT: typing.Optional[int] = None
+ BLOCKCHAIN_FROM_SECRET_VIEW_KEY: typing.Optional[str] = None
+ BLOCKCHAIN_FROM_SECRET_SPEND_KEY: typing.Optional[str] = None
+ BLOCKCHAIN_FROM_PRIVATE_KEY: typing.Optional[str] = None
+ BLOCKCHAIN_TO: typing.Optional[str] = None
+ BLOCKCHAIN_TO_ASSET: typing.Optional[str] = None
+ BLOCKCHAIN_TO_AMOUNT: typing.Optional[float] = None
+ BLOCKCHAIN_TO_ADDRESS: typing.Optional[str] = None
+ BLOCKCHAIN_TO_MNEMONIC_SEED: typing.Optional[str] = None
+ BLOCKCHAIN_TO_BLOCK_HEIGHT: typing.Optional[int] = None
+ BLOCKCHAIN_TO_SECRET_VIEW_KEY: typing.Optional[str] = None
+ BLOCKCHAIN_TO_SECRET_SPEND_KEY: typing.Optional[str] = None
+ BLOCKCHAIN_TO_PRIVATE_KEY: typing.Optional[str] = None
+
+ def __post_init__(self):
+ if self.ACTIONS and isinstance(self.ACTIONS, str):
+ # action is a string, convert it to a list
+ self.ACTIONS = self.ACTIONS.split(",") # pylint: disable=no-member
+ self.validate()
+
+ def validate(self):
+ if self.EXCHANGE_TO and self.EXCHANGE_FROM:
+ if self.EXCHANGE_TO != self.EXCHANGE_FROM:
+ raise octobot_flow.errors.InvalidAutomationActionError("EXCHANGE_TO and EXCHANGE_FROM must be the same")
+
+ def get_reference_market(self) -> typing.Optional[str]:
+ if self.ORDER_SYMBOL:
+ parsed_symbol = octobot_commons.symbols.parse_symbol(self.ORDER_SYMBOL)
+ return parsed_symbol.quote
+ return None
+
+ def has_next_schedule(self) -> bool:
+ return self.MIN_DELAY is not None or self.MAX_DELAY is not None
+
+ def _get_next_schedule_delay(self) -> tuple[float, float]:
+ if self.MIN_DELAY is None and self.MAX_DELAY is None:
+ return 0, 0
+ if self.MIN_DELAY is not None and self.MAX_DELAY is None:
+ return self.MIN_DELAY, self.MIN_DELAY # type: ignore
+ if self.MIN_DELAY is None and self.MAX_DELAY is not None:
+ return self.MAX_DELAY, self.MAX_DELAY # type: ignore
+ return self.MIN_DELAY, self.MAX_DELAY # type: ignore
+
+ def get_exchange_internal_name(self) -> typing.Optional[str]:
+ if self.EXCHANGE_TO or self.EXCHANGE_FROM:
+ return (self.EXCHANGE_TO or self.EXCHANGE_FROM).lower() # type: ignore
+ return None
+
+    def get_blockchain_and_wallet_descriptors_from_wallet_details(
+        self
+    ) -> dict[str, typing.Any]:
+        if (
+            not self.BLOCKCHAIN_FROM or
+            not self.BLOCKCHAIN_FROM_ASSET or
+            not self.BLOCKCHAIN_FROM_AMOUNT
+        ):
+            raise octobot_flow.errors.InvalidAutomationActionError(
+                f"BLOCKCHAIN_FROM, BLOCKCHAIN_FROM_ASSET, BLOCKCHAIN_FROM_ADDRESS and BLOCKCHAIN_FROM_AMOUNT "
+                f"must be provided for a blockchain from wallet"
+            )
+        if not (
+            # at least one set of sending credentials is required
+            self.BLOCKCHAIN_FROM_PRIVATE_KEY
+            or self.BLOCKCHAIN_FROM_MNEMONIC_SEED
+            or (
+                self.BLOCKCHAIN_FROM_SECRET_VIEW_KEY
+                and self.BLOCKCHAIN_FROM_SECRET_SPEND_KEY
+            )
+        ):
+            raise octobot_flow.errors.InvalidAutomationActionError(
+                f"BLOCKCHAIN_FROM_PRIVATE_KEY, BLOCKCHAIN_FROM_MNEMONIC_SEED, BLOCKCHAIN_FROM_SECRET_VIEW_KEY "
+                f"or BLOCKCHAIN_FROM_SECRET_SPEND_KEY must be provided for a blockchain from wallet"
+            )
+        blockchain, blockchain_descriptor_specific_config, wallet_descriptor_specific_config = self.get_blockchain_and_specific_configs(self.BLOCKCHAIN_FROM)
+        return {
+            "blockchain_descriptor": blockchain_wallets.BlockchainDescriptor(
+                blockchain=blockchain,
+                network=self.BLOCKCHAIN_FROM,
+                native_coin_symbol=self.BLOCKCHAIN_FROM_ASSET,
+                specific_config=blockchain_descriptor_specific_config,
+            ),
+            "wallet_descriptor": blockchain_wallets.WalletDescriptor(
+                address=self.BLOCKCHAIN_FROM_ADDRESS,
+                private_key=self.BLOCKCHAIN_FROM_PRIVATE_KEY,
+                mnemonic_seed=self.BLOCKCHAIN_FROM_MNEMONIC_SEED,
+                specific_config=wallet_descriptor_specific_config,
+            )
+        }
+
+    def get_blockchain_to_wallet_details(
+        self
+    ) -> blockchain_wallets.BlockchainWalletParameters:
+        if (
+            not self.BLOCKCHAIN_TO or
+            not self.BLOCKCHAIN_TO_ADDRESS or not self.BLOCKCHAIN_TO_ASSET
+        ):
+            raise octobot_flow.errors.InvalidAutomationActionError(
+                f"BLOCKCHAIN_TO, BLOCKCHAIN_TO_ADDRESS and BLOCKCHAIN_TO_ASSET must be provided for a blockchain to wallet"
+            )
+        if not (
+            self.BLOCKCHAIN_TO_ADDRESS
+            and not self.BLOCKCHAIN_TO_PRIVATE_KEY
+            and not self.BLOCKCHAIN_TO_MNEMONIC_SEED
+            and not self.BLOCKCHAIN_TO_SECRET_VIEW_KEY
+        ):
+            raise octobot_flow.errors.InvalidAutomationActionError(
+                f"only BLOCKCHAIN_TO_ADDRESS must be provided for a blockchain to wallet: BLOCKCHAIN_TO_PRIVATE_KEY, "
+                f"BLOCKCHAIN_TO_MNEMONIC_SEED and BLOCKCHAIN_TO_SECRET_VIEW_KEY must not be set"
+            )
+        blockchain, blockchain_descriptor_specific_config, wallet_descriptor_specific_config = self.get_blockchain_and_specific_configs(self.BLOCKCHAIN_TO)
+        return blockchain_wallets.BlockchainWalletParameters(
+            blockchain_descriptor=blockchain_wallets.BlockchainDescriptor(
+                blockchain=blockchain,
+                network=self.BLOCKCHAIN_TO,
+                native_coin_symbol=self.BLOCKCHAIN_TO_ASSET,
+                specific_config=blockchain_descriptor_specific_config,
+            ),
+            wallet_descriptor=blockchain_wallets.WalletDescriptor(
+                address=self.BLOCKCHAIN_TO_ADDRESS,
+                specific_config=wallet_descriptor_specific_config,
+            )
+        )
+
+ def get_blockchain_and_specific_configs(
+ self, blockchain: str
+ ) -> tuple[str, dict, dict]:
+ blockchain_wallet_class = blockchain_wallets.get_blockchain_wallet_class_by_blockchain()[blockchain.lower()]
+ simulator_config = {
+ blockchain_wallets_simulator.BlockchainWalletSimulatorConfigurationKeys.ASSETS.value: {
+ self.BLOCKCHAIN_FROM_ASSET: self.BLOCKCHAIN_FROM_AMOUNT,
+ }
+ }
+ specific_config = self._create_generic_blockchain_wallet_specific_config(blockchain)
+ all_config = {**simulator_config, **specific_config}
+ return (
+ blockchain_wallet_class.BLOCKCHAIN,
+ blockchain_wallet_class.create_blockchain_descriptor_specific_config(**all_config),
+ blockchain_wallet_class.create_wallet_descriptor_specific_config(**all_config),
+ )
+
+ def _create_generic_blockchain_wallet_specific_config(self, blockchain: str) -> dict:
+ is_blockchain_from = blockchain == self.BLOCKCHAIN_FROM
+ prefix = "BLOCKCHAIN_FROM_" if is_blockchain_from else "BLOCKCHAIN_TO_"
+ return {
+ key.replace(prefix, "").lower(): value
+ for key, value in dataclasses.asdict(self).items()
+ if key.startswith(prefix)
+ }
+
+class ActionsDAGParser:
+ def __init__(self, params: dict):
+ self.params: ActionsDAGParserParams = ActionsDAGParserParams.from_dict(params)
+ self.blockchain_param_index = 0
+
+ def parse(self) -> octobot_flow.entities.ActionsDAG:
+ init_action = self._create_init_action(
+ self.params.AUTOMATION_ID,
+ self.params.get_exchange_internal_name(),
+ self.params.API_KEY,
+ self.params.API_SECRET,
+ self.params.SIMULATED_PORTFOLIO,
+ )
+ actions_dag = octobot_flow.entities.ActionsDAG([init_action])
+ self._parse_generic_actions(actions_dag)
+ return actions_dag
+
+ def _parse_generic_actions(self, actions_dag: octobot_flow.entities.ActionsDAG) -> None:
+ latest_action = actions_dag.get_executable_actions()[0]
+ for index, action in enumerate(self.params.ACTIONS):
+ new_action = self._create_generic_action(action, index + 1)
+ new_action.add_dependency(latest_action.id)
+ actions_dag.add_action(new_action)
+ latest_action = new_action
+
+ def _create_generic_action(
+ self, action: str, index: int
+ ) -> octobot_flow.entities.AbstractActionDetails:
+ match action:
+ case ActionType.TRADE.value:
+ return self._create_order_action(index)
+ case ActionType.CANCEL.value:
+ return self._create_cancel_action(index)
+ case ActionType.WITHDRAW.value:
+ return self._create_withdraw_action(index)
+ case ActionType.DEPOSIT.value:
+ return self._create_deposit_action(index)
+ case ActionType.TRANSFER.value:
+ return self._create_transfer_action(index)
+ case ActionType.WAIT.value:
+ return self._create_wait_action(index)
+ case _:
+ raise ValueError(
+ f"Unknown action: {action}"
+ )
+
    def _create_order_action(self, index: int) -> octobot_flow.entities.AbstractActionDetails:
        """Build a trade (buy/sell) action in TradingView signal format from self.params.

        The order side is taken from ORDER_SIDE when set; otherwise it is inferred
        from where the two blockchain assets sit in ORDER_SYMBOL (base vs quote).

        :param index: positional index of the action, used in the generated action id
        :raises octobot_flow.errors.InvalidAutomationActionError: when the side cannot
            be inferred from ORDER_SYMBOL
        """
        self._ensure_params(
            ["ORDER_SYMBOL", "ORDER_AMOUNT", "ORDER_TYPE"],
            "trade",
        )
        parsed_symbol = octobot_commons.symbols.parse_symbol(self.params.ORDER_SYMBOL)
        if self.params.ORDER_SIDE:
            # an explicitly configured side always wins over inference
            signal = self.params.ORDER_SIDE.lower()
        elif parsed_symbol.base == self.params.BLOCKCHAIN_FROM_ASSET and parsed_symbol.quote == self.params.BLOCKCHAIN_TO_ASSET: # type: ignore
            # sell the first blockchain asset to get the second one
            signal = trading_view_signals_trading.TradingViewSignalsTradingMode.SELL_SIGNAL
        elif parsed_symbol.base == self.params.BLOCKCHAIN_TO_ASSET and parsed_symbol.quote == self.params.BLOCKCHAIN_FROM_ASSET: # type: ignore
            # buy the second blockchain asset to get the first one
            signal = trading_view_signals_trading.TradingViewSignalsTradingMode.BUY_SIGNAL
        else:
            raise octobot_flow.errors.InvalidAutomationActionError(
                f"Invalid order symbol: {self.params.ORDER_SYMBOL}: symbol must contain the 2 "
                f"blockchain assets to determine the side of the order"
            )
        # mandatory TradingView-format order fields
        order_details = {
            trading_view_signals_trading.TradingViewSignalsTradingMode.EXCHANGE_KEY: self.params.get_exchange_internal_name(),
            trading_view_signals_trading.TradingViewSignalsTradingMode.SYMBOL_KEY: self.params.ORDER_SYMBOL,
            trading_view_signals_trading.TradingViewSignalsTradingMode.VOLUME_KEY: self.params.ORDER_AMOUNT,
            trading_view_signals_trading.TradingViewSignalsTradingMode.ORDER_TYPE_SIGNAL: self.params.ORDER_TYPE,
        }
        # optional order fields: only forwarded when set
        if self.params.ORDER_PRICE:
            order_details[trading_view_signals_trading.TradingViewSignalsTradingMode.PRICE_KEY] = self.params.ORDER_PRICE
        if self.params.ORDER_STOP_PRICE:
            order_details[trading_view_signals_trading.TradingViewSignalsTradingMode.STOP_PRICE_KEY] = self.params.ORDER_STOP_PRICE
        if self.params.ORDER_TAG:
            order_details[trading_view_signals_trading.TradingViewSignalsTradingMode.TAG_KEY] = self.params.ORDER_TAG
        if self.params.ORDER_REDUCE_ONLY:
            order_details[trading_view_signals_trading.TradingViewSignalsTradingMode.REDUCE_ONLY_KEY] = self.params.ORDER_REDUCE_ONLY
        return self.create_dsl_script_from_tv_format_action_details(
            f"action_trade_{index}", signal, order_details,
        )
+
+ def _create_cancel_action(self, index: int) -> octobot_flow.entities.AbstractActionDetails:
+ self._ensure_params(
+ ["ORDER_SYMBOL"],
+ "cancel",
+ )
+ cancel_details = {
+ trading_view_signals_trading.TradingViewSignalsTradingMode.SYMBOL_KEY: self.params.ORDER_SYMBOL,
+ }
+ if self.params.ORDER_SIDE:
+ cancel_details[trading_view_signals_trading.TradingViewSignalsTradingMode.SIDE_PARAM_KEY] = self.params.ORDER_SIDE.lower()
+ if self.params.ORDER_TAG:
+ cancel_details[trading_view_signals_trading.TradingViewSignalsTradingMode.TAG_KEY] = self.params.ORDER_TAG
+ return self.create_dsl_script_from_tv_format_action_details(
+ f"action_cancel_{index}",
+ trading_view_signals_trading.TradingViewSignalsTradingMode.CANCEL_SIGNAL,
+ cancel_details,
+ )
+
+ def _create_withdraw_action(
+ self, index: int
+ ) -> octobot_flow.entities.AbstractActionDetails:
+ self._ensure_params(
+ ["BLOCKCHAIN_TO_ASSET", "BLOCKCHAIN_TO", "BLOCKCHAIN_TO_ADDRESS"],
+ "withdraw",
+ )
+ withdraw_details = actions_params.WithdrawFundsParams(
+ asset=self.params.BLOCKCHAIN_TO_ASSET,
+ network=self.params.BLOCKCHAIN_TO,
+ address=self.params.BLOCKCHAIN_TO_ADDRESS,
+ )
+ if self.params.BLOCKCHAIN_TO_AMOUNT:
+ withdraw_details.amount = self.params.BLOCKCHAIN_TO_AMOUNT
+ return self.create_dsl_script_from_tv_format_action_details(
+ f"action_withdraw_{index}",
+ trading_view_signals_trading.TradingViewSignalsTradingMode.WITHDRAW_FUNDS_SIGNAL,
+ dataclasses.asdict(withdraw_details),
+ )
+
+ def _create_deposit_action(
+ self, index: int
+ ) -> octobot_flow.entities.AbstractActionDetails:
+ self._ensure_params(
+ ["BLOCKCHAIN_FROM_ASSET", "BLOCKCHAIN_FROM_AMOUNT", "BLOCKCHAIN_FROM", "EXCHANGE_TO"],
+ "deposit",
+ )
+ deposit_details = actions_params.TransferFundsParams(
+ asset=self.params.BLOCKCHAIN_FROM_ASSET,
+ amount=self.params.BLOCKCHAIN_FROM_AMOUNT,
+ address=None,
+ destination_exchange=self.params.EXCHANGE_TO,
+ **self.params.get_blockchain_and_wallet_descriptors_from_wallet_details(),
+ )
+ return self.create_dsl_script_from_tv_format_action_details(
+ f"action_deposit_{index}",
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TRANSFER_FUNDS_SIGNAL,
+ dataclasses.asdict(deposit_details),
+ )
+
+ def _create_transfer_action(
+ self, index: int
+ ) -> octobot_flow.entities.AbstractActionDetails:
+ self._ensure_params(
+ ["BLOCKCHAIN_FROM_ASSET", "BLOCKCHAIN_FROM_AMOUNT", "BLOCKCHAIN_FROM", "BLOCKCHAIN_TO_ADDRESS"],
+ "transfer",
+ )
+ if self.params.BLOCKCHAIN_TO != self.params.BLOCKCHAIN_FROM:
+ raise octobot_flow.errors.InvalidAutomationActionError(
+ f"BLOCKCHAIN_TO and BLOCKCHAIN_FROM must be the same for a transfer action"
+ )
+ transfer_details = actions_params.TransferFundsParams(
+ asset=self.params.BLOCKCHAIN_FROM_ASSET,
+ amount=self.params.BLOCKCHAIN_FROM_AMOUNT,
+ address=self.params.BLOCKCHAIN_TO_ADDRESS,
+ **self.params.get_blockchain_and_wallet_descriptors_from_wallet_details(),
+ )
+ return self.create_dsl_script_from_tv_format_action_details(
+ f"action_transfer_{index}",
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TRANSFER_FUNDS_SIGNAL,
+ dataclasses.asdict(transfer_details),
+ )
+
    def _create_wait_action(self, index: int) -> octobot_flow.entities.AbstractActionDetails:
        """Build a DSL wait action from the params schedule delays.

        :param index: positional index of the action, used in the generated action id
        :raises octobot_flow.errors.InvalidAutomationActionError: when no schedule delay is configured
        """
        if not self.params.has_next_schedule():
            raise octobot_flow.errors.InvalidAutomationActionError(
                f"{ActionType.WAIT.value} action requires at least a MIN_DELAY"
            )
        # NOTE(review): reaches into a private helper of params — consider a public accessor
        min_delay, max_delay = self.params._get_next_schedule_delay()
        # only emit the optional max delay when it adds information beyond min_delay
        max_delay_str = f", {max_delay}" if max_delay and max_delay != min_delay else ""
        dsl_script = f"wait({min_delay}{max_delay_str}, return_remaining_time=True)"
        return octobot_flow.entities.DSLScriptActionDetails(
            id=f"action_wait_{index}",
            dsl_script=dsl_script,
        )
+
+ def _ensure_params(self, keys: list[str], action: str) -> None:
+ missing_keys = []
+ for key in keys:
+ if not getattr(self.params, key):
+ missing_keys.append(key)
+ if missing_keys:
+ raise octobot_flow.errors.InvalidAutomationActionError(
+ f"Missing keys: {', '.join(missing_keys)} (required: {', '.join(keys)}) "
+ f"for a {action} action"
+ )
+
    def _create_init_action(
        self,
        automation_id: str,
        exchange_internal_name: typing.Optional[str],
        api_key: typing.Optional[str],
        api_secret: typing.Optional[str],
        simulated_portfolio: typing.Optional[dict[str, float]],
    ) -> octobot_flow.entities.AbstractActionDetails:
        """Build the initial APPLY_CONFIGURATION action carrying the automation state.

        :param automation_id: id of the automation to configure
        :param exchange_internal_name: exchange to attach credentials to; when None,
            no exchange account details are included
        :param api_key: exchange api key (empty string is used when None)
        :param api_secret: exchange api secret (empty string is used when None)
        :param simulated_portfolio: optional {asset: amount} simulated holdings
        """
        # expand each {asset: amount} entry into total/available portfolio content
        formatted_simulated_portfolio = {
            asset: {
                commons_constants.PORTFOLIO_TOTAL: value,
                commons_constants.PORTFOLIO_AVAILABLE: value,
            }
            for asset, value in simulated_portfolio.items()
        } if simulated_portfolio else None
        automation_details = octobot_flow.entities.AutomationDetails(
            metadata=octobot_flow.entities.AutomationMetadata(
                automation_id=automation_id,
            ),
            client_exchange_account_elements=octobot_flow.entities.ClientExchangeAccountElements(
                portfolio=exchange_data_import.PortfolioDetails(
                    content=formatted_simulated_portfolio,
                )
            ),
        )
        # exchange account is optional: only built when an exchange is configured
        exchange_account_details = octobot_flow.entities.ExchangeAccountDetails(
            exchange_details=profiles_import.ExchangeData(
                internal_name=exchange_internal_name,
            ),
            auth_details=exchange_data_import.ExchangeAuthDetails(
                api_key=api_key or "",
                api_secret=api_secret or "",
            ),
        ) if exchange_internal_name else None
        automation_state = octobot_flow.entities.AutomationState(
            automation=automation_details,
            exchange_account_details=exchange_account_details,
        )
        return self.create_configured_action_details(
            "action_init",
            octobot_flow.enums.ActionType.APPLY_CONFIGURATION,
            automation_state.to_dict(include_default_values=False),
        )
+
+ def create_dsl_script_from_tv_format_action_details(
+ self, action_id: str, signal: str, details: dict
+ ) -> octobot_flow.entities.DSLScriptActionDetails:
+ dsl_script = tradingview_signal_to_dsl_translator.TradingViewSignalToDSLTranslator.translate_signal(
+ {**{trading_view_signals_trading.TradingViewSignalsTradingMode.SIGNAL_KEY: signal}, **details}
+ )
+ return octobot_flow.entities.DSLScriptActionDetails(
+ id=action_id,
+ dsl_script=dsl_script,
+ )
+
    def create_configured_action_details(
        self, action_id: str, action: octobot_flow.enums.ActionType, config: dict
    ) -> octobot_flow.entities.ConfiguredActionDetails:
        """Wrap an ActionType and its configuration into ConfiguredActionDetails.

        :param action_id: unique identifier of the created action
        :param action: the action type, stored as its string value
        :param config: the action's configuration payload
        """
        return octobot_flow.entities.ConfiguredActionDetails(
            id=action_id,
            action=action.value,
            config=config,
        )
diff --git a/packages/flow/octobot_flow/parsers/sanitizer.py b/packages/flow/octobot_flow/parsers/sanitizer.py
new file mode 100644
index 000000000..1b3ab1ddf
--- /dev/null
+++ b/packages/flow/octobot_flow/parsers/sanitizer.py
@@ -0,0 +1,31 @@
+import decimal
+import typing
+
+import octobot_commons.signals as commons_signals
+import octobot_commons.enums as common_enums
+import octobot_commons.dataclasses as commons_dataclasses
+
+
+def _get_sanitized_value(value):
+ if isinstance(value, (list, dict)):
+ return sanitize(value)
+ if isinstance(value, decimal.Decimal):
+ return float(value)
+ if isinstance(value, commons_signals.SignalBundle):
+ return {common_enums.CommunityFeedAttrs.VALUE.value: sanitize(value.to_dict())}
+ return value
+
+
+def sanitize(values: typing.Any) -> typing.Any:
+ if isinstance(values, (list, tuple)):
+ return type(values)(
+ sanitize(val)
+ for val in values
+ )
+ elif isinstance(values, dict):
+ for key, val in values.items():
+ values[key] = _get_sanitized_value(val)
+ elif isinstance(values, commons_dataclasses.FlexibleDataclass):
+ for field in values.get_field_names():
+ setattr(values, field, sanitize(getattr(values, field)))
+ return values
diff --git a/packages/flow/octobot_flow/repositories/__init__.py b/packages/flow/octobot_flow/repositories/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/packages/flow/octobot_flow/repositories/community/__init__.py b/packages/flow/octobot_flow/repositories/community/__init__.py
new file mode 100644
index 000000000..ea4dfa647
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/community/__init__.py
@@ -0,0 +1,14 @@
+from octobot_flow.repositories.community.community_repository import CommunityRepository
+from octobot_flow.repositories.community.initializer import initialize_community_authentication
+from octobot_flow.repositories.community.authenticator_factory import CommunityAuthenticatorFactory
+from octobot_flow.repositories.community.custom_actions_repository import CustomActionsRepository
+from octobot_flow.repositories.community.community_lib import ensure_is_authenticated, ensure_authenticated_community_repository
+
+__all__ = [
+ "CommunityRepository",
+ "CustomActionsRepository",
+ "initialize_community_authentication",
+ "CommunityAuthenticatorFactory",
+ "ensure_is_authenticated",
+ "ensure_authenticated_community_repository",
+]
diff --git a/packages/flow/octobot_flow/repositories/community/authenticator_factory.py b/packages/flow/octobot_flow/repositories/community/authenticator_factory.py
new file mode 100644
index 000000000..75992019e
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/community/authenticator_factory.py
@@ -0,0 +1,77 @@
+import contextlib
+import typing
+
+import octobot_flow.entities
+import octobot_commons.logging as common_logging
+import octobot.community as community
+import octobot_flow.repositories.community.initializer as initializer
+
+
class CommunityAuthenticatorFactory:
    """Builds short-lived, non-singleton community authenticators.

    Supports user-credential authentication (email + password or auth key)
    and anonymous authentication through a backend anon key.
    """

    def __init__(
        self,
        auth_details: octobot_flow.entities.UserAuthentication,
        backend_url: typing.Optional[str] = None,
        anon_key: typing.Optional[str] = None
    ):
        self.auth_details: octobot_flow.entities.UserAuthentication = auth_details
        self.backend_url: typing.Optional[str] = backend_url
        self.anon_key: typing.Optional[str] = anon_key

    def enable_community_authentication(self) -> bool:
        """Return True when either user credentials or an anon key are available."""
        return bool(self.auth_details.has_auth_details() or self.anon_key)

    @contextlib.asynccontextmanager
    async def local_authenticator(self) -> typing.AsyncGenerator[community.CommunityAuthentication, None]:
        """Yield a logged-in, non-singleton authenticator for the configured user.

        :raises ValueError: when auth_details.email is not set
        """
        if not self.auth_details.email:
            raise ValueError("auth_details.email is required")
        community.IdentifiersProvider.use_production()
        local_instance = None
        configuration = initializer.get_stateless_configuration()
        try:
            local_instance = community.CommunityAuthentication(
                config=configuration, backend_url=self.backend_url, use_as_singleton=False
            )
            local_instance.supabase_client.is_admin = False
            local_instance.silent_auth = self.auth_details.hidden
            # minimal operations: just authenticate
            if self.auth_details.auth_key:
                # auth key authentication
                auth_key = self.auth_details.auth_key
                password = None
            else:
                # password authentication
                password = self.auth_details.password
                auth_key = None
            await local_instance.login(
                self.auth_details.email, password, password_token=None, auth_key=auth_key, minimal=True
            )
            common_logging.get_logger("local_community_user_authenticator").info(
                f"Authenticated as {self.auth_details.email[:3]}[...]{self.auth_details.email[-4:]}"
            )
            yield local_instance
        finally:
            await self._tear_down(local_instance)

    @contextlib.asynccontextmanager
    async def local_anon_authenticator(self) -> typing.AsyncGenerator[community.CommunityAuthentication, None]:
        """Yield a non-singleton anonymous authenticator using the anon key.

        :raises ValueError: when no anon key is configured
        """
        if not self.anon_key:
            raise ValueError("Anon key is required")
        community.IdentifiersProvider.use_production()
        local_instance = None
        configuration = initializer.get_stateless_configuration()
        try:
            local_instance = community.CommunityAuthentication(
                config=configuration, backend_url=self.backend_url, backend_key=self.anon_key, use_as_singleton=False
            )
            local_instance.supabase_client.is_admin = False
            common_logging.get_logger("local_community_user_authenticator").info(
                "Authenticated as anonymous user"  # fixed: extraneous f-string prefix
            )
            yield local_instance
        finally:
            await self._tear_down(local_instance)

    @staticmethod
    async def _tear_down(local_instance: typing.Optional[community.CommunityAuthentication]):
        # shared cleanup: release the session, then stop background tasks
        if local_instance is not None:
            await local_instance.logout()
            await local_instance.stop()
diff --git a/packages/flow/octobot_flow/repositories/community/community_lib.py b/packages/flow/octobot_flow/repositories/community/community_lib.py
new file mode 100644
index 000000000..7ef2589b5
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/community/community_lib.py
@@ -0,0 +1,25 @@
+import typing
+
+import octobot.community
+import octobot_flow.errors
+import octobot_flow.repositories.community.community_repository as community_repository_import
+
+
def ensure_is_authenticated(
    maybe_authenticator: typing.Optional[octobot.community.CommunityAuthentication]
) -> octobot.community.CommunityAuthentication:
    """Return *maybe_authenticator* when it exists and is logged in.

    :raises octobot_flow.errors.CommunityAuthenticationRequiredError: when missing or not logged in
    """
    if not (maybe_authenticator and maybe_authenticator.is_logged_in()):
        raise octobot_flow.errors.CommunityAuthenticationRequiredError(
            "Community authentication is required to fetch custom actions"
        )
    return maybe_authenticator
+
+
def ensure_authenticated_community_repository(
    maybe_community_repository: typing.Optional[community_repository_import.CommunityRepository]
) -> community_repository_import.CommunityRepository:
    """Return the repository after checking it exists and is authenticated.

    :raises octobot_flow.errors.CommunityAuthenticationRequiredError: when the
        repository is missing or its authenticator is not logged in
    """
    if maybe_community_repository is None:
        raise octobot_flow.errors.CommunityAuthenticationRequiredError(
            "Community authentication is required to use the community repository"
        )
    # raises CommunityAuthenticationRequiredError when not logged in
    ensure_is_authenticated(maybe_community_repository.authenticator)
    return maybe_community_repository
diff --git a/packages/flow/octobot_flow/repositories/community/community_repository.py b/packages/flow/octobot_flow/repositories/community/community_repository.py
new file mode 100644
index 000000000..91d282ade
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/community/community_repository.py
@@ -0,0 +1,32 @@
+import contextlib
+import asyncio
+
+import octobot.community
+
+import octobot_flow.entities
+
+
class CommunityRepository:
    """Authenticated access to community backend data tied to the current bot."""

    def __init__(self, authenticator: octobot.community.CommunityAuthentication):
        self.authenticator: octobot.community.CommunityAuthentication = authenticator

    async def insert_bot_logs(self, log_data: list[octobot.community.BotLogData]):
        """Concurrently insert every given log entry for the current bot id.

        :param log_data: log entries to persist
        """
        # fixed: the comprehension variable used to shadow the `log_data` parameter
        await asyncio.gather(
            *[
                self.authenticator.supabase_client.insert_bot_log(
                    self.authenticator.user_account.bot_id,
                    bot_log.log_type,
                    bot_log.content
                )
                for bot_log in log_data
            ]
        )

    @contextlib.contextmanager
    def automation_context(self, automation: octobot_flow.entities.AutomationDetails):
        """Temporarily use the automation's id as the authenticator's bot id.

        The previous bot id is always restored on exit, even on error.
        """
        previous_bot_id = self.authenticator.user_account.bot_id
        try:
            self.authenticator.user_account.bot_id = automation.metadata.automation_id # type: ignore
            yield
        finally:
            self.authenticator.user_account.bot_id = previous_bot_id # type: ignore
diff --git a/packages/flow/octobot_flow/repositories/community/custom_actions_repository.py b/packages/flow/octobot_flow/repositories/community/custom_actions_repository.py
new file mode 100644
index 000000000..16fc212c8
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/community/custom_actions_repository.py
@@ -0,0 +1,23 @@
+import octobot.community
+
+import octobot_flow.entities
+
+
class CustomActionsRepository:
    """Access to user-defined custom actions and signals on the community backend.

    All fetch/update methods are currently unimplemented placeholders.
    """

    def __init__(self, authenticator: octobot.community.CommunityAuthentication):
        self.authenticator: octobot.community.CommunityAuthentication = authenticator

    async def fetch_custom_actions(
        self,
        user_action_history_ids: list[str],
        select_pending_user_actions_only: bool
    ) -> list[octobot_flow.entities.AbstractActionDetails]:
        """Fetch custom actions by history ids, optionally pending ones only. Not implemented yet."""
        raise NotImplementedError("TODO: fetch_custom_actions")

    async def fetch_signals(
        self, signal_history_ids: list[str], select_pending_signals_only: bool
    ) -> list[octobot_flow.entities.AbstractActionDetails]:
        """Fetch signals by history ids, optionally pending ones only. Not implemented yet."""
        raise NotImplementedError("TODO: fetch_signals")

    async def update_custom_actions_history(self, actions: list[octobot_flow.entities.AbstractActionDetails]):
        """Persist the processing state of the given actions. Not implemented yet."""
        raise NotImplementedError("TODO: update_custom_actions_history")
diff --git a/packages/flow/octobot_flow/repositories/community/initializer.py b/packages/flow/octobot_flow/repositories/community/initializer.py
new file mode 100644
index 000000000..316bf2bc1
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/community/initializer.py
@@ -0,0 +1,17 @@
+import octobot.community
+import octobot_commons.configuration
+
+
def initialize_community_authentication():
    """Create the global CommunityAuthentication singleton against production endpoints."""
    octobot.community.IdentifiersProvider.use_production()
    configuration = get_stateless_configuration()
    # create CommunityAuthentication singleton
    octobot.community.CommunityAuthentication.create(configuration)
+
+
def get_stateless_configuration() -> octobot_commons.configuration.Configuration:
    """Return an in-memory Configuration with persistence disabled.

    The configuration is not backed by any file: save() is replaced by a
    no-op so nothing is ever written to disk.
    """
    configuration = octobot_commons.configuration.Configuration(None, None)
    configuration.config = {}
    # disable save: no-op returning None (previous lambda leaked its args tuple)
    configuration.save = lambda *_, **__: None  # type: ignore
    return configuration
diff --git a/packages/flow/octobot_flow/repositories/exchange/__init__.py b/packages/flow/octobot_flow/repositories/exchange/__init__.py
new file mode 100644
index 000000000..1b6ed3dc3
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/__init__.py
@@ -0,0 +1,21 @@
+from octobot_flow.repositories.exchange.base_exchange_repository import BaseExchangeRepository
+from octobot_flow.repositories.exchange.ohlcv_repository import OhlcvRepository
+from octobot_flow.repositories.exchange.orders_repository import OrdersRepository
+from octobot_flow.repositories.exchange.portfolio_repository import PortfolioRepository
+from octobot_flow.repositories.exchange.positions_repository import PositionsRepository
+from octobot_flow.repositories.exchange.trades_repository import TradesRepository
+from octobot_flow.repositories.exchange.tickers_repository import TickersRepository
+from octobot_flow.repositories.exchange.exchange_repository_factory import ExchangeRepositoryFactory
+from octobot_flow.repositories.exchange.exchange_context_mixin import ExchangeContextMixin
+
+__all__ = [
+ "BaseExchangeRepository",
+ "OhlcvRepository",
+ "OrdersRepository",
+ "PortfolioRepository",
+ "PositionsRepository",
+ "TradesRepository",
+ "TickersRepository",
+ "ExchangeRepositoryFactory",
+ "ExchangeContextMixin",
+]
\ No newline at end of file
diff --git a/packages/flow/octobot_flow/repositories/exchange/base_exchange_repository.py b/packages/flow/octobot_flow/repositories/exchange/base_exchange_repository.py
new file mode 100644
index 000000000..4abbb96d6
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/base_exchange_repository.py
@@ -0,0 +1,13 @@
+import octobot_trading.exchanges
+import octobot_flow.entities
+
class BaseExchangeRepository:
    """Base class for exchange-backed repositories: holds the shared exchange context."""

    def __init__(
        self,
        exchange_manager: octobot_trading.exchanges.ExchangeManager,
        known_automations: list[octobot_flow.entities.AutomationDetails],
        fetched_exchange_data: octobot_flow.entities.FetchedExchangeData,
    ):
        # live manager for the target exchange account
        self.exchange_manager: octobot_trading.exchanges.ExchangeManager = exchange_manager
        # automations that may operate on this exchange account
        self.known_automations: list[octobot_flow.entities.AutomationDetails] = known_automations
        # exchange data already fetched for this run
        self.fetched_exchange_data: octobot_flow.entities.FetchedExchangeData = fetched_exchange_data
diff --git a/packages/flow/octobot_flow/repositories/exchange/exchange_context_mixin.py b/packages/flow/octobot_flow/repositories/exchange/exchange_context_mixin.py
new file mode 100644
index 000000000..5858b4045
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/exchange_context_mixin.py
@@ -0,0 +1,209 @@
+import contextlib
+import typing
+import uuid
+
+import octobot_commons.databases as databases
+import octobot_commons.tree as commons_tree
+import octobot_commons.constants as common_constants
+import octobot_commons.profiles as commons_profiles
+import octobot_commons.logging as commons_logging
+import octobot_trading.exchanges
+import octobot_trading.exchange_data
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+import octobot_tentacles_manager.api
+import octobot.databases_util as databases_util
+import tentacles.Meta.Keywords.scripting_library as scripting_library
+import octobot_flow.errors
+import octobot_flow.entities
+import octobot_flow.repositories.exchange.exchange_repository_factory as exchange_repository_factory
+import octobot_flow.repositories.exchange.tickers_repository as tickers_repository
+import octobot_flow.logic.configuration
+
class ExchangeContextMixin:
    """Mixin managing the lifecycle of a local ExchangeManager for an automation.

    Subclasses that run a trading strategy set WILL_EXECUTE_STRATEGY to True and
    implement init_strategy_exchange_data(). All repository access must happen
    within exchange_manager_context().
    """

    # when True, subclasses prepare strategy markets and get predictive order sync
    WILL_EXECUTE_STRATEGY: bool = False

    def __init__(
        self,
        automation_state: octobot_flow.entities.AutomationState,
        fetched_dependencies: octobot_flow.entities.FetchedDependencies,
    ):
        self.automation_state: octobot_flow.entities.AutomationState = automation_state
        self.fetched_dependencies: octobot_flow.entities.FetchedDependencies = fetched_dependencies
        self.profile_data_provider: octobot_flow.logic.configuration.ProfileDataProvider = octobot_flow.logic.configuration.ProfileDataProvider()

        # context dependent attributes: only set inside exchange_manager_context()
        self._exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager] = None

    def get_exchange_repository_factory(self) -> exchange_repository_factory.ExchangeRepositoryFactory:
        """Return a repository factory bound to the active exchange manager.

        :raises octobot_flow.errors.ExchangeAccountInitializationError: when called
            outside of exchange_manager_context()
        """
        self.ensure_context()
        return exchange_repository_factory.ExchangeRepositoryFactory(
            self._exchange_manager,
            [self.automation_state.automation],
            self.fetched_dependencies.fetched_exchange_data,
            self.profile_data_provider.get_profile_data().trader_simulator.enabled,
        )

    def init_strategy_exchange_data(self, exchange_data: exchange_data_import.ExchangeData):
        """
        should be implemented when self.WILL_EXECUTE_STRATEGY is True
        """
        raise NotImplementedError("init_strategy_exchange_data should be implemented in subclass")

    def ensure_context(self):
        """Raise when no exchange manager context is currently active."""
        if self._exchange_manager is None:
            raise octobot_flow.errors.ExchangeAccountInitializationError("Not in exchange context")

    @contextlib.asynccontextmanager
    async def exchange_manager_context(
        self, as_reference_account: bool
    ) -> typing.AsyncGenerator[typing.Optional[octobot_trading.exchanges.ExchangeManager], None]:
        """Create, initialize and yield a local ExchangeManager for the automation.

        Yields None when the automation has no exchange configured. On exit,
        run databases and event trees tied to the manager's bot id are removed
        and the context attribute is reset.

        :param as_reference_account: selects which exchange account elements
            (and therefore which portfolio content) to initialize from
        """
        exchange_manager_bot_id = None
        profile_data = self.profile_data_provider.get_profile_data()
        if not self.automation_state.has_exchange():
            # no need to initialize an exchange manager
            yield None
            return
        exchange_data = self.automation_state.exchange_account_details.to_minimal_exchange_data(
            self.automation_state.automation.get_exchange_account_elements(as_reference_account).portfolio.content
        )
        try:
            if self.WILL_EXECUTE_STRATEGY:
                # make all markets available to the strategy, it will use the required ones
                self.init_strategy_exchange_data(exchange_data)
            tentacles_setup_config = scripting_library.get_full_tentacles_setup_config()
            exchange_config_by_exchange = scripting_library.get_config_by_tentacle(profile_data)
            # real authentication is only needed when not running the trader simulator
            auth = profile_data.trader_simulator.enabled is False
            builder = await self._get_exchange_builder(
                profile_data,
                exchange_data,
                auth,
                tentacles_setup_config,
                exchange_config_by_exchange,
            )
            octobot_tentacles_manager.api.set_tentacle_config_proxy(scripting_library.empty_config_proxy)
            exchange_config = builder.config[common_constants.CONFIG_EXCHANGES][exchange_data.exchange_details.name]
            # config can be ignored for simulated runs on exchanges that don't require auth
            ignore_config = (
                not auth and not scripting_library.is_auth_required_exchanges(
                    exchange_data, tentacles_setup_config, exchange_config_by_exchange
                )
            )
            async with octobot_trading.exchanges.get_local_exchange_manager(
                exchange_data.exchange_details.name, exchange_config, tentacles_setup_config,
                exchange_data.auth_details.sandboxed, ignore_config=ignore_config,
                builder=builder, use_cached_markets=True,
                is_broker_enabled=exchange_data.auth_details.broker_enabled,
                exchange_config_by_exchange=exchange_config_by_exchange,
                disable_unauth_retry=True, # unauth fallback is never required, if auth fails, this should fail
            ) as exchange_manager:
                exchange_manager_bot_id = exchange_manager.bot_id
                octobot_trading.exchange_data.initialize_contracts_from_exchange_data(exchange_manager, exchange_data)
                # seed a mark price for every known market, falling back to cached tickers
                price_by_symbol = {
                    market.symbol: self.get_price_from_exchange_data_or_cached_tickers(exchange_data, market.symbol)
                    for market in exchange_data.markets
                }
                await exchange_manager.initialize_from_exchange_data(
                    exchange_data, price_by_symbol, False,
                    False, profile_data.trader_simulator.enabled
                )
                # force the portfolio to match the automation's declared totals
                portfolio_config = {
                    asset: portfolio_element[common_constants.PORTFOLIO_TOTAL]
                    for asset, portfolio_element in exchange_data.portfolio_details.content.items()
                }
                exchange_manager.exchange_personal_data.portfolio_manager.apply_forced_portfolio(portfolio_config)
                self._exchange_manager = exchange_manager
                if self.WILL_EXECUTE_STRATEGY:
                    with self._predictive_order_sync_context(exchange_manager, profile_data):
                        yield exchange_manager
                else:
                    yield exchange_manager
        finally:
            if exchange_manager_bot_id:
                # clean up per-bot run databases and event trees created during init
                if databases.RunDatabasesProvider.instance().has_bot_id(exchange_manager_bot_id):
                    databases.RunDatabasesProvider.instance().remove_bot_id(exchange_manager_bot_id)
                commons_tree.EventProvider.instance().remove_event_tree(exchange_manager_bot_id)
            self._exchange_manager = None

    def get_exchange_config(self) -> dict:
        """Return the exchange configuration. Must be implemented by subclasses."""
        raise NotImplementedError("get_exchange_config not implemented")

    def get_price_from_exchange_data_or_cached_tickers(
        self, exchange_data: exchange_data_import.ExchangeData, symbol: str
    ) -> typing.Optional[float]:
        """Return a price for *symbol*, preferring exchange data over cached tickers.

        Returns None (after logging an error) when neither candles nor a cached
        ticker price are available.
        """
        try:
            return exchange_data.get_price(symbol)
        except (IndexError, KeyError):
            # no candle-based price: fall back to the last cached ticker price
            try:
                price = tickers_repository.TickersRepository.get_cached_market_price(
                    exchange_data.exchange_details.name, exchange_data.auth_details.exchange_type,
                    exchange_data.auth_details.sandboxed, symbol,
                )
                commons_logging.get_logger(self.__class__.__name__).warning(
                    f"Using {symbol} [{exchange_data.exchange_details.name}] "
                    f"ticker price for mark price: candles are missing"
                )
                return price
            except KeyError:
                commons_logging.get_logger(self.__class__.__name__).error(
                    f"Impossible to initialize {symbol} price on {exchange_data.exchange_details.name}: no "
                    f"candle or cached ticker price"
                )
                return None

    async def _get_exchange_builder(
        self,
        profile_data: commons_profiles.ProfileData,
        exchange_data: exchange_data_import.ExchangeData,
        auth: bool,
        tentacles_setup_config,
        exchange_config_by_exchange,
        matrix_id=None,
        ignore_symbols_in_exchange_init=False
    ) -> octobot_trading.exchanges.ExchangeBuilder:
        """Build and configure an ExchangeBuilder (storage disabled, fresh bot id).

        :param auth: when True the builder targets a real account, otherwise simulated
        :param matrix_id: optional evaluation matrix to attach
        """
        config = scripting_library.get_config(
            profile_data, exchange_data, tentacles_setup_config, auth, ignore_symbols_in_exchange_init, True
        )
        bot_id = str(uuid.uuid4())
        if tentacles_setup_config is not None:
            # register run databases for this transient bot id (storage disabled)
            await databases.init_bot_storage(
                bot_id,
                databases_util.get_run_databases_identifier(
                    config.config,
                    tentacles_setup_config,
                    enable_storage=False,
                ),
                False
            )
        builder = octobot_trading.exchanges.ExchangeBuilder(
            config.config,
            exchange_data.exchange_details.name
        ) \
            .set_bot_id(bot_id) \
            .enable_storage(False)
        if auth:
            builder.is_real()
        else:
            builder.is_simulated()
        if matrix_id:
            builder.has_matrix(matrix_id)
        return builder

    @contextlib.contextmanager
    def _predictive_order_sync_context(self, exchange_manager, profile_data: commons_profiles.ProfileData):
        """Run with order/position auto-sync disabled; simulate trades when configured."""
        # disable portfolio fetch and available value updates as portfolio is already up-to-date
        with (
            # don't fetch portfolio update when creating/filling order
            exchange_manager.exchange_personal_data.orders_manager.disabled_order_auto_synchronization(),
            # dont fetch positions update when creating/filling order
            exchange_manager.exchange_personal_data.positions_manager.disabled_positions_update_from_order(),
        ):
            if profile_data.trader_simulator.enabled:
                # in simulated context, temporarily enable trader simulator automations
                # to update portfolio and handle orders as simulated
                previous_simulated_state = exchange_manager.trader.simulate
                exchange_manager.trader.simulate = True
                try:
                    yield
                finally:
                    exchange_manager.trader.simulate = previous_simulated_state
            else:
                yield
diff --git a/packages/flow/octobot_flow/repositories/exchange/exchange_repository_factory.py b/packages/flow/octobot_flow/repositories/exchange/exchange_repository_factory.py
new file mode 100644
index 000000000..4c71422e6
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/exchange_repository_factory.py
@@ -0,0 +1,85 @@
+import octobot_trading.exchanges
+
+import octobot_flow.entities
+import octobot_flow.repositories.exchange.simulated_trading as simulated_trading_repositories
+import octobot_flow.repositories.exchange.ohlcv_repository as ohlcv_repository_import
+import octobot_flow.repositories.exchange.orders_repository as orders_repository_import
+import octobot_flow.repositories.exchange.portfolio_repository as portfolio_repository_import
+import octobot_flow.repositories.exchange.positions_repository as positions_repository_import
+import octobot_flow.repositories.exchange.trades_repository as trades_repository_import
+import octobot_flow.repositories.exchange.tickers_repository as tickers_repository_import
+
+
+
class ExchangeRepositoryFactory:
    """Creates exchange repositories, selecting simulated or live implementations.

    Every repository shares the same constructor signature, so the
    simulated/live branch is centralized in _build() instead of being
    duplicated in each getter.
    """

    def __init__(
        self,
        exchange_manager: octobot_trading.exchanges.ExchangeManager,
        known_automations: list[octobot_flow.entities.AutomationDetails],
        fetched_exchange_data: octobot_flow.entities.FetchedExchangeData,
        is_simulated: bool,
    ):
        self.exchange_manager: octobot_trading.exchanges.ExchangeManager = exchange_manager
        self.known_automations: list[octobot_flow.entities.AutomationDetails] = known_automations
        self.fetched_exchange_data: octobot_flow.entities.FetchedExchangeData = fetched_exchange_data
        self.is_simulated: bool = is_simulated

    def _build(self, simulated_class, live_class):
        # single switch between simulated and live repository implementations
        repository_class = simulated_class if self.is_simulated else live_class
        return repository_class(
            self.exchange_manager, self.known_automations, self.fetched_exchange_data
        )

    def get_ohlcv_repository(self) -> ohlcv_repository_import.OhlcvRepository:
        """Return the candles (OHLCV) repository."""
        return self._build(
            simulated_trading_repositories.SimulatedOhlcvRepository,
            ohlcv_repository_import.OhlcvRepository,
        )

    def get_orders_repository(self) -> orders_repository_import.OrdersRepository:
        """Return the orders repository."""
        return self._build(
            simulated_trading_repositories.SimulatedOrdersRepository,
            orders_repository_import.OrdersRepository,
        )

    def get_portfolio_repository(self) -> portfolio_repository_import.PortfolioRepository:
        """Return the portfolio repository."""
        return self._build(
            simulated_trading_repositories.SimulatedPortfolioRepository,
            portfolio_repository_import.PortfolioRepository,
        )

    def get_positions_repository(self) -> positions_repository_import.PositionsRepository:
        """Return the positions repository."""
        return self._build(
            simulated_trading_repositories.SimulatedPositionsRepository,
            positions_repository_import.PositionsRepository,
        )

    def get_trades_repository(self) -> trades_repository_import.TradesRepository:
        """Return the trades repository."""
        return self._build(
            simulated_trading_repositories.SimulatedTradesRepository,
            trades_repository_import.TradesRepository,
        )

    def get_tickers_repository(self) -> tickers_repository_import.TickersRepository:
        """Return the tickers repository."""
        return self._build(
            simulated_trading_repositories.SimulatedTickersRepository,
            tickers_repository_import.TickersRepository,
        )
diff --git a/packages/flow/octobot_flow/repositories/exchange/ohlcv_repository.py b/packages/flow/octobot_flow/repositories/exchange/ohlcv_repository.py
new file mode 100644
index 000000000..ee7a138f4
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/ohlcv_repository.py
@@ -0,0 +1,13 @@
+import octobot_flow.repositories.exchange.base_exchange_repository as base_exchange_repository_import
+import octobot_trading.util.test_tools.exchanges_test_tools as exchanges_test_tools
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+
+class OhlcvRepository(base_exchange_repository_import.BaseExchangeRepository):
+
+ async def fetch_ohlcv(
+ self, symbol: str, time_frame: str, limit: int
+ ) -> exchange_data_import.MarketDetails:
+ return await exchanges_test_tools.fetch_ohlcv(
+ self.exchange_manager, symbol, time_frame, limit
+ )
diff --git a/packages/flow/octobot_flow/repositories/exchange/orders_repository.py b/packages/flow/octobot_flow/repositories/exchange/orders_repository.py
new file mode 100644
index 000000000..65e93016f
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/orders_repository.py
@@ -0,0 +1,13 @@
+import octobot_flow.repositories.exchange.base_exchange_repository as base_exchange_repository_import
+import octobot_trading.util.test_tools.exchanges_test_tools as exchanges_test_tools_import
+
+class OrdersRepository(base_exchange_repository_import.BaseExchangeRepository):
+
+ async def fetch_open_orders(
+ self, symbols: list[str], ignore_unsupported_orders: bool = True
+ ) -> list[dict]:
+ if not symbols:
+ return []
+ return await exchanges_test_tools_import.get_open_orders(
+ self.exchange_manager, None, symbols=symbols, ignore_unsupported_orders=ignore_unsupported_orders
+ )
diff --git a/packages/flow/octobot_flow/repositories/exchange/portfolio_repository.py b/packages/flow/octobot_flow/repositories/exchange/portfolio_repository.py
new file mode 100644
index 000000000..f4cf81de1
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/portfolio_repository.py
@@ -0,0 +1,14 @@
+import decimal
+
+import octobot_trading.personal_data as personal_data
+import octobot_flow.repositories.exchange.base_exchange_repository as base_exchange_repository_import
+import octobot_trading.util.test_tools.exchanges_test_tools as exchanges_test_tools_import
+
+class PortfolioRepository(base_exchange_repository_import.BaseExchangeRepository):
+
+ async def fetch_portfolio(self) -> dict[str, dict[str, decimal.Decimal]]:
+ return personal_data.from_raw_to_formatted_portfolio(
+ await exchanges_test_tools_import.get_portfolio(
+ self.exchange_manager
+ ), as_float=False
+ ) # type: ignore
diff --git a/packages/flow/octobot_flow/repositories/exchange/positions_repository.py b/packages/flow/octobot_flow/repositories/exchange/positions_repository.py
new file mode 100644
index 000000000..b854e4350
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/positions_repository.py
@@ -0,0 +1,20 @@
+import octobot_flow.repositories.exchange.base_exchange_repository as base_exchange_repository_import
+import octobot_trading.util.test_tools.exchanges_test_tools as exchanges_test_tools_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+class PositionsRepository(base_exchange_repository_import.BaseExchangeRepository):
+
+ async def fetch_positions(self, symbols: list[str]) -> list[exchange_data_import.PositionDetails]:
+ raw_positions = await exchanges_test_tools_import.get_positions(
+ self.exchange_manager, None, symbols=symbols
+ )
+ return [self._parse_position(position) for position in raw_positions]
+
+
+ def _parse_position(self, raw_position: dict) -> exchange_data_import.PositionDetails:
+ return exchange_data_import.PositionDetails(
+ position=raw_position, contract=self._parse_contract(raw_position)
+ )
+
+ def _parse_contract(self, raw_position: dict) -> dict:
+        raise NotImplementedError("_parse_contract is not implemented")
diff --git a/packages/flow/octobot_flow/repositories/exchange/simulated_trading/__init__.py b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/__init__.py
new file mode 100644
index 000000000..ce3deb3f6
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/__init__.py
@@ -0,0 +1,15 @@
+from octobot_flow.repositories.exchange.simulated_trading.simulated_ohlcv_repository import SimulatedOhlcvRepository
+from octobot_flow.repositories.exchange.simulated_trading.simulated_orders_repository import SimulatedOrdersRepository
+from octobot_flow.repositories.exchange.simulated_trading.simulated_portfolio_repository import SimulatedPortfolioRepository
+from octobot_flow.repositories.exchange.simulated_trading.simulated_tickers_repository import SimulatedTickersRepository
+from octobot_flow.repositories.exchange.simulated_trading.simulated_trades_repository import SimulatedTradesRepository
+from octobot_flow.repositories.exchange.simulated_trading.simulated_positions_repository import SimulatedPositionsRepository
+
+__all__ = [
+ "SimulatedOhlcvRepository",
+ "SimulatedOrdersRepository",
+ "SimulatedPortfolioRepository",
+ "SimulatedTickersRepository",
+ "SimulatedTradesRepository",
+ "SimulatedPositionsRepository",
+]
\ No newline at end of file
diff --git a/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_ohlcv_repository.py b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_ohlcv_repository.py
new file mode 100644
index 000000000..c969f6239
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_ohlcv_repository.py
@@ -0,0 +1,6 @@
+import octobot_flow.repositories.exchange.ohlcv_repository as ohlcv_repository_import
+
+
+class SimulatedOhlcvRepository(ohlcv_repository_import.OhlcvRepository):
+ # nothing simulator specific to do
+ pass
diff --git a/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_orders_repository.py b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_orders_repository.py
new file mode 100644
index 000000000..c692b35f0
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_orders_repository.py
@@ -0,0 +1,19 @@
+import octobot_trading.constants
+import octobot_trading.enums
+
+import octobot_flow.repositories.exchange.orders_repository as orders_repository_import
+
+
+class SimulatedOrdersRepository(orders_repository_import.OrdersRepository):
+
+ async def fetch_open_orders(
+ self, symbols: list[str], ignore_unsupported_orders: bool = True
+ ) -> list[dict]:
+ return []
+ # TODO see if returning the orders from the known bot details is necessary in simulated
+ return [
+ order[octobot_trading.constants.STORAGE_ORIGIN_VALUE]
+ for automation in self.known_automations
+ for order in automation.exchange_account_elements.orders.open_orders
+ if order[octobot_trading.constants.STORAGE_ORIGIN_VALUE][octobot_trading.enums.ExchangeConstantsOrderColumns.SYMBOL.value] in symbols
+ ]
diff --git a/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_portfolio_repository.py b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_portfolio_repository.py
new file mode 100644
index 000000000..506ca1446
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_portfolio_repository.py
@@ -0,0 +1,13 @@
+import decimal
+
+import octobot_flow.repositories.exchange.portfolio_repository as portfolio_repository_import
+import octobot_trading.personal_data as trading_personal_data
+
+
+class SimulatedPortfolioRepository(portfolio_repository_import.PortfolioRepository):
+
+ async def fetch_portfolio(self) -> dict[str, dict[str, decimal.Decimal]]:
+ # todo update simulated portfolio with updated orders and trades
+ return trading_personal_data.format_dict_portfolio_values(
+ self.fetched_exchange_data.authenticated_data.portfolio.full_content, True
+ ) # type: ignore
diff --git a/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_positions_repository.py b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_positions_repository.py
new file mode 100644
index 000000000..80abbd37a
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_positions_repository.py
@@ -0,0 +1,9 @@
+import octobot_flow.repositories.exchange.positions_repository as positions_repository_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+
+class SimulatedPositionsRepository(positions_repository_import.PositionsRepository):
+
+ async def fetch_positions(self, symbols: list[str]) -> list[exchange_data_import.PositionDetails]:
+ # todo update simulated positions with updated orders and trades
+ return self.fetched_exchange_data.authenticated_data.positions
diff --git a/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_tickers_repository.py b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_tickers_repository.py
new file mode 100644
index 000000000..f85800530
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_tickers_repository.py
@@ -0,0 +1,6 @@
+import octobot_flow.repositories.exchange.tickers_repository as tickers_repository_import
+
+
+class SimulatedTickersRepository(tickers_repository_import.TickersRepository):
+ # nothing simulator specific to do
+ pass
diff --git a/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_trades_repository.py b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_trades_repository.py
new file mode 100644
index 000000000..a1cab2836
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/simulated_trading/simulated_trades_repository.py
@@ -0,0 +1,9 @@
+import octobot_flow.repositories.exchange.trades_repository as trades_repository_import
+
+
+class SimulatedTradesRepository(trades_repository_import.TradesRepository):
+
+ async def fetch_trades(self, symbols: list[str]) -> list[dict]:
+ # todo add generated trades
+ return self.fetched_exchange_data.authenticated_data.trades
+
diff --git a/packages/flow/octobot_flow/repositories/exchange/tickers_repository.py b/packages/flow/octobot_flow/repositories/exchange/tickers_repository.py
new file mode 100644
index 000000000..2d0f30fd0
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/tickers_repository.py
@@ -0,0 +1,48 @@
+import octobot_trading.util.test_tools.exchanges_test_tools as exchanges_test_tools
+import octobot_trading.exchange_data
+import octobot_trading.exchanges
+import octobot_trading.enums as trading_enums
+
+import octobot_flow.repositories.exchange.base_exchange_repository as base_exchange_repository_import
+import octobot_flow.constants
+
+
+_TICKER_CACHE = octobot_trading.exchange_data.TickerCache(
+ ttl=octobot_flow.constants.TICKER_CACHE_TTL,
+ maxsize=50
+)
+
+class TickersRepository(base_exchange_repository_import.BaseExchangeRepository):
+
+ async def fetch_tickers(self, symbols: list[str]) -> dict[str, dict]:
+ if not symbols:
+ return {}
+ if len(symbols) == 1:
+ return {
+ symbols[0]: await exchanges_test_tools.get_price_ticker(self.exchange_manager, symbols[0]) # type: ignore
+ }
+ tickers = await exchanges_test_tools.get_all_currencies_price_ticker(
+ self.exchange_manager, symbols=symbols
+ )
+ self.set_tickers_cache(
+ self.exchange_manager.exchange_name,
+ octobot_trading.exchanges.get_exchange_type(self.exchange_manager).value,
+ self.exchange_manager.is_sandboxed,
+ tickers
+ )
+ return tickers
+
+ @staticmethod
+ def get_cached_market_price(exchange_internal_name, exchange_type, sandboxed: bool, symbol: str) -> float:
+ try:
+ return _TICKER_CACHE.get_all_tickers(exchange_internal_name, exchange_type, sandboxed)[symbol][ # type: ignore
+ trading_enums.ExchangeConstantsTickersColumns.CLOSE.value
+ ]
+ except TypeError as err:
+            # no cached tickers for this exchange (get_all_tickers returned None): surface as a KeyError
+ raise KeyError(err) from err
+
+
+ @staticmethod
+ def set_tickers_cache(exchange_name: str, exchange_type: str, sandboxed: bool, tickers: dict):
+ _TICKER_CACHE.set_all_tickers(exchange_name, exchange_type, sandboxed, tickers, replace_all=False)
diff --git a/packages/flow/octobot_flow/repositories/exchange/trades_repository.py b/packages/flow/octobot_flow/repositories/exchange/trades_repository.py
new file mode 100644
index 000000000..f53ecee5a
--- /dev/null
+++ b/packages/flow/octobot_flow/repositories/exchange/trades_repository.py
@@ -0,0 +1,11 @@
+import octobot_flow.repositories.exchange.base_exchange_repository as base_exchange_repository_import
+import octobot_trading.util.test_tools.exchanges_test_tools as exchanges_test_tools_import
+
+class TradesRepository(base_exchange_repository_import.BaseExchangeRepository):
+
+ async def fetch_trades(self, symbols: list[str]) -> list[dict]:
+ if not symbols:
+ return []
+ return await exchanges_test_tools_import.get_trades(
+ self.exchange_manager, None, symbols=symbols
+ )
diff --git a/packages/flow/tests/.env.template b/packages/flow/tests/.env.template
new file mode 100644
index 000000000..92fb0df19
--- /dev/null
+++ b/packages/flow/tests/.env.template
@@ -0,0 +1,5 @@
+BINANCE_KEY=
+BINANCE_SECRET=
+
+EXCHANGE_HTTP_PROXY_AUTHENTICATED_URL=http://localhost:8081
+USE_AUTHENTICATED_EXCHANGE_REQUESTS_ONLY_PROXY=true
diff --git a/packages/flow/tests/__init__.py b/packages/flow/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/packages/flow/tests/conftest.py b/packages/flow/tests/conftest.py
new file mode 100644
index 000000000..59b5518d3
--- /dev/null
+++ b/packages/flow/tests/conftest.py
@@ -0,0 +1,5 @@
+# This lives in conftest.py so the .env file is loaded before any test runs or any test module is imported.
+
+import dotenv
+import os
+dotenv.load_dotenv(dotenv_path=os.path.join(os.path.dirname(__file__), ".env"))
diff --git a/packages/flow/tests/functionnal_tests/__init__.py b/packages/flow/tests/functionnal_tests/__init__.py
new file mode 100644
index 000000000..e690b4c37
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/__init__.py
@@ -0,0 +1,272 @@
+import contextlib
+import mock
+import pytest
+import time
+import os
+import typing
+
+# force env var
+os.environ["USE_MINIMAL_LIBS"] = "true"
+os.environ["ALLOW_FUNDS_TRANSFER"] = "True"
+
+import octobot_trading.exchanges.connectors.ccxt.ccxt_clients_cache as ccxt_clients_cache
+import octobot.community as community
+
+import octobot_flow.entities
+
+import octobot_flow.environment
+import octobot_flow.repositories.community
+
+def is_on_github_ci():
+ # Always set to true when GitHub Actions is running the workflow.
+ # You can use this variable to differentiate when tests are being run locally or by GitHub Actions.
+ # from https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables
+ return bool(os.getenv("GITHUB_ACTIONS"))
+
+current_time = time.time()
+EXCHANGE_INTERNAL_NAME = "binanceus" if is_on_github_ci() else "binance" # binanceus works on github CI
+
+@contextlib.contextmanager
+def mocked_community_authentication():
+ with mock.patch.object(
+ community.CommunityAuthentication, "login", mock.AsyncMock(),
+ ) as login_mock, mock.patch.object(
+ community.CommunityAuthentication, "is_logged_in", mock.AsyncMock(return_value=True)
+ ):
+ yield login_mock
+
+
+@contextlib.contextmanager
+def mocked_community_repository():
+ with mock.patch.object(
+ octobot_flow.repositories.community.CommunityRepository, "insert_bot_logs", mock.AsyncMock()
+ ) as insert_bot_logs_mock:
+ yield insert_bot_logs_mock
+
+# ensure environment is initialized
+octobot_flow.environment.initialize_environment()
+
+
+@pytest.fixture
+def global_state():
+ return {
+ "exchange_account_details": {
+ "exchange_details": {
+ "internal_name": EXCHANGE_INTERNAL_NAME,
+ },
+ # "auth_details": {}, # not needed for simulator
+ # "portfolio": {}, # irrelevant for simulator
+ },
+ "automation": {
+ # "profile_data": {
+ # "profile_details": {
+ # "id": "bot_1",
+ # "bot_id": "id:bot_1",
+ # },
+ # "crypto_currencies": [
+ # {"trading_pairs": ["BTC/USDT"], "name": "BTC"},
+ # {"trading_pairs": ["ETH/USDT"], "name": "ETH"},
+ # ],
+ # "trading": {
+ # "reference_market": "USDT",
+ # },
+ # "exchanges": [
+ # {
+ # "internal_name": EXCHANGE_INTERNAL_NAME,
+ # "exchange_type": "spot",
+ # }
+ # ],
+ # "trader": {
+ # "enabled": False,
+ # },
+ # "trader_simulator": {
+ # "enabled": True,
+ # },
+ # "tentacles": [
+ # {
+ # "name": "IndexTradingMode",
+ # "config": {
+ # "required_strategies": [],
+ # "refresh_interval": 1,
+ # "rebalance_trigger_min_percent": 5,
+ # "sell_unindexed_traded_coins": True,
+ # "quote_asset_rebalance_trigger_min_percent": 20,
+ # "index_content": [
+ # {"name": "BTC", "value": 1},
+ # {"name": "ETH", "value": 1},
+ # ]
+ # }
+ # },
+ # ]
+ # },
+ "metadata": {
+ "automation_id": "automation_1",
+ },
+ "client_exchange_account_elements": {
+ "portfolio": {
+ "content": {
+ "USDT": {
+ "available": 1000.0,
+ "total": 1000.0,
+ },
+ "ETH": {
+ "available": 0.1,
+ "total": 0.1,
+ },
+ },
+ },
+ },
+ "execution": {
+ "previous_execution": {
+ "trigger_time": current_time - 600,
+ "trigger_reason": "scheduled",
+ # "additional_actions": {}, # no additional actions
+ "strategy_execution_time": current_time - 590,
+ },
+ "current_execution": {
+ "trigger_reason": "scheduled",
+ # "additional_actions": {}, # no additional actions
+ },
+ # "degraded_state": {} # no degraded state
+ "execution_error": None # no execution error
+ },
+ # "exchange_account_elements": {
+ # "portfolio": {
+ # "initial_value": 3000,
+ # "content": {
+ # # should trigger a rebalance: this does not follow the index config
+ # "USDT": {
+ # "available": 1000.0,
+ # "total": 1000.0,
+ # },
+ # "ETH": {
+ # "available": 0.1,
+ # "total": 0.1,
+ # },
+ # }
+ # # "full_content": {} # irrelevant for simulator
+ # # "asset_values": {} # cleared after iteration
+ # },
+ # "orders": {}, # no open orders
+ # "positions": {}, # no positions
+ # "trades": [], # no trades
+ # }
+ # "post_actions": {}, # no post actions
+ },
+ }
+
+
+@pytest.fixture
+def btc_usdc_global_state():
+ return {
+ "exchange_account_details": {
+ "exchange_details": {
+ "internal_name": EXCHANGE_INTERNAL_NAME,
+ },
+ },
+ "automation": {
+ "metadata": {
+ "automation_id": "automation_1",
+ },
+ "client_exchange_account_elements": {
+ "portfolio": {
+ "content": {
+ "USDC": {
+ "available": 1000.0,
+ "total": 1000.0,
+ },
+ "BTC": {
+ "available": 0.1,
+ "total": 0.1,
+ },
+ },
+ },
+ },
+ "execution": {
+ "previous_execution": {
+ "trigger_time": current_time - 600,
+ "trigger_reason": "scheduled",
+ "strategy_execution_time": current_time - 590,
+ },
+ "current_execution": {
+ "trigger_reason": "scheduled",
+ },
+ },
+ },
+ }
+
+
+@pytest.fixture
+def auth_details():
+ return octobot_flow.entities.UserAuthentication(
+ email="test@test.com",
+ password="test_password",
+ hidden=True,
+ )
+
+
+@pytest.fixture
+def actions_with_market_orders():
+ return [
+ {
+ "id": "action_1",
+ "dsl_script": "market('buy', 'BTC/USDT', '20q')",
+ },
+ {
+ "id": "action_2",
+ "dsl_script": "market('buy', 'BTC/USDT', '10q')",
+ },
+ ]
+
+
+@pytest.fixture
+def actions_with_create_limit_orders():
+ return [
+ {
+ "id": "action_1",
+ "dsl_script": "limit('buy', 'BTC/USDC', '10q', '-20%')",
+ }
+ ]
+
+
+@pytest.fixture
+def actions_with_cancel_limit_orders():
+ return [
+ {
+ "id": "action_1",
+ "dsl_script": "cancel_order('BTC/USDC')",
+ }
+ ]
+
+
+@pytest.fixture
+def isolated_exchange_cache():
+ with ccxt_clients_cache.isolated_empty_cache():
+ yield
+
+
+def automation_state_dict(resolved_actions: list[octobot_flow.entities.AbstractActionDetails]) -> dict[str, typing.Any]:
+ return {
+ "automation": {
+ "metadata": {"automation_id": "automation_1"},
+ "actions_dag": {"actions": resolved_actions}
+ }
+ }
+
+
+automations_state_dict = automation_state_dict # alias for backward compatibility
+
+
+def resolved_actions(actions: list[dict[str, typing.Any]]) -> list[octobot_flow.entities.AbstractActionDetails]:
+ dag = octobot_flow.entities.ActionsDAG(
+ actions=[octobot_flow.entities.parse_action_details(action) for action in actions],
+ )
+ return dag.actions
+
+
+def create_wait_action(min_delay: float, max_delay: float, id: str = "action_wait", dependencies: list[dict[str, typing.Any]] = []) -> dict[str, typing.Any]:
+ return {
+ "id": id,
+ "dsl_script": f"wait({min_delay}, {max_delay}, return_remaining_time=True)",
+ "dependencies": dependencies,
+ }
diff --git a/packages/flow/tests/functionnal_tests/actions_reset/test_exchange_actions_reset.py b/packages/flow/tests/functionnal_tests/actions_reset/test_exchange_actions_reset.py
new file mode 100644
index 000000000..742a1b7de
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/actions_reset/test_exchange_actions_reset.py
@@ -0,0 +1,241 @@
+import pytest
+
+import octobot_commons.constants as common_constants
+import octobot_trading.enums as trading_enums
+
+import octobot_flow
+import octobot_flow.entities
+import octobot_flow.enums
+
+import tests.functionnal_tests as functionnal_tests
+from tests.functionnal_tests import (
+ current_time,
+ global_state,
+ btc_usdc_global_state,
+ auth_details,
+ actions_with_market_orders,
+ resolved_actions,
+ actions_with_create_limit_orders,
+ actions_with_cancel_limit_orders,
+)
+
+
+ADDED_COIN_SYMBOL = "BTC"
+
+
+@pytest.mark.asyncio
+async def test_exchange_actions_reset_executing_market_order_twice(
+ global_state: dict, auth_details: octobot_flow.entities.UserAuthentication, actions_with_market_orders: list[dict]
+):
+ assert len(actions_with_market_orders) == 2
+ with (
+ functionnal_tests.mocked_community_authentication(),
+ functionnal_tests.mocked_community_repository(),
+ ):
+ # 1. execute market order actions
+ automation_state = octobot_flow.entities.AutomationState.from_dict(global_state)
+ automation_state.update_automation_actions(resolved_actions(actions_with_market_orders))
+ async with octobot_flow.AutomationJob(automation_state, [], auth_details) as automations_job:
+ await automations_job.run()
+
+ # check bot actions execution
+ actions = automations_job.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_with_market_orders)
+ for action in actions:
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ if isinstance(action, octobot_flow.entities.DSLScriptActionDetails):
+ assert action.resolved_dsl_script is None
+ assert isinstance(action.result, dict)
+ assert "created_orders" in action.result
+ created_order = action.result["created_orders"][0]
+ assert created_order["symbol"] == "BTC/USDT"
+ assert created_order["side"] == "buy"
+ assert created_order["type"] == "market"
+
+ after_execution_dump = automations_job.dump()
+ # reported next execution time to the current execution triggered_at
+ assert after_execution_dump["automation"]["execution"]["previous_execution"]["triggered_at"] >= current_time
+ # no next execution time scheduled: trigger immediately
+ assert after_execution_dump["automation"]["execution"]["current_execution"]["scheduled_to"] == 0
+ # check portfolio content
+ after_execution_portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert isinstance(after_execution_dump, dict)
+ assert list(sorted(after_execution_portfolio_content.keys())) == ["BTC", "ETH", "USDT"]
+ for asset_type in [common_constants.PORTFOLIO_AVAILABLE, common_constants.PORTFOLIO_TOTAL]:
+ assert 950 < after_execution_portfolio_content["USDT"][asset_type] < 1000 # spent some USDT to buy BTC
+ assert after_execution_portfolio_content["ETH"][asset_type] == 0.1 # did not touch ETH
+ assert 0.0001 < after_execution_portfolio_content["BTC"][asset_type] < 0.001 # bought BTC
+
+ # 2. reset the first market order action
+ post_first_buy_state = automations_job.automation_state
+ post_first_buy_state.automation.actions_dag.reset_to(post_first_buy_state.automation.actions_dag.actions[0].id)
+ # action 1 has been reset
+ assert post_first_buy_state.automation.actions_dag.actions[0].executed_at is None
+ # action 2 has NOT been reset (it's not dependent on the first action)
+ assert post_first_buy_state.automation.actions_dag.actions[1].executed_at is not None
+
+ # 3. execute market order actions again
+ async with octobot_flow.AutomationJob(post_first_buy_state, [], auth_details) as automations_job_2:
+ await automations_job_2.run()
+
+ # check bot actions execution
+ actions = automations_job_2.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_with_market_orders)
+ for action in actions:
+ # action has been executed again
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ assert isinstance(action.result, dict)
+ assert "created_orders" in action.result
+ created_order = action.result["created_orders"][0]
+ assert created_order["symbol"] == "BTC/USDT"
+ assert created_order["side"] == "buy"
+ assert created_order["type"] == "market"
+
+ after_execution_dump_2 = automations_job_2.dump()
+ # reported next execution time to the current execution triggered_at
+ assert after_execution_dump_2["automation"]["execution"]["previous_execution"]["triggered_at"] >= current_time
+ # no next execution time scheduled: trigger immediately
+ assert after_execution_dump_2["automation"]["execution"]["current_execution"]["scheduled_to"] == 0
+ # check portfolio content
+ after_execution_portfolio_content_2 = after_execution_dump_2["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ for asset_type in [common_constants.PORTFOLIO_AVAILABLE, common_constants.PORTFOLIO_TOTAL]:
+ # spent some more USDT to buy BTC
+ assert after_execution_portfolio_content_2["USDT"][asset_type] < after_execution_portfolio_content["USDT"][asset_type]
+ # bought BTC
+ assert after_execution_portfolio_content_2["BTC"][asset_type] > after_execution_portfolio_content["BTC"][asset_type]
+ assert after_execution_portfolio_content_2["ETH"][asset_type] == 0.1 # did not touch ETH
+
+
+@pytest.mark.asyncio
+async def test_exchange_actions_reset_creating_and_cancelling_limit_order_twice(
+ btc_usdc_global_state: dict, auth_details: octobot_flow.entities.UserAuthentication,
+ actions_with_create_limit_orders: list[dict], actions_with_cancel_limit_orders: list[dict]
+):
+ actions_with_cancel_limit_orders[0]["id"] = "action_cancel"
+ actions_with_cancel_limit_orders[0]["dependencies"] = [{"action_id": actions_with_create_limit_orders[0]["id"]}]
+ actions_to_execute = actions_with_create_limit_orders + actions_with_cancel_limit_orders
+ assert len(actions_to_execute) == 2
+ with (
+ functionnal_tests.mocked_community_authentication(),
+ functionnal_tests.mocked_community_repository(),
+ ):
+ # 1. execute create limit order action
+ automation_state = octobot_flow.entities.AutomationState.from_dict(btc_usdc_global_state)
+ automation_state.update_automation_actions(
+resolved_actions(actions_to_execute),
+ )
+ async with octobot_flow.AutomationJob(automation_state, [], auth_details) as automations_job:
+ await automations_job.run()
+
+ # check bot actions execution
+ actions = automations_job.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_to_execute)
+ create_limit_action = actions[0]
+ cancel_action = actions[1]
+ assert create_limit_action.executed_at is not None and create_limit_action.executed_at > 0 # create order action has been executed
+ assert isinstance(create_limit_action, octobot_flow.entities.AbstractActionDetails)
+ assert create_limit_action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert isinstance(create_limit_action.result, dict)
+ assert "created_orders" in create_limit_action.result
+ order = create_limit_action.result["created_orders"][0]
+ assert order[trading_enums.ExchangeConstantsOrderColumns.SYMBOL.value] == "BTC/USDC"
+ assert 0 < order[trading_enums.ExchangeConstantsOrderColumns.AMOUNT.value] < 0.001
+ assert order[trading_enums.ExchangeConstantsOrderColumns.TYPE.value] == "limit"
+ assert order[trading_enums.ExchangeConstantsOrderColumns.SIDE.value] == "buy"
+ assert 5_000 < order[trading_enums.ExchangeConstantsOrderColumns.PRICE.value] < 10_000_000
+
+ # cancel action has not been executed yet (it depends on the create action)
+ assert cancel_action.executed_at is None
+ assert isinstance(cancel_action, octobot_flow.entities.AbstractActionDetails)
+
+ # 2. execute cancel limit order action
+ automation_state_2 = automations_job.automation_state
+ async with octobot_flow.AutomationJob(automation_state_2, [], auth_details) as automations_job_2:
+ await automations_job_2.run()
+
+ # check bot actions execution
+ actions = automations_job_2.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_to_execute)
+ create_limit_action = actions[0]
+ assert create_limit_action.executed_at is not None and create_limit_action.executed_at > 0
+ cancel_action = actions[1]
+ assert cancel_action.executed_at is not None and cancel_action.executed_at > 0
+ assert cancel_action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert isinstance(cancel_action.result, dict)
+ assert "cancelled_orders" in cancel_action.result
+ cancelled = cancel_action.result["cancelled_orders"]
+ assert len(cancelled) == 1
+ assert len(cancelled[0]) > 2 # id of the cancelled order
+
+ after_execution_dump = automations_job_2.dump()
+ after_execution_portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert "USDC" in after_execution_portfolio_content
+ for asset_type in [common_constants.PORTFOLIO_AVAILABLE, common_constants.PORTFOLIO_TOTAL]:
+ assert 5 <= after_execution_portfolio_content["USDC"][asset_type] < 10_000_000
+
+ # reported next execution time to the current execution scheduled to
+ automation_execution = after_execution_dump["automation"]["execution"]
+ assert automation_execution["previous_execution"]["triggered_at"] >= current_time
+
+ # 3. reset the create limit order action
+ limit_order_state_3 = automations_job_2.automation_state
+ limit_order_state_3.automation.actions_dag.reset_to(
+ limit_order_state_3.automation.actions_dag.actions[0].id
+ )
+ for action in limit_order_state_3.automation.actions_dag.actions:
+ assert action.executed_at is None
+ assert action.result is None
+
+ # 4. execute create limit order action again
+ async with octobot_flow.AutomationJob(limit_order_state_3, [], auth_details) as automations_job_3:
+ await automations_job_3.run()
+
+ # check bot actions execution
+ actions = automations_job_3.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_to_execute)
+ create_limit_action = actions[0]
+ cancel_action = actions[1]
+ assert create_limit_action.executed_at is not None and create_limit_action.executed_at > 0 # create order action has been executed
+ assert isinstance(create_limit_action, octobot_flow.entities.AbstractActionDetails)
+ assert create_limit_action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert isinstance(create_limit_action.result, dict)
+ assert "created_orders" in create_limit_action.result
+ order = create_limit_action.result["created_orders"][0]
+ assert order[trading_enums.ExchangeConstantsOrderColumns.SYMBOL.value] == "BTC/USDC"
+ assert 0 < order[trading_enums.ExchangeConstantsOrderColumns.AMOUNT.value] < 0.001
+ assert order[trading_enums.ExchangeConstantsOrderColumns.TYPE.value] == "limit"
+ assert order[trading_enums.ExchangeConstantsOrderColumns.SIDE.value] == "buy"
+ assert 5_000 < order[trading_enums.ExchangeConstantsOrderColumns.PRICE.value] < 10_000_000
+
+ # cancel action has not been executed yet (it depends on the create action)
+ assert cancel_action.executed_at is None
+ assert isinstance(cancel_action, octobot_flow.entities.AbstractActionDetails)
+
+ # 5. execute cancel limit order action
+ automation_state_4 = automations_job_3.automation_state
+ async with octobot_flow.AutomationJob(automation_state_4, [], auth_details) as automations_job_4:
+ await automations_job_4.run()
+
+ # check bot actions execution
+ actions = automations_job_4.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_to_execute)
+ create_limit_action = actions[0]
+ assert create_limit_action.executed_at is not None and create_limit_action.executed_at > 0
+ cancel_action = actions[1]
+ assert cancel_action.executed_at is not None and cancel_action.executed_at > 0
+ assert cancel_action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert isinstance(cancel_action.result, dict)
+ assert "cancelled_orders" in cancel_action.result
+ cancelled = cancel_action.result["cancelled_orders"]
+ assert len(cancelled) == 1
+ assert len(cancelled[0]) > 2 # id of the cancelled order
+
+ after_execution_dump = automations_job_4.dump()
+ after_execution_portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert "USDC" in after_execution_portfolio_content
+ for asset_type in [common_constants.PORTFOLIO_AVAILABLE, common_constants.PORTFOLIO_TOTAL]:
+ assert 5 <= after_execution_portfolio_content["USDC"][asset_type] < 10_000_000
\ No newline at end of file
diff --git a/packages/flow/tests/functionnal_tests/actions_reset/test_exchange_actions_split_by_wait.py b/packages/flow/tests/functionnal_tests/actions_reset/test_exchange_actions_split_by_wait.py
new file mode 100644
index 000000000..6c97364de
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/actions_reset/test_exchange_actions_split_by_wait.py
@@ -0,0 +1,203 @@
+import pytest
+import time
+import asyncio
+import mock
+
+import octobot_commons.constants as common_constants
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_trading.blockchain_wallets as blockchain_wallets
+import octobot_trading.constants as trading_constants
+import octobot_trading.enums as trading_enums
+
+import tentacles.Meta.DSL_operators.python_std_operators.base_resetting_operators as resetting_operators
+
+import octobot_flow
+import octobot_flow.entities
+import octobot_flow.enums
+
+import tests.functionnal_tests as functionnal_tests
+from tests.functionnal_tests import (
+ current_time,
+ global_state,
+ btc_usdc_global_state,
+ auth_details,
+ actions_with_market_orders,
+ resolved_actions,
+ actions_with_create_limit_orders,
+ actions_with_cancel_limit_orders,
+ create_wait_action,
+)
+
+
+ADDED_COIN_SYMBOL = "BTC"
+
+@pytest.mark.asyncio
+async def test_exchange_actions_creating_and_waiting_and_cancelling_limit(
+ btc_usdc_global_state: dict, auth_details: octobot_flow.entities.UserAuthentication,
+ actions_with_create_limit_orders: list[dict], actions_with_cancel_limit_orders: list[dict]
+):
+ wait_action = create_wait_action(50, 100, dependencies=[{"action_id": actions_with_create_limit_orders[0]["id"]}])
+ actions_with_cancel_limit_orders[0]["id"] = "action_cancel"
+ actions_with_cancel_limit_orders[0]["dependencies"] = [{"action_id": wait_action["id"]}]
+ actions_to_execute = actions_with_create_limit_orders + [wait_action] + actions_with_cancel_limit_orders
+
+ assert len(actions_to_execute) == 3
+ with (
+ functionnal_tests.mocked_community_authentication(),
+ functionnal_tests.mocked_community_repository(),
+ ):
+ t0 = time.time()
+ # 1. execute create limit order action
+ automation_state = octobot_flow.entities.AutomationState.from_dict(btc_usdc_global_state)
+ automation_state.update_automation_actions(
+resolved_actions(actions_to_execute),
+ )
+ async with octobot_flow.AutomationJob(automation_state, [], auth_details) as automations_job:
+ await automations_job.run()
+
+ # check bot actions execution
+ actions = automations_job.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_to_execute)
+ create_limit_action = actions[0]
+ wait_action = actions[1]
+ cancel_action = actions[2]
+ assert create_limit_action.executed_at is not None and create_limit_action.executed_at > 0 # create order action has been executed
+ assert isinstance(create_limit_action, octobot_flow.entities.AbstractActionDetails)
+ assert create_limit_action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert isinstance(create_limit_action.result, dict)
+ assert "created_orders" in create_limit_action.result
+ order = create_limit_action.result["created_orders"][0]
+ assert order[trading_enums.ExchangeConstantsOrderColumns.SYMBOL.value] == "BTC/USDC"
+ assert 0 < order[trading_enums.ExchangeConstantsOrderColumns.AMOUNT.value] < 0.001
+ assert order[trading_enums.ExchangeConstantsOrderColumns.TYPE.value] == "limit"
+ assert order[trading_enums.ExchangeConstantsOrderColumns.SIDE.value] == "buy"
+ assert 5_000 < order[trading_enums.ExchangeConstantsOrderColumns.PRICE.value] < 10_000_000
+
+ for action in [wait_action, cancel_action]:
+ assert action.executed_at is None
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+
+ # immediately execute wait action
+ assert automations_job.automation_state.automation.execution.current_execution.scheduled_to == 0
+
+ # 2.A execute wait action 1/3
+ automation_state_2 = automations_job.automation_state
+ with mock.patch.object(asyncio, "sleep", mock.AsyncMock(return_value=None)) as sleep_mock:
+ async with octobot_flow.AutomationJob(automation_state_2, [], auth_details) as automations_job_2:
+ await automations_job_2.run()
+ for call in sleep_mock.mock_calls:
+ # there was no call for the wait action
+ assert call.args[0] < 1
+
+ # check bot actions execution
+ actions = automations_job_2.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_to_execute)
+ create_limit_action = actions[0]
+ assert create_limit_action.executed_at is not None and create_limit_action.executed_at > 0
+ # special case: wait action is executed and automatically reset since less than 50 seconds have passed
+ wait_action = actions[1]
+ assert wait_action.executed_at is None
+ assert wait_action.result is None
+ assert wait_action.error_status is None
+ assert isinstance(wait_action.previous_execution_result, dict)
+ rescheduled_parameters = wait_action.get_rescheduled_parameters()
+ assert dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY in rescheduled_parameters
+ last_execution_result = dsl_interpreter.ReCallingOperatorResult.from_dict(
+ rescheduled_parameters[dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY][
+ dsl_interpreter.ReCallingOperatorResult.__name__
+ ]
+ )
+ assert isinstance(last_execution_result.last_execution_result, dict)
+ waiting_time_1 = last_execution_result.last_execution_result[dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value]
+ assert 0 < waiting_time_1 <= 100
+ cancel_action = actions[2]
+ assert cancel_action.executed_at is None
+
+
+ # 2.B execute wait action 2/3
+ automation_state_3 = automations_job.automation_state
+ with mock.patch.object(asyncio, "sleep", mock.AsyncMock(return_value=None)) as sleep_mock:
+ async with octobot_flow.AutomationJob(automation_state_3, [], auth_details) as automations_job_3:
+ await automations_job_3.run()
+ for call in sleep_mock.mock_calls:
+ # there was no call for the wait action
+ assert call.args[0] < 1
+
+ # check bot actions execution
+ actions = automations_job_3.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_to_execute)
+ create_limit_action = actions[0]
+ assert create_limit_action.executed_at is not None and create_limit_action.executed_at > 0
+ # special case: wait action is executed and automatically reset since less than 50 seconds have passed
+ wait_action = actions[1]
+ assert wait_action.executed_at is None
+ assert wait_action.result is None
+ assert wait_action.error_status is None
+ assert isinstance(wait_action.previous_execution_result, dict)
+ rescheduled_parameters = wait_action.get_rescheduled_parameters()
+ assert dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY in rescheduled_parameters
+ last_execution_result = dsl_interpreter.ReCallingOperatorResult.from_dict(
+ rescheduled_parameters[dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY][
+ dsl_interpreter.ReCallingOperatorResult.__name__
+ ]
+ )
+ assert isinstance(last_execution_result.last_execution_result, dict)
+ waiting_time_2 = last_execution_result.last_execution_result[dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value]
+ assert waiting_time_2 < waiting_time_1 # there is now less time to wait than during the first time
+ assert 0 < waiting_time_2 <= 100
+ cancel_action = actions[2]
+ assert cancel_action.executed_at is None
+
+ # 2.C execute wait action 3/3
+ automation_state_4 = automations_job.automation_state
+ with (
+ mock.patch.object(asyncio, "sleep", mock.AsyncMock(return_value=None)) as sleep_mock,
+ mock.patch.object(time, "time", mock.Mock(return_value=t0 + waiting_time_1 + 50)),
+ ):
+ async with octobot_flow.AutomationJob(automation_state_4, [], auth_details) as automations_job_4:
+ await automations_job_4.run()
+ for call in sleep_mock.mock_calls:
+ # there was no call for the wait action
+ assert call.args[0] < 1
+
+            # the wait action has now been executed
+ actions = automations_job_4.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_to_execute)
+ create_limit_action = actions[0]
+ assert create_limit_action.executed_at is not None and create_limit_action.executed_at > 0
+            # wait action is now executed for good: its full waiting duration has elapsed (no further reset)
+ wait_action = actions[1]
+ assert wait_action.executed_at is not None and wait_action.executed_at > 0
+ assert wait_action.result is None
+ assert wait_action.error_status is None
+ assert isinstance(wait_action.previous_execution_result, dict)
+ cancel_action = actions[2]
+ assert cancel_action.executed_at is None
+
+ # 3. execute cancel limit order action
+ automation_state_4 = automations_job_4.automation_state
+ async with octobot_flow.AutomationJob(automation_state_4, [], auth_details) as automations_job_4:
+ await automations_job_4.run()
+
+ # check bot actions execution
+ actions = automations_job_4.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_to_execute)
+ create_limit_action = actions[0]
+ assert create_limit_action.executed_at is not None and create_limit_action.executed_at > 0
+ wait_action = actions[1]
+ assert wait_action.executed_at is not None and wait_action.executed_at > 0
+ cancel_action = actions[2]
+ assert cancel_action.executed_at is not None and cancel_action.executed_at > 0
+ assert cancel_action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert isinstance(cancel_action.result, dict)
+ assert "cancelled_orders" in cancel_action.result
+ cancelled = cancel_action.result["cancelled_orders"]
+ assert len(cancelled) == 1
+ assert len(cancelled[0]) > 2 # id of the cancelled order
+
+ after_execution_dump = automations_job_4.dump()
+ after_execution_portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert "USDC" in after_execution_portfolio_content
+ for asset_type in [common_constants.PORTFOLIO_AVAILABLE, common_constants.PORTFOLIO_TOTAL]:
+ assert 5 <= after_execution_portfolio_content["USDC"][asset_type] < 10_000_000
+
\ No newline at end of file
diff --git a/packages/flow/tests/functionnal_tests/automation_management/test_stop_automation.py b/packages/flow/tests/functionnal_tests/automation_management/test_stop_automation.py
new file mode 100644
index 000000000..ad5336cae
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/automation_management/test_stop_automation.py
@@ -0,0 +1,121 @@
+import pytest
+import mock
+import time
+
+import octobot_flow
+import octobot_flow.enums
+
+import tests.functionnal_tests as functionnal_tests
+from tests.functionnal_tests import (
+ current_time,
+ resolved_actions,
+ automation_state_dict,
+)
+
+
+@pytest.fixture
+def init_action():
+ return {
+ "id": "action_init",
+ "action": octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value,
+ "config": {
+ "automation": {
+ "metadata": {"automation_id": "automation_1"},
+ },
+ },
+ }
+
+
+@pytest.fixture
+def stop_automation_action():
+ return {
+ "id": "action_stop",
+ "dsl_script": "stop_automation()",
+ "dependencies": [
+ {"action_id": "action_init"},
+ ],
+ }
+
+
+@pytest.fixture
+def random_action():
+ return {
+ "id": "action_random",
+ "dsl_script": "'yes' if 1 == 2 else 'no'",
+ "dependencies": [
+ {"action_id": "action_init"},
+ ],
+ }
+
+@pytest.mark.asyncio
+async def test_stop_automation_action_sets_post_actions_stop_flag(
+ init_action: dict,
+ stop_automation_action: dict,
+):
+ all_actions = [init_action, stop_automation_action]
+ with (
+ functionnal_tests.mocked_community_authentication(),
+ functionnal_tests.mocked_community_repository(),
+ mock.patch.object(time, "time", return_value=current_time),
+ ):
+ # 1. Initialize with configuration (only init action is executed)
+ automation_state = automation_state_dict(resolved_actions(all_actions))
+ async with octobot_flow.AutomationJob(automation_state, [], {}) as init_automation_job:
+ await init_automation_job.run()
+ assert init_automation_job.automation_state.automation.post_actions.stop_automation is False
+
+ # 2. Run again to execute the stop_automation action
+ after_config_execution_dump = init_automation_job.dump()
+ state = after_config_execution_dump
+ async with octobot_flow.AutomationJob(state, [], {}) as automation_job:
+ await automation_job.run()
+
+ # 3. Verify stop_automation action was executed and post_actions.stop_automation is set
+ actions = automation_job.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(all_actions)
+ for action in actions:
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+
+ assert automation_job.automation_state.automation.post_actions.stop_automation is True
+ assert automation_job.automation_state.priority_actions == []
+
+
+@pytest.mark.asyncio
+async def test_stop_automation_action_via_priority_actions_sets_post_actions_stop_flag(
+ init_action: dict,
+ stop_automation_action: dict,
+ random_action: dict,
+):
+ all_actions = [init_action, random_action]
+ with (
+ functionnal_tests.mocked_community_authentication(),
+ functionnal_tests.mocked_community_repository(),
+ mock.patch.object(time, "time", return_value=current_time),
+ ):
+ # 1. Initialize with configuration (only init action is executed)
+ automation_state = automation_state_dict(resolved_actions(all_actions))
+ async with octobot_flow.AutomationJob(automation_state, [], {}) as init_automation_job:
+ await init_automation_job.run()
+ assert init_automation_job.automation_state.automation.post_actions.stop_automation is False
+ # check random action is not executed
+ assert init_automation_job.automation_state.automation.actions_dag.actions[1].result is None
+ assert init_automation_job.automation_state.automation.actions_dag.actions[1].executed_at is None
+
+ # 2. Run again with stop_automation_action as priority_actions
+ after_config_execution_dump = init_automation_job.dump()
+ state = after_config_execution_dump
+ priority_actions = resolved_actions([stop_automation_action])
+ async with octobot_flow.AutomationJob(state, priority_actions, {}) as automation_job:
+ await automation_job.run()
+ # check random action is not executed
+ assert init_automation_job.automation_state.automation.actions_dag.actions[1].result is None
+ assert init_automation_job.automation_state.automation.actions_dag.actions[1].executed_at is None
+
+ # check stop_automation action is executed
+ assert priority_actions[0].executed_at is not None and priority_actions[0].executed_at >= current_time
+
+ # 3. Verify stop_automation action was executed and post_actions.stop_automation is set
+ assert automation_job.automation_state.automation.post_actions.stop_automation is True
+ assert automation_job.automation_state.priority_actions == priority_actions
+ # ensure priority_actions is added to history
diff --git a/packages/flow/tests/functionnal_tests/blockchains_actions/test_no_exchange_action.py b/packages/flow/tests/functionnal_tests/blockchains_actions/test_no_exchange_action.py
new file mode 100644
index 000000000..17cd700c4
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/blockchains_actions/test_no_exchange_action.py
@@ -0,0 +1,237 @@
+import pytest
+import mock
+import time
+import decimal
+
+import octobot_trading.constants as trading_constants
+import octobot_trading.blockchain_wallets as blockchain_wallets
+
+import octobot_flow
+import octobot_flow.entities
+import octobot_flow.enums
+
+import tests.functionnal_tests as functionnal_tests
+from tests.functionnal_tests import (
+ current_time,
+ resolved_actions,
+ automation_state_dict,
+)
+
+
+ADDED_COIN_SYMBOL = "BTC"
+DESTINATION_ADDRESS = "0xDESTINATION_ADDRESS1234567890abcdef1234567890abcdef12345678"
+WALLET_ADDRESS = "0x1234567890abcdef1234567890abcdef12345678"
+
+
+@pytest.fixture
+def init_action():
+ return {
+ "id": "action_init",
+ "action": octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value,
+ "config": {
+ "automation": {
+ "metadata": {"automation_id": "automation_1"},
+ },
+ # "exchange_account_details": {}, # no exchange account details
+ },
+ }
+
+
+@pytest.fixture
+def actions_with_blockchain_deposit_and_withdrawal_with_holding_checks():
+ blockchain_descriptor = {
+ "blockchain": blockchain_wallets.BlockchainWalletSimulator.BLOCKCHAIN,
+ "network": trading_constants.SIMULATED_BLOCKCHAIN_NETWORK,
+ "native_coin_symbol": ADDED_COIN_SYMBOL,
+ "tokens": [
+ {
+ "symbol": "ETH",
+ "decimals": 18,
+ "contract_address": "0x1234567890abcdef1234567890abcdef12345678",
+ },
+ ]
+ }
+ wallet_descriptor = {
+ "address": WALLET_ADDRESS,
+ "private_key": f"{WALLET_ADDRESS}_private_key",
+ "specific_config": {
+ "assets": [
+ {
+ "asset": ADDED_COIN_SYMBOL,
+ "amount": 1,
+ },
+ {
+ "asset": "ETH",
+ "amount": 42,
+ },
+ ]
+ }
+ }
+ return [
+ {
+ "id": "action_1",
+ "dsl_script": f"error('{octobot_flow.enums.ActionErrorStatus.NOT_ENOUGH_FUNDS.value}') if blockchain_wallet_balance({blockchain_descriptor}, {wallet_descriptor}, '{ADDED_COIN_SYMBOL}') < 1 else 'ok'",
+ "dependencies": [
+ {
+ "action_id": "action_init",
+ },
+ ],
+ },
+ {
+ "id": "action_2",
+ "dsl_script": f"blockchain_wallet_transfer({blockchain_descriptor}, {wallet_descriptor}, '{ADDED_COIN_SYMBOL}', 0.1, '{DESTINATION_ADDRESS}')",
+ "dependencies": [
+ {
+ "action_id": "action_init",
+ },
+ ],
+ },
+ ]
+
+
+@pytest.mark.asyncio
+async def test_start_with_empty_state_and_execute_simple_condition_action(
+ init_action: dict,
+):
+ all_actions = [init_action] + [{
+ "id": "action_1",
+ "dsl_script": "'yes' if 1 == 2 else 'no'",
+ "dependencies": [
+ {
+ "action_id": "action_init",
+ },
+ ],
+ }]
+ with (
+ functionnal_tests.mocked_community_authentication(),
+ functionnal_tests.mocked_community_repository(),
+ mock.patch.object(time, 'time', return_value=current_time),
+ ):
+        # 1. initialize with configuration (other actions won't be executed as their dependencies are not met)
+ automation_state = automation_state_dict(resolved_actions(all_actions))
+ async with octobot_flow.AutomationJob(automation_state, [], {}) as init_automation_job:
+ await init_automation_job.run()
+ # check actions execution
+ assert len(init_automation_job.automation_state.automation.actions_dag.actions) == len(all_actions)
+ for index, action in enumerate(init_automation_job.automation_state.automation.actions_dag.actions):
+ if index == 0:
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ assert action.result is None
+ else:
+ # not yet executed
+ assert action.executed_at is None
+ assert action.error_status is None
+ assert action.result is None
+ # check no exchange account details
+ after_config_execution_dump = init_automation_job.dump()
+ assert after_config_execution_dump["exchange_account_details"]["portfolio"]["content"] == []
+ assert "automation" in after_config_execution_dump
+ assert "reference_exchange_account_elements" not in after_config_execution_dump["automation"]
+ assert "client_exchange_account_elements" not in after_config_execution_dump["automation"]
+
+ # 2. execute simple condition action
+ state = after_config_execution_dump
+ async with octobot_flow.AutomationJob(state, [], {}) as automation_job:
+ await automation_job.run()
+
+ # check bot actions execution
+ actions = automation_job.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(all_actions)
+ for index, action in enumerate(actions):
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ if index == 0:
+ assert action.result is None
+ elif index == 1:
+ assert action.result == "no"
+ assert action.error_status is None
+ assert action.executed_at and action.executed_at >= current_time
+
+ after_execution_dump = automation_job.dump()
+ # still no portfolio
+ assert after_execution_dump["exchange_account_details"]["portfolio"]["content"] == []
+ assert "reference_exchange_account_elements" not in after_execution_dump["automation"]
+ assert "client_exchange_account_elements" not in after_execution_dump["automation"]
+
+
+@pytest.mark.asyncio
+async def test_start_with_empty_state_and_execute_blockchain_transfer_without_exchange(
+ init_action: dict, actions_with_blockchain_deposit_and_withdrawal_with_holding_checks: list[dict]
+):
+ all_actions = [init_action] + actions_with_blockchain_deposit_and_withdrawal_with_holding_checks
+ with (
+ functionnal_tests.mocked_community_authentication() as login_mock,
+ functionnal_tests.mocked_community_repository() as insert_bot_logs_mock,
+ mock.patch.object(trading_constants, 'ALLOW_FUNDS_TRANSFER', True),
+ mock.patch.object(time, 'time', return_value=current_time),
+ ):
+        # 1. initialize with configuration (other actions won't be executed as their dependencies are not met)
+ automation_state = automation_state_dict(resolved_actions(all_actions))
+ async with octobot_flow.AutomationJob(automation_state, [], {}) as init_automation_job:
+ await init_automation_job.run()
+ # check actions execution
+ assert len(init_automation_job.automation_state.automation.actions_dag.actions) == len(all_actions)
+ for index, action in enumerate(init_automation_job.automation_state.automation.actions_dag.actions):
+ if index == 0:
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ assert action.result is None
+ else:
+ # not yet executed
+ assert action.executed_at is None
+ assert action.error_status is None
+ assert action.result is None
+ # check no exchange account details
+ after_config_execution_dump = init_automation_job.dump()
+ assert after_config_execution_dump["exchange_account_details"]["portfolio"]["content"] == []
+ assert "automation" in after_config_execution_dump
+ assert "reference_exchange_account_elements" not in after_config_execution_dump["automation"]
+ assert "client_exchange_account_elements" not in after_config_execution_dump["automation"]
+        # community auth is not used in this test
+ login_mock.assert_not_called()
+ insert_bot_logs_mock.assert_not_called()
+
+ # 2. execute blockchain transfer actions
+ state = after_config_execution_dump
+ async with octobot_flow.AutomationJob(state, [], {}) as automation_job:
+ await automation_job.run()
+
+ # check bot actions execution
+ actions = automation_job.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(all_actions)
+ for index, action in enumerate(actions):
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ if index == 0:
+ assert action.result is None
+ elif index == 1:
+ assert action.result == "ok"
+ elif index == 2:
+ checked = {
+ "timestamp": int(current_time),
+ "address_from": WALLET_ADDRESS,
+ "address_to": DESTINATION_ADDRESS,
+ "network": trading_constants.SIMULATED_BLOCKCHAIN_NETWORK,
+ "currency": ADDED_COIN_SYMBOL,
+ "amount": decimal.Decimal("0.1"),
+ "fee": None,
+ "comment": "",
+ "internal": False,
+ }
+ assert len(action.result["created_transactions"]) == 1
+ for key, value in checked.items():
+ assert action.result["created_transactions"][0][key] == value
+ assert action.executed_at and action.executed_at >= current_time
+
+ after_execution_dump = automation_job.dump()
+ # still no portfolio
+ assert after_execution_dump["exchange_account_details"]["portfolio"]["content"] == []
+ assert "reference_exchange_account_elements" not in after_execution_dump["automation"]
+ assert "client_exchange_account_elements" not in after_execution_dump["automation"]
+
+        # community auth is not used in this test
+ login_mock.assert_not_called()
+ insert_bot_logs_mock.assert_not_called()
diff --git a/packages/flow/tests/functionnal_tests/blockchains_actions/test_simulator_blockchain_actions.py b/packages/flow/tests/functionnal_tests/blockchains_actions/test_simulator_blockchain_actions.py
new file mode 100644
index 000000000..c2631f795
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/blockchains_actions/test_simulator_blockchain_actions.py
@@ -0,0 +1,215 @@
+import pytest
+import mock
+
+import octobot_trading.blockchain_wallets as blockchain_wallets
+import octobot_trading.constants as trading_constants
+import octobot_trading.enums as trading_enums
+
+import octobot_flow
+import octobot_flow.entities
+import octobot_flow.enums
+
+import tests.functionnal_tests as functionnal_tests
+from tests.functionnal_tests import (
+ current_time,
+ global_state,
+ auth_details,
+ resolved_actions,
+)
+
+
+ADDED_COIN_SYMBOL = "BTC"
+
+@pytest.fixture
+def actions_with_blockchain_deposit_and_withdrawal_with_holding_checks():
+ wallet_address = "0x1234567890abcdef1234567890abcdef12345678"
+ blockchain_descriptor = {
+ "blockchain": blockchain_wallets.BlockchainWalletSimulator.BLOCKCHAIN,
+ "network": trading_constants.SIMULATED_BLOCKCHAIN_NETWORK,
+ "native_coin_symbol": ADDED_COIN_SYMBOL,
+ "tokens": [
+ {
+ "symbol": "ETH",
+ "decimals": 18,
+ "contract_address": "0x1234567890abcdef1234567890abcdef12345678",
+ },
+ ]
+ }
+ wallet_descriptor = {
+ "address": wallet_address,
+ "private_key": f"{wallet_address}_private_key",
+ "specific_config": {
+ "assets": [
+ {
+ "asset": ADDED_COIN_SYMBOL,
+ "amount": 1,
+ },
+ {
+ "asset": "ETH",
+ "amount": 42,
+ },
+ ]
+ }
+ }
+ return [
+ {
+ "id": "action_1",
+ "dsl_script": f"error('{octobot_flow.enums.ActionErrorStatus.NOT_ENOUGH_FUNDS.value}') if (blockchain_wallet_balance({blockchain_descriptor}, {wallet_descriptor}, '{ADDED_COIN_SYMBOL}') < 1) else 'ok'", # will pass
+ },
+ {
+ "id": "action_2",
+ "dsl_script": f"error('{octobot_flow.enums.ActionErrorStatus.NOT_ENOUGH_FUNDS.value}') if blockchain_wallet_balance({blockchain_descriptor}, {wallet_descriptor}, '{ADDED_COIN_SYMBOL}') < 2500 else 'ok'", # will fail
+ },
+ {
+ "id": "action_3",
+ "dsl_script": f"error('{octobot_flow.enums.ActionErrorStatus.NOT_ENOUGH_FUNDS.value}') if blockchain_wallet_balance({blockchain_descriptor}, {wallet_descriptor}, '{ADDED_COIN_SYMBOL}') < 1 else 'ok'", # will pass
+ },
+ {
+ "id": "action_4",
+ "dsl_script": f"blockchain_wallet_transfer({blockchain_descriptor}, {wallet_descriptor}, '{ADDED_COIN_SYMBOL}', 0.1, '{trading_constants.SIMULATED_DEPOSIT_ADDRESS}_{ADDED_COIN_SYMBOL}')",
+ },
+ {
+ "id": "action_5",
+ "dsl_script": f"error('{octobot_flow.enums.ActionErrorStatus.NOT_ENOUGH_FUNDS.value}') if available('{ADDED_COIN_SYMBOL}') < 0.1 else 'ok'",
+ },
+ {
+ "id": "action_6",
+ "dsl_script": f"market('sell', 'BTC/USDT', '0.04')",
+ },
+ {
+ "id": "action_7",
+ "dsl_script": f"withdraw('{ADDED_COIN_SYMBOL}', '{trading_constants.SIMULATED_BLOCKCHAIN_NETWORK}', '{wallet_address}', 0.05)",
+ },
+ {
+ "id": "action_8",
+ "dsl_script": f"error('{octobot_flow.enums.ActionErrorStatus.NOT_ENOUGH_FUNDS.value}') if blockchain_wallet_balance({blockchain_descriptor}, {wallet_descriptor}, '{ADDED_COIN_SYMBOL}') < 0.95 else 'ok'",
+ },
+ ]
+
+@pytest.mark.asyncio
+async def test_execute_actions_with_blockchain_deposit_and_withdrawal(
+ global_state: dict,
+ auth_details: octobot_flow.entities.UserAuthentication,
+ actions_with_blockchain_deposit_and_withdrawal_with_holding_checks: list[dict]
+):
+ with (
+ functionnal_tests.mocked_community_authentication() as login_mock,
+ functionnal_tests.mocked_community_repository() as insert_bot_logs_mock,
+ mock.patch.object(trading_constants, 'ALLOW_FUNDS_TRANSFER', True),
+ ):
+ async with octobot_flow.AutomationJob(global_state, [], auth_details) as automations_job:
+ automations_job.automation_state.update_automation_actions(
+ resolved_actions(actions_with_blockchain_deposit_and_withdrawal_with_holding_checks),
+ )
+ await automations_job.run()
+
+ # check bot actions execution
+ actions = automations_job.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_with_blockchain_deposit_and_withdrawal_with_holding_checks)
+ for index, action in enumerate(actions):
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+ if index == 1:
+ # only the second action will fail because of not enough funds
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NOT_ENOUGH_FUNDS.value
+ assert action.result is None
+ else:
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert isinstance(action.result, dict) or action.result == "ok"
+ assert action.result
+ assert action.executed_at and action.executed_at >= current_time
+
+ after_execution_dump = automations_job.dump()
+ # reported next execution time to the current execution triggered_at
+ assert after_execution_dump["automation"]["execution"]["previous_execution"]["triggered_at"] >= current_time
+ # no next execution time scheduled: trigger immediately
+ assert after_execution_dump["automation"]["execution"]["current_execution"]["scheduled_to"] == 0
+ # check portfolio content
+ after_execution_portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert isinstance(after_execution_dump, dict)
+ assert list(sorted(after_execution_portfolio_content.keys())) == ["BTC", "ETH", "USDT"] # BTC is now added to the portfolio
+ assert after_execution_portfolio_content["USDT"]["available"] > 2000 # sold BTC, therefore added some USDT to the portfolio (initially 1000 USDT)
+ assert after_execution_portfolio_content["ETH"]["available"] == 0.1 # did not touch ETH
+ assert 0.009 < after_execution_portfolio_content["BTC"]["total"] <= 0.01 # deposited 0.1 BTC, sold 0.04 BTC and withdrew 0.05 BTC
+ assert 0.009 < after_execution_portfolio_content["BTC"]["available"] <= 0.01 # deposited 0.1 BTC, sold 0.04 BTC and withdrew 0.05 BTC
+
+ # check transactions
+ after_execution_transactions = after_execution_dump["automation"]["client_exchange_account_elements"]["transactions"]
+ assert isinstance(after_execution_transactions, list)
+ assert len(after_execution_transactions) == 2
+ # first transaction is the deposit
+ assert after_execution_transactions[0][trading_enums.ExchangeConstantsTransactionColumns.CURRENCY.value] == ADDED_COIN_SYMBOL
+ assert after_execution_transactions[0][trading_enums.ExchangeConstantsTransactionColumns.AMOUNT.value] == 0.1
+ assert after_execution_transactions[0][trading_enums.ExchangeConstantsTransactionColumns.ADDRESS_TO.value] == "0x123_simulated_deposit_address_BTC"
+ assert after_execution_transactions[0][trading_enums.ExchangeConstantsTransactionColumns.NETWORK.value] == trading_constants.SIMULATED_BLOCKCHAIN_NETWORK
+ # second transaction is the withdrawal
+ assert after_execution_transactions[1][trading_enums.ExchangeConstantsTransactionColumns.CURRENCY.value] == ADDED_COIN_SYMBOL
+ assert after_execution_transactions[1][trading_enums.ExchangeConstantsTransactionColumns.AMOUNT.value] == 0.05
+ assert after_execution_transactions[1][trading_enums.ExchangeConstantsTransactionColumns.ADDRESS_TO.value] == "0x1234567890abcdef1234567890abcdef12345678"
+ assert after_execution_transactions[1][trading_enums.ExchangeConstantsTransactionColumns.NETWORK.value] == trading_constants.SIMULATED_BLOCKCHAIN_NETWORK
+
+ login_mock.assert_called_once()
+ insert_bot_logs_mock.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_execute_actions_with_blockchain_deposit_and_withdrawal_with_holding_checks(
+ global_state: dict,
+ auth_details: octobot_flow.entities.UserAuthentication,
+ actions_with_blockchain_deposit_and_withdrawal_with_holding_checks: list[dict]
+):
+ with (
+ functionnal_tests.mocked_community_authentication() as login_mock,
+ functionnal_tests.mocked_community_repository() as insert_bot_logs_mock,
+ mock.patch.object(trading_constants, 'ALLOW_FUNDS_TRANSFER', True),
+ ):
+ async with octobot_flow.AutomationJob(global_state, [], auth_details) as automations_job:
+ automations_job.automation_state.update_automation_actions(
+ resolved_actions(actions_with_blockchain_deposit_and_withdrawal_with_holding_checks),
+ )
+ await automations_job.run()
+
+ # check bot actions execution
+ actions = automations_job.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_with_blockchain_deposit_and_withdrawal_with_holding_checks)
+ for index, action in enumerate(actions):
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+ if index == 1:
+ # only the second action will fail because of not enough funds
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NOT_ENOUGH_FUNDS.value
+ assert action.result is None
+ else:
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert isinstance(action.result, dict) or action.result == "ok"
+ assert action.result
+ assert action.executed_at and action.executed_at >= current_time
+
+ after_execution_dump = automations_job.dump()
+ # reported next execution time to the current execution triggered_at
+ assert after_execution_dump["automation"]["execution"]["previous_execution"]["triggered_at"] >= current_time
+ # no next execution time scheduled: trigger immediately
+ assert after_execution_dump["automation"]["execution"]["current_execution"]["scheduled_to"] == 0
+ # check portfolio content
+ after_execution_portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert isinstance(after_execution_dump, dict)
+ assert list(sorted(after_execution_portfolio_content.keys())) == ["BTC", "ETH", "USDT"] # BTC is now added to the portfolio
+ assert after_execution_portfolio_content["USDT"]["available"] > 2000 # sold BTC, therefore added some USDT to the portfolio (initially 1000 USDT)
+ assert after_execution_portfolio_content["ETH"]["available"] == 0.1 # did not touch ETH
+ assert 0.009 < after_execution_portfolio_content["BTC"]["total"] <= 0.01 # deposited 0.1 BTC, sold 0.04 BTC and withdrew 0.05 BTC
+ assert 0.009 < after_execution_portfolio_content["BTC"]["available"] <= 0.01 # deposited 0.1 BTC, sold 0.04 BTC and withdrew 0.05 BTC
+ # check transactions
+ after_execution_transactions = after_execution_dump["automation"]["client_exchange_account_elements"]["transactions"]
+ assert isinstance(after_execution_transactions, list)
+ assert len(after_execution_transactions) == 2
+ # first transaction is the deposit
+ assert after_execution_transactions[0][trading_enums.ExchangeConstantsTransactionColumns.CURRENCY.value] == ADDED_COIN_SYMBOL
+ assert after_execution_transactions[0][trading_enums.ExchangeConstantsTransactionColumns.AMOUNT.value] == 0.1
+ assert after_execution_transactions[0][trading_enums.ExchangeConstantsTransactionColumns.ADDRESS_TO.value] == "0x123_simulated_deposit_address_BTC"
+ assert after_execution_transactions[0][trading_enums.ExchangeConstantsTransactionColumns.NETWORK.value] == trading_constants.SIMULATED_BLOCKCHAIN_NETWORK
+ # second transaction is the withdrawal
+ assert after_execution_transactions[1][trading_enums.ExchangeConstantsTransactionColumns.CURRENCY.value] == ADDED_COIN_SYMBOL
+ assert after_execution_transactions[1][trading_enums.ExchangeConstantsTransactionColumns.AMOUNT.value] == 0.05
+ assert after_execution_transactions[1][trading_enums.ExchangeConstantsTransactionColumns.ADDRESS_TO.value] == "0x1234567890abcdef1234567890abcdef12345678"
+ assert after_execution_transactions[1][trading_enums.ExchangeConstantsTransactionColumns.NETWORK.value] == trading_constants.SIMULATED_BLOCKCHAIN_NETWORK
+
+ login_mock.assert_called_once()
+ insert_bot_logs_mock.assert_called_once()
diff --git a/packages/flow/tests/functionnal_tests/exchanges_actions/test_authenticated_exchange_actions.py b/packages/flow/tests/functionnal_tests/exchanges_actions/test_authenticated_exchange_actions.py
new file mode 100644
index 000000000..8f2c3e1f9
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/exchanges_actions/test_authenticated_exchange_actions.py
@@ -0,0 +1,143 @@
+import pytest
+import os
+
+
+import octobot_commons.constants as common_constants
+import octobot_trading.enums as trading_enums
+
+
+import octobot_flow
+import octobot_flow.entities
+import octobot_flow.enums
+
+import tests.functionnal_tests as functionnal_tests
+from tests.functionnal_tests import (
+ current_time,
+ EXCHANGE_INTERNAL_NAME,
+ actions_with_create_limit_orders,
+ actions_with_cancel_limit_orders,
+ resolved_actions,
+ automation_state_dict,
+)
+
+
+
+@pytest.fixture
+def init_action():
+ if not os.environ.get("BINANCE_KEY") or not os.environ.get("BINANCE_SECRET"):
+ pytest.skip("BINANCE_KEY and BINANCE_SECRET must be set in the .env file to run this test, skipping...")
+ return {
+ "id": "action_init",
+ "action": octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value,
+ "config": {
+ "automation": {
+ "metadata": {"automation_id": "automation_1"},
+ "client_exchange_account_elements": {
+ "portfolio": {"content": {}},
+ },
+ },
+ "exchange_account_details": {
+ "exchange_details": {
+ "internal_name": EXCHANGE_INTERNAL_NAME,
+ },
+ "auth_details": {
+ "api_key": os.environ["BINANCE_KEY"],
+ "api_secret": os.environ["BINANCE_SECRET"],
+ },
+ "portfolio": {},
+ },
+ },
+ }
+
+
+@pytest.mark.asyncio
+async def test_execute_actions_with_limit_orders_and_empty_state(
+ init_action: dict, actions_with_create_limit_orders: list[dict], actions_with_cancel_limit_orders: list[dict]
+):
+ all_actions = [init_action]
+ with (
+ functionnal_tests.mocked_community_authentication() as login_mock,
+ functionnal_tests.mocked_community_repository() as insert_bot_logs_mock,
+ ):
+ automation_state = automation_state_dict(resolved_actions(all_actions))
+ async with octobot_flow.AutomationJob(automation_state, [], {}) as automations_job:
+ await automations_job.run()
+
+ # check bot actions execution
+ assert len(automations_job.automation_state.automation.actions_dag.actions) == len(all_actions)
+ for action in automations_job.automation_state.automation.actions_dag.actions:
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ assert action.result is None
+
+ after_execution_dump = automations_job.dump()
+ exchange_account_details = after_execution_dump["exchange_account_details"]
+ exchange_details = exchange_account_details["exchange_details"]
+ dump_auth_details = exchange_account_details["auth_details"]
+ portfolio = exchange_account_details["portfolio"]
+ assert "automation" in after_execution_dump
+ automation_execution = after_execution_dump["automation"]["execution"]
+ # assert exchange account details init
+ assert exchange_details["internal_name"] == EXCHANGE_INTERNAL_NAME
+ assert dump_auth_details["api_key"] == os.environ["BINANCE_KEY"]
+ assert dump_auth_details["api_secret"] == os.environ["BINANCE_SECRET"]
+ assert portfolio["content"] == []
+ assert portfolio["unit"] == ""
+ # assert automation portfolio (not fetched yet)
+ portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert portfolio_content == {}
+ # reported next execution time to the current execution triggered_at
+ assert automation_execution["previous_execution"]["triggered_at"] >= current_time
+ # no next execution time scheduled: trigger immediately
+ assert automation_execution["current_execution"]["scheduled_to"] == 0
+        # community auth is not used in this context
+ login_mock.assert_not_called()
+ insert_bot_logs_mock.assert_not_called()
+
+ # 2. second call: execute received limit/cancel orders actions
+ actions_to_execute = actions_with_create_limit_orders + actions_with_cancel_limit_orders
+ state = after_execution_dump
+ other_actions = resolved_actions(actions_to_execute)
+ automation_id = after_execution_dump["automation"]["metadata"]["automation_id"]
+ async with octobot_flow.AutomationJob(state, [], {}) as automations_job:
+ automations_job.automation_state.update_automation_actions(other_actions)
+ await automations_job.run()
+
+ # check bot actions execution
+ actions = automations_job.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_to_execute) + len(all_actions)
+ # Skip init action at index 0, check limit/cancel actions
+ create_limit_action = actions[1]
+ cancel_action = actions[2]
+ assert isinstance(create_limit_action, octobot_flow.entities.AbstractActionDetails)
+ assert create_limit_action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert isinstance(create_limit_action.result, dict)
+ assert "created_orders" in create_limit_action.result
+ order = create_limit_action.result["created_orders"][0]
+ assert order[trading_enums.ExchangeConstantsOrderColumns.SYMBOL.value] == "BTC/USDC"
+ assert 0 < order[trading_enums.ExchangeConstantsOrderColumns.AMOUNT.value] < 0.001
+ assert order[trading_enums.ExchangeConstantsOrderColumns.TYPE.value] == "limit"
+ assert order[trading_enums.ExchangeConstantsOrderColumns.SIDE.value] == "buy"
+ assert 5_000 < order[trading_enums.ExchangeConstantsOrderColumns.PRICE.value] < 10_000_000
+
+ assert isinstance(cancel_action, octobot_flow.entities.AbstractActionDetails)
+ assert cancel_action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert isinstance(cancel_action.result, dict)
+ assert "cancelled_orders" in cancel_action.result
+ cancelled = cancel_action.result["cancelled_orders"]
+ assert len(cancelled) == 1
+ assert len(cancelled[0]) > 2 # id of the cancelled order
+
+ after_execution_dump = automations_job.dump()
+ after_execution_portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert "USDC" in after_execution_portfolio_content
+ for asset_type in [common_constants.PORTFOLIO_AVAILABLE, common_constants.PORTFOLIO_TOTAL]:
+ assert 5 <= after_execution_portfolio_content["USDC"][asset_type] < 10_000_000
+
+ # reported next execution time to the current execution scheduled to
+ automation_execution = after_execution_dump["automation"]["execution"]
+ assert automation_execution["previous_execution"]["triggered_at"] >= current_time
+        # community auth is not used in this test
+ login_mock.assert_not_called()
+ insert_bot_logs_mock.assert_not_called()
\ No newline at end of file
diff --git a/packages/flow/tests/functionnal_tests/exchanges_actions/test_simulated_exchange_actions.py b/packages/flow/tests/functionnal_tests/exchanges_actions/test_simulated_exchange_actions.py
new file mode 100644
index 000000000..04d7a0a20
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/exchanges_actions/test_simulated_exchange_actions.py
@@ -0,0 +1,72 @@
+import pytest
+import logging
+import mock
+
+import octobot_commons.constants as common_constants
+import octobot_trading.blockchain_wallets as blockchain_wallets
+import octobot_trading.constants as trading_constants
+
+import octobot_flow
+import octobot_flow.entities
+import octobot_flow.enums
+
+import tests.functionnal_tests as functionnal_tests
+from tests.functionnal_tests import (
+ current_time,
+ global_state,
+ auth_details,
+ actions_with_market_orders,
+ resolved_actions,
+)
+
+
+ADDED_COIN_SYMBOL = "BTC"
+
+
+@pytest.mark.asyncio
+async def test_execute_actions_with_market_orders_and_existing_state(
+ global_state: dict, auth_details: octobot_flow.entities.UserAuthentication, actions_with_market_orders: list[dict]
+):
+ with (
+ functionnal_tests.mocked_community_authentication() as login_mock,
+ functionnal_tests.mocked_community_repository() as insert_bot_logs_mock,
+ ):
+ # test with parsed global state
+ automation_state = octobot_flow.entities.AutomationState.from_dict(global_state)
+ automation_state.update_automation_actions(resolved_actions(actions_with_market_orders))
+ async with octobot_flow.AutomationJob(automation_state, [], auth_details) as automations_job:
+ await automations_job.run()
+
+ # check bot actions execution
+ actions = automations_job.automation_state.automation.actions_dag.actions
+ assert len(actions) == len(actions_with_market_orders)
+ for action in actions:
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ assert isinstance(action.result, dict)
+ assert "created_orders" in action.result
+ created_order = action.result["created_orders"][0]
+ assert created_order["symbol"] == "BTC/USDT"
+ assert created_order["side"] == "buy"
+ assert created_order["type"] == "market"
+
+ after_execution_dump = automations_job.dump()
+ # reported next execution time to the current execution triggered_at
+ assert after_execution_dump["automation"]["execution"]["previous_execution"]["triggered_at"] >= current_time
+ # no next execution time scheduled: trigger immediately
+ assert after_execution_dump["automation"]["execution"]["current_execution"]["scheduled_to"] == 0
+ # check portfolio content
+ after_execution_portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert isinstance(after_execution_dump, dict)
+ assert list(sorted(after_execution_portfolio_content.keys())) == ["BTC", "ETH", "USDT"]
+ for asset_type in [common_constants.PORTFOLIO_AVAILABLE, common_constants.PORTFOLIO_TOTAL]:
+ assert 950 < after_execution_portfolio_content["USDT"][asset_type] < 1000 # spent some USDT to buy BTC
+ assert after_execution_portfolio_content["ETH"][asset_type] == 0.1 # did not touch ETH
+ assert 0.0001 < after_execution_portfolio_content["BTC"][asset_type] < 0.001 # bought BTC
+ logging.getLogger("test_execute_actions_with_market_orders").info(
+ f"after_execution_portfolio_content: {after_execution_portfolio_content}"
+ )
+ # check bot actions
+ login_mock.assert_called_once()
+ insert_bot_logs_mock.assert_called_once()
diff --git a/packages/flow/tests/functionnal_tests/state_initialization/test_start_with_empty_state.py b/packages/flow/tests/functionnal_tests/state_initialization/test_start_with_empty_state.py
new file mode 100644
index 000000000..ba1de7d3e
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/state_initialization/test_start_with_empty_state.py
@@ -0,0 +1,179 @@
+import pytest
+
+import octobot_commons.constants as common_constants
+
+import octobot_flow
+import octobot_flow.entities
+import octobot_flow.enums
+
+import tests.functionnal_tests as functionnal_tests
+from tests.functionnal_tests import current_time, EXCHANGE_INTERNAL_NAME, actions_with_market_orders, auth_details, resolved_actions, automation_state_dict
+
+
+@pytest.fixture
+def init_action():
+ return {
+ "id": "action_init",
+ "action": octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value,
+ "config": {
+ "automation": {
+ "metadata": {
+ "automation_id": "automation_1",
+ },
+ "client_exchange_account_elements": {
+ "portfolio": {
+ "content": {
+ "USDT": {
+ "available": 1000.0,
+ "total": 1000.0,
+ },
+ "ETH": {
+ "available": 0.1,
+ "total": 0.1,
+ },
+ },
+ },
+ },
+ },
+ "exchange_account_details": {
+ "exchange_details": {
+ "internal_name": EXCHANGE_INTERNAL_NAME,
+ },
+ "auth_details": {},
+ "portfolio": {},
+ },
+ },
+ }
+
+
+
+@pytest.mark.asyncio
+async def test_start_with_empty_state_and_reschedule_no_community_auth(init_action: dict):
+ all_actions = [init_action]
+ with (
+ functionnal_tests.mocked_community_authentication() as login_mock,
+ functionnal_tests.mocked_community_repository() as insert_bot_logs_mock,
+ ):
+ automation_state = automation_state_dict(resolved_actions(all_actions))
+ async with octobot_flow.AutomationJob(automation_state, [], {}) as automation_job:
+ await automation_job.run()
+
+ # check bot actions execution
+ assert len(automation_job.automation_state.automation.actions_dag.actions) == len(all_actions)
+ for action in automation_job.automation_state.automation.actions_dag.actions:
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ assert action.result is None
+
+ after_execution_dump = automation_job.dump()
+ exchange_account_details = after_execution_dump["exchange_account_details"]
+ exchange_details = exchange_account_details["exchange_details"]
+ dump_auth_details = exchange_account_details["auth_details"]
+ portfolio = exchange_account_details["portfolio"]
+ assert "automation" in after_execution_dump
+ automation_execution = after_execution_dump["automation"]["execution"]
+ # assert exchange account details init
+ assert exchange_details["internal_name"] == EXCHANGE_INTERNAL_NAME
+ assert dump_auth_details["api_key"] == ""
+ assert dump_auth_details["api_secret"] == ""
+ assert portfolio["content"] == []
+ assert portfolio["unit"] == ""
+ # assert automation portfolio
+ portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert portfolio_content == {
+ "USDT": {
+ "available": 1000.0,
+ "total": 1000.0,
+ },
+ "ETH": {
+ "available": 0.1,
+ "total": 0.1,
+ },
+ }
+ # reported next execution time to the current execution triggered_at
+ assert automation_execution["previous_execution"]["triggered_at"] >= current_time
+ # no next execution time scheduled: trigger immediately
+ assert automation_execution["current_execution"]["scheduled_to"] == 0
+        # community auth is not used in this context
+ login_mock.assert_not_called()
+ insert_bot_logs_mock.assert_not_called()
+
+
+
+@pytest.mark.asyncio
+async def test_start_with_empty_state_action_followed_by_market_orders_no_community_auth(
+ init_action: dict, actions_with_market_orders: list[dict]
+):
+ init_actions = [init_action]
+ with (
+ functionnal_tests.mocked_community_authentication() as login_mock,
+ functionnal_tests.mocked_community_repository() as insert_bot_logs_mock,
+ ):
+ # 1. initialize bot with configuration
+ automation_state = automation_state_dict(resolved_actions(init_actions))
+ async with octobot_flow.AutomationJob(automation_state, [], {}) as init_automation_job:
+ await init_automation_job.run()
+ # check actions execution
+ assert len(init_automation_job.automation_state.automation.actions_dag.actions) == len(init_actions)
+ for action in init_automation_job.automation_state.automation.actions_dag.actions:
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ assert action.result is None
+ # check portfolio content
+ after_config_execution_dump = init_automation_job.dump()
+ assert after_config_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"] == {
+ "USDT": {
+ "available": 1000.0,
+ "total": 1000.0,
+ },
+ "ETH": {
+ "available": 0.1,
+ "total": 0.1,
+ },
+ }
+        # community auth is not used in this test
+ login_mock.assert_not_called()
+ insert_bot_logs_mock.assert_not_called()
+
+ # 2. second call: execute received market orders bot actions
+ state = after_config_execution_dump
+ other_actions = resolved_actions(actions_with_market_orders)
+ async with octobot_flow.AutomationJob(state, [], {}) as automation_job:
+ automation_job.automation_state.update_automation_actions(
+ other_actions
+ )
+ await automation_job.run()
+
+ # check bot actions execution
+ assert len(automation_job.automation_state.automation.actions_dag.actions) == len(actions_with_market_orders) + len(init_actions)
+ for index, action in enumerate(automation_job.automation_state.automation.actions_dag.actions):
+ if index == 0:
+ assert action.id == init_actions[0]["id"]
+ else:
+ assert isinstance(action, octobot_flow.entities.AbstractActionDetails)
+ assert action.error_status == octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ assert action.executed_at and action.executed_at >= current_time
+ assert isinstance(action.result, dict)
+ assert "created_orders" in action.result
+ created_order = action.result["created_orders"][0]
+ assert created_order["symbol"] == "BTC/USDT"
+ assert created_order["side"] == "buy"
+ assert created_order["type"] == "market"
+
+ after_execution_dump = automation_job.dump()
+ after_execution_portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert list(sorted(after_execution_portfolio_content.keys())) == ["BTC", "ETH", "USDT"]
+ for asset_type in [common_constants.PORTFOLIO_AVAILABLE, common_constants.PORTFOLIO_TOTAL]:
+ assert 950 < after_execution_portfolio_content["USDT"][asset_type] < 1000 # spent some USDT to buy BTC
+ assert after_execution_portfolio_content["ETH"][asset_type] == 0.1 # did not touch ETH
+ assert 0.0001 < after_execution_portfolio_content["BTC"][asset_type] < 0.001 # bought BTC
+
+ # reported next execution time to the current execution triggered_at
+ automation_execution = after_execution_dump["automation"]["execution"]
+ assert automation_execution["previous_execution"]["triggered_at"] >= current_time
+ # no next execution time scheduled: trigger immediately
+ assert automation_execution["current_execution"]["scheduled_to"] == 0
+        # community auth is not used in this test
+ login_mock.assert_not_called()
+ insert_bot_logs_mock.assert_not_called()
diff --git a/packages/flow/tests/functionnal_tests/state_initialization/test_start_with_invalid_state.py b/packages/flow/tests/functionnal_tests/state_initialization/test_start_with_invalid_state.py
new file mode 100644
index 000000000..3ef8abd80
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/state_initialization/test_start_with_invalid_state.py
@@ -0,0 +1,30 @@
+import pytest
+
+import octobot_flow
+import octobot_flow.entities
+import octobot_flow.errors
+
+import tests.functionnal_tests as functionnal_tests
+from tests.functionnal_tests import auth_details
+
+
+
+@pytest.mark.asyncio
+async def test_multi_bots_job_start_with_invalid_empty_state(auth_details: octobot_flow.entities.UserAuthentication):
+ with (
+ functionnal_tests.mocked_community_authentication() as login_mock,
+ functionnal_tests.mocked_community_repository() as insert_bot_logs_mock,
+ ):
+ # AutomationJob requires at least 1 automation
+ automation_state_empty = {}
+ with pytest.raises(octobot_flow.errors.NoAutomationError):
+ async with octobot_flow.AutomationJob(automation_state_empty, [], {}) as automation_job:
+ await automation_job.run()
+
+ with pytest.raises(octobot_flow.errors.NoAutomationError):
+ async with octobot_flow.AutomationJob(automation_state_empty, [], auth_details) as automation_job:
+ await automation_job.run()
+
+        # community auth is not used (raising before)
+ login_mock.assert_not_called()
+ insert_bot_logs_mock.assert_not_called()
diff --git a/packages/flow/tests/functionnal_tests/trading_modes_actions/test_simulator_index_trading_mode_action.py b/packages/flow/tests/functionnal_tests/trading_modes_actions/test_simulator_index_trading_mode_action.py
new file mode 100644
index 000000000..bc29a9bf3
--- /dev/null
+++ b/packages/flow/tests/functionnal_tests/trading_modes_actions/test_simulator_index_trading_mode_action.py
@@ -0,0 +1,55 @@
+import pytest
+import logging
+import re
+
+import octobot_commons.enums as common_enums
+import octobot_commons.constants as common_constants
+import octobot_commons.errors
+import octobot_flow
+import octobot_flow.entities
+
+import tests.functionnal_tests as functionnal_tests
+from tests.functionnal_tests import current_time, global_state, auth_details, isolated_exchange_cache, resolved_actions
+
+
+def index_actions():
+ return [
+ {
+ "id": "action_1",
+ "dsl_script": "IndexTradingMode('BTC/USDT')",
+ }
+ ]
+
+
+@pytest.mark.asyncio
+async def test_index_update(
+ global_state: dict, auth_details: octobot_flow.entities.UserAuthentication,
+ isolated_exchange_cache, # use isolated exchange cache to avoid side effects on other tests (uses different markets)
+):
+ with (
+ functionnal_tests.mocked_community_authentication() as login_mock,
+ functionnal_tests.mocked_community_repository() as insert_bot_logs_mock,
+ ):
+ async with octobot_flow.AutomationJob(global_state, [], auth_details) as automations_job:
+ automations_job.automation_state.update_automation_actions(resolved_actions(index_actions()))
+ with pytest.raises(octobot_commons.errors.UnsupportedOperatorError, match=re.escape("Unknown operator: IndexTradingMode")):
+ await automations_job.run()
+ return # TODO: remove this once the index update is implemented
+ after_execution_dump = automations_job.dump()
+ # scheduled next execution time at 1h after the current execution (1h is the default time when unspecified)
+ assert after_execution_dump["automation"]["execution"]["previous_execution"]["triggered_at"] >= current_time
+ assert after_execution_dump["automation"]["execution"]["current_execution"]["scheduled_to"] == (
+ after_execution_dump["automation"]["execution"]["previous_execution"]["triggered_at"]
+ + common_enums.TimeFramesMinutes[common_enums.TimeFrames.ONE_HOUR] * common_constants.MINUTE_TO_SECONDS
+ )
+ # check portfolio content
+ after_execution_portfolio_content = after_execution_dump["automation"]["client_exchange_account_elements"]["portfolio"]["content"]
+ assert isinstance(after_execution_dump, dict)
+ assert list(sorted(after_execution_portfolio_content.keys())) == ["BTC", "ETH", "USDT"]
+ assert 0 < after_execution_portfolio_content["USDT"]["available"] < 5
+ assert 0.1 < after_execution_portfolio_content["ETH"]["available"] < 0.4
+ assert 0.001 < after_execution_portfolio_content["BTC"]["available"] < 0.01
+ logging.getLogger("test_update_simulated_basket_bot").info(f"after_execution_portfolio_content: {after_execution_portfolio_content}")
+ # check bot logs
+ login_mock.assert_called_once()
+ insert_bot_logs_mock.assert_called_once()
diff --git a/packages/flow/tests/jobs/test_automations_job.py b/packages/flow/tests/jobs/test_automations_job.py
new file mode 100644
index 000000000..d86b003b2
--- /dev/null
+++ b/packages/flow/tests/jobs/test_automations_job.py
@@ -0,0 +1,15 @@
+import pytest
+
+import octobot_flow
+import octobot_flow.entities
+import octobot_flow.errors
+
+from tests.functionnal_tests import global_state, auth_details
+
+
+@pytest.mark.asyncio
+async def test_not_automations_configured(global_state: dict, auth_details: octobot_flow.entities.UserAuthentication):
+ global_state["automation"] = {}
+ with pytest.raises(octobot_flow.errors.NoAutomationError):
+ async with octobot_flow.AutomationJob(global_state, [], auth_details):
+ pass
diff --git a/packages/node/octobot_node/config.py b/packages/node/octobot_node/config.py
index 1d5cc1ff9..58f735148 100644
--- a/packages/node/octobot_node/config.py
+++ b/packages/node/octobot_node/config.py
@@ -107,6 +107,8 @@ def all_cors_origins(self) -> list[str]:
TASKS_OUTPUTS_RSA_PRIVATE_KEY: Annotated[bytes | None, BeforeValidator(parse_key_to_bytes)] = None
TASKS_OUTPUTS_ECDSA_PUBLIC_KEY: Annotated[bytes | None, BeforeValidator(parse_key_to_bytes)] = None
+ USE_DEDICATED_LOG_FILE_PER_AUTOMATION: bool = False
+
def _check_default_secret(self, var_name: str, value: str | None, default_value: EmailStr | None) -> None:
if value == default_value:
message = (
diff --git a/packages/node/octobot_node/constants.py b/packages/node/octobot_node/constants.py
index ad25c2415..62aebb906 100644
--- a/packages/node/octobot_node/constants.py
+++ b/packages/node/octobot_node/constants.py
@@ -13,3 +13,11 @@
#
# You should have received a copy of the GNU General Public
# License along with OctoBot. If not, see .
+try:
+ import octobot.constants as octobot_constants
+ BASE_LOGS_FOLDER = octobot_constants.LOGS_FOLDER
+except ImportError:
+ BASE_LOGS_FOLDER = "logs"
+
+AUTOMATION_LOGS_FOLDER = f"{BASE_LOGS_FOLDER}/automations"
+PARENT_WORKFLOW_ID_LENGTH = 36 # length of a UUID4
diff --git a/packages/node/octobot_node/enums.py b/packages/node/octobot_node/enums.py
index e5ca58db1..8af634b73 100644
--- a/packages/node/octobot_node/enums.py
+++ b/packages/node/octobot_node/enums.py
@@ -25,4 +25,4 @@ class TaskResultKeys(enum.Enum):
class SchedulerQueues(enum.Enum):
- BOT_WORKFLOW_QUEUE = "bot_workflow_queue"
+ AUTOMATION_WORKFLOW_QUEUE = "automation_workflow_queue"
diff --git a/packages/node/octobot_node/errors.py b/packages/node/octobot_node/errors.py
index e2f3b3304..67b7181f2 100644
--- a/packages/node/octobot_node/errors.py
+++ b/packages/node/octobot_node/errors.py
@@ -20,3 +20,11 @@ class WorkflowError(Exception):
class WorkflowInputError(WorkflowError):
"""Raised when a workflow input is invalid"""
+
+
+class WorkflowActionExecutionError(WorkflowError):
+ """Raised when a workflow action execution fails"""
+
+
+class WorkflowPriorityActionExecutionError(WorkflowActionExecutionError):
+ """Raised when a workflow priority action execution fails"""
diff --git a/packages/node/octobot_node/models.py b/packages/node/octobot_node/models.py
index 25d136f43..89dc045f4 100644
--- a/packages/node/octobot_node/models.py
+++ b/packages/node/octobot_node/models.py
@@ -44,12 +44,10 @@ class TaskStatus(str, Enum):
class TaskType(str, Enum):
- START_OCTOBOT = "start_octobot"
EXECUTE_ACTIONS = "execute_actions"
- STOP_OCTOBOT = "stop_octobot"
class Task(BaseModel):
- id: uuid.UUID = uuid.uuid4()
+ id: str = str(uuid.uuid4())
name: typing.Optional[str] = None
description: typing.Optional[str] = None
content: typing.Optional[str] = None
diff --git a/packages/node/octobot_node/scheduler/octobot_flow_client.py b/packages/node/octobot_node/scheduler/octobot_flow_client.py
new file mode 100644
index 000000000..4fb8c57aa
--- /dev/null
+++ b/packages/node/octobot_node/scheduler/octobot_flow_client.py
@@ -0,0 +1,153 @@
+# This file is part of OctoBot Node (https://github.com/Drakkar-Software/OctoBot-Node)
+# Copyright (c) 2025 Drakkar-Software, All rights reserved.
+#
+# OctoBot is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# OctoBot is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public
+# License along with OctoBot. If not, see .
+import typing
+import dataclasses
+import json
+import logging
+
+import octobot_commons.dataclasses
+
+import octobot_node.scheduler.workflows_util as workflows_util
+
+try:
+ import octobot_flow
+ import octobot_flow.environment
+ import octobot_flow.parsers
+ import octobot_flow.entities
+ # Requires octobot_flow import and importable tentacles folder
+
+ # ensure environment is initialized
+ octobot_flow.environment.initialize_environment(True)
+
+except ImportError:
+ pass # OctoBot Flow is not available
+
+
+@dataclasses.dataclass
+class OctoBotActionsJobDescription(octobot_commons.dataclasses.MinimizableDataclass):
+ state: dict = dataclasses.field(default_factory=dict)
+ auth_details: dict = dataclasses.field(default_factory=dict)
+ params: dict = dataclasses.field(default_factory=dict)
+
+ def __post_init__(self):
+ if self.params:
+ self._parse_actions_plan(self.params)
+
+ def _parse_actions_plan(self, params: dict) -> None:
+ to_add_actions_dag = octobot_flow.parsers.ActionsDAGParser(params).parse()
+ if not to_add_actions_dag:
+ raise ValueError("No action found in params")
+ automation_id = None
+ if not automation_id and isinstance(to_add_actions_dag.actions[0], octobot_flow.entities.ConfiguredActionDetails) and to_add_actions_dag.actions[0].config:
+ config = to_add_actions_dag.actions[0].config
+ if "automation" in config:
+ automation_id = config["automation"]["metadata"]["automation_id"]
+ if not automation_id:
+ raise ValueError("No automation id found in params")
+ self._include_actions_in_automation_state(automation_id, to_add_actions_dag)
+
+ def _include_actions_in_automation_state(self, automation_id: str, actions: "octobot_flow.ActionsDAG"):
+ automation_state = octobot_flow.AutomationState.from_dict(self.state)
+ if not automation_state.automation.metadata.automation_id:
+ automation_state.automation = octobot_flow.entities.AutomationDetails(
+ metadata=octobot_flow.entities.AutomationMetadata(
+ automation_id=automation_id,
+ ),
+ actions_dag=actions,
+ )
+ else:
+ automation_state.update_automation_actions(actions.actions)
+ self.state = automation_state.to_dict(include_default_values=False)
+
+ def get_next_execution_time(self) -> float:
+ return self.state["automation"]["execution"]["current_execution"]["scheduled_to"]
+
+
+@dataclasses.dataclass
+class OctoBotActionsJobResult:
+ processed_actions: list["octobot_flow.AbstractActionDetails"]
+ next_actions_description: typing.Optional[OctoBotActionsJobDescription] = None
+ actions_dag: typing.Optional["octobot_flow.ActionsDAG"] = None
+ should_stop: bool = False
+
+
+class OctoBotActionsJob:
+ def __init__(self, description: typing.Union[str, dict], user_actions: list[dict]):
+ parsed_description = self._parse_description(description)
+ self.description: OctoBotActionsJobDescription = OctoBotActionsJobDescription.from_dict(
+ parsed_description
+ )
+ self.priority_user_actions: list[octobot_flow.AbstractActionDetails] = [
+ octobot_flow.parse_action_details(
+ user_action
+ ) for user_action in user_actions
+ ]
+ self.after_execution_state = None
+
+ def _parse_description(self, description: typing.Union[str, dict]) -> dict:
+ try:
+ parsed_description = workflows_util.get_automation_dict(description)
+ except ValueError:
+ if isinstance(description, dict):
+ parsed_description = description
+ else:
+ # description is a JSON string with key/value parameters: store it in params
+ dict_description = json.loads(description)
+ parsed_description = {
+ "params": dict_description
+ }
+ return parsed_description
+
+ async def run(self) -> OctoBotActionsJobResult:
+ async with octobot_flow.AutomationJob(
+ self.description.state, self.priority_user_actions, self.description.auth_details,
+ ) as automation_job:
+ selected_actions = (
+ self.priority_user_actions
+ or automation_job.automation_state.automation.actions_dag.get_executable_actions()
+ )
+ logging.getLogger(self.__class__.__name__).info(f"Running automation actions: {selected_actions}")
+ executed_actions = await automation_job.run()
+ self.after_execution_state = automation_job.automation_state
+ post_execution_state_dump = automation_job.dump()
+ return OctoBotActionsJobResult(
+ processed_actions=executed_actions,
+ next_actions_description=self.get_next_actions_description(post_execution_state_dump),
+ actions_dag=automation_job.automation_state.automation.actions_dag,
+ should_stop=automation_job.automation_state.automation.post_actions.stop_automation,
+ )
+
+ def get_next_actions_description(
+ self, post_execution_state: dict
+ ) -> typing.Optional[OctoBotActionsJobDescription]:
+ automation = self.after_execution_state.automation
+ if automation.actions_dag.get_executable_actions():
+ return OctoBotActionsJobDescription(
+ state=post_execution_state,
+ auth_details=self.description.auth_details,
+ )
+ if pending_actions := automation.actions_dag.get_pending_actions():
+ raise ValueError(
+ f"Automation {automation.metadata.automation_id}: actions DAG dependencies issue: "
+ f"no executable actions while there are still "
+ f"{len(pending_actions)} pending actions: {pending_actions}"
+ )
+ return None
+
+ def __repr__(self) -> str:
+ parsed_state = octobot_flow.AutomationState.from_dict(self.description.state)
+ automation_repr = str(parsed_state.automation) if parsed_state.automation else "No automation"
+ return f"OctoBotActionsJob with automation:\n- {automation_repr}"
diff --git a/packages/node/octobot_node/scheduler/octobot_lib.py b/packages/node/octobot_node/scheduler/octobot_lib.py
deleted file mode 100644
index 735c88211..000000000
--- a/packages/node/octobot_node/scheduler/octobot_lib.py
+++ /dev/null
@@ -1,196 +0,0 @@
-# This file is part of OctoBot Node (https://github.com/Drakkar-Software/OctoBot-Node)
-# Copyright (c) 2025 Drakkar-Software, All rights reserved.
-#
-# OctoBot is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either
-# version 3.0 of the License, or (at your option) any later version.
-#
-# OctoBot is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public
-# License along with OctoBot. If not, see .
-import typing
-import dataclasses
-import json
-import logging
-
-import octobot_commons.list_util as list_util
-import octobot_commons.dataclasses
-
-import octobot_tentacles_manager.api
-
-import octobot_node.config
-
-try:
- import mini_octobot
- import mini_octobot.environment
- import mini_octobot.parsers
- # Requires mini_octobot import and importable tentacles folder
-
- # ensure environment is initialized
- mini_octobot.environment.initialize_environment(True)
- # reload tentacles info to ensure mini-octobot tentacles are loaded
- octobot_tentacles_manager.api.reload_tentacle_info()
-
-
-except ImportError:
- logging.getLogger("octobot_node.scheduler.octobot_lib").warning("OctoBot is not installed, OctoBot actions will not be available")
- # mocks to allow import
- class mini_octobot_mock:
- class BotActionDetails:
- def from_dict(self, *args, **kwargs):
- raise NotImplementedError("BotActionDetails.from_dict is not implemented")
- class SingleBotActionsJob:
- def __init__(self, *args, **kwargs):
- raise NotImplementedError("SingleBotActionsJob.__init__ is not implemented")
- async def __aenter__(self):
- raise NotImplementedError("SingleBotActionsJob.__aenter__ is not implemented")
- async def __aexit__(self, *args, **kwargs):
- raise NotImplementedError("SingleBotActionsJob.__aexit__ is not implemented")
- class parsers:
- class BotActionBundleParser:
- def __init__(self, *args, **kwargs):
- raise NotImplementedError("BotActionBundleParser.__init__ is not implemented")
- def parse(self, *args, **kwargs):
- raise NotImplementedError("BotActionBundleParser.parse is not implemented")
- mini_octobot = mini_octobot_mock()
-
-
-@dataclasses.dataclass
-class OctoBotActionsJobDescription(octobot_commons.dataclasses.MinimizableDataclass):
- state: dict = dataclasses.field(default_factory=dict)
- auth_details: dict = dataclasses.field(default_factory=dict)
- params: dict = dataclasses.field(default_factory=dict)
- immediate_actions: list[mini_octobot.BotActionDetails] = dataclasses.field(default_factory=list)
- pending_actions: list[list[mini_octobot.BotActionDetails]] = dataclasses.field(default_factory=list)
-
- def __post_init__(self):
- if self.immediate_actions and isinstance(self.immediate_actions[0], dict):
- self.immediate_actions = [
- mini_octobot.BotActionDetails.from_dict(action) for action in self.immediate_actions
- ]
- if self.pending_actions and self.pending_actions[0] and isinstance(self.pending_actions[0][0], dict):
- self.pending_actions = [
- [mini_octobot.BotActionDetails.from_dict(action) for action in bundle]
- for bundle in self.pending_actions
- ]
- if self.params:
- if self.immediate_actions or self.pending_actions:
- raise ValueError("adding extra actions to a task is not yet supported")
- self._parse_actions_plan(self.params)
-
- def _parse_actions_plan(self, params: dict) -> None:
- action_bundles: list[list[mini_octobot.BotActionDetails]] = mini_octobot.parsers.BotActionBundleParser(params).parse()
- if not action_bundles:
- raise ValueError("No action bundles found in params")
- self.immediate_actions = action_bundles[0]
- self.pending_actions = action_bundles[1:]
-
- def get_next_execution_time(self) -> float:
- return min(
- bot["execution"]["current_execution"]["scheduled_to"]
- for bot in self.state["bots"]
- )
-
-
-def required_actions(func):
- def get_required_actions_wrapper(self, *args, **kwargs):
- if self.processed_actions is None:
- raise ValueError("No bot actions were executed yet")
- return func(self, *args, **kwargs)
- return get_required_actions_wrapper
-
-
-@dataclasses.dataclass
-class OctoBotActionsJobResult:
- processed_actions: list[mini_octobot.BotActionDetails]
- next_actions_description: typing.Optional[OctoBotActionsJobDescription] = None
-
- @required_actions
- def get_failed_actions(self) -> list[dict]:
- failed_actions = [
- action.result
- for action in self.processed_actions
- if action.result and isinstance(action.result, dict) and "error" in action.result
- ]
- return failed_actions
-
- @required_actions
- def get_created_orders(self) -> list[dict]:
- order_lists = [
- action.result.get("orders", [])
- for action in self.processed_actions
- if action.result
- ]
- return list_util.flatten_list(order_lists) if order_lists else []
-
- @required_actions
- def get_deposit_and_withdrawal_details(self) -> list[dict]:
- withdrawal_lists = [
- action.result
- for action in self.processed_actions
- if action.result and isinstance(action.result, dict) and "network" in action.result
- ]
- return withdrawal_lists
-
-
-class OctoBotActionsJob:
- def __init__(self, description: typing.Union[str, dict]):
- parsed_description = self._parse_description(description)
- self.description: OctoBotActionsJobDescription = OctoBotActionsJobDescription.from_dict(
- parsed_description
- )
- self.after_execution_state = None
-
- def _parse_description(self, description: typing.Union[str, dict]) -> dict:
- if isinstance(description, dict):
- # normal Non-init case
- parsed_description = description
- else:
- dict_description = json.loads(description)
- if "state" in dict_description:
- # there is a state, so it's a non init case
- parsed_description = dict_description
- else:
- # normal init case: description is a JSON string: store it in params
- parsed_description = {
- "params": dict_description
- }
- # TMP: add a simulated portfolio to the params
- parsed_description["params"]["SIMULATED_PORTFOLIO"] = {
- "ETH": 1,
- }
- return parsed_description
-
- async def run(self) -> OctoBotActionsJobResult:
- selected_actions = self.description.immediate_actions
- async with mini_octobot.SingleBotActionsJob(
- self.description.state, self.description.auth_details, selected_actions
- ) as single_bot_actions_job:
- logging.getLogger(self.__class__.__name__).info(f"Running single bot actions job actions: {selected_actions}")
- await single_bot_actions_job.run()
- self.after_execution_state = single_bot_actions_job.exchange_account_details
- post_execution_state_dump = single_bot_actions_job.dump()
- return OctoBotActionsJobResult(
- processed_actions=single_bot_actions_job.bot_actions,
- next_actions_description=self.get_next_actions_description(post_execution_state_dump)
- )
-
- def get_next_actions_description(
- self, post_execution_state: dict
- ) -> typing.Optional[OctoBotActionsJobDescription]:
- if not self.description.pending_actions:
- # completed all actions
- return None
- return OctoBotActionsJobDescription(
- state=post_execution_state,
- auth_details=self.description.auth_details,
- # next immediate actions are the first remaining pending actions
- immediate_actions=self.description.pending_actions[0],
- # next pending actions are the remaining pending actions
- pending_actions=self.description.pending_actions[1:]
- )
diff --git a/packages/node/octobot_node/scheduler/scheduler.py b/packages/node/octobot_node/scheduler/scheduler.py
index 874522016..3b453455a 100644
--- a/packages/node/octobot_node/scheduler/scheduler.py
+++ b/packages/node/octobot_node/scheduler/scheduler.py
@@ -18,11 +18,16 @@
import json
import logging
import typing
+import decimal
+import enum
+import sqlalchemy
+import octobot_commons.logging
import octobot_node.config
import octobot_node.enums
import octobot_node.models
-import octobot_node.scheduler.workflows.base as workflow_base
+import octobot_node.constants
+import octobot_node.scheduler.workflows_util as workflows_util
try:
from octobot import VERSION
except ImportError:
@@ -37,9 +42,21 @@
)
+def _sanitize(result: typing.Any) -> typing.Any:
+ if isinstance(result, decimal.Decimal):
+ return float(result)
+ if isinstance(result, enum.Enum):
+ return result.value
+ if isinstance(result, dict):
+ return {k: _sanitize(v) for k, v in result.items()}
+ elif isinstance(result, list):
+ return [_sanitize(v) for v in result]
+ return result
+
+
class Scheduler:
INSTANCE: dbos.DBOS = None # type: ignore
- BOT_WORKFLOW_QUEUE: dbos.Queue = None # type: ignore
+ AUTOMATION_WORKFLOW_QUEUE: dbos.Queue = None # type: ignore
def __init__(self):
self.logger = logging.getLogger(self.__class__.__name__)
@@ -67,6 +84,24 @@ def create(self):
"system_database_url": f"sqlite:///{octobot_node.config.settings.SCHEDULER_SQLITE_FILE}",
},
))
+ if self.INSTANCE and octobot_node.config.settings.USE_DEDICATED_LOG_FILE_PER_AUTOMATION:
+ self._setup_workflow_logging()
+
+ def _setup_workflow_logging(self) -> None:
+ """Register DBOS workflow ID provider and add workflow file handler for per-workflow log files."""
+ octobot_commons.logging.add_context_based_file_handler(
+ octobot_node.constants.AUTOMATION_LOGS_FOLDER,
+ self._get_dbos_workflow_id
+ )
+
+ @staticmethod
+ def _get_dbos_workflow_id() -> typing.Optional[str]:
+ """Return the current DBOS workflow ID when executing within a step or workflow."""
+ if workflow_id := getattr(dbos.DBOS, "workflow_id", None):
+ # group children workflows and parent workflows together
+ # (a child workflow has the parent's workflow ID as a prefix)
+ return workflow_id[:octobot_node.constants.PARENT_WORKFLOW_ID_LENGTH]
+ return None
def is_enabled(self) -> bool:
# enabled if master mode or consumer only mode
@@ -92,7 +127,7 @@ def stop(self) -> None:
self.logger.warning("Scheduler not initialized")
def create_queues(self):
- self.BOT_WORKFLOW_QUEUE = dbos.Queue(name=octobot_node.enums.SchedulerQueues.BOT_WORKFLOW_QUEUE.value)
+ self.AUTOMATION_WORKFLOW_QUEUE = dbos.Queue(name=octobot_node.enums.SchedulerQueues.AUTOMATION_WORKFLOW_QUEUE.value)
async def get_periodic_tasks(self) -> list[dict]:
"""DBOS scheduled workflows are not easily introspectable; return empty list."""
@@ -103,17 +138,48 @@ async def get_pending_tasks(self) -> list[dict]:
return []
tasks: list[dict] = []
try:
- workflows = await self.INSTANCE.list_workflows_async(status=["ENQUEUED", "PENDING"])
- for w in workflows or []:
+ pending_workflow_statuses = await self.INSTANCE.list_workflows_async(status=[dbos.WorkflowStatusString.ENQUEUED.value, dbos.WorkflowStatusString.PENDING.value])
+ for pending_workflow_status in pending_workflow_statuses or []:
try:
- task_dict = self._parse_workflow_status(w, octobot_node.models.TaskStatus.PENDING, f"Pending task: {w.name}")
+ if state := workflows_util.get_automation_state(pending_workflow_status):
+ next_step = ", ".join([
+ action.get_summary()
+ for action in state.automation.actions_dag.get_executable_actions()
+ ])
+ description = f"next steps: {next_step}"
+ else:
+ description = f"Pending task: {pending_workflow_status.workflow_id}"
+ task_dict = self._parse_workflow_status(pending_workflow_status, octobot_node.models.TaskStatus.PENDING, description)
tasks.append(task_dict)
except Exception as e:
- self.logger.warning(f"Failed to process pending workflow {w.name}: {e}")
+ self.logger.warning(f"Failed to process pending workflow {pending_workflow_status.workflow_id}: {e}")
except Exception as e:
self.logger.warning(f"Failed to list pending workflows: {e}")
return tasks
+ async def delete_workflows(self, to_delete_workflow_ids: list[str]):
+ self.logger.info(f"Deleting {len(to_delete_workflow_ids)} workflows")
+ all_completed_workflows = await self.INSTANCE.list_workflows_async(status=[
+ dbos.WorkflowStatusString.SUCCESS.value, dbos.WorkflowStatusString.ERROR.value,
+ dbos.WorkflowStatusString.CANCELLED.value, dbos.WorkflowStatusString.MAX_RECOVERY_ATTEMPTS_EXCEEDED.value
+ ])
+ to_delete_parent_workflow_ids = [
+ workflow_id[:octobot_node.constants.PARENT_WORKFLOW_ID_LENGTH] for workflow_id in to_delete_workflow_ids
+ ]
+ children_workflow_ids = [
+ workflow.workflow_id for workflow in all_completed_workflows
+ if any(workflow.workflow_id.startswith(parent_workflow_id) for parent_workflow_id in to_delete_parent_workflow_ids)
+ ]
+ merged_to_delete_workflow_ids = list(set(to_delete_workflow_ids + children_workflow_ids))
+ self.logger.info(
+ f"Including {len(merged_to_delete_workflow_ids) - len(to_delete_workflow_ids)} associated children workflows to delete"
+ )
+ await self.INSTANCE.delete_workflows_async(merged_to_delete_workflow_ids, delete_children=False)
+ self.logger.info(f"Vacuuming database")
+ with self.INSTANCE._sys_db.engine.begin() as conn:
+ conn.execute(sqlalchemy.text("VACUUM"))
+ self.logger.info(f"Database vacuum completed")
+
async def get_scheduled_tasks(self) -> list[dict]:
"""DBOS has no direct 'scheduled for later' queue; return empty list."""
return []
@@ -123,67 +189,59 @@ async def get_results(self) -> list[dict]:
return []
tasks: list[dict] = []
try:
- workflows = await self.INSTANCE.list_workflows_async(status=["SUCCESS", "ERROR"], load_output=True)
- for w in workflows or []:
+ completed_workflow_statuses = await self.INSTANCE.list_workflows_async(status=[
+ dbos.WorkflowStatusString.SUCCESS.value, dbos.WorkflowStatusString.ERROR.value
+ ], load_output=True)
+ for completed_workflow_status in completed_workflow_statuses or []:
try:
- wf_status = w.status
- if wf_status == "SUCCESS":
- if step := await workflow_base.get_current_step(w.workflow_id):
- description = f"{step.previous_step_details}"
- else:
- description = "Task completed"
- status = octobot_node.models.TaskStatus.COMPLETED
- result_obj = w.output
- if isinstance(result_obj, dict):
- result = result_obj.get(octobot_node.enums.TaskResultKeys.RESULT.value)
- metadata = result_obj.get(octobot_node.enums.TaskResultKeys.METADATA.value)
- else:
- result = result_obj
- metadata = ""
+ wf_status = completed_workflow_status.status
+ task_name = completed_workflow_status.workflow_id
+ metadata = ""
+ result = ""
+ if wf_status == dbos.WorkflowStatusString.SUCCESS.value:
+ result = completed_workflow_status.output
+ execution_error = result.get("error") if isinstance(result, dict) else None
+ description = "Error" if execution_error else "Completed"
+ status = octobot_node.models.TaskStatus.FAILED if execution_error else octobot_node.models.TaskStatus.COMPLETED
+ if task := workflows_util.get_input_task(completed_workflow_status):
+ metadata = task.content_metadata
+ task_name = task.name
else:
description = "Task failed"
status = octobot_node.models.TaskStatus.FAILED
- result = ""
- metadata = ""
- result_obj = None
tasks.append({
- "id": w.workflow_id,
- "name": self.get_task_name(result_obj, w.workflow_id),
+ "id": completed_workflow_status.workflow_id,
+ "name": task_name,
"description": description,
"status": status,
- "result": json.dumps(result) if result is not None else "",
+                        "result": json.dumps(_sanitize(result.get("history", result))) if isinstance(result, dict) else "", # TODO: revisit result serialization format
"result_metadata": metadata,
- "scheduled_at": w.created_at,
+ "scheduled_at": completed_workflow_status.created_at,
"started_at": None,
- "completed_at": w.updated_at,
+ "completed_at": completed_workflow_status.updated_at,
})
except Exception as e:
- self.logger.warning(f"Failed to process result workflow {w.workflow_id}: {e}")
+ self.logger.exception(e, True, f"Failed to process result workflow {completed_workflow_status.workflow_id}: {e}")
except Exception as e:
self.logger.warning(f"Failed to list result workflows: {e}")
return tasks
def _parse_workflow_status(
self,
- w: typing.Any,
+ workflow_status: dbos.WorkflowStatus,
status: octobot_node.models.TaskStatus,
description: typing.Optional[str] = None,
) -> dict:
"""Map DBOS WorkflowStatus to octobot_node.models.Task dict."""
- task_id = str(w.workflow_id)
- task_name = w.name if hasattr(w, "name") else str(w.workflow_id)
+ task_id = str(workflow_status.workflow_id)
+ task_name = workflow_status.name
task_type = None
task_actions = None
- if hasattr(w, "input") and w.input:
- inp = w.input
- if isinstance(inp, (list, tuple)) and inp:
- first = inp[0]
- if hasattr(first, "type"):
- task_type = first.type
- elif isinstance(first, dict):
- task_type = first.get("type")
- task_actions = first.get("actions")
+ if workflow_status.input:
+ if task := workflows_util.get_input_task(workflow_status):
+ task_type = task.type
+                task_actions = task.content # TODO: confirm content format
return {
"id": task_id,
diff --git a/packages/node/octobot_node/scheduler/tasks.py b/packages/node/octobot_node/scheduler/tasks.py
index 99d369cc4..77182565b 100644
--- a/packages/node/octobot_node/scheduler/tasks.py
+++ b/packages/node/octobot_node/scheduler/tasks.py
@@ -13,43 +13,29 @@
#
# You should have received a copy of the GNU General Public
# License along with OctoBot. If not, see .
-import uuid
import octobot_node.models
-import octobot_node.scheduler.workflows.base as workflow_base
-import octobot_commons.dataclasses.minimizable_dataclass as minimizable_dataclass
-from octobot_node.scheduler import SCHEDULER # avoid circular import
-
-
-def _generate_instance_name() -> str:
- # names can't be re-used: ensure each are unique not to mix
- # workflow attributes on recovery
- return str(uuid.uuid4())
+import octobot_node.scheduler.workflows_util as workflows_util
+import octobot_node.scheduler.workflows.params as params
async def trigger_task(task: octobot_node.models.Task) -> bool:
- import octobot_node.scheduler.workflows.bot_workflow as bot_workflow
- import octobot_node.scheduler.workflows.full_bot_workflow as full_bot_workflow
- delay = 1
+ import octobot_node.scheduler.workflows.automation_workflow as automation_workflow
+ import octobot_node.scheduler # avoid circular import
handle = None
# enqueue workflow instead of starting it to dispatch them to multiple workers if possible
- if task.type == octobot_node.models.TaskType.START_OCTOBOT.value:
- handle = await SCHEDULER.BOT_WORKFLOW_QUEUE.enqueue_async(
- full_bot_workflow.FullBotWorkflow.start,
- t=workflow_base.Tracker(name=f"{task.name}_{_generate_instance_name()}"),
- inputs=full_bot_workflow.FullBotWorkflowStartInputs(task=task, delay=delay).to_dict(include_default_values=False)
- )
- elif task.type == octobot_node.models.TaskType.STOP_OCTOBOT.value:
- handle = await SCHEDULER.BOT_WORKFLOW_QUEUE.enqueue_async(
- full_bot_workflow.FullBotWorkflow.stop,
- t=workflow_base.Tracker(name=f"{task.name}_{_generate_instance_name()}"),
- inputs=full_bot_workflow.FullBotWorkflowStopInputs(task=task, delay=delay).to_dict(include_default_values=False)
- )
- elif task.type == octobot_node.models.TaskType.EXECUTE_ACTIONS.value:
- handle = await SCHEDULER.BOT_WORKFLOW_QUEUE.enqueue_async(
- bot_workflow.BotWorkflow.execute_octobot,
- t=workflow_base.Tracker(name=f"{task.name}_{_generate_instance_name()}"),
- inputs=bot_workflow.BotWorkflowInputs(task=task, delay=delay).to_dict(include_default_values=False)
+ if task.type == octobot_node.models.TaskType.EXECUTE_ACTIONS.value:
+ handle = await octobot_node.scheduler.SCHEDULER.AUTOMATION_WORKFLOW_QUEUE.enqueue_async(
+ automation_workflow.AutomationWorkflow.execute_automation,
+ inputs=params.AutomationWorkflowInputs(task=task).to_dict(include_default_values=False)
)
else:
- raise ValueError(f"Invalid task type: {task.type}")
+ raise ValueError(f"Unsupported task type: {task.type}")
return handle is not None
+
+
+async def send_actions_to_automation(actions: list[dict], automation_id: str):
+ import octobot_node.scheduler # avoid circular import
+ workflow_status = await workflows_util.get_automation_workflow_status(automation_id)
+ await octobot_node.scheduler.SCHEDULER.INSTANCE.send_async(
+ workflow_status.workflow_id, actions, topic="user_actions"
+ )
diff --git a/packages/node/octobot_node/scheduler/workflows/__init__.py b/packages/node/octobot_node/scheduler/workflows/__init__.py
index 66bb6fb24..032554eb5 100644
--- a/packages/node/octobot_node/scheduler/workflows/__init__.py
+++ b/packages/node/octobot_node/scheduler/workflows/__init__.py
@@ -15,5 +15,4 @@
# License along with this library.
def register_workflows():
- import octobot_node.scheduler.workflows.bot_workflow
- import octobot_node.scheduler.workflows.full_bot_workflow
+ import octobot_node.scheduler.workflows.automation_workflow
diff --git a/packages/node/octobot_node/scheduler/workflows/automation_workflow.py b/packages/node/octobot_node/scheduler/workflows/automation_workflow.py
new file mode 100644
index 000000000..1c70981dc
--- /dev/null
+++ b/packages/node/octobot_node/scheduler/workflows/automation_workflow.py
@@ -0,0 +1,260 @@
+# Drakkar-Software OctoBot-Node
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import json
+import time
+import typing
+
+import octobot_commons.logging
+
+import octobot_node.models
+import octobot_node.scheduler.octobot_flow_client as octobot_flow_client
+import octobot_node.scheduler.task_context
+import octobot_node.scheduler.workflows.params as params
+import octobot_node.errors as errors
+
+if typing.TYPE_CHECKING:
+ import octobot_flow.entities
+
+from octobot_node.scheduler import SCHEDULER # avoid circular import
+
+
+
+
+@SCHEDULER.INSTANCE.dbos_class()
+class AutomationWorkflow:
+ # Always use dict as input to parse minimizable dataclasses and facilitate data format updates
+
+ @staticmethod
+ @SCHEDULER.INSTANCE.workflow(name="execute_automation")
+ async def execute_automation(inputs: dict) -> None:
+ """
+ Automation workflow runner:
+ 1. Wait for priority actions if any.
+ 2. Execute the iteration (received priority action or DAG's executable actions).
+ 3. Check and process other received priority actions if any.
+ 4. Either:
+ A. Reschedule the next iteration as a child workflow to avoid growing the workflow forever.
+ B. Complete the workflow and stop the automation.
+ """
+ try:
+ parsed_inputs = params.AutomationWorkflowInputs.from_dict(inputs)
+ delay = parsed_inputs.execution_time - time.time()
+ delay_str = f" in {delay:.2f} seconds" if delay > 0 else ""
+ AutomationWorkflow.get_logger(parsed_inputs).info(f"{AutomationWorkflow.__name__} starting{delay_str}.")
+ priority_actions: list[dict] = []
+ if delay > 0:
+ priority_actions = await AutomationWorkflow._wait_and_trigger_on_priority_actions(
+ parsed_inputs, parsed_inputs.execution_time
+ )
+ raw_iteration_result = await AutomationWorkflow.execute_iteration(inputs, priority_actions)
+ iteration_result = params.AutomationWorkflowIterationResult.from_dict(raw_iteration_result)
+ continue_workflow = False
+ if AutomationWorkflow._should_continue_workflow(parsed_inputs, iteration_result.progress_status, bool(priority_actions)):
+ continue_workflow = await AutomationWorkflow._process_pending_priority_actions_and_reschedule(
+ parsed_inputs, iteration_result
+ )
+ if not continue_workflow:
+ AutomationWorkflow.get_logger(parsed_inputs).info(
+ f"Stopped workflow (remaining steps: {iteration_result.progress_status.remaining_steps})"
+ )
+ except Exception as err:
+ AutomationWorkflow.get_logger(parsed_inputs).exception(
+ err, True, f"Interrupted workflow: unexpected critical error: {err} ({err.__class__.__name__})"
+ )
+
+ @staticmethod
+ @SCHEDULER.INSTANCE.step(name="execute_iteration")
+ async def execute_iteration(inputs: dict, user_actions: list[dict]) -> dict:
+ """
+ Execute an automation iteration: executed actions can be received priority actions or DAG's executable actions.
+ In case of priority actions, the returned next scheduled time will be the same as the previous one to respect
+ the latest DAG execution time schedule.
+
+ Should be a SCHEDULER.INSTANCE.step to avoid executing actions twice when recovering a workflow
+ that was interrupted while executing priority actions which were received AFTER the initial
+ iteration of the workflow.
+ """
+ parsed_inputs: params.AutomationWorkflowInputs = params.AutomationWorkflowInputs.from_dict(inputs)
+ executed_step: str = "no action executed"
+ execution_error = next_iteration_description = next_step = next_step_at = None
+ with octobot_node.scheduler.task_context.encrypted_task(parsed_inputs.task):
+            #### Start of decrypted task context ####
+ result: octobot_flow_client.OctoBotActionsJobResult = None # type: ignore
+ if parsed_inputs.task.type == octobot_node.models.TaskType.EXECUTE_ACTIONS.value:
+ if user_actions:
+ AutomationWorkflow.get_logger(parsed_inputs).info(f"Executing user actions: {user_actions}")
+ else:
+ AutomationWorkflow.get_logger(parsed_inputs).info(
+                        f"Executing {parsed_inputs.task.name}'s DAG's executable actions"
+ )
+ result = await octobot_flow_client.OctoBotActionsJob(
+ parsed_inputs.task.content, user_actions
+ ).run()
+ if result.processed_actions:
+ if latest_step := AutomationWorkflow._get_actions_summary(result.processed_actions, minimal=True):
+ executed_step = latest_step
+ for action in result.processed_actions:
+ if action.error_status is not None:
+ AutomationWorkflow.get_logger(parsed_inputs).error(
+ f"Error: {action.error_status} when executing action {action.id}: {action.get_summary()} "
+ )
+ execution_error = action.error_status
+ if result is None:
+ raise errors.WorkflowInputError(f"Invalid task type: {parsed_inputs.task.type}")
+ next_actions = []
+ remaining_steps = 0
+ if result.next_actions_description:
+ if result.actions_dag:
+ next_actions = result.actions_dag.get_executable_actions()
+ remaining_steps = len(result.actions_dag.get_pending_actions())
+ next_step_at = result.next_actions_description.get_next_execution_time()
+ # TODO next_iteration_description should be encrypted if encryption is enabled
+ next_iteration_description = json.dumps(
+ result.next_actions_description.to_dict(include_default_values=False)
+ )
+ next_step = AutomationWorkflow._get_actions_summary(next_actions, minimal=True)
+ AutomationWorkflow.get_logger(parsed_inputs).info(
+ f"Iteration completed, executed step: '{executed_step}', next immediate actions: {next_actions}"
+ )
+ should_stop = result.should_stop
+            #### End of decrypted task context - nothing should be done after this point ####
+
+ return params.AutomationWorkflowIterationResult(
+ progress_status=params.ProgressStatus(
+ latest_step=executed_step,
+ next_step=next_step,
+ next_step_at=next_step_at,
+ remaining_steps=remaining_steps,
+ error=execution_error,
+ should_stop=should_stop,
+ ),
+ next_iteration_description=next_iteration_description
+ ).to_dict(include_default_values=False)
+
+ @staticmethod
+ async def _wait_and_trigger_on_priority_actions(
+ parsed_inputs: params.AutomationWorkflowInputs, resume_execution_time: float
+ ) -> list[dict]:
+ delay = max(0, resume_execution_time - time.time())
+ if priority_actions := await SCHEDULER.INSTANCE.recv_async(topic="user_actions", timeout_seconds=delay):
+ AutomationWorkflow.get_logger(parsed_inputs).info(f"Received user actions: {priority_actions}")
+ return priority_actions
+ return []
+
+ @staticmethod
+ async def _process_pending_priority_actions_and_reschedule(
+ parsed_inputs: params.AutomationWorkflowInputs,
+ previous_iteration_result: params.AutomationWorkflowIterationResult
+ ) -> bool:
+ if not previous_iteration_result.next_iteration_description:
+ return False
+ # In case new priority actions were sent, execute them now.
+ # Any action sent to this workflow will be lost if not processed by it.
+ latest_iteration_result: params.AutomationWorkflowIterationResult = previous_iteration_result
+ while new_priority_actions := await AutomationWorkflow._wait_and_trigger_on_priority_actions(
+ parsed_inputs, 0
+ ):
+ extra_iteration_inputs = AutomationWorkflow._create_next_iteration_inputs(
+ parsed_inputs, latest_iteration_result.next_iteration_description, 0
+ )
+ # execute the iteration on the updated state from last iteration
+ raw_iteration_result = await AutomationWorkflow.execute_iteration(extra_iteration_inputs, new_priority_actions)
+ # use the new inputs for the next iteration of this loop
+ parsed_inputs = params.AutomationWorkflowInputs.from_dict(extra_iteration_inputs)
+ latest_iteration_result = params.AutomationWorkflowIterationResult.from_dict(raw_iteration_result)
+ if not AutomationWorkflow._should_continue_workflow(parsed_inputs, latest_iteration_result.progress_status, False):
+ return False
+ if not latest_iteration_result.next_iteration_description:
+ raise errors.WorkflowPriorityActionExecutionError(
+ f"Unexpected error: no next iteration description after processing priority actions: {latest_iteration_result}"
+ )
+ if latest_iteration_result.progress_status.should_stop:
+ AutomationWorkflow.get_logger(parsed_inputs).info(
+ f"Stopping workflow, should stop: {latest_iteration_result.progress_status.should_stop}"
+ )
+ else:
+ # successful iteration and a new iteration is required, schedule next iteration, don't return anything
+ await AutomationWorkflow._schedule_next_iteration(
+ parsed_inputs,
+ latest_iteration_result.next_iteration_description, # type: ignore
+ latest_iteration_result.progress_status
+ )
+ return True
+
+ @staticmethod
+ async def _schedule_next_iteration(
+ parsed_inputs: params.AutomationWorkflowInputs,
+ next_iteration_description: str,
+ progress_status: params.ProgressStatus
+ ):
+ next_execution_time = progress_status.next_step_at or 0
+ next_iteration_inputs = AutomationWorkflow._create_next_iteration_inputs(
+ parsed_inputs, next_iteration_description, next_execution_time
+ )
+ delay = next_execution_time - time.time()
+ delay_str = f", starting in {delay:.2f} seconds" if delay > 0 else ""
+ AutomationWorkflow.get_logger(parsed_inputs).info(
+ f"Enqueuing next iteration: next step: {progress_status.next_step}, "
+ f"remaining steps: {progress_status.remaining_steps}{delay_str}."
+ )
+ await SCHEDULER.AUTOMATION_WORKFLOW_QUEUE.enqueue_async(
+ AutomationWorkflow.execute_automation,
+ inputs=next_iteration_inputs
+ )
+
+ @staticmethod
+ def _create_next_iteration_inputs(
+ parsed_inputs: params.AutomationWorkflowInputs,
+ next_iteration_description: str,
+ next_execution_time: float
+ ) -> dict:
+ # update task.content with the next iteration description containing the automation state
+ next_task = parsed_inputs.task
+ next_task.content = next_iteration_description
+ next_execution_time = next_execution_time or 0
+ return params.AutomationWorkflowInputs(
+ task=parsed_inputs.task, execution_time=next_execution_time
+ ).to_dict(include_default_values=False)
+
+ @staticmethod
+ def _should_continue_workflow(
+ parsed_inputs: params.AutomationWorkflowInputs,
+ progress_status: params.ProgressStatus,
+ stop_on_error: bool
+ ) -> bool:
+ if progress_status.error:
+ # failed iteration, return global progress where it stopped and exit workflow
+ AutomationWorkflow.get_logger(parsed_inputs).error(
+ f"Failed iteration: stopping workflow, error: {progress_status.error}. "
+ f"Iteration's last step: {progress_status.latest_step}"
+ )
+ return stop_on_error
+ elif progress_status.should_stop:
+ AutomationWorkflow.get_logger(parsed_inputs).info(
+ f"Workflow stop required: stopping workflow"
+ )
+ return False
+ return True
+
+ @staticmethod
+ def _get_actions_summary(actions: list["octobot_flow.entities.AbstractActionDetails"], minimal: bool = False) -> str:
+ return ", ".join([action.get_summary(minimal=minimal) for action in actions]) if actions else ""
+
+ @staticmethod
+ def get_logger(parsed_inputs: params.AutomationWorkflowInputs) -> octobot_commons.logging.BotLogger:
+ return octobot_commons.logging.get_logger(
+ parsed_inputs.task.name or AutomationWorkflow.__name__
+ )
diff --git a/packages/node/octobot_node/scheduler/workflows/base/workflow_helper_mixin.py b/packages/node/octobot_node/scheduler/workflows/base/workflow_helper_mixin.py
deleted file mode 100644
index 9c6a78eb0..000000000
--- a/packages/node/octobot_node/scheduler/workflows/base/workflow_helper_mixin.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Drakkar-Software OctoBot-Node
-# Copyright (c) Drakkar-Software, All rights reserved.
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3.0 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library.
-import typing
-import dbos as dbos_lib
-import time
-
-import octobot_node.scheduler.workflows.base.workflow_tracking as workflow_tracking
-
-
-class DBOSWorkflowHelperMixin:
- @staticmethod
- def get_name(workflow_status: dbos_lib.WorkflowStatus) -> str:
- if workflow_status.input:
- for input in list(workflow_status.input.get("args", [])) + list(workflow_status.input.get("kwargs", {}).values()):
- if isinstance(input, workflow_tracking.Tracker):
- return input.name
- raise ValueError(f"No Tracker found in workflow status: {workflow_status}")
-
- @staticmethod
- async def register_delayed_start_step(t: workflow_tracking.Tracker, delay: float, next_step: str) -> None:
- await t.set_current_step(workflow_tracking.ProgressStatus(
- previous_step="delayed_start",
- previous_step_details={"delay": delay},
- next_step=next_step,
- next_step_at=time.time() + delay,
- ))
-
- @staticmethod
- async def sleep_if_needed(t: workflow_tracking.Tracker, delay: float) -> None:
- if delay > 0:
- t.logger.info(f"Sleeping for {delay} seconds ...")
- await dbos_lib.DBOS.sleep_async(delay)
diff --git a/packages/node/octobot_node/scheduler/workflows/base/workflow_tracking.py b/packages/node/octobot_node/scheduler/workflows/base/workflow_tracking.py
deleted file mode 100644
index bf8a304ab..000000000
--- a/packages/node/octobot_node/scheduler/workflows/base/workflow_tracking.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import typing
-import pydantic
-import dataclasses
-import logging
-import dbos as dbos_lib
-
-import octobot_commons.dataclasses
-
-
-CURRENT_STEP_KEY = "current_step"
-
-
-class ProgressStatus(pydantic.BaseModel):
- previous_step: str
- previous_step_details: typing.Optional[dict] = None
- next_step: typing.Optional[str] = None
- next_step_at: typing.Optional[float] = None
- remaining_steps: typing.Optional[int] = None
-
-
-async def get_current_step(workflow_id: str) -> typing.Optional[ProgressStatus]:
- return await dbos_lib.DBOS.get_event_async(workflow_id, CURRENT_STEP_KEY)
-
-
-@dataclasses.dataclass
-class Tracker(octobot_commons.dataclasses.MinimizableDataclass):
- name: str
-
- @property
- def logger(self) -> logging.Logger:
- return logging.getLogger(self.name)
-
- async def set_current_step(self, progress_status: ProgressStatus):
- await dbos_lib.DBOS.set_event_async(CURRENT_STEP_KEY, progress_status)
- self.logger.info(f"Current step updated: {progress_status.model_dump(exclude_defaults=True)}")
\ No newline at end of file
diff --git a/packages/node/octobot_node/scheduler/workflows/bot_workflow.py b/packages/node/octobot_node/scheduler/workflows/bot_workflow.py
deleted file mode 100644
index 64fb2857e..000000000
--- a/packages/node/octobot_node/scheduler/workflows/bot_workflow.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# Drakkar-Software OctoBot-Node
-# Copyright (c) Drakkar-Software, All rights reserved.
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3.0 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library.
-
-import dataclasses
-import typing
-import json
-import copy
-import time
-
-import octobot_node.models
-import octobot_node.enums
-import octobot_node.scheduler.octobot_lib as octobot_lib
-import octobot_node.scheduler.task_context
-import octobot_node.scheduler.workflows.base as workflow_base
-import octobot_commons.dataclasses.minimizable_dataclass
-import octobot_node.errors as errors
-
-
-from octobot_node.scheduler import SCHEDULER # avoid circular import
-
-
-@dataclasses.dataclass
-class BotWorkflowInputs(octobot_commons.dataclasses.minimizable_dataclass.MinimizableDataclass):
- task: octobot_node.models.Task
- delay: float = 0
-
-
-@dataclasses.dataclass
-class BotIterationResult(octobot_commons.dataclasses.minimizable_dataclass.MinimizableDataclass):
- task_result: dict
- next_iteration_time: typing.Optional[float]
- next_task: typing.Optional[octobot_node.models.Task]
-
-
-INIT_STEP = "init"
-
-
-@SCHEDULER.INSTANCE.dbos_class()
-class BotWorkflow(workflow_base.DBOSWorkflowHelperMixin):
- # use dict as input to parse minimizable dataclasses and facilitate data format updates
-
- @staticmethod
- @SCHEDULER.INSTANCE.workflow(name="execute_octobot")
- async def execute_octobot(t: workflow_base.Tracker, inputs: dict) -> dict:
- parsed_inputs = BotWorkflowInputs.from_dict(inputs)
- should_continue = True
- delay = parsed_inputs.delay
- if delay > 0:
- await workflow_base.DBOSWorkflowHelperMixin.register_delayed_start_step(t, delay, INIT_STEP)
- next_task: octobot_node.models.Task = parsed_inputs.task
- while should_continue:
- await BotWorkflow.sleep_if_needed(t, delay)
- raw_iteration_result = await BotWorkflow.execute_iteration(t, next_task)
- iteration_result = BotIterationResult.from_dict(raw_iteration_result)
- if iteration_result.next_iteration_time:
- should_continue = True
- delay = iteration_result.next_iteration_time - time.time()
- if iteration_result.next_task is None:
- raise errors.WorkflowInputError(f"iteration_result.next_task is None, this should not happen. {iteration_result=}")
- next_task = iteration_result.next_task
- else:
- should_continue = False
- t.logger.info(f"BotWorkflow completed, last iteration result: {iteration_result.task_result}")
- return iteration_result.task_result
-
- @staticmethod
- @SCHEDULER.INSTANCE.step(name="execute_iteration")
- async def execute_iteration(t: workflow_base.Tracker, task: octobot_node.models.Task) -> dict:
- next_iteration_time = None
- task_output = {}
- next_task = copy.copy(task)
- with octobot_node.scheduler.task_context.encrypted_task(task):
- current_step = INIT_STEP
- if task.type == octobot_node.models.TaskType.EXECUTE_ACTIONS.value:
- t.logger.info(f"Executing task '{task.name}' ...")
- result: octobot_lib.OctoBotActionsJobResult = await octobot_lib.OctoBotActionsJob(
- task.content
- ).run()
- current_step = ", ".join([str(action.config) for action in result.processed_actions]) if result.processed_actions else None
- task_output = BotWorkflow._format_octobot_actions_job_result(result)
- if result.next_actions_description:
- next_iteration_time = result.next_actions_description.get_next_execution_time()
- next_task.content = json.dumps(result.next_actions_description.to_dict(
- include_default_values=False
- ))
- else:
- raise errors.WorkflowInputError(f"Invalid task type: {task.type}")
- t.logger.info(
- f"Task '{task.name}' completed. Next immediate actions: "
- f"{result.next_actions_description.immediate_actions if result.next_actions_description else None}"
- )
- await t.set_current_step(workflow_base.ProgressStatus(
- previous_step=current_step,
- previous_step_details=task_output,
- next_step=", ".join([str(action.config) for action in result.next_actions_description.immediate_actions]) if result.next_actions_description else None,
- next_step_at=result.next_actions_description.get_next_execution_time() if result.next_actions_description else None,
- remaining_steps=len(result.next_actions_description.pending_actions) + 1 if result.next_actions_description else (
- 1 if result.next_actions_description else 0
- ),
- ))
- task_result = {
- octobot_node.enums.TaskResultKeys.STATUS.value: octobot_node.models.TaskStatus.COMPLETED.value,
- octobot_node.enums.TaskResultKeys.RESULT.value: task_output,
- octobot_node.enums.TaskResultKeys.METADATA.value: task.result_metadata,
- octobot_node.enums.TaskResultKeys.TASK.value: {"name": task.name},
- octobot_node.enums.TaskResultKeys.ERROR.value: None,
- }
- return BotIterationResult(
- task_result=task_result,
- next_iteration_time=next_iteration_time,
- next_task=next_task,
- ).to_dict(include_default_values=False)
-
- @staticmethod
- def _format_octobot_actions_job_result(result: octobot_lib.OctoBotActionsJobResult) -> dict:
- result_dict = {
- "orders": result.get_created_orders(),
- "transfers": result.get_deposit_and_withdrawal_details(),
- }
- if failed_actions := result.get_failed_actions():
- result_dict["errors"] = failed_actions
- return result_dict
diff --git a/packages/node/octobot_node/scheduler/workflows/full_bot_workflow.py b/packages/node/octobot_node/scheduler/workflows/full_bot_workflow.py
deleted file mode 100644
index 6da970b28..000000000
--- a/packages/node/octobot_node/scheduler/workflows/full_bot_workflow.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# Drakkar-Software OctoBot-Node
-# Copyright (c) Drakkar-Software, All rights reserved.
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3.0 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library.
-
-import dataclasses
-
-import octobot_node.models
-import octobot_node.enums
-import octobot_node.scheduler.workflows.base as workflow_base
-import octobot_node.scheduler.task_context
-
-from octobot_node.scheduler import SCHEDULER # avoid circular import
-import octobot_commons.dataclasses.minimizable_dataclass
-
-
-@dataclasses.dataclass
-class FullBotWorkflowStartInputs(octobot_commons.dataclasses.minimizable_dataclass.MinimizableDataclass):
- task: octobot_node.models.Task
- delay: float
-
-@dataclasses.dataclass
-class FullBotWorkflowStopInputs(octobot_commons.dataclasses.minimizable_dataclass.MinimizableDataclass):
- task: octobot_node.models.Task
- delay: float
-
-
-@SCHEDULER.INSTANCE.dbos_class()
-class FullBotWorkflow(workflow_base.DBOSWorkflowHelperMixin):
- # use dict as inputs to parse minimizable dataclasses and facilitate data format updates
-
- @staticmethod
- @SCHEDULER.INSTANCE.workflow(name="start_full_octobot")
- async def start(t: workflow_base.Tracker, inputs: dict) -> dict:
- parsed_inputs = FullBotWorkflowStartInputs.from_dict(inputs)
- if parsed_inputs.delay > 0:
- await workflow_base.DBOSWorkflowHelperMixin.register_delayed_start_step(t, parsed_inputs.delay, "start_bot")
- await FullBotWorkflow.sleep_if_needed(t, parsed_inputs.delay)
- # todo implement start logic: start bot with process name from self.get_bot_process_name()
- with octobot_node.scheduler.task_context.encrypted_task(parsed_inputs.task):
- parsed_inputs.task.result = "ok"
- return {
- octobot_node.enums.TaskResultKeys.STATUS.value: octobot_node.models.TaskStatus.COMPLETED.value,
- octobot_node.enums.TaskResultKeys.RESULT.value: parsed_inputs.task.result,
- octobot_node.enums.TaskResultKeys.METADATA.value: parsed_inputs.task.result_metadata,
- octobot_node.enums.TaskResultKeys.TASK.value: {"name": parsed_inputs.task.name},
- octobot_node.enums.TaskResultKeys.ERROR.value: None,
- }
-
- @staticmethod
- @SCHEDULER.INSTANCE.workflow(name="stop_full_octobot")
- async def stop(t: workflow_base.Tracker, inputs: dict) -> dict:
- parsed_inputs = FullBotWorkflowStopInputs.from_dict(inputs)
- if parsed_inputs.delay > 0:
- await workflow_base.DBOSWorkflowHelperMixin.register_delayed_start_step(t, parsed_inputs.delay, "stop_bot")
- await FullBotWorkflow.sleep_if_needed(t, parsed_inputs.delay)
- # todo implement stop logic: stop bot with process name from self.get_bot_process_name()
- with octobot_node.scheduler.task_context.encrypted_task(parsed_inputs.task):
- parsed_inputs.task.result = "ok"
- return {
- octobot_node.enums.TaskResultKeys.STATUS.value: octobot_node.models.TaskStatus.COMPLETED.value,
- octobot_node.enums.TaskResultKeys.RESULT.value: parsed_inputs.task.result,
- octobot_node.enums.TaskResultKeys.METADATA.value: parsed_inputs.task.result_metadata,
- octobot_node.enums.TaskResultKeys.TASK.value: {"name": parsed_inputs.task.name},
- octobot_node.enums.TaskResultKeys.ERROR.value: None,
- }
-
- @staticmethod
- def get_bot_process_name(t: workflow_base.Tracker) -> str:
- return f"octobot_{t.name}"
diff --git a/packages/node/octobot_node/scheduler/workflows/base/__init__.py b/packages/node/octobot_node/scheduler/workflows/params/__init__.py
similarity index 73%
rename from packages/node/octobot_node/scheduler/workflows/base/__init__.py
rename to packages/node/octobot_node/scheduler/workflows/params/__init__.py
index eda19f066..4448d2ab2 100644
--- a/packages/node/octobot_node/scheduler/workflows/base/__init__.py
+++ b/packages/node/octobot_node/scheduler/workflows/params/__init__.py
@@ -13,20 +13,16 @@
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
-
-from octobot_node.scheduler.workflows.base.workflow_tracking import (
- Tracker,
+from .base_params import (
ProgressStatus,
- get_current_step
)
-
-from octobot_node.scheduler.workflows.base.workflow_helper_mixin import (
- DBOSWorkflowHelperMixin
+from .automation_workflow_params import (
+ AutomationWorkflowInputs,
+ AutomationWorkflowIterationResult,
)
__all__ = [
- "Tracker",
+ "AutomationWorkflowInputs",
+ "AutomationWorkflowIterationResult",
"ProgressStatus",
- "DBOSWorkflowHelperMixin",
- "get_current_step"
]
diff --git a/packages/node/octobot_node/scheduler/workflows/params/automation_workflow_params.py b/packages/node/octobot_node/scheduler/workflows/params/automation_workflow_params.py
new file mode 100644
index 000000000..adc96f5eb
--- /dev/null
+++ b/packages/node/octobot_node/scheduler/workflows/params/automation_workflow_params.py
@@ -0,0 +1,33 @@
+# Drakkar-Software OctoBot-Node
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import dataclasses
+import typing
+
+import octobot_commons.dataclasses.minimizable_dataclass
+import octobot_node.models
+import octobot_node.scheduler.workflows.params.base_params as base_params
+
+
+@dataclasses.dataclass
+class AutomationWorkflowInputs(octobot_commons.dataclasses.minimizable_dataclass.MinimizableDataclass):
+ task: octobot_node.models.Task
+ execution_time: float = 0
+
+
+@dataclasses.dataclass
+class AutomationWorkflowIterationResult(octobot_commons.dataclasses.minimizable_dataclass.MinimizableDataclass):
+ progress_status: base_params.ProgressStatus
+ next_iteration_description: typing.Optional[str]
diff --git a/packages/node/octobot_node/scheduler/workflows/params/base_params.py b/packages/node/octobot_node/scheduler/workflows/params/base_params.py
new file mode 100644
index 000000000..ac320048c
--- /dev/null
+++ b/packages/node/octobot_node/scheduler/workflows/params/base_params.py
@@ -0,0 +1,29 @@
+# Drakkar-Software OctoBot-Node
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import pydantic
+import typing
+
+
+class ProgressStatus(pydantic.BaseModel):
+ """
+ Only contains non-encrypted data
+ """
+ latest_step: typing.Optional[str] = None
+ next_step: typing.Optional[str] = None
+ next_step_at: typing.Optional[float] = None
+ remaining_steps: typing.Optional[int] = None
+ error: typing.Optional[str] = None
+ should_stop: bool = False
diff --git a/packages/node/octobot_node/scheduler/workflows_util.py b/packages/node/octobot_node/scheduler/workflows_util.py
index 903af01fd..daa9492e0 100644
--- a/packages/node/octobot_node/scheduler/workflows_util.py
+++ b/packages/node/octobot_node/scheduler/workflows_util.py
@@ -13,11 +13,78 @@
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
-
+import json
+import typing
import dbos as dbos_lib
-from octobot_node.scheduler import SCHEDULER
+import octobot_commons.logging
+import octobot_node.models as models
+import octobot_node.scheduler.workflows.params as params
+
+
+try:
+ import octobot_flow
+except ImportError:
+ octobot_commons.logging.get_logger("octobot_node.scheduler.workflows_util").warning(
+ "octobot_flow is not installed, workflows utilities will not be available"
+ )
+
+
+STATE_KEY = "state"
+
+
+def get_automation_state(workflow_status: dbos_lib.WorkflowStatus) -> typing.Optional["octobot_flow.AutomationState"]:
+ """Get the automation state from the workflow status"""
+ if state_dict := get_automation_state_dict(workflow_status):
+ return octobot_flow.AutomationState.from_dict(state_dict)
+ return None
+
+
+def get_automation_id(workflow_status: dbos_lib.WorkflowStatus) -> typing.Optional[str]:
+ if state_dict := get_automation_state_dict(workflow_status):
+ return state_dict.get("automation", {}).get("metadata", {}).get("automation_id")
+ return None
+
+
+def get_automation_state_dict(workflow_status: dbos_lib.WorkflowStatus) -> typing.Optional[dict]:
+ if inputs := get_automation_workflow_inputs(workflow_status):
+ try:
+ return get_automation_dict(inputs.task.content)[STATE_KEY]
+ except ValueError:
+ return None
+ return None
+
+
+def get_input_task(workflow_status: dbos_lib.WorkflowStatus) -> typing.Optional[models.Task]:
+ if inputs := get_automation_workflow_inputs(workflow_status):
+ return inputs.task
+ return None
+
+
+def get_automation_workflow_inputs(workflow_status: dbos_lib.WorkflowStatus) -> typing.Optional[params.AutomationWorkflowInputs]:
+ for input in list(workflow_status.input.get("args", [])) + list(workflow_status.input.get("kwargs", {}).values()):
+ if isinstance(input, dict):
+ try:
+ parsed_inputs = params.AutomationWorkflowInputs.from_dict(input)
+ return parsed_inputs
+ except TypeError:
+                octobot_commons.logging.get_logger(__name__).warning(f"Failed to parse inputs: {input}")
+ pass
+ return None
+
+
+def get_automation_dict(description: typing.Union[str, dict]) -> dict:
+ if isinstance(description, str):
+ description = json.loads(description)
+ if isinstance(description, dict) and (state := description.get(STATE_KEY)) and isinstance(state, dict):
+ return description
+ raise ValueError("No automation state found in description")
-async def get_workflow_handle(workflow_id: str) -> dbos_lib.WorkflowHandleAsync:
- return await SCHEDULER.INSTANCE.retrieve_workflow_async(workflow_id)
\ No newline at end of file
+async def get_automation_workflow_status(automation_id: str) -> dbos_lib.WorkflowStatus:
+ for workflow_status in await dbos_lib.DBOS.list_workflows_async(status=[
+ dbos_lib.WorkflowStatusString.PENDING.value, dbos_lib.WorkflowStatusString.ENQUEUED.value
+ ]):
+ if get_automation_id(workflow_status) == automation_id:
+ return workflow_status
+ raise ValueError(f"No automation workflow found for automation_id: {automation_id}")
diff --git a/packages/node/requirements.txt b/packages/node/requirements.txt
index 2cc389102..6e1b9f323 100644
--- a/packages/node/requirements.txt
+++ b/packages/node/requirements.txt
@@ -2,4 +2,4 @@ fastapi[standard]==0.135.1
passlib[bcrypt]==1.7.4
pydantic
-dbos==2.14.0
+dbos==2.15.0
diff --git a/packages/node/tests/scheduler/__init__.py b/packages/node/tests/scheduler/__init__.py
index d9e911ea1..f4c888305 100644
--- a/packages/node/tests/scheduler/__init__.py
+++ b/packages/node/tests/scheduler/__init__.py
@@ -21,24 +21,35 @@
import octobot_node.scheduler.workflows
+def init_scheduler(db_file_name: str):
+ config: dbos.DBOSConfig = {
+ "name": "scheduler_test",
+ "system_database_url": f"sqlite:///{db_file_name}",
+ }
+ if octobot_node.scheduler.SCHEDULER.AUTOMATION_WORKFLOW_QUEUE is None:
+ octobot_node.scheduler.SCHEDULER.create_queues()
+ dbos.DBOS(config=config)
+ octobot_node.scheduler.SCHEDULER.INSTANCE = dbos.DBOS
+ octobot_node.scheduler.workflows.register_workflows()
+ return dbos.DBOS
+
+
@pytest.fixture()
def temp_dbos_scheduler():
# from https://docs.dbos.dev/python/tutorials/testing
# don't use too muck as it is very slow
with tempfile.NamedTemporaryFile() as temp_file:
- temp_file_name = temp_file.name
- config: dbos.DBOSConfig = {
- "name": "scheduler_test",
- "system_database_url": f"sqlite:///{temp_file_name}",
- }
- if octobot_node.scheduler.SCHEDULER.BOT_WORKFLOW_QUEUE is None:
- octobot_node.scheduler.SCHEDULER.create_queues()
- dbos.DBOS(config=config)
- dbos.DBOS.reset_system_database()
- octobot_node.scheduler.SCHEDULER.INSTANCE = dbos.DBOS
- octobot_node.scheduler.workflows.register_workflows()
- dbos.DBOS.launch()
+        dbos = init_scheduler(temp_file.name)
+ dbos.reset_system_database()
+ dbos.launch()
try:
yield octobot_node.scheduler.SCHEDULER
finally:
- dbos.DBOS.destroy()
+ dbos.destroy()
+
+
+def init_and_destroy_scheduler(db_file_name: str):
+ dbos = init_scheduler(db_file_name)
+ dbos.reset_system_database()
+ dbos.launch()
+ dbos.destroy()
diff --git a/packages/node/tests/scheduler/test_octobot_flow_client_lib.py b/packages/node/tests/scheduler/test_octobot_flow_client_lib.py
new file mode 100644
index 000000000..e236d7267
--- /dev/null
+++ b/packages/node/tests/scheduler/test_octobot_flow_client_lib.py
@@ -0,0 +1,982 @@
+# Drakkar-Software OctoBot-Node
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import pytest
+import decimal
+import time
+import mock
+import typing
+
+import octobot_commons.list_util as list_util
+import octobot_commons.constants as common_constants
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_trading.constants
+import octobot_trading.errors
+import octobot_trading.enums as trading_enums
+import octobot_trading.personal_data.orders.order_factory as order_factory
+import octobot_node.scheduler.octobot_flow_client as octobot_flow_client
+
+RUN_TESTS = True
+
+
+try:
+ import octobot_flow.entities
+ import octobot_flow.enums
+
+ import tentacles.Meta.DSL_operators as DSL_operators
+
+ BLOCKCHAIN = octobot_trading.constants.SIMULATED_BLOCKCHAIN_NETWORK
+except ImportError as err:
+ import traceback
+ traceback.print_exc()
+ print(f"Error importing octobot_flow: {err}")
+ # tests will be skipped if octobot_trading or octobot_wrapper are not installed
+ RUN_TESTS = False
+ BLOCKCHAIN = "unavailable"
+
+
+# All test coroutines will be treated as marked.
+pytestmark = pytest.mark.asyncio
+
+
+EXCHANGE_INTERNAL_NAME = "binanceus"
+
+
+@pytest.fixture
+def market_order_action():
+ return {
+ "params": {
+ "ACTIONS": "trade",
+ "EXCHANGE_FROM": EXCHANGE_INTERNAL_NAME,
+ "ORDER_SYMBOL": "ETH/BTC",
+ "ORDER_AMOUNT": 1,
+ "ORDER_TYPE": "market",
+ "ORDER_SIDE": "BUY",
+ "SIMULATED_PORTFOLIO": {
+ "BTC": 1,
+ },
+ }
+ }
+
+
+@pytest.fixture
+def limit_order_action():
+ return {
+ "params": {
+ "ACTIONS": "trade",
+ "EXCHANGE_FROM": EXCHANGE_INTERNAL_NAME,
+ "ORDER_SYMBOL": "ETH/BTC",
+ "ORDER_AMOUNT": 1,
+ "ORDER_PRICE": "-10%",
+ "ORDER_TYPE": "limit",
+ "ORDER_SIDE": "BUY",
+ "SIMULATED_PORTFOLIO": {
+ "BTC": 1,
+ },
+ }
+ }
+
+
+@pytest.fixture
+def stop_loss_order_action():
+ return {
+ "params": {
+ "ACTIONS": "trade",
+ "EXCHANGE_FROM": EXCHANGE_INTERNAL_NAME,
+ "ORDER_SYMBOL": "ETH/BTC",
+ "ORDER_TYPE": "stop",
+ "ORDER_AMOUNT": "10%",
+ "ORDER_SIDE": "SELL",
+ "ORDER_STOP_PRICE": "-10%",
+ "SIMULATED_PORTFOLIO": {
+ "ETH": 1,
+ },
+ }
+ }
+
+
+@pytest.fixture
+def cancel_order_action():
+ return {
+ "params": {
+ "ACTIONS": "cancel",
+ "EXCHANGE_FROM": EXCHANGE_INTERNAL_NAME,
+ "ORDER_SYMBOL": "ETH/BTC",
+ "ORDER_SIDE": "BUY",
+ }
+ }
+
+
+@pytest.fixture
+def polymarket_order_action():
+ return {
+ "params": {
+ "ACTIONS": "trade",
+ "EXCHANGE_FROM": "polymarket",
+ "ORDER_SYMBOL": "what-price-will-bitcoin-hit-in-january-2026/USDC:USDC-260131-0-YES",
+ "ORDER_AMOUNT": 1,
+ "ORDER_TYPE": "market",
+ "ORDER_SIDE": "BUY",
+ "SIMULATED_PORTFOLIO": {
+ "USDC": 100,
+ },
+ }
+ }
+
+
+@pytest.fixture
+def deposit_action():
+ return {
+ "params": {
+ "ACTIONS": "deposit",
+ "EXCHANGE_TO": EXCHANGE_INTERNAL_NAME,
+ "BLOCKCHAIN_FROM_ASSET": "BTC",
+ "BLOCKCHAIN_FROM_AMOUNT": 1,
+ "BLOCKCHAIN_FROM": BLOCKCHAIN,
+ "SIMULATED_PORTFOLIO": {
+ "BTC": 0.01,
+ },
+ }
+ }
+
+
+@pytest.fixture
+def transfer_blockchain_action():
+ return {
+ "params": {
+ "ACTIONS": "transfer",
+ "BLOCKCHAIN_FROM_ASSET": "BTC",
+ "BLOCKCHAIN_FROM_AMOUNT": 1,
+ "BLOCKCHAIN_FROM": BLOCKCHAIN,
+ "BLOCKCHAIN_TO": BLOCKCHAIN,
+ "BLOCKCHAIN_TO_ASSET": "BTC",
+ "BLOCKCHAIN_TO_ADDRESS": "0x123_simulated_transfer_to_address_BTC",
+ }
+ }
+
+
+@pytest.fixture
+def withdraw_action():
+ return {
+ "params": {
+ "ACTIONS": "withdraw",
+ "EXCHANGE_FROM": EXCHANGE_INTERNAL_NAME,
+ "BLOCKCHAIN_TO": "ethereum",
+ "BLOCKCHAIN_TO_ASSET": "ETH",
+ "BLOCKCHAIN_TO_ADDRESS": "0x1234567890123456789012345678901234567890",
+ "SIMULATED_PORTFOLIO": {
+ "ETH": 2,
+ },
+ },
+ }
+
+
+@pytest.fixture
+def create_limit_instant_wait_and_cancel_order_action(limit_order_action, cancel_order_action):
+ all = {
+ "params": {
+ **limit_order_action["params"],
+ **cancel_order_action["params"],
+ **{
+ "MIN_DELAY": 0,
+ "MAX_DELAY": 0,
+ }
+ }
+ }
+ all["params"]["SIMULATED_PORTFOLIO"] = {
+ "BTC": 1,
+ }
+ all["params"]["ACTIONS"] = "trade,wait,cancel"
+ return all
+
+
+@pytest.fixture
+def multiple_actions_bundle_no_wait(deposit_action, limit_order_action):
+ all = {
+ "params": {
+ **deposit_action["params"],
+ **limit_order_action["params"],
+ }
+ }
+ all["params"]["SIMULATED_PORTFOLIO"] = {
+ "BTC": 1,
+ }
+ all["params"]["ACTIONS"] = "deposit,trade"
+ return all
+
+
+@pytest.fixture
+def multiple_action_bundle_with_wait(deposit_action, market_order_action, withdraw_action):
+ all = {
+ "params": {
+ **deposit_action["params"],
+ **market_order_action["params"],
+ **withdraw_action["params"],
+ **{
+ "MIN_DELAY": 100,
+ "MAX_DELAY": 150,
+ }
+ }
+ }
+ all["params"]["SIMULATED_PORTFOLIO"] = {
+ "BTC": 1,
+ }
+ all["params"]["ACTIONS"] = "deposit,wait,trade,wait,withdraw"
+ return all
+
+
+def misses_required_octobot_flow_client_import():
+ try:
+ if not RUN_TESTS:
+ return "OctoBot dependencies are not installed"
+ import octobot_flow
+ return None
+ except ImportError:
+ return "octobot_flow_client is not installed"
+
+
+def get_failed_actions(actions: list["octobot_flow.entities.AbstractActionDetails"]) -> list[typing.Optional[dict]]:
+ return [
+ action.result
+ for action in actions
+        if action.error_status != octobot_flow.enums.ActionErrorStatus.NO_ERROR.value
+ ]
+
+def get_created_orders(actions: list["octobot_flow.entities.AbstractActionDetails"]) -> list[dict]:
+ order_lists = [
+ action.result.get(DSL_operators.CREATED_ORDERS_KEY, [])
+ for action in actions
+ if action.result
+ ]
+ return list_util.flatten_list(order_lists) if order_lists else []
+
+def get_cancelled_orders(actions: list["octobot_flow.entities.AbstractActionDetails"]) -> list[str]:
+ cancelled_orders = [
+ action.result.get(DSL_operators.CANCELLED_ORDERS_KEY, [])
+ for action in actions
+ if action.result
+ ]
+ return list_util.flatten_list(cancelled_orders) if cancelled_orders else []
+
+def get_deposit_and_withdrawal_details(actions: list["octobot_flow.entities.AbstractActionDetails"]) -> list[dict]:
+ withdrawal_lists = [
+ action.result.get(DSL_operators.CREATED_WITHDRAWALS_KEY, []) + action.result.get(DSL_operators.CREATED_TRANSACTIONS_KEY, [])
+ for action in actions
+ if action.result and isinstance(action.result, dict) and (
+ DSL_operators.CREATED_WITHDRAWALS_KEY in action.result or
+ DSL_operators.CREATED_TRANSACTIONS_KEY in action.result
+ )
+ ]
+ return list_util.flatten_list(withdrawal_lists) if withdrawal_lists else []
+
+
+class TestOctoBotActionsJob:
+
+ def setup_method(self):
+ if message := misses_required_octobot_flow_client_import():
+ pytest.skip(reason=message)
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = True
+
+ def teardown_method(self):
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = False
+
+    async def test_run_market_order_action(self, market_order_action):
+        """Run a market-order automation end to end: one configuration job, then one trade job."""
+        # step 1: configure the job
+        job = octobot_flow_client.OctoBotActionsJob(market_order_action, [])
+        result = await job.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        # first processed action is the configuration step, not a DSL trade script
+        assert isinstance(processed_actions[0], octobot_flow.entities.ConfiguredActionDetails)
+        assert processed_actions[0].action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value
+        assert processed_actions[0].config is not None
+        assert "automation" in processed_actions[0].config
+        assert isinstance(processed_actions[0].config["exchange_account_details"], dict)
+        # snapshot the portfolio before trading to compare against post-trade balances
+        pre_trade_portfolio = job.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert pre_trade_portfolio["BTC"] == {
+            common_constants.PORTFOLIO_AVAILABLE: 1,
+            common_constants.PORTFOLIO_TOTAL: 1,
+        }
+
+        # step 2: run the trade action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        # the serialized state round-trips into an AutomationState exposing the actions DAG
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert next_actions[0].dsl_script == "market('buy', 'ETH/BTC', 1)"
+        job2 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False), []
+        )
+        result = await job2.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert processed_actions[0].dsl_script == "market('buy', 'ETH/BTC', 1)"
+        assert len(get_created_orders(processed_actions)) == 1
+        order = get_created_orders(processed_actions)[0]
+        assert order["symbol"] == "ETH/BTC"
+        assert order["amount"] == 1
+        assert order["type"] == "market"
+        assert order["side"] == "buy"
+        assert result.next_actions_description is None  # no more actions to execute
+
+        # ensure the trade is reflected in the portfolio (market buy spends BTC immediately)
+        post_deposit_portfolio = job2.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE] < pre_trade_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE]
+        assert post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_TOTAL] < pre_trade_portfolio["BTC"][common_constants.PORTFOLIO_TOTAL]
+
+        # bought ETH - fees
+        assert 0.990 < post_deposit_portfolio["ETH"][common_constants.PORTFOLIO_AVAILABLE] <= 0.999
+        assert 0.990 < post_deposit_portfolio["ETH"][common_constants.PORTFOLIO_TOTAL] <= 0.999
+
+    async def test_run_limit_order_action(self, limit_order_action):
+        """Run a limit-order automation: configuration job, then a limit('buy', ...) trade job."""
+        # step 1: configure the job
+        job = octobot_flow_client.OctoBotActionsJob(limit_order_action, [])
+        result = await job.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        # configuration step comes first, before any DSL trade script
+        assert isinstance(processed_actions[0], octobot_flow.entities.ConfiguredActionDetails)
+        assert processed_actions[0].action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value
+        assert processed_actions[0].config is not None
+        assert "automation" in processed_actions[0].config
+        assert isinstance(processed_actions[0].config["exchange_account_details"], dict)
+        pre_trade_portfolio = job.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert pre_trade_portfolio["BTC"] == {
+            common_constants.PORTFOLIO_AVAILABLE: 1,
+            common_constants.PORTFOLIO_TOTAL: 1,
+        }
+
+        # step 2: run the trade action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        # '-10%' places the limit price 10% below the current market price
+        assert next_actions[0].dsl_script == "limit('buy', 'ETH/BTC', 1, '-10%')"
+        job2 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False), []
+        )
+        result = await job2.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert processed_actions[0].dsl_script == "limit('buy', 'ETH/BTC', 1, '-10%')"
+        assert len(get_created_orders(processed_actions)) == 1
+        order = get_created_orders(processed_actions)[0]
+        assert order["symbol"] == "ETH/BTC"
+        assert order["amount"] == decimal.Decimal("1")
+        # loose price bounds: exact ETH/BTC price depends on the simulated market data
+        assert decimal.Decimal("0.001") < order["price"] < decimal.Decimal("0.2")
+        assert order["type"] == "limit"
+        assert order["side"] == "buy"
+        assert result.next_actions_description is None  # no more actions to execute
+
+    async def test_run_stop_loss_order_action(self, stop_loss_order_action):
+        """Run a stop-loss automation, forcing stop-loss support regardless of exchange."""
+        # step 1: configure the job
+        job = octobot_flow_client.OctoBotActionsJob(stop_loss_order_action, [])
+        result = await job.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.ConfiguredActionDetails)
+        assert processed_actions[0].action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value
+        assert processed_actions[0].config is not None
+        assert "automation" in processed_actions[0].config
+        assert isinstance(processed_actions[0].config["exchange_account_details"], dict)
+        # this scenario starts with ETH (the asset to protect), unlike the BTC-funded tests
+        pre_trade_portfolio = job.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert pre_trade_portfolio["ETH"] == {
+            common_constants.PORTFOLIO_AVAILABLE: 1,
+            common_constants.PORTFOLIO_TOTAL: 1,
+        }
+
+        # step 2: run the trade action
+        with mock.patch.object(
+            # force stop losses to be supported no matter the exchange
+            order_factory.OrderFactory, "_ensure_supported_order_type", mock.Mock()
+        ) as _ensure_supported_order_type:
+            next_actions_description = result.next_actions_description
+            assert next_actions_description is not None
+            parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+            next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+            assert len(next_actions) == 1
+            assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+            # '10%' of the ETH holdings, triggered at '-10%' below current price
+            assert next_actions[0].dsl_script.startswith("stop_loss('sell', 'ETH/BTC', '10%', '-10%')")
+            job2 = octobot_flow_client.OctoBotActionsJob(
+                next_actions_description.to_dict(include_default_values=False), []
+            )
+            result = await job2.run()
+            assert len(result.processed_actions) == 1
+            processed_actions = result.processed_actions
+            assert len(processed_actions) == 1
+            assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+            assert processed_actions[0].dsl_script.startswith("stop_loss('sell', 'ETH/BTC', '10%', '-10%')")
+            # error_status would be set if the stop-loss creation had failed
+            assert processed_actions[0].error_status is None
+            assert len(get_created_orders(processed_actions)) == 1
+            order = get_created_orders(processed_actions)[0]
+            assert order["symbol"] == "ETH/BTC"
+            assert order["amount"] == decimal.Decimal("0.1")  # 10% of 1 ETH
+            assert decimal.Decimal("0.001") < order["price"] < decimal.Decimal("0.2")
+            assert order["type"] == "stop_loss"
+            assert order["side"] == "sell"
+            assert result.next_actions_description is None  # no more actions to execute
+
+    async def test_run_cancel_limit_order_after_instant_wait_action(self, create_limit_instant_wait_and_cancel_order_action):
+        """Chain four jobs: configure, create a limit order, wait(0), then cancel the order."""
+        # step 1: configure the job
+        job = octobot_flow_client.OctoBotActionsJob(create_limit_instant_wait_and_cancel_order_action, [])
+        result = await job.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.ConfiguredActionDetails)
+        assert processed_actions[0].action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value
+        assert processed_actions[0].config is not None
+        assert "automation" in processed_actions[0].config
+        assert isinstance(processed_actions[0].config["exchange_account_details"], dict)
+        pre_trade_portfolio = job.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert pre_trade_portfolio["BTC"] == {
+            common_constants.PORTFOLIO_AVAILABLE: 1,
+            common_constants.PORTFOLIO_TOTAL: 1,
+        }
+
+        # step 2: run the trade action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert next_actions[0].dsl_script == "limit('buy', 'ETH/BTC', 1, '-10%')"
+        job2 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False), []
+        )
+        result = await job2.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert processed_actions[0].dsl_script.startswith("limit(")
+        assert len(get_created_orders(processed_actions)) == 1
+        order = get_created_orders(processed_actions)[0]
+        assert order["symbol"] == "ETH/BTC"
+        assert order["amount"] == decimal.Decimal("1")
+        assert decimal.Decimal("0.001") < order["price"] < decimal.Decimal("0.2")
+        assert order["type"] == "limit"
+        assert order["side"] == "buy"
+        # unlike single-action tests, the wait and cancel actions are still pending
+        assert result.next_actions_description is not None
+
+        # step 3: run the wait action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert next_actions[0].dsl_script.startswith("wait(")
+        job3 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False), []
+        )
+        result = await job3.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert processed_actions[0].dsl_script.startswith("wait(")
+        # wait is waiting 0 seconds, so it should be executed immediately
+        assert processed_actions[0].executed_at is not None and processed_actions[0].executed_at > 0
+
+        # step 4: run the cancel action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert next_actions[0].dsl_script == "cancel_order('ETH/BTC', side='buy')"
+        job4 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False), []
+        )
+        result = await job4.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert processed_actions[0].dsl_script.startswith("cancel_order(")
+        assert processed_actions[0].result is not None
+        # the cancel action reports exactly the one limit order created in step 2
+        assert len(processed_actions[0].result[DSL_operators.CANCELLED_ORDERS_KEY]) == len(get_cancelled_orders(processed_actions)) == 1
+        assert result.next_actions_description is None  # no more actions to execute
+
+    @pytest.mark.skip(reason="restore once polymarket is fully supported")
+    async def test_polymarket_trade_action(self, polymarket_order_action):  # TODO: update once polymarket is fullly supported
+        """Placeholder polymarket scenario: currently expects the trade job to raise FailedRequest."""
+        # step 1: configure the job
+        job = octobot_flow_client.OctoBotActionsJob(polymarket_order_action, [])
+        result = await job.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.ConfiguredActionDetails)
+        assert processed_actions[0].action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value
+        assert processed_actions[0].config is not None
+        assert "automation" in processed_actions[0].config
+        assert isinstance(processed_actions[0].config["exchange_account_details"], dict)
+        # polymarket accounts are funded in USDC, not BTC/ETH
+        pre_trade_portfolio = job.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert pre_trade_portfolio["USDC"] == {
+            common_constants.PORTFOLIO_AVAILABLE: 100,
+            common_constants.PORTFOLIO_TOTAL: 100,
+        }
+
+        # step 2: run the trade action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert next_actions[0].dsl_script.startswith("market(")
+        job2 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False),
+            []
+        )
+        with pytest.raises(octobot_trading.errors.FailedRequest):  # TODO: update once supported
+            result = await job2.run()
+            # NOTE(review): the asserts below never run while run() raises — they document
+            # the expected behavior once polymarket support lands; confirm indentation/intent then
+            assert len(result.processed_actions) == 1
+            processed_actions = result.processed_actions
+            assert len(processed_actions) == 1
+            assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+            assert processed_actions[0].dsl_script.startswith("market(")
+            assert len(get_created_orders(processed_actions)) == 1
+            order = get_created_orders(processed_actions)[0]
+            assert order["symbol"] == "what-price-will-bitcoin-hit-in-january-2026/USDC:USDC-260131-0-YES"
+            assert order["amount"] == decimal.Decimal("1")
+            assert order["type"] == "market"
+            assert order["side"] == "buy"
+
+    async def test_run_transfer_blockchain_only_action(self, transfer_blockchain_action):
+        """Run a pure blockchain wallet transfer: no exchange portfolio is involved."""
+        # step 1: configure the job
+        job = octobot_flow_client.OctoBotActionsJob(transfer_blockchain_action, [])
+        result = await job.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.ConfiguredActionDetails)
+        assert processed_actions[0].action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value
+        assert processed_actions[0].config is not None
+        assert "automation" in processed_actions[0].config
+        # blockchain-only transfer: no exchange accounts are configured at all
+        assert job.after_execution_state.automation.reference_exchange_account_elements is None
+        assert job.after_execution_state.automation.client_exchange_account_elements.portfolio.content is None
+
+        # step 2: run the transfer action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert next_actions[0].dsl_script is not None and "blockchain_wallet_transfer" in next_actions[0].dsl_script
+        job2 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False),
+            []
+        )
+        result = await job2.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert processed_actions[0].dsl_script is not None and "blockchain_wallet_transfer" in processed_actions[0].dsl_script
+        assert result.next_actions_description is None  # no more actions to execute
+
+        # exactly one on-chain transaction is created and exposed via both access paths
+        assert processed_actions[0].result is not None
+        assert len(processed_actions[0].result[DSL_operators.CREATED_TRANSACTIONS_KEY]) == len(get_deposit_and_withdrawal_details(processed_actions)) == 1
+        assert len(get_deposit_and_withdrawal_details(processed_actions)) == 1
+        transaction = get_deposit_and_withdrawal_details(processed_actions)[0]
+        assert transaction[trading_enums.ExchangeConstantsTransactionColumns.CURRENCY.value] == "BTC"
+        assert transaction[trading_enums.ExchangeConstantsTransactionColumns.AMOUNT.value] == decimal.Decimal("1")
+        assert transaction[trading_enums.ExchangeConstantsTransactionColumns.NETWORK.value] == BLOCKCHAIN
+        assert transaction[trading_enums.ExchangeConstantsTransactionColumns.ADDRESS_TO.value] == "0x123_simulated_transfer_to_address_BTC"
+
+
+
+    async def test_run_deposit_action(self, deposit_action):
+        """Run a deposit automation and verify the portfolio grows by the deposited amount."""
+        # step 1: configure the job
+        job = octobot_flow_client.OctoBotActionsJob(deposit_action, [])
+        result = await job.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.ConfiguredActionDetails)
+        assert processed_actions[0].action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value
+        assert processed_actions[0].config is not None
+        assert "automation" in processed_actions[0].config
+        assert isinstance(processed_actions[0].config["exchange_account_details"], dict)
+        pre_deposit_portfolio = job.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert pre_deposit_portfolio["BTC"] == {
+            common_constants.PORTFOLIO_AVAILABLE: 0.01,
+            common_constants.PORTFOLIO_TOTAL: 0.01,
+        }
+
+        # step 2: run the deposit action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        # deposits are expressed as blockchain wallet transfers into the exchange account
+        assert next_actions[0].dsl_script is not None and "blockchain_wallet_transfer" in next_actions[0].dsl_script
+        job2 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False),
+            []
+        )
+        result = await job2.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert processed_actions[0].dsl_script is not None and "blockchain_wallet_transfer" in processed_actions[0].dsl_script
+        assert result.next_actions_description is None  # no more actions to execute
+
+        # ensure deposit is successful: portfolio grows by the action's BLOCKCHAIN_FROM_AMOUNT
+        post_deposit_portfolio = job2.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert post_deposit_portfolio["BTC"] == {
+            common_constants.PORTFOLIO_AVAILABLE: pre_deposit_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE] + deposit_action["params"]["BLOCKCHAIN_FROM_AMOUNT"],
+            common_constants.PORTFOLIO_TOTAL: pre_deposit_portfolio["BTC"][common_constants.PORTFOLIO_TOTAL] + deposit_action["params"]["BLOCKCHAIN_FROM_AMOUNT"],
+        }
+
+    async def test_run_withdraw_action(self, withdraw_action):
+        """Run a withdraw automation and verify the portfolio is emptied afterwards."""
+        # step 1: configure the job
+        job = octobot_flow_client.OctoBotActionsJob(withdraw_action, [])
+        result = await job.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.ConfiguredActionDetails)
+        assert processed_actions[0].action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value
+        assert processed_actions[0].config is not None
+        assert "automation" in processed_actions[0].config
+        assert isinstance(processed_actions[0].config["exchange_account_details"], dict)
+        pre_withdraw_portfolio = job.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert pre_withdraw_portfolio["ETH"] == {
+            common_constants.PORTFOLIO_AVAILABLE: 2,
+            common_constants.PORTFOLIO_TOTAL: 2,
+        }
+
+        # step 2: run the withdraw action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert next_actions[0].dsl_script.startswith("withdraw(")
+        job2 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False),
+            []
+        )
+        result = await job2.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert processed_actions[0].dsl_script.startswith("withdraw(")
+        assert result.next_actions_description is None  # no more actions to execute
+
+        # ensure withdraw is successful
+        post_withdraw_portfolio = job2.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert post_withdraw_portfolio == {}  # portfolio should now be empty
+
+    async def test_run_multiple_actions_bundle_no_wait(self, multiple_actions_bundle_no_wait):
+        """Run a bundle of dependent actions (deposit then limit trade) with no wait steps."""
+        # step 1: configure the job
+        job = octobot_flow_client.OctoBotActionsJob(multiple_actions_bundle_no_wait, [])
+        # ensure wait keywords have been considered
+        result = await job.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.ConfiguredActionDetails)
+        assert processed_actions[0].action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value
+        assert processed_actions[0].config is not None
+        assert "automation" in processed_actions[0].config
+        assert isinstance(processed_actions[0].config["exchange_account_details"], dict)
+        pre_trade_portfolio = job.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+        assert pre_trade_portfolio["BTC"] == {
+            common_constants.PORTFOLIO_AVAILABLE: 1,
+            common_constants.PORTFOLIO_TOTAL: 1,
+        }
+
+        # step 2: run the deposit action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1  # only the deposit action should be executable as the trade action depends on it
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert next_actions[0].dsl_script is not None and "blockchain_wallet_transfer" in next_actions[0].dsl_script
+        job2 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False),
+            []
+        )
+        result = await job2.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert processed_actions[0].dsl_script is not None and "blockchain_wallet_transfer" in processed_actions[0].dsl_script
+        assert processed_actions[0].result is not None
+        assert len(processed_actions[0].result[DSL_operators.CREATED_TRANSACTIONS_KEY]) == len(get_deposit_and_withdrawal_details(processed_actions)) == 1
+        assert len(get_deposit_and_withdrawal_details(processed_actions)) == 1
+        transaction = get_deposit_and_withdrawal_details(processed_actions)[0]
+        assert transaction[trading_enums.ExchangeConstantsTransactionColumns.CURRENCY.value] == "BTC"
+        assert transaction[trading_enums.ExchangeConstantsTransactionColumns.AMOUNT.value] == decimal.Decimal("1")
+        assert transaction[trading_enums.ExchangeConstantsTransactionColumns.NETWORK.value] == BLOCKCHAIN
+        assert transaction[trading_enums.ExchangeConstantsTransactionColumns.ADDRESS_TO.value] == "0x123_simulated_deposit_address_BTC"
+
+        # step 3: run the trade action
+        next_actions_description = result.next_actions_description
+        assert next_actions_description is not None
+        parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+        next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+        assert len(next_actions) == 1  # only the trade action should be executable now: all others have been executed already
+        assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert next_actions[0].dsl_script.startswith("limit(")
+        job3 = octobot_flow_client.OctoBotActionsJob(
+            next_actions_description.to_dict(include_default_values=False),
+            []
+        )
+        result = await job3.run()
+        assert len(result.processed_actions) == 1
+        processed_actions = result.processed_actions
+        assert len(processed_actions) == 1
+        assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+        assert processed_actions[0].dsl_script.startswith("limit(")
+        assert len(get_created_orders(processed_actions)) == 1
+        limit_order = get_created_orders(processed_actions)[0]
+        assert limit_order["symbol"] == "ETH/BTC"
+        assert limit_order["amount"] == decimal.Decimal("1")
+        assert limit_order["type"] == "limit"
+        assert limit_order["side"] == "buy"
+        assert result.next_actions_description is None  # no more actions to execute
+
+        # ensure trades are taken into account in portfolio
+        post_deposit_portfolio = job3.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+
+        assert "ETH" not in post_deposit_portfolio  # ETH order has not been executed (still open)
+
+        # total is 1 (initial) + 1 (deposit); the open buy order locks part of it
+        assert post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_TOTAL] == 2
+        # created a buy order but not executed: locked BTC in portfolio
+        assert post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE] < post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_TOTAL]
+
+
+ async def test_run_multiple_actions_bundle_with_wait(self, multiple_action_bundle_with_wait):
+ # step 1: configure the job
+ job = octobot_flow_client.OctoBotActionsJob(multiple_action_bundle_with_wait, [])
+ # ensure wait keywords have been considered
+ automation = job.description.state["automation"]
+ dag = automation["actions_dag"]
+ assert len(dag["actions"]) == 6 # 6 actions: init, deposit, wait, trade, wait, withdraw
+ dsl_scripts = [action["dsl_script"] for action in dag["actions"][1:]]
+ assert all(
+ dsl_script.startswith(keyword)
+ for dsl_script, keyword in zip(dsl_scripts, ["blockchain_wallet_transfer", "wait", "market", "wait", "withdraw"])
+ )
+ # run the job
+ result = await job.run()
+ assert len(result.processed_actions) == 1
+ processed_actions = result.processed_actions
+ assert len(processed_actions) == 1
+ assert isinstance(processed_actions[0], octobot_flow.entities.ConfiguredActionDetails)
+ assert processed_actions[0].action == octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value
+ assert processed_actions[0].config is not None
+ assert "automation" in processed_actions[0].config
+ assert isinstance(processed_actions[0].config["exchange_account_details"], dict)
+ pre_trade_portfolio = job.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+ assert pre_trade_portfolio["BTC"] == {
+ common_constants.PORTFOLIO_AVAILABLE: 1,
+ common_constants.PORTFOLIO_TOTAL: 1,
+ }
+
+ # step 2: run the deposit action
+ next_actions_description = result.next_actions_description
+ assert next_actions_description is not None
+ parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+ next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+ assert len(next_actions) == 1
+ assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert next_actions[0].dsl_script is not None and "blockchain_wallet_transfer" in next_actions[0].dsl_script
+ job2 = octobot_flow_client.OctoBotActionsJob(
+ next_actions_description.to_dict(include_default_values=False),
+ []
+ )
+ result = await job2.run()
+ next_actions_description = result.next_actions_description
+ assert len(result.processed_actions) == 1
+ processed_actions = result.processed_actions
+ assert len(processed_actions) == 1
+ assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert processed_actions[0].dsl_script is not None and "blockchain_wallet_transfer" in processed_actions[0].dsl_script
+ assert processed_actions[0].result is not None
+ assert len(processed_actions[0].result[DSL_operators.CREATED_TRANSACTIONS_KEY]) == len(get_deposit_and_withdrawal_details(processed_actions)) == 1
+ transaction = processed_actions[0].result[DSL_operators.CREATED_TRANSACTIONS_KEY][0]
+ assert transaction[trading_enums.ExchangeConstantsTransactionColumns.CURRENCY.value] == "BTC"
+ assert transaction[trading_enums.ExchangeConstantsTransactionColumns.AMOUNT.value] == decimal.Decimal("1")
+ assert transaction[trading_enums.ExchangeConstantsTransactionColumns.NETWORK.value] == BLOCKCHAIN
+ assert transaction[trading_enums.ExchangeConstantsTransactionColumns.ADDRESS_TO.value] == "0x123_simulated_deposit_address_BTC"
+ assert next_actions_description is not None
+ parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+ next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+ assert len(next_actions) == 1
+ assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert next_actions[0].dsl_script.startswith("wait(")
+
+ # step 3.A: run the wait action
+ job3 = octobot_flow_client.OctoBotActionsJob(
+ next_actions_description.to_dict(include_default_values=False),
+ []
+ )
+ result = await job3.run()
+ next_actions_description = result.next_actions_description
+ assert len(result.processed_actions) == 1
+ processed_actions = result.processed_actions
+ assert len(processed_actions) == 1
+ assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert processed_actions[0].dsl_script.startswith("wait(")
+ # next action is wait again: waiting time has not been reached yet
+ parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+ next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+ assert len(next_actions) == 1
+ assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert next_actions[0].dsl_script.startswith("wait(")
+ assert next_actions[0].previous_execution_result
+ last_execution_result = dsl_interpreter.ReCallingOperatorResult.from_dict(
+ next_actions[0].previous_execution_result[dsl_interpreter.ReCallingOperatorResult.__name__]
+ )
+ waiting_time = last_execution_result.last_execution_result[dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value]
+
+ # step 3.B: complete the wait action
+ with mock.patch.object(time, "time", mock.Mock(return_value=time.time() + waiting_time)):
+ job4 = octobot_flow_client.OctoBotActionsJob(
+ next_actions_description.to_dict(include_default_values=False),
+ []
+ )
+ result = await job4.run()
+ assert len(result.processed_actions) == 1
+ processed_actions = result.processed_actions
+ assert len(processed_actions) == 1
+ assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert processed_actions[0].dsl_script.startswith("wait(")
+ assert processed_actions[0].executed_at is not None and processed_actions[0].executed_at > 0
+
+ next_actions_description = result.next_actions_description
+ parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+ next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+ assert len(next_actions) == 1
+ assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert next_actions[0].dsl_script.startswith("market(")
+ post_deposit_portfolio = job2.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+ assert post_deposit_portfolio["BTC"] == {
+ common_constants.PORTFOLIO_AVAILABLE: 2,
+ common_constants.PORTFOLIO_TOTAL: 2,
+ }
+
+ # step 4: run the trade action
+ next_actions_description = result.next_actions_description
+ assert next_actions_description is not None
+ parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+ next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+ assert len(next_actions) == 1
+ assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert next_actions[0].dsl_script.startswith("market(")
+ job5 = octobot_flow_client.OctoBotActionsJob(
+ next_actions_description.to_dict(include_default_values=False),
+ []
+ )
+ result = await job5.run()
+ assert len(result.processed_actions) == 1
+ processed_actions = result.processed_actions
+ assert len(processed_actions) == 1
+ assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert processed_actions[0].dsl_script.startswith("market(")
+ assert processed_actions[0].result is not None
+ assert len(processed_actions[0].result[DSL_operators.CREATED_ORDERS_KEY]) == len(get_created_orders(processed_actions)) == 1
+ post_trade_portfolio = job5.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+ assert post_trade_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE] < post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE]
+ assert 0.990 < post_trade_portfolio["ETH"][common_constants.PORTFOLIO_AVAILABLE] <= 0.999
+ assert 0.990 < post_trade_portfolio["ETH"][common_constants.PORTFOLIO_TOTAL] <= 0.999
+ # step 5.A: run the wait action
+ next_actions_description = result.next_actions_description
+ job6 = octobot_flow_client.OctoBotActionsJob(
+ next_actions_description.to_dict(include_default_values=False),
+ []
+ )
+ result = await job6.run()
+ assert len(result.processed_actions) == 1
+ processed_actions = result.processed_actions
+ assert len(processed_actions) == 1
+ assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert processed_actions[0].dsl_script.startswith("wait(")
+ assert processed_actions[0].previous_execution_result
+ last_execution_result = dsl_interpreter.ReCallingOperatorResult.from_dict(
+ processed_actions[0].previous_execution_result[dsl_interpreter.ReCallingOperatorResult.__name__]
+ )
+ waiting_time = last_execution_result.last_execution_result[dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value]
+
+ # step 5.B: complete the wait action
+ next_actions_description = result.next_actions_description
+ with mock.patch.object(time, "time", mock.Mock(return_value=time.time() + waiting_time)):
+ job7 = octobot_flow_client.OctoBotActionsJob(
+ next_actions_description.to_dict(include_default_values=False),
+ []
+ )
+ result = await job7.run()
+ assert len(result.processed_actions) == 1
+ processed_actions = result.processed_actions
+ assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert processed_actions[0].dsl_script.startswith("wait(")
+ assert processed_actions[0].executed_at is not None and processed_actions[0].executed_at > 0
+
+
+
+ # step 6: run the withdraw action
+ next_actions_description = result.next_actions_description
+ assert next_actions_description is not None
+ parsed_state = octobot_flow.AutomationState.from_dict(next_actions_description.state)
+ next_actions = parsed_state.automation.actions_dag.get_executable_actions()
+ assert len(next_actions) == 1
+ assert isinstance(next_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert next_actions[0].dsl_script.startswith("withdraw(")
+ job8 = octobot_flow_client.OctoBotActionsJob(
+ next_actions_description.to_dict(include_default_values=False),
+ []
+ )
+ result = await job8.run()
+ assert len(result.processed_actions) == 1
+ processed_actions = result.processed_actions
+ assert len(processed_actions) == 1
+ assert isinstance(processed_actions[0], octobot_flow.entities.DSLScriptActionDetails)
+ assert processed_actions[0].dsl_script.startswith("withdraw(")
+ assert processed_actions[0].result is not None
+ assert len(processed_actions[0].result[DSL_operators.CREATED_WITHDRAWALS_KEY]) == len(get_deposit_and_withdrawal_details(processed_actions)) == 1
+ transaction = processed_actions[0].result[DSL_operators.CREATED_WITHDRAWALS_KEY][0]
+ assert transaction[trading_enums.ExchangeConstantsTransactionColumns.CURRENCY.value] == "ETH"
+ assert 0.990 < transaction[trading_enums.ExchangeConstantsTransactionColumns.AMOUNT.value] <= 0.999
+ assert transaction[trading_enums.ExchangeConstantsTransactionColumns.NETWORK.value] == "ethereum"
+ assert transaction[trading_enums.ExchangeConstantsTransactionColumns.ADDRESS_TO.value] == "0x1234567890123456789012345678901234567890"
+ post_withdraw_portfolio = job8.after_execution_state.automation.client_exchange_account_elements.portfolio.content
+ assert post_withdraw_portfolio["BTC"] == post_trade_portfolio["BTC"]
+ assert "ETH" not in post_withdraw_portfolio
+ assert result.next_actions_description is None # no more actions to execute
diff --git a/packages/node/tests/scheduler/test_octobot_lib.py b/packages/node/tests/scheduler/test_octobot_lib.py
deleted file mode 100644
index 836719b8a..000000000
--- a/packages/node/tests/scheduler/test_octobot_lib.py
+++ /dev/null
@@ -1,581 +0,0 @@
-# Drakkar-Software OctoBot-Node
-# Copyright (c) Drakkar-Software, All rights reserved.
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3.0 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library.
-import pytest
-
-import octobot_node.scheduler.octobot_lib as octobot_lib
-import octobot_commons.constants as common_constants
-RUN_TESTS = True
-try:
- raise ImportError("test")
- import octobot_trading.constants
- import octobot_trading.errors
-
- import octobot_wrapper.keywords.internal.overrides.custom_action_trading_mode as custom_action_trading_mode
- import octobot_wrapper.keywords.internal.constants as kw_constants
- import octobot_wrapper.keywords.internal.enums as kw_enums
-
- BLOCKCHAIN = octobot_trading.constants.SIMULATED_BLOCKCHAIN_NETWORK
-except ImportError:
- # tests will be skipped if octobot_trading or octobot_wrapper are not installed
- RUN_TESTS = False
- BLOCKCHAIN = "unavailable"
-
-
-# All test coroutines will be treated as marked.
-pytestmark = pytest.mark.asyncio
-
-
-@pytest.fixture
-def market_order_action():
- return {
- "params": {
- "ACTIONS": "trade",
- "EXCHANGE_FROM": "binance",
- "ORDER_SYMBOL": "ETH/BTC",
- "ORDER_AMOUNT": 1,
- "ORDER_TYPE": "market",
- "ORDER_SIDE": "BUY",
- "SIMULATED_PORTFOLIO": {
- "BTC": 1,
- },
- }
- }
-
-
-@pytest.fixture
-def limit_order_action():
- return {
- "params": {
- "ACTIONS": "trade",
- "EXCHANGE_FROM": "binance",
- "ORDER_SYMBOL": "ETH/BTC",
- "ORDER_AMOUNT": 1,
- "ORDER_PRICE": "-10%",
- "ORDER_TYPE": "limit",
- "ORDER_SIDE": "BUY",
- "SIMULATED_PORTFOLIO": {
- "BTC": 1,
- },
- }
- }
-
-
-@pytest.fixture
-def stop_loss_order_action():
- return {
- "params": {
- "ACTIONS": "trade",
- "EXCHANGE_FROM": "binance",
- "ORDER_SYMBOL": "ETH/BTC",
- "ORDER_TYPE": "stop",
- "ORDER_AMOUNT": "10%",
- "ORDER_SIDE": "SELL",
- "ORDER_STOP_PRICE": "-10%",
- "SIMULATED_PORTFOLIO": {
- "ETH": 1,
- },
- }
- }
-
-
-@pytest.fixture
-def cancel_order_action():
- return {
- "params": {
- "ACTIONS": "cancel",
- "EXCHANGE_FROM": "binance",
- "ORDER_SYMBOL": "ETH/BTC",
- "ORDER_SIDE": "BUY",
- }
- }
-
-
-@pytest.fixture
-def polymarket_order_action():
- return {
- "params": {
- "ACTIONS": "trade",
- "EXCHANGE_FROM": "polymarket",
- "ORDER_SYMBOL": "what-price-will-bitcoin-hit-in-january-2026/USDC:USDC-260131-0-YES",
- "ORDER_AMOUNT": 1,
- "ORDER_TYPE": "market",
- "ORDER_SIDE": "BUY",
- "SIMULATED_PORTFOLIO": {
- "USDC": 100,
- },
- }
- }
-
-
-@pytest.fixture
-def deposit_action():
- return {
- "params": {
- "ACTIONS": "deposit",
- "EXCHANGE_TO": "binance",
- "BLOCKCHAIN_FROM_ASSET": "BTC",
- "BLOCKCHAIN_FROM_AMOUNT": 1,
- "BLOCKCHAIN_FROM": BLOCKCHAIN,
- "SIMULATED_PORTFOLIO": {
- "BTC": 0.01,
- },
- }
- }
-
-
-@pytest.fixture
-def withdraw_action():
- return {
- "params": {
- "ACTIONS": "withdraw",
- "EXCHANGE_FROM": "binance",
- "BLOCKCHAIN_TO": "ethereum",
- "BLOCKCHAIN_TO_ASSET": "ETH",
- "BLOCKCHAIN_TO_ADDRESS": "0x1234567890123456789012345678901234567890",
- "SIMULATED_PORTFOLIO": {
- "ETH": 2,
- },
- },
- }
-
-
-@pytest.fixture
-def create_limit_and_cancel_order_action(limit_order_action, cancel_order_action):
- all = {
- "params": {
- **limit_order_action["params"],
- **cancel_order_action["params"],
- **{
- "MIN_DELAY": 0,
- "MAX_DELAY": 0,
- }
- }
- }
- all["params"]["SIMULATED_PORTFOLIO"] = {
- "BTC": 1,
- }
- all["params"]["ACTIONS"] = "trade,wait,cancel"
- return all
-
-
-@pytest.fixture
-def multiple_actions_bundle_no_wait(deposit_action, limit_order_action):
- all = {
- "params": {
- **deposit_action["params"],
- **limit_order_action["params"],
- }
- }
- all["params"]["SIMULATED_PORTFOLIO"] = {
- "BTC": 1,
- }
- all["params"]["ACTIONS"] = "deposit,trade"
- return all
-
-
-@pytest.fixture
-def multiple_action_bundle_with_wait(deposit_action, market_order_action, withdraw_action):
- all = {
- "params": {
- **deposit_action["params"],
- **market_order_action["params"],
- **withdraw_action["params"],
- **{
- "MIN_DELAY": 0.1,
- "MAX_DELAY": 0.15,
- }
- }
- }
- all["params"]["SIMULATED_PORTFOLIO"] = {
- "BTC": 1,
- }
- all["params"]["ACTIONS"] = "deposit,wait,trade,wait,withdraw"
- return all
-
-
-def misses_required_octobot_lib_import():
- try:
- if not RUN_TESTS:
- return "OctoBot dependencies are not installed"
- import mini_octobot
- return None
- except ImportError:
- return "octobot_lib is not installed"
-
-class TestOctoBotActionsJob:
-
- def setup_method(self):
- if message := misses_required_octobot_lib_import():
- pytest.skip(reason=message)
- octobot_trading.constants.ALLOW_FUNDS_TRANSFER = True
-
- def teardown_method(self):
- octobot_trading.constants.ALLOW_FUNDS_TRANSFER = False
-
- async def test_run_market_order_action(self, market_order_action):
- # step 1: configure the task
- job = octobot_lib.OctoBotActionsJob(market_order_action)
- result = await job.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == kw_enums.CustomActionExclusiveFormattedContentConfigKeys.APPLY_CONFIGURATION.value
- pre_trade_portfolio = job.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert pre_trade_portfolio["BTC"] == {
- common_constants.PORTFOLIO_AVAILABLE: 1,
- common_constants.PORTFOLIO_TOTAL: 1,
- }
-
- # step 2: run the trade action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- job2 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job2.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- assert len(result.get_created_orders()) == 1
- order = result.get_created_orders()[0]
- assert order["symbol"] == "ETH/BTC"
- assert order["amount"] == 1
- assert order["type"] == "market"
- assert order["side"] == "buy"
- assert result.next_actions_description is None # no more actions to execute
-
- # ensure deposit is successful
- post_deposit_portfolio = job2.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE] < pre_trade_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE]
- assert post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_TOTAL] < pre_trade_portfolio["BTC"][common_constants.PORTFOLIO_TOTAL]
-
- # bought ETH - fees
- assert post_deposit_portfolio["ETH"][common_constants.PORTFOLIO_AVAILABLE] == 0.999
- assert post_deposit_portfolio["ETH"][common_constants.PORTFOLIO_TOTAL] == 0.999
-
- async def test_run_limit_order_action(self, limit_order_action):
- # step 1: configure the task
- job = octobot_lib.OctoBotActionsJob(limit_order_action)
- result = await job.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == kw_enums.CustomActionExclusiveFormattedContentConfigKeys.APPLY_CONFIGURATION.value
- pre_trade_portfolio = job.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert pre_trade_portfolio["BTC"] == {
- common_constants.PORTFOLIO_AVAILABLE: 1,
- common_constants.PORTFOLIO_TOTAL: 1,
- }
-
- # step 2: run the trade action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- job2 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job2.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- assert len(result.get_created_orders()) == 1
- order = result.get_created_orders()[0]
- assert order["symbol"] == "ETH/BTC"
- assert order["amount"] == 1
- assert 0.001 < order["limit_price"] < 0.2
- assert order["type"] == "limit"
- assert order["side"] == "buy"
- assert result.next_actions_description is None # no more actions to execute
-
- async def test_run_stop_loss_order_action(self, stop_loss_order_action):
- # step 1: configure the task
- job = octobot_lib.OctoBotActionsJob(stop_loss_order_action)
- result = await job.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == kw_enums.CustomActionExclusiveFormattedContentConfigKeys.APPLY_CONFIGURATION.value
- pre_trade_portfolio = job.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert pre_trade_portfolio["ETH"] == {
- common_constants.PORTFOLIO_AVAILABLE: 1,
- common_constants.PORTFOLIO_TOTAL: 1,
- }
-
- # step 2: run the trade action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.SELL_SIGNAL
- job2 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job2.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.SELL_SIGNAL
- assert len(result.get_created_orders()) == 1
- order = result.get_created_orders()[0]
- assert order["symbol"] == "ETH/BTC"
- assert order["amount"] == 0.1 # 10% of 1 ETH
- assert 0.001 < order["limit_price"] < 0.2
- assert order["type"] == "stop_loss"
- assert order["side"] == "sell"
- assert result.next_actions_description is None # no more actions to execute
-
- async def test_run_cancel_limit_order_action(self, create_limit_and_cancel_order_action):
- # step 1: configure the task
- job = octobot_lib.OctoBotActionsJob(create_limit_and_cancel_order_action)
- result = await job.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == kw_enums.CustomActionExclusiveFormattedContentConfigKeys.APPLY_CONFIGURATION.value
- pre_trade_portfolio = job.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert pre_trade_portfolio["BTC"] == {
- common_constants.PORTFOLIO_AVAILABLE: 1,
- common_constants.PORTFOLIO_TOTAL: 1,
- }
-
- # step 2: run the trade action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- job2 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job2.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- assert len(result.get_created_orders()) == 1
- order = result.get_created_orders()[0]
- assert order["symbol"] == "ETH/BTC"
- assert order["amount"] == 1
- assert 0.001 < order["limit_price"] < 0.2
- assert order["type"] == "limit"
- assert order["side"] == "buy"
- assert result.next_actions_description is not None
-
- # step 3: run the cancel action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.CANCEL_SIGNAL
- job3 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job3.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.CANCEL_SIGNAL
- assert result.processed_actions[0].result["cancelled_orders_count"] == 1
- assert result.next_actions_description is None # no more actions to execute
-
- async def test_polymarket_trade_action(self, polymarket_order_action): # TODO: update once polymarket is fullly supported
- # step 1: configure the task
- job = octobot_lib.OctoBotActionsJob(polymarket_order_action)
- result = await job.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == kw_enums.CustomActionExclusiveFormattedContentConfigKeys.APPLY_CONFIGURATION.value
- pre_trade_portfolio = job.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert pre_trade_portfolio["USDC"] == {
- common_constants.PORTFOLIO_AVAILABLE: 100,
- common_constants.PORTFOLIO_TOTAL: 100,
- }
-
- # step 2: run the trade action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- job2 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- with pytest.raises(octobot_trading.errors.FailedRequest): # TODO: update once supported
- result = await job2.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- assert len(result.get_created_orders()) == 1
- order = result.get_created_orders()[0]
- assert order["symbol"] == "what-price-will-bitcoin-hit-in-january-2026/USDC:USDC-260131-0-YES"
- assert order["amount"] == 1
- assert order["type"] == "market"
- assert order["side"] == "buy"
-
- async def test_run_deposit_action(self, deposit_action):
- # step 1: configure the task
- job = octobot_lib.OctoBotActionsJob(deposit_action)
- result = await job.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == kw_enums.CustomActionExclusiveFormattedContentConfigKeys.APPLY_CONFIGURATION.value
- pre_deposit_portfolio = job.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert pre_deposit_portfolio["BTC"] == {
- common_constants.PORTFOLIO_AVAILABLE: 0.01,
- common_constants.PORTFOLIO_TOTAL: 0.01,
- }
-
- # step 2: run the deposit action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.TRANSFER_FUNDS_SIGNAL
- job2 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job2.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.TRANSFER_FUNDS_SIGNAL
- assert result.next_actions_description is None # no more actions to execute
-
- # ensure deposit is successful
- post_deposit_portfolio = job2.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert post_deposit_portfolio["BTC"] == {
- common_constants.PORTFOLIO_AVAILABLE: pre_deposit_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE] + deposit_action["params"]["BLOCKCHAIN_FROM_AMOUNT"],
- common_constants.PORTFOLIO_TOTAL: pre_deposit_portfolio["BTC"][common_constants.PORTFOLIO_TOTAL] + deposit_action["params"]["BLOCKCHAIN_FROM_AMOUNT"],
- }
-
- async def test_run_withdraw_action(self, withdraw_action):
- # step 1: configure the task
- job = octobot_lib.OctoBotActionsJob(withdraw_action)
- result = await job.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == kw_enums.CustomActionExclusiveFormattedContentConfigKeys.APPLY_CONFIGURATION.value
- pre_withdraw_portfolio = job.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert pre_withdraw_portfolio["ETH"] == {
- common_constants.PORTFOLIO_AVAILABLE: 2,
- common_constants.PORTFOLIO_TOTAL: 2,
- }
-
- # step 2: run the withdraw action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.WITHDRAW_FUNDS_SIGNAL
- job2 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job2.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.WITHDRAW_FUNDS_SIGNAL
- assert result.next_actions_description is None # no more actions to execute
-
- # ensure withdraw is successful
- post_withdraw_portfolio = job2.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert post_withdraw_portfolio == {} # portfolio should now be empty
-
- async def test_run_multiple_actions_bundle_no_wait(self, multiple_actions_bundle_no_wait):
- # step 1: configure the task
- job = octobot_lib.OctoBotActionsJob(multiple_actions_bundle_no_wait)
- result = await job.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == kw_enums.CustomActionExclusiveFormattedContentConfigKeys.APPLY_CONFIGURATION.value
- pre_trade_portfolio = job.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert pre_trade_portfolio["BTC"] == {
- common_constants.PORTFOLIO_AVAILABLE: 1,
- common_constants.PORTFOLIO_TOTAL: 1,
- }
-
- # step 2: run the deposit and trade actions
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 2
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.TRANSFER_FUNDS_SIGNAL
- assert next_actions_description.immediate_actions[1].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- job2 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job2.run()
- assert len(result.processed_actions) == 2
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.TRANSFER_FUNDS_SIGNAL
- assert result.processed_actions[0].result["amount"] == 1
- assert result.processed_actions[1].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- assert len(result.get_created_orders()) == 1
- limit_order = result.get_created_orders()[0]
- assert limit_order["symbol"] == "ETH/BTC"
- assert limit_order["amount"] == 1
- assert limit_order["type"] == "limit"
- assert limit_order["side"] == "buy"
- assert result.next_actions_description is None # no more actions to execute
-
- # ensure trades are taken into account in portfolio
- post_deposit_portfolio = job2.after_execution_state.bots[0].exchange_account_elements.portfolio.content
-
- assert "ETH" not in post_deposit_portfolio # ETH order has not been executed (still open)
-
- assert post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_TOTAL] == 2
- # created a buy order but not executed: locked BTC in portfolio
- assert post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE] < post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_TOTAL]
-
-
- async def test_run_multiple_actions_bundle_with_wait(self, multiple_action_bundle_with_wait):
- # step 1: configure the task
- job = octobot_lib.OctoBotActionsJob(multiple_action_bundle_with_wait)
- result = await job.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == kw_enums.CustomActionExclusiveFormattedContentConfigKeys.APPLY_CONFIGURATION.value
- pre_trade_portfolio = job.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert pre_trade_portfolio["BTC"] == {
- common_constants.PORTFOLIO_AVAILABLE: 1,
- common_constants.PORTFOLIO_TOTAL: 1,
- }
-
- # step 2: run the deposit action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.TRANSFER_FUNDS_SIGNAL
- job2 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job2.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.TRANSFER_FUNDS_SIGNAL
- assert result.processed_actions[0].result["amount"] == 1
- assert result.next_actions_description is not None
- assert len(result.next_actions_description.immediate_actions) == 1
- assert result.next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- post_deposit_portfolio = job2.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert post_deposit_portfolio["BTC"] == {
- common_constants.PORTFOLIO_AVAILABLE: 2,
- common_constants.PORTFOLIO_TOTAL: 2,
- }
-
- # step 3: run the trade action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- job3 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job3.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.BUY_SIGNAL
- assert len(result.get_created_orders()) == 1
- post_trade_portfolio = job3.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert post_trade_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE] < post_deposit_portfolio["BTC"][common_constants.PORTFOLIO_AVAILABLE]
- assert post_trade_portfolio["ETH"] == {
- common_constants.PORTFOLIO_AVAILABLE: 0.999,
- common_constants.PORTFOLIO_TOTAL: 0.999,
- }
-
- # step 4: run the withdraw action
- next_actions_description = result.next_actions_description
- assert next_actions_description is not None
- assert len(next_actions_description.immediate_actions) == 1
- assert next_actions_description.immediate_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.WITHDRAW_FUNDS_SIGNAL
- job4 = octobot_lib.OctoBotActionsJob(
- next_actions_description.to_dict(include_default_values=False)
- )
- result = await job4.run()
- assert len(result.processed_actions) == 1
- assert result.processed_actions[0].config[kw_constants.CUSTOM_ACTION_OPEN_SOURCE_FORMAT_KEY][custom_action_trading_mode.CustomActionTradingMode.SIGNAL_KEY] == custom_action_trading_mode.CustomActionTradingMode.WITHDRAW_FUNDS_SIGNAL
- assert result.processed_actions[0].result["amount"] == 0.999
- post_withdraw_portfolio = job4.after_execution_state.bots[0].exchange_account_elements.portfolio.content
- assert post_withdraw_portfolio["BTC"] == post_trade_portfolio["BTC"]
- assert "ETH" not in post_withdraw_portfolio
- assert result.next_actions_description is None # no more actions to execute
diff --git a/packages/node/tests/scheduler/test_tasks.py b/packages/node/tests/scheduler/test_tasks.py
index 68c46e0d5..8beeba0e2 100644
--- a/packages/node/tests/scheduler/test_tasks.py
+++ b/packages/node/tests/scheduler/test_tasks.py
@@ -59,21 +59,19 @@ async def test_trigger_all_task_types(self, schedule_task, temp_dbos_scheduler):
for task_type in octobot_node.models.TaskType:
schedule_task.type = task_type.value
with mock.patch.object(
- temp_dbos_scheduler.BOT_WORKFLOW_QUEUE, "enqueue_async", mock.AsyncMock()
+ temp_dbos_scheduler.AUTOMATION_WORKFLOW_QUEUE, "enqueue_async", mock.AsyncMock()
) as mock_enqueue_async:
result = await octobot_node.scheduler.tasks.trigger_task(schedule_task)
assert result is True
mock_enqueue_async.assert_called_once()
call_kwargs = mock_enqueue_async.call_args[1]
- assert "t" in call_kwargs
- assert call_kwargs["t"].name.startswith(f"{schedule_task.name}_")
assert "inputs" in call_kwargs
+ assert len(call_kwargs["inputs"]) == 1
inputs = call_kwargs["inputs"]
assert inputs["task"] == schedule_task.model_dump(exclude_defaults=True)
- assert inputs["delay"] == 1
- with pytest.raises(ValueError, match="Invalid task type"):
+ with pytest.raises(ValueError, match="Unsupported task type"):
with mock.patch.object(
- temp_dbos_scheduler.BOT_WORKFLOW_QUEUE, "enqueue_async", mock.AsyncMock()
+ temp_dbos_scheduler.AUTOMATION_WORKFLOW_QUEUE, "enqueue_async", mock.AsyncMock()
) as mock_enqueue_async:
schedule_task.type = "invalid_type"
await octobot_node.scheduler.tasks.trigger_task(schedule_task)
diff --git a/packages/node/tests/scheduler/test_tasks_recovery.py b/packages/node/tests/scheduler/test_tasks_recovery.py
new file mode 100644
index 000000000..23d9f5220
--- /dev/null
+++ b/packages/node/tests/scheduler/test_tasks_recovery.py
@@ -0,0 +1,137 @@
+# This file is part of OctoBot Node (https://github.com/Drakkar-Software/OctoBot-Node)
+# Copyright (c) 2025 Drakkar-Software, All rights reserved.
+#
+# OctoBot is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import pytest
+import tempfile
+import dbos
+import logging
+import time
+
+import octobot_node.scheduler
+
+QUEUE = dbos.Queue(name="test_queue")
+
+WF_TO_CREATE = 10
+WF_SLEEP_TIME = 1.5 # note: reducing this value won't speed up the test
+
+async def _init_dbos_scheduler(db_file_name: str, reset_database: bool = False):
+ config: dbos.DBOSConfig = {
+ "name": "scheduler_test",
+ "system_database_url": f"sqlite:///{db_file_name}",
+ "max_executor_threads": 2, # 2 is the minimum number of threads to let dbos recover properly with pending workflows
+ }
+ dbos.DBOS(config=config)
+ if reset_database:
+ dbos.DBOS.reset_system_database()
+ octobot_node.scheduler.SCHEDULER.INSTANCE = dbos.DBOS
+
+
+class TestSchedulerRecovery:
+
+ @pytest.mark.asyncio
+ async def test_recover_after_shutdown(self):
+ completed_workflows = []
+ with tempfile.NamedTemporaryFile() as temp_file:
+ await _init_dbos_scheduler(temp_file.name, reset_database=True)
+
+ @octobot_node.scheduler.SCHEDULER.INSTANCE.dbos_class()
+ class Sleeper():
+ @staticmethod
+ @octobot_node.scheduler.SCHEDULER.INSTANCE.workflow()
+ async def sleeper_workflow(identifier: float) -> float:
+ logging.info(f"sleeper_workflow {identifier} started")
+ await dbos.DBOS.sleep_async(WF_SLEEP_TIME)
+ logging.info(f"sleeper_workflow {identifier} done")
+ completed_workflows.append(identifier)
+ return identifier
+
+ logging.info(f"Launching DBOS instance 1 ...")
+ octobot_node.scheduler.SCHEDULER.INSTANCE.launch()
+ logging.info(f"DBOS instance 1 launched")
+
+ # 1. simple execution
+ t0 = time.time()
+ for i in range(WF_TO_CREATE):
+ await QUEUE.enqueue_async(Sleeper.sleeper_workflow, i)
+ wfs = await octobot_node.scheduler.SCHEDULER.INSTANCE.list_workflows_async(
+ status=["ENQUEUED", "PENDING"]
+ )
+ assert len(wfs) == WF_TO_CREATE
+ for wf_status in wfs:
+ handle = await octobot_node.scheduler.SCHEDULER.INSTANCE.retrieve_workflow_async(wf_status.workflow_id)
+ assert 0 <= await handle.get_result() < WF_TO_CREATE
+ duration = time.time() - t0
+ logging.info(f"Workflow batch completed in {duration} seconds")
+ max_duration = WF_TO_CREATE * WF_SLEEP_TIME * 0.9 # 90% of the 1 by 1 time to ensure asynchronous execution. usually 3 to 4 seconds on a normal machine
+ assert duration <= max_duration, f"Workflow batch part 1 completed in {duration} seconds, expected <= {max_duration}"
+ assert sorted(completed_workflows) == list(range(WF_TO_CREATE))
+ completed_workflows.clear()
+
+ # 2. enqueue 10 more and restart
+ for i in range(WF_TO_CREATE):
+ await QUEUE.enqueue_async(Sleeper.sleeper_workflow, i)
+ logging.info(f"Destroying DBOS instance 1 ...")
+ octobot_node.scheduler.SCHEDULER.INSTANCE.destroy()
+ logging.info(f"DBOS instance 1 destroyed")
+
+ # 3. restart and check completed workflows
+ logging.info(f"Launching DBOS instance 2 ...")
+ await _init_dbos_scheduler(temp_file.name)
+ octobot_node.scheduler.SCHEDULER.INSTANCE.launch()
+ logging.info(f"DBOS instance 2 launched")
+ all_wfs = await octobot_node.scheduler.SCHEDULER.INSTANCE.list_workflows_async()
+ assert len(all_wfs) == WF_TO_CREATE * 2
+ pending_wfs = await octobot_node.scheduler.SCHEDULER.INSTANCE.list_workflows_async(
+ status=["ENQUEUED", "PENDING"]
+ )
+ assert len(pending_wfs) == WF_TO_CREATE
+ # enqueue a second batch of workflows
+ for i in range(WF_TO_CREATE, WF_TO_CREATE*2):
+ await QUEUE.enqueue_async(Sleeper.sleeper_workflow, i)
+ t0 = time.time()
+ for wf_status in await octobot_node.scheduler.SCHEDULER.INSTANCE.list_workflows_async():
+ handle = await octobot_node.scheduler.SCHEDULER.INSTANCE.retrieve_workflow_async(wf_status.workflow_id)
+ assert 0 <= await handle.get_result() < WF_TO_CREATE*2
+ duration = time.time() - t0
+ logging.info(f"2 parallel workflow batches completed in {duration} seconds")
+ max_duration = WF_TO_CREATE * WF_SLEEP_TIME * 2 * 0.9 # 90% of the 1 by 1 time to ensure asynchronous execution. usually 3 to 4 seconds on a normal machine
+ assert duration < max_duration, f"Workflow batch part 2 completed in {duration} seconds, expected <= {max_duration}"
+ assert sorted(completed_workflows) == list(range(WF_TO_CREATE*2))
+ logging.info(f"Destroying DBOS instance 2 ...")
+ octobot_node.scheduler.SCHEDULER.INSTANCE.destroy()
+ logging.info(f"DBOS instance 2 destroyed")
+
+ # 4. restart and check completed workflows
+ logging.info(f"Launching DBOS instance 3 ...")
+ await _init_dbos_scheduler(temp_file.name)
+ octobot_node.scheduler.SCHEDULER.INSTANCE.launch()
+ logging.info(f"DBOS instance 3 launched")
+        # all 30 workflows are now historized
+ pending_wfs = await octobot_node.scheduler.SCHEDULER.INSTANCE.list_workflows_async(
+ status=["ENQUEUED", "PENDING"]
+ )
+ assert pending_wfs == []
+ all_wfs = await octobot_node.scheduler.SCHEDULER.INSTANCE.list_workflows_async()
+ assert len(all_wfs) == WF_TO_CREATE * 3
+ logging.info(f"Destroying DBOS instance 3 ...")
+ octobot_node.scheduler.SCHEDULER.INSTANCE.destroy()
+ logging.info(f"DBOS instance 3 destroyed")
+
+
\ No newline at end of file
diff --git a/packages/node/tests/scheduler/workflows/__init__.py b/packages/node/tests/scheduler/workflows/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/packages/node/tests/scheduler/workflows/test_automation_workflow.py b/packages/node/tests/scheduler/workflows/test_automation_workflow.py
new file mode 100644
index 000000000..180dbca17
--- /dev/null
+++ b/packages/node/tests/scheduler/workflows/test_automation_workflow.py
@@ -0,0 +1,742 @@
+# Drakkar-Software OctoBot-Node
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import asyncio
+import json
+import functools
+import mock
+import pytest
+import time
+import typing
+import tempfile
+import dbos
+
+import octobot_trading.constants
+
+import octobot_node.scheduler
+import octobot_node.scheduler.workflows
+import octobot_node.errors as errors
+import octobot_node.models
+import octobot_node.scheduler.workflows.params as params
+import octobot_node.scheduler.octobot_flow_client as octobot_flow_client
+import octobot_node.scheduler.task_context as task_context
+
+
+from tests.scheduler import temp_dbos_scheduler, init_and_destroy_scheduler
+
+
+IMPORTED_OCTOBOT_FLOW = True
+AUTOMATION_WORKFLOW_IMPORTED = False
+try:
+ import octobot_flow.entities
+ import octobot_flow.enums
+
+except ImportError:
+ IMPORTED_OCTOBOT_FLOW = False
+
+
+@pytest.fixture
+def import_automation_workflow():
+ global AUTOMATION_WORKFLOW_IMPORTED
+ if not AUTOMATION_WORKFLOW_IMPORTED:
+ with tempfile.NamedTemporaryFile() as temp_file:
+ init_and_destroy_scheduler(temp_file.name)
+ import octobot_node.scheduler.workflows.automation_workflow
+ AUTOMATION_WORKFLOW_IMPORTED = True
+
+
+def _automation_state_dict(actions: list[dict[str, typing.Any]]) -> dict[str, typing.Any]:
+ """Build automation state dict with raw action dicts (JSON-serializable)."""
+ return {
+ "automation": {
+ "metadata": {"automation_id": "automation_1"},
+ "actions_dag": {"actions": actions},
+ }
+ }
+
+
+@pytest.fixture
+def parsed_inputs():
+ task = octobot_node.models.Task(
+ name="test_task",
+ content="{}",
+ type=octobot_node.models.TaskType.EXECUTE_ACTIONS.value,
+ )
+ return params.AutomationWorkflowInputs(task=task, execution_time=0)
+
+
+@pytest.fixture
+def task():
+ return octobot_node.models.Task(
+ name="test_task",
+ content="{}",
+ type=octobot_node.models.TaskType.EXECUTE_ACTIONS.value,
+ )
+
+
+@pytest.fixture
+def iteration_result():
+ return params.AutomationWorkflowIterationResult(
+ progress_status=params.ProgressStatus(
+ latest_step="action_1",
+ next_step="action_2",
+ next_step_at=0.0,
+ remaining_steps=1,
+ error=None,
+ should_stop=False,
+ ),
+ next_iteration_description='{"state": {"automation": {}}}',
+ )
+
+def required_imports(func):
+ @functools.wraps(func)
+ async def wrapper(*args, **kwargs):
+ if not IMPORTED_OCTOBOT_FLOW:
+ pytest.skip(reason="octobot_flow is not installed")
+ return await func(*args, **kwargs)
+ return wrapper
+
+
+class TestExecuteAutomation:
+ # use a minimal amount of tests to avoid wasting time initializing the scheduler
+ @pytest.mark.asyncio
+ @required_imports
+ async def test_execute_automation(
+ self, temp_dbos_scheduler, parsed_inputs, iteration_result
+ ):
+ # 1. No delay: calls iteration and stops when _should_continue returns False
+ inputs = parsed_inputs.to_dict(include_default_values=False)
+ iter_result = params.AutomationWorkflowIterationResult(
+ progress_status=iteration_result.progress_status,
+ next_iteration_description=None,
+ )
+ mock_iteration = mock.AsyncMock(return_value=iter_result.to_dict(include_default_values=False))
+ mock_should_continue = mock.Mock(return_value=False)
+ mock_process = mock.AsyncMock()
+
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "execute_iteration",
+ mock_iteration,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_should_continue_workflow",
+ mock_should_continue,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_process_pending_priority_actions_and_reschedule",
+ mock_process,
+ ):
+ handle = await temp_dbos_scheduler.INSTANCE.start_workflow_async(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.execute_automation,
+ inputs=inputs,
+ )
+ assert await handle.get_result() is None
+ mock_iteration.assert_called_once_with(inputs, [])
+ mock_should_continue.assert_called_once()
+ mock_process.assert_not_called()
+
+ # 2. With delay: waits, calls iteration, _process_pending not called
+ parsed_inputs.execution_time = time.time() + 100
+ inputs = parsed_inputs.to_dict(include_default_values=False)
+ mock_wait = mock.AsyncMock(return_value=[])
+ mock_iteration = mock.AsyncMock(return_value=iteration_result.to_dict(include_default_values=False))
+ mock_should_continue = mock.Mock(return_value=False)
+ mock_process = mock.AsyncMock()
+
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_wait_and_trigger_on_priority_actions",
+ mock_wait,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "execute_iteration",
+ mock_iteration,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_should_continue_workflow",
+ mock_should_continue,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_process_pending_priority_actions_and_reschedule",
+ mock_process,
+ ):
+ handle = await temp_dbos_scheduler.INSTANCE.start_workflow_async(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.execute_automation,
+ inputs=inputs,
+ )
+ assert await handle.get_result() is None
+ mock_wait.assert_called_once()
+ mock_iteration.assert_called_once_with(inputs, [])
+ mock_process.assert_not_called()
+
+ # 3. With delay, _should_continue True: _process_pending called
+ inputs = parsed_inputs.to_dict(include_default_values=False)
+ mock_wait = mock.AsyncMock(return_value=[])
+ mock_iteration = mock.AsyncMock(return_value=iteration_result.to_dict(include_default_values=False))
+ mock_should_continue = mock.Mock(return_value=True)
+ mock_process = mock.AsyncMock(return_value=True)
+
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_wait_and_trigger_on_priority_actions",
+ mock_wait,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "execute_iteration",
+ mock_iteration,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_should_continue_workflow",
+ mock_should_continue,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_process_pending_priority_actions_and_reschedule",
+ mock_process,
+ ):
+ handle = await temp_dbos_scheduler.INSTANCE.start_workflow_async(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.execute_automation,
+ inputs=inputs,
+ )
+ assert await handle.get_result() is None
+ mock_wait.assert_called_once()
+ mock_iteration.assert_called_once_with(inputs, [])
+ mock_should_continue.assert_called_once()
+ mock_process.assert_awaited_once_with(parsed_inputs, iteration_result)
+
+ # 4. Priority actions passed to iteration
+ inputs = parsed_inputs.to_dict(include_default_values=False)
+ priority_actions = [{"action": "stop"}]
+ mock_wait = mock.AsyncMock(return_value=priority_actions)
+ mock_iteration = mock.AsyncMock(return_value=iteration_result.to_dict(include_default_values=False))
+ mock_should_continue = mock.Mock(return_value=False)
+ mock_process = mock.AsyncMock()
+
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_wait_and_trigger_on_priority_actions",
+ mock_wait,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "execute_iteration",
+ mock_iteration,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_should_continue_workflow",
+ mock_should_continue,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_process_pending_priority_actions_and_reschedule",
+ mock_process,
+ ):
+ handle = await temp_dbos_scheduler.INSTANCE.start_workflow_async(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.execute_automation,
+ inputs=inputs,
+ )
+ assert await handle.get_result() is None
+ mock_iteration.assert_called_once_with(inputs, priority_actions)
+ mock_process.assert_not_called()
+
+ # 5. Exception is caught and logged
+ parsed_inputs.execution_time = 0
+ inputs = parsed_inputs.to_dict(include_default_values=False)
+ mock_iteration = mock.AsyncMock(side_effect=ValueError("test error"))
+ mock_logger = mock.Mock()
+ mock_process = mock.AsyncMock()
+
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "execute_iteration",
+ mock_iteration,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_should_continue_workflow",
+ mock.Mock(return_value=False),
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "get_logger",
+ mock.Mock(return_value=mock_logger),
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_process_pending_priority_actions_and_reschedule",
+ mock_process,
+ ):
+ handle = await temp_dbos_scheduler.INSTANCE.start_workflow_async(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.execute_automation,
+ inputs=inputs,
+ )
+ assert await handle.get_result() is None
+ mock_logger.exception.assert_called_once()
+ mock_process.assert_not_called()
+
+
+class TestExecuteIteration:
+ def setup_method(self):
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = True
+
+ def teardown_method(self):
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = False
+
+ @pytest.mark.asyncio
+ @required_imports
+ async def test_execute_iteration_returns_iteration_result(self, import_automation_workflow, task):
+ task.content = json.dumps({"params": {"ACTIONS": "trade", "EXCHANGE_FROM": "binance",
+ "ORDER_SYMBOL": "ETH/BTC", "ORDER_AMOUNT": 1, "ORDER_TYPE": "market",
+ "ORDER_SIDE": "BUY", "SIMULATED_PORTFOLIO": {"BTC": 1}}})
+ inputs = params.AutomationWorkflowInputs(task=task, execution_time=0).to_dict(include_default_values=False)
+
+ action = octobot_flow.entities.ConfiguredActionDetails(
+ id="action_1",
+ action="trade",
+ )
+
+ mock_result = octobot_flow_client.OctoBotActionsJobResult(
+ processed_actions=[action],
+ next_actions_description=None,
+ actions_dag=None,
+ should_stop=False,
+ )
+ mock_job = mock.Mock()
+ mock_job.run = mock.AsyncMock(return_value=mock_result)
+
+ with mock.patch.object(task_context, "encrypted_task", mock.MagicMock()) as mock_encrypted:
+ mock_encrypted.return_value.__enter__ = mock.Mock(return_value=None)
+ mock_encrypted.return_value.__exit__ = mock.Mock(return_value=None)
+ with mock.patch.object(
+ octobot_flow_client,
+ "OctoBotActionsJob",
+ mock.Mock(return_value=mock_job),
+ ):
+ result = await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.execute_iteration(inputs, [])
+
+ assert "progress_status" in result
+ assert "next_iteration_description" in result
+ parsed_progress_status = params.ProgressStatus.model_validate(result["progress_status"])
+ assert parsed_progress_status.latest_step == "trade"
+ assert parsed_progress_status.error is None
+ assert parsed_progress_status.should_stop is False
+
+ @pytest.mark.asyncio
+ async def test_execute_iteration_invalid_task_type_raises_workflow_input_error(self, import_automation_workflow, task):
+ task.type = "invalid_type"
+ task.content = "{}"
+ inputs = params.AutomationWorkflowInputs(task=task, execution_time=0).to_dict(include_default_values=False)
+
+ with mock.patch.object(task_context, "encrypted_task", mock.MagicMock()) as mock_encrypted:
+ mock_encrypted.return_value.__enter__ = mock.Mock(return_value=None)
+ mock_encrypted.return_value.__exit__ = mock.Mock(return_value=None)
+ with pytest.raises(errors.WorkflowInputError, match="Invalid task type"):
+ await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.execute_iteration(inputs, [])
+
+ @pytest.mark.asyncio
+ @required_imports
+ async def test_execute_iteration_execution_error_sets_progress_error(self, import_automation_workflow, task):
+ task.content = json.dumps({"params": {"ACTIONS": "trade", "EXCHANGE_FROM": "binance",
+ "ORDER_SYMBOL": "ETH/BTC", "ORDER_AMOUNT": 1, "ORDER_TYPE": "market",
+ "ORDER_SIDE": "BUY", "SIMULATED_PORTFOLIO": {"BTC": 1}}})
+ inputs = params.AutomationWorkflowInputs(task=task, execution_time=0).to_dict(include_default_values=False)
+
+ action = octobot_flow.entities.ConfiguredActionDetails(
+ id="action_1",
+ action="trade",
+ error_status="some_error",
+ )
+
+ mock_result = octobot_flow_client.OctoBotActionsJobResult(
+ processed_actions=[action],
+ next_actions_description=None,
+ actions_dag=None,
+ should_stop=False,
+ )
+ mock_job = mock.Mock()
+ mock_job.run = mock.AsyncMock(return_value=mock_result)
+
+ with mock.patch.object(task_context, "encrypted_task", mock.MagicMock()) as mock_encrypted:
+ mock_encrypted.return_value.__enter__ = mock.Mock(return_value=None)
+ mock_encrypted.return_value.__exit__ = mock.Mock(return_value=None)
+ with mock.patch.object(
+ octobot_flow_client,
+ "OctoBotActionsJob",
+ mock.Mock(return_value=mock_job),
+ ):
+ result = await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.execute_iteration(inputs, [])
+
+ parsed_progress_status = params.ProgressStatus.model_validate(result["progress_status"])
+ assert parsed_progress_status.error == "some_error"
+
+
+class TestWaitAndTriggerOnPriorityActions:
+ @pytest.mark.asyncio
+ async def test_wait_and_trigger_returns_empty_when_no_actions(self, import_automation_workflow, parsed_inputs):
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.SCHEDULER.INSTANCE,
+ "recv_async",
+ mock.AsyncMock(return_value=[]),
+ ):
+ result = await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._wait_and_trigger_on_priority_actions(
+ parsed_inputs, 0
+ )
+ assert result == []
+
+ @pytest.mark.asyncio
+ async def test_wait_and_trigger_returns_actions_when_received(self, import_automation_workflow, parsed_inputs):
+ priority_actions = [{"action": "stop"}]
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.SCHEDULER.INSTANCE,
+ "recv_async",
+ mock.AsyncMock(return_value=priority_actions),
+ ):
+ result = await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._wait_and_trigger_on_priority_actions(
+ parsed_inputs, 0
+ )
+ assert result == priority_actions
+
+
+class TestProcessPendingPriorityActionsAndReschedule:
+ @pytest.mark.asyncio
+ async def test_process_pending_returns_false_when_no_next_iteration(self, import_automation_workflow, parsed_inputs, iteration_result):
+ iteration_result.next_iteration_description = None
+ result = await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._process_pending_priority_actions_and_reschedule(
+ parsed_inputs, iteration_result
+ )
+ assert result is False
+
+ @pytest.mark.asyncio
+ async def test_process_pending_schedules_next_when_no_priority_actions(
+ self, import_automation_workflow, parsed_inputs, iteration_result
+ ):
+ mock_wait = mock.AsyncMock(return_value=[])
+ mock_schedule = mock.AsyncMock()
+
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_wait_and_trigger_on_priority_actions",
+ mock_wait,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_schedule_next_iteration",
+ mock_schedule,
+ ):
+ result = await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._process_pending_priority_actions_and_reschedule(
+ parsed_inputs, iteration_result
+ )
+ assert result is True
+ mock_wait.assert_awaited_once_with(parsed_inputs, 0)
+ mock_schedule.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_process_pending_returns_false_when_should_stop(self, import_automation_workflow, parsed_inputs, iteration_result):
+ iteration_result.progress_status.should_stop = True
+ mock_wait = mock.AsyncMock(return_value=[])
+
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_wait_and_trigger_on_priority_actions",
+ mock_wait,
+ ):
+ result = await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._process_pending_priority_actions_and_reschedule(
+ parsed_inputs, iteration_result
+ )
+ assert result is True
+
+ @pytest.mark.asyncio
+ async def test_process_pending_raises_when_no_next_iteration_after_priority_actions(
+ self, import_automation_workflow, parsed_inputs, iteration_result
+ ):
+ result_without_next = params.AutomationWorkflowIterationResult(
+ progress_status=params.ProgressStatus(
+ latest_step="done",
+ next_step=None,
+ next_step_at=None,
+ remaining_steps=0,
+ error=None,
+ should_stop=False,
+ ),
+ next_iteration_description=None,
+ )
+ mock_wait = mock.AsyncMock(side_effect=[[{"action": "stop"}], []])
+ mock_iteration = mock.AsyncMock(
+ return_value=result_without_next.to_dict(include_default_values=False)
+ )
+
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_wait_and_trigger_on_priority_actions",
+ mock_wait,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "execute_iteration",
+ mock_iteration,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_should_continue_workflow",
+ mock.Mock(return_value=True),
+ ):
+ with pytest.raises(
+ errors.WorkflowPriorityActionExecutionError,
+ match="no next iteration description after processing priority actions",
+ ):
+ await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._process_pending_priority_actions_and_reschedule(
+ parsed_inputs, iteration_result
+ )
+
+ @pytest.mark.asyncio
+ async def test_process_pending_with_priority_actions_schedules_next_when_iteration_has_next(
+ self, import_automation_workflow, parsed_inputs, iteration_result
+ ):
+ result_with_next = params.AutomationWorkflowIterationResult(
+ progress_status=params.ProgressStatus(
+ latest_step="step_1",
+ next_step="step_2",
+ next_step_at=0.0,
+ remaining_steps=1,
+ error=None,
+ should_stop=False,
+ ),
+ next_iteration_description='{"state": {"automation": {}}}',
+ )
+ mock_wait = mock.AsyncMock(side_effect=[[{"action": "stop"}], []])
+ mock_iteration = mock.AsyncMock(
+ return_value=result_with_next.to_dict(include_default_values=False)
+ )
+ mock_schedule = mock.AsyncMock()
+
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_wait_and_trigger_on_priority_actions",
+ mock_wait,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "execute_iteration",
+ mock_iteration,
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_should_continue_workflow",
+ mock.Mock(return_value=True),
+ ), mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow,
+ "_schedule_next_iteration",
+ mock_schedule,
+ ):
+ result = await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._process_pending_priority_actions_and_reschedule(
+ parsed_inputs, iteration_result
+ )
+
+ assert result is True
+ mock_wait.assert_awaited()
+ mock_iteration.assert_called_once()
+ mock_schedule.assert_called_once()
+
+
+class TestScheduleNextIteration:
+ @pytest.mark.asyncio
+ async def test_schedule_next_iteration_enqueues_workflow(self, import_automation_workflow, parsed_inputs, iteration_result):
+ mock_enqueue = mock.AsyncMock()
+ next_desc = iteration_result.next_iteration_description
+
+ with mock.patch.object(
+ octobot_node.scheduler.workflows.automation_workflow.SCHEDULER.AUTOMATION_WORKFLOW_QUEUE,
+ "enqueue_async",
+ mock_enqueue,
+ ):
+ await octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._schedule_next_iteration(
+ parsed_inputs, next_desc, iteration_result.progress_status
+ )
+ mock_enqueue.assert_called_once()
+ call_args = mock_enqueue.call_args
+ assert call_args[0][0] == octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.execute_automation
+ assert "inputs" in call_args[1]
+
+
+class TestCreateNextIterationInputs:
+ def test_create_next_iteration_inputs_returns_correct_dict(self, import_automation_workflow, task):
+ parsed_inputs = params.AutomationWorkflowInputs(task=task, execution_time=0)
+ next_iteration_description = '{"state": {}}'
+ next_execution_time = 123.0
+
+ result = octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._create_next_iteration_inputs(
+ parsed_inputs, next_iteration_description, next_execution_time
+ )
+ assert "task" in result
+ parsed_result = params.AutomationWorkflowInputs.from_dict(result)
+ task = parsed_result.task
+ content = task.get("content") if isinstance(task, dict) else task.content
+ assert content == next_iteration_description
+ assert parsed_result.execution_time == 123.0
+
+ def test_create_next_iteration_inputs_uses_zero_when_execution_time_none(self, import_automation_workflow, task): #todo
+ parsed_inputs = params.AutomationWorkflowInputs(task=task, execution_time=0)
+ result = octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._create_next_iteration_inputs(
+ parsed_inputs, "{}", None
+ )
+ result = params.AutomationWorkflowInputs.from_dict(result)
+ assert result.execution_time == 0
+
+
+class TestShouldContinueWorkflow:
+ def test_should_continue_returns_stop_on_error_when_error(self, import_automation_workflow, parsed_inputs):
+ progress = params.ProgressStatus(error="some_error", should_stop=False)
+ assert octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._should_continue_workflow(
+ parsed_inputs, progress, True
+ ) is True
+ assert octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._should_continue_workflow(
+ parsed_inputs, progress, False
+ ) is False
+
+ def test_should_continue_returns_false_when_should_stop(self, import_automation_workflow, parsed_inputs):
+ progress = params.ProgressStatus(error=None, should_stop=True)
+ assert octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._should_continue_workflow(
+ parsed_inputs, progress, True
+ ) is False
+ assert octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._should_continue_workflow(
+ parsed_inputs, progress, False
+ ) is False
+
+ def test_should_continue_returns_true_by_no_reason_to_stop(self, import_automation_workflow, parsed_inputs):
+ progress = params.ProgressStatus(error=None, should_stop=False)
+ assert octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._should_continue_workflow(
+ parsed_inputs, progress, True
+ ) is True
+ assert octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._should_continue_workflow(
+ parsed_inputs, progress, False
+ ) is True
+
+
+class TestGetActionsSummary:
+ def test_get_actions_summary_empty_returns_empty_string(self, import_automation_workflow):
+ assert octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._get_actions_summary([]) == ""
+
+ @pytest.mark.asyncio
+ @required_imports
+ async def test_get_actions_summary_joins_action_summaries(self, import_automation_workflow):
+ action1 = octobot_flow.entities.ConfiguredActionDetails(id="action_1", action="action_1")
+ action2 = octobot_flow.entities.DSLScriptActionDetails(id="action_2", dsl_script="action_2('plop')")
+ result = octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._get_actions_summary([action1, action2])
+ assert result == "action_1, action_2('plop')"
+
+        # with minimal=True, each action's summary is reduced to its operator name (arguments stripped)
+ result = octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._get_actions_summary([action1, action2], minimal=True)
+ assert result == "action_1, action_2"
+
+ def test_get_actions_summary_minimal_calls_get_summary_with_minimal(self, import_automation_workflow):
+ mock_action = mock.Mock()
+ mock_action.get_summary = mock.Mock(return_value="sum")
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow._get_actions_summary([mock_action], minimal=True)
+ mock_action.get_summary.assert_called_once_with(minimal=True)
+
+
+class TestGetLogger:
+ def test_get_logger_uses_task_name(self, import_automation_workflow, parsed_inputs):
+ with mock.patch("octobot_commons.logging.get_logger", mock.Mock()) as mock_get_logger:
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.get_logger(parsed_inputs)
+ mock_get_logger.assert_called_once_with("test_task")
+
+ def test_get_logger_uses_class_name_when_task_name_none(self, import_automation_workflow):
+ task = octobot_node.models.Task(name=None, content="{}")
+ parsed_inputs = params.AutomationWorkflowInputs(task=task, execution_time=0)
+ with mock.patch("octobot_commons.logging.get_logger", mock.Mock()) as mock_get_logger:
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.get_logger(parsed_inputs)
+ mock_get_logger.assert_called_once_with(octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.__name__)
+
+
+class TestExecuteAutomationIntegration:
+ def setup_method(self):
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = True
+
+ def teardown_method(self):
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = False
+
+ @pytest.mark.asyncio
+ @required_imports
+    async def test_execute_automation_full_workflow_three_iterations(
+ self,
+ import_automation_workflow,
+ temp_dbos_scheduler,
+ ):
+ init_action = {
+ "id": "action_init",
+ "action": octobot_flow.enums.ActionType.APPLY_CONFIGURATION.value,
+ "config": {
+ "automation": {"metadata": {"automation_id": "automation_1"}},
+ "client_exchange_account_elements": {
+ "portfolio": {
+ "content": {
+ "ETH": {"total": 1, "available": 1},
+ },
+ },
+ },
+ },
+ }
+ dsl_action_1 = {
+ "id": "action_dsl_1",
+ "dsl_script": "1 if True else 2",
+ "dependencies": [{"action_id": "action_init"}],
+ }
+ dsl_action_2 = {
+ "id": "action_dsl_2",
+ "dsl_script": "1 if True else 2",
+ "dependencies": [{"action_id": "action_dsl_1"}],
+ }
+ all_actions = [init_action, dsl_action_1, dsl_action_2]
+ state_dict = _automation_state_dict(all_actions)
+ state_dict["automation"]["client_exchange_account_elements"] = {
+ "portfolio": {"content": {"ETH": {"total": 1, "available": 1}}},
+ }
+ state_dict["automation"]["execution"] = {
+ "previous_execution": {
+ "trigger_time": time.time() - 600,
+ "trigger_reason": "scheduled",
+ "strategy_execution_time": time.time() - 590,
+ },
+ "current_execution": {"trigger_reason": "scheduled"},
+ }
+ task_content = json.dumps({"state": state_dict})
+ task = octobot_node.models.Task(
+ name="test_automation",
+ content=task_content,
+ type=octobot_node.models.TaskType.EXECUTE_ACTIONS.value,
+ )
+ inputs = params.AutomationWorkflowInputs(task=task, execution_time=0).to_dict(
+ include_default_values=False
+ )
+ inputs["task"] = task.model_dump(exclude_defaults=True)
+
+ recv_path = "octobot_node.scheduler.workflows.automation_workflow.SCHEDULER.INSTANCE.recv_async"
+ with mock.patch(recv_path, mock.AsyncMock(return_value=[])):
+ await temp_dbos_scheduler.AUTOMATION_WORKFLOW_QUEUE.enqueue_async(
+ octobot_node.scheduler.workflows.automation_workflow.AutomationWorkflow.execute_automation,
+ inputs=inputs,
+ )
+
+ max_wait = 30
+ poll_interval = 0.5
+ elapsed = 0
+ while elapsed < max_wait:
+ workflows = await temp_dbos_scheduler.INSTANCE.list_workflows_async()
+ pending = [w for w in workflows if w.status in (
+ dbos.WorkflowStatusString.PENDING.value, dbos.WorkflowStatusString.ENQUEUED.value
+ )]
+ if not pending and len(workflows) >= 3:
+ break
+ await asyncio.sleep(poll_interval)
+ elapsed += poll_interval
+
+ assert len(workflows) >= 3, f"Expected at least 3 workflows, got {len(workflows)}"
+ assert not pending, f"Expected no pending workflows, got {pending}"
+
+
+ completed = [w for w in workflows if w.status == dbos.WorkflowStatusString.SUCCESS.value]
+ assert len(completed) >= 3, f"Expected at least 3 completed workflows, got {len(completed)}"
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_private_data_operators/__init__.py b/packages/tentacles/Meta/DSL_operators/automation_operators/__init__.py
similarity index 69%
rename from packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_private_data_operators/__init__.py
rename to packages/tentacles/Meta/DSL_operators/automation_operators/__init__.py
index f29ccbe2c..0516a710f 100644
--- a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_private_data_operators/__init__.py
+++ b/packages/tentacles/Meta/DSL_operators/automation_operators/__init__.py
@@ -15,12 +15,10 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
-import tentacles.Meta.DSL_operators.exchange_operators.exchange_private_data_operators.portfolio_operators
-from tentacles.Meta.DSL_operators.exchange_operators.exchange_private_data_operators.portfolio_operators import (
- PortfolioOperator,
- create_portfolio_operators,
+import tentacles.Meta.DSL_operators.automation_operators.automation_management
+from tentacles.Meta.DSL_operators.automation_operators.automation_management import (
+ StopAutomationOperator,
)
__all__ = [
- "PortfolioOperator",
- "create_portfolio_operators",
+ "StopAutomationOperator",
]
\ No newline at end of file
diff --git a/packages/tentacles/Meta/DSL_operators/automation_operators/automation_management.py b/packages/tentacles/Meta/DSL_operators/automation_operators/automation_management.py
new file mode 100644
index 000000000..22c85551b
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/automation_operators/automation_management.py
@@ -0,0 +1,37 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import octobot_commons.dsl_interpreter as dsl_interpreter
+
+import octobot_flow.entities
+
+
+class StopAutomationOperator(dsl_interpreter.CallOperator):
+ MIN_PARAMS = 0
+ MAX_PARAMS = 0
+ DESCRIPTION = "Signals the automation to stop."
+ EXAMPLE = "stop_automation()"
+
+ @staticmethod
+ def get_name() -> str:
+ return "stop_automation"
+
+ def compute(self) -> dict:
+ return {
+ octobot_flow.entities.PostIterationActionsDetails.__name__:
+ octobot_flow.entities.PostIterationActionsDetails(
+ stop_automation=True
+ ).to_dict(include_default_values=False)
+ }
diff --git a/packages/tentacles/Meta/DSL_operators/automation_operators/metadata.json b/packages/tentacles/Meta/DSL_operators/automation_operators/metadata.json
new file mode 100644
index 000000000..319240c83
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/automation_operators/metadata.json
@@ -0,0 +1,6 @@
+{
+ "version": "1.2.0",
+ "origin_package": "OctoBot-Default-Tentacles",
+ "tentacles": [],
+ "tentacles-requirements": []
+}
\ No newline at end of file
diff --git a/packages/tentacles/Meta/DSL_operators/automation_operators/tests/test_automation_management.py b/packages/tentacles/Meta/DSL_operators/automation_operators/tests/test_automation_management.py
new file mode 100644
index 000000000..f222f098a
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/automation_operators/tests/test_automation_management.py
@@ -0,0 +1,68 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import pytest
+
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.errors
+import octobot_flow.entities
+
+import tentacles.Meta.DSL_operators.automation_operators.automation_management as automation_management
+
+
+@pytest.fixture
+def interpreter():
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ )
+
+
+def _assert_stop_automation_result(result):
+ assert isinstance(result, dict)
+ assert octobot_flow.entities.PostIterationActionsDetails.__name__ in result
+ details = octobot_flow.entities.PostIterationActionsDetails.from_dict(
+ result[octobot_flow.entities.PostIterationActionsDetails.__name__]
+ )
+ assert details.stop_automation is True
+
+
+@pytest.mark.asyncio
+async def test_stop_automation_call_as_dsl(interpreter):
+ assert "stop_automation" in interpreter.operators_by_name
+
+ result = await interpreter.interprete("stop_automation()")
+ _assert_stop_automation_result(result)
+
+
+def test_stop_automation_operator_compute():
+ operator = automation_management.StopAutomationOperator()
+ result = operator.compute()
+ _assert_stop_automation_result(result)
+
+
+@pytest.mark.asyncio
+async def test_stop_automation_operator_invalid_parameters(interpreter):
+ with pytest.raises(
+ octobot_commons.errors.InvalidParametersError,
+ match="supports up to 0 parameters",
+ ):
+ await interpreter.interprete("stop_automation(1)")
+
+
+def test_stop_automation_operator_docs():
+ docs = automation_management.StopAutomationOperator.get_docs()
+ assert docs.name == "stop_automation"
+ assert "stop" in docs.description.lower()
+ assert docs.example == "stop_automation()"
diff --git a/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/__init__.py b/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/__init__.py
new file mode 100644
index 000000000..0e02768af
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/__init__.py
@@ -0,0 +1,26 @@
+# pylint: disable=R0801
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import tentacles.Meta.DSL_operators.blockchain_wallet_operators.blockchain_wallet_ops
+from tentacles.Meta.DSL_operators.blockchain_wallet_operators.blockchain_wallet_ops import (
+ create_blockchain_wallet_operators,
+ CREATED_TRANSACTIONS_KEY,
+)
+__all__ = [
+ "create_blockchain_wallet_operators",
+ "CREATED_TRANSACTIONS_KEY",
+]
\ No newline at end of file
diff --git a/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/blockchain_wallet_ops.py b/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/blockchain_wallet_ops.py
new file mode 100644
index 000000000..16a74f833
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/blockchain_wallet_ops.py
@@ -0,0 +1,150 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+import dataclasses
+import decimal
+
+import octobot_commons.dataclasses
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_trading.exchanges
+import octobot_trading.api
+import octobot_trading.enums
+import octobot_trading.constants
+import octobot_trading.blockchain_wallets as blockchain_wallets
+
+
+@dataclasses.dataclass
+class BlockchainWalletBalanceParams(octobot_commons.dataclasses.FlexibleDataclass):
+ blockchain_descriptor: blockchain_wallets.BlockchainDescriptor # descriptor of the blockchain to use
+ wallet_descriptor: blockchain_wallets.WalletDescriptor # descriptor of the wallet to use
+ asset: str
+
+
+@dataclasses.dataclass
+class TransferFundsParams(octobot_commons.dataclasses.FlexibleDataclass):
+ blockchain_descriptor: blockchain_wallets.BlockchainDescriptor # descriptor of the blockchain to use
+ wallet_descriptor: blockchain_wallets.WalletDescriptor # descriptor of the wallet to use
+ asset: str
+ amount: float
+ address: typing.Optional[str] = None # recipient address of the transfer
+    destination_exchange: typing.Optional[str] = None  # name of the destination exchange (its deposit address is fetched and used as recipient)
+
+
+BLOCKCHAIN_WALLET_LIBRARY = "blockchain_wallet"
+
+CREATED_TRANSACTIONS_KEY = "created_transactions"
+
+
+class BlockchainWalletOperator(dsl_interpreter.PreComputingCallOperator):
+ @staticmethod
+ def get_library() -> str:
+ # this is a contextual operator, so it should not be included by default in the get_all_operators function return values
+ return BLOCKCHAIN_WALLET_LIBRARY
+
+ @classmethod
+ def get_blockchain_wallet_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="blockchain_descriptor", description="descriptor of the blockchain to use as in octobot_trading.blockchain_wallets.BlockchainDescriptor", required=True, type=dict),
+ dsl_interpreter.OperatorParameter(name="wallet_descriptor", description="descriptor of the wallet to use as in octobot_trading.blockchain_wallets.WalletDescriptor", required=True, type=dict),
+ ]
+
+
+def create_blockchain_wallet_operators(
+ exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager],
+) -> typing.List[type[BlockchainWalletOperator]]:
+
+ class _BlockchainWalletBalanceOperator(BlockchainWalletOperator):
+ DESCRIPTION = "Returns the balance of the asset in the blockchain wallet"
+ EXAMPLE = "blockchain_wallet_balance({blockchain_descriptor}, {wallet_descriptor}, 'BTC')"
+
+ @staticmethod
+ def get_name() -> str:
+ return "blockchain_wallet_balance"
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ *cls.get_blockchain_wallet_parameters(),
+ dsl_interpreter.OperatorParameter(name="asset", description="the asset to get the balance for", required=True, type=str),
+ ]
+
+ async def pre_compute(self) -> None:
+ param_by_name = self.get_computed_value_by_parameter()
+ blockchain_wallet_balance_params = BlockchainWalletBalanceParams.from_dict(param_by_name)
+ async with octobot_trading.api.blockchain_wallet_context(
+ blockchain_wallets.BlockchainWalletParameters(
+ blockchain_descriptor=blockchain_wallet_balance_params.blockchain_descriptor,
+ wallet_descriptor=blockchain_wallet_balance_params.wallet_descriptor,
+ ),
+ exchange_manager.trader if exchange_manager else None
+ ) as wallet:
+ wallet_balance = await wallet.get_balance()
+ self.value = float(
+ wallet_balance[blockchain_wallet_balance_params.asset][
+ octobot_trading.constants.CONFIG_PORTFOLIO_FREE
+ ] if blockchain_wallet_balance_params.asset in wallet_balance else octobot_trading.constants.ZERO
+ )
+
+ class _BlockchainWalletTransferOperator(BlockchainWalletOperator):
+ DESCRIPTION = "Withdraws an asset from the exchange's portfolio. requires ALLOW_FUNDS_TRANSFER env to be True (disabled by default to protect funds)"
+ EXAMPLE = "blockchain_wallet_transfer({blockchain_descriptor}, {wallet_descriptor}, 'BTC', 0.1, '{address}')"
+
+ @staticmethod
+ def get_name() -> str:
+ return "blockchain_wallet_transfer"
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ *cls.get_blockchain_wallet_parameters(),
+ dsl_interpreter.OperatorParameter(name="asset", description="the asset to transfer", required=True, type=str),
+ dsl_interpreter.OperatorParameter(name="amount", description="the amount to transfer", required=True, type=float),
+ dsl_interpreter.OperatorParameter(name="address", description="the address to transfer to", required=False, type=str, default=None),
+ dsl_interpreter.OperatorParameter(name="destination_exchange", description="the exchange to transfer to", required=False, type=str, default=None),
+ ]
+
+ async def pre_compute(self) -> None:
+ await super().pre_compute()
+ param_by_name = self.get_computed_value_by_parameter()
+ transfer_funds_params = TransferFundsParams.from_dict(param_by_name)
+ async with octobot_trading.api.blockchain_wallet_context(
+ blockchain_wallets.BlockchainWalletParameters(
+ blockchain_descriptor=transfer_funds_params.blockchain_descriptor,
+ wallet_descriptor=transfer_funds_params.wallet_descriptor,
+ ),
+ exchange_manager.trader if exchange_manager else None
+ ) as wallet:
+ if transfer_funds_params.address:
+ address = transfer_funds_params.address
+ elif transfer_funds_params.destination_exchange == exchange_manager.exchange_name:
+ address = (
+ await exchange_manager.trader.get_deposit_address(transfer_funds_params.asset)
+ )[octobot_trading.enums.ExchangeConstantsDepositAddressColumns.ADDRESS.value]
+ else:
+ raise octobot_commons.errors.DSLInterpreterError(
+ f"Unsupported destination exchange: {transfer_funds_params.destination_exchange}"
+ )
+ # requires ALLOW_FUNDS_TRANSFER env to be True (disabled by default to protect funds)
+ created_transaction = await wallet.withdraw(
+ transfer_funds_params.asset,
+ decimal.Decimal(str(transfer_funds_params.amount)),
+ transfer_funds_params.blockchain_descriptor.network,
+ address,
+ )
+ self.value = {CREATED_TRANSACTIONS_KEY: [created_transaction]}
+
+ return [_BlockchainWalletBalanceOperator, _BlockchainWalletTransferOperator]
diff --git a/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/metadata.json b/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/metadata.json
new file mode 100644
index 000000000..319240c83
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/metadata.json
@@ -0,0 +1,6 @@
+{
+ "version": "1.2.0",
+ "origin_package": "OctoBot-Default-Tentacles",
+ "tentacles": [],
+ "tentacles-requirements": []
+}
\ No newline at end of file
diff --git a/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/tests/test_blockchain_wallet_ops.py b/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/tests/test_blockchain_wallet_ops.py
new file mode 100644
index 000000000..8db3585b0
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/blockchain_wallet_operators/tests/test_blockchain_wallet_ops.py
@@ -0,0 +1,212 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import decimal
+import pytest
+import pytest_asyncio
+
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_trading.constants
+import octobot_trading.enums
+import octobot_trading.errors
+import octobot_trading.blockchain_wallets as blockchain_wallets
+import octobot_trading.blockchain_wallets.simulator.blockchain_wallet_simulator as blockchain_wallet_simulator
+
+import tentacles.Meta.DSL_operators.blockchain_wallet_operators.blockchain_wallet_ops as blockchain_wallet_ops
+
+from tentacles.Meta.DSL_operators.exchange_operators.tests import (
+ backtesting_config,
+ fake_backtesting,
+ backtesting_exchange_manager,
+ backtesting_trader,
+)
+
+
+BLOCKCHAIN_DESCRIPTOR = {
+ "blockchain": blockchain_wallets.BlockchainWalletSimulator.BLOCKCHAIN,
+ "network": octobot_trading.constants.SIMULATED_BLOCKCHAIN_NETWORK,
+ "native_coin_symbol": "ETH",
+}
+WALLET_DESCRIPTOR = {"address": "0x1234567890123456789012345678901234567890"}
+
+
+def _wallet_descriptor_with_eth_balance(amount: float):
+ return {
+ **WALLET_DESCRIPTOR,
+ "specific_config": {
+ blockchain_wallet_simulator.BlockchainWalletSimulatorConfigurationKeys.ASSETS.value: [
+ {
+ blockchain_wallet_simulator.BlockchainWalletSimulatorConfigurationKeys.ASSET.value: "ETH",
+ blockchain_wallet_simulator.BlockchainWalletSimulatorConfigurationKeys.AMOUNT.value: amount,
+ }
+ ]
+ },
+ }
+
+
+@pytest_asyncio.fixture
+async def blockchain_wallet_operators(backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ return blockchain_wallet_ops.create_blockchain_wallet_operators(exchange_manager)
+
+
+@pytest_asyncio.fixture
+async def interpreter(blockchain_wallet_operators):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ + blockchain_wallet_operators
+ )
+
+
+class TestBlockchainWalletBalanceOperator:
+ @pytest.mark.asyncio
+ async def test_pre_compute(self, blockchain_wallet_operators):
+ balance_op_class, _ = blockchain_wallet_operators
+
+ operator = balance_op_class(
+ BLOCKCHAIN_DESCRIPTOR,
+ _wallet_descriptor_with_eth_balance(1.5),
+ "ETH",
+ )
+ await operator.pre_compute()
+ assert operator.value == 1.5
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_asset_not_in_balance(self, blockchain_wallet_operators):
+ balance_op_class, _ = blockchain_wallet_operators
+
+ operator = balance_op_class(
+ BLOCKCHAIN_DESCRIPTOR,
+ _wallet_descriptor_with_eth_balance(10.0),
+ "BTC",
+ )
+ await operator.pre_compute()
+ assert operator.value == float(octobot_trading.constants.ZERO)
+
+ def test_compute_without_pre_compute(self, blockchain_wallet_operators):
+ balance_op_class, _ = blockchain_wallet_operators
+ operator = balance_op_class(BLOCKCHAIN_DESCRIPTOR, WALLET_DESCRIPTOR, "BTC")
+ with pytest.raises(
+ octobot_commons.errors.DSLInterpreterError,
+ match="has not been pre_computed",
+ ):
+ operator.compute()
+
+ @pytest.mark.asyncio
+ async def test_blockchain_wallet_balance_call_as_dsl(self, interpreter):
+ blockchain_descriptor = BLOCKCHAIN_DESCRIPTOR
+ wallet_descriptor = _wallet_descriptor_with_eth_balance(1.5)
+ assert await interpreter.interprete(
+ f"blockchain_wallet_balance({blockchain_descriptor}, {wallet_descriptor}, 'ETH')"
+ ) == 1.5
+ assert await interpreter.interprete(
+ f"blockchain_wallet_balance({blockchain_descriptor}, {wallet_descriptor}, 'BTC')"
+ ) == 0.0
+
+
+class TestBlockchainWalletTransferOperator:
+ @pytest.mark.asyncio
+ async def test_pre_compute_with_address(self, blockchain_wallet_operators):
+ _, transfer_op_class = blockchain_wallet_operators
+
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = True
+ operator = transfer_op_class(
+ BLOCKCHAIN_DESCRIPTOR,
+ _wallet_descriptor_with_eth_balance(10.0),
+ "ETH",
+ 0.1,
+ address="0xrecipient123",
+ )
+ await operator.pre_compute()
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = False
+
+ assert operator.value is not None
+ assert isinstance(operator.value, dict)
+ assert "created_transactions" in operator.value
+ assert len(operator.value["created_transactions"]) == 1
+ tx = operator.value["created_transactions"][0]
+ assert octobot_trading.enums.ExchangeConstantsTransactionColumns.TXID.value in tx
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_with_destination_exchange(self, blockchain_wallet_operators):
+ _, transfer_op_class = blockchain_wallet_operators
+
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = True
+ operator = transfer_op_class(
+ BLOCKCHAIN_DESCRIPTOR,
+ _wallet_descriptor_with_eth_balance(10.0),
+ "ETH",
+ 0.5,
+ destination_exchange="binanceus",
+ )
+ await operator.pre_compute()
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = False
+
+ assert operator.value is not None
+ assert isinstance(operator.value, dict)
+ assert "created_transactions" in operator.value
+ assert len(operator.value["created_transactions"]) == 1
+ tx = operator.value["created_transactions"][0]
+ assert octobot_trading.enums.ExchangeConstantsTransactionColumns.TXID.value in tx
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_unsupported_destination_exchange(self, blockchain_wallet_operators):
+ _, transfer_op_class = blockchain_wallet_operators
+
+ operator = transfer_op_class(
+ BLOCKCHAIN_DESCRIPTOR,
+ WALLET_DESCRIPTOR,
+ "BTC",
+ 0.1,
+ destination_exchange="unknown_exchange",
+ )
+ with pytest.raises(
+ octobot_commons.errors.DSLInterpreterError,
+ match="Unsupported destination exchange: unknown_exchange",
+ ):
+ await operator.pre_compute()
+
+ @pytest.mark.asyncio
+ async def test_blockchain_wallet_transfer_call_as_dsl(self, interpreter):
+ blockchain_descriptor = BLOCKCHAIN_DESCRIPTOR
+ wallet_descriptor = _wallet_descriptor_with_eth_balance(1.5)
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = False
+ with pytest.raises(
+ octobot_trading.errors.DisabledFundsTransferError,
+ match="Funds transfer is not enabled",
+ ):
+ await interpreter.interprete(
+ f"blockchain_wallet_transfer({blockchain_descriptor}, {wallet_descriptor}, 'ETH', 0.1, address='0xrecipient123')"
+ )
+ octobot_trading.constants.ALLOW_FUNDS_TRANSFER = True
+ result = await interpreter.interprete(
+ f"blockchain_wallet_transfer({blockchain_descriptor}, {wallet_descriptor}, 'ETH', 0.1, address='0xrecipient123')"
+ )
+ assert "created_transactions" in result
+ assert len(result["created_transactions"]) == 1
+ tx = result["created_transactions"][0]
+ assert tx[octobot_trading.enums.ExchangeConstantsTransactionColumns.TXID.value]
+ assert tx[octobot_trading.enums.ExchangeConstantsTransactionColumns.ADDRESS_FROM.value] == "0x1234567890123456789012345678901234567890"
+ assert tx[octobot_trading.enums.ExchangeConstantsTransactionColumns.ADDRESS_TO.value] == "0xrecipient123"
+ assert tx[octobot_trading.enums.ExchangeConstantsTransactionColumns.AMOUNT.value] == decimal.Decimal('0.1')
+ assert tx[octobot_trading.enums.ExchangeConstantsTransactionColumns.CURRENCY.value] == "ETH"
+ assert tx[octobot_trading.enums.ExchangeConstantsTransactionColumns.FEE.value] is None
+ assert tx[octobot_trading.enums.ExchangeConstantsTransactionColumns.INTERNAL.value] is False
+ result = await interpreter.interprete(
+ f"blockchain_wallet_transfer({blockchain_descriptor}, {wallet_descriptor}, 'ETH', 0.1, destination_exchange='binanceus')"
+ )
+ assert result and isinstance(result, dict)
+ assert "created_transactions" in result
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/__init__.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/__init__.py
index 85f601cb0..6ab032d26 100644
--- a/packages/tentacles/Meta/DSL_operators/exchange_operators/__init__.py
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/__init__.py
@@ -21,10 +21,15 @@
ExchangeDataDependency,
create_ohlcv_operators,
)
-import tentacles.Meta.DSL_operators.exchange_operators.exchange_private_data_operators
-from tentacles.Meta.DSL_operators.exchange_operators.exchange_private_data_operators import (
- PortfolioOperator,
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators
+from tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators import (
create_portfolio_operators,
+ create_cancel_order_operators,
+ create_create_order_operators,
+ create_futures_contracts_operators,
+ CREATED_ORDERS_KEY,
+ CANCELLED_ORDERS_KEY,
+ CREATED_WITHDRAWALS_KEY,
)
@@ -32,6 +37,11 @@
"OHLCVOperator",
"ExchangeDataDependency",
"create_ohlcv_operators",
- "PortfolioOperator",
"create_portfolio_operators",
+ "create_cancel_order_operators",
+ "create_create_order_operators",
+ "create_futures_contracts_operators",
+ "CREATED_ORDERS_KEY",
+ "CANCELLED_ORDERS_KEY",
+ "CREATED_WITHDRAWALS_KEY",
]
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_operator.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_operator.py
index 363647dc7..c644012fc 100644
--- a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_operator.py
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_operator.py
@@ -14,17 +14,13 @@
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
-import octobot_trading.exchanges
-
-import octobot_commons.dsl_interpreter.operators.call_operator as dsl_interpreter_call_operator
-import octobot_trading.modes.script_keywords as script_keywords
+import octobot_commons.dsl_interpreter
EXCHANGE_LIBRARY = "exchange"
-UNINITIALIZED_VALUE = object()
-class ExchangeOperator(dsl_interpreter_call_operator.CallOperator):
+class ExchangeOperator(octobot_commons.dsl_interpreter.PreComputingCallOperator):
@staticmethod
def get_library() -> str:
@@ -32,9 +28,3 @@ def get_library() -> str:
Get the library of the operator.
"""
return EXCHANGE_LIBRARY
-
- async def get_context(
- self, exchange_manager: octobot_trading.exchanges.ExchangeManager
- ) -> script_keywords.Context:
- # todo later: handle exchange manager without initialized trading modes
- return script_keywords.get_base_context(next(iter(exchange_manager.trading_modes)))
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/__init__.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/__init__.py
new file mode 100644
index 000000000..b2153d78c
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/__init__.py
@@ -0,0 +1,44 @@
+# pylint: disable=R0801
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.portfolio_operators
+from tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.portfolio_operators import (
+ create_portfolio_operators,
+ CREATED_WITHDRAWALS_KEY,
+)
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.cancel_order_operators
+from tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.cancel_order_operators import (
+ create_cancel_order_operators,
+ CANCELLED_ORDERS_KEY,
+)
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.create_order_operators
+from tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.create_order_operators import (
+ create_create_order_operators, CREATED_ORDERS_KEY
+)
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.futures_contracts_operators
+from tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.futures_contracts_operators import (
+ create_futures_contracts_operators,
+)
+__all__ = [
+ "create_portfolio_operators",
+ "create_cancel_order_operators",
+ "create_create_order_operators",
+ "create_futures_contracts_operators",
+ "CREATED_ORDERS_KEY",
+ "CANCELLED_ORDERS_KEY",
+ "CREATED_WITHDRAWALS_KEY",
+]
\ No newline at end of file
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/cancel_order_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/cancel_order_operators.py
new file mode 100644
index 000000000..e8334b86c
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/cancel_order_operators.py
@@ -0,0 +1,112 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+
+import octobot_commons.constants
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.signals
+import octobot_trading.exchanges
+import octobot_trading.enums
+import octobot_trading.errors
+import octobot_trading.modes.abstract_trading_mode
+import octobot_trading.dsl
+
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_operator as exchange_operator
+
+
+CANCELLED_ORDERS_KEY = "cancelled_orders"
+
+
+def create_cancel_order_operators(
+ exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager],
+ trading_mode: typing.Optional[octobot_trading.modes.abstract_trading_mode.AbstractTradingMode] = None,
+ dependencies: typing.Optional[octobot_commons.signals.SignalDependencies] = None,
+ wait_for_cancelling: bool = True,
+) -> list:
+
+ class _CancelOrderOperator(exchange_operator.ExchangeOperator):
+ DESCRIPTION = "Cancels one or many orders"
+ EXAMPLE = "cancel_order('BTC/USDT', side='buy')"
+
+ @staticmethod
+ def get_name() -> str:
+ return "cancel_order"
+
+ @staticmethod
+ def get_library() -> str:
+ # this is a contextual operator, so it should not be included by default in the get_all_operators function return values
+ return octobot_commons.constants.CONTEXTUAL_OPERATORS_LIBRARY
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="symbol", description="the symbol of the orders to cancel", required=True, type=str, default=None),
+ dsl_interpreter.OperatorParameter(name="side", description="the side of the orders to cancel", required=False, type=str, default=None),
+ dsl_interpreter.OperatorParameter(name="tag", description="the tag of the orders to cancel", required=False, type=str, default=None),
+ dsl_interpreter.OperatorParameter(name="exchange_order_ids", description="the exchange id of the orders to cancel", required=False, type=list[str], default=None),
+ ]
+
+ def get_dependencies(self) -> typing.List[dsl_interpreter.InterpreterDependency]:
+ local_dependencies = []
+ if symbol := self.get_input_value_by_parameter().get("symbol"):
+ local_dependencies.append(octobot_trading.dsl.SymbolDependency(symbol=symbol))
+ return super().get_dependencies() + local_dependencies
+
+ async def pre_compute(self) -> None:
+ await super().pre_compute()
+ if exchange_manager is None:
+ raise octobot_commons.errors.DSLInterpreterError(
+ "exchange_manager is required for cancel_order operator"
+ )
+ cancelled_order_ids = []
+ param_by_name = self.get_computed_value_by_parameter()
+ if side := param_by_name.get("side"):
+ side = octobot_trading.enums.TradeOrderSide(side)
+ exchange_order_ids = param_by_name.get("exchange_order_ids")
+ to_cancel = [
+ order
+ for order in exchange_manager.exchange_personal_data.orders_manager.get_open_orders(
+ symbol=param_by_name.get("symbol"), tag=param_by_name.get("tag"), active=None
+ )
+ if (
+ not (order.is_cancelled() or order.is_closed())
+ and (side is None or (side is order.side))
+ and (exchange_order_ids is None or (order.exchange_order_id in exchange_order_ids)) # type: ignore
+ )
+ ]
+ for order in to_cancel:
+ if trading_mode:
+ cancelled, _ = await trading_mode.cancel_order(
+ order, wait_for_cancelling=wait_for_cancelling, dependencies=dependencies
+ )
+ else:
+ cancelled = await exchange_manager.trader.cancel_order(
+ order, wait_for_cancelling=wait_for_cancelling
+ )
+ if cancelled:
+ cancelled_order_ids.append(order.exchange_order_id)
+ if not cancelled_order_ids:
+ description = {k: v for k, v in param_by_name.items() if v}
+ raise octobot_trading.errors.OrderDescriptionNotFoundError(
+ f"No [{exchange_manager.exchange_name}] order found matching {description}"
+ )
+ self.value = {CANCELLED_ORDERS_KEY: cancelled_order_ids}
+
+
+ return [
+ _CancelOrderOperator,
+ ]
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/create_order_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/create_order_operators.py
new file mode 100644
index 000000000..9649c67e9
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/create_order_operators.py
@@ -0,0 +1,256 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+import asyncio
+import json
+
+import octobot_commons.constants
+import octobot_commons.errors
+import octobot_commons.signals as commons_signals
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.tentacles_management as tentacles_management
+
+import octobot_trading.personal_data
+import octobot_trading.exchanges
+import octobot_trading.enums
+import octobot_trading.modes
+import octobot_trading.errors
+import octobot_trading.dsl
+
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_operator as exchange_operator
+
+
+CREATED_ORDERS_KEY = "created_orders"
+
+
+_CANCEL_POLICIES_CACHE = {}
+def _parse_cancel_policy(kwargs: dict) -> typing.Optional[octobot_trading.personal_data.OrderCancelPolicy]:
+ if policy := kwargs.get("cancel_policy"):
+ lowercase_policy = policy.casefold()
+ if not _CANCEL_POLICIES_CACHE:
+ _CANCEL_POLICIES_CACHE.update({
+ policy.__name__.casefold(): policy
+ for policy in tentacles_management.get_all_classes_from_parent(octobot_trading.personal_data.OrderCancelPolicy)
+ })
+ try:
+ policy_class = _CANCEL_POLICIES_CACHE[lowercase_policy]
+ policy_params = kwargs.get("cancel_policy_params")
+ parsed_policy_params = json.loads(policy_params.replace("'", '"')) if isinstance(policy_params, str) else policy_params
+ return policy_class(**(parsed_policy_params or {})) # type: ignore
+ except KeyError:
+ raise octobot_commons.errors.InvalidParametersError(
+ f"Unknown cancel policy: {policy}. Available policies: {', '.join(_CANCEL_POLICIES_CACHE.keys())}"
+ )
+ return None
+
+
+class CreateOrderOperator(exchange_operator.ExchangeOperator):
+ def __init__(self, *parameters: dsl_interpreter.OperatorParameterType, **kwargs: typing.Any):
+ super().__init__(*parameters, **kwargs)
+ self.param_by_name: dict[str, dsl_interpreter.ComputedOperatorParameterType] = dsl_interpreter.UNINITIALIZED_VALUE # type: ignore
+
+ @staticmethod
+ def get_library() -> str:
+ # this is a contextual operator, so it should not be included by default in the get_all_operators function return values
+ return octobot_commons.constants.CONTEXTUAL_OPERATORS_LIBRARY
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return (
+ cls.get_first_required_parameters() +
+ cls.get_second_required_parameters() +
+ cls.get_last_parameters()
+ )
+
+ @classmethod
+ def get_first_required_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="side", description="the side of the order", required=True, type=str),
+ dsl_interpreter.OperatorParameter(name="symbol", description="the symbol of the order", required=True, type=str),
+ dsl_interpreter.OperatorParameter(name="amount", description="the amount of the order", required=True, type=float),
+ ]
+
+ @classmethod
+ def get_second_required_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return []
+
+ @classmethod
+ def get_last_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="reduce_only", description="whether the order is reduce only", required=False, type=bool),
+ dsl_interpreter.OperatorParameter(name="tag", description="the tag of the order", required=False, type=str),
+ dsl_interpreter.OperatorParameter(name="take_profit_prices", description="the price or price offset of the take profit order(s)", required=False, type=list[str]),
+ dsl_interpreter.OperatorParameter(name="take_profit_volume_percents", description="% volume of the entry for each take profit", required=False, type=list[float]),
+ dsl_interpreter.OperatorParameter(name="stop_loss_price", description="the stop loss price or price offset of the order", required=False, type=str),
+ dsl_interpreter.OperatorParameter(name="trailing_profile", description="the trailing profile of the order", required=False, type=dict),
+ dsl_interpreter.OperatorParameter(name="cancel_policy", description="the cancel policy of the order", required=False, type=str),
+ dsl_interpreter.OperatorParameter(name="cancel_policy_params", description="the cancel policy params of the order", required=False, type=dict),
+ dsl_interpreter.OperatorParameter(name="active_order_swap_strategy", description="the type of the active order swap strategy", required=False, type=str),
+ dsl_interpreter.OperatorParameter(name="active_order_swap_strategy_params", description="the params of the active order swap strategy", required=False, type=dict),
+ dsl_interpreter.OperatorParameter(name="params", description="additional params for the order", required=False, type=dict),
+ dsl_interpreter.OperatorParameter(name="allow_holdings_adaptation", description="allow reducing the order amount to account for available holdings", required=False, type=bool),
+ ]
+
+ def get_dependencies(self) -> typing.List[dsl_interpreter.InterpreterDependency]:
+ local_dependencies = []
+ if symbol := self.get_input_value_by_parameter().get("symbol"):
+ local_dependencies.append(octobot_trading.dsl.SymbolDependency(symbol=symbol))
+ return super().get_dependencies() + local_dependencies
+
+ async def create_base_orders_and_associated_elements(self) -> list[octobot_trading.personal_data.Order]:
+ order_factory = self.get_order_factory()
+ maybe_cancel_policy = _parse_cancel_policy(self.param_by_name)
+ try:
+ amount = self.param_by_name["amount"]
+ if not amount:
+ raise octobot_commons.errors.InvalidParameterFormatError("amount is missing")
+ orders = await order_factory.create_base_orders_and_associated_elements(
+ order_type=self.param_by_name["order_type"],
+ symbol=self.param_by_name["symbol"],
+ side=octobot_trading.enums.TradeOrderSide(self.param_by_name["side"]),
+ amount=amount,
+ price=self.param_by_name.get("price", None),
+ reduce_only=self.param_by_name.get("reduce_only", False),
+ allow_holdings_adaptation=self.param_by_name.get("allow_holdings_adaptation", False),
+ tag=self.param_by_name.get("tag", None),
+ exchange_creation_params=self.param_by_name.get("params", None),
+ cancel_policy=maybe_cancel_policy,
+ stop_loss_price=self.param_by_name.get("stop_loss_price", None),
+ take_profit_prices=self.param_by_name.get("take_profit_prices", None),
+ take_profit_volume_percents=self.param_by_name.get("take_profit_volume_percents", None),
+ trailing_profile_type=self.param_by_name.get("trailing_profile", None),
+ active_order_swap_strategy_type=self.param_by_name.get(
+ "active_order_swap_strategy", octobot_trading.personal_data.StopFirstActiveOrderSwapStrategy.__name__
+ ),
+ active_order_swap_strategy_params=self.param_by_name.get("active_order_swap_strategy_params", {}),
+ )
+ except octobot_trading.errors.UnSupportedSymbolError as e:
+ raise octobot_commons.errors.InvalidParametersError(
+ f"Invalid parameters: {e}"
+ ) from e
+ except octobot_trading.errors.InvalidArgumentError as e:
+ raise octobot_commons.errors.InvalidParameterFormatError(e) from e
+ except asyncio.TimeoutError as e:
+ raise octobot_commons.errors.DSLInterpreterError(
+ f"Impossible to create order for {self.param_by_name['symbol']} on {order_factory.exchange_manager.exchange_name}: {e} and is necessary to compute the order details."
+ )
+ return orders
+
+ async def pre_compute(self) -> None:
+ await super().pre_compute()
+ self.param_by_name = self.get_computed_value_by_parameter()
+ self.param_by_name["order_type"] = self.get_order_type()
+ order_factory = self.get_order_factory()
+ orders = await self.create_base_orders_and_associated_elements()
+ created_orders = []
+ for order in orders:
+ created_order = await order_factory.create_order_on_exchange(order)
+ if created_order is None:
+ raise octobot_commons.errors.DSLInterpreterError(
+ f"Failed to create {order.symbol} {order.order_type.name} order on {order.exchange_manager.exchange_name}"
+ )
+ else:
+ created_orders.append(created_order)
+ self.value = {CREATED_ORDERS_KEY: [order.to_dict() for order in created_orders]}
+
+ def get_order_type(self) -> octobot_trading.enums.TraderOrderType:
+ raise NotImplementedError("get_order_type must be implemented")
+
+ def get_order_factory(self) -> octobot_trading.personal_data.OrderFactory:
+ raise NotImplementedError("get_order_factory must be implemented")
+
+def create_create_order_operators(
+ exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager],
+ trading_mode: typing.Optional[octobot_trading.modes.AbstractTradingMode] = None,
+ dependencies: typing.Optional[commons_signals.SignalDependencies] = None,
+ wait_for_creation: bool = True,
+ try_to_handle_unconfigured_symbol: bool = False,
+) -> list[type[CreateOrderOperator]]:
+ _order_factory = octobot_trading.personal_data.OrderFactory(
+ exchange_manager, trading_mode, dependencies, wait_for_creation, try_to_handle_unconfigured_symbol
+ )
+
+ class _FactoryMixin:
+ def get_order_factory(self) -> octobot_trading.personal_data.OrderFactory:
+ try:
+ _order_factory.validate()
+ except ValueError as e:
+ raise octobot_commons.errors.DSLInterpreterError(e) from e
+ return _order_factory
+
+ class _MarketOrderOperator(_FactoryMixin, CreateOrderOperator):
+ DESCRIPTION = "Creates a market order"
+ EXAMPLE = "market('buy', 'BTC/USDT', 0.01)"
+
+ @staticmethod
+ def get_name() -> str:
+ return "market"
+
+ def get_order_type(self) -> octobot_trading.enums.TraderOrderType:
+ return (
+ octobot_trading.enums.TraderOrderType.BUY_MARKET
+ if self.param_by_name["side"] == octobot_trading.enums.TradeOrderSide.BUY.value else octobot_trading.enums.TraderOrderType.SELL_MARKET
+ )
+
+ class _LimitOrderOperator(_FactoryMixin, CreateOrderOperator):
+ DESCRIPTION = "Creates a limit order"
+ EXAMPLE = "limit('buy', 'BTC/USDT', 0.01, price='-1%')"
+
+ @staticmethod
+ def get_name() -> str:
+ return "limit"
+
+ @classmethod
+ def get_second_required_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="price", description="the limit price of the order: a flat or offset price", required=True, type=str),
+ ]
+
+ def get_order_type(self) -> octobot_trading.enums.TraderOrderType:
+ return (
+ octobot_trading.enums.TraderOrderType.BUY_LIMIT
+ if self.param_by_name["side"] == octobot_trading.enums.TradeOrderSide.BUY.value else octobot_trading.enums.TraderOrderType.SELL_LIMIT
+ )
+
+ class _StopLossOrderOperator(_FactoryMixin, CreateOrderOperator):
+ DESCRIPTION = "Creates a stop market order"
+ EXAMPLE = "stop_loss('buy', 'BTC/USDT', 0.01, price='-1%')"
+
+ @staticmethod
+ def get_name() -> str:
+ return "stop_loss"
+
+
+ async def pre_compute(self) -> None:
+ self.get_order_factory()._ensure_supported_order_type(
+ octobot_trading.enums.TraderOrderType.STOP_LOSS
+ )
+ return await super().pre_compute()
+
+ @classmethod
+ def get_second_required_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="price", description="the trigger price of the order: a flat or offset price", required=True, type=str),
+ ]
+
+ def get_order_type(self) -> octobot_trading.enums.TraderOrderType:
+ return octobot_trading.enums.TraderOrderType.STOP_LOSS
+
+ return [
+ _MarketOrderOperator,
+ _LimitOrderOperator,
+ _StopLossOrderOperator,
+ ]
\ No newline at end of file
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/futures_contracts_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/futures_contracts_operators.py
new file mode 100644
index 000000000..a484b7285
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/futures_contracts_operators.py
@@ -0,0 +1,69 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+import decimal
+
+import octobot_commons.constants
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_trading.exchanges
+
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_operator as exchange_operator
+
+
+class FuturesContractsOperator(exchange_operator.ExchangeOperator):
+ @staticmethod
+ def get_library() -> str:
+ # this is a contextual operator, so it should not be included by default in the get_all_operators function return values
+ return octobot_commons.constants.CONTEXTUAL_OPERATORS_LIBRARY
+
+
+def create_futures_contracts_operators(
+ exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager],
+) -> typing.List[type[FuturesContractsOperator]]:
+
+ class _SetLeverageOperator(FuturesContractsOperator):
+ DESCRIPTION = "Sets the leverage for the futures contract"
+ EXAMPLE = "set_leverage('BTC/USDT:USDT', 10)"
+
+ @staticmethod
+ def get_name() -> str:
+ return "set_leverage"
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="symbol", description="the symbol of the futures contract", required=True, type=str),
+ dsl_interpreter.OperatorParameter(name="leverage", description="the leverage to set", required=True, type=float),
+ ]
+
+ async def pre_compute(self) -> None:
+ await super().pre_compute()
+ if exchange_manager is None:
+ raise octobot_commons.errors.DSLInterpreterError(
+ "exchange_manager is required for set_leverage operator"
+ )
+ param_by_name = self.get_computed_value_by_parameter()
+ leverage = decimal.Decimal(str(param_by_name["leverage"]))
+ await exchange_manager.trader.set_leverage(
+ param_by_name["symbol"],
+ None,
+ leverage,
+ )
+ self.value = float(leverage)
+
+
+ return [_SetLeverageOperator]
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/portfolio_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/portfolio_operators.py
new file mode 100644
index 000000000..7464a4e48
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_personal_data_operators/portfolio_operators.py
@@ -0,0 +1,135 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+import dataclasses
+import decimal
+
+import octobot_commons.dataclasses
+import octobot_commons.constants
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_trading.personal_data
+import octobot_trading.exchanges
+import octobot_trading.api
+
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_operator as exchange_operator
+
+
+CREATED_WITHDRAWALS_KEY = "created_withdrawals"
+
+
+@dataclasses.dataclass
+class WithdrawFundsParams(octobot_commons.dataclasses.FlexibleDataclass):
+ asset: str
+ network: str # network to withdraw to
+ address: str # recipient address of the withdrawal
+ amount: typing.Optional[float] = None # defaults to all available balance if unspecified
+ tag: str = ""
+ params: dict = dataclasses.field(default_factory=dict) # extra parameters specific to the exchange API endpoint
+
+
+class PortfolioOperator(exchange_operator.ExchangeOperator):
+ @staticmethod
+ def get_library() -> str:
+ # this is a contextual operator, so it should not be included by default in the get_all_operators function return values
+ return octobot_commons.constants.CONTEXTUAL_OPERATORS_LIBRARY
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="asset", description="the asset to get the value for", required=False, type=str),
+ ]
+
+
+def create_portfolio_operators(
+ exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager],
+) -> typing.List[type[PortfolioOperator]]:
+
+ def _get_asset_holdings(asset: str) -> octobot_trading.personal_data.Asset:
+ if exchange_manager is None:
+ raise octobot_commons.errors.DSLInterpreterError(
+ "exchange_manager is required for portfolio operators"
+ )
+ return octobot_trading.api.get_portfolio_currency(exchange_manager, asset)
+
+ class _TotalOperator(PortfolioOperator):
+ DESCRIPTION = "Returns the total holdings of the asset in the portfolio"
+ EXAMPLE = "total('BTC')"
+
+ @staticmethod
+ def get_name() -> str:
+ return "total"
+
+ async def pre_compute(self) -> None:
+ await super().pre_compute()
+ asset = self.get_computed_parameters()[0]
+ self.value = float(_get_asset_holdings(asset).total)
+
+ class _AvailableOperator(PortfolioOperator):
+ DESCRIPTION = "Returns the available holdings of the asset in the portfolio"
+ EXAMPLE = "available('BTC')"
+
+ @staticmethod
+ def get_name() -> str:
+ return "available"
+
+ async def pre_compute(self) -> None:
+ await super().pre_compute()
+ asset = self.get_computed_parameters()[0]
+ self.value = float(_get_asset_holdings(asset).available)
+
+ class _WithdrawOperator(PortfolioOperator):
+ DESCRIPTION = "Withdraws an asset from the exchange's portfolio. requires ALLOW_FUNDS_TRANSFER env to be True (disabled by default to protect funds)"
+ EXAMPLE = "withdraw('BTC', 'ethereum', '0x1234567890abcdef1234567890abcdef12345678', 0.1)"
+
+ @staticmethod
+ def get_name() -> str:
+ return "withdraw"
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="asset", description="the asset to withdraw", required=True, type=str),
+ dsl_interpreter.OperatorParameter(name="network", description="the network to withdraw to", required=True, type=str),
+ dsl_interpreter.OperatorParameter(name="address", description="the address to withdraw to", required=True, type=str),
+ dsl_interpreter.OperatorParameter(name="amount", description="the amount to withdraw", required=False, type=float, default=None),
+ dsl_interpreter.OperatorParameter(name="tag", description="a tag to associate with the withdrawal", required=False, type=str, default=None),
+ dsl_interpreter.OperatorParameter(name="params", description="extra parameters specific to the exchange API endpoint", required=False, type=dict),
+ ]
+
+
+ async def pre_compute(self) -> None:
+ await super().pre_compute()
+ if exchange_manager is None:
+ raise octobot_commons.errors.DSLInterpreterError(
+ "exchange_manager is required for withdraw operator"
+ )
+ param_by_name = self.get_computed_value_by_parameter()
+ withdraw_funds_params = WithdrawFundsParams.from_dict(param_by_name)
+ amount = withdraw_funds_params.amount or (
+ octobot_trading.api.get_portfolio_currency(exchange_manager, withdraw_funds_params.asset).available
+ )
+ created_withdrawal = await exchange_manager.trader.withdraw(
+ withdraw_funds_params.asset,
+ decimal.Decimal(str(amount)),
+ withdraw_funds_params.network,
+ withdraw_funds_params.address,
+ tag=withdraw_funds_params.tag,
+ params=withdraw_funds_params.params
+ )
+ self.value = {CREATED_WITHDRAWALS_KEY: [created_withdrawal]}
+
+ return [_TotalOperator, _AvailableOperator, _WithdrawOperator]
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_private_data_operators/portfolio_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_private_data_operators/portfolio_operators.py
deleted file mode 100644
index b3dd7adb3..000000000
--- a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_private_data_operators/portfolio_operators.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# pylint: disable=missing-class-docstring,missing-function-docstring
-# Drakkar-Software OctoBot-Commons
-# Copyright (c) Drakkar-Software, All rights reserved.
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 3.0 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library.
-import typing
-
-import octobot_commons.constants
-import octobot_commons.errors
-import octobot_commons.dsl_interpreter as dsl_interpreter
-import octobot_trading.personal_data
-import octobot_trading.exchanges
-import octobot_trading.api
-
-import tentacles.Meta.DSL_operators.exchange_operators.exchange_operator as exchange_operator
-
-
-class PortfolioOperator(exchange_operator.ExchangeOperator):
- def __init__(self, *parameters: dsl_interpreter.OperatorParameterType, **kwargs: typing.Any):
- super().__init__(*parameters, **kwargs)
- self.value: dsl_interpreter_operator.ComputedOperatorParameterType = exchange_operator.UNINITIALIZED_VALUE # type: ignore
-
- @staticmethod
- def get_library() -> str:
- # this is a contextual operator, so it should not be included by default in the get_all_operators function return values
- return octobot_commons.constants.CONTEXTUAL_OPERATORS_LIBRARY
-
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
- return [
- dsl_interpreter.OperatorParameter(name="asset", description="the asset to get the value for", required=False, type=str),
- ]
-
- def compute(self) -> dsl_interpreter.ComputedOperatorParameterType:
- if self.value is exchange_operator.UNINITIALIZED_VALUE:
- raise octobot_commons.errors.DSLInterpreterError("{self.__class__.__name__} has not been initialized")
- return self.value
-
-
-def create_portfolio_operators(
- exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager],
-) -> typing.List[type[PortfolioOperator]]:
-
- def _get_asset_holdings(asset: str) -> octobot_trading.personal_data.Asset:
- return octobot_trading.api.get_portfolio_currency(exchange_manager, asset)
-
- class _TotalOperator(PortfolioOperator):
- DESCRIPTION = "Returns the total holdings of the asset in the portfolio"
- EXAMPLE = "total('BTC')"
-
- @staticmethod
- def get_name() -> str:
- return "total"
-
- async def pre_compute(self) -> None:
- await super().pre_compute()
- asset = self.get_computed_parameters()[0]
- self.value = float(_get_asset_holdings(asset).total)
-
- class _AvailableOperator(PortfolioOperator):
- DESCRIPTION = "Returns the available holdings of the asset in the portfolio"
- EXAMPLE = "available('BTC')"
-
- @staticmethod
- def get_name() -> str:
- return "available"
-
- async def pre_compute(self) -> None:
- await super().pre_compute()
- asset = self.get_computed_parameters()[0]
- self.value = float(_get_asset_holdings(asset).available)
-
-
- return [_TotalOperator, _AvailableOperator]
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_public_data_operators/ohlcv_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_public_data_operators/ohlcv_operators.py
index 3438917e9..34e54d40c 100644
--- a/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_public_data_operators/ohlcv_operators.py
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/exchange_public_data_operators/ohlcv_operators.py
@@ -27,33 +27,27 @@
import octobot_trading.exchange_data
import octobot_trading.api
import octobot_trading.constants
+import octobot_trading.dsl
import tentacles.Meta.DSL_operators.exchange_operators.exchange_operator as exchange_operator
@dataclasses.dataclass
-class ExchangeDataDependency(dsl_interpreter.InterpreterDependency):
- exchange_manager_id: str
- symbol: typing.Optional[str]
- time_frame: typing.Optional[str]
+class ExchangeDataDependency(octobot_trading.dsl.SymbolDependency):
data_source: str = octobot_trading.constants.OHLCV_CHANNEL
def __hash__(self) -> int:
- return hash((self.exchange_manager_id, self.symbol, self.time_frame, self.data_source))
+ return hash((self.symbol, self.time_frame, self.data_source))
class OHLCVOperator(exchange_operator.ExchangeOperator):
- def __init__(self, *parameters: dsl_interpreter.OperatorParameterType, **kwargs: typing.Any):
- super().__init__(*parameters, **kwargs)
- self.value: dsl_interpreter_operator.ComputedOperatorParameterType = exchange_operator.UNINITIALIZED_VALUE # type: ignore
-
@staticmethod
def get_library() -> str:
# this is a contextual operator, so it should not be included by default in the get_all_operators function return values
return octobot_commons.constants.CONTEXTUAL_OPERATORS_LIBRARY
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
return [
dsl_interpreter.OperatorParameter(name="symbol", description="the symbol to get the OHLCV data for", required=False, type=str),
dsl_interpreter.OperatorParameter(name="time_frame", description="the time frame to get the OHLCV data for", required=False, type=str),
@@ -69,12 +63,6 @@ def get_symbol_and_time_frame(self) -> typing.Tuple[typing.Optional[str], typing
)
return None, None
- def compute(self) -> dsl_interpreter.ComputedOperatorParameterType:
- if self.value is exchange_operator.UNINITIALIZED_VALUE:
- raise octobot_commons.errors.DSLInterpreterError("{self.__class__.__name__} has not been initialized")
- return self.value
-
-
def create_ohlcv_operators(
exchange_manager: typing.Optional[octobot_trading.exchanges.ExchangeManager],
symbol: typing.Optional[str],
@@ -84,13 +72,14 @@ def create_ohlcv_operators(
] = None
) -> typing.List[type[OHLCVOperator]]:
- if exchange_manager is None and candle_manager_by_time_frame_by_symbol is None:
- raise octobot_commons.errors.InvalidParametersError("exchange_manager or candle_manager_by_time_frame_by_symbol must be provided")
-
def _get_candles_values_with_latest_kline_if_available(
input_symbol: typing.Optional[str], input_time_frame: typing.Optional[str],
value_type: commons_enums.PriceIndexes, limit: int = -1
) -> np.ndarray:
+ if exchange_manager is None and candle_manager_by_time_frame_by_symbol is None:
+ raise octobot_commons.errors.DSLInterpreterError(
+ "exchange_manager or candle_manager_by_time_frame_by_symbol must be provided"
+ )
_symbol = input_symbol or symbol
_time_frame = input_time_frame or time_frame
if exchange_manager is None:
@@ -125,20 +114,28 @@ def _get_candles_values_with_latest_kline_if_available(
)
return candles_values
- def _get_dependencies() -> typing.List[ExchangeDataDependency]:
+ def _static_get_dependencies() -> typing.List[ExchangeDataDependency]:
return [
ExchangeDataDependency(
- exchange_manager_id=octobot_trading.api.get_exchange_manager_id(exchange_manager),
symbol=symbol,
- time_frame=time_frame
+ time_frame=time_frame,
)
- ]
+ ] if symbol else []
class _LocalOHLCVOperator(OHLCVOperator):
PRICE_INDEX: commons_enums.PriceIndexes = None # type: ignore
def get_dependencies(self) -> typing.List[dsl_interpreter.InterpreterDependency]:
- return super().get_dependencies() + _get_dependencies()
+ local_dependencies = _static_get_dependencies()
+ param_by_name = self.get_input_value_by_parameter()
+ if symbol := param_by_name.get("symbol"):
+ symbol_dep = ExchangeDataDependency(
+ symbol=symbol,
+ time_frame=param_by_name.get("time_frame"),
+ )
+ if symbol_dep not in local_dependencies:
+ local_dependencies.append(symbol_dep)
+ return super().get_dependencies() + local_dependencies
async def pre_compute(self) -> None:
await super().pre_compute()
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/__init__.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/__init__.py
index 8e78ab0a6..14a64e624 100644
--- a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/__init__.py
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/__init__.py
@@ -13,250 +13,64 @@
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
-import mock
import pytest
-import typing
+import pytest_asyncio
-import numpy as np
+import octobot_commons.constants as commons_constants
+import octobot_commons.tests.test_config as test_config
+import octobot_backtesting.backtesting as backtesting_module
+import octobot_backtesting.constants as backtesting_constants
+import octobot_backtesting.time as backtesting_time
+import octobot_trading.exchanges as exchanges
+import octobot_trading.exchanges.exchange_manager as exchange_manager_module
-import octobot_commons.enums
-import octobot_commons.errors
-import octobot_commons.constants
-import octobot_commons.dsl_interpreter as dsl_interpreter
-import tentacles.Meta.DSL_operators.exchange_operators as exchange_operators
+pytestmark = pytest.mark.asyncio
-SYMBOL = "BTC/USDT"
-SYMBOL2 = "ETH/USDT"
-TIME_FRAME = "1h"
-TIME_FRAME2 = "4h"
-KLINE_SIGNATURE = 0.00666
+@pytest_asyncio.fixture
+async def backtesting_config():
+ config = dict(test_config.load_test_config())
+ config[backtesting_constants.CONFIG_BACKTESTING] = {}
+ config[backtesting_constants.CONFIG_BACKTESTING][commons_constants.CONFIG_ENABLED_OPTION] = True
+ return config
-@pytest.fixture
-def historical_prices():
- return np.array([
- 81.59, 81.06, 82.87, 83, 83.61, 83.15, 82.84, 83.99, 84.55, 84.36, 85.53, 86.54, 86.89,
- 87.77, 87.29, 87.18, 87.01, 89.02, 89.68, 90.36, 92.83, 93.37, 93.02, 93.45, 94.13,
- 93.12, 93.18, 92.08, 92.82, 92.92, 92.25, 92.22
- ])
-@pytest.fixture
-def historical_times(historical_prices):
- return np.array([
- i + 10 for i in range(len(historical_prices))
- ], dtype=np.float64)
-
-
-@pytest.fixture
-def historical_volume(historical_prices):
- base_volume_pattern = [
- # will create an int np.array, which will updated to float64 to comply with tulipy requirements
- 903, 1000, 2342, 992, 900, 1231, 1211, 1113
- ]
- return np.array(base_volume_pattern*(len(historical_prices) // len(base_volume_pattern) + 1), dtype=np.float64)[:len(historical_prices)]
-
-
-def _get_candle_managers(historical_prices, historical_volume, historical_times):
- btc_1h_candles_manager = mock.Mock(
- get_symbol_open_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy()),
- get_symbol_high_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy()),
- get_symbol_low_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy()),
- get_symbol_close_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy()),
- get_symbol_volume_candles=mock.Mock(side_effect=lambda _ : historical_volume.copy()),
- get_symbol_time_candles=mock.Mock(side_effect=lambda _ : historical_times.copy()),
- time_candles_index=len(historical_times),
- open_candles_index=len(historical_prices),
- high_candles_index=len(historical_prices),
- low_candles_index=len(historical_prices),
- close_candles_index=len(historical_prices),
- volume_candles_index=len(historical_volume),
- time_candles=historical_times,
- )
- eth_1h_candles_manager = mock.Mock(
- get_symbol_open_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() / 2),
- get_symbol_high_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() / 2),
- get_symbol_low_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() / 2),
- get_symbol_close_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() / 2),
- get_symbol_volume_candles=mock.Mock(side_effect=lambda _ : historical_volume.copy() / 2),
- get_symbol_time_candles=mock.Mock(side_effect=lambda _ : historical_times.copy() / 2),
- time_candles_index=len(historical_times),
- open_candles_index=len(historical_prices),
- high_candles_index=len(historical_prices),
- low_candles_index=len(historical_prices),
- close_candles_index=len(historical_prices),
- volume_candles_index=len(historical_volume),
- time_candles=historical_times / 2,
- )
- btc_4h_candles_manager = mock.Mock(
- get_symbol_open_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() * 2),
- get_symbol_high_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() * 2),
- get_symbol_low_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() * 2),
- get_symbol_close_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() * 2),
- get_symbol_volume_candles=mock.Mock(side_effect=lambda _ : historical_volume.copy() * 2),
- get_symbol_time_candles=mock.Mock(side_effect=lambda _ : historical_times.copy() * 2),
- time_candles_index=len(historical_times),
- open_candles_index=len(historical_prices),
- high_candles_index=len(historical_prices),
- low_candles_index=len(historical_prices),
- close_candles_index=len(historical_prices),
- volume_candles_index=len(historical_volume),
- time_candles=historical_times * 2,
- )
- return (
- btc_1h_candles_manager,
- eth_1h_candles_manager,
- btc_4h_candles_manager,
- )
-
-
-def _get_kline(candles_manager: mock.Mock, signature: float, kline_time_delta: typing.Optional[float]) -> list:
- kline = [0] * len(octobot_commons.enums.PriceIndexes)
- kline[octobot_commons.enums.PriceIndexes.IND_PRICE_TIME.value] = (
- candles_manager.get_symbol_time_candles(-1)[-1] + kline_time_delta
- if kline_time_delta is not None
- else candles_manager.get_symbol_time_candles(-1)[-1]
- )
- kline[octobot_commons.enums.PriceIndexes.IND_PRICE_OPEN.value] = candles_manager.get_symbol_open_candles(-1)[-1] + signature
- kline[octobot_commons.enums.PriceIndexes.IND_PRICE_HIGH.value] = candles_manager.get_symbol_high_candles(-1)[-1] + signature
- kline[octobot_commons.enums.PriceIndexes.IND_PRICE_LOW.value] = candles_manager.get_symbol_low_candles(-1)[-1] + signature
- kline[octobot_commons.enums.PriceIndexes.IND_PRICE_CLOSE.value] = candles_manager.get_symbol_close_candles(-1)[-1] + signature
- kline[octobot_commons.enums.PriceIndexes.IND_PRICE_VOL.value] = candles_manager.get_symbol_volume_candles(-1)[-1] + signature
- return kline
-
-
-def _get_symbol_data_factory(
- btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager, kline_type: str
-):
- def _get_symbol_data(symbol: str, **kwargs):
- symbol_candles = {}
- one_h_candles_manager = btc_1h_candles_manager if symbol == SYMBOL else eth_1h_candles_manager if symbol == SYMBOL2 else None
- four_h_candles_manager = btc_4h_candles_manager if symbol == SYMBOL else None # no 4h eth candles
- if one_h_candles_manager is None and four_h_candles_manager is None:
- raise octobot_commons.errors.InvalidParametersError(f"Symbol {symbol} not found")
- symbol_candles[octobot_commons.enums.TimeFrames(TIME_FRAME)] = one_h_candles_manager
- if four_h_candles_manager:
- symbol_candles[octobot_commons.enums.TimeFrames(TIME_FRAME2)] = four_h_candles_manager
- if kline_type == "no_kline":
- symbol_klines = {}
- elif kline_type == "same_time_kline":
- symbol_klines = {
- octobot_commons.enums.TimeFrames(TIME_FRAME): mock.Mock(kline=_get_kline(one_h_candles_manager, KLINE_SIGNATURE, None)),
- }
- if four_h_candles_manager:
- symbol_klines[octobot_commons.enums.TimeFrames(TIME_FRAME2)] = mock.Mock(kline=_get_kline(four_h_candles_manager, KLINE_SIGNATURE, None))
- elif kline_type == "new_time_kline":
- symbol_klines = {
- octobot_commons.enums.TimeFrames(TIME_FRAME): mock.Mock(kline=_get_kline(
- one_h_candles_manager, KLINE_SIGNATURE,
- octobot_commons.enums.TimeFramesMinutes[octobot_commons.enums.TimeFrames(TIME_FRAME)] * octobot_commons.constants.MINUTE_TO_SECONDS
- )),
- }
- if four_h_candles_manager:
- symbol_klines[octobot_commons.enums.TimeFrames(TIME_FRAME2)] = mock.Mock(kline=_get_kline(
- four_h_candles_manager, KLINE_SIGNATURE,
- octobot_commons.enums.TimeFramesMinutes[octobot_commons.enums.TimeFrames(TIME_FRAME2)] * octobot_commons.constants.MINUTE_TO_SECONDS
- ))
- else:
- raise NotImplementedError(f"Kline type {kline_type} not implemented")
- return mock.Mock(
- symbol_candles=symbol_candles,
- symbol_klines=symbol_klines
- )
- return _get_symbol_data
-
-
-@pytest.fixture
-def exchange_manager_with_candles(historical_prices, historical_volume, historical_times):
- btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager = _get_candle_managers(
- historical_prices, historical_volume, historical_times
- )
- return mock.Mock(
- id="exchange_manager_id",
- exchange_name="binance",
- exchange_symbols_data=mock.Mock(
- get_exchange_symbol_data=_get_symbol_data_factory(
- btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager, "no_kline"
- )
- )
+@pytest_asyncio.fixture
+async def fake_backtesting(backtesting_config):
+ return backtesting_module.Backtesting(
+ config=backtesting_config,
+ exchange_ids=[],
+ matrix_id="",
+ backtesting_files=[],
)
-@pytest.fixture
-def exchange_manager_with_candles_and_klines(historical_prices, historical_volume, historical_times):
- btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager = _get_candle_managers(
- historical_prices, historical_volume, historical_times
+@pytest_asyncio.fixture
+async def backtesting_exchange_manager(backtesting_config, fake_backtesting):
+ exchange_manager_instance = exchange_manager_module.ExchangeManager(
+ backtesting_config, "binanceus"
)
- return mock.Mock(
- id="exchange_manager_id",
- exchange_name="binance",
- exchange_symbols_data=mock.Mock(
- get_exchange_symbol_data=_get_symbol_data_factory(
- btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager, "same_time_kline"
- )
- )
- )
-
-
-@pytest.fixture
-def exchange_manager_with_candles_and_new_candle_klines(historical_prices, historical_volume, historical_times):
- btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager = _get_candle_managers(
- historical_prices, historical_volume, historical_times
- )
- return mock.Mock(
- id="exchange_manager_id",
- exchange_name="binance",
- exchange_symbols_data=mock.Mock(
- get_exchange_symbol_data=_get_symbol_data_factory(
- btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager, "new_time_kline"
- )
- )
- )
-
-
-@pytest.fixture
-def candle_manager_by_time_frame_by_symbol(historical_prices, historical_volume, historical_times):
- btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager = _get_candle_managers(
- historical_prices, historical_volume, historical_times
- )
- return {
- TIME_FRAME: {
- SYMBOL: btc_1h_candles_manager,
- SYMBOL2: eth_1h_candles_manager,
- },
- TIME_FRAME2: {
- SYMBOL: btc_4h_candles_manager,
- },
- }
-
-
-@pytest.fixture
-def interpreter(exchange_manager_with_candles):
- return dsl_interpreter.Interpreter(
- dsl_interpreter.get_all_operators() +
- exchange_operators.create_ohlcv_operators(exchange_manager_with_candles, SYMBOL, TIME_FRAME)
- )
-
-
-@pytest.fixture
-def interpreter_with_exchange_manager_and_klines(exchange_manager_with_candles_and_klines):
- return dsl_interpreter.Interpreter(
- dsl_interpreter.get_all_operators() +
- exchange_operators.create_ohlcv_operators(exchange_manager_with_candles_and_klines, SYMBOL, TIME_FRAME)
- )
-
-
-@pytest.fixture
-def interpreter_with_exchange_manager_and_new_candle_klines(exchange_manager_with_candles_and_new_candle_klines):
- return dsl_interpreter.Interpreter(
- dsl_interpreter.get_all_operators() +
- exchange_operators.create_ohlcv_operators(exchange_manager_with_candles_and_new_candle_klines, SYMBOL, TIME_FRAME)
+ exchange_manager_instance.is_backtesting = True
+ exchange_manager_instance.use_cached_markets = False
+ exchange_manager_instance.is_spot_only = True
+ exchange_manager_instance.is_margin = False
+ exchange_manager_instance.is_future = False
+ exchange_manager_instance.backtesting = fake_backtesting
+ exchange_manager_instance.backtesting.time_manager = backtesting_time.TimeManager(
+ backtesting_config
)
+ await exchange_manager_instance.initialize(exchange_config_by_exchange=None)
+ try:
+ yield exchange_manager_instance
+ finally:
+ await exchange_manager_instance.stop()
-@pytest.fixture
-def interpreter_with_candle_manager_by_time_frame_by_symbol(candle_manager_by_time_frame_by_symbol):
- return dsl_interpreter.Interpreter(
- dsl_interpreter.get_all_operators() +
- exchange_operators.create_ohlcv_operators(None, SYMBOL, TIME_FRAME, candle_manager_by_time_frame_by_symbol)
+@pytest_asyncio.fixture
+async def backtesting_trader(backtesting_config, backtesting_exchange_manager):
+ trader_instance = exchanges.TraderSimulator(
+ backtesting_config, backtesting_exchange_manager
)
+ await trader_instance.initialize()
+ return backtesting_config, backtesting_exchange_manager, trader_instance
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_cancel_order_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_cancel_order_operators.py
new file mode 100644
index 000000000..1d84ca59f
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_cancel_order_operators.py
@@ -0,0 +1,415 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import decimal
+import mock
+import pytest
+import pytest_asyncio
+
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_trading.enums
+import octobot_trading.errors as trading_errors
+import octobot_trading.personal_data as trading_personal_data
+import octobot_trading.dsl
+
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.cancel_order_operators as cancel_order_operators
+
+from tentacles.Meta.DSL_operators.exchange_operators.tests import (
+ backtesting_config,
+ fake_backtesting,
+ backtesting_exchange_manager,
+ backtesting_trader,
+)
+
+SYMBOL = "BTC/USDT"
+EXCHANGE_ORDER_ID = "order-123"
+
+
+def _create_mock_order(exchange_order_id: str, side: str = "buy", symbol: str = SYMBOL):
+ order = mock.Mock()
+ order.exchange_order_id = exchange_order_id
+ order.symbol = symbol
+ order.side = octobot_trading.enums.TradeOrderSide(side)
+ order.is_cancelled = mock.Mock(return_value=False)
+ order.is_closed = mock.Mock(return_value=False)
+ return order
+
+
+@pytest_asyncio.fixture
+async def cancel_order_operators_list(backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ return cancel_order_operators.create_cancel_order_operators(exchange_manager)
+
+
+@pytest_asyncio.fixture
+async def cancel_order_operators_with_trading_mode(backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ mock_trading_mode = mock.Mock()
+ mock_trading_mode.cancel_order = mock.AsyncMock(return_value=(True, None))
+ mock_dependencies = mock.Mock()
+ operators_list = cancel_order_operators.create_cancel_order_operators(
+ exchange_manager,
+ trading_mode=mock_trading_mode,
+ dependencies=mock_dependencies,
+ )
+ return operators_list, mock_trading_mode, mock_dependencies
+
+
+@pytest_asyncio.fixture
+async def interpreter(cancel_order_operators_list):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ + cancel_order_operators_list
+ )
+
+
+@pytest_asyncio.fixture
+async def no_exchange_manager_cancel_order_operators_list():
+ return cancel_order_operators.create_cancel_order_operators(None)
+
+
+@pytest_asyncio.fixture
+async def no_exchange_manager_interpreter(no_exchange_manager_cancel_order_operators_list):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ + no_exchange_manager_cancel_order_operators_list
+ )
+
+
+@pytest_asyncio.fixture
+async def maybe_exchange_manager_interpreter(request, interpreter, no_exchange_manager_interpreter):
+ """Parametrized fixture that yields either interpreter or no_exchange_manager_interpreter."""
+ selected_value = request.param
+ if selected_value == "interpreter":
+ return interpreter
+ elif selected_value == "no_exchange_manager_interpreter":
+ return no_exchange_manager_interpreter
+ raise ValueError(f"Invalid selected_value: {selected_value}")
+
+
+class TestCancelOrderOperator:
+ @pytest.mark.asyncio
+ async def test_pre_compute_cancels_matching_orders(self, cancel_order_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ cancel_order_op_class, = cancel_order_operators_list
+
+ order1 = _create_mock_order("order-1")
+ order2 = _create_mock_order("order-2")
+ mock_orders = [order1, order2]
+
+ with mock.patch.object(
+ exchange_manager.exchange_personal_data.orders_manager,
+ "get_open_orders",
+ return_value=mock_orders,
+ ), mock.patch.object(
+ exchange_manager.trader,
+ "cancel_order",
+ mock.AsyncMock(side_effect=[True, True]),
+ ) as cancel_order_mock:
+ operator = cancel_order_op_class(
+ SYMBOL,
+ exchange_order_ids=["order-1", "order-2"],
+ )
+ await operator.pre_compute()
+
+ assert operator.value == {"cancelled_orders": ["order-1", "order-2"]}
+ assert cancel_order_mock.await_count == 2
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_no_orders_to_cancel(self, cancel_order_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ cancel_order_op_class, = cancel_order_operators_list
+
+ with mock.patch.object(
+ exchange_manager.exchange_personal_data.orders_manager,
+ "get_open_orders",
+ return_value=[],
+ ), mock.patch.object(
+ exchange_manager.trader,
+ "cancel_order",
+ mock.AsyncMock(),
+ ) as cancel_order_mock:
+ operator = cancel_order_op_class(
+ SYMBOL,
+ exchange_order_ids=["order-1"],
+ )
+ with pytest.raises(
+ trading_errors.OrderDescriptionNotFoundError,
+ match="No .* order found matching",
+ ):
+ await operator.pre_compute()
+
+ cancel_order_mock.assert_not_awaited()
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_filters_by_exchange_order_ids(self, cancel_order_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ cancel_order_op_class, = cancel_order_operators_list
+
+ order1 = _create_mock_order("order-1")
+ order2 = _create_mock_order("order-2")
+ order3 = _create_mock_order("order-3")
+ mock_orders = [order1, order2, order3]
+
+ with mock.patch.object(
+ exchange_manager.exchange_personal_data.orders_manager,
+ "get_open_orders",
+ return_value=mock_orders,
+ ), mock.patch.object(
+ exchange_manager.trader,
+ "cancel_order",
+ mock.AsyncMock(side_effect=[True, True]),
+ ) as cancel_order_mock:
+ operator = cancel_order_op_class(
+ SYMBOL,
+ exchange_order_ids=["order-1", "order-3"],
+ )
+ await operator.pre_compute()
+
+ assert operator.value == {"cancelled_orders": ["order-1", "order-3"]}
+ assert cancel_order_mock.await_count == 2
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_filters_by_side(self, cancel_order_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ cancel_order_op_class, = cancel_order_operators_list
+
+ buy_order = _create_mock_order("order-1", side="buy")
+ sell_order = _create_mock_order("order-2", side="sell")
+ mock_orders = [buy_order, sell_order]
+
+ with mock.patch.object(
+ exchange_manager.exchange_personal_data.orders_manager,
+ "get_open_orders",
+ return_value=mock_orders,
+ ), mock.patch.object(
+ exchange_manager.trader,
+ "cancel_order",
+ mock.AsyncMock(return_value=True),
+ ) as cancel_order_mock:
+ operator = cancel_order_op_class(
+ SYMBOL,
+ side="buy",
+ exchange_order_ids=["order-1", "order-2"],
+ )
+ await operator.pre_compute()
+
+ assert operator.value == {"cancelled_orders": ["order-1"]}
+ cancel_order_mock.assert_awaited_once_with(buy_order, wait_for_cancelling=True)
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_skips_cancelled_orders(self, cancel_order_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ cancel_order_op_class, = cancel_order_operators_list
+
+ order1 = _create_mock_order("order-1")
+ order1.is_cancelled = mock.Mock(return_value=True)
+ order2 = _create_mock_order("order-2")
+ mock_orders = [order1, order2]
+
+ with mock.patch.object(
+ exchange_manager.exchange_personal_data.orders_manager,
+ "get_open_orders",
+ return_value=mock_orders,
+ ), mock.patch.object(
+ exchange_manager.trader,
+ "cancel_order",
+ mock.AsyncMock(return_value=True),
+ ) as cancel_order_mock:
+ operator = cancel_order_op_class(
+ SYMBOL,
+ exchange_order_ids=["order-1", "order-2"],
+ )
+ await operator.pre_compute()
+
+ assert operator.value == {"cancelled_orders": ["order-2"]}
+ cancel_order_mock.assert_awaited_once_with(order2, wait_for_cancelling=True)
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_skips_closed_orders(self, cancel_order_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ cancel_order_op_class, = cancel_order_operators_list
+
+ order1 = _create_mock_order("order-1")
+ order1.is_closed = mock.Mock(return_value=True)
+ order2 = _create_mock_order("order-2")
+ mock_orders = [order1, order2]
+
+ with mock.patch.object(
+ exchange_manager.exchange_personal_data.orders_manager,
+ "get_open_orders",
+ return_value=mock_orders,
+ ), mock.patch.object(
+ exchange_manager.trader,
+ "cancel_order",
+ mock.AsyncMock(return_value=True),
+ ) as cancel_order_mock:
+ operator = cancel_order_op_class(
+ SYMBOL,
+ exchange_order_ids=["order-1", "order-2"],
+ )
+ await operator.pre_compute()
+
+ assert operator.value == {"cancelled_orders": ["order-2"]}
+ cancel_order_mock.assert_awaited_once_with(order2, wait_for_cancelling=True)
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_does_not_append_when_cancel_fails(self, cancel_order_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ cancel_order_op_class, = cancel_order_operators_list
+
+ order1 = _create_mock_order("order-1")
+ order2 = _create_mock_order("order-2")
+ mock_orders = [order1, order2]
+
+ with mock.patch.object(
+ exchange_manager.exchange_personal_data.orders_manager,
+ "get_open_orders",
+ return_value=mock_orders,
+ ), mock.patch.object(
+ exchange_manager.trader,
+ "cancel_order",
+ mock.AsyncMock(side_effect=[False, True]),
+ ) as cancel_order_mock:
+ operator = cancel_order_op_class(
+ SYMBOL,
+ exchange_order_ids=["order-1", "order-2"],
+ )
+ await operator.pre_compute()
+
+ assert operator.value == {"cancelled_orders": ["order-2"]}
+ assert cancel_order_mock.await_count == 2
+
+ def test_compute_without_pre_compute(self, cancel_order_operators_list):
+ cancel_order_op_class, = cancel_order_operators_list
+ operator = cancel_order_op_class(
+ SYMBOL,
+ exchange_order_ids=["order-1"],
+ )
+ with pytest.raises(
+ octobot_commons.errors.DSLInterpreterError,
+ match="has not been pre_computed",
+ ):
+ operator.compute()
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_uses_trading_mode_when_provided(
+ self, cancel_order_operators_with_trading_mode, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ operators_list, mock_trading_mode, mock_dependencies = cancel_order_operators_with_trading_mode
+ cancel_order_op_class, = operators_list
+
+ order1 = _create_mock_order("order-1")
+ mock_orders = [order1]
+
+ with mock.patch.object(
+ exchange_manager.exchange_personal_data.orders_manager,
+ "get_open_orders",
+ return_value=mock_orders,
+ ):
+ operator = cancel_order_op_class(
+ SYMBOL,
+ exchange_order_ids=["order-1"],
+ )
+ await operator.pre_compute()
+
+ assert operator.value == {"cancelled_orders": ["order-1"]}
+ mock_trading_mode.cancel_order.assert_awaited_once()
+ call_args = mock_trading_mode.cancel_order.call_args
+ assert call_args[0][0] == order1
+ assert call_args[1]["wait_for_cancelling"] is True
+ assert call_args[1]["dependencies"] is mock_dependencies
+
+ @pytest.mark.asyncio
+ async def test_cancel_order_call_as_dsl(self, interpreter, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+
+ if SYMBOL not in exchange_manager.client_symbols:
+ exchange_manager.client_symbols.append(SYMBOL)
+ if SYMBOL not in exchange_manager.exchange_config.traded_symbol_pairs:
+ exchange_manager.exchange_config.traded_symbol_pairs.append(SYMBOL)
+
+ limit_buy = trading_personal_data.BuyLimitOrder(exchange_manager.trader)
+ limit_buy.update(
+ order_type=octobot_trading.enums.TraderOrderType.BUY_LIMIT,
+ symbol=SYMBOL,
+ exchange_order_id=EXCHANGE_ORDER_ID,
+ current_price=decimal.Decimal("50000"),
+ quantity=decimal.Decimal("0.01"),
+ price=decimal.Decimal("50000"),
+ )
+ await exchange_manager.exchange_personal_data.orders_manager.upsert_order_instance(limit_buy)
+
+ open_orders = exchange_manager.exchange_personal_data.orders_manager.get_open_orders(symbol=SYMBOL)
+ assert len(open_orders) == 1
+ assert open_orders[0].exchange_order_id == EXCHANGE_ORDER_ID
+
+ result = await interpreter.interprete(
+ f"cancel_order('{SYMBOL}', exchange_order_ids=['{EXCHANGE_ORDER_ID}'])"
+ )
+ assert result == {"cancelled_orders": [EXCHANGE_ORDER_ID]}
+
+ open_orders_after = exchange_manager.exchange_personal_data.orders_manager.get_open_orders(symbol=SYMBOL)
+ assert len(open_orders_after) == 0
+ assert limit_buy.is_cancelled()
+
+ @pytest.mark.asyncio
+ async def test_cancel_order_call_as_dsl_with_side(self, interpreter, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+
+ buy_order = _create_mock_order("order-buy", side="buy")
+ mock_orders = [buy_order]
+
+ with mock.patch.object(
+ exchange_manager.exchange_personal_data.orders_manager,
+ "get_open_orders",
+ return_value=mock_orders,
+ ), mock.patch.object(
+ exchange_manager.trader,
+ "cancel_order",
+ mock.AsyncMock(return_value=True),
+ ):
+ result = await interpreter.interprete(
+ f"cancel_order('{SYMBOL}', side='buy', exchange_order_ids=['order-buy'])"
+ )
+ assert result == {"cancelled_orders": ["order-buy"]}
+
+
+class TestGetDependencies:
+ """Tests for get_dependencies using DSL syntax and the interpreter."""
+
+ @pytest.mark.parametrize(
+ "maybe_exchange_manager_interpreter",
+ ["interpreter", "no_exchange_manager_interpreter"],
+ indirect=True,
+ )
+ def test_cancel_order_get_dependencies_from_interpreter(
+ self, maybe_exchange_manager_interpreter
+ ):
+ maybe_exchange_manager_interpreter.prepare(
+ f"cancel_order('{SYMBOL}', exchange_order_ids=['{EXCHANGE_ORDER_ID}'])"
+ )
+ assert maybe_exchange_manager_interpreter.get_dependencies() == [
+ octobot_trading.dsl.SymbolDependency(symbol=SYMBOL),
+ ]
+ symbol = "ETH/USDT"
+ maybe_exchange_manager_interpreter.prepare(
+ f"cancel_order('{symbol}', tag='my_tag')"
+ )
+ assert maybe_exchange_manager_interpreter.get_dependencies() == [
+ octobot_trading.dsl.SymbolDependency(symbol=symbol),
+ ]
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_create_order_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_create_order_operators.py
new file mode 100644
index 000000000..fd0b6a207
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_create_order_operators.py
@@ -0,0 +1,812 @@
+# Drakkar-Software OctoBot-Tentacles
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import decimal
+import mock
+import pytest
+import pytest_asyncio
+
+import octobot_commons.constants as commons_constants
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.symbols as commons_symbols
+import octobot_trading.enums
+import octobot_trading.errors as trading_errors
+import octobot_trading.personal_data as personal_data
+import octobot_trading.dsl
+
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.create_order_operators as create_order_operators
+
+from tentacles.Meta.DSL_operators.exchange_operators.tests import (
+ backtesting_config,
+ fake_backtesting,
+ backtesting_exchange_manager,
+ backtesting_trader,
+)
+
+SYMBOL = "BTC/USDT"
+AMOUNT = 0.01
+PRICE = "50000"
+MARK_PRICE = decimal.Decimal("50000")
+
+
+def _create_mock_order(symbol: str = SYMBOL, side: str = "buy", order_type=None):
+ order = mock.Mock()
+ order.symbol = symbol
+ order.side = octobot_trading.enums.TradeOrderSide(side)
+ order.order_type = order_type or octobot_trading.enums.TraderOrderType.BUY_MARKET
+ order.to_dict = mock.Mock(return_value={"symbol": symbol, "side": side})
+ return order
+
+
+@pytest_asyncio.fixture
+async def create_order_operators_list(backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ return create_order_operators.create_create_order_operators(exchange_manager)
+
+
+@pytest_asyncio.fixture
+async def no_exchange_manager_create_order_operators_list():
+ return create_order_operators.create_create_order_operators(None)
+
+
+@pytest_asyncio.fixture
+async def create_order_operators_with_trading_mode(backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ mock_trading_mode = mock.Mock()
+ mock_trading_mode.create_order = mock.AsyncMock()
+ mock_dependencies = mock.Mock()
+ return create_order_operators.create_create_order_operators(
+ exchange_manager,
+ trading_mode=mock_trading_mode,
+ dependencies=mock_dependencies,
+ )
+
+
+@pytest_asyncio.fixture
+async def interpreter(create_order_operators_list):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ + create_order_operators_list
+ )
+
+
+@pytest_asyncio.fixture
+async def no_exchange_manager_interpreter(no_exchange_manager_create_order_operators_list):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ + no_exchange_manager_create_order_operators_list
+ )
+
+
+@pytest_asyncio.fixture
+async def maybe_exchange_manager_interpreter(request, interpreter, no_exchange_manager_interpreter):
+ """Parametrized fixture that yields either interpreter or no_exchange_manager_interpreter."""
+ selected_value = request.param
+ if selected_value == "interpreter":
+ return interpreter
+ elif selected_value == "no_exchange_manager_interpreter":
+ return no_exchange_manager_interpreter
+ raise ValueError(f"Invalid selected_value: {selected_value}")
+
+
+def _ensure_portfolio_config(backtesting_trader, portfolio_content):
+ _config, exchange_manager, _trader = backtesting_trader
+ if commons_constants.CONFIG_SIMULATOR not in _config:
+ _config[commons_constants.CONFIG_SIMULATOR] = {}
+ if commons_constants.CONFIG_STARTING_PORTFOLIO not in _config[commons_constants.CONFIG_SIMULATOR]:
+ _config[commons_constants.CONFIG_SIMULATOR][commons_constants.CONFIG_STARTING_PORTFOLIO] = {}
+ _config[commons_constants.CONFIG_SIMULATOR][commons_constants.CONFIG_STARTING_PORTFOLIO].update(
+ portfolio_content
+ )
+ exchange_manager.exchange_personal_data.portfolio_manager.apply_forced_portfolio(
+ _config[commons_constants.CONFIG_SIMULATOR][commons_constants.CONFIG_STARTING_PORTFOLIO]
+ )
+
+
+def _ensure_market_order_trading_context(backtesting_trader):
+ """Set up portfolio, symbol config, and mark price for real simulated order creation."""
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_portfolio_config(backtesting_trader, {"BTC": 0, "USDT": 100000})
+
+ if SYMBOL not in exchange_manager.client_symbols:
+ exchange_manager.client_symbols.append(SYMBOL)
+ if SYMBOL not in exchange_manager.exchange_config.traded_symbol_pairs:
+ exchange_manager.exchange_config.traded_symbol_pairs.append(SYMBOL)
+ exchange_manager.exchange_config.traded_symbols.append(
+ commons_symbols.parse_symbol(SYMBOL)
+ )
+
+ symbol_data = exchange_manager.exchange_symbols_data.get_exchange_symbol_data(
+ SYMBOL, allow_creation=True
+ )
+ symbol_data.handle_mark_price_update(
+ MARK_PRICE, octobot_trading.enums.MarkPriceSources.EXCHANGE_MARK_PRICE.value
+ )
+
+
+def _ensure_sell_order_trading_context(backtesting_trader):
+ """Set up portfolio with BTC for sell orders, symbol config, and mark price."""
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_portfolio_config(backtesting_trader, {"BTC": 1.0, "USDT": 0})
+
+ if SYMBOL not in exchange_manager.client_symbols:
+ exchange_manager.client_symbols.append(SYMBOL)
+ if SYMBOL not in exchange_manager.exchange_config.traded_symbol_pairs:
+ exchange_manager.exchange_config.traded_symbol_pairs.append(SYMBOL)
+ exchange_manager.exchange_config.traded_symbols.append(
+ commons_symbols.parse_symbol(SYMBOL)
+ )
+
+ symbol_data = exchange_manager.exchange_symbols_data.get_exchange_symbol_data(
+ SYMBOL, allow_creation=True
+ )
+ symbol_data.handle_mark_price_update(
+ MARK_PRICE, octobot_trading.enums.MarkPriceSources.EXCHANGE_MARK_PRICE.value
+ )
+
+
+
+class TestCreateOrderOnExchange:
+ @pytest.mark.asyncio
+ async def test_create_order_on_exchange_returns_order_via_trading_mode(
+ self, create_order_operators_with_trading_mode, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ market_op_class, _limit_op_class, _stop_loss_op_class = create_order_operators_with_trading_mode
+ mock_order = _create_mock_order()
+ factory = market_op_class("buy", SYMBOL, AMOUNT).get_order_factory()
+ factory.trading_mode.create_order = mock.AsyncMock(return_value=mock_order)
+
+ result = await factory.create_order_on_exchange(mock_order)
+
+ assert result is mock_order
+ factory.trading_mode.create_order.assert_awaited_once_with(
+ mock_order, dependencies=factory.dependencies, wait_for_creation=True
+ )
+
+ @pytest.mark.asyncio
+ async def test_create_order_on_exchange_uses_trader_when_no_trading_mode(
+ self, create_order_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ market_op_class, _limit_op_class, _stop_loss_op_class = create_order_operators_list
+ mock_order = _create_mock_order()
+ factory = market_op_class("buy", SYMBOL, AMOUNT).get_order_factory()
+
+ with mock.patch.object(
+ exchange_manager.trader,
+ "create_order",
+ mock.AsyncMock(return_value=mock_order),
+ ) as mock_create_order:
+ result = await factory.create_order_on_exchange(mock_order)
+
+ assert result is mock_order
+ mock_create_order.assert_awaited_once_with(
+ mock_order, wait_for_creation=True
+ )
+
+ @pytest.mark.asyncio
+ async def test_create_order_on_exchange_forwards_wait_for_creation(
+ self, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ operators = create_order_operators.create_create_order_operators(
+ exchange_manager, wait_for_creation=False
+ )
+ market_op_class = operators[0]
+ mock_order = _create_mock_order()
+ factory = market_op_class("buy", SYMBOL, AMOUNT).get_order_factory()
+
+ with mock.patch.object(
+ exchange_manager.trader,
+ "create_order",
+ mock.AsyncMock(return_value=mock_order),
+ ) as mock_create_order:
+ await factory.create_order_on_exchange(mock_order)
+
+ mock_create_order.assert_awaited_once_with(
+ mock_order, wait_for_creation=False
+ )
+
+
+class TestCreateBaseOrderAndAssociatedElements:
+ @pytest.mark.asyncio
+ async def test_create_base_orders_and_associated_elements_raises_when_symbol_not_in_exchange(
+ self, create_order_operators_list, backtesting_trader
+ ):
+ UNKNOWN_SYMBOL = "NONEXISTENT/USDT"
+ _config, exchange_manager, _trader = backtesting_trader
+ market_op_class, _limit_op_class, _stop_loss_op_class = create_order_operators_list
+ factory = market_op_class("buy", SYMBOL, AMOUNT).get_order_factory()
+
+ with pytest.raises(
+ trading_errors.UnSupportedSymbolError,
+ match=r"Symbol NONEXISTENT/USDT not found in exchange traded symbols",
+ ):
+ await factory.create_base_orders_and_associated_elements(
+ symbol=UNKNOWN_SYMBOL,
+ side="buy",
+ amount=AMOUNT,
+ order_type=octobot_trading.enums.TraderOrderType.BUY_MARKET,
+ )
+
+
+class TestMarketOrderOperator:
+ @pytest.mark.asyncio
+ async def test_pre_compute_creates_market_order(
+ self, create_order_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ market_op_class, _limit_op_class, _stop_loss_op_class = create_order_operators_list
+ mock_order = _create_mock_order()
+
+ factory = market_op_class("buy", SYMBOL, AMOUNT).get_order_factory()
+ with mock.patch.object(
+ factory,
+ "create_base_orders_and_associated_elements",
+ mock.AsyncMock(return_value=[mock_order]),
+ ), mock.patch.object(
+ factory,
+ "create_order_on_exchange",
+ mock.AsyncMock(return_value=mock_order),
+ ):
+ operator = market_op_class("buy", SYMBOL, AMOUNT)
+ await operator.pre_compute()
+
+ assert operator.value == {"created_orders": [{"symbol": SYMBOL, "side": "buy"}]}
+ factory.create_base_orders_and_associated_elements.assert_awaited_once()
+ call_kwargs = factory.create_base_orders_and_associated_elements.call_args[1]
+ assert call_kwargs["symbol"] == SYMBOL
+ assert call_kwargs["side"] == octobot_trading.enums.TradeOrderSide.BUY
+ assert call_kwargs["amount"] == AMOUNT
+ assert call_kwargs["order_type"] == octobot_trading.enums.TraderOrderType.BUY_MARKET
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_sell_market_order(
+ self, create_order_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ market_op_class, _limit_op_class, _stop_loss_op_class = create_order_operators_list
+ mock_order = _create_mock_order(side="sell")
+
+ factory = market_op_class("sell", SYMBOL, AMOUNT).get_order_factory()
+ with mock.patch.object(
+ factory,
+ "create_base_orders_and_associated_elements",
+ mock.AsyncMock(return_value=[mock_order]),
+ ), mock.patch.object(
+ factory,
+ "create_order_on_exchange",
+ mock.AsyncMock(return_value=mock_order),
+ ):
+ operator = market_op_class("sell", SYMBOL, AMOUNT)
+ await operator.pre_compute()
+
+ call_kwargs = factory.create_base_orders_and_associated_elements.call_args[1]
+ assert call_kwargs["order_type"] == octobot_trading.enums.TraderOrderType.SELL_MARKET
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_raises_when_create_order_fails(
+ self, create_order_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ market_op_class, _limit_op_class, _stop_loss_op_class = create_order_operators_list
+ mock_order = _create_mock_order()
+
+ factory = market_op_class("buy", SYMBOL, AMOUNT).get_order_factory()
+ with mock.patch.object(
+ factory,
+ "create_base_orders_and_associated_elements",
+ mock.AsyncMock(return_value=[mock_order]),
+ ), mock.patch.object(
+ factory,
+ "create_order_on_exchange",
+ mock.AsyncMock(return_value=None),
+ ):
+ operator = market_op_class("buy", SYMBOL, AMOUNT)
+ with pytest.raises(
+ octobot_commons.errors.DSLInterpreterError,
+ match="Failed to create",
+ ):
+ await operator.pre_compute()
+
+ def test_compute_without_pre_compute(self, create_order_operators_list):
+ market_op_class, _limit_op_class, _stop_loss_op_class = create_order_operators_list
+ operator = market_op_class("buy", SYMBOL, AMOUNT)
+ with pytest.raises(
+ octobot_commons.errors.DSLInterpreterError,
+ match="has not been pre_computed",
+ ):
+ operator.compute()
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_uses_trading_mode_when_provided(
+ self, create_order_operators_with_trading_mode, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ market_op_class, _limit_op_class, _stop_loss_op_class = create_order_operators_with_trading_mode
+ mock_order = _create_mock_order()
+
+ factory = market_op_class("buy", SYMBOL, AMOUNT).get_order_factory()
+ mock_trading_mode = factory.trading_mode
+ mock_trading_mode.create_order = mock.AsyncMock(return_value=mock_order)
+
+ with mock.patch.object(
+ factory,
+ "create_base_orders_and_associated_elements",
+ mock.AsyncMock(return_value=[mock_order]),
+ ), mock.patch.object(
+ exchange_manager.trader,
+ "create_order",
+ mock.AsyncMock(return_value=mock_order),
+ ):
+ operator = market_op_class("buy", SYMBOL, AMOUNT)
+ await operator.pre_compute()
+
+ assert operator.value == {"created_orders": [{"symbol": SYMBOL, "side": "buy"}]}
+ mock_trading_mode.create_order.assert_awaited_once()
+ call_args = mock_trading_mode.create_order.call_args
+ assert call_args[0][0] == mock_order
+ assert call_args[1]["dependencies"] is factory.dependencies
+ assert call_args[1]["wait_for_creation"] is True
+
+
+class TestLimitOrderOperator:
+ @pytest.mark.asyncio
+ async def test_pre_compute_creates_limit_order(
+ self, create_order_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _market_op_class, limit_op_class, _stop_loss_op_class = create_order_operators_list
+ mock_order = _create_mock_order(order_type=octobot_trading.enums.TraderOrderType.BUY_LIMIT)
+
+ factory = limit_op_class("buy", SYMBOL, AMOUNT, PRICE).get_order_factory()
+ with mock.patch.object(
+ factory,
+ "create_base_orders_and_associated_elements",
+ mock.AsyncMock(return_value=[mock_order]),
+ ), mock.patch.object(
+ factory,
+ "create_order_on_exchange",
+ mock.AsyncMock(return_value=mock_order),
+ ):
+ operator = limit_op_class("buy", SYMBOL, AMOUNT, PRICE)
+ await operator.pre_compute()
+
+ assert operator.value == {"created_orders": [{"symbol": SYMBOL, "side": "buy"}]}
+ call_kwargs = factory.create_base_orders_and_associated_elements.call_args[1]
+ assert call_kwargs["symbol"] == SYMBOL
+ assert call_kwargs["side"] == octobot_trading.enums.TradeOrderSide.BUY
+ assert call_kwargs["amount"] == AMOUNT
+ assert call_kwargs["price"] == PRICE
+ assert call_kwargs["order_type"] == octobot_trading.enums.TraderOrderType.BUY_LIMIT
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_forwards_allow_holdings_adaptation(
+ self, create_order_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _market_op_class, limit_op_class, _stop_loss_op_class = create_order_operators_list
+ mock_order = _create_mock_order(order_type=octobot_trading.enums.TraderOrderType.BUY_LIMIT)
+
+ factory = limit_op_class("buy", SYMBOL, AMOUNT, PRICE).get_order_factory()
+ with mock.patch.object(
+ factory,
+ "create_base_orders_and_associated_elements",
+ mock.AsyncMock(return_value=[mock_order]),
+ ), mock.patch.object(
+ factory,
+ "create_order_on_exchange",
+ mock.AsyncMock(return_value=mock_order),
+ ):
+ operator = limit_op_class(
+ "buy", SYMBOL, AMOUNT, PRICE, allow_holdings_adaptation=True
+ )
+ await operator.pre_compute()
+
+ call_kwargs = factory.create_base_orders_and_associated_elements.call_args[1]
+ assert call_kwargs["allow_holdings_adaptation"] is True
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_allow_holdings_adaptation_defaults_to_false(
+ self, create_order_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _market_op_class, limit_op_class, _stop_loss_op_class = create_order_operators_list
+ mock_order = _create_mock_order(order_type=octobot_trading.enums.TraderOrderType.BUY_LIMIT)
+
+ factory = limit_op_class("buy", SYMBOL, AMOUNT, PRICE).get_order_factory()
+ with mock.patch.object(
+ factory,
+ "create_base_orders_and_associated_elements",
+ mock.AsyncMock(return_value=[mock_order]),
+ ), mock.patch.object(
+ factory,
+ "create_order_on_exchange",
+ mock.AsyncMock(return_value=mock_order),
+ ):
+ operator = limit_op_class("buy", SYMBOL, AMOUNT, PRICE)
+ await operator.pre_compute()
+
+ call_kwargs = factory.create_base_orders_and_associated_elements.call_args[1]
+ assert call_kwargs.get("allow_holdings_adaptation", False) is False
+
+
+def _patch_stop_loss_supported(exchange_manager):
+ """Patch exchange to support STOP_LOSS orders (binanceus spot does not by default)."""
+ return mock.patch.object(
+ exchange_manager.exchange,
+ "is_supported_order_type",
+ mock.Mock(return_value=True),
+ )
+
+
+class TestStopLossOrderOperator:
+ @pytest.mark.asyncio
+ async def test_pre_compute_creates_stop_loss_order(
+ self, create_order_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _market_op_class, _limit_op_class, stop_loss_op_class = create_order_operators_list
+ mock_order = _create_mock_order(order_type=octobot_trading.enums.TraderOrderType.STOP_LOSS)
+
+ operator = stop_loss_op_class("buy", SYMBOL, AMOUNT, PRICE)
+ factory = operator.get_order_factory()
+ with _patch_stop_loss_supported(exchange_manager), mock.patch.object(
+ factory,
+ "create_base_orders_and_associated_elements",
+ mock.AsyncMock(return_value=[mock_order]),
+ ), mock.patch.object(
+ factory,
+ "create_order_on_exchange",
+ mock.AsyncMock(return_value=mock_order),
+ ):
+ await operator.pre_compute()
+
+ call_kwargs = factory.create_base_orders_and_associated_elements.call_args[1]
+ assert call_kwargs["order_type"] == octobot_trading.enums.TraderOrderType.STOP_LOSS
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_raises_when_stop_loss_unsupported(
+ self, create_order_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _market_op_class, _limit_op_class, stop_loss_op_class = create_order_operators_list
+
+ with mock.patch.object(
+ exchange_manager.exchange,
+ "is_supported_order_type",
+ mock.Mock(return_value=False),
+ ):
+ operator = stop_loss_op_class("buy", SYMBOL, AMOUNT, PRICE)
+ with pytest.raises(
+ trading_errors.NotSupportedOrderTypeError,
+ match="STOP_LOSS orders are not supported",
+ ):
+ await operator.pre_compute()
+
+
+class TestCreateOrderCallAsDsl:
+ @pytest.mark.asyncio
+ async def test_market_call_as_dsl(self, interpreter, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_market_order_trading_context(backtesting_trader)
+
+ result = await interpreter.interprete(
+ f"market('buy', '{SYMBOL}', {AMOUNT})"
+ )
+
+ assert isinstance(result, dict)
+ assert "created_orders" in result
+ assert len(result["created_orders"]) == 1
+ created_order = result["created_orders"][0]
+ assert created_order["symbol"] == SYMBOL
+ assert created_order["side"] == octobot_trading.enums.TradeOrderSide.BUY.value
+ assert "id" in created_order or "exchange_id" in created_order
+
+ all_orders = exchange_manager.exchange_personal_data.orders_manager.get_all_orders(
+ symbol=SYMBOL
+ )
+ assert all_orders == []
+ trades = exchange_manager.exchange_personal_data.trades_manager.get_trades()
+ assert len(trades) == 1
+ created_trade = trades[0]
+ assert created_trade.trade_type == octobot_trading.enums.TraderOrderType.BUY_MARKET
+ assert created_trade.side == octobot_trading.enums.TradeOrderSide.BUY
+ assert created_trade.executed_price == MARK_PRICE
+ assert created_trade.executed_quantity == decimal.Decimal(str(AMOUNT))
+
+ @pytest.mark.asyncio
+ async def test_limit_call_as_dsl(
+ self, interpreter, backtesting_trader, create_order_operators_list
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_market_order_trading_context(backtesting_trader)
+
+ order_price = 45000
+ result = await interpreter.interprete(
+ f"limit('buy', '{SYMBOL}', {AMOUNT}, {order_price}, reduce_only=True)"
+ )
+
+ assert isinstance(result, dict)
+ assert "created_orders" in result
+ assert len(result["created_orders"]) == 1
+ created_order = result["created_orders"][0]
+ assert created_order["symbol"] == SYMBOL
+ assert created_order["side"] == octobot_trading.enums.TradeOrderSide.BUY.value
+ assert "id" in created_order or "exchange_id" in created_order
+
+ all_orders = exchange_manager.exchange_personal_data.orders_manager.get_all_orders(
+ symbol=SYMBOL
+ )
+ assert len(all_orders) == 1
+ trades = exchange_manager.exchange_personal_data.trades_manager.get_trades()
+ assert len(trades) == 0
+ created_order = all_orders[0]
+ assert isinstance(created_order, personal_data.BuyLimitOrder)
+ assert created_order.origin_price == decimal.Decimal(str(order_price))
+ assert created_order.origin_quantity == decimal.Decimal(str(AMOUNT))
+ assert created_order.reduce_only == True
+
+ @pytest.mark.asyncio
+ async def test_limit_sell_with_take_profit_call_as_dsl(
+ self, interpreter, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_sell_order_trading_context(backtesting_trader)
+
+ limit_price = 55000
+ take_profit_price = 52000
+ result = await interpreter.interprete(
+ f"limit('sell', '{SYMBOL}', {AMOUNT}, price='{limit_price}', "
+ f"take_profit_prices=['{take_profit_price}'])"
+ )
+
+ assert isinstance(result, dict)
+ assert "created_orders" in result
+ assert len(result["created_orders"]) == 1
+ created_order = result["created_orders"][0]
+ assert created_order["symbol"] == SYMBOL
+ assert created_order["side"] == octobot_trading.enums.TradeOrderSide.SELL.value
+ assert "id" in created_order or "exchange_id" in created_order
+
+ all_orders = exchange_manager.exchange_personal_data.orders_manager.get_all_orders(
+ symbol=SYMBOL
+ )
+ assert len(all_orders) == 1
+ base_order = all_orders[0]
+ assert isinstance(base_order, personal_data.SellLimitOrder)
+ assert base_order.origin_price == decimal.Decimal(str(limit_price))
+ assert base_order.origin_quantity == decimal.Decimal(str(AMOUNT))
+ assert len(base_order.chained_orders) == 1
+ tp_order = base_order.chained_orders[0]
+ assert isinstance(tp_order, personal_data.BuyLimitOrder)
+ assert tp_order.side == octobot_trading.enums.TradeOrderSide.BUY
+ assert tp_order.origin_price == decimal.Decimal(str(take_profit_price))
+ assert tp_order.origin_quantity == decimal.Decimal(str(AMOUNT))
+
+ @pytest.mark.asyncio
+ async def test_stop_loss_sell_call_as_dsl(self, interpreter, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_sell_order_trading_context(backtesting_trader)
+
+ stop_price = 48000
+ with _patch_stop_loss_supported(exchange_manager):
+ result = await interpreter.interprete(
+ f"stop_loss('sell', '{SYMBOL}', {AMOUNT}, price='{stop_price}')"
+ )
+
+ assert isinstance(result, dict)
+ assert "created_orders" in result
+ assert len(result["created_orders"]) == 1
+ created_order = result["created_orders"][0]
+ assert created_order["symbol"] == SYMBOL
+ assert created_order["side"] == octobot_trading.enums.TradeOrderSide.SELL.value
+ assert "id" in created_order or "exchange_id" in created_order
+
+ all_orders = exchange_manager.exchange_personal_data.orders_manager.get_all_orders(
+ symbol=SYMBOL
+ )
+ assert len(all_orders) == 1
+ created_order = all_orders[0]
+ assert isinstance(created_order, personal_data.StopLossOrder)
+ assert created_order.side == octobot_trading.enums.TradeOrderSide.SELL
+ assert created_order.origin_price == decimal.Decimal(str(stop_price))
+ assert created_order.origin_quantity == decimal.Decimal(str(AMOUNT))
+
+ @pytest.mark.asyncio
+ async def test_stop_loss_call_as_dsl_raises_when_unsupported(
+ self, interpreter, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_sell_order_trading_context(backtesting_trader)
+
+ stop_price = 48000
+ with mock.patch.object(
+ exchange_manager.exchange,
+ "is_supported_order_type",
+ mock.Mock(return_value=False),
+ ):
+ with pytest.raises(
+ trading_errors.NotSupportedOrderTypeError,
+ match="STOP_LOSS orders are not supported",
+ ):
+ await interpreter.interprete(
+ f"stop_loss('sell', '{SYMBOL}', {AMOUNT}, price='{stop_price}')"
+ )
+
+ @pytest.mark.asyncio
+ async def test_limit_with_chained_stop_loss_call_as_dsl_raises_when_unsupported(
+ self, interpreter, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_market_order_trading_context(backtesting_trader)
+
+ limit_price = 50000
+ stop_loss_price = 48000
+ with mock.patch.object(
+ exchange_manager.exchange,
+ "is_supported_order_type",
+ mock.Mock(return_value=False),
+ ):
+ with pytest.raises(
+ trading_errors.NotSupportedOrderTypeError,
+ match="STOP_LOSS orders are not supported",
+ ):
+ await interpreter.interprete(
+ f"limit('buy', '{SYMBOL}', {AMOUNT}, price='{limit_price}', "
+ f"stop_loss_price='{stop_loss_price}')"
+ )
+
+ @pytest.mark.asyncio
+ async def test_limit_buy_with_take_profit_and_stop_loss_call_as_dsl(
+ self, interpreter, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_market_order_trading_context(backtesting_trader)
+
+ limit_price = 50000
+ take_profit_price_offset = "10%"
+ stop_loss_price = 48000
+ tag = "test_tag"
+ cancel_policy = personal_data.ChainedOrderFillingPriceOrderCancelPolicy.__name__
+ active_order_swap_strategy = personal_data.TakeProfitFirstActiveOrderSwapStrategy.__name__
+ with _patch_stop_loss_supported(exchange_manager):
+ result = await interpreter.interprete(
+ f"limit('buy', '{SYMBOL}', {AMOUNT}, price='{limit_price}', "
+ f"take_profit_prices=['{take_profit_price_offset}'], stop_loss_price='{stop_loss_price}', tag='{tag}', cancel_policy='{cancel_policy}', active_order_swap_strategy='{active_order_swap_strategy}')"
+ )
+
+ assert isinstance(result, dict)
+ assert "created_orders" in result
+ assert len(result["created_orders"]) == 1
+ created_order = result["created_orders"][0]
+ assert created_order["symbol"] == SYMBOL
+ assert created_order["side"] == octobot_trading.enums.TradeOrderSide.BUY.value
+ assert "id" in created_order or "exchange_id" in created_order
+
+ all_orders = exchange_manager.exchange_personal_data.orders_manager.get_all_orders(
+ symbol=SYMBOL
+ )
+ assert len(all_orders) == 1
+ base_order = all_orders[0]
+ assert isinstance(base_order, personal_data.BuyLimitOrder)
+ assert base_order.origin_price == decimal.Decimal(str(limit_price))
+ assert base_order.origin_quantity == decimal.Decimal(str(AMOUNT))
+ assert base_order.tag == tag
+ assert isinstance(base_order.cancel_policy, personal_data.ChainedOrderFillingPriceOrderCancelPolicy)
+ assert len(base_order.chained_orders) == 2
+ stop_orders = [o for o in base_order.chained_orders if personal_data.is_stop_order(o.order_type)]
+ tp_orders = [o for o in base_order.chained_orders if not personal_data.is_stop_order(o.order_type)]
+ assert len(stop_orders) == 1
+ assert len(tp_orders) == 1
+ assert isinstance(stop_orders[0], personal_data.StopLossOrder)
+ assert isinstance(tp_orders[0], personal_data.SellLimitOrder)
+ assert tp_orders[0].tag == tag
+ assert stop_orders[0].tag == tag
+ assert stop_orders[0].side == octobot_trading.enums.TradeOrderSide.SELL
+ assert stop_orders[0].origin_price == decimal.Decimal(str(stop_loss_price))
+ assert stop_orders[0].origin_quantity == decimal.Decimal(str(AMOUNT))
+ assert tp_orders[0].side == octobot_trading.enums.TradeOrderSide.SELL
+ assert tp_orders[0].origin_price == decimal.Decimal("55000") # 50k + 10%
+ assert tp_orders[0].origin_quantity == decimal.Decimal(str(AMOUNT))
+ order_group = tp_orders[0].order_group
+ assert isinstance(order_group, personal_data.OneCancelsTheOtherOrderGroup)
+ assert isinstance(order_group.active_order_swap_strategy, personal_data.TakeProfitFirstActiveOrderSwapStrategy) # non default strategy
+ assert tp_orders[0].order_group is stop_orders[0].order_group
+
+ @pytest.mark.asyncio
+ async def test_limit_buy_with_many_take_profits_and_a_stop_loss_call_as_dsl(
+ self, interpreter, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_market_order_trading_context(backtesting_trader)
+
+ limit_price = 50000
+ take_profit_price_offset_1 = "10%"
+ take_profit_price_offset_2 = "20%"
+ take_profit_price_offset_3 = "30%"
+ take_profit_volume_percents = [50, 20, 30]
+ stop_loss_price = 48000
+ trailing_profile = personal_data.TrailingProfileTypes.FILLED_TAKE_PROFIT.value
+ with _patch_stop_loss_supported(exchange_manager):
+ result = await interpreter.interprete(
+ f"limit('buy', '{SYMBOL}', {AMOUNT}, price='{limit_price}', "
+ f"take_profit_prices=['{take_profit_price_offset_1}', '{take_profit_price_offset_2}', '{take_profit_price_offset_3}'], take_profit_volume_percents=['{take_profit_volume_percents[0]}', '{take_profit_volume_percents[1]}', '{take_profit_volume_percents[2]}'], stop_loss_price='{stop_loss_price}', trailing_profile='{trailing_profile}')"
+ )
+
+ assert isinstance(result, dict)
+ assert "created_orders" in result
+ assert len(result["created_orders"]) == 1
+ created_order = result["created_orders"][0]
+ assert created_order["symbol"] == SYMBOL
+ assert created_order["side"] == octobot_trading.enums.TradeOrderSide.BUY.value
+ assert "id" in created_order or "exchange_id" in created_order
+
+ all_orders = exchange_manager.exchange_personal_data.orders_manager.get_all_orders(
+ symbol=SYMBOL
+ )
+ assert len(all_orders) == 1
+ base_order = all_orders[0]
+ assert isinstance(base_order, personal_data.BuyLimitOrder)
+ assert base_order.origin_price == decimal.Decimal(str(limit_price))
+ assert base_order.origin_quantity == decimal.Decimal(str(AMOUNT))
+ assert len(base_order.chained_orders) == 4
+ stop_orders = [o for o in base_order.chained_orders if personal_data.is_stop_order(o.order_type)]
+ tp_orders = [o for o in base_order.chained_orders if not personal_data.is_stop_order(o.order_type)]
+ assert len(stop_orders) == 1
+ assert len(tp_orders) == 3
+ assert isinstance(stop_orders[0], personal_data.StopLossOrder)
+ assert isinstance(tp_orders[0], personal_data.SellLimitOrder)
+ assert stop_orders[0].side == octobot_trading.enums.TradeOrderSide.SELL
+ assert stop_orders[0].origin_price == decimal.Decimal(str(stop_loss_price))
+ assert stop_orders[0].origin_quantity == decimal.Decimal(str(AMOUNT))
+ for i, tp_order in enumerate(tp_orders):
+ assert tp_order.side == octobot_trading.enums.TradeOrderSide.SELL
+ assert tp_order.origin_price == decimal.Decimal("50000") * decimal.Decimal(str(1 + (i + 1) * 0.1))
+ assert tp_order.origin_quantity == decimal.Decimal(str(AMOUNT)) * decimal.Decimal(str(take_profit_volume_percents[i] / 100))
+ order_group = tp_order.order_group
+ assert isinstance(order_group, personal_data.TrailingOnFilledTPBalancedOrderGroup)
+ assert isinstance(order_group.active_order_swap_strategy, personal_data.StopFirstActiveOrderSwapStrategy) # default strategy
+ assert tp_order.order_group is stop_orders[0].order_group
+
+
+class TestGetDependencies:
+ """Tests for get_dependencies using DSL syntax and the interpreter."""
+
+ @pytest.mark.parametrize(
+ "maybe_exchange_manager_interpreter",
+ ["interpreter", "no_exchange_manager_interpreter"],
+ indirect=True,
+ )
+ def test_market_order_get_dependencies_from_interpreter_with_exchange_manager(self, maybe_exchange_manager_interpreter):
+ # symbol 1
+ maybe_exchange_manager_interpreter.prepare(f"market('buy', '{SYMBOL}', {AMOUNT})")
+ assert maybe_exchange_manager_interpreter.get_dependencies() == [
+ octobot_trading.dsl.SymbolDependency(symbol=SYMBOL),
+ ]
+        # symbol 2: a different traded pair, passed positionally
+ symbol = "ETH/USDT"
+ maybe_exchange_manager_interpreter.prepare(f"market('sell', '{symbol}', 0.5)")
+ assert maybe_exchange_manager_interpreter.get_dependencies() == [
+ octobot_trading.dsl.SymbolDependency(symbol=symbol),
+ ]
+ symbol = "SOL/USDT"
+ # symbol 3 as keyword argument
+ maybe_exchange_manager_interpreter.prepare(f"market('sell', symbol='{symbol}', amount=0.5)")
+ assert maybe_exchange_manager_interpreter.get_dependencies() == [
+ octobot_trading.dsl.SymbolDependency(symbol=symbol),
+ ]
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_futures_contracts_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_futures_contracts_operators.py
new file mode 100644
index 000000000..4573381d5
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_futures_contracts_operators.py
@@ -0,0 +1,131 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import mock
+import pytest
+import pytest_asyncio
+
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.futures_contracts_operators as futures_contracts_operators
+
+from tentacles.Meta.DSL_operators.exchange_operators.tests import (
+ backtesting_config,
+ fake_backtesting,
+ backtesting_exchange_manager,
+ backtesting_trader,
+)
+
+FUTURES_SYMBOL = "BTC/USDT:USDT"
+LEVERAGE = 10
+
+
+@pytest_asyncio.fixture
+async def futures_contracts_operators_list(backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ return futures_contracts_operators.create_futures_contracts_operators(exchange_manager)
+
+
+@pytest_asyncio.fixture
+async def interpreter(futures_contracts_operators_list):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ + futures_contracts_operators_list
+ )
+
+
+class TestSetLeverageOperator:
+ @pytest.mark.asyncio
+ async def test_pre_compute_sets_leverage(self, futures_contracts_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ set_leverage_op_class, = futures_contracts_operators_list
+
+ with mock.patch.object(
+ _trader,
+ "set_leverage",
+ mock.AsyncMock(return_value=True),
+ ) as set_leverage_mock:
+ operator = set_leverage_op_class(FUTURES_SYMBOL, LEVERAGE)
+ await operator.pre_compute()
+
+ assert operator.value == float(LEVERAGE)
+ set_leverage_mock.assert_awaited_once_with(
+ FUTURES_SYMBOL,
+ None,
+ mock.ANY,
+ )
+ call_args = set_leverage_mock.call_args
+ assert float(call_args[0][2]) == LEVERAGE
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_with_float_leverage(self, futures_contracts_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ set_leverage_op_class, = futures_contracts_operators_list
+
+ leverage_value = 5.5
+ with mock.patch.object(
+ _trader,
+ "set_leverage",
+ mock.AsyncMock(return_value=True),
+ ) as set_leverage_mock:
+ operator = set_leverage_op_class(FUTURES_SYMBOL, leverage_value)
+ await operator.pre_compute()
+
+ assert operator.value == leverage_value
+ set_leverage_mock.assert_awaited_once()
+
+ def test_compute_without_pre_compute(self, futures_contracts_operators_list):
+ set_leverage_op_class, = futures_contracts_operators_list
+ operator = set_leverage_op_class(FUTURES_SYMBOL, LEVERAGE)
+ with pytest.raises(
+ octobot_commons.errors.DSLInterpreterError,
+ match="has not been pre_computed",
+ ):
+ operator.compute()
+
+ @pytest.mark.asyncio
+ async def test_set_leverage_call_as_dsl(self, interpreter, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+
+ with mock.patch.object(
+ _trader,
+ "set_leverage",
+ mock.AsyncMock(return_value=True),
+ ) as set_leverage_mock:
+ result = await interpreter.interprete(
+ f"set_leverage('{FUTURES_SYMBOL}', {LEVERAGE})"
+ )
+ assert result == float(LEVERAGE)
+ set_leverage_mock.assert_awaited_once_with(
+ FUTURES_SYMBOL,
+ None,
+ mock.ANY,
+ )
+
+ @pytest.mark.asyncio
+ async def test_set_leverage_call_as_dsl_with_leverage_param(self, interpreter, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+
+ with mock.patch.object(
+ _trader,
+ "set_leverage",
+ mock.AsyncMock(return_value=True),
+ ) as set_leverage_mock:
+ result = await interpreter.interprete(
+ f"set_leverage('{FUTURES_SYMBOL}', leverage={LEVERAGE})"
+ )
+ assert result == float(LEVERAGE)
+ set_leverage_mock.assert_awaited_once()
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_portfolio_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_portfolio_operators.py
new file mode 100644
index 000000000..cbeed8fe9
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_personal_data_operators/test_portfolio_operators.py
@@ -0,0 +1,265 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import decimal
+import mock
+import octobot_commons.constants as commons_constants
+import pytest
+import pytest_asyncio
+
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_trading.constants
+
+import tentacles.Meta.DSL_operators.exchange_operators.exchange_personal_data_operators.portfolio_operators as portfolio_operators
+
+from tentacles.Meta.DSL_operators.exchange_operators.tests import (
+ backtesting_config,
+ fake_backtesting,
+ backtesting_exchange_manager,
+ backtesting_trader,
+)
+
+ASSET_BTC = "BTC"
+ASSET_USDT = "USDT"
+ASSET_ETH = "ETH"
+
+
+def _ensure_portfolio_config(backtesting_trader, portfolio_content):
+ _config, exchange_manager, _trader = backtesting_trader
+ if commons_constants.CONFIG_SIMULATOR not in _config:
+ _config[commons_constants.CONFIG_SIMULATOR] = {}
+ if commons_constants.CONFIG_STARTING_PORTFOLIO not in _config[commons_constants.CONFIG_SIMULATOR]:
+ _config[commons_constants.CONFIG_SIMULATOR][commons_constants.CONFIG_STARTING_PORTFOLIO] = {}
+ _config[commons_constants.CONFIG_SIMULATOR][commons_constants.CONFIG_STARTING_PORTFOLIO].update(
+ portfolio_content
+ )
+ exchange_manager.exchange_personal_data.portfolio_manager.apply_forced_portfolio(
+ _config[commons_constants.CONFIG_SIMULATOR][commons_constants.CONFIG_STARTING_PORTFOLIO]
+ )
+
+
+@pytest_asyncio.fixture
+async def portfolio_operators_list(backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ return portfolio_operators.create_portfolio_operators(exchange_manager)
+
+
+@pytest_asyncio.fixture
+async def interpreter(portfolio_operators_list):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ + portfolio_operators_list
+ )
+
+
+class TestTotalOperator:
+ @pytest.mark.asyncio
+ async def test_pre_compute(self, portfolio_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ total_op_class, _, _ = portfolio_operators_list
+
+ _ensure_portfolio_config(backtesting_trader, {ASSET_BTC: 1.5, ASSET_USDT: 1000})
+
+ operator = total_op_class(ASSET_BTC)
+ await operator.pre_compute()
+ assert operator.value == 1.5
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_asset_not_in_portfolio(self, portfolio_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ total_op_class, _, _ = portfolio_operators_list
+
+ _ensure_portfolio_config(backtesting_trader, {ASSET_BTC: 1.5, ASSET_USDT: 1000})
+
+ operator = total_op_class(ASSET_ETH)
+ await operator.pre_compute()
+ assert operator.value == float(octobot_trading.constants.ZERO)
+
+ def test_compute_without_pre_compute(self, portfolio_operators_list):
+ total_op_class, _, _ = portfolio_operators_list
+ operator = total_op_class(ASSET_BTC)
+ with pytest.raises(
+ octobot_commons.errors.DSLInterpreterError,
+ match="has not been pre_computed",
+ ):
+ operator.compute()
+
+ @pytest.mark.asyncio
+ async def test_total_call_as_dsl(self, interpreter, backtesting_trader):
+ _ensure_portfolio_config(backtesting_trader, {ASSET_BTC: 2.0, ASSET_USDT: 500})
+
+ assert await interpreter.interprete(f"total('{ASSET_BTC}')") == 2.0
+ assert await interpreter.interprete(f"total('{ASSET_USDT}')") == 500.0
+ assert await interpreter.interprete(f"total('{ASSET_ETH}')") == 0.0
+
+
+class TestAvailableOperator:
+ @pytest.mark.asyncio
+ async def test_pre_compute(self, portfolio_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ _, available_op_class, _ = portfolio_operators_list
+
+ _ensure_portfolio_config(backtesting_trader, {ASSET_BTC: 1.5, ASSET_USDT: 1000})
+
+ operator = available_op_class(ASSET_BTC)
+ await operator.pre_compute()
+ assert operator.value == 1.5
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_asset_not_in_portfolio(self, portfolio_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ _, available_op_class, _ = portfolio_operators_list
+
+ _ensure_portfolio_config(backtesting_trader, {ASSET_BTC: 1.5, ASSET_USDT: 1000})
+
+ operator = available_op_class(ASSET_ETH)
+ await operator.pre_compute()
+ assert operator.value == float(octobot_trading.constants.ZERO)
+
+ def test_compute_without_pre_compute(self, portfolio_operators_list):
+ _, available_op_class, _ = portfolio_operators_list
+ operator = available_op_class(ASSET_BTC)
+ with pytest.raises(
+ octobot_commons.errors.DSLInterpreterError,
+ match="has not been pre_computed",
+ ):
+ operator.compute()
+
+ @pytest.mark.asyncio
+ async def test_available_call_as_dsl(self, interpreter, backtesting_trader):
+ _ensure_portfolio_config(backtesting_trader, {ASSET_BTC: 3.0, ASSET_USDT: 2000})
+
+ assert await interpreter.interprete(f"available('{ASSET_BTC}')") == 3.0
+ assert await interpreter.interprete(f"available('{ASSET_USDT}')") == 2000.0
+ assert await interpreter.interprete(f"available('{ASSET_ETH}')") == 0.0
+
+
+class TestWithdrawOperator:
+ NETWORK = "ethereum"
+ ADDRESS = "0x1234567890abcdef1234567890abcdef12345678"
+ WITHDRAW_RESULT = {"id": "withdrawal-123", "status": "ok"}
+
+ @pytest.mark.asyncio
+ async def test_pre_compute(self, portfolio_operators_list, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ _, _, withdraw_op_class = portfolio_operators_list
+ _ensure_portfolio_config(backtesting_trader, {ASSET_BTC: 1.0, ASSET_USDT: 1000})
+
+ with mock.patch.object(
+ exchange_manager.trader,
+ "withdraw",
+ mock.AsyncMock(return_value=self.WITHDRAW_RESULT),
+ ) as withdraw_mock:
+ operator = withdraw_op_class(ASSET_BTC, self.NETWORK, self.ADDRESS, 0.1)
+ await operator.pre_compute()
+
+ assert operator.value == {"created_withdrawals": [self.WITHDRAW_RESULT]}
+ withdraw_mock.assert_awaited_once_with(
+ ASSET_BTC,
+ decimal.Decimal("0.1"),
+ self.NETWORK,
+ self.ADDRESS,
+ tag=None,
+ params={},
+ )
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_uses_available_balance_when_amount_omitted(
+ self, portfolio_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _, _, withdraw_op_class = portfolio_operators_list
+ _ensure_portfolio_config(backtesting_trader, {ASSET_BTC: 0.5, ASSET_USDT: 1000})
+
+ with mock.patch.object(
+ exchange_manager.trader,
+ "withdraw",
+ mock.AsyncMock(return_value=self.WITHDRAW_RESULT),
+ ) as withdraw_mock:
+ operator = withdraw_op_class(ASSET_BTC, self.NETWORK, self.ADDRESS)
+ await operator.pre_compute()
+
+ assert operator.value == {"created_withdrawals": [self.WITHDRAW_RESULT]}
+ withdraw_mock.assert_awaited_once_with(
+ ASSET_BTC,
+ decimal.Decimal("0.5"),
+ self.NETWORK,
+ self.ADDRESS,
+ tag=None,
+ params={},
+ )
+
+ @pytest.mark.asyncio
+ async def test_pre_compute_with_tag_and_params(
+ self, portfolio_operators_list, backtesting_trader
+ ):
+ _config, exchange_manager, _trader = backtesting_trader
+ _, _, withdraw_op_class = portfolio_operators_list
+ _ensure_portfolio_config(backtesting_trader, {ASSET_BTC: 1.0})
+ tag = "memo-123"
+ params = {"fee": "low"}
+
+ with mock.patch.object(
+ exchange_manager.trader,
+ "withdraw",
+ mock.AsyncMock(return_value=self.WITHDRAW_RESULT),
+ ) as withdraw_mock:
+ operator = withdraw_op_class(
+ ASSET_BTC, self.NETWORK, self.ADDRESS, 0.1, tag=tag, params=params
+ )
+ await operator.pre_compute()
+
+ withdraw_mock.assert_awaited_once_with(
+ ASSET_BTC,
+ decimal.Decimal("0.1"),
+ self.NETWORK,
+ self.ADDRESS,
+ tag=tag,
+ params=params,
+ )
+
+ def test_compute_without_pre_compute(self, portfolio_operators_list):
+ _, _, withdraw_op_class = portfolio_operators_list
+ operator = withdraw_op_class(ASSET_BTC, self.NETWORK, self.ADDRESS, 0.1)
+ with pytest.raises(
+ octobot_commons.errors.DSLInterpreterError,
+ match="has not been pre_computed",
+ ):
+ operator.compute()
+
+ @pytest.mark.asyncio
+ async def test_withdraw_call_as_dsl(self, interpreter, backtesting_trader):
+ _config, exchange_manager, _trader = backtesting_trader
+ _ensure_portfolio_config(backtesting_trader, {ASSET_BTC: 2.0, ASSET_USDT: 1000})
+
+ with mock.patch.object(
+ exchange_manager.trader,
+ "withdraw",
+ mock.AsyncMock(return_value={"id": "wd-456", "status": "ok"}),
+ ) as withdraw_mock:
+ result = await interpreter.interprete(
+ f"withdraw('{ASSET_BTC}', '{self.NETWORK}', '{self.ADDRESS}', 1.5)"
+ )
+
+ assert result == {"created_withdrawals": [{"id": "wd-456", "status": "ok"}]}
+ withdraw_mock.assert_awaited_once_with(
+ ASSET_BTC,
+ decimal.Decimal("1.5"),
+ self.NETWORK,
+ self.ADDRESS,
+ tag=None,
+ params={},
+ )
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_public_data_operators/__init__.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_public_data_operators/__init__.py
new file mode 100644
index 000000000..4aa037e5d
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_public_data_operators/__init__.py
@@ -0,0 +1,270 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import mock
+import pytest
+import typing
+
+import numpy as np
+
+import octobot_commons.enums
+import octobot_commons.errors
+import octobot_commons.constants
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import tentacles.Meta.DSL_operators.exchange_operators as exchange_operators
+
+
+SYMBOL = "BTC/USDT"
+SYMBOL2 = "ETH/USDT"
+TIME_FRAME = "1h"
+TIME_FRAME2 = "4h"
+KLINE_SIGNATURE = 0.00666
+
+
+@pytest.fixture
+def historical_prices():
+ return np.array([
+ 81.59, 81.06, 82.87, 83, 83.61, 83.15, 82.84, 83.99, 84.55, 84.36, 85.53, 86.54, 86.89,
+ 87.77, 87.29, 87.18, 87.01, 89.02, 89.68, 90.36, 92.83, 93.37, 93.02, 93.45, 94.13,
+ 93.12, 93.18, 92.08, 92.82, 92.92, 92.25, 92.22
+ ])
+
+@pytest.fixture
+def historical_times(historical_prices):
+ return np.array([
+ i + 10 for i in range(len(historical_prices))
+ ], dtype=np.float64)
+
+
+@pytest.fixture
+def historical_volume(historical_prices):
+ base_volume_pattern = [
+        # will create an int np.array, which will be updated to float64 to comply with tulipy requirements
+ 903, 1000, 2342, 992, 900, 1231, 1211, 1113
+ ]
+ return np.array(base_volume_pattern*(len(historical_prices) // len(base_volume_pattern) + 1), dtype=np.float64)[:len(historical_prices)]
+
+
+def _get_candle_managers(historical_prices, historical_volume, historical_times):
+ btc_1h_candles_manager = mock.Mock(
+ get_symbol_open_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy()),
+ get_symbol_high_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy()),
+ get_symbol_low_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy()),
+ get_symbol_close_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy()),
+ get_symbol_volume_candles=mock.Mock(side_effect=lambda _ : historical_volume.copy()),
+ get_symbol_time_candles=mock.Mock(side_effect=lambda _ : historical_times.copy()),
+ time_candles_index=len(historical_times),
+ open_candles_index=len(historical_prices),
+ high_candles_index=len(historical_prices),
+ low_candles_index=len(historical_prices),
+ close_candles_index=len(historical_prices),
+ volume_candles_index=len(historical_volume),
+ time_candles=historical_times,
+ )
+ eth_1h_candles_manager = mock.Mock(
+ get_symbol_open_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() / 2),
+ get_symbol_high_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() / 2),
+ get_symbol_low_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() / 2),
+ get_symbol_close_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() / 2),
+ get_symbol_volume_candles=mock.Mock(side_effect=lambda _ : historical_volume.copy() / 2),
+ get_symbol_time_candles=mock.Mock(side_effect=lambda _ : historical_times.copy() / 2),
+ time_candles_index=len(historical_times),
+ open_candles_index=len(historical_prices),
+ high_candles_index=len(historical_prices),
+ low_candles_index=len(historical_prices),
+ close_candles_index=len(historical_prices),
+ volume_candles_index=len(historical_volume),
+ time_candles=historical_times / 2,
+ )
+ btc_4h_candles_manager = mock.Mock(
+ get_symbol_open_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() * 2),
+ get_symbol_high_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() * 2),
+ get_symbol_low_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() * 2),
+ get_symbol_close_candles=mock.Mock(side_effect=lambda _ : historical_prices.copy() * 2),
+ get_symbol_volume_candles=mock.Mock(side_effect=lambda _ : historical_volume.copy() * 2),
+ get_symbol_time_candles=mock.Mock(side_effect=lambda _ : historical_times.copy() * 2),
+ time_candles_index=len(historical_times),
+ open_candles_index=len(historical_prices),
+ high_candles_index=len(historical_prices),
+ low_candles_index=len(historical_prices),
+ close_candles_index=len(historical_prices),
+ volume_candles_index=len(historical_volume),
+ time_candles=historical_times * 2,
+ )
+ return (
+ btc_1h_candles_manager,
+ eth_1h_candles_manager,
+ btc_4h_candles_manager,
+ )
+
+
+def _get_kline(candles_manager: mock.Mock, signature: float, kline_time_delta: typing.Optional[float]) -> list:
+ kline = [0] * len(octobot_commons.enums.PriceIndexes)
+ kline[octobot_commons.enums.PriceIndexes.IND_PRICE_TIME.value] = (
+ candles_manager.get_symbol_time_candles(-1)[-1] + kline_time_delta
+ if kline_time_delta is not None
+ else candles_manager.get_symbol_time_candles(-1)[-1]
+ )
+ kline[octobot_commons.enums.PriceIndexes.IND_PRICE_OPEN.value] = candles_manager.get_symbol_open_candles(-1)[-1] + signature
+ kline[octobot_commons.enums.PriceIndexes.IND_PRICE_HIGH.value] = candles_manager.get_symbol_high_candles(-1)[-1] + signature
+ kline[octobot_commons.enums.PriceIndexes.IND_PRICE_LOW.value] = candles_manager.get_symbol_low_candles(-1)[-1] + signature
+ kline[octobot_commons.enums.PriceIndexes.IND_PRICE_CLOSE.value] = candles_manager.get_symbol_close_candles(-1)[-1] + signature
+ kline[octobot_commons.enums.PriceIndexes.IND_PRICE_VOL.value] = candles_manager.get_symbol_volume_candles(-1)[-1] + signature
+ return kline
+
+
+def _get_symbol_data_factory(
+ btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager, kline_type: str
+):
+ def _get_symbol_data(symbol: str, **kwargs):
+ symbol_candles = {}
+ one_h_candles_manager = btc_1h_candles_manager if symbol == SYMBOL else eth_1h_candles_manager if symbol == SYMBOL2 else None
+ four_h_candles_manager = btc_4h_candles_manager if symbol == SYMBOL else None # no 4h eth candles
+ if one_h_candles_manager is None and four_h_candles_manager is None:
+ raise octobot_commons.errors.InvalidParametersError(f"Symbol {symbol} not found")
+ symbol_candles[octobot_commons.enums.TimeFrames(TIME_FRAME)] = one_h_candles_manager
+ if four_h_candles_manager:
+ symbol_candles[octobot_commons.enums.TimeFrames(TIME_FRAME2)] = four_h_candles_manager
+ if kline_type == "no_kline":
+ symbol_klines = {}
+ elif kline_type == "same_time_kline":
+ symbol_klines = {
+ octobot_commons.enums.TimeFrames(TIME_FRAME): mock.Mock(kline=_get_kline(one_h_candles_manager, KLINE_SIGNATURE, None)),
+ }
+ if four_h_candles_manager:
+ symbol_klines[octobot_commons.enums.TimeFrames(TIME_FRAME2)] = mock.Mock(kline=_get_kline(four_h_candles_manager, KLINE_SIGNATURE, None))
+ elif kline_type == "new_time_kline":
+ symbol_klines = {
+ octobot_commons.enums.TimeFrames(TIME_FRAME): mock.Mock(kline=_get_kline(
+ one_h_candles_manager, KLINE_SIGNATURE,
+ octobot_commons.enums.TimeFramesMinutes[octobot_commons.enums.TimeFrames(TIME_FRAME)] * octobot_commons.constants.MINUTE_TO_SECONDS
+ )),
+ }
+ if four_h_candles_manager:
+ symbol_klines[octobot_commons.enums.TimeFrames(TIME_FRAME2)] = mock.Mock(kline=_get_kline(
+ four_h_candles_manager, KLINE_SIGNATURE,
+ octobot_commons.enums.TimeFramesMinutes[octobot_commons.enums.TimeFrames(TIME_FRAME2)] * octobot_commons.constants.MINUTE_TO_SECONDS
+ ))
+ else:
+ raise NotImplementedError(f"Kline type {kline_type} not implemented")
+ return mock.Mock(
+ symbol_candles=symbol_candles,
+ symbol_klines=symbol_klines
+ )
+ return _get_symbol_data
+
+
+@pytest.fixture
+def exchange_manager_with_candles(historical_prices, historical_volume, historical_times):
+ btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager = _get_candle_managers(
+ historical_prices, historical_volume, historical_times
+ )
+ return mock.Mock(
+ id="exchange_manager_id",
+ exchange_name="binance",
+ exchange_symbols_data=mock.Mock(
+ get_exchange_symbol_data=_get_symbol_data_factory(
+ btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager, "no_kline"
+ )
+ )
+ )
+
+
+@pytest.fixture
+def exchange_manager_with_candles_and_klines(historical_prices, historical_volume, historical_times):
+ btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager = _get_candle_managers(
+ historical_prices, historical_volume, historical_times
+ )
+ return mock.Mock(
+ id="exchange_manager_id",
+ exchange_name="binance",
+ exchange_symbols_data=mock.Mock(
+ get_exchange_symbol_data=_get_symbol_data_factory(
+ btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager, "same_time_kline"
+ )
+ )
+ )
+
+
+@pytest.fixture
+def exchange_manager_with_candles_and_new_candle_klines(historical_prices, historical_volume, historical_times):
+ btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager = _get_candle_managers(
+ historical_prices, historical_volume, historical_times
+ )
+ return mock.Mock(
+ id="exchange_manager_id",
+ exchange_name="binance",
+ exchange_symbols_data=mock.Mock(
+ get_exchange_symbol_data=_get_symbol_data_factory(
+ btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager, "new_time_kline"
+ )
+ )
+ )
+
+
+@pytest.fixture
+def candle_manager_by_time_frame_by_symbol(historical_prices, historical_volume, historical_times):
+ btc_1h_candles_manager, eth_1h_candles_manager, btc_4h_candles_manager = _get_candle_managers(
+ historical_prices, historical_volume, historical_times
+ )
+ return {
+ TIME_FRAME: {
+ SYMBOL: btc_1h_candles_manager,
+ SYMBOL2: eth_1h_candles_manager,
+ },
+ TIME_FRAME2: {
+ SYMBOL: btc_4h_candles_manager,
+ },
+ }
+
+
+@pytest.fixture
+def interpreter(exchange_manager_with_candles):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators() +
+ exchange_operators.create_ohlcv_operators(exchange_manager_with_candles, SYMBOL, TIME_FRAME)
+ )
+
+
+@pytest.fixture
+def interpreter_without_exchange_data():
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators() +
+ exchange_operators.create_ohlcv_operators(None, None, None, None)
+ )
+
+
+@pytest.fixture
+def interpreter_with_exchange_manager_and_klines(exchange_manager_with_candles_and_klines):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators() +
+ exchange_operators.create_ohlcv_operators(exchange_manager_with_candles_and_klines, SYMBOL, TIME_FRAME)
+ )
+
+
+@pytest.fixture
+def interpreter_with_exchange_manager_and_new_candle_klines(exchange_manager_with_candles_and_new_candle_klines):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators() +
+ exchange_operators.create_ohlcv_operators(exchange_manager_with_candles_and_new_candle_klines, SYMBOL, TIME_FRAME)
+ )
+
+
+@pytest.fixture
+def interpreter_with_candle_manager_by_time_frame_by_symbol(candle_manager_by_time_frame_by_symbol):
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators() +
+ exchange_operators.create_ohlcv_operators(None, SYMBOL, TIME_FRAME, candle_manager_by_time_frame_by_symbol)
+ )
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/test_mocks.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_public_data_operators/test_mocks.py
similarity index 98%
rename from packages/tentacles/Meta/DSL_operators/exchange_operators/tests/test_mocks.py
rename to packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_public_data_operators/test_mocks.py
index 2a5d22353..915f06a52 100644
--- a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/test_mocks.py
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_public_data_operators/test_mocks.py
@@ -20,7 +20,7 @@
import octobot_commons.enums
import octobot_commons.constants
-from tentacles.Meta.DSL_operators.exchange_operators.tests import (
+from tentacles.Meta.DSL_operators.exchange_operators.tests.exchange_public_data_operators import (
historical_prices,
historical_volume,
historical_times,
diff --git a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_public_data_operators/test_ohlcv_operators.py b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_public_data_operators/test_ohlcv_operators.py
index 75ca81a7f..ae121cec9 100644
--- a/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_public_data_operators/test_ohlcv_operators.py
+++ b/packages/tentacles/Meta/DSL_operators/exchange_operators/tests/exchange_public_data_operators/test_ohlcv_operators.py
@@ -22,13 +22,14 @@
import octobot_commons.enums
import octobot_commons.constants
import octobot_commons.logging
+import octobot_commons.dsl_interpreter as dsl_interpreter
import octobot_trading.api
import octobot_trading.constants
import tentacles.Meta.DSL_operators.exchange_operators as exchange_operators
import tentacles.Meta.DSL_operators.exchange_operators.exchange_public_data_operators.ohlcv_operators as ohlcv_operators
-from tentacles.Meta.DSL_operators.exchange_operators.tests import (
+from tentacles.Meta.DSL_operators.exchange_operators.tests.exchange_public_data_operators import (
SYMBOL,
TIME_FRAME,
KLINE_SIGNATURE,
@@ -43,6 +44,7 @@
interpreter_with_exchange_manager_and_klines,
interpreter_with_exchange_manager_and_new_candle_klines,
interpreter,
+ interpreter_without_exchange_data,
)
@@ -198,52 +200,63 @@ def _get_kline(symbol_data, time_frame):
assert "kline time (1000) is not equal to last candle time not the last time" in bot_log_mock.error.call_args[0][0]
-@pytest.mark.asyncio
-@pytest.mark.parametrize("operator", [
- ("open"),
- ("high"),
- ("low"),
- ("close"),
- ("volume"),
- ("time")
-])
-async def test_ohlcv_operators_dependencies(interpreter, operator, exchange_manager_with_candles):
- interpreter.prepare(f"{operator}")
- assert interpreter.get_dependencies() == [
- exchange_operators.ExchangeDataDependency(
- exchange_manager_id=octobot_trading.api.get_exchange_manager_id(exchange_manager_with_candles),
- symbol=SYMBOL,
- time_frame=TIME_FRAME,
- data_source=octobot_trading.constants.OHLCV_CHANNEL
- )
- ]
-
- # same dependency for all operators
- interpreter.prepare(f"{operator} + close + volume")
- assert interpreter.get_dependencies() == [
- exchange_operators.ExchangeDataDependency(
- exchange_manager_id=octobot_trading.api.get_exchange_manager_id(exchange_manager_with_candles),
- symbol=SYMBOL,
- time_frame=TIME_FRAME,
- data_source=octobot_trading.constants.OHLCV_CHANNEL
- )
- ]
-
- # SYMBOL + ETH/USDT dependency
- # => dynamic dependencies are not yet supported. Update this test when supported.
- interpreter.prepare(f"{operator} + close('ETH/USDT') + volume")
- assert interpreter.get_dependencies() == [
- exchange_operators.ExchangeDataDependency(
- exchange_manager_id=octobot_trading.api.get_exchange_manager_id(exchange_manager_with_candles),
- symbol=SYMBOL,
- time_frame=TIME_FRAME,
- data_source=octobot_trading.constants.OHLCV_CHANNEL
- ),
- # not identified as a dependency
- # exchange_operators.ExchangeDataDependency(
- # exchange_manager_id=octobot_trading.api.get_exchange_manager_id(exchange_manager_with_candles),
- # symbol="ETH/USDT",
- # time_frame=TIME_FRAME,
- # data_source=octobot_trading.constants.OHLCV_CHANNEL
- # ),
- ]
+class TestGetDependencies:
+ """Tests for get_dependencies using DSL syntax and the interpreter."""
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize("operator", [
+ "open",
+ "high",
+ "low",
+ "close",
+ "volume",
+ "time"
+ ])
+ async def test_ohlcv_operators_dependencies(
+ self, interpreter, interpreter_without_exchange_data, operator
+ ):
+ interpreter.prepare(f"{operator}")
+ assert interpreter.get_dependencies() == [
+ exchange_operators.ExchangeDataDependency(
+ symbol=SYMBOL,
+ time_frame=TIME_FRAME,
+ data_source=octobot_trading.constants.OHLCV_CHANNEL
+ )
+ ]
+
+ # same dependency for all operators
+ interpreter.prepare(f"{operator} + close + volume")
+ assert interpreter.get_dependencies() == [
+ exchange_operators.ExchangeDataDependency(
+ symbol=SYMBOL,
+ time_frame=TIME_FRAME,
+ data_source=octobot_trading.constants.OHLCV_CHANNEL
+ )
+ ]
+
+ # SYMBOL + ETH/USDT dependency
+ interpreter.prepare(f"{operator} + close('ETH/USDT') + volume")
+ assert interpreter.get_dependencies() == [
+ exchange_operators.ExchangeDataDependency(
+ symbol=SYMBOL,
+ time_frame=TIME_FRAME,
+ data_source=octobot_trading.constants.OHLCV_CHANNEL
+ ),
+ exchange_operators.ExchangeDataDependency(
+ symbol="ETH/USDT",
+ time_frame=None,
+ data_source=octobot_trading.constants.OHLCV_CHANNEL
+ ),
+ ]
+
+ # now without exchange manager: SYMBOL is not returned as a dependency: only dynamic dependencies are returned
+ interpreter_without_exchange_data.prepare(f"{operator}")
+ assert interpreter_without_exchange_data.get_dependencies() == []
+ interpreter_without_exchange_data.prepare(f"{operator} + close('ETH/USDT') + volume")
+ assert interpreter_without_exchange_data.get_dependencies() == [
+ exchange_operators.ExchangeDataDependency(
+ symbol="ETH/USDT",
+ time_frame=None,
+ data_source=octobot_trading.constants.OHLCV_CHANNEL
+ ),
+ ]
diff --git a/packages/tentacles/Meta/DSL_operators/python_std_operators/__init__.py b/packages/tentacles/Meta/DSL_operators/python_std_operators/__init__.py
index e743fc103..3afd51e36 100644
--- a/packages/tentacles/Meta/DSL_operators/python_std_operators/__init__.py
+++ b/packages/tentacles/Meta/DSL_operators/python_std_operators/__init__.py
@@ -63,6 +63,11 @@
SinOperator,
CosOperator,
OscillatorOperator,
+ ErrorOperator,
+)
+import tentacles.Meta.DSL_operators.python_std_operators.base_resetting_operators as dsl_interpreter_base_resetting_operators
+from tentacles.Meta.DSL_operators.python_std_operators.base_resetting_operators import (
+ WaitOperator,
)
import tentacles.Meta.DSL_operators.python_std_operators.base_name_operators as dsl_interpreter_base_name_operators
from tentacles.Meta.DSL_operators.python_std_operators.base_name_operators import (
@@ -117,9 +122,11 @@
"SinOperator",
"CosOperator",
"OscillatorOperator",
+ "WaitOperator",
"PiOperator",
"IfExpOperator",
"SubscriptOperator",
"SliceOperator",
"ListOperator",
+ "ErrorOperator",
]
diff --git a/packages/tentacles/Meta/DSL_operators/python_std_operators/base_call_operators.py b/packages/tentacles/Meta/DSL_operators/python_std_operators/base_call_operators.py
index edc1e1e22..3404d594f 100644
--- a/packages/tentacles/Meta/DSL_operators/python_std_operators/base_call_operators.py
+++ b/packages/tentacles/Meta/DSL_operators/python_std_operators/base_call_operators.py
@@ -122,8 +122,8 @@ class RoundOperator(dsl_interpreter.CallOperator):
def get_name() -> str:
return "round"
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
return [
dsl_interpreter.OperatorParameter(name="value", description="the value to round", required=True, type=list),
dsl_interpreter.OperatorParameter(name="digits", description="the number of digits to round to", required=False, type=int),
@@ -262,3 +262,17 @@ def compute(self) -> dsl_interpreter.ComputedOperatorParameterType:
oscillation_value = oscillation_range * oscillation
return base_value + oscillation_value
+
+
+class ErrorOperator(dsl_interpreter.CallOperator):
+ NAME = "error"
+ DESCRIPTION = "Raises a ErrorStatementEncountered exception with the given parameters."
+ EXAMPLE = "error('123-error')"
+
+ @staticmethod
+ def get_name() -> str:
+ return "error"
+
+ def compute(self):
+ params = self.get_computed_parameters()
+ raise octobot_commons.errors.ErrorStatementEncountered(*params)
diff --git a/packages/tentacles/Meta/DSL_operators/python_std_operators/base_resetting_operators.py b/packages/tentacles/Meta/DSL_operators/python_std_operators/base_resetting_operators.py
new file mode 100644
index 000000000..5d597086d
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/python_std_operators/base_resetting_operators.py
@@ -0,0 +1,89 @@
+# pylint: disable=missing-class-docstring,missing-function-docstring
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+import time
+import asyncio
+import random
+
+import octobot_commons.errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+
+
+"""
+Resetting operators are ReCallableOperatorMixin subclasses that can be called multiple
+times in order to execute a long-lasting operation that may take several steps to complete.
+"""
+
+
+class WaitOperator(dsl_interpreter.PreComputingCallOperator, dsl_interpreter.ReCallableOperatorMixin):
+ NAME = "wait"
+ DESCRIPTION = "Pauses execution for the specified number of seconds. If return_remaining_time is True, instantly returns the remaining time to wait."
+ EXAMPLE = "wait(5)"
+
+ @staticmethod
+ def get_name() -> str:
+ return "wait"
+
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
+ return [
+ dsl_interpreter.OperatorParameter(name="min_seconds", description="minimum number of seconds to wait", required=True, type=float),
+ dsl_interpreter.OperatorParameter(name="max_seconds", description="maximum number of seconds to wait", required=False, type=float, default=None),
+ dsl_interpreter.OperatorParameter(name="return_remaining_time", description="if True, instantly returns the remaining time to wait", required=False, type=bool, default=False),
+ ] + cls.get_re_callable_parameters()
+
+ async def pre_compute(self) -> None:
+ await super().pre_compute()
+ param_by_name = self.get_computed_value_by_parameter()
+ if param_by_name["return_remaining_time"]:
+ self.value = self._compute_remaining_time(param_by_name)
+ else:
+ await asyncio.sleep(self._compute_sleep_time(param_by_name))
+ self.value = None
+
+ def _compute_remaining_time(
+ self, param_by_name: dict[str, typing.Any]
+ ) -> typing.Optional[dict[str, typing.Any]]:
+ current_time = time.time()
+ if last_execution_result := self.get_last_execution_result(param_by_name):
+ last_execution_time = last_execution_result[
+ dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value
+ ]
+ waiting_time = (
+ last_execution_result[dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value]
+ - (current_time - last_execution_time)
+ )
+ else:
+ waiting_time = self._compute_sleep_time(param_by_name)
+ if waiting_time <= 0:
+ # done waiting
+ return None
+ return self.build_re_callable_result(
+ last_execution_time=current_time,
+ waiting_time=waiting_time,
+ )
+
+ def _compute_sleep_time(self, param_by_name: dict[str, typing.Any]) -> float:
+ min_seconds = param_by_name["min_seconds"]
+ if min_seconds < 0:
+ raise octobot_commons.errors.InvalidParametersError(
+ f"wait() requires a non-negative numeric argument (seconds), got {min_seconds}"
+ )
+ max_seconds = param_by_name["max_seconds"]
+ if max_seconds is None:
+ return min_seconds
+ return random.randrange(int(min_seconds) * 1000, int(max_seconds) * 1000) / 1000
diff --git a/packages/tentacles/Meta/DSL_operators/python_std_operators/tests/test_base_operators.py b/packages/tentacles/Meta/DSL_operators/python_std_operators/tests/test_base_operators.py
index 80bfe5a14..02b46ebb0 100644
--- a/packages/tentacles/Meta/DSL_operators/python_std_operators/tests/test_base_operators.py
+++ b/packages/tentacles/Meta/DSL_operators/python_std_operators/tests/test_base_operators.py
@@ -17,6 +17,7 @@
import pytest
import mock
import time
+
import octobot_commons.dsl_interpreter as dsl_interpreter
import octobot_commons.errors
@@ -203,3 +204,19 @@ async def test_interpreter_insupported_operations(interpreter):
await interpreter.interprete("oscillate(100, 10, -1)")
with pytest.raises(octobot_commons.errors.InvalidParametersError):
await interpreter.interprete("oscillate(100, 10, 0)")
+
+
+@pytest.mark.asyncio
+async def test_error_operator(interpreter):
+ assert "error" in interpreter.operators_by_name
+
+ with pytest.raises(octobot_commons.errors.ErrorStatementEncountered, match="123-error"):
+ await interpreter.interprete("error('123-error')")
+
+ with pytest.raises(octobot_commons.errors.ErrorStatementEncountered):
+ await interpreter.interprete("error")
+
+ with pytest.raises(octobot_commons.errors.ErrorStatementEncountered, match="123-error"):
+ await interpreter.interprete("error('123-error') if True else 'ok'")
+
+ assert await interpreter.interprete("error('123-error') if False else 'ok'") == "ok"
diff --git a/packages/tentacles/Meta/DSL_operators/python_std_operators/tests/test_base_resetting_operators.py b/packages/tentacles/Meta/DSL_operators/python_std_operators/tests/test_base_resetting_operators.py
new file mode 100644
index 000000000..2c7c4c1d0
--- /dev/null
+++ b/packages/tentacles/Meta/DSL_operators/python_std_operators/tests/test_base_resetting_operators.py
@@ -0,0 +1,163 @@
+# Drakkar-Software OctoBot-Commons
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import asyncio
+import time
+
+import pytest
+import mock
+
+import tentacles.Meta.DSL_operators.python_std_operators.base_resetting_operators as base_resetting_operators
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.errors
+
+
+@pytest.fixture
+def interpreter():
+ return dsl_interpreter.Interpreter(dsl_interpreter.get_all_operators())
+
+
+class TestWaitOperator:
+ @pytest.mark.asyncio
+ async def test_wait_operator(self, interpreter):
+ assert "wait" in interpreter.operators_by_name
+
+ # wait(0) returns None after 0 seconds (instant)
+ assert await interpreter.interprete("wait(0)") is None
+
+ with mock.patch.object(asyncio, "sleep", new=mock.AsyncMock()) as mock_sleep:
+ await interpreter.interprete("wait(1)")
+ mock_sleep.assert_awaited_once_with(1)
+
+ mock_sleep.reset_mock()
+
+ # wait with return_remaining_time=True returns ReCallingOperatorResult dict (wrapped format)
+ with mock.patch.object(time, "time", return_value=1000.0):
+ result = await interpreter.interprete("wait(5, return_remaining_time=True)")
+ assert dsl_interpreter.ReCallingOperatorResult.__name__ in result
+ inner = result[dsl_interpreter.ReCallingOperatorResult.__name__]
+ assert inner == {
+ "last_execution_result": {
+ dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value: 1000.0,
+ dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value: 5,
+ },
+ }
+ mock_sleep.assert_not_called()
+
+ @pytest.mark.asyncio
+ async def test_wait_operator_unit(self):
+ operator = base_resetting_operators.WaitOperator(1)
+
+ # _compute_sleep_time with min_seconds only
+ assert operator._compute_sleep_time({"min_seconds": 1, "max_seconds": None}) == 1
+ assert operator._compute_sleep_time({"min_seconds": 0, "max_seconds": None}) == 0
+
+ # _compute_sleep_time with negative raises
+ with pytest.raises(octobot_commons.errors.InvalidParametersError, match="non-negative"):
+ operator._compute_sleep_time({"min_seconds": -1, "max_seconds": None})
+
+ # _compute_sleep_time with min and max - returns value in range (mock random)
+ with mock.patch.object(base_resetting_operators.random, "randrange", return_value=1500):
+ assert operator._compute_sleep_time({"min_seconds": 1, "max_seconds": 2}) == 1.5
+
+ # _compute_remaining_time with no previous
+ with mock.patch.object(base_resetting_operators.time, "time", return_value=1000.0):
+ with mock.patch.object(base_resetting_operators.random, "randrange", return_value=3000):
+ result = operator._compute_remaining_time({
+ "min_seconds": 1, "max_seconds": 4,
+ dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY: None,
+ })
+ assert result is not None
+ last_result = result[dsl_interpreter.ReCallingOperatorResult.__name__]["last_execution_result"]
+ assert last_result[dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value] == 1000.0
+ assert last_result[dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value] == 3.0
+
+ # _compute_remaining_time with previous (ReCallingOperatorResult wrapped format)
+ with mock.patch.object(base_resetting_operators.time, "time", return_value=1002.0):
+ result = operator._compute_remaining_time({
+ "min_seconds": 1, "max_seconds": None,
+ dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY: {
+ dsl_interpreter.ReCallingOperatorResult.__name__: {
+ "last_execution_result": {
+ dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value: 1000.0,
+ dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value: 5.0,
+ },
+ },
+ },
+ })
+ assert result is not None
+ last_result = result[dsl_interpreter.ReCallingOperatorResult.__name__]["last_execution_result"]
+ assert last_result[dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value] == 1002.0
+ assert last_result[dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value] == 3.0 # 5 - (1002 - 1000)
+
+ # No mock: ensure random and time are actually called and return valid values
+ min_sec, max_sec = 1, 3
+ sleep_times = [
+ operator._compute_sleep_time({"min_seconds": min_sec, "max_seconds": max_sec})
+ for _ in range(20)
+ ]
+ for sleep_time in sleep_times:
+ assert min_sec <= sleep_time < max_sec
+ assert len(set(sleep_times)) > 1 # random produces varying values
+
+ result = operator._compute_remaining_time({
+ "min_seconds": 2, "max_seconds": 5,
+ dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY: None,
+ })
+ assert result is not None
+ last_result = result[dsl_interpreter.ReCallingOperatorResult.__name__]["last_execution_result"]
+ assert dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value in last_result
+ assert dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value in last_result
+ assert 2 <= last_result[dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value] < 5
+ assert last_result[dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value] > 0
+
+ previous = {
+ dsl_interpreter.ReCallingOperatorResult.__name__: {
+ "last_execution_result": {
+ dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value: time.time() - 1.0,
+ dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value: 5.0,
+ },
+ },
+ }
+ result = operator._compute_remaining_time({
+ "min_seconds": 1, "max_seconds": None,
+ dsl_interpreter.ReCallableOperatorMixin.LAST_EXECUTION_RESULT_KEY: previous,
+ })
+ assert result is not None
+ last_result = result[dsl_interpreter.ReCallingOperatorResult.__name__]["last_execution_result"]
+ prev_last_result = previous[dsl_interpreter.ReCallingOperatorResult.__name__]["last_execution_result"]
+ assert last_result[dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value] <= 5.0 # time has passed
+ assert last_result[dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value] >= prev_last_result[dsl_interpreter.ReCallingOperatorResultKeys.LAST_EXECUTION_TIME.value]
+
+ @pytest.mark.asyncio
+ async def test_wait_operator_pre_compute(self):
+ operator = base_resetting_operators.WaitOperator(0)
+ with mock.patch.object(asyncio, "sleep", new=mock.AsyncMock()) as mock_sleep:
+ await operator.pre_compute()
+ mock_sleep.assert_awaited_once_with(0)
+
+ operator_with_return = base_resetting_operators.WaitOperator(2, return_remaining_time=True)
+ with mock.patch.object(asyncio, "sleep", new=mock.AsyncMock()) as mock_sleep:
+ await operator_with_return.pre_compute()
+ mock_sleep.assert_not_awaited()
+ assert operator_with_return.value is not None
+ assert isinstance(operator_with_return.value, dict)
+ last_result = operator_with_return.value[dsl_interpreter.ReCallingOperatorResult.__name__]["last_execution_result"]
+ assert last_result[dsl_interpreter.ReCallingOperatorResultKeys.WAITING_TIME.value] == 2
+
+ @pytest.mark.asyncio
+ async def test_wait_operator_invalid_parameters(self, interpreter):
+ with pytest.raises(octobot_commons.errors.InvalidParametersError, match="non-negative"):
+ await interpreter.interprete("wait(-1)")
diff --git a/packages/tentacles/Meta/DSL_operators/ta_operators/tests/test_docs_examples.py b/packages/tentacles/Meta/DSL_operators/ta_operators/tests/test_docs_examples.py
index 5e19d8629..924e358aa 100644
--- a/packages/tentacles/Meta/DSL_operators/ta_operators/tests/test_docs_examples.py
+++ b/packages/tentacles/Meta/DSL_operators/ta_operators/tests/test_docs_examples.py
@@ -15,7 +15,7 @@
# License along with this library.
import pytest
-from tentacles.Meta.DSL_operators.exchange_operators.tests import (
+from tentacles.Meta.DSL_operators.exchange_operators.tests.exchange_public_data_operators import (
historical_prices,
historical_volume,
historical_times,
diff --git a/packages/tentacles/Meta/DSL_operators/ta_operators/tests/test_tulipy_technical_analysis_operators.py b/packages/tentacles/Meta/DSL_operators/ta_operators/tests/test_tulipy_technical_analysis_operators.py
index 0e10284e8..6b74f53fd 100644
--- a/packages/tentacles/Meta/DSL_operators/ta_operators/tests/test_tulipy_technical_analysis_operators.py
+++ b/packages/tentacles/Meta/DSL_operators/ta_operators/tests/test_tulipy_technical_analysis_operators.py
@@ -16,7 +16,7 @@
import pytest
import octobot_commons.errors
-from tentacles.Meta.DSL_operators.exchange_operators.tests import (
+from tentacles.Meta.DSL_operators.exchange_operators.tests.exchange_public_data_operators import (
historical_prices,
historical_volume,
historical_times,
diff --git a/packages/tentacles/Meta/DSL_operators/ta_operators/tulipy_technical_analysis_operators.py b/packages/tentacles/Meta/DSL_operators/ta_operators/tulipy_technical_analysis_operators.py
index 2f8d06afc..78f7652bc 100644
--- a/packages/tentacles/Meta/DSL_operators/ta_operators/tulipy_technical_analysis_operators.py
+++ b/packages/tentacles/Meta/DSL_operators/ta_operators/tulipy_technical_analysis_operators.py
@@ -63,8 +63,8 @@ class RSIOperator(ta_operator.TAOperator):
def get_name() -> str:
return "rsi"
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
return [
dsl_interpreter.OperatorParameter(name="data", description="the data to compute the RSI on", required=True, type=list),
dsl_interpreter.OperatorParameter(name="period", description="the period to use for the RSI", required=True, type=int),
@@ -84,8 +84,8 @@ class MACDOperator(ta_operator.TAOperator):
def get_name() -> str:
return "macd"
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
return [
dsl_interpreter.OperatorParameter(name="data", description="the data to compute the MACD on", required=True, type=list),
dsl_interpreter.OperatorParameter(name="short_period", description="the short period to use for the MACD", required=True, type=int),
@@ -110,8 +110,8 @@ class MAOperator(ta_operator.TAOperator):
def get_name() -> str:
return "ma"
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
return [
dsl_interpreter.OperatorParameter(name="data", description="the data to compute the moving average on", required=True, type=list),
dsl_interpreter.OperatorParameter(name="period", description="the period to use for the moving average", required=True, type=int),
@@ -131,8 +131,8 @@ class EMAOperator(ta_operator.TAOperator):
def get_name() -> str:
return "ema"
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
return [
dsl_interpreter.OperatorParameter(name="data", description="the data to compute the exponential moving average on", required=True, type=list),
dsl_interpreter.OperatorParameter(name="period", description="the period to use for the exponential moving average", required=True, type=int),
@@ -152,8 +152,8 @@ class VWMAOperator(ta_operator.TAOperator):
def get_name() -> str:
return "vwma"
- @staticmethod
- def get_parameters() -> list[dsl_interpreter.OperatorParameter]:
+ @classmethod
+ def get_parameters(cls) -> list[dsl_interpreter.OperatorParameter]:
return [
dsl_interpreter.OperatorParameter(name="data", description="the data to compute the volume weighted moving average on", required=True, type=list),
dsl_interpreter.OperatorParameter(name="volume", description="the volume data to use for the volume weighted moving average", required=True, type=list),
diff --git a/packages/tentacles/Meta/Keywords/scripting_library/backtesting/backtesting_data_collector.py b/packages/tentacles/Meta/Keywords/scripting_library/backtesting/backtesting_data_collector.py
index c03d87784..057766c48 100644
--- a/packages/tentacles/Meta/Keywords/scripting_library/backtesting/backtesting_data_collector.py
+++ b/packages/tentacles/Meta/Keywords/scripting_library/backtesting/backtesting_data_collector.py
@@ -27,7 +27,7 @@
import octobot_commons.logging
import octobot_trading.exchanges
-import octobot_trading.util.test_tools.exchange_data as exchange_data_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
import octobot_trading.util.test_tools.exchanges_test_tools as exchanges_test_tools
import octobot.community
diff --git a/packages/tentacles/Meta/Keywords/scripting_library/backtesting/backtesting_intialization.py b/packages/tentacles/Meta/Keywords/scripting_library/backtesting/backtesting_intialization.py
index f433ca81f..0acbd69c7 100644
--- a/packages/tentacles/Meta/Keywords/scripting_library/backtesting/backtesting_intialization.py
+++ b/packages/tentacles/Meta/Keywords/scripting_library/backtesting/backtesting_intialization.py
@@ -32,7 +32,7 @@
import octobot.backtesting.independent_backtesting
import octobot.backtesting.minimal_data_importer as minimal_data_importer
-import octobot_trading.util.test_tools.exchange_data as exchange_data_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
import octobot_trading.api
import tentacles.Meta.Keywords.scripting_library as scripting_library
diff --git a/packages/tentacles/Meta/Keywords/scripting_library/configuration/profile_data_configuration.py b/packages/tentacles/Meta/Keywords/scripting_library/configuration/profile_data_configuration.py
index 538e7bfe9..80c36b8f5 100644
--- a/packages/tentacles/Meta/Keywords/scripting_library/configuration/profile_data_configuration.py
+++ b/packages/tentacles/Meta/Keywords/scripting_library/configuration/profile_data_configuration.py
@@ -25,7 +25,6 @@
import octobot_commons.configuration as commons_configuration
import octobot_commons.profiles as commons_profiles
import octobot_commons.profiles.profile_data as commons_profile_data
-import octobot_commons.tentacles_management as tentacles_management
import octobot_commons.time_frame_manager as time_frame_manager
import octobot_commons.symbols
import octobot_commons.logging
@@ -34,8 +33,9 @@
import octobot_trading.constants as trading_constants
import octobot_trading.exchanges as exchanges
-import octobot_trading.util.test_tools.exchange_data as exchange_data_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
import octobot_trading.api
+import octobot_trading.enums
import octobot_tentacles_manager.api
import octobot_tentacles_manager.configuration
@@ -358,6 +358,16 @@ def _get_is_auth_required_exchange(
)
+def get_required_candles_count(profile_data: commons_profiles.ProfileData, min_candles_count: int) -> int:
+ for tentacle_config in profile_data.tentacles:
+ if common_constants.CONFIG_TENTACLES_REQUIRED_CANDLES_COUNT in tentacle_config.config:
+ return max(
+ tentacle_config.config[common_constants.CONFIG_TENTACLES_REQUIRED_CANDLES_COUNT],
+ min_candles_count
+ )
+ return min_candles_count
+
+
def _set_portfolio(
profile_data: commons_profiles.ProfileData,
portfolio: dict
@@ -365,6 +375,18 @@ def _set_portfolio(
profile_data.trader_simulator.starting_portfolio = get_formatted_portfolio(portfolio)
+def update_position_levarage(
+ position: exchange_data_import.PositionDetails, updated_contracts_by_symbol: dict
+):
+ leverage = float(
+ updated_contracts_by_symbol[
+ position.contract[octobot_trading.enums.ExchangeConstantsMarginContractColumns.PAIR.value]
+ ].current_leverage
+ )
+ position.contract[octobot_trading.enums.ExchangeConstantsMarginContractColumns.CURRENT_LEVERAGE.value] = leverage
+ position.position[octobot_trading.enums.ExchangeConstantsPositionColumns.LEVERAGE.value] = leverage
+
+
def get_formatted_portfolio(portfolio: dict):
for asset in portfolio.values():
if common_constants.PORTFOLIO_AVAILABLE not in asset:
@@ -496,7 +518,7 @@ def get_traded_coins(
def get_time_frames(
profile_data: commons_profiles.ProfileData, for_historical_data=False
-):
+) -> list[str]:
for config in get_config_by_tentacle(profile_data).values():
if evaluators_constants.STRATEGIES_REQUIRED_TIME_FRAME in config:
return config[evaluators_constants.STRATEGIES_REQUIRED_TIME_FRAME]
diff --git a/packages/tentacles/Meta/Keywords/scripting_library/exchanges/local_exchange.py b/packages/tentacles/Meta/Keywords/scripting_library/exchanges/local_exchange.py
index efe29e02a..8f99fe17e 100644
--- a/packages/tentacles/Meta/Keywords/scripting_library/exchanges/local_exchange.py
+++ b/packages/tentacles/Meta/Keywords/scripting_library/exchanges/local_exchange.py
@@ -2,7 +2,7 @@
import typing
import octobot_trading.exchanges as exchanges
-import octobot_trading.util.test_tools.exchange_data as exchange_data_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
import tentacles.Meta.Keywords.scripting_library.configuration.profile_data_configuration as profile_data_configuration
diff --git a/packages/tentacles/Meta/Keywords/scripting_library/tests/backtesting/test_collect_data_and_run_backtesting.py b/packages/tentacles/Meta/Keywords/scripting_library/tests/backtesting/test_collect_data_and_run_backtesting.py
index 6946de228..cb6f26266 100644
--- a/packages/tentacles/Meta/Keywords/scripting_library/tests/backtesting/test_collect_data_and_run_backtesting.py
+++ b/packages/tentacles/Meta/Keywords/scripting_library/tests/backtesting/test_collect_data_and_run_backtesting.py
@@ -21,7 +21,7 @@
import octobot_trading.api
import octobot_trading.exchanges.connectors.ccxt.ccxt_clients_cache as ccxt_clients_cache
-import octobot_trading.util.test_tools.exchange_data as exchange_data_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
import tentacles.Meta.Keywords.scripting_library as scripting_library
import tentacles.Trading.Mode.index_trading_mode.index_distribution as index_distribution
@@ -76,12 +76,12 @@ async def test_collect_candles_without_backend_and_run_backtesting(trading_mode_
)
# 2. collect candles
- ccxt_clients_cache._MARKETS_BY_EXCHANGE.clear()
+ ccxt_clients_cache._SHARED_MARKETS_EXCHANGE_BY_EXCHANGE.clear()
await scripting_library.init_exchange_market_status_and_populate_backtesting_exchange_data(
exchange_data, profile_data
)
# cached markets have been updated and now contain this exchange markets
- assert len(ccxt_clients_cache._MARKETS_BY_EXCHANGE) == 1
+ assert len(ccxt_clients_cache._SHARED_MARKETS_EXCHANGE_BY_EXCHANGE) == 1
# ensure collected datas are correct
assert len(exchange_data.markets) == 2
assert sorted([market.symbol for market in exchange_data.markets]) == ["BTC/USDT", "ETH/USDT"]
diff --git a/packages/tentacles/Trading/Exchange/hollaex/hollaex_exchange.py b/packages/tentacles/Trading/Exchange/hollaex/hollaex_exchange.py
index 35e140c59..4c9b7fda6 100644
--- a/packages/tentacles/Trading/Exchange/hollaex/hollaex_exchange.py
+++ b/packages/tentacles/Trading/Exchange/hollaex/hollaex_exchange.py
@@ -58,9 +58,12 @@ async def load_symbol_markets(
if self.exchange_manager.exchange_name not in _REFRESHED_EXCHANGE_FEE_TIERS_BY_EXCHANGE_NAME:
authenticated_cache = self.exchange_manager.exchange.requires_authentication_for_this_configuration_only()
# always update fees cache using all markets to avoid market filter side effects from the current client
- all_markets = ccxt_clients_cache.get_exchange_parsed_markets(
- ccxt_clients_cache.get_client_key(self.client, authenticated_cache)
- )
+ if trading_constants.USE_CCXT_SHARED_MARKETS_CACHE:
+ all_markets = list(self.client.markets.values())
+ else:
+ all_markets = ccxt_clients_cache.get_exchange_parsed_markets(
+ ccxt_clients_cache.get_client_key(self.client, authenticated_cache)
+ )
await self._refresh_exchange_fee_tiers(all_markets)
async def disable_quick_trade_only_pairs(self):
diff --git a/packages/tentacles/Trading/Mode/dsl_trading_mode/__init__.py b/packages/tentacles/Trading/Mode/dsl_trading_mode/__init__.py
new file mode 100644
index 000000000..463a795bb
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/dsl_trading_mode/__init__.py
@@ -0,0 +1 @@
+from .dsl_trading import DSLTradingMode
\ No newline at end of file
diff --git a/packages/tentacles/Trading/Mode/dsl_trading_mode/config/DSLTradingMode.json b/packages/tentacles/Trading/Mode/dsl_trading_mode/config/DSLTradingMode.json
new file mode 100644
index 000000000..2a78a5872
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/dsl_trading_mode/config/DSLTradingMode.json
@@ -0,0 +1,4 @@
+{
+ "default_config": [],
+ "required_strategies": []
+}
\ No newline at end of file
diff --git a/packages/tentacles/Trading/Mode/dsl_trading_mode/dsl_call_result.py b/packages/tentacles/Trading/Mode/dsl_trading_mode/dsl_call_result.py
new file mode 100644
index 000000000..45ec9de96
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/dsl_trading_mode/dsl_call_result.py
@@ -0,0 +1,26 @@
+# Drakkar-Software OctoBot
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import dataclasses
+import typing
+import octobot_commons.dataclasses
+
+@dataclasses.dataclass
+class DSLCallResult(octobot_commons.dataclasses.FlexibleDataclass):
+ statement: str
+ success: typing.Optional[bool] = None
+ result: typing.Optional[octobot_commons.dataclasses.FlexibleDataclass] = None
+ error: typing.Optional[str] = None
diff --git a/packages/tentacles/Trading/Mode/dsl_trading_mode/dsl_trading.py b/packages/tentacles/Trading/Mode/dsl_trading_mode/dsl_trading.py
new file mode 100644
index 000000000..941ac6f3b
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/dsl_trading_mode/dsl_trading.py
@@ -0,0 +1,148 @@
+# Drakkar-Software OctoBot
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+import octobot_commons.enums as commons_enums
+import octobot_commons.errors as commons_errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.signals as commons_signals
+
+import octobot_trading.modes as trading_modes
+import octobot_trading.enums as trading_enums
+
+import tentacles.Meta.DSL_operators as dsl_operators
+
+
+class DSLTradingModeConsumer(trading_modes.AbstractTradingModeConsumer):
+ # should not be used
+ pass
+
+
+class DSLTradingModeProducer(trading_modes.AbstractTradingModeProducer):
+ async def set_final_eval(
+ self, matrix_id: str, cryptocurrency: str, symbol: str, time_frame, trigger_source: str
+ ):
+ self.logger.info(
+            f"Executing DSL script triggered by {matrix_id=}, {cryptocurrency=}, {symbol=}, {time_frame=}, {trigger_source=}"
+ )
+ result = await self.trading_mode.interpret_dsl_script() # type: ignore
+ self.logger.info(f"DSL script successfully executed. Result: {result.result}")
+
+ @classmethod
+ def get_should_cancel_loaded_orders(cls) -> bool:
+ """
+ Called by cancel_symbol_open_orders => return true if OctoBot should cancel all orders for a symbol including
+ orders already existing when OctoBot started up
+ """
+ return True
+
+
+class DSLTradingMode(trading_modes.AbstractTradingMode):
+ MODE_PRODUCER_CLASSES = [DSLTradingModeProducer]
+ MODE_CONSUMER_CLASSES = [DSLTradingModeConsumer]
+ DSL_SCRIPT = "dsl_script"
+
+ def __init__(self, config, exchange_manager):
+ super().__init__(config, exchange_manager)
+ self.dsl_script: str = ""
+ self.interpreter: dsl_interpreter.Interpreter = None # type: ignore
+
+ def init_user_inputs(self, inputs: dict) -> None:
+ """
+ Called right before starting the tentacle, should define all the tentacle's user inputs unless
+ those are defined somewhere else.
+ """
+ default_config = self.get_default_config()
+ new_script = str(
+ self.UI.user_input(
+ self.DSL_SCRIPT, commons_enums.UserInputTypes.TEXT, default_config[self.DSL_SCRIPT],
+ inputs, other_schema_values={"minLength": 0},
+ title="DSL script: The DSL script to use for the trading mode."
+ )
+ )
+ self.set_dsl_script(new_script, raise_on_error=False, dependencies=None)
+
+
+ def set_dsl_script(
+ self,
+ dsl_script: str,
+ raise_on_error: bool = True,
+ dependencies: typing.Optional[commons_signals.SignalDependencies] = None
+ ):
+ if self.interpreter is None:
+ self.interpreter = self._create_interpreter(dependencies)
+ if self.dsl_script != dsl_script:
+ self.dsl_script = dsl_script
+ self.on_new_dsl_script(raise_on_error)
+
+ def on_new_dsl_script(
+ self, raise_on_error: bool = True,
+ ):
+ try:
+ self.interpreter.prepare(self.dsl_script)
+ self.logger.info(f"DSL script successfully loaded: '{self.dsl_script}'")
+ except commons_errors.DSLInterpreterError as err:
+ self.logger.exception(err, True, f"Error when parsing DSL script '{self.dsl_script}': {err}")
+ if raise_on_error:
+ raise err
+ except Exception as err:
+ self.logger.exception(err, True, f"Unexpected error when parsing DSL script '{self.dsl_script}': {err}")
+ if raise_on_error:
+ raise err
+
+ def _create_interpreter(
+ self, dependencies: typing.Optional[commons_signals.SignalDependencies]
+ ) -> dsl_interpreter.Interpreter:
+ return dsl_interpreter.Interpreter(
+ dsl_interpreter.get_all_operators()
+ + dsl_operators.create_ohlcv_operators(self.exchange_manager, None, None)
+ + dsl_operators.create_portfolio_operators(self.exchange_manager)
+ + dsl_operators.create_create_order_operators(
+ self.exchange_manager, trading_mode=self, dependencies=dependencies
+ )
+ + dsl_operators.create_cancel_order_operators(
+ self.exchange_manager, trading_mode=self, dependencies=dependencies
+ )
+ + dsl_operators.create_blockchain_wallet_operators(self.exchange_manager)
+ )
+
+ async def interpret_dsl_script(self) -> dsl_interpreter.DSLCallResult:
+ return await self.interpreter.compute_expression_with_result()
+
+ async def stop(self):
+ await super().stop()
+ self.interpreter = None # type: ignore
+
+ @classmethod
+ def get_default_config(
+ cls,
+ ) -> dict:
+ return {
+ cls.DSL_SCRIPT: "",
+ }
+
+ @classmethod
+ def get_is_symbol_wildcard(cls) -> bool:
+ return True
+
+ @classmethod
+ def get_supported_exchange_types(cls) -> list:
+ """
+ :return: The list of supported exchange types
+ """
+ return [
+ trading_enums.ExchangeTypes.SPOT,
+ trading_enums.ExchangeTypes.FUTURE,
+ ]
diff --git a/packages/tentacles/Trading/Mode/dsl_trading_mode/metadata.json b/packages/tentacles/Trading/Mode/dsl_trading_mode/metadata.json
new file mode 100644
index 000000000..38f9a1ddd
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/dsl_trading_mode/metadata.json
@@ -0,0 +1,6 @@
+{
+ "version": "1.2.0",
+ "origin_package": "OctoBot-Default-Tentacles",
+ "tentacles": ["DSLTradingMode"],
+ "tentacles-requirements": []
+}
\ No newline at end of file
diff --git a/packages/tentacles/Trading/Mode/dsl_trading_mode/resources/DSLTradingMode.md b/packages/tentacles/Trading/Mode/dsl_trading_mode/resources/DSLTradingMode.md
new file mode 100644
index 000000000..d1c756b3c
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/dsl_trading_mode/resources/DSLTradingMode.md
@@ -0,0 +1 @@
+DSL Trading mode executes the configured DSL statement every time it gets triggered by evaluators.
diff --git a/packages/tentacles/Trading/Mode/dsl_trading_mode/tests/__init__.py b/packages/tentacles/Trading/Mode/dsl_trading_mode/tests/__init__.py
new file mode 100644
index 000000000..974dd1623
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/dsl_trading_mode/tests/__init__.py
@@ -0,0 +1,15 @@
+# Drakkar-Software OctoBot-Tentacles
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
diff --git a/packages/tentacles/Trading/Mode/dsl_trading_mode/tests/test_dsl_trading_mode.py b/packages/tentacles/Trading/Mode/dsl_trading_mode/tests/test_dsl_trading_mode.py
new file mode 100644
index 000000000..a04bb0ef7
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/dsl_trading_mode/tests/test_dsl_trading_mode.py
@@ -0,0 +1,277 @@
+# Drakkar-Software OctoBot-Tentacles
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import logging
+import os
+
+import mock
+import pytest
+
+import async_channel.util as channel_util
+import octobot_backtesting.api as backtesting_api
+import octobot_commons.constants as commons_constants
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.enums as commons_enums
+import octobot_commons.errors as commons_errors
+import octobot_commons.tests.test_config as test_config
+import octobot_tentacles_manager.api as tentacles_manager_api
+import octobot_trading.enums as trading_enums
+import octobot_trading.exchange_channel as exchanges_channel
+import octobot_trading.exchanges as exchanges
+
+import tentacles.Trading.Mode.dsl_trading_mode.dsl_trading as dsl_trading
+import tests.test_utils.config as test_utils_config
+import tests.test_utils.test_exchanges as test_exchanges
+
+
+class TestDSLTradingModeClassMethods:
+ def test_get_default_config(self):
+ config = dsl_trading.DSLTradingMode.get_default_config()
+ assert config == {dsl_trading.DSLTradingMode.DSL_SCRIPT: ""}
+
+ def test_get_is_symbol_wildcard(self):
+ assert dsl_trading.DSLTradingMode.get_is_symbol_wildcard() is True
+
+ def test_get_supported_exchange_types(self):
+ supported = dsl_trading.DSLTradingMode.get_supported_exchange_types()
+ assert trading_enums.ExchangeTypes.SPOT in supported
+ assert trading_enums.ExchangeTypes.FUTURE in supported
+ assert len(supported) == 2
+
+
+class TestDSLTradingMode:
+ def test_init(self):
+ config = {}
+ exchange_manager = mock.Mock()
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+ assert mode.dsl_script == ""
+ assert mode.interpreter is None
+ assert mode.config is config
+ assert mode.exchange_manager is exchange_manager
+
+ def test_create_interpreter(self):
+ config = test_config.load_test_config()
+ exchange_manager = test_exchanges.get_test_exchange_manager(config, "binance")
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+
+ interpreter = mode._create_interpreter(None)
+
+ assert isinstance(interpreter, dsl_interpreter.Interpreter)
+ assert len(interpreter.operators_by_name) > 0
+ assert "close" in interpreter.operators_by_name # create_ohlcv_operators
+ assert "total" in interpreter.operators_by_name # create_portfolio_operators
+ assert "market" in interpreter.operators_by_name # create_create_order_operators
+ assert "cancel_order" in interpreter.operators_by_name # create_cancel_order_operators
+ assert "blockchain_wallet_balance" in interpreter.operators_by_name # create_blockchain_wallet_operators
+
+ @pytest.mark.asyncio
+ async def test_initialize(self):
+ tentacles_manager_api.reload_tentacle_info()
+ config = test_config.load_test_config()
+ config[commons_constants.CONFIG_SIMULATOR][commons_constants.CONFIG_STARTING_PORTFOLIO]["USDT"] = 2000
+ exchange_manager = test_exchanges.get_test_exchange_manager(config, "binance")
+ exchange_manager.tentacles_setup_config = test_utils_config.load_test_tentacles_config()
+ exchange_manager.is_simulated = True
+ exchange_manager.is_backtesting = True
+ exchange_manager.use_cached_markets = False
+ try:
+ backtesting = await backtesting_api.initialize_backtesting(
+ config,
+ exchange_ids=[exchange_manager.id],
+ matrix_id=None,
+ data_files=[
+ os.path.join(
+ test_config.TEST_CONFIG_FOLDER,
+ "AbstractExchangeHistoryCollector_1586017993.616272.data",
+ )
+ ],
+ )
+ exchange_manager.exchange = exchanges.ExchangeSimulator(
+ exchange_manager.config, exchange_manager, backtesting
+ )
+ await exchange_manager.exchange.initialize()
+ for exchange_channel_class_type in [
+ exchanges_channel.ExchangeChannel,
+ exchanges_channel.TimeFrameExchangeChannel,
+ ]:
+ await channel_util.create_all_subclasses_channel(
+ exchange_channel_class_type,
+ exchanges_channel.set_chan,
+ exchange_manager=exchange_manager,
+ )
+ trader = exchanges.TraderSimulator(config, exchange_manager)
+ await trader.initialize()
+
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+ mode.symbol = None if mode.get_is_symbol_wildcard() else "BTC/USDT"
+
+ await mode.initialize()
+
+ assert len(mode.producers) == 1
+ assert isinstance(mode.producers[0], dsl_trading.DSLTradingModeProducer)
+ assert len(mode.consumers) == 1
+ assert isinstance(mode.consumers[0], dsl_trading.DSLTradingModeConsumer)
+ assert mode.trading_config is not None
+ finally:
+ for importer in backtesting_api.get_importers(exchange_manager.exchange.backtesting):
+ await backtesting_api.stop_importer(importer)
+ await exchange_manager.exchange.backtesting.stop()
+ await exchange_manager.stop()
+
+ def test_init_user_inputs_same_script(self):
+ config = {}
+ exchange_manager = mock.Mock()
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+ mode.UI = mock.Mock()
+ mode.UI.user_input = mock.Mock(return_value="1 + 1")
+ mode.dsl_script = "1 + 1"
+ mode.on_new_dsl_script = mock.Mock()
+ mock_interpreter = mock.Mock()
+ with mock.patch.object(
+ dsl_trading.DSLTradingMode, "_create_interpreter",
+ mock.Mock(return_value=mock_interpreter)
+ ):
+ mode.init_user_inputs({})
+
+ mode.UI.user_input.assert_called_once_with(
+ dsl_trading.DSLTradingMode.DSL_SCRIPT,
+ commons_enums.UserInputTypes.TEXT,
+ "",
+ {},
+ other_schema_values={"minLength": 0},
+ title="DSL script: The DSL script to use for the trading mode."
+ )
+ mode.on_new_dsl_script.assert_not_called()
+
+ def test_init_user_inputs_new_script(self):
+ config = {}
+ exchange_manager = mock.Mock()
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+ mode.UI = mock.Mock()
+ mode.UI.user_input = mock.Mock(return_value="close[-1]")
+ mode.dsl_script = ""
+ mode.on_new_dsl_script = mock.Mock()
+ mock_interpreter = mock.Mock()
+ with mock.patch.object(
+ dsl_trading.DSLTradingMode, "_create_interpreter",
+ mock.Mock(return_value=mock_interpreter)
+ ):
+ mode.init_user_inputs({})
+
+ assert mode.dsl_script == "close[-1]"
+ mode.on_new_dsl_script.assert_called_once()
+
+ def test_on_new_dsl_script_success(self, caplog):
+ config = test_config.load_test_config()
+ exchange_manager = test_exchanges.get_test_exchange_manager(config, "binance")
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+
+ with caplog.at_level(logging.INFO):
+ mode.set_dsl_script("1 + 1")
+
+ assert "DSL script successfully loaded" in caplog.text
+ assert isinstance(mode.interpreter.get_dependencies(), list)
+
+ def test_on_new_dsl_script_dsl_interpreter_error(self, caplog):
+ config = test_config.load_test_config()
+ exchange_manager = test_exchanges.get_test_exchange_manager(config, "binance")
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+
+ with caplog.at_level(logging.ERROR):
+ mode.set_dsl_script("undefined_operator_xyz()", raise_on_error=False)
+
+ assert "Error when parsing DSL script" in caplog.text
+
+ def test_on_new_dsl_script_dsl_interpreter_error_raises_when_raise_on_error_true(
+ self, caplog
+ ):
+ config = test_config.load_test_config()
+ exchange_manager = test_exchanges.get_test_exchange_manager(config, "binance")
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+
+ with caplog.at_level(logging.ERROR), pytest.raises(commons_errors.DSLInterpreterError):
+ mode.set_dsl_script("undefined_operator_xyz()", raise_on_error=True)
+
+ assert "Error when parsing DSL script" in caplog.text
+
+ def test_on_new_dsl_script_unexpected_error(self, caplog):
+ config = test_config.load_test_config()
+ exchange_manager = test_exchanges.get_test_exchange_manager(config, "binance")
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+
+ with caplog.at_level(logging.ERROR):
+ mode.set_dsl_script("syntax error {", raise_on_error=False)
+
+ assert "Unexpected error when parsing DSL script" in caplog.text
+
+ def test_on_new_dsl_script_unexpected_error_raises_when_raise_on_error_true(
+ self, caplog
+ ):
+ config = test_config.load_test_config()
+ exchange_manager = test_exchanges.get_test_exchange_manager(config, "binance")
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+
+ with caplog.at_level(logging.ERROR), pytest.raises(SyntaxError):
+ mode.set_dsl_script("syntax error {", raise_on_error=True)
+
+ assert "Unexpected error when parsing DSL script" in caplog.text
+
+ @pytest.mark.asyncio
+ async def test_stop(self):
+ config = {}
+ exchange_manager = mock.Mock()
+ mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+ mode.interpreter = mock.Mock()
+ with mock.patch.object(
+ dsl_trading.trading_modes.AbstractTradingMode, "stop", mock.AsyncMock()
+ ) as super_stop_mock:
+ await mode.stop()
+ super_stop_mock.assert_awaited_once()
+ assert mode.interpreter is None
+
+
+class TestDSLTradingModeProducer:
+ @pytest.mark.asyncio
+ async def test_set_final_eval(self):
+ config = {}
+ exchange_manager = mock.Mock()
+ trading_mode = dsl_trading.DSLTradingMode(config, exchange_manager)
+ trading_mode.interpreter = mock.Mock()
+ dsl_result = mock.Mock(result=42)
+ trading_mode.interpreter.compute_expression_with_result = mock.AsyncMock(
+ return_value=dsl_result
+ )
+
+ producer = dsl_trading.DSLTradingModeProducer(
+ channel=mock.Mock(),
+ config=config,
+ trading_mode=trading_mode,
+ exchange_manager=exchange_manager
+ )
+ producer.logger = mock.Mock()
+
+ await producer.set_final_eval(
+ matrix_id="matrix_1",
+ cryptocurrency="Bitcoin",
+ symbol="BTC/USDT",
+ time_frame="1h",
+ trigger_source="ohlcv"
+ )
+
+ assert producer.logger.info.call_count == 2
+ assert "matrix_1" in producer.logger.info.call_args_list[0][0][0]
+ assert "Bitcoin" in producer.logger.info.call_args_list[0][0][0]
+ assert "BTC/USDT" in producer.logger.info.call_args_list[0][0][0]
+ trading_mode.interpreter.compute_expression_with_result.assert_awaited_once()
diff --git a/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/actions_params.py b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/actions_params.py
index 3fa4db0dc..0afbe0c11 100644
--- a/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/actions_params.py
+++ b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/actions_params.py
@@ -30,11 +30,13 @@ class EnsureExchangeBalanceParams(octobot_commons.dataclasses.FlexibleDataclass)
class EnsureBlockchainWalletBalanceParams(octobot_commons.dataclasses.FlexibleDataclass):
asset: str
holdings: float
- wallet_details: blockchain_wallets.BlockchainWalletParameters # details of the wallet to transfer from
+ blockchain_descriptor: blockchain_wallets.BlockchainDescriptor # details of the blockchain to transfer from
+ wallet_descriptor: blockchain_wallets.WalletDescriptor # details of the wallet to transfer from
@dataclasses.dataclass
class WithdrawFundsParams(octobot_commons.dataclasses.FlexibleDataclass):
+ # mapped to DSL "withdraw" parameters
asset: str
network: str # network to withdraw to
address: str # recipient address of the withdrawal
@@ -45,8 +47,10 @@ class WithdrawFundsParams(octobot_commons.dataclasses.FlexibleDataclass):
@dataclasses.dataclass
class TransferFundsParams(octobot_commons.dataclasses.FlexibleDataclass):
+ # mapped to DSL "blockchain_wallet_transfer" parameters
asset: str
amount: float
address: typing.Optional[str] # recipient address of the transfer
- wallet_details: blockchain_wallets.BlockchainWalletParameters # details of the wallet to transfer from
+ blockchain_descriptor: blockchain_wallets.BlockchainDescriptor # details of the blockchain to transfer from
+ wallet_descriptor: blockchain_wallets.WalletDescriptor # details of the wallet to transfer from
destination_exchange: typing.Optional[str] = None # recipient address of the transfer on the exchange
diff --git a/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/signal_to_dsl_translator.py b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/signal_to_dsl_translator.py
new file mode 100644
index 000000000..c0a3ba3b3
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/signal_to_dsl_translator.py
@@ -0,0 +1,215 @@
+# Drakkar-Software OctoBot-Tentacles
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import json
+import typing
+
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import tentacles.Meta.DSL_operators as dsl_operators
+import octobot_trading.exchanges as trading_exchanges
+
+TRADING_VIEW_TO_DSL_PARAM = {
+ "SYMBOL": "symbol",
+ "VOLUME": "amount",
+ "PRICE": "price",
+ "REDUCE_ONLY": "reduce_only",
+ "TAG": "tag",
+ "STOP_PRICE": "stop_loss_price",
+ "TAKE_PROFIT_PRICE": "take_profit_prices",
+ "TAKE_PROFIT_VOLUME_RATIO": "take_profit_volume_percents",
+ "EXCHANGE_ORDER_IDS": "exchange_order_ids",
+ "SIDE": "side",
+ "TRAILING_PROFILE": "trailing_profile",
+ "CANCEL_POLICY": "cancel_policy",
+ "CANCEL_POLICY_PARAMS": "cancel_policy_params",
+}
+
+PARAM_PREFIX = "PARAM_"
+
+
+class SignalToDSLTranslator:
+ def __init__(self, exchange_manager: trading_exchanges.ExchangeManager):
+ self.exchange_manager = exchange_manager
+
+ def _map_other_params_to_dsl(
+ self, other_params: dict[str, typing.Any], operator_params: list[dsl_interpreter.OperatorParameter]
+ ) -> dict[str, typing.Any]:
+ operator_param_names = {p.name for p in operator_params}
+ dsl_params = {}
+ params_dict = {}
+ for key, value in other_params.items():
+ if not isinstance(key, str):
+ continue
+ dsl_key = TRADING_VIEW_TO_DSL_PARAM.get(key, key.lower())
+ if key.startswith(PARAM_PREFIX):
+ param_name = key[len(PARAM_PREFIX):]
+ params_dict[param_name] = value
+ elif dsl_key in operator_param_names:
+ dsl_params[dsl_key] = value
+ if params_dict and "params" in operator_param_names:
+ dsl_params["params"] = params_dict
+ return dsl_params
+
+ def _format_value(self, value: typing.Any, param_type: type) -> str:
+ if value is None:
+ return "None"
+ if isinstance(value, bool):
+ return "True" if value else "False"
+ if isinstance(value, (int, float)):
+ return repr(value)
+ if isinstance(value, str):
+ try:
+ parsed = json.loads(value)
+ if isinstance(parsed, list):
+ return repr(parsed)
+ if isinstance(parsed, dict):
+ return repr(parsed)
+ except (json.JSONDecodeError, TypeError):
+ pass
+ return repr(value)
+ if isinstance(value, list):
+ return repr(value)
+ if isinstance(value, dict):
+ return repr(value)
+ return repr(value)
+
+ def _adapt_special_format_values_for_param(
+ self, value: typing.Any, param_name: str, param_type: type
+ ) -> typing.Any:
+ if param_name == "take_profit_prices":
+ if isinstance(value, list):
+ return value
+ if isinstance(value, (str, int, float)):
+ return [value] if value else []
+ if param_name == "take_profit_volume_percents":
+ if isinstance(value, list):
+ return [float(v) for v in value]
+ if isinstance(value, (str, int, float)):
+ return [float(value)] if value else []
+ if param_name == "exchange_order_ids" and isinstance(value, str):
+ return [oid.strip() for oid in value.split(",") if oid.strip()]
+ return value
+
+ def _get_operator_class(
+ self,
+ keyword: str,
+ ) -> typing.Optional[dsl_interpreter.Operator]:
+ allowed_operators = self._get_allowed_keywords()
+ for op in allowed_operators:
+ if op.get_name() == keyword:
+ return op
+ return None
+
+ def _collect_list_param_values(
+ self, params: dict[str, typing.Any], base_key: str
+ ) -> list[typing.Any]:
+ standalone = params.get(base_key)
+ numbered: list[tuple[int, typing.Any]] = []
+ prefix = f"{base_key}_"
+ for key, value in params.items():
+ if not isinstance(key, str) or not key.startswith(prefix):
+ continue
+ suffix = key[len(prefix):]
+ try:
+ index = int(suffix)
+ numbered.append((index, value))
+ except ValueError:
+ continue
+ numbered.sort(key=lambda item: item[0])
+ if standalone is not None and standalone != "":
+ return [standalone] + [v for _, v in numbered]
+ return [v for _, v in numbered]
+
+ def _pre_process_params(
+ self,
+ operator_class: dsl_interpreter.Operator,
+ params: dict[str, typing.Any]
+ ) -> dict[str, typing.Any]:
+ result = {
+ k: v
+ for k, v in params.items()
+ if not isinstance(k, str)
+ or not (
+ k.startswith("TAKE_PROFIT_PRICE_") or k.startswith("TAKE_PROFIT_VOLUME_RATIO_")
+ )
+ }
+ if operator_class.get_name() == "stop_loss" and "STOP_PRICE" in params:
+            # special case for stop loss price: used as price when creating a stop loss order
+ result["PRICE"] = params["STOP_PRICE"]
+ result.pop("STOP_PRICE")
+ for base_key in ("TAKE_PROFIT_PRICE", "TAKE_PROFIT_VOLUME_RATIO"):
+ if base_key in params or any(
+ isinstance(k, str) and k.startswith(f"{base_key}_")
+ for k in params
+ ):
+ values = self._collect_list_param_values(params, base_key)
+ if values:
+ result[base_key] = values
+ elif base_key in params:
+ result[base_key] = [params[base_key]] if params[base_key] not in (None, "") else []
+ return result
+
+ def _resolve_operator_params(
+ self,
+ operator_class: dsl_interpreter.Operator,
+ params: dict[str, typing.Any],
+ other_params: dict[str, typing.Any]
+ ) -> list[str]:
+ operator_params = operator_class.get_parameters()
+ adapted_other = self._pre_process_params(operator_class, other_params)
+ mapped_other = self._map_other_params_to_dsl(adapted_other, operator_params)
+ merged = dict(params)
+ for dsl_key, value in mapped_other.items():
+ if dsl_key not in merged:
+ merged[dsl_key] = value
+ required_params = [p for p in operator_params if p.required]
+ optional_params = [p for p in operator_params if not p.required]
+ positional_parts = []
+ keyword_parts = []
+ for param_def in required_params:
+ name = param_def.name
+ if name in merged:
+ value = merged[name]
+ value = self._adapt_special_format_values_for_param(value, name, param_def.type)
+ positional_parts.append(
+ self._format_value(value, param_def.type)
+ )
+ for param_def in optional_params:
+ name = param_def.name
+ if name in merged:
+ value = merged[name]
+ value = self._adapt_special_format_values_for_param(value, name, param_def.type)
+ keyword_parts.append(f"{name}={self._format_value(value, param_def.type)}")
+ return positional_parts + keyword_parts
+
+ def translate_signal(
+ self, keyword: typing.Optional[str], params: dict[str, typing.Any], other_params: dict[str, typing.Any]
+ ) -> str:
+ if not keyword:
+ return "None"
+ if operator_class := self._get_operator_class(keyword):
+ all_params = self._resolve_operator_params(operator_class, params, other_params)
+ return f"{operator_class.get_name()}({', '.join(all_params)})"
+ return "None"
+
+
+ def _get_allowed_keywords(self) -> list[dsl_interpreter.Operator]:
+ return (
+ dsl_operators.create_create_order_operators(self.exchange_manager) +
+ dsl_operators.create_cancel_order_operators(self.exchange_manager) +
+ dsl_operators.create_blockchain_wallet_operators(self.exchange_manager) +
+ dsl_operators.create_portfolio_operators(self.exchange_manager)
+ ) # type: ignore
diff --git a/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/tests/test_trading_view_signals_trading.py b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/tests/test_trading_view_signals_trading.py
index ae7614e29..420321f5d 100644
--- a/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/tests/test_trading_view_signals_trading.py
+++ b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/tests/test_trading_view_signals_trading.py
@@ -24,7 +24,9 @@
import octobot_backtesting.api as backtesting_api
import octobot_commons.asyncio_tools as asyncio_tools
import octobot_commons.constants as commons_constants
+import octobot_commons.errors as commons_errors
import octobot_commons.symbols as commons_symbols
+import octobot_commons.dsl_interpreter as dsl_interpreter
import octobot_commons.tests.test_config as test_config
import octobot_trading.constants as trading_constants
import octobot_trading.api as trading_api
@@ -419,9 +421,7 @@ async def test_trading_view_signal_callback(tools):
await mode._trading_view_signal_callback({"metadata": signal})
signal_callback_mock.assert_awaited_once_with({
mode.EXCHANGE_KEY: exchange_manager.exchange_name,
- mode.SYMBOL_KEY: commons_symbols.parse_symbol(symbol).merged_str_base_and_quote_only_symbol(
- market_separator=""
- ),
+ mode.SYMBOL_KEY: str(commons_symbols.parse_symbol(symbol)),
mode.SIGNAL_KEY: "BUY",
"HEELLO": True,
"PLOP": False,
@@ -500,84 +500,72 @@ async def test_trading_view_signal_callback(tools):
signal_callback_mock.reset_mock()
logger_info_mock.reset_mock()
+ # Test WITHDRAW_FUNDS signal with full params - relevant signal, calls signal_callback
+ with mock.patch('octobot_trading.constants.ALLOW_FUNDS_TRANSFER', True):
+ signal = f"""
+ EXCHANGE={exchange_manager.exchange_name}
+ SYMBOL={symbol}
+ SIGNAL={mode.WITHDRAW_FUNDS_SIGNAL}
+ ASSET=BTC
+ AMOUNT=0.1
+ NETWORK=bitcoin
+ ADDRESS=1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa
+ """
+ await mode._trading_view_signal_callback({"metadata": signal})
+ signal_callback_mock.assert_awaited_once()
+ call_args = signal_callback_mock.call_args[0][0]
+ assert call_args[mode.SIGNAL_KEY] == mode.WITHDRAW_FUNDS_SIGNAL
+ assert call_args["ASSET"] == "BTC"
+ assert call_args["AMOUNT"] == "0.1"
+ assert call_args["NETWORK"] == "bitcoin"
+ signal_callback_mock.reset_mock()
+
+ # Test TRANSFER_FUNDS signal with full params - requires SYMBOL for is_relevant_signal
+ # (non-order signals with missing EXCHANGE/SYMBOL go to _process_or_ignore_non_order_signal)
+ with mock.patch('octobot_trading.constants.ALLOW_FUNDS_TRANSFER', True):
+ signal = f"""
+ EXCHANGE={exchange_manager.exchange_name}
+ SYMBOL={symbol}
+ SIGNAL={mode.TRANSFER_FUNDS_SIGNAL}
+ """
+ await mode._trading_view_signal_callback({"metadata": signal})
+ signal_callback_mock.assert_awaited_once()
+ call_args = signal_callback_mock.call_args[0][0]
+ assert call_args[mode.SIGNAL_KEY] == mode.TRANSFER_FUNDS_SIGNAL
+ signal_callback_mock.reset_mock()
+
async def test_signal_callback(tools):
exchange_manager, symbol, mode, producer, consumer = tools
context = script_keywords.get_base_context(producer.trading_mode)
- with mock.patch.object(producer, "_set_state", mock.AsyncMock()) as _set_state_mock, \
- mock.patch.object(mode, "set_leverage", mock.AsyncMock()) as set_leverage_mock:
+ with mock.patch.object(dsl_interpreter.Interpreter, "compute_expression", mock.AsyncMock()) as compute_expression_mock:
await producer.signal_callback({
mode.EXCHANGE_KEY: exchange_manager.exchange_name,
- mode.SYMBOL_KEY: "unused",
+ mode.SYMBOL_KEY: "BTC/USDT",
mode.SIGNAL_KEY: "BUY",
+ mode.VOLUME_KEY: "0.0001",
}, context)
- _set_state_mock.assert_awaited_once()
- set_leverage_mock.assert_not_called()
- assert _set_state_mock.await_args[0][1] == symbol
- assert _set_state_mock.await_args[0][2] == trading_view_signals_trading.SignalActions.CREATE_ORDERS
- assert _set_state_mock.await_args[0][3] == trading_enums.EvaluatorStates.VERY_LONG
- assert compare_dict_with_nan(_set_state_mock.await_args[0][4], {
- consumer.PRICE_KEY: trading_constants.ZERO,
- consumer.VOLUME_KEY: trading_constants.ZERO,
- consumer.STOP_PRICE_KEY: decimal.Decimal(math.nan),
- consumer.STOP_ONLY: False,
- consumer.TAKE_PROFIT_PRICE_KEY: decimal.Decimal(math.nan),
- consumer.ADDITIONAL_TAKE_PROFIT_PRICES_KEY: [],
- consumer.ADDITIONAL_TAKE_PROFIT_VOLUME_RATIOS_KEY: [],
- consumer.REDUCE_ONLY_KEY: False,
- consumer.TAG_KEY: None,
- consumer.TRAILING_PROFILE: None,
- consumer.EXCHANGE_ORDER_IDS: None,
- consumer.LEVERAGE: None,
- consumer.ORDER_EXCHANGE_CREATION_PARAMS: {},
- consumer.CANCEL_POLICY: None,
- consumer.CANCEL_POLICY_PARAMS: None,
- })
- assert compare_dict_with_nan(_set_state_mock.await_args[0][5], {
- mode.EXCHANGE_KEY: exchange_manager.exchange_name,
- mode.SYMBOL_KEY: "unused",
- mode.SIGNAL_KEY: "BUY",
- })
- _set_state_mock.reset_mock()
+ assert mode.dsl_script == "market('buy', 'BTC/USDT', '0.0001')"
+ compute_expression_mock.assert_awaited_once()
+ compute_expression_mock.reset_mock()
await producer.signal_callback({
mode.EXCHANGE_KEY: exchange_manager.exchange_name,
mode.SYMBOL_KEY: "unused",
mode.SIGNAL_KEY: "SELL",
mode.ORDER_TYPE_SIGNAL: "stop",
- mode.STOP_PRICE_KEY: 25000,
+ mode.STOP_PRICE_KEY: "-5%",
mode.VOLUME_KEY: "12%",
mode.TAG_KEY: "stop_1_tag",
mode.CANCEL_POLICY: trading_personal_data.ExpirationTimeOrderCancelPolicy.__name__,
mode.CANCEL_POLICY_PARAMS: {
"expiration_time": 1000.0,
},
- consumer.EXCHANGE_ORDER_IDS: None,
-
+ mode.EXCHANGE_ORDER_IDS: None,
}, context)
- set_leverage_mock.assert_not_called()
- _set_state_mock.assert_awaited_once()
- assert _set_state_mock.await_args[0][1] == symbol
- assert _set_state_mock.await_args[0][2] == trading_view_signals_trading.SignalActions.CREATE_ORDERS
- assert _set_state_mock.await_args[0][3] == trading_enums.EvaluatorStates.SHORT
- assert compare_dict_with_nan(_set_state_mock.await_args[0][4], {
- consumer.PRICE_KEY: trading_constants.ZERO,
- consumer.VOLUME_KEY: decimal.Decimal("1.2"),
- consumer.STOP_PRICE_KEY: decimal.Decimal("25000"),
- consumer.STOP_ONLY: True,
- consumer.TAKE_PROFIT_PRICE_KEY: decimal.Decimal(math.nan),
- consumer.ADDITIONAL_TAKE_PROFIT_PRICES_KEY: [],
- consumer.ADDITIONAL_TAKE_PROFIT_VOLUME_RATIOS_KEY: [],
- consumer.REDUCE_ONLY_KEY: False,
- consumer.TAG_KEY: "stop_1_tag",
- consumer.EXCHANGE_ORDER_IDS: None,
- consumer.TRAILING_PROFILE: None,
- consumer.LEVERAGE: None,
- consumer.ORDER_EXCHANGE_CREATION_PARAMS: {},
- consumer.CANCEL_POLICY: trading_personal_data.ExpirationTimeOrderCancelPolicy.__name__,
- consumer.CANCEL_POLICY_PARAMS: {'expiration_time': 1000.0},
- })
- _set_state_mock.reset_mock()
+ assert mode.dsl_script == "stop_loss('sell', 'unused', '12%', '-5%', tag='stop_1_tag', cancel_policy='ExpirationTimeOrderCancelPolicy', cancel_policy_params={'expiration_time': 1000.0})"
+ compute_expression_mock.assert_awaited_once()
+ compute_expression_mock.reset_mock()
await producer.signal_callback({
mode.EXCHANGE_KEY: exchange_manager.exchange_name,
@@ -591,38 +579,14 @@ async def test_signal_callback(tools):
mode.TAKE_PROFIT_PRICE_KEY: "22222",
mode.EXCHANGE_ORDER_IDS: ["ab1", "aaaaa"],
mode.CANCEL_POLICY: "chainedorderfillingpriceordercancelpolicy",
- consumer.LEVERAGE: 22,
+ mode.LEVERAGE: 22,
"PARAM_TAG_1": "ttt",
"PARAM_Plop": False,
}, context)
- set_leverage_mock.assert_called_once()
- assert set_leverage_mock.mock_calls[0].args[2] == decimal.Decimal(22)
- set_leverage_mock.reset_mock()
- _set_state_mock.assert_awaited_once()
- assert _set_state_mock.await_args[0][1] == symbol
- assert _set_state_mock.await_args[0][2] == trading_view_signals_trading.SignalActions.CREATE_ORDERS
- assert _set_state_mock.await_args[0][3] == trading_enums.EvaluatorStates.SHORT
- assert compare_dict_with_nan(_set_state_mock.await_args[0][4], {
- consumer.PRICE_KEY: decimal.Decimal("123"),
- consumer.VOLUME_KEY: decimal.Decimal("1.2"),
- consumer.STOP_PRICE_KEY: decimal.Decimal("12"),
- consumer.STOP_ONLY: False,
- consumer.TAKE_PROFIT_PRICE_KEY: decimal.Decimal("22222"),
- consumer.ADDITIONAL_TAKE_PROFIT_PRICES_KEY: [],
- consumer.ADDITIONAL_TAKE_PROFIT_VOLUME_RATIOS_KEY: [],
- consumer.REDUCE_ONLY_KEY: True,
- consumer.TAG_KEY: None,
- mode.EXCHANGE_ORDER_IDS: ["ab1", "aaaaa"],
- consumer.TRAILING_PROFILE: None,
- consumer.LEVERAGE: 22,
- consumer.ORDER_EXCHANGE_CREATION_PARAMS: {
- "TAG_1": "ttt",
- "Plop": False,
- },
- consumer.CANCEL_POLICY: trading_personal_data.ChainedOrderFillingPriceOrderCancelPolicy.__name__,
- consumer.CANCEL_POLICY_PARAMS: None,
- })
- _set_state_mock.reset_mock()
+ expected_dsl = "limit('sell', 'unused', '12%', '123', reduce_only=True, take_profit_prices=['22222'], stop_loss_price='12', cancel_policy='chainedorderfillingpriceordercancelpolicy', params={'TAG_1': 'ttt', 'Plop': False})"
+ assert mode.dsl_script == expected_dsl
+ compute_expression_mock.assert_awaited_once()
+ compute_expression_mock.reset_mock()
# with trailing profile and TP volume
await producer.signal_callback({
@@ -639,39 +603,15 @@ async def test_signal_callback(tools):
mode.EXCHANGE_ORDER_IDS: ["ab1", "aaaaa"],
mode.TRAILING_PROFILE: "fiLLED_take_profit",
mode.CANCEL_POLICY: "expirationtimeordercancelpolicy",
- mode.CANCEL_POLICY_PARAMS: "{'expiration_time': 1000.0}",
- consumer.LEVERAGE: 22,
+ mode.CANCEL_POLICY_PARAMS: {"expiration_time": 1000.0},
+ mode.LEVERAGE: 22,
"PARAM_TAG_1": "ttt",
"PARAM_Plop": False,
}, context)
- set_leverage_mock.assert_called_once()
- assert set_leverage_mock.mock_calls[0].args[2] == decimal.Decimal(22)
- set_leverage_mock.reset_mock()
- _set_state_mock.assert_awaited_once()
- assert _set_state_mock.await_args[0][1] == symbol
- assert _set_state_mock.await_args[0][2] == trading_view_signals_trading.SignalActions.CREATE_ORDERS
- assert _set_state_mock.await_args[0][3] == trading_enums.EvaluatorStates.SHORT
- assert compare_dict_with_nan(_set_state_mock.await_args[0][4], {
- consumer.PRICE_KEY: decimal.Decimal("123"),
- consumer.VOLUME_KEY: decimal.Decimal("1.2"),
- consumer.STOP_PRICE_KEY: decimal.Decimal("12"),
- consumer.STOP_ONLY: False,
- consumer.TAKE_PROFIT_PRICE_KEY: decimal.Decimal("22222"),
- consumer.ADDITIONAL_TAKE_PROFIT_PRICES_KEY: [],
- consumer.ADDITIONAL_TAKE_PROFIT_VOLUME_RATIOS_KEY: [decimal.Decimal(1)],
- consumer.REDUCE_ONLY_KEY: True,
- consumer.TAG_KEY: None,
- mode.EXCHANGE_ORDER_IDS: ["ab1", "aaaaa"],
- consumer.LEVERAGE: 22,
- consumer.TRAILING_PROFILE: "filled_take_profit",
- consumer.ORDER_EXCHANGE_CREATION_PARAMS: {
- "TAG_1": "ttt",
- "Plop": False,
- },
- consumer.CANCEL_POLICY: trading_personal_data.ExpirationTimeOrderCancelPolicy.__name__,
- consumer.CANCEL_POLICY_PARAMS: {'expiration_time': 1000.0},
- })
- _set_state_mock.reset_mock()
+ expected_dsl = "limit('sell', 'unused', '12%', '123', reduce_only=True, take_profit_prices=['22222'], take_profit_volume_percents=[1.0], stop_loss_price='12', trailing_profile='fiLLED_take_profit', cancel_policy='expirationtimeordercancelpolicy', cancel_policy_params={'expiration_time': 1000.0}, params={'TAG_1': 'ttt', 'Plop': False})"
+ assert mode.dsl_script == expected_dsl
+ compute_expression_mock.assert_awaited_once()
+ compute_expression_mock.reset_mock()
# future exchange: call set_leverage
exchange_manager.is_future = True
@@ -697,414 +637,418 @@ async def test_signal_callback(tools):
f"{mode.TAKE_PROFIT_VOLUME_RATIO_KEY}_1": "1.122",
f"{mode.TAKE_PROFIT_VOLUME_RATIO_KEY}_2": "0.2222",
mode.EXCHANGE_ORDER_IDS: ["ab1", "aaaaa"],
- consumer.LEVERAGE: 22,
+ mode.LEVERAGE: 22,
"PARAM_TAG_1": "ttt",
"PARAM_Plop": False,
}, context)
- set_leverage_mock.assert_called_once()
- assert set_leverage_mock.mock_calls[0].args[2] == decimal.Decimal("22")
- _set_state_mock.assert_awaited_once()
- assert _set_state_mock.await_args[0][1] == symbol
- assert _set_state_mock.await_args[0][2] == trading_view_signals_trading.SignalActions.CREATE_ORDERS
- assert _set_state_mock.await_args[0][3] == trading_enums.EvaluatorStates.SHORT
- assert compare_dict_with_nan(_set_state_mock.await_args[0][4], {
- consumer.PRICE_KEY: decimal.Decimal("123"),
- consumer.VOLUME_KEY: decimal.Decimal("0.8130081300813008130081300813"),
- consumer.STOP_PRICE_KEY: decimal.Decimal("6308.27549999"),
- consumer.STOP_ONLY: False,
- consumer.TAKE_PROFIT_PRICE_KEY: decimal.Decimal("nan"), # only additional TP orders are provided
- consumer.ADDITIONAL_TAKE_PROFIT_PRICES_KEY: [
- decimal.Decimal("7129.52833333"), decimal.Decimal("7131.52833333"), decimal.Decimal('11453.19499999')
- ],
- consumer.ADDITIONAL_TAKE_PROFIT_VOLUME_RATIOS_KEY: [
- decimal.Decimal("1"), decimal.Decimal("1.122"), decimal.Decimal("0.2222"),
- ],
- consumer.REDUCE_ONLY_KEY: False,
- consumer.TAG_KEY: None,
- mode.EXCHANGE_ORDER_IDS: ["ab1", "aaaaa"],
- consumer.TRAILING_PROFILE: None,
- consumer.LEVERAGE: 22,
- consumer.ORDER_EXCHANGE_CREATION_PARAMS: {
- "TAG_1": "ttt",
- "Plop": False,
- },
- consumer.CANCEL_POLICY: None,
- consumer.CANCEL_POLICY_PARAMS: None,
- })
- _set_state_mock.reset_mock()
- set_leverage_mock.reset_mock()
+ expected_dsl = "limit('sell', 'unused', '100q', '123@', reduce_only=False, take_profit_prices=['120.333333333333333d', '122.333333333333333d', '4444d'], take_profit_volume_percents=[1.0, 1.122, 0.2222], stop_loss_price='-10%', params={'TAG_1': 'ttt', 'Plop': False})"
+ assert mode.dsl_script == expected_dsl
+ compute_expression_mock.assert_awaited_once()
+ compute_expression_mock.reset_mock()
+ exchange_manager.is_future = False
- with pytest.raises(errors.MissingFunds):
+ with pytest.raises(commons_errors.DSLInterpreterError, match="market requires at least 3 parameter"):
await producer.signal_callback({
mode.EXCHANGE_KEY: exchange_manager.exchange_name,
- mode.SYMBOL_KEY: "unused",
mode.SIGNAL_KEY: "SelL",
- mode.PRICE_KEY: "123000q", # price = 123
- mode.VOLUME_KEY: "11111b", # base amount: not enough funds
- mode.REDUCE_ONLY_KEY: True,
- mode.ORDER_TYPE_SIGNAL: "LiMiT",
- mode.STOP_PRICE_KEY: "-10%", # price - 10%
- mode.TAKE_PROFIT_PRICE_KEY: "120.333333333333333d", # price + 120.333333333333333
- mode.EXCHANGE_ORDER_IDS: ["ab1", "aaaaa"],
- mode.LEVERAGE: None,
- "PARAM_TAG_1": "ttt",
- "PARAM_Plop": False,
}, context)
- set_leverage_mock.assert_not_called()
- _set_state_mock.assert_not_called()
- with pytest.raises(errors.InvalidArgumentError):
- await producer.signal_callback({
- mode.EXCHANGE_KEY: exchange_manager.exchange_name,
- mode.SYMBOL_KEY: "unused",
- mode.SIGNAL_KEY: "DSDSDDSS",
- mode.PRICE_KEY: "123000q", # price = 123
- mode.VOLUME_KEY: "11111b", # base amount: not enough funds
- mode.REDUCE_ONLY_KEY: True,
- mode.ORDER_TYPE_SIGNAL: "LiMiT",
- mode.STOP_PRICE_KEY: "-10%", # price - 10%
- mode.TAKE_PROFIT_PRICE_KEY: "120.333333333333333d", # price + 120.333333333333333
- mode.EXCHANGE_ORDER_IDS: ["ab1", "aaaaa"],
- mode.LEVERAGE: None,
- "PARAM_TAG_1": "ttt",
- "PARAM_Plop": False,
- }, context)
- set_leverage_mock.assert_not_called()
- _set_state_mock.assert_not_called()
+ # Unknown signal: translates to "None" (no-op), no exception
+ await producer.signal_callback({
+ mode.EXCHANGE_KEY: exchange_manager.exchange_name,
+ mode.SYMBOL_KEY: "unused",
+ mode.SIGNAL_KEY: "DSDSDDSS",
+ mode.PRICE_KEY: "123",
+ mode.VOLUME_KEY: "0.001",
+ }, context)
+ assert mode.dsl_script == "None"
+ compute_expression_mock.assert_awaited_once()
+ compute_expression_mock.reset_mock()
- with pytest.raises(errors.InvalidCancelPolicyError):
+ # WITHDRAW_FUNDS signal
+ with mock.patch('octobot_trading.constants.ALLOW_FUNDS_TRANSFER', True):
await producer.signal_callback({
mode.EXCHANGE_KEY: exchange_manager.exchange_name,
- mode.SYMBOL_KEY: "unused",
- mode.SIGNAL_KEY: "SelL",
- mode.CANCEL_POLICY: "unknown_cancel_policy",
+ mode.SYMBOL_KEY: symbol,
+ mode.SIGNAL_KEY: mode.WITHDRAW_FUNDS_SIGNAL,
+ "asset": "BTC",
+ "amount": 0.1,
+ "network": "bitcoin",
+ "address": "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa",
}, context)
- set_leverage_mock.assert_not_called()
- _set_state_mock.assert_not_called()
+ assert mode.dsl_script == "withdraw('BTC', 'bitcoin', '1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa', amount=0.1)"
+ compute_expression_mock.assert_awaited_once()
+ compute_expression_mock.reset_mock()
- # Test meta action only signal - should return early without calling _parse_order_details or _set_state
- _set_state_mock.reset_mock()
+ # Test meta action only signal - signal_callback still calls call_dsl_script for all signals
prev_value = set(mode.META_ACTION_ONLY_SIGNALS)
try:
mode.META_ACTION_ONLY_SIGNALS.add("buy")
- with mock.patch.object(producer, "_parse_order_details", mock.AsyncMock()) as _parse_order_details_mock, \
- mock.patch.object(producer, "apply_cancel_policies", mock.AsyncMock(return_value=(True, None))) as apply_cancel_policies_mock, \
- mock.patch.object(producer, "_process_pre_state_update_actions", mock.AsyncMock()) as _process_pre_state_update_actions_mock, \
- mock.patch.object(producer, "_process_meta_actions", mock.AsyncMock()) as _process_meta_actions_mock:
+ with mock.patch.object(producer, "apply_cancel_policies", mock.AsyncMock(return_value=(True, None))) as apply_cancel_policies_mock:
await producer.signal_callback({
mode.EXCHANGE_KEY: exchange_manager.exchange_name,
mode.SYMBOL_KEY: "unused",
mode.SIGNAL_KEY: "BUY",
+ mode.VOLUME_KEY: "0.001",
}, context)
- # Should call apply_cancel_policies, _process_pre_state_update_actions, and _process_meta_actions
apply_cancel_policies_mock.assert_awaited_once()
- _process_pre_state_update_actions_mock.assert_awaited_once()
- _process_meta_actions_mock.assert_awaited_once()
- # Should NOT call _parse_order_details or _set_state (early return)
- _parse_order_details_mock.assert_not_awaited()
- _set_state_mock.assert_not_awaited()
+ assert mode.dsl_script == "market('buy', 'unused', '0.001')"
+ compute_expression_mock.assert_awaited_once()
finally:
mode.__class__.META_ACTION_ONLY_SIGNALS = prev_value
+async def test_signal_callback_transfer_funds_signal(tools, blockchain_wallet_details):
+ exchange_manager, symbol, mode, producer, consumer = tools
+ context = script_keywords.get_base_context(producer.trading_mode)
+ with mock.patch('octobot_trading.constants.ALLOW_FUNDS_TRANSFER', True), \
+ mock.patch.object(dsl_interpreter.Interpreter, "compute_expression", mock.AsyncMock()) as compute_expression_mock:
+ blockchain_descriptor = {
+ "blockchain": blockchain_wallet_details.blockchain_descriptor.blockchain,
+ "network": blockchain_wallet_details.blockchain_descriptor.network,
+ "native_coin_symbol": blockchain_wallet_details.blockchain_descriptor.native_coin_symbol,
+ }
+ wallet_descriptor = {
+ "address": blockchain_wallet_details.wallet_descriptor.address,
+ "private_key": blockchain_wallet_details.wallet_descriptor.private_key,
+ "specific_config": blockchain_wallet_details.wallet_descriptor.specific_config,
+ }
+ await producer.signal_callback({
+ mode.EXCHANGE_KEY: exchange_manager.exchange_name,
+ mode.SYMBOL_KEY: symbol,
+ mode.SIGNAL_KEY: mode.TRANSFER_FUNDS_SIGNAL,
+ "blockchain_descriptor": blockchain_descriptor,
+ "wallet_descriptor": wallet_descriptor,
+ "asset": BLOCKCHAIN_WALLET_ASSET,
+ "amount": 1.0,
+ "address": "0x1234567890123456789012345678901234567890",
+ }, context)
+ assert mode.dsl_script == (
+ "blockchain_wallet_transfer({'blockchain': 'simulated', 'network': 'SIMULATED', 'native_coin_symbol': 'ETH'}, {'address': '0x1234567890123456789012345678901234567890', 'private_key': '0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890', 'specific_config': {'assets': [{'asset': 'ETH', 'amount': 10.0}]}}, 'ETH', 1.0, address='0x1234567890123456789012345678901234567890')"
+ )
+ compute_expression_mock.assert_awaited_once()
+
+
async def test_signal_callback_with_meta_actions(tools):
exchange_manager, symbol, mode, producer, consumer = tools
mode.CANCEL_PREVIOUS_ORDERS = True
context = script_keywords.get_base_context(producer.trading_mode)
- with mock.patch.object(producer, "_set_state", mock.AsyncMock()) as _set_state_mock, \
- mock.patch.object(mode, "set_leverage", mock.AsyncMock()) as set_leverage_mock, \
- mock.patch.object(producer, "cancel_symbol_open_orders", mock.AsyncMock()) as cancel_symbol_open_orders_mock:
+ with mock.patch.object(producer, "cancel_symbol_open_orders", mock.AsyncMock()) as cancel_symbol_open_orders_mock, \
+ mock.patch.object(dsl_interpreter.Interpreter, "compute_expression", mock.AsyncMock()) as compute_expression_mock:
await producer.signal_callback({
mode.SIGNAL_KEY: mode.ENSURE_EXCHANGE_BALANCE_SIGNAL,
}, context)
- _set_state_mock.assert_awaited_once()
- set_leverage_mock.assert_not_called()
- cancel_symbol_open_orders_mock.assert_not_called() # not called for meta actions even when CANCEL_PREVIOUS_ORDERS is True
- assert _set_state_mock.await_args[0][1] == symbol
- assert _set_state_mock.await_args[0][2] == trading_view_signals_trading.SignalActions.ENSURE_EXCHANGE_BALANCE
- assert _set_state_mock.await_args[0][3] == trading_enums.EvaluatorStates.NEUTRAL
- assert compare_dict_with_nan(_set_state_mock.await_args[0][4], {
- consumer.PRICE_KEY: trading_constants.ZERO,
- consumer.VOLUME_KEY: trading_constants.ZERO,
- consumer.STOP_PRICE_KEY: decimal.Decimal(math.nan),
- consumer.STOP_ONLY: False,
- consumer.TAKE_PROFIT_PRICE_KEY: decimal.Decimal(math.nan),
- consumer.ADDITIONAL_TAKE_PROFIT_PRICES_KEY: [],
- consumer.ADDITIONAL_TAKE_PROFIT_VOLUME_RATIOS_KEY: [],
- consumer.REDUCE_ONLY_KEY: False,
- consumer.TAG_KEY: None,
- consumer.TRAILING_PROFILE: None,
- consumer.EXCHANGE_ORDER_IDS: None,
- consumer.LEVERAGE: None,
- consumer.ORDER_EXCHANGE_CREATION_PARAMS: {},
- consumer.CANCEL_POLICY: None,
- consumer.CANCEL_POLICY_PARAMS: None,
- })
- assert compare_dict_with_nan(_set_state_mock.await_args[0][5], {
- mode.SIGNAL_KEY: mode.ENSURE_EXCHANGE_BALANCE_SIGNAL,
- })
- _set_state_mock.reset_mock()
+ cancel_symbol_open_orders_mock.assert_not_called() # not called for meta actions even when CANCEL_PREVIOUS_ORDERS is True
+ assert mode.dsl_script == "None" # ensure_exchange_balance has no DSL operator
+ compute_expression_mock.assert_awaited_once()
async def test_signal_callback_with_cancel_policies(tools):
exchange_manager, symbol, mode, producer, consumer = tools
context = script_keywords.get_base_context(producer.trading_mode)
mode.CANCEL_PREVIOUS_ORDERS = True
- print(f"{mode.META_ACTION_ONLY_SIGNALS=}")
async def _apply_cancel_policies(*args, **kwargs):
return True, trading_signals.get_orders_dependencies([mock.Mock(order_id="123"), mock.Mock(order_id="456-cancel_policy")])
async def _cancel_symbol_open_orders(*args, **kwargs):
return True, trading_signals.get_orders_dependencies([mock.Mock(order_id="456-cancel_symbol_open_orders")])
- with mock.patch.object(producer, "_set_state", mock.AsyncMock()) as _set_state_mock, \
- mock.patch.object(producer, "_process_pre_state_update_actions", mock.AsyncMock()) as _process_pre_state_update_actions_mock, \
- mock.patch.object(producer, "_parse_order_details", mock.AsyncMock(return_value=(
- trading_view_signals_trading.SignalActions.CREATE_ORDERS,
- trading_enums.EvaluatorStates.LONG,
- {}
- ))) as _parse_order_details_mock, \
- mock.patch.object(producer, "apply_cancel_policies", mock.AsyncMock(side_effect=_apply_cancel_policies)) as apply_cancel_policies_mock, \
- mock.patch.object(producer, "cancel_symbol_open_orders", mock.AsyncMock(side_effect=_cancel_symbol_open_orders)) as cancel_symbol_open_orders_mock:
+ with mock.patch.object(producer, "apply_cancel_policies", mock.AsyncMock(side_effect=_apply_cancel_policies)) as apply_cancel_policies_mock, \
+ mock.patch.object(producer, "cancel_symbol_open_orders", mock.AsyncMock(side_effect=_cancel_symbol_open_orders)) as cancel_symbol_open_orders_mock, \
+ mock.patch.object(dsl_interpreter.Interpreter, "compute_expression", mock.AsyncMock()) as compute_expression_mock:
await producer.signal_callback({
mode.EXCHANGE_KEY: exchange_manager.exchange_name,
mode.SYMBOL_KEY: "unused",
mode.SIGNAL_KEY: "BUY",
+ mode.VOLUME_KEY: "0.001",
}, context)
- _process_pre_state_update_actions_mock.assert_awaited_once()
- _parse_order_details_mock.assert_awaited_once()
apply_cancel_policies_mock.assert_awaited_once()
cancel_symbol_open_orders_mock.assert_awaited_once()
- _set_state_mock.assert_awaited_once()
- assert _set_state_mock.mock_calls[0].kwargs["dependencies"] == trading_signals.get_orders_dependencies([
- mock.Mock(order_id="123"),
- mock.Mock(order_id="456-cancel_policy"),
- mock.Mock(order_id="456-cancel_symbol_open_orders")
- ])
+ assert mode.dsl_script == "market('buy', 'unused', '0.001')"
+ compute_expression_mock.assert_awaited_once()
+
mode.CANCEL_PREVIOUS_ORDERS = False
- with mock.patch.object(producer, "_set_state", mock.AsyncMock()) as _set_state_mock, \
- mock.patch.object(producer, "_process_pre_state_update_actions", mock.AsyncMock()) as _process_pre_state_update_actions_mock, \
- mock.patch.object(producer, "_parse_order_details", mock.AsyncMock(return_value=(trading_view_signals_trading.SignalActions.CREATE_ORDERS, trading_enums.EvaluatorStates.LONG, {}))) as _parse_order_details_mock, \
- mock.patch.object(producer, "apply_cancel_policies", mock.AsyncMock(side_effect=_apply_cancel_policies)) as apply_cancel_policies_mock, \
- mock.patch.object(producer, "cancel_symbol_open_orders", mock.AsyncMock(side_effect=_cancel_symbol_open_orders)) as cancel_symbol_open_orders_mock:
+ with mock.patch.object(producer, "apply_cancel_policies", mock.AsyncMock(side_effect=_apply_cancel_policies)) as apply_cancel_policies_mock, \
+ mock.patch.object(producer, "cancel_symbol_open_orders", mock.AsyncMock(side_effect=_cancel_symbol_open_orders)) as cancel_symbol_open_orders_mock, \
+ mock.patch.object(dsl_interpreter.Interpreter, "compute_expression", mock.AsyncMock()) as compute_expression_mock:
await producer.signal_callback({
mode.EXCHANGE_KEY: exchange_manager.exchange_name,
mode.SYMBOL_KEY: "unused",
mode.SIGNAL_KEY: "BUY",
+ mode.VOLUME_KEY: "0.001",
}, context)
- _process_pre_state_update_actions_mock.assert_awaited_once()
- _parse_order_details_mock.assert_awaited_once()
apply_cancel_policies_mock.assert_awaited_once()
- cancel_symbol_open_orders_mock.assert_not_called() # CANCEL_PREVIOUS_ORDERS is False
- _set_state_mock.assert_awaited_once()
- assert _set_state_mock.mock_calls[0].kwargs["dependencies"] == trading_signals.get_orders_dependencies([
- mock.Mock(order_id="123"),
- mock.Mock(order_id="456-cancel_policy"),
- ])
+ cancel_symbol_open_orders_mock.assert_not_called() # CANCEL_PREVIOUS_ORDERS is False
+ assert mode.dsl_script == "market('buy', 'unused', '0.001')"
+ compute_expression_mock.assert_awaited_once()
-async def test_set_state(tools):
- exchange_manager, symbol, mode, producer, consumer = tools
- cryptocurrency = mode.cryptocurrency
- order_data = {
- consumer.PRICE_KEY: decimal.Decimal("100"),
- consumer.VOLUME_KEY: decimal.Decimal("1.0"),
- }
+async def test_before_signal_processing(tools):
+ _exchange_manager, symbol, mode, producer, consumer = tools
parsed_data = {
- mode.EXCHANGE_KEY: exchange_manager.exchange_name,
+ mode.EXCHANGE_KEY: _exchange_manager.exchange_name,
mode.SYMBOL_KEY: symbol,
- mode.SIGNAL_KEY: "BUY",
+ mode.SIGNAL_KEY: "buy",
}
- dependencies = trading_signals.get_orders_dependencies([mock.Mock(order_id="test_order")])
-
- # Test CREATE_ORDERS action - should call submit_trading_evaluation
- exchange_manager.is_backtesting = False
- with mock.patch.object(producer, "submit_trading_evaluation", mock.AsyncMock()) as submit_mock, \
- mock.patch.object(producer, "_send_alert_notification", mock.AsyncMock()) as send_notification_mock, \
- mock.patch.object(producer.logger, "info") as logger_info_mock:
- producer.state = trading_enums.EvaluatorStates.NEUTRAL
- producer.final_eval = -0.5
-
- await producer._set_state(
- cryptocurrency,
- symbol,
- trading_view_signals_trading.SignalActions.CREATE_ORDERS,
- trading_enums.EvaluatorStates.LONG,
- order_data,
- parsed_data,
- dependencies=dependencies
- )
-
- # State should be updated
- assert producer.state == trading_enums.EvaluatorStates.LONG
- logger_info_mock.assert_called()
- assert "new state" in str(logger_info_mock.call_args_list).lower()
-
- # Should call submit_trading_evaluation
- submit_mock.assert_awaited_once()
- call_args = submit_mock.await_args
- assert call_args.kwargs["cryptocurrency"] == cryptocurrency
- assert call_args.kwargs["symbol"] == symbol
- assert call_args.kwargs["time_frame"] is None
- assert call_args.kwargs["final_note"] == producer.final_eval
- assert call_args.kwargs["state"] == trading_enums.EvaluatorStates.LONG
- assert call_args.kwargs["data"] == order_data
- assert call_args.kwargs["dependencies"] == dependencies
-
- send_notification_mock.assert_awaited_once_with(symbol, trading_enums.EvaluatorStates.LONG)
- exchange_manager.is_backtesting = True
- submit_mock.reset_mock()
- logger_info_mock.reset_mock()
+ mock_dependencies = trading_signals.get_orders_dependencies([mock.Mock(order_id="dep-1")])
- # Test ENSURE_EXCHANGE_BALANCE action - should call process_non_creating_orders_actions
- with mock.patch.object(producer, "process_non_creating_orders_actions", mock.AsyncMock()) as process_actions_mock, \
- mock.patch.object(producer, "submit_trading_evaluation", mock.AsyncMock()) as submit_mock:
- producer.state = trading_enums.EvaluatorStates.NEUTRAL
-
- await producer._set_state(
- cryptocurrency,
- symbol,
- trading_view_signals_trading.SignalActions.ENSURE_EXCHANGE_BALANCE,
- trading_enums.EvaluatorStates.NEUTRAL,
- order_data,
- parsed_data,
- dependencies=dependencies
- )
-
- # Should call process_non_creating_orders_actions
- process_actions_mock.assert_awaited_once_with(
- trading_view_signals_trading.SignalActions.ENSURE_EXCHANGE_BALANCE,
- symbol,
- order_data,
- parsed_data
- )
- # Should not call submit_trading_evaluation
- submit_mock.assert_not_awaited()
- process_actions_mock.reset_mock()
-
- # Test CANCEL_ORDERS action - should call process_non_creating_orders_actions
- with mock.patch.object(producer, "process_non_creating_orders_actions", mock.AsyncMock()) as process_actions_mock, \
- mock.patch.object(producer, "submit_trading_evaluation", mock.AsyncMock()) as submit_mock:
- await producer._set_state(
- cryptocurrency,
- symbol,
- trading_view_signals_trading.SignalActions.CANCEL_ORDERS,
- trading_enums.EvaluatorStates.NEUTRAL,
- order_data,
- parsed_data,
- dependencies=dependencies
- )
-
- process_actions_mock.assert_awaited_once_with(
- trading_view_signals_trading.SignalActions.CANCEL_ORDERS,
- symbol,
- order_data,
- parsed_data
- )
- submit_mock.assert_not_awaited()
- process_actions_mock.reset_mock()
-
- # Test with None dependencies
- with mock.patch.object(producer, "submit_trading_evaluation", mock.AsyncMock()) as submit_mock:
- await producer._set_state(
- cryptocurrency,
- symbol,
- trading_view_signals_trading.SignalActions.CREATE_ORDERS,
- trading_enums.EvaluatorStates.LONG,
- order_data,
- parsed_data,
- dependencies=None
- )
-
- submit_mock.assert_awaited_once()
- assert submit_mock.await_args.kwargs["dependencies"] is None
+ with mock.patch.object(
+ producer, "_updated_orders_to_cancel", mock.AsyncMock(return_value=mock_dependencies)
+ ) as updated_orders_mock, mock.patch.object(
+ producer, "_update_leverage_if_necessary", mock.AsyncMock()
+ ) as update_leverage_mock:
+ result = await producer._before_signal_processing(parsed_data)
+ updated_orders_mock.assert_awaited_once_with(parsed_data)
+ update_leverage_mock.assert_awaited_once_with(parsed_data)
+ assert result is mock_dependencies
-async def test_process_non_creating_orders_actions(tools):
- exchange_manager, symbol, mode, producer, consumer = tools
- order_data = {
- consumer.EXCHANGE_ORDER_IDS: ["order_1", "order_2"],
- consumer.TAG_KEY: "test_tag",
- }
+
+async def test_before_signal_processing_update_leverage_calls_set_leverage(tools):
+ _exchange_manager, symbol, mode, producer, consumer = tools
parsed_data = {
- mode.EXCHANGE_KEY: exchange_manager.exchange_name,
+ mode.EXCHANGE_KEY: _exchange_manager.exchange_name,
mode.SYMBOL_KEY: symbol,
+ mode.SIGNAL_KEY: "buy",
+ mode.LEVERAGE: 5,
}
- # Test CANCEL_ORDERS action
- with mock.patch.object(producer, "cancel_orders_from_order_data", mock.AsyncMock(return_value=(True, None))) \
- as cancel_orders_mock:
- await producer.process_non_creating_orders_actions(
- trading_view_signals_trading.SignalActions.CANCEL_ORDERS,
- symbol,
- order_data,
- parsed_data
- )
- cancel_orders_mock.assert_awaited_once_with(symbol, order_data, parsed_data)
- cancel_orders_mock.reset_mock()
-
- # Test ENSURE_EXCHANGE_BALANCE action
- with mock.patch.object(producer, "ensure_exchange_balance", mock.AsyncMock()) as ensure_exchange_mock:
- await producer.process_non_creating_orders_actions(
- trading_view_signals_trading.SignalActions.ENSURE_EXCHANGE_BALANCE,
- symbol,
- order_data,
- parsed_data
- )
- ensure_exchange_mock.assert_awaited_once_with(parsed_data)
- ensure_exchange_mock.reset_mock()
-
- # Test ENSURE_BLOCKCHAIN_WALLET_BALANCE action
- with mock.patch.object(producer, "ensure_blockchain_wallet_balance", mock.AsyncMock()) \
- as ensure_blockchain_mock:
- await producer.process_non_creating_orders_actions(
- trading_view_signals_trading.SignalActions.ENSURE_BLOCKCHAIN_WALLET_BALANCE,
- symbol,
- order_data,
- parsed_data
- )
- ensure_blockchain_mock.assert_awaited_once_with(parsed_data)
- ensure_blockchain_mock.reset_mock()
-
- # Test WITHDRAW_FUNDS action
- with mock.patch.object(producer, "withdraw_funds", mock.AsyncMock()) as withdraw_funds_mock:
- await producer.process_non_creating_orders_actions(
- trading_view_signals_trading.SignalActions.WITHDRAW_FUNDS,
- symbol,
- order_data,
- parsed_data
- )
- withdraw_funds_mock.assert_awaited_once_with(parsed_data)
- withdraw_funds_mock.reset_mock()
-
- # Test TRANSFER_FUNDS action
- with mock.patch.object(producer, "transfer_funds", mock.AsyncMock()) as transfer_funds_mock:
- await producer.process_non_creating_orders_actions(
- trading_view_signals_trading.SignalActions.TRANSFER_FUNDS,
- symbol,
- order_data,
- parsed_data
- )
- transfer_funds_mock.assert_awaited_once_with(parsed_data)
- transfer_funds_mock.reset_mock()
-
- # Test unknown action - should raise InvalidArgumentError
- with pytest.raises(errors.InvalidArgumentError, match="Unknown action"):
- await producer.process_non_creating_orders_actions(
- trading_view_signals_trading.SignalActions.NO_ACTION,
- symbol,
- order_data,
- parsed_data
+ with mock.patch.object(
+ producer, "_updated_orders_to_cancel", mock.AsyncMock(return_value=None)
+ ), mock.patch.object(
+ mode, "set_leverage", mock.AsyncMock()
+ ) as set_leverage_mock:
+ await producer._before_signal_processing(parsed_data)
+
+ set_leverage_mock.assert_awaited_once_with(
+ symbol, None, decimal.Decimal("5")
)
- # Test CREATE_ORDERS action - should raise InvalidArgumentError (not handled by this method)
- with pytest.raises(errors.InvalidArgumentError, match="Unknown action"):
- await producer.process_non_creating_orders_actions(
- trading_view_signals_trading.SignalActions.CREATE_ORDERS,
- symbol,
- order_data,
- parsed_data
- )
+
+async def test_before_signal_processing_update_leverage_skips_when_no_symbol(tools):
+ _exchange_manager, symbol, mode, producer, consumer = tools
+ parsed_data = {
+ mode.EXCHANGE_KEY: _exchange_manager.exchange_name,
+ mode.SIGNAL_KEY: "buy",
+ mode.LEVERAGE: 5,
+ }
+
+ with mock.patch.object(
+ producer, "_updated_orders_to_cancel", mock.AsyncMock(return_value=None)
+ ), mock.patch.object(
+ mode, "set_leverage", mock.AsyncMock()
+ ) as set_leverage_mock, mock.patch.object(
+ producer.logger, "error"
+ ) as logger_error_mock:
+ await producer._before_signal_processing(parsed_data)
+
+ set_leverage_mock.assert_not_awaited()
+ logger_error_mock.assert_called_once()
+ assert "symbol" in str(logger_error_mock.call_args).lower()
+
+
+# async def test_set_state(tools):
+# exchange_manager, symbol, mode, producer, consumer = tools
+# cryptocurrency = mode.cryptocurrency
+# order_data = {
+# consumer.PRICE_KEY: decimal.Decimal("100"),
+# consumer.VOLUME_KEY: decimal.Decimal("1.0"),
+# }
+# parsed_data = {
+# mode.EXCHANGE_KEY: exchange_manager.exchange_name,
+# mode.SYMBOL_KEY: symbol,
+# mode.SIGNAL_KEY: "BUY",
+# }
+# dependencies = trading_signals.get_orders_dependencies([mock.Mock(order_id="test_order")])
+
+# # Test CREATE_ORDERS action - should call submit_trading_evaluation
+# exchange_manager.is_backtesting = False
+# with mock.patch.object(producer, "submit_trading_evaluation", mock.AsyncMock()) as submit_mock, \
+# mock.patch.object(producer, "_send_alert_notification", mock.AsyncMock()) as send_notification_mock, \
+# mock.patch.object(producer.logger, "info") as logger_info_mock:
+# producer.state = trading_enums.EvaluatorStates.NEUTRAL
+# producer.final_eval = -0.5
+
+# await producer._set_state(
+# cryptocurrency,
+# symbol,
+# trading_view_signals_trading.SignalActions.CREATE_ORDERS,
+# trading_enums.EvaluatorStates.LONG,
+# order_data,
+# parsed_data,
+# dependencies=dependencies
+# )
+
+# # State should be updated
+# assert producer.state == trading_enums.EvaluatorStates.LONG
+# logger_info_mock.assert_called()
+# assert "new state" in str(logger_info_mock.call_args_list).lower()
+
+# # Should call submit_trading_evaluation
+# submit_mock.assert_awaited_once()
+# call_args = submit_mock.await_args
+# assert call_args.kwargs["cryptocurrency"] == cryptocurrency
+# assert call_args.kwargs["symbol"] == symbol
+# assert call_args.kwargs["time_frame"] is None
+# assert call_args.kwargs["final_note"] == producer.final_eval
+# assert call_args.kwargs["state"] == trading_enums.EvaluatorStates.LONG
+# assert call_args.kwargs["data"] == order_data
+# assert call_args.kwargs["dependencies"] == dependencies
+
+# send_notification_mock.assert_awaited_once_with(symbol, trading_enums.EvaluatorStates.LONG)
+# exchange_manager.is_backtesting = True
+# submit_mock.reset_mock()
+# logger_info_mock.reset_mock()
+
+# # Test ENSURE_EXCHANGE_BALANCE action - should call process_non_creating_orders_actions
+# with mock.patch.object(producer, "process_non_creating_orders_actions", mock.AsyncMock()) as process_actions_mock, \
+# mock.patch.object(producer, "submit_trading_evaluation", mock.AsyncMock()) as submit_mock:
+# producer.state = trading_enums.EvaluatorStates.NEUTRAL
+
+# await producer._set_state(
+# cryptocurrency,
+# symbol,
+# trading_view_signals_trading.SignalActions.ENSURE_EXCHANGE_BALANCE,
+# trading_enums.EvaluatorStates.NEUTRAL,
+# order_data,
+# parsed_data,
+# dependencies=dependencies
+# )
+
+# # Should call process_non_creating_orders_actions
+# process_actions_mock.assert_awaited_once_with(
+# trading_view_signals_trading.SignalActions.ENSURE_EXCHANGE_BALANCE,
+# symbol,
+# order_data,
+# parsed_data
+# )
+# # Should not call submit_trading_evaluation
+# submit_mock.assert_not_awaited()
+# process_actions_mock.reset_mock()
+
+# # Test CANCEL_ORDERS action - should call process_non_creating_orders_actions
+# with mock.patch.object(producer, "process_non_creating_orders_actions", mock.AsyncMock()) as process_actions_mock, \
+# mock.patch.object(producer, "submit_trading_evaluation", mock.AsyncMock()) as submit_mock:
+# await producer._set_state(
+# cryptocurrency,
+# symbol,
+# trading_view_signals_trading.SignalActions.CANCEL_ORDERS,
+# trading_enums.EvaluatorStates.NEUTRAL,
+# order_data,
+# parsed_data,
+# dependencies=dependencies
+# )
+
+# process_actions_mock.assert_awaited_once_with(
+# trading_view_signals_trading.SignalActions.CANCEL_ORDERS,
+# symbol,
+# order_data,
+# parsed_data
+# )
+# submit_mock.assert_not_awaited()
+# process_actions_mock.reset_mock()
+
+# # Test with None dependencies
+# with mock.patch.object(producer, "submit_trading_evaluation", mock.AsyncMock()) as submit_mock:
+# await producer._set_state(
+# cryptocurrency,
+# symbol,
+# trading_view_signals_trading.SignalActions.CREATE_ORDERS,
+# trading_enums.EvaluatorStates.LONG,
+# order_data,
+# parsed_data,
+# dependencies=None
+# )
+
+# submit_mock.assert_awaited_once()
+# assert submit_mock.await_args.kwargs["dependencies"] is None
+
+
+# async def test_process_non_creating_orders_actions(tools):
+# exchange_manager, symbol, mode, producer, consumer = tools
+# order_data = {
+# consumer.EXCHANGE_ORDER_IDS: ["order_1", "order_2"],
+# consumer.TAG_KEY: "test_tag",
+# }
+# parsed_data = {
+# mode.EXCHANGE_KEY: exchange_manager.exchange_name,
+# mode.SYMBOL_KEY: symbol,
+# }
+
+# # Test CANCEL_ORDERS action
+# with mock.patch.object(producer, "cancel_orders_from_order_data", mock.AsyncMock(return_value=(True, None))) \
+# as cancel_orders_mock:
+# await producer.process_non_creating_orders_actions(
+# trading_view_signals_trading.SignalActions.CANCEL_ORDERS,
+# symbol,
+# order_data,
+# parsed_data
+# )
+# cancel_orders_mock.assert_awaited_once_with(symbol, order_data, parsed_data)
+# cancel_orders_mock.reset_mock()
+
+# # Test ENSURE_EXCHANGE_BALANCE action
+# with mock.patch.object(producer, "ensure_exchange_balance", mock.AsyncMock()) as ensure_exchange_mock:
+# await producer.process_non_creating_orders_actions(
+# trading_view_signals_trading.SignalActions.ENSURE_EXCHANGE_BALANCE,
+# symbol,
+# order_data,
+# parsed_data
+# )
+# ensure_exchange_mock.assert_awaited_once_with(parsed_data)
+# ensure_exchange_mock.reset_mock()
+
+# # Test ENSURE_BLOCKCHAIN_WALLET_BALANCE action
+# with mock.patch.object(producer, "ensure_blockchain_wallet_balance", mock.AsyncMock()) \
+# as ensure_blockchain_mock:
+# await producer.process_non_creating_orders_actions(
+# trading_view_signals_trading.SignalActions.ENSURE_BLOCKCHAIN_WALLET_BALANCE,
+# symbol,
+# order_data,
+# parsed_data
+# )
+# ensure_blockchain_mock.assert_awaited_once_with(parsed_data)
+# ensure_blockchain_mock.reset_mock()
+
+# # Test WITHDRAW_FUNDS action
+# with mock.patch.object(producer, "withdraw_funds", mock.AsyncMock()) as withdraw_funds_mock:
+# await producer.process_non_creating_orders_actions(
+# trading_view_signals_trading.SignalActions.WITHDRAW_FUNDS,
+# symbol,
+# order_data,
+# parsed_data
+# )
+# withdraw_funds_mock.assert_awaited_once_with(parsed_data)
+# withdraw_funds_mock.reset_mock()
+
+# # Test TRANSFER_FUNDS action
+# with mock.patch.object(producer, "transfer_funds", mock.AsyncMock()) as transfer_funds_mock:
+# await producer.process_non_creating_orders_actions(
+# trading_view_signals_trading.SignalActions.TRANSFER_FUNDS,
+# symbol,
+# order_data,
+# parsed_data
+# )
+# transfer_funds_mock.assert_awaited_once_with(parsed_data)
+# transfer_funds_mock.reset_mock()
+
+# # Test unknown action - should raise InvalidArgumentError
+# with pytest.raises(errors.InvalidArgumentError, match="Unknown action"):
+# await producer.process_non_creating_orders_actions(
+# trading_view_signals_trading.SignalActions.NO_ACTION,
+# symbol,
+# order_data,
+# parsed_data
+# )
+
+# # Test CREATE_ORDERS action - should raise InvalidArgumentError (not handled by this method)
+# with pytest.raises(errors.InvalidArgumentError, match="Unknown action"):
+# await producer.process_non_creating_orders_actions(
+# trading_view_signals_trading.SignalActions.CREATE_ORDERS,
+# symbol,
+# order_data,
+# parsed_data
+# )
async def test_ensure_exchange_balance(tools):
@@ -1145,7 +1089,8 @@ async def test_ensure_blockchain_wallet_balance(tools, blockchain_wallet_details
parsed_data = {
"asset": BLOCKCHAIN_WALLET_ASSET,
"holdings": 5.0,
- "wallet_details": blockchain_wallet_details,
+ "blockchain_descriptor": blockchain_wallet_details.blockchain_descriptor,
+ "wallet_descriptor": blockchain_wallet_details.wallet_descriptor,
}
async with trading_api.blockchain_wallet_context(blockchain_wallet_details, exchange_manager.trader) as wallet:
@@ -1185,72 +1130,72 @@ async def test_ensure_blockchain_wallet_balance(tools, blockchain_wallet_details
assert "available: 0" in str(exc_info.value)
-async def test_withdraw_funds(tools):
- exchange_manager, symbol, mode, producer, consumer = tools
- asset = "BTC"
- amount = 0.1
- network = "bitcoin"
- address = "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa"
- tag = "test_tag"
- params = {"test_param": "test_value"}
+# async def test_withdraw_funds(tools):
+# exchange_manager, symbol, mode, producer, consumer = tools
+# asset = "BTC"
+# amount = 0.1
+# network = "bitcoin"
+# address = "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa"
+# tag = "test_tag"
+# params = {"test_param": "test_value"}
- parsed_data = {
- "asset": asset,
- "amount": amount,
- "network": network,
- "address": address,
- "tag": tag,
- "params": params,
- }
+# parsed_data = {
+# "asset": asset,
+# "amount": amount,
+# "network": network,
+# "address": address,
+# "tag": tag,
+# "params": params,
+# }
- # Set portfolio balance to have enough for withdrawal
- portfolio = exchange_manager.exchange_personal_data.portfolio_manager.portfolio
- portfolio._update_portfolio_data(asset, total_value=decimal.Decimal("1.0"), available_value=decimal.Decimal("1.0"), replace_value=True)
+# # Set portfolio balance to have enough for withdrawal
+# portfolio = exchange_manager.exchange_personal_data.portfolio_manager.portfolio
+# portfolio._update_portfolio_data(asset, total_value=decimal.Decimal("1.0"), available_value=decimal.Decimal("1.0"), replace_value=True)
- # Test successful withdrawal
- with mock.patch('octobot_trading.constants.ALLOW_FUNDS_TRANSFER', True), \
- mock.patch.object(producer.logger, "info") as logger_info_mock, \
- mock.patch.object(producer.exchange_manager.trader, "_withdraw_on_exchange", mock.AsyncMock(wraps=producer.exchange_manager.trader._withdraw_on_exchange)) as _withdraw_on_exchange_mock:
- await producer.withdraw_funds(parsed_data)
- _withdraw_on_exchange_mock.assert_awaited_once_with(asset, decimal.Decimal(str(amount)), network, address, tag=tag, params=params)
- logger_info_mock.assert_called_once()
- assert "Withdrawn" in str(logger_info_mock.call_args)
- assert asset in str(logger_info_mock.call_args)
- _withdraw_on_exchange_mock.reset_mock()
- logger_info_mock.reset_mock()
+# # Test successful withdrawal
+# with mock.patch('octobot_trading.constants.ALLOW_FUNDS_TRANSFER', True), \
+# mock.patch.object(producer.logger, "info") as logger_info_mock, \
+# mock.patch.object(producer.exchange_manager.trader, "_withdraw_on_exchange", mock.AsyncMock(wraps=producer.exchange_manager.trader._withdraw_on_exchange)) as _withdraw_on_exchange_mock:
+# await producer.withdraw_funds(parsed_data)
+# _withdraw_on_exchange_mock.assert_awaited_once_with(asset, decimal.Decimal(str(amount)), network, address, tag=tag, params=params)
+# logger_info_mock.assert_called_once()
+# assert "Withdrawn" in str(logger_info_mock.call_args)
+# assert asset in str(logger_info_mock.call_args)
+# _withdraw_on_exchange_mock.reset_mock()
+# logger_info_mock.reset_mock()
- # Test when ALLOW_FUNDS_TRANSFER is False - should raise DisabledFundsTransferError
- with mock.patch('octobot_trading.constants.ALLOW_FUNDS_TRANSFER', False):
- with pytest.raises(errors.DisabledFundsTransferError):
- await producer.withdraw_funds(parsed_data)
- _withdraw_on_exchange_mock.assert_not_awaited()
-
-async def test_transfer_funds(tools, blockchain_wallet_details):
- exchange_manager, symbol, mode, producer, consumer = tools
- amount = 1.0
- address = "0x1234567890123456789012345678901234567890"
+# # Test when ALLOW_FUNDS_TRANSFER is False - should raise DisabledFundsTransferError
+# with mock.patch('octobot_trading.constants.ALLOW_FUNDS_TRANSFER', False):
+# with pytest.raises(errors.DisabledFundsTransferError):
+# await producer.withdraw_funds(parsed_data)
+# _withdraw_on_exchange_mock.assert_not_awaited()
+
+# async def test_transfer_funds(tools, blockchain_wallet_details):
+# exchange_manager, symbol, mode, producer, consumer = tools
+# amount = 1.0
+# address = "0x1234567890123456789012345678901234567890"
- parsed_data = {
- "asset": BLOCKCHAIN_WALLET_ASSET,
- "amount": amount,
- "address": address,
- "wallet_details": blockchain_wallet_details,
- }
+# parsed_data = {
+# "asset": BLOCKCHAIN_WALLET_ASSET,
+# "amount": amount,
+# "address": address,
+# "wallet_details": blockchain_wallet_details,
+# }
- # Test successful transfer
- with mock.patch('octobot_trading.constants.ALLOW_FUNDS_TRANSFER', True), \
- mock.patch.object(producer.logger, "info") as logger_info_mock, \
- mock.patch.object(blockchain_wallets.BlockchainWalletSimulator, "withdraw", mock.AsyncMock()) as withdraw_mock:
- await producer.transfer_funds(parsed_data)
- withdraw_mock.assert_awaited_once_with(
- BLOCKCHAIN_WALLET_ASSET, decimal.Decimal(str(amount)), trading_constants.SIMULATED_BLOCKCHAIN_NETWORK, address
- )
+# # Test successful transfer
+# with mock.patch('octobot_trading.constants.ALLOW_FUNDS_TRANSFER', True), \
+# mock.patch.object(producer.logger, "info") as logger_info_mock, \
+# mock.patch.object(blockchain_wallets.BlockchainWalletSimulator, "withdraw", mock.AsyncMock()) as withdraw_mock:
+# await producer.transfer_funds(parsed_data)
+# withdraw_mock.assert_awaited_once_with(
+# BLOCKCHAIN_WALLET_ASSET, decimal.Decimal(str(amount)), trading_constants.SIMULATED_BLOCKCHAIN_NETWORK, address
+# )
- logger_info_mock.assert_called_once()
- assert "Transferred" in str(logger_info_mock.call_args)
- assert BLOCKCHAIN_WALLET_ASSET in str(logger_info_mock.call_args)
- withdraw_mock.reset_mock()
- logger_info_mock.reset_mock()
+# logger_info_mock.assert_called_once()
+# assert "Transferred" in str(logger_info_mock.call_args)
+# assert BLOCKCHAIN_WALLET_ASSET in str(logger_info_mock.call_args)
+# withdraw_mock.reset_mock()
+# logger_info_mock.reset_mock()
async def test_is_non_order_signal(tools):
@@ -1310,6 +1255,43 @@ async def test_is_meta_action_only(tools):
mode.__class__.META_ACTION_ONLY_SIGNALS = prev_value
+async def test_functional_limit_buy_signal_end_to_end(tools):
+ """
+ Functional test: backtesting exchange_manager receives a limit buy signal,
+ processes it, and the resulting order is created and verified.
+ """
+ exchange_manager, symbol, mode, _producer, _consumer = tools
+
+ limit_price = 6900
+ order_volume = 0.01
+
+ initial_orders = trading_api.get_open_orders(exchange_manager, symbol=symbol)
+ assert len(initial_orders) == 0
+
+ limit_buy_signal = f"""
+ EXCHANGE={exchange_manager.exchange_name}
+ SYMBOL={symbol}
+ SIGNAL=BUY
+ ORDER_TYPE=limit
+ PRICE={limit_price}
+ VOLUME={order_volume}
+ """
+
+ await mode._trading_view_signal_callback({"metadata": limit_buy_signal})
+
+ await asyncio_tools.wait_asyncio_next_cycle()
+
+ orders = trading_api.get_open_orders(exchange_manager, symbol=symbol)
+ assert len(orders) == 1, f"Expected 1 order, got {len(orders)}: {orders}"
+
+ created_order = orders[0]
+ assert isinstance(created_order, trading_personal_data.BuyLimitOrder)
+ assert created_order.symbol == symbol
+ assert created_order.side == trading_enums.TradeOrderSide.BUY
+ assert created_order.origin_price == decimal.Decimal(str(limit_price))
+ assert created_order.origin_quantity == decimal.Decimal(str(order_volume))
+
+
def compare_dict_with_nan(d_1, d_2):
try:
for key, val in d_1.items():
diff --git a/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/tests/test_tradingview_signal_to_dsl_translator.py b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/tests/test_tradingview_signal_to_dsl_translator.py
new file mode 100644
index 000000000..50e6f9832
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/tests/test_tradingview_signal_to_dsl_translator.py
@@ -0,0 +1,370 @@
+# Drakkar-Software OctoBot-Tentacles
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import mock
+import pytest
+
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_trading.constants as trading_constants
+import octobot_trading.errors as trading_errors
+import tentacles.Trading.Mode.trading_view_signals_trading_mode.tradingview_signal_to_dsl_translator as tradingview_signal_to_dsl_translator
+import tentacles.Trading.Mode.trading_view_signals_trading_mode.trading_view_signals_trading as trading_view_signals_trading
+
+
+@pytest.fixture
+def exchange_manager():
+ return mock.Mock()
+
+
+@pytest.fixture
+def translator_cls():
+ return tradingview_signal_to_dsl_translator.TradingViewSignalToDSLTranslator
+
+
+def _make_operator_param(name, required=True):
+ return dsl_interpreter.OperatorParameter(
+ name=name, description=f"{name} param", required=required, type=str
+ )
+
+
+class TestMapOtherParamsToDsl:
+ def test_maps_known_tradingview_keys_to_dsl(self, translator_cls):
+ param_symbol = _make_operator_param("symbol")
+ param_amount = _make_operator_param("amount")
+ operator_params = [param_symbol, param_amount]
+ other_params = {
+ trading_view_signals_trading.TradingViewSignalsTradingMode.SYMBOL_KEY: "BTC/USDT",
+ trading_view_signals_trading.TradingViewSignalsTradingMode.VOLUME_KEY: 0.01,
+ }
+ result = translator_cls._map_other_params_to_dsl(other_params, operator_params)
+ assert result == {"symbol": "BTC/USDT", "amount": 0.01}
+
+ def test_skips_non_string_keys(self, translator_cls):
+ param_symbol = _make_operator_param("symbol")
+ operator_params = [param_symbol]
+ other_params = {123: "value", trading_view_signals_trading.TradingViewSignalsTradingMode.SYMBOL_KEY: "BTC/USDT"}
+ result = translator_cls._map_other_params_to_dsl(other_params, operator_params)
+ assert result == {"symbol": "BTC/USDT"}
+
+ def test_uses_lowercase_for_unknown_keys_matching_operator_param(self, translator_cls):
+ param_unknown = _make_operator_param("custom_param")
+ operator_params = [param_unknown]
+ other_params = {"CUSTOM_PARAM": "value"}
+ result = translator_cls._map_other_params_to_dsl(other_params, operator_params)
+ assert result == {"custom_param": "value"}
+
+ def test_collects_param_prefixed_keys_into_params_dict(self, translator_cls):
+ param_symbol = _make_operator_param("symbol")
+ param_params = _make_operator_param("params", required=False)
+ operator_params = [param_symbol, param_params]
+ prefix = trading_view_signals_trading.TradingViewSignalsTradingMode.PARAM_PREFIX_KEY
+ other_params = {
+ f"{prefix}custom_key": "custom_value",
+ f"{prefix}another": 42,
+ }
+ result = translator_cls._map_other_params_to_dsl(other_params, operator_params)
+ assert result == {"params": {"custom_key": "custom_value", "another": 42}}
+
+ def test_ignores_param_prefixed_when_params_not_in_operator(self, translator_cls):
+ param_symbol = _make_operator_param("symbol")
+ operator_params = [param_symbol]
+ prefix = trading_view_signals_trading.TradingViewSignalsTradingMode.PARAM_PREFIX_KEY
+ other_params = {f"{prefix}custom_key": "custom_value"}
+ result = translator_cls._map_other_params_to_dsl(other_params, operator_params)
+ assert result == {}
+
+
+class TestGetDslSignalKeywordAndParams:
+ mode = trading_view_signals_trading.TradingViewSignalsTradingMode
+
+ def test_raises_when_signal_key_missing(self, translator_cls):
+ parsed_data = {}
+ with pytest.raises(trading_errors.InvalidArgumentError) as exc_info:
+ translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert self.mode.SIGNAL_KEY in str(exc_info.value)
+
+ def test_buy_signal_returns_market_keyword_with_side_buy(self, translator_cls):
+ parsed_data = {self.mode.SIGNAL_KEY: "buy"}
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "market"
+ assert params["side"] == "buy"
+
+ def test_buy_signal_with_explicit_order_type(self, translator_cls):
+ parsed_data = {
+ self.mode.SIGNAL_KEY: "buy",
+ self.mode.ORDER_TYPE_SIGNAL: "limit",
+ }
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "limit"
+ assert params["side"] == "buy"
+
+ def test_sell_signal_returns_market_keyword_with_side_sell(self, translator_cls):
+ parsed_data = {self.mode.SIGNAL_KEY: "sell"}
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "market"
+ assert params["side"] == "sell"
+
+ def test_sell_signal_with_explicit_order_type(self, translator_cls):
+ parsed_data = {
+ self.mode.SIGNAL_KEY: "sell",
+ self.mode.ORDER_TYPE_SIGNAL: "limit",
+ }
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "limit"
+ assert params["side"] == "sell"
+
+ def test_stop_order_type_maps_to_stop_loss(self, translator_cls):
+ parsed_data = {
+ self.mode.SIGNAL_KEY: "buy",
+ self.mode.ORDER_TYPE_SIGNAL: "stop",
+ }
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "stop_loss"
+ assert params["side"] == "buy"
+
+ def test_default_order_type_is_limit_when_price_present(self, translator_cls):
+ parsed_data = {
+ self.mode.SIGNAL_KEY: "buy",
+ self.mode.PRICE_KEY: 50000.0,
+ }
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "limit"
+ assert params["side"] == "buy"
+
+ def test_cancel_signal_returns_cancel_order_keyword(self, translator_cls):
+ parsed_data = {self.mode.SIGNAL_KEY: "cancel"}
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "cancel_order"
+ assert params == {}
+
+ def test_unknown_signal_returns_none_keyword(self, translator_cls):
+ parsed_data = {self.mode.SIGNAL_KEY: "unknown_signal"}
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword is None
+ assert params == {}
+
+ def test_signal_case_insensitive(self, translator_cls):
+ parsed_data = {self.mode.SIGNAL_KEY: "BUY"}
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "market"
+ assert params["side"] == "buy"
+
+ parsed_data = {self.mode.SIGNAL_KEY: "SelL"}
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "market"
+ assert params["side"] == "sell"
+
+ def test_withdraw_funds_raises_when_disabled(self, translator_cls):
+ parsed_data = {self.mode.SIGNAL_KEY: self.mode.WITHDRAW_FUNDS_SIGNAL}
+ with mock.patch.object(trading_constants, "ALLOW_FUNDS_TRANSFER", False):
+ with pytest.raises(trading_errors.DisabledFundsTransferError):
+ translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+
+ def test_withdraw_funds_returns_withdraw_when_enabled(self, translator_cls):
+ parsed_data = {self.mode.SIGNAL_KEY: self.mode.WITHDRAW_FUNDS_SIGNAL}
+ with mock.patch.object(trading_constants, "ALLOW_FUNDS_TRANSFER", True):
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "withdraw"
+ assert params == {}
+
+ def test_transfer_funds_raises_when_disabled(self, translator_cls):
+ parsed_data = {self.mode.SIGNAL_KEY: self.mode.TRANSFER_FUNDS_SIGNAL}
+ with mock.patch.object(trading_constants, "ALLOW_FUNDS_TRANSFER", False):
+ with pytest.raises(trading_errors.DisabledFundsTransferError):
+ translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+
+ def test_transfer_funds_returns_blockchain_wallet_transfer_when_enabled(self, translator_cls):
+ parsed_data = {self.mode.SIGNAL_KEY: self.mode.TRANSFER_FUNDS_SIGNAL}
+ with mock.patch.object(trading_constants, "ALLOW_FUNDS_TRANSFER", True):
+ keyword, params = translator_cls._get_dsl_signal_keyword_and_params(parsed_data)
+ assert keyword == "blockchain_wallet_transfer"
+ assert params == {}
+
+
+class TestAdaptSpecialFormatValuesForParam:
+ def test_take_profit_prices_list_unchanged(self, translator_cls):
+ dsl_param = trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY
+ ]
+ value = [100.0, 101.0]
+ assert translator_cls._adapt_special_format_values_for_param(dsl_param, value) == value
+
+ def test_take_profit_prices_scalar_to_list(self, translator_cls):
+ dsl_param = trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY
+ ]
+ assert translator_cls._adapt_special_format_values_for_param(dsl_param, 100.0) == [100.0]
+ assert translator_cls._adapt_special_format_values_for_param(dsl_param, "101") == ["101"]
+
+ def test_take_profit_prices_empty_scalar_to_empty_list(self, translator_cls):
+ dsl_param = trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY
+ ]
+ assert translator_cls._adapt_special_format_values_for_param(dsl_param, "") == []
+ assert translator_cls._adapt_special_format_values_for_param(dsl_param, 0) == []
+
+ def test_take_profit_volume_percents_list_to_float_list(self, translator_cls):
+ dsl_param = trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_VOLUME_RATIO_KEY
+ ]
+ result = translator_cls._adapt_special_format_values_for_param(dsl_param, ["50", "50"])
+ assert result == [50.0, 50.0]
+
+ def test_take_profit_volume_percents_scalar_to_list(self, translator_cls):
+ dsl_param = trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_VOLUME_RATIO_KEY
+ ]
+ result = translator_cls._adapt_special_format_values_for_param(dsl_param, "100")
+ assert result == [100.0]
+
+ def test_exchange_order_ids_string_to_list(self, translator_cls):
+ dsl_param = trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[
+ trading_view_signals_trading.TradingViewSignalsTradingMode.EXCHANGE_ORDER_IDS
+ ]
+ result = translator_cls._adapt_special_format_values_for_param(dsl_param, "id1, id2 , id3")
+ assert result == ["id1", "id2", "id3"]
+
+ def test_other_param_unchanged(self, translator_cls):
+ result = translator_cls._adapt_special_format_values_for_param("other_param", "value")
+ assert result == "value"
+
+
+class TestGetOperatorClass:
+ def test_returns_operator_when_keyword_matches(self, translator_cls):
+ mock_op = mock.Mock()
+ mock_op.get_name.return_value = "market"
+ with mock.patch.object(translator_cls, "_get_allowed_keywords", return_value=[mock_op]):
+ result = translator_cls._get_operator_class("market")
+ assert result is mock_op
+
+ def test_returns_none_when_keyword_not_found(self, translator_cls):
+ mock_op = mock.Mock()
+ mock_op.get_name.return_value = "market"
+ with mock.patch.object(translator_cls, "_get_allowed_keywords", return_value=[mock_op]):
+ result = translator_cls._get_operator_class("unknown")
+ assert result is None
+
+
+class TestCollectNumberedListParamValues:
+ def test_collects_numbered_keys_in_order(self, translator_cls):
+ params = {
+ "TAKE_PROFIT_PRICE_2": 102.0,
+ "TAKE_PROFIT_PRICE_1": 101.0,
+ "TAKE_PROFIT_PRICE_3": 103.0,
+ }
+ result = translator_cls._collect_numbered_list_param_values(
+ params, trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY
+ )
+ assert result == [101.0, 102.0, 103.0]
+
+ def test_standalone_takes_precedence_with_numbered(self, translator_cls):
+ params = {
+ "TAKE_PROFIT_PRICE": 99.0,
+ "TAKE_PROFIT_PRICE_1": 101.0,
+ }
+ result = translator_cls._collect_numbered_list_param_values(
+ params, trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY
+ )
+ assert result == [99.0, 101.0]
+
+ def test_skips_invalid_suffix(self, translator_cls):
+ params = {
+ "TAKE_PROFIT_PRICE_1": 101.0,
+ "TAKE_PROFIT_PRICE_abc": 999.0,
+ }
+ result = translator_cls._collect_numbered_list_param_values(
+ params, trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY
+ )
+ assert result == [101.0]
+
+ def test_standalone_empty_returns_numbered_only(self, translator_cls):
+ params = {
+ "TAKE_PROFIT_PRICE": "",
+ "TAKE_PROFIT_PRICE_1": 101.0,
+ }
+ result = translator_cls._collect_numbered_list_param_values(
+ params, trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY
+ )
+ assert result == [101.0]
+
+
+class TestPreProcessSpecialParams:
+ def test_removes_numbered_take_profit_keys_from_result(self, translator_cls):
+ operator_class = mock.Mock()
+ operator_class.get_name.return_value = "limit"
+ params = {
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY: 100.0,
+ f"{trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY}_1": 101.0,
+ }
+ result = translator_cls._pre_process_special_params(operator_class, params)
+ assert result == {
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY: [100.0, 101.0]
+ }
+
+ def test_stop_loss_maps_stop_price_to_price(self, translator_cls):
+ operator_class = mock.Mock()
+ operator_class.get_name.return_value = "stop_loss"
+ params = {
+ trading_view_signals_trading.TradingViewSignalsTradingMode.STOP_PRICE_KEY: 50000.0,
+ }
+ result = translator_cls._pre_process_special_params(operator_class, params)
+ assert result == {
+ trading_view_signals_trading.TradingViewSignalsTradingMode.PRICE_KEY: 50000.0
+ }
+
+
+class TestResolveOperatorParams:
+ def test_merges_params_and_other_params(self, translator_cls):
+ operator_class = mock.Mock()
+ param_side = _make_operator_param("side")
+ param_symbol = _make_operator_param("symbol")
+ param_amount = _make_operator_param("amount")
+ operator_class.get_parameters.return_value = [param_side, param_symbol, param_amount]
+ operator_class.get_name.return_value = "market"
+ params = {"side": "buy", "symbol": "BTC/USDT"}
+ other_params = {trading_view_signals_trading.TradingViewSignalsTradingMode.VOLUME_KEY: 0.01}
+ result = translator_cls._resolve_operator_params(operator_class, params, other_params)
+ assert result == ["'buy'", "'BTC/USDT'", "0.01"]
+
+
+class TestTranslateSignal:
+ def test_returns_none_for_empty_keyword(self, translator_cls):
+ parsed_data_no_keyword = {trading_view_signals_trading.TradingViewSignalsTradingMode.SIGNAL_KEY: "invalid"}
+ assert translator_cls.translate_signal(parsed_data_no_keyword) == "None"
+ parsed_data_empty_order_type = {
+ trading_view_signals_trading.TradingViewSignalsTradingMode.SIGNAL_KEY: "buy",
+ trading_view_signals_trading.TradingViewSignalsTradingMode.ORDER_TYPE_SIGNAL: "",
+ }
+ assert translator_cls.translate_signal(parsed_data_empty_order_type) == "None"
+
+ def test_returns_none_for_unknown_keyword(self, translator_cls):
+ parsed_data = {
+ trading_view_signals_trading.TradingViewSignalsTradingMode.SIGNAL_KEY: "buy",
+ trading_view_signals_trading.TradingViewSignalsTradingMode.ORDER_TYPE_SIGNAL: "unknown_op",
+ }
+ with mock.patch.object(translator_cls, "_get_operator_class", return_value=None):
+ result = translator_cls.translate_signal(parsed_data)
+ assert result == "None"
+
+ def test_returns_dsl_expression_for_known_operator(self, translator_cls):
+ parsed_data = {trading_view_signals_trading.TradingViewSignalsTradingMode.SIGNAL_KEY: "buy"}
+ mock_op = mock.Mock()
+ mock_op.get_name.return_value = "market"
+ with mock.patch.object(translator_cls, "_get_operator_class", return_value=mock_op):
+ with mock.patch.object(
+ translator_cls, "_resolve_operator_params", return_value=["'buy'", "'BTC/USDT'", "0.01"]
+ ):
+ result = translator_cls.translate_signal(parsed_data)
+ assert result == "market('buy', 'BTC/USDT', 0.01)"
diff --git a/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/trading_view_signals_trading.py b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/trading_view_signals_trading.py
index 521c9afc4..5a15c1a8e 100644
--- a/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/trading_view_signals_trading.py
+++ b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/trading_view_signals_trading.py
@@ -14,9 +14,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import decimal
-import math
import typing
-import json
import copy
import enum
@@ -26,10 +24,11 @@
import octobot_commons.constants as commons_constants
import octobot_commons.logging as commons_logging
import octobot_commons.signals as commons_signals
-import octobot_commons.tentacles_management as tentacles_management
+import octobot_commons.errors as commons_errors
+import octobot_commons.dsl_interpreter as dsl_interpreter
import octobot_services.api as services_api
-import octobot_trading.personal_data as trading_personal_data
import octobot_trading.constants as trading_constants
+import octobot_trading.blockchain_wallets as blockchain_wallets
import octobot_trading.enums as trading_enums
import octobot_trading.exchanges as trading_exchanges
import octobot_trading.modes as trading_modes
@@ -46,11 +45,11 @@ class TradingViewServiceFeed:
def get_name(self, *args, **kwargs):
raise ImportError("trading_view_service_feed not installed")
trading_view_service_feed = TradingViewServiceFeedImportMock()
-import tentacles.Trading.Mode.daily_trading_mode.daily_trading as daily_trading_mode
+import tentacles.Trading.Mode.dsl_trading_mode.dsl_trading as dsl_trading_mode
import tentacles.Trading.Mode.trading_view_signals_trading_mode.actions_params as actions_params
import tentacles.Trading.Mode.trading_view_signals_trading_mode.errors as trading_view_signals_trading_mode_errors
import tentacles.Meta.Keywords.scripting_library as scripting_library
-
+import tentacles.Trading.Mode.trading_view_signals_trading_mode.tradingview_signal_to_dsl_translator as tradingview_signal_to_dsl_translator
_CANCEL_POLICIES_CACHE = {}
@@ -65,12 +64,11 @@ class SignalActions(enum.Enum):
TRANSFER_FUNDS = "transfer_funds" # requires ALLOW_FUNDS_TRANSFER env to be True (disabled by default to protect funds)
-class TradingViewSignalsTradingMode(trading_modes.AbstractTradingMode):
+class TradingViewSignalsTradingMode(dsl_trading_mode.DSLTradingMode):
SERVICE_FEED_CLASS = trading_view_service_feed.TradingViewServiceFeed if hasattr(trading_view_service_feed, 'TradingViewServiceFeed') else None
TRADINGVIEW_FUTURES_SUFFIXES = [".P"]
PARAM_SEPARATORS = [";", "\\n", "\n"]
GENERIC_USD_STABLECOIN_SYMBOL = "USD*"
-
EXCHANGE_KEY = "EXCHANGE"
TRADING_TYPE_KEY = "TRADING_TYPE" # expect a trading_enums.ExchangeTypes value
SYMBOL_KEY = "SYMBOL"
@@ -82,7 +80,7 @@ class TradingViewSignalsTradingMode(trading_modes.AbstractTradingMode):
STOP_PRICE_KEY = "STOP_PRICE"
TAG_KEY = "TAG"
EXCHANGE_ORDER_IDS = "EXCHANGE_ORDER_IDS"
- LEVERAGE = "LEVERAGE"
+ LEVERAGE = "LEVERAGE"
TAKE_PROFIT_PRICE_KEY = "TAKE_PROFIT_PRICE"
TAKE_PROFIT_VOLUME_RATIO_KEY = "TAKE_PROFIT_VOLUME_RATIO"
ALLOW_HOLDINGS_ADAPTATION_KEY = "ALLOW_HOLDINGS_ADAPTATION"
@@ -97,12 +95,31 @@ class TradingViewSignalsTradingMode(trading_modes.AbstractTradingMode):
STOP_SIGNAL = "stop"
CANCEL_SIGNAL = "cancel"
SIDE_PARAM_KEY = "SIDE"
+ ALLOW_HOLDINGS_ADAPTATION_KEY = "ALLOW_HOLDINGS_ADAPTATION"
# special signals, to be used programmatically
ENSURE_EXCHANGE_BALANCE_SIGNAL = "ensure_exchange_balance"
ENSURE_BLOCKCHAIN_WALLET_BALANCE_SIGNAL = "ensure_blockchain_wallet_balance"
WITHDRAW_FUNDS_SIGNAL = "withdraw_funds" # disabled by default unless ALLOW_FUNDS_TRANSFER is True
TRANSFER_FUNDS_SIGNAL = "transfer_funds" # disabled by default unless ALLOW_FUNDS_TRANSFER is True
+ TRADINGVIEW_TO_DSL_PARAM = {
+ # translation of TradingView signal parameters to DSL keywords parameters
+ SYMBOL_KEY: "symbol",
+ VOLUME_KEY: "amount",
+ PRICE_KEY: "price",
+ REDUCE_ONLY_KEY: "reduce_only",
+ TAG_KEY: "tag",
+ STOP_PRICE_KEY: "stop_loss_price",
+ TAKE_PROFIT_PRICE_KEY: "take_profit_prices",
+ TAKE_PROFIT_VOLUME_RATIO_KEY: "take_profit_volume_percents",
+ EXCHANGE_ORDER_IDS: "exchange_order_ids",
+ SIDE_PARAM_KEY: "side",
+ TRAILING_PROFILE: "trailing_profile",
+ CANCEL_POLICY: "cancel_policy",
+ CANCEL_POLICY_PARAMS: "cancel_policy_params",
+ ALLOW_HOLDINGS_ADAPTATION_KEY: "allow_holdings_adaptation",
+ }
+
NON_ORDER_SIGNALS = {
# signals that are not related to order management
# they will be only be processed by the 1st trading mode on this matrix
@@ -164,9 +181,6 @@ def get_current_state(self) -> (str, float):
def get_mode_producer_classes(self) -> list:
return [TradingViewSignalsModeProducer]
- def get_mode_consumer_classes(self) -> list:
- return [TradingViewSignalsModeConsumer]
-
async def _get_feed_consumers(self):
parsed_symbol = symbol_util.parse_symbol(self.symbol)
self.str_symbol = str(parsed_symbol)
@@ -306,7 +320,10 @@ async def _process_or_ignore_non_order_signal(self, parsed_data: dict) -> bool:
if self.is_non_order_signal(parsed_data):
if self.is_first_trading_mode_on_this_matrix():
self.logger.info(f"Non order signal {parsed_data[self.SIGNAL_KEY]} processing")
- await self.producers[0].signal_callback(parsed_data, script_keywords.get_base_context(self))
+ try:
+ await self.producers[0].signal_callback(parsed_data, script_keywords.get_base_context(self))
+ except commons_errors.DSLInterpreterError as err:
+ self.logger.exception(err, True, f"Error when calling DSL script: {err}")
else:
self.logger.info(f"Non order signal {parsed_data[self.SIGNAL_KEY]} ignored: another trading mode on this matrix will process it")
return True
@@ -325,9 +342,12 @@ async def _trading_view_signal_callback(self, data):
self.logger.error(error)
try:
if self.is_relevant_signal(parsed_data):
+ parsed_data[self.SYMBOL_KEY] = self.str_symbol # make sure symbol is in the correct format
await self.producers[0].signal_callback(parsed_data, script_keywords.get_base_context(self))
else:
self._log_error_message_if_relevant(parsed_data, signal_data)
+ except commons_errors.DSLInterpreterError as err:
+ self.logger.exception(err, True, f"Error when calling DSL script: {err}")
except (
trading_errors.InvalidArgumentError,
trading_errors.InvalidCancelPolicyError,
@@ -354,37 +374,7 @@ def is_backtestable():
return False
-class TradingViewSignalsModeConsumer(daily_trading_mode.DailyTradingModeConsumer):
- def __init__(self, trading_mode):
- super().__init__(trading_mode)
- self.QUANTITY_MIN_PERCENT = decimal.Decimal(str(0.1))
- self.QUANTITY_MAX_PERCENT = decimal.Decimal(str(0.9))
-
- self.QUANTITY_MARKET_MIN_PERCENT = decimal.Decimal(str(0.5))
- self.QUANTITY_MARKET_MAX_PERCENT = trading_constants.ONE
- self.QUANTITY_BUY_MARKET_ATTENUATION = decimal.Decimal(str(0.2))
-
- self.BUY_LIMIT_ORDER_MAX_PERCENT = decimal.Decimal(str(0.995))
- self.BUY_LIMIT_ORDER_MIN_PERCENT = decimal.Decimal(str(0.99))
-
- self.USE_CLOSE_TO_CURRENT_PRICE = True
- self.CLOSE_TO_CURRENT_PRICE_DEFAULT_RATIO = decimal.Decimal(str(trading_mode.trading_config.get("close_to_current_price_difference",
- 0.02)))
- self.BUY_WITH_MAXIMUM_SIZE_ORDERS = trading_mode.trading_config.get("use_maximum_size_orders", False)
- self.SELL_WITH_MAXIMUM_SIZE_ORDERS = trading_mode.trading_config.get("use_maximum_size_orders", False)
- self.USE_STOP_ORDERS = False
-
-
-class TradingViewSignalsModeProducer(daily_trading_mode.DailyTradingModeProducer):
- def __init__(self, channel, config, trading_mode, exchange_manager):
- super().__init__(channel, config, trading_mode, exchange_manager)
- self.EVAL_BY_STATES = {
- trading_enums.EvaluatorStates.LONG: -0.6,
- trading_enums.EvaluatorStates.SHORT: 0.6,
- trading_enums.EvaluatorStates.VERY_LONG: -1,
- trading_enums.EvaluatorStates.VERY_SHORT: 1,
- trading_enums.EvaluatorStates.NEUTRAL: 0,
- }
+class TradingViewSignalsModeProducer(dsl_trading_mode.DSLTradingModeProducer):
def get_channels_registration(self):
# do not register on matrix or candles channels
@@ -394,171 +384,47 @@ async def set_final_eval(self, matrix_id: str, cryptocurrency: str, symbol: str,
# Ignore matrix calls
pass
- def _parse_pre_update_order_details(self, parsed_data):
- return {
- TradingViewSignalsModeConsumer.LEVERAGE:
- parsed_data.get(TradingViewSignalsTradingMode.LEVERAGE, None),
- }
-
- async def _parse_order_details(self, ctx, parsed_data) -> tuple[SignalActions, trading_enums.EvaluatorStates, dict]:
- signal = parsed_data[TradingViewSignalsTradingMode.SIGNAL_KEY].casefold()
- order_type = parsed_data.get(TradingViewSignalsTradingMode.ORDER_TYPE_SIGNAL, "").casefold()
- order_exchange_creation_params = {
- param_name.split(TradingViewSignalsTradingMode.PARAM_PREFIX_KEY)[1]: param_value
- for param_name, param_value in parsed_data.items()
- if param_name.startswith(TradingViewSignalsTradingMode.PARAM_PREFIX_KEY)
- }
- parsed_side = None
- action = None
- if signal == TradingViewSignalsTradingMode.SELL_SIGNAL:
- action = SignalActions.CREATE_ORDERS
- parsed_side = trading_enums.TradeOrderSide.SELL.value
- if order_type == TradingViewSignalsTradingMode.MARKET_SIGNAL:
- state = trading_enums.EvaluatorStates.VERY_SHORT
- elif order_type in (TradingViewSignalsTradingMode.LIMIT_SIGNAL, TradingViewSignalsTradingMode.STOP_SIGNAL):
- state = trading_enums.EvaluatorStates.SHORT
- else:
- state = trading_enums.EvaluatorStates.VERY_SHORT if self.trading_mode.USE_MARKET_ORDERS \
- else trading_enums.EvaluatorStates.SHORT
- elif signal == TradingViewSignalsTradingMode.BUY_SIGNAL:
- action = SignalActions.CREATE_ORDERS
- parsed_side = trading_enums.TradeOrderSide.BUY.value
- if order_type == TradingViewSignalsTradingMode.MARKET_SIGNAL:
- state = trading_enums.EvaluatorStates.VERY_LONG
- elif order_type in (TradingViewSignalsTradingMode.LIMIT_SIGNAL, TradingViewSignalsTradingMode.STOP_SIGNAL):
- state = trading_enums.EvaluatorStates.LONG
- else:
- state = trading_enums.EvaluatorStates.VERY_LONG if self.trading_mode.USE_MARKET_ORDERS \
- else trading_enums.EvaluatorStates.LONG
- else:
- state = trading_enums.EvaluatorStates.NEUTRAL
- if signal == TradingViewSignalsTradingMode.CANCEL_SIGNAL:
- action = SignalActions.CANCEL_ORDERS
- elif signal == TradingViewSignalsTradingMode.ENSURE_EXCHANGE_BALANCE_SIGNAL:
- action = SignalActions.ENSURE_EXCHANGE_BALANCE
- elif signal == TradingViewSignalsTradingMode.ENSURE_BLOCKCHAIN_WALLET_BALANCE_SIGNAL:
- action = SignalActions.ENSURE_BLOCKCHAIN_WALLET_BALANCE
- elif signal == TradingViewSignalsTradingMode.WITHDRAW_FUNDS_SIGNAL:
- if not trading_constants.ALLOW_FUNDS_TRANSFER:
- raise trading_errors.DisabledFundsTransferError(
- "Withdraw funds signal is not allowed when ALLOW_FUNDS_TRANSFER is disabled"
- )
- action = SignalActions.WITHDRAW_FUNDS
- elif signal == TradingViewSignalsTradingMode.TRANSFER_FUNDS_SIGNAL:
- if not trading_constants.ALLOW_FUNDS_TRANSFER:
- raise trading_errors.DisabledFundsTransferError(
- "Transfer funds signal is not allowed when ALLOW_FUNDS_TRANSFER is disabled"
- )
- action = SignalActions.TRANSFER_FUNDS
- if action is None:
- raise trading_errors.InvalidArgumentError(
- f"Unknown signal: {parsed_data[TradingViewSignalsTradingMode.SIGNAL_KEY]}, full data= {parsed_data}"
- )
- target_price = 0 if order_type == TradingViewSignalsTradingMode.MARKET_SIGNAL else (
- await self._parse_element(ctx, parsed_data, TradingViewSignalsTradingMode.PRICE_KEY, 0, True))
- stop_price = await self._parse_element(
- ctx, parsed_data, TradingViewSignalsTradingMode.STOP_PRICE_KEY, math.nan, True
- )
- tp_price = await self._parse_element(
- ctx, parsed_data, TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY, math.nan, True
- )
- additional_tp_volume_ratios = []
- if first_volume := await self._parse_element(
- ctx, parsed_data, TradingViewSignalsTradingMode.TAKE_PROFIT_VOLUME_RATIO_KEY, 0, False
- ):
- additional_tp_volume_ratios.append(first_volume)
- additional_tp_prices = await self._parse_additional_decimal_elements(
- ctx, parsed_data, f"{TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY}_", math.nan, True
- )
- additional_tp_volume_ratios += await self._parse_additional_decimal_elements(
- ctx, parsed_data, f"{TradingViewSignalsTradingMode.TAKE_PROFIT_VOLUME_RATIO_KEY}_", 0, False
- )
- allow_holdings_adaptation = parsed_data.get(TradingViewSignalsTradingMode.ALLOW_HOLDINGS_ADAPTATION_KEY, False)
- reduce_only = parsed_data.get(TradingViewSignalsTradingMode.REDUCE_ONLY_KEY, False)
- amount = await self._parse_volume(
- ctx, parsed_data, parsed_side, target_price, allow_holdings_adaptation, reduce_only
- )
- trailing_profile = parsed_data.get(TradingViewSignalsTradingMode.TRAILING_PROFILE)
- maybe_cancel_policy, cancel_policy_params = self._parse_cancel_policy(parsed_data)
- order_data = {
- TradingViewSignalsModeConsumer.PRICE_KEY: target_price,
- TradingViewSignalsModeConsumer.VOLUME_KEY: amount,
- TradingViewSignalsModeConsumer.STOP_PRICE_KEY: stop_price,
- TradingViewSignalsModeConsumer.STOP_ONLY: order_type == TradingViewSignalsTradingMode.STOP_SIGNAL,
- TradingViewSignalsModeConsumer.TAKE_PROFIT_PRICE_KEY: tp_price,
- TradingViewSignalsModeConsumer.ADDITIONAL_TAKE_PROFIT_PRICES_KEY: additional_tp_prices,
- TradingViewSignalsModeConsumer.ADDITIONAL_TAKE_PROFIT_VOLUME_RATIOS_KEY: additional_tp_volume_ratios,
- TradingViewSignalsModeConsumer.REDUCE_ONLY_KEY: reduce_only,
- TradingViewSignalsModeConsumer.TAG_KEY:
- parsed_data.get(TradingViewSignalsTradingMode.TAG_KEY, None),
- TradingViewSignalsModeConsumer.TRAILING_PROFILE: trailing_profile.casefold() if trailing_profile else None,
- TradingViewSignalsModeConsumer.CANCEL_POLICY: maybe_cancel_policy,
- TradingViewSignalsModeConsumer.CANCEL_POLICY_PARAMS: cancel_policy_params,
- TradingViewSignalsModeConsumer.EXCHANGE_ORDER_IDS:
- parsed_data.get(TradingViewSignalsTradingMode.EXCHANGE_ORDER_IDS, None),
- TradingViewSignalsModeConsumer.LEVERAGE:
- parsed_data.get(TradingViewSignalsTradingMode.LEVERAGE, None),
- TradingViewSignalsModeConsumer.ORDER_EXCHANGE_CREATION_PARAMS: order_exchange_creation_params,
- }
- return action, state, order_data
-
- def _parse_cancel_policy(self, parsed_data):
- if policy := parsed_data.get(TradingViewSignalsTradingMode.CANCEL_POLICY, None):
- lowercase_policy = policy.casefold()
- if not _CANCEL_POLICIES_CACHE:
- _CANCEL_POLICIES_CACHE.update({
- policy.__name__.casefold(): policy.__name__
- for policy in tentacles_management.get_all_classes_from_parent(trading_personal_data.OrderCancelPolicy)
- })
- try:
- policy_class = _CANCEL_POLICIES_CACHE[lowercase_policy]
- policy_params = parsed_data.get(TradingViewSignalsTradingMode.CANCEL_POLICY_PARAMS)
- parsed_policy_params = json.loads(policy_params.replace("'", '"')) if isinstance(policy_params, str) else policy_params
- return policy_class, parsed_policy_params
- except KeyError:
- raise trading_errors.InvalidCancelPolicyError(
- f"Unknown cancel policy: {policy}. Available policies: {', '.join(_CANCEL_POLICIES_CACHE.keys())}"
- )
-
- return None, None
-
- async def _parse_additional_decimal_elements(self, ctx, parsed_data, element_prefix, default, is_price):
- values: list[decimal.Decimal] = []
- for key, value in parsed_data.items():
- if key.startswith(element_prefix) and len(key.split(element_prefix)) == 2:
- values.append(await self._parse_element(ctx, parsed_data, key, default, is_price))
- return values
-
- async def _parse_element(self, ctx, parsed_data, key, default, is_price)-> decimal.Decimal:
- target_value = decimal.Decimal(str(default))
- value = parsed_data.get(key, 0)
- if is_price:
- if input_price_or_offset := value:
- target_value = await script_keywords.get_price_with_offset(
- ctx, input_price_or_offset, use_delta_type_as_flat_value=True
- )
- else:
- target_value = decimal.Decimal(str(value))
- return target_value
-
- async def _parse_volume(self, ctx, parsed_data, side, target_price, allow_holdings_adaptation, reduce_only):
- user_volume = str(parsed_data.get(TradingViewSignalsTradingMode.VOLUME_KEY, 0))
- if user_volume == "0":
- return trading_constants.ZERO
- return await script_keywords.get_amount_from_input_amount(
- context=ctx,
- input_amount=user_volume,
- side=side,
- reduce_only=reduce_only,
- is_stop_order=False,
- use_total_holding=False,
- target_price=target_price,
- # raise when not enough funds to create an order according to user input
- allow_holdings_adaptation=allow_holdings_adaptation,
+ async def call_dsl_script(
+ self, parsed_data: dict,
+ dependencies: typing.Optional[commons_signals.SignalDependencies] = None
+ ) -> dsl_interpreter.DSLCallResult:
+ dsl_script = tradingview_signal_to_dsl_translator.TradingViewSignalToDSLTranslator.translate_signal(
+ parsed_data
)
+ self.trading_mode.set_dsl_script(dsl_script, raise_on_error=True, dependencies=dependencies) # type: ignore
+ return await self.trading_mode.interpret_dsl_script() # type: ignore
@trading_modes.enabled_trader_only(raise_when_disabled=True)
async def signal_callback(self, parsed_data: dict, ctx):
+ dependencies = await self._before_signal_processing(parsed_data)
+ try:
+ signal = parsed_data[TradingViewSignalsTradingMode.SIGNAL_KEY].casefold()
+ except KeyError:
+ raise trading_errors.InvalidArgumentError(
+ f"{TradingViewSignalsTradingMode.SIGNAL_KEY} key "
+ f"not found in parsed data: {parsed_data}"
+ )
+ match signal:
+ # special cases for non-order signals
+ case SignalActions.ENSURE_EXCHANGE_BALANCE:
+ return await self.ensure_exchange_balance(parsed_data)
+ case SignalActions.ENSURE_BLOCKCHAIN_WALLET_BALANCE:
+ return await self.ensure_blockchain_wallet_balance(parsed_data)
+ case _:
+ # default case: most signal
+ result = await self.call_dsl_script(parsed_data, dependencies)
+ if result.result:
+ self.logger.info(f"DSL script successfully executed. Result: {result.result}")
+ else:
+ self.logger.error(f"Error when executing DSL script: {result.error}")
+ return result
+
+ async def _before_signal_processing(self, parsed_data: dict):
+ dependencies = await self._updated_orders_to_cancel(parsed_data)
+ await self._update_leverage_if_necessary(parsed_data)
+ return dependencies
+
+ async def _updated_orders_to_cancel(self, parsed_data: dict):
_, dependencies = await self.apply_cancel_policies()
is_order_signal = not self.trading_mode.is_non_order_signal(parsed_data)
if is_order_signal and self.trading_mode.CANCEL_PREVIOUS_ORDERS:
@@ -569,93 +435,20 @@ async def signal_callback(self, parsed_data: dict, ctx):
dependencies.extend(new_dependencies)
else:
dependencies = new_dependencies
- pre_update_data = self._parse_pre_update_order_details(parsed_data)
- await self._process_pre_state_update_actions(ctx, pre_update_data)
- await self._process_meta_actions(parsed_data)
- if self.trading_mode.is_meta_action_only(parsed_data):
- return
- action, state, order_data = await self._parse_order_details(ctx, parsed_data)
- self.final_eval = self.EVAL_BY_STATES[state]
- # Use daily trading mode state system
- await self._set_state(
- self.trading_mode.cryptocurrency, ctx.symbol, action, state, order_data, parsed_data, dependencies=dependencies
- )
+ return dependencies
- async def _process_pre_state_update_actions(self, context, data: dict):
+ async def _update_leverage_if_necessary(self, parsed_data: dict):
try:
- if leverage := data.get(TradingViewSignalsModeConsumer.LEVERAGE):
- await self.trading_mode.set_leverage(context.symbol, None, decimal.Decimal(str(leverage)))
+ if leverage := parsed_data.get(self.trading_mode.LEVERAGE):
+ if symbol := parsed_data.get(TradingViewSignalsTradingMode.SYMBOL_KEY):
+ await self.trading_mode.set_leverage(symbol, None, decimal.Decimal(str(leverage)))
+ else:
+ self.logger.error(f"Impossible to update leverage: symbol not found in parsed data: {parsed_data}")
except Exception as err:
self.logger.exception(
- err, True, f"Error when processing pre_state_update_actions: {err} (data: {data})"
+ err, True, f"Error when updating leverage: {err} (data: {parsed_data})"
)
- async def _process_meta_actions(self, parsed_data: dict):
- # implement in subclass if needed
- pass
-
- async def _set_state(
- self, cryptocurrency: str, symbol: str, action: SignalActions,
- new_state: trading_enums.EvaluatorStates, order_data: dict, parsed_data: dict,
- dependencies: typing.Optional[commons_signals.SignalDependencies] = None
- ):
- async with self.trading_mode_trigger():
- if self.state != new_state:
- self.state = new_state
- self.logger.info(f"[{symbol}] new state: {self.state.name}")
-
- # if new state is not neutral --> cancel orders and create new else keep orders
- if action == SignalActions.CREATE_ORDERS:
- # call orders creation from consumers
- await self.submit_trading_evaluation(cryptocurrency=cryptocurrency,
- symbol=symbol,
- time_frame=None,
- final_note=self.final_eval,
- state=self.state,
- data=order_data,
- dependencies=dependencies)
-
- # send_notification
- if not self.exchange_manager.is_backtesting:
- await self._send_alert_notification(symbol, new_state)
- else:
- await self.process_non_creating_orders_actions(action, symbol, order_data, parsed_data)
-
- async def process_non_creating_orders_actions(
- self, action: SignalActions, symbol: str, order_data: dict, parsed_data: dict
- ):
- match (action):
- case SignalActions.CANCEL_ORDERS:
- await self.cancel_orders_from_order_data(symbol, order_data, parsed_data)
- case SignalActions.ENSURE_EXCHANGE_BALANCE:
- await self.ensure_exchange_balance(parsed_data)
- case SignalActions.ENSURE_BLOCKCHAIN_WALLET_BALANCE:
- await self.ensure_blockchain_wallet_balance(parsed_data)
- case SignalActions.WITHDRAW_FUNDS:
- await self.withdraw_funds(parsed_data)
- case SignalActions.TRANSFER_FUNDS:
- await self.transfer_funds(parsed_data)
- case _:
- raise trading_errors.InvalidArgumentError(f"Unknown action: {action}.")
-
- async def cancel_orders_from_order_data(self, symbol: str, order_data: dict, parsed_data: dict) -> tuple[bool, typing.Optional[commons_signals.SignalDependencies]]:
- if not self.trading_mode.consumers:
- return False, None
-
- exchange_ids = order_data.get(TradingViewSignalsModeConsumer.EXCHANGE_ORDER_IDS, None)
- cancel_order_raw_side = order_data.get(
- TradingViewSignalsModeConsumer.ORDER_EXCHANGE_CREATION_PARAMS, {}
- ).get(TradingViewSignalsTradingMode.SIDE_PARAM_KEY, None) or parsed_data.get(TradingViewSignalsTradingMode.SIDE_PARAM_KEY, None)
- cancel_order_raw_side = cancel_order_raw_side.lower() if cancel_order_raw_side else None
- cancel_order_side = trading_enums.TradeOrderSide.BUY if cancel_order_raw_side == trading_enums.TradeOrderSide.BUY.value \
- else trading_enums.TradeOrderSide.SELL if cancel_order_raw_side == trading_enums.TradeOrderSide.SELL.value else None
- cancel_order_tag = order_data.get(TradingViewSignalsModeConsumer.TAG_KEY, None)
-
- # cancel open orders
- return await self.cancel_symbol_open_orders(
- symbol, side=cancel_order_side, tag=cancel_order_tag, exchange_order_ids=exchange_ids
- )
-
async def ensure_exchange_balance(self, parsed_data: dict) -> decimal.Decimal:
ensure_exchange_balance_params = actions_params.EnsureExchangeBalanceParams.from_dict(parsed_data)
holdings = trading_api.get_portfolio_currency(self.exchange_manager, ensure_exchange_balance_params.asset).available
@@ -672,7 +465,10 @@ async def ensure_exchange_balance(self, parsed_data: dict) -> decimal.Decimal:
async def ensure_blockchain_wallet_balance(self, parsed_data: dict) -> decimal.Decimal:
ensure_blockchain_wallet_balance_params = actions_params.EnsureBlockchainWalletBalanceParams.from_dict(parsed_data)
async with trading_api.blockchain_wallet_context(
- ensure_blockchain_wallet_balance_params.wallet_details,
+ blockchain_wallets.BlockchainWalletParameters(
+ blockchain_descriptor=ensure_blockchain_wallet_balance_params.blockchain_descriptor,
+ wallet_descriptor=ensure_blockchain_wallet_balance_params.wallet_descriptor,
+ ),
self.exchange_manager.trader
) as wallet:
wallet_balance = await wallet.get_balance()
@@ -683,59 +479,13 @@ async def ensure_blockchain_wallet_balance(self, parsed_data: dict) -> decimal.D
if balance < decimal.Decimal(str(ensure_blockchain_wallet_balance_params.holdings)):
raise trading_view_signals_trading_mode_errors.MissingFundsError(
f"Not enough {ensure_blockchain_wallet_balance_params.asset} available on "
- f"{ensure_blockchain_wallet_balance_params.wallet_details.blockchain_descriptor.network} "
+ f"{ensure_blockchain_wallet_balance_params.blockchain_descriptor.network} "
f"blockchain wallet: available: {balance}, required: {ensure_blockchain_wallet_balance_params.holdings}"
)
else:
self.logger.info(
f"Enough {ensure_blockchain_wallet_balance_params.asset} available on "
- f"{ensure_blockchain_wallet_balance_params.wallet_details.blockchain_descriptor.network} "
+ f"{ensure_blockchain_wallet_balance_params.blockchain_descriptor.network} "
f"blockchain wallet: available: {balance}, required: {ensure_blockchain_wallet_balance_params.holdings}"
)
return balance
-
- async def withdraw_funds(self, parsed_data: dict) -> dict:
- withdraw_funds_params = actions_params.WithdrawFundsParams.from_dict(parsed_data)
- # requires ALLOW_FUNDS_TRANSFER env to be True (disabled by default to protect funds)
- amount = withdraw_funds_params.amount or (
- trading_api.get_portfolio_currency(self.exchange_manager, withdraw_funds_params.asset).available
- )
- transaction = await self.exchange_manager.trader.withdraw(
- withdraw_funds_params.asset,
- decimal.Decimal(str(amount)),
- withdraw_funds_params.network,
- withdraw_funds_params.address,
- tag=withdraw_funds_params.tag,
- params=withdraw_funds_params.params
- )
- self.logger.info(
- f"Withdrawn {amount} {withdraw_funds_params.asset} "
- f"from {self.exchange_manager.exchange_name}: {transaction}"
- )
- return transaction
-
- async def transfer_funds(self, parsed_data: dict) -> dict:
- transfer_funds_params = actions_params.TransferFundsParams.from_dict(parsed_data)
- async with trading_api.blockchain_wallet_context(
- transfer_funds_params.wallet_details,
- self.exchange_manager.trader
- ) as wallet:
- if transfer_funds_params.address:
- address = transfer_funds_params.address
- elif transfer_funds_params.destination_exchange == self.exchange_manager.exchange_name:
- address = (
- await self.exchange_manager.trader.get_deposit_address(transfer_funds_params.asset)
- )[trading_enums.ExchangeConstantsDepositAddressColumns.ADDRESS.value]
- else:
- raise trading_errors.InvalidArgumentError(
- f"Unsupported destination exchange: {transfer_funds_params.destination_exchange}"
- )
- # requires ALLOW_FUNDS_TRANSFER env to be True (disabled by default to protect funds)
- transaction = await wallet.withdraw(
- transfer_funds_params.asset,
- decimal.Decimal(str(transfer_funds_params.amount)),
- transfer_funds_params.wallet_details.blockchain_descriptor.network,
- address,
- )
- self.logger.info(f"Transferred {transfer_funds_params.amount} {transfer_funds_params.asset}: {transaction}")
- return transaction
diff --git a/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/tradingview_signal_to_dsl_translator.py b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/tradingview_signal_to_dsl_translator.py
new file mode 100644
index 000000000..54134cae3
--- /dev/null
+++ b/packages/tentacles/Trading/Mode/trading_view_signals_trading_mode/tradingview_signal_to_dsl_translator.py
@@ -0,0 +1,221 @@
+# Drakkar-Software OctoBot-Tentacles
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+
+import octobot_commons.dsl_interpreter as dsl_interpreter
+import octobot_commons.errors as commons_errors
+import tentacles.Meta.DSL_operators as dsl_operators
+import octobot_trading.exchanges as trading_exchanges
+import octobot_trading.constants as trading_constants
+import octobot_trading.errors as trading_errors
+import tentacles.Trading.Mode.trading_view_signals_trading_mode.trading_view_signals_trading as trading_view_signals_trading
+
+
+FREE_PARAMS_NAME = "params"
+
+
+class TradingViewSignalToDSLTranslator:
+ """
+ Translates TradingView signal parameters to DSL parameters.
+ Handles special cases for some parameters (ex: take profit prices, exchange order ids, ...).
+ """
+
+ @classmethod
+ def _get_dsl_signal_keyword_and_params(cls, parsed_data: dict) -> tuple[typing.Optional[str], dict[str, typing.Any]]:
+ keyword = None
+ params = {}
+ try:
+ signal = parsed_data[trading_view_signals_trading.TradingViewSignalsTradingMode.SIGNAL_KEY].casefold()
+ except KeyError:
+ raise trading_errors.InvalidArgumentError(
+ f"{trading_view_signals_trading.TradingViewSignalsTradingMode.SIGNAL_KEY} key "
+ f"not found in parsed data: {parsed_data}"
+ )
+ price = parsed_data.get(trading_view_signals_trading.TradingViewSignalsTradingMode.PRICE_KEY)
+ default_order_type = "market" if price is None else "limit"
+ order_type = parsed_data.get(trading_view_signals_trading.TradingViewSignalsTradingMode.ORDER_TYPE_SIGNAL, default_order_type).casefold()
+ if order_type == trading_view_signals_trading.TradingViewSignalsTradingMode.STOP_SIGNAL.lower():
+ order_type = "stop_loss"
+ if signal == trading_view_signals_trading.TradingViewSignalsTradingMode.SELL_SIGNAL:
+ keyword = order_type
+ params[
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[trading_view_signals_trading.TradingViewSignalsTradingMode.SIDE_PARAM_KEY]
+ ] = "sell"
+ elif signal == trading_view_signals_trading.TradingViewSignalsTradingMode.BUY_SIGNAL:
+ keyword = order_type
+ params[
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[trading_view_signals_trading.TradingViewSignalsTradingMode.SIDE_PARAM_KEY]
+ ] = "buy"
+ elif signal == trading_view_signals_trading.TradingViewSignalsTradingMode.CANCEL_SIGNAL:
+ keyword = "cancel_order"
+ elif signal == trading_view_signals_trading.TradingViewSignalsTradingMode.WITHDRAW_FUNDS_SIGNAL:
+ if not trading_constants.ALLOW_FUNDS_TRANSFER:
+ raise trading_errors.DisabledFundsTransferError(
+ "Withdraw funds signal is not allowed when ALLOW_FUNDS_TRANSFER is disabled"
+ )
+ keyword = "withdraw"
+ elif signal == trading_view_signals_trading.TradingViewSignalsTradingMode.TRANSFER_FUNDS_SIGNAL:
+ if not trading_constants.ALLOW_FUNDS_TRANSFER:
+ raise trading_errors.DisabledFundsTransferError(
+ "Transfer funds signal is not allowed when ALLOW_FUNDS_TRANSFER is disabled"
+ )
+ keyword = "blockchain_wallet_transfer"
+ return keyword, params
+
+ @classmethod
+ def _map_other_params_to_dsl(
+ cls, other_params: dict[str, typing.Any], operator_params: list[dsl_interpreter.OperatorParameter]
+ ) -> dict[str, typing.Any]:
+ operator_param_names = {p.name for p in operator_params}
+ dsl_params = {}
+ params_dict = {}
+ for key, value in other_params.items():
+ if not isinstance(key, str):
+ continue
+ dsl_key = trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM.get(key, key.lower())
+ if key.startswith(trading_view_signals_trading.TradingViewSignalsTradingMode.PARAM_PREFIX_KEY):
+ param_name = key[len(trading_view_signals_trading.TradingViewSignalsTradingMode.PARAM_PREFIX_KEY):]
+ params_dict[param_name] = value
+ elif dsl_key in operator_param_names:
+ dsl_params[dsl_key] = value
+ if params_dict and FREE_PARAMS_NAME in operator_param_names:
+ dsl_params[FREE_PARAMS_NAME] = params_dict
+ return dsl_params
+
+ @classmethod
+ def _adapt_special_format_values_for_param(
+ cls, param_name: str, value: typing.Any
+ ) -> typing.Any:
+ if param_name == trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY]:
+ if isinstance(value, list):
+ return value
+ if isinstance(value, (str, int, float)):
+ return [value] if value else []
+ if param_name == trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_VOLUME_RATIO_KEY]:
+ if isinstance(value, list):
+ return [float(v) for v in value]
+ if isinstance(value, (str, int, float)):
+ return [float(value)] if value else []
+ if param_name == trading_view_signals_trading.TradingViewSignalsTradingMode.TRADINGVIEW_TO_DSL_PARAM[trading_view_signals_trading.TradingViewSignalsTradingMode.EXCHANGE_ORDER_IDS] and isinstance(value, str):
+ return [oid.strip() for oid in value.split(",") if oid.strip()]
+ return value
+
+ @classmethod
+ def _get_operator_class(cls, keyword: str) -> typing.Optional[dsl_interpreter.Operator]:
+ allowed_operators = cls._get_allowed_keywords()
+ for op in allowed_operators:
+ if op.get_name() == keyword:
+ return op
+ return None
+
+ @classmethod
+ def _collect_numbered_list_param_values(
+ cls, params: dict[str, typing.Any], base_key: str
+ ) -> list[typing.Any]:
+ # collect numbered list values from params, ex: TAKE_PROFIT_PRICE_1, TAKE_PROFIT_PRICE_2, ...
+ standalone = params.get(base_key)
+ numbered: list[tuple[int, typing.Any]] = []
+ prefix = f"{base_key}_"
+ for key, value in params.items():
+ if not isinstance(key, str) or not key.startswith(prefix):
+ continue
+ suffix = key[len(prefix):]
+ try:
+ index = int(suffix)
+ numbered.append((index, value))
+ except ValueError:
+ continue
+ numbered.sort(key=lambda item: item[0])
+ if standalone is not None and standalone != "":
+ return [standalone] + [v for _, v in numbered]
+ return [v for _, v in numbered]
+
+ @classmethod
+ def _pre_process_special_params(
+ cls,
+ operator_class: dsl_interpreter.Operator,
+ params: dict[str, typing.Any]
+ ) -> dict[str, typing.Any]:
+ result = {
+ k: v
+ for k, v in params.items()
+ if not isinstance(k, str)
+ or not (
+ k.startswith(
+ f"{trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY}_"
+ ) or k.startswith(
+ f"{trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_VOLUME_RATIO_KEY}_"
+ )
+ )
+ }
+ if operator_class.get_name() == "stop_loss" and trading_view_signals_trading.TradingViewSignalsTradingMode.STOP_PRICE_KEY in params:
+            # special case for stop loss price: used as price when creating a stop loss order
+ result[trading_view_signals_trading.TradingViewSignalsTradingMode.PRICE_KEY] = params[trading_view_signals_trading.TradingViewSignalsTradingMode.STOP_PRICE_KEY]
+ result.pop(trading_view_signals_trading.TradingViewSignalsTradingMode.STOP_PRICE_KEY)
+ for base_key in (
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_PRICE_KEY,
+ trading_view_signals_trading.TradingViewSignalsTradingMode.TAKE_PROFIT_VOLUME_RATIO_KEY,
+ ):
+ if base_key in params or any(
+ isinstance(k, str) and k.startswith(f"{base_key}_")
+ for k in params
+ ):
+ values = cls._collect_numbered_list_param_values(params, base_key)
+ if values:
+ result[base_key] = values
+ elif base_key in params:
+ result[base_key] = [params[base_key]] if params[base_key] not in (None, "") else []
+ return result
+
+ @classmethod
+ def _resolve_operator_params(
+ cls,
+ operator_class: dsl_interpreter.Operator,
+ params: dict[str, typing.Any],
+ other_params: dict[str, typing.Any]
+ ) -> list[str]:
+ operator_params = operator_class.get_parameters()
+ adapted_other = cls._pre_process_special_params(operator_class, other_params)
+ mapped_other = cls._map_other_params_to_dsl(adapted_other, operator_params)
+ merged = dict(params)
+ for dsl_key, value in mapped_other.items():
+ if dsl_key not in merged:
+ merged[dsl_key] = value
+ # adapt special format values when needed
+ merged = {
+ name: cls._adapt_special_format_values_for_param(name, value)
+ for name, value in merged.items()
+ }
+ return dsl_interpreter.resove_operator_params(operator_class, merged)
+
+ @classmethod
+ def translate_signal(cls, parsed_data: dict) -> str:
+ keyword, params = cls._get_dsl_signal_keyword_and_params(parsed_data)
+ if not keyword:
+ return "None"
+ if operator_class := cls._get_operator_class(keyword):
+ all_params = cls._resolve_operator_params(operator_class, params, parsed_data)
+ return f"{operator_class.get_name()}({', '.join(all_params)})"
+ return "None"
+
+ @classmethod
+ def _get_allowed_keywords(cls) -> list[dsl_interpreter.Operator]:
+ return (
+ dsl_operators.create_create_order_operators(None) +
+ dsl_operators.create_cancel_order_operators(None) +
+ dsl_operators.create_blockchain_wallet_operators(None) +
+ dsl_operators.create_portfolio_operators(None)
+ ) # type: ignore
diff --git a/packages/trading/octobot_trading/api/blockchain_wallets.py b/packages/trading/octobot_trading/api/blockchain_wallets.py
index d2c8dd3d2..14e0bdfc3 100644
--- a/packages/trading/octobot_trading/api/blockchain_wallets.py
+++ b/packages/trading/octobot_trading/api/blockchain_wallets.py
@@ -25,7 +25,7 @@
@contextlib.asynccontextmanager
async def blockchain_wallet_context(
parameters: blockchain_wallets.BlockchainWalletParameters,
- trader: "octobot_trading.exchanges.Trader"
+ trader: typing.Optional["octobot_trading.exchanges.Trader"],
) -> typing.AsyncGenerator[blockchain_wallets.BlockchainWallet, None]:
wallet = blockchain_wallets.create_blockchain_wallet(parameters, trader)
async with wallet.open() as wallet:
diff --git a/packages/trading/octobot_trading/api/symbol_data.py b/packages/trading/octobot_trading/api/symbol_data.py
index b40ebf6d2..412f86445 100644
--- a/packages/trading/octobot_trading/api/symbol_data.py
+++ b/packages/trading/octobot_trading/api/symbol_data.py
@@ -21,7 +21,10 @@
import octobot_trading.enums
import octobot_trading.exchange_data as exchange_data
import octobot_trading.util as util
+import octobot_trading.exchanges.util as exchange_util
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.exchange_manager
def get_symbol_data(exchange_manager, symbol, allow_creation=True) -> exchange_data.ExchangeSymbolData:
return exchange_manager.exchange_symbols_data.get_exchange_symbol_data(symbol, allow_creation=allow_creation)
@@ -150,9 +153,12 @@ def create_new_candles_manager(candles=None, max_candles_count=None) -> exchange
return manager
-def force_set_mark_price(exchange_manager, symbol, price):
- exchange_manager.exchange_symbols_data.get_exchange_symbol_data(symbol).prices_manager.\
- set_mark_price(decimal.Decimal(str(price)), octobot_trading.enums.MarkPriceSources.EXCHANGE_MARK_PRICE.value)
+def force_set_mark_price(
+ exchange_manager: "octobot_trading.exchanges.exchange_manager.ExchangeManager",
+ symbol: str,
+ price: typing.Union[float, decimal.Decimal],
+) -> None:
+ return exchange_util.force_set_mark_price(exchange_manager, symbol, price)
def is_mark_price_initialized(exchange_manager, symbol: str) -> bool:
diff --git a/packages/trading/octobot_trading/blockchain_wallets/blockchain_wallet_factory.py b/packages/trading/octobot_trading/blockchain_wallets/blockchain_wallet_factory.py
index cd3165573..310b0435a 100644
--- a/packages/trading/octobot_trading/blockchain_wallets/blockchain_wallet_factory.py
+++ b/packages/trading/octobot_trading/blockchain_wallets/blockchain_wallet_factory.py
@@ -19,7 +19,6 @@
import octobot_commons.tentacles_management as tentacles_management
import octobot_trading.blockchain_wallets.blockchain_wallet as blockchain_wallet
import octobot_trading.blockchain_wallets.blockchain_wallet_parameters as blockchain_wallet_parameters
-import octobot_trading.blockchain_wallets.simulator.blockchain_wallet_simulator as blockchain_wallet_simulator
if typing.TYPE_CHECKING:
import octobot_trading.exchanges
@@ -36,19 +35,24 @@ def get_blockchain_wallet_class_by_blockchain() -> dict[str, type[blockchain_wal
def create_blockchain_wallet(
parameters: blockchain_wallet_parameters.BlockchainWalletParameters,
- trader: "octobot_trading.exchanges.Trader",
+ trader: typing.Optional["octobot_trading.exchanges.Trader"],
) -> blockchain_wallet.BlockchainWallet:
"""
Create a wallet of the given type
:param parameters: the parameters of the wallet to create
:return: the created wallet
"""
+ blockchain_wallet_class = None
try:
- return get_blockchain_wallet_class_by_blockchain()[
+ blockchain_wallet_class = get_blockchain_wallet_class_by_blockchain()[
parameters.blockchain_descriptor.blockchain
- ](parameters)
- except (KeyError, TypeError) as err:
- if trader.simulate:
- # use simulator wallet with trader callbacks to interact with simulated exchange wallet
- return blockchain_wallet_simulator.BlockchainWalletSimulator(parameters, trader=trader)
- raise ValueError(f"Blockchain {parameters.blockchain_descriptor.blockchain} not supported") from err
+ ]
+ try:
+ return blockchain_wallet_class(parameters)
+ except TypeError:
+ # trader arg is required for this wallet
+ return blockchain_wallet_class(parameters, trader=trader)
+ except KeyError as err:
+ raise ValueError(
+ f"Blockchain {parameters.blockchain_descriptor.blockchain} not supported"
+ ) from err
diff --git a/packages/trading/octobot_trading/blockchain_wallets/simulator/blockchain_wallet_simulator.py b/packages/trading/octobot_trading/blockchain_wallets/simulator/blockchain_wallet_simulator.py
index 818173f23..cce2568a3 100644
--- a/packages/trading/octobot_trading/blockchain_wallets/simulator/blockchain_wallet_simulator.py
+++ b/packages/trading/octobot_trading/blockchain_wallets/simulator/blockchain_wallet_simulator.py
@@ -15,6 +15,7 @@
# License along with this library.
import typing
import enum
+import time
import decimal
import uuid
@@ -53,7 +54,7 @@ class BlockchainWalletSimulator(blockchain_wallet.BlockchainWallet):
def __init__(
self,
parameters: blockchain_wallet_parameters.BlockchainWalletParameters,
- trader: "octobot_trading.exchanges.Trader"
+ trader: typing.Optional["octobot_trading.exchanges.Trader"]
):
if parameters.blockchain_descriptor.network != octobot_trading.constants.SIMULATED_BLOCKCHAIN_NETWORK:
# this is a simulator wallet, the network must be the simulated network
@@ -68,7 +69,7 @@ def __init__(
free=decimal.Decimal(0)
)
} if parameters.blockchain_descriptor.native_coin_symbol else {}
- self._trader: "octobot_trading.exchanges.Trader" = trader
+ self._trader: typing.Optional["octobot_trading.exchanges.Trader"] = trader
super().__init__(parameters)
if parameters.wallet_descriptor.specific_config:
self._apply_wallet_descriptor_specific_config(parameters.wallet_descriptor.specific_config)
@@ -123,13 +124,15 @@ def _ensure_native_coin_symbol(self):
@staticmethod
def create_wallet_descriptor_specific_config(**kwargs) -> dict:
return {
- BlockchainWalletSimulatorConfigurationKeys.ASSETS.value: {
- BlockchainWalletSimulatorConfigurationKeys.ASSET.value: asset,
- BlockchainWalletSimulatorConfigurationKeys.AMOUNT.value: amount,
- }
- for asset, amount in kwargs.get(
- BlockchainWalletSimulatorConfigurationKeys.ASSETS.value, {}
- ).items()
+ BlockchainWalletSimulatorConfigurationKeys.ASSETS.value: [
+ {
+ BlockchainWalletSimulatorConfigurationKeys.ASSET.value: asset,
+ BlockchainWalletSimulatorConfigurationKeys.AMOUNT.value: amount,
+ }
+ for asset, amount in kwargs.get(
+ BlockchainWalletSimulatorConfigurationKeys.ASSETS.value, {}
+ ).items()
+ ]
}
def _apply_wallet_descriptor_specific_config(self, specific_config: dict):
@@ -158,6 +161,10 @@ def _get_token_balance(self, asset: str) -> blockchain_wallet_adapter.Balance:
)
async def _get_trader_deposit_address(self, asset: str) -> str:
+ if self._trader is None:
+ raise octobot_trading.errors.BlockchainWalletConfigurationError(
+ f"No trader is provided to {self.__class__.__name__} to get a deposit address"
+ )
return (await self._trader.get_deposit_address(asset))[
octobot_trading.enums.ExchangeConstantsDepositAddressColumns.ADDRESS.value
]
@@ -172,13 +179,17 @@ async def _transfer_coin(
f"Available: {holdings.free}, required: {amount}"
)
transaction_id = str(uuid.uuid4())
- if to_address == await self._get_trader_deposit_address(asset):
+ if self._trader and to_address == await self._get_trader_deposit_address(asset):
# this is an exchange deposit: credit the exchange portfolio
await self._deposit_coin_on_trader_portfolio(asset, amount, to_address, transaction_id)
+ tx_timestamp = (
+ self._trader.exchange_manager.exchange.get_exchange_current_time()
+ if self._trader else int(time.time())
+ )
return blockchain_wallet_adapter.Transaction(
txid=transaction_id,
- timestamp=self._trader.exchange_manager.exchange.get_exchange_current_time(),
+ timestamp=tx_timestamp,
address_from=self.wallet_descriptor.address,
network=self.blockchain_descriptor.network,
address_to=to_address,
@@ -214,7 +225,7 @@ def _get_total_withdrawals_to_address(self, asset: str, to_address: str) -> deci
currency=asset,
transaction_type=octobot_trading.enums.TransactionType.BLOCKCHAIN_WITHDRAWAL,
)
- )
+ ) if self._trader else octobot_trading.constants.ZERO
def _get_total_deposits_from_address(self, asset: str, from_address: str) -> decimal.Decimal:
return sum( # type: ignore
@@ -225,4 +236,4 @@ def _get_total_deposits_from_address(self, asset: str, from_address: str) -> dec
currency=asset,
transaction_type=octobot_trading.enums.TransactionType.BLOCKCHAIN_DEPOSIT,
)
- )
+ ) if self._trader else octobot_trading.constants.ZERO
diff --git a/packages/trading/octobot_trading/constants.py b/packages/trading/octobot_trading/constants.py
index e4f838721..2b61b010a 100644
--- a/packages/trading/octobot_trading/constants.py
+++ b/packages/trading/octobot_trading/constants.py
@@ -205,6 +205,7 @@
':read ECONNRESET:read ETIMEDOUT'
)
).split(":"))
+USE_CCXT_SHARED_MARKETS_CACHE = os_util.parse_boolean_environment_var("USE_CCXT_SHARED_MARKETS_CACHE", "True")
# exchange proxy
RETRIABLE_EXCHANGE_PROXY_ERRORS_DESC: set[str] = set(os.getenv(
@@ -213,6 +214,11 @@
# used to force margin type update before positions init (if necessary)
FORCED_MARGIN_TYPE = enums.MarginType(os.getenv("FORCED_MARGIN_TYPE", enums.MarginType.ISOLATED.value))
+MINIMAL_POSITION_IDENTIFICATION_DETAILS_KEYS = [
+ enums.ExchangeConstantsPositionColumns.LOCAL_ID.value, # to fetch position
+ enums.ExchangeConstantsPositionColumns.SYMBOL.value, # to fetch position
+ enums.ExchangeConstantsPositionColumns.LEVERAGE.value, # to keep user configured leverage
+]
# API
API_LOGGER_TAG = "TradingApi"
@@ -243,6 +249,8 @@
# History
DEFAULT_SAVED_HISTORICAL_TIMEFRAMES = [commons_enums.TimeFrames.ONE_DAY]
HISTORICAL_CANDLES_FETCH_DEFAULT_TIMEOUT = 30
+MIN_CANDLES_HISTORY_SIZE = 2 # ensure that at least 2 candles are fetched to avoid issues where candles are not yet
+# available on the exchange, ending up in empty candle fetches
# 946742400 is 01/01/2000, if trade time is lower, there is an issue.
MINIMUM_VAL_TRADE_TIME = 946688400
diff --git a/packages/trading/octobot_trading/dsl/__init__.py b/packages/trading/octobot_trading/dsl/__init__.py
new file mode 100644
index 000000000..683416556
--- /dev/null
+++ b/packages/trading/octobot_trading/dsl/__init__.py
@@ -0,0 +1,23 @@
+# Drakkar-Software OctoBot-Trading
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+from octobot_trading.dsl.dsl_dependencies import (
+ SymbolDependency,
+)
+
+__all__ = [
+ "SymbolDependency",
+]
diff --git a/packages/trading/octobot_trading/dsl/dsl_dependencies.py b/packages/trading/octobot_trading/dsl/dsl_dependencies.py
new file mode 100644
index 000000000..de0e14246
--- /dev/null
+++ b/packages/trading/octobot_trading/dsl/dsl_dependencies.py
@@ -0,0 +1,25 @@
+# Drakkar-Software OctoBot-Trading
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import dataclasses
+import typing
+
+import octobot_commons.dsl_interpreter as dsl_interpreter
+
+
+@dataclasses.dataclass
+class SymbolDependency(dsl_interpreter.InterpreterDependency):
+ symbol: str
+ time_frame: typing.Optional[str] = None
diff --git a/packages/trading/octobot_trading/errors.py b/packages/trading/octobot_trading/errors.py
index 2faa16934..f15dca4c8 100644
--- a/packages/trading/octobot_trading/errors.py
+++ b/packages/trading/octobot_trading/errors.py
@@ -14,6 +14,8 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
+import octobot_trading.enums
+
class OctoBotTradingError(Exception):
"""
@@ -188,6 +190,18 @@ class NotSupported(OctoBotTradingError):
Raised when an exchange doesn't support the required element
"""
+class NotSupportedOrderTypeError(NotSupported):
+ """
+ Raised when an exchange doesn't support the required order type
+ """
+ def __init__(
+ self,
+ message: str,
+ order_type: octobot_trading.enums.TraderOrderType
+ ):
+ self.order_type: octobot_trading.enums.TraderOrderType = order_type
+ super().__init__(message)
+
class UnSupportedSymbolError(NotSupported):
"""
diff --git a/packages/trading/octobot_trading/exchange_data/__init__.py b/packages/trading/octobot_trading/exchange_data/__init__.py
index 906ddc3b0..e43255f77 100644
--- a/packages/trading/octobot_trading/exchange_data/__init__.py
+++ b/packages/trading/octobot_trading/exchange_data/__init__.py
@@ -93,6 +93,7 @@
MiniTickerProducer,
MiniTickerChannel,
TickerUpdaterSimulator,
+ TickerCache,
)
from octobot_trading.exchange_data import contracts
from octobot_trading.exchange_data.contracts import (
@@ -103,6 +104,7 @@
get_contract_type_from_symbol,
update_contracts_from_positions,
update_future_contract_from_dict,
+ initialize_contracts_from_exchange_data,
create_default_future_contract,
create_default_option_contract,
create_contract,
@@ -211,6 +213,7 @@
"MiniTickerProducer",
"MiniTickerChannel",
"TickerUpdaterSimulator",
+ "TickerCache",
"Contract",
"MarginContract",
"FutureContract",
@@ -218,6 +221,7 @@
"get_contract_type_from_symbol",
"update_contracts_from_positions",
"update_future_contract_from_dict",
+ "initialize_contracts_from_exchange_data",
"create_default_future_contract",
"create_default_option_contract",
"create_contract",
diff --git a/packages/trading/octobot_trading/exchange_data/contracts/__init__.py b/packages/trading/octobot_trading/exchange_data/contracts/__init__.py
index 7c1dfa7db..fe8bfe2bf 100644
--- a/packages/trading/octobot_trading/exchange_data/contracts/__init__.py
+++ b/packages/trading/octobot_trading/exchange_data/contracts/__init__.py
@@ -37,6 +37,7 @@
from octobot_trading.exchange_data.contracts import contract_factory
from octobot_trading.exchange_data.contracts.contract_factory import (
get_contract_type_from_symbol,
+ initialize_contracts_from_exchange_data,
update_contracts_from_positions,
update_future_contract_from_dict,
create_default_future_contract,
@@ -50,6 +51,7 @@
"FutureContract",
"OptionContract",
"get_contract_type_from_symbol",
+ "initialize_contracts_from_exchange_data",
"update_contracts_from_positions",
"update_future_contract_from_dict",
"create_default_future_contract",
diff --git a/packages/trading/octobot_trading/exchange_data/contracts/contract_factory.py b/packages/trading/octobot_trading/exchange_data/contracts/contract_factory.py
index 7cf3de490..bcddf7ce8 100644
--- a/packages/trading/octobot_trading/exchange_data/contracts/contract_factory.py
+++ b/packages/trading/octobot_trading/exchange_data/contracts/contract_factory.py
@@ -13,6 +13,7 @@
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
+import typing
import decimal
import octobot_commons.logging as logging
@@ -25,6 +26,8 @@
import octobot_trading.exchange_data.contracts.future_contract as future_contract
import octobot_trading.exchange_data.contracts.option_contract as option_contract
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
def update_contracts_from_positions(exchange_manager, positions) -> bool:
updated = False
@@ -58,6 +61,12 @@ def update_contracts_from_positions(exchange_manager, positions) -> bool:
return updated
+def initialize_contracts_from_exchange_data(exchange_manager, exchange_data: "exchange_data_import.ExchangeData") -> None:
+ for position_data in exchange_data.positions:
+ if position_data.contract:
+ update_future_contract_from_dict(exchange_manager, position_data.contract)
+
+
def update_future_contract_from_dict(exchange_manager, contract: dict) -> bool:
return exchange_manager.exchange.create_pair_contract(
pair=contract[enums.ExchangeConstantsMarginContractColumns.PAIR.value],
diff --git a/packages/trading/octobot_trading/exchange_data/exchange_symbols_data.py b/packages/trading/octobot_trading/exchange_data/exchange_symbols_data.py
index 75704a9d6..eb2e7355b 100644
--- a/packages/trading/octobot_trading/exchange_data/exchange_symbols_data.py
+++ b/packages/trading/octobot_trading/exchange_data/exchange_symbols_data.py
@@ -14,13 +14,18 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import typing
+import decimal
import octobot_commons.logging as logging
import octobot_trading.exchange_data.exchange_symbol_data as exchange_symbol_data_import
import octobot_trading.exchanges
+import octobot_trading.enums as enums
import octobot_trading.exchange_data.markets.markets_manager as markets_manager
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
class ExchangeSymbolsData:
def __init__(self, exchange_manager):
@@ -32,6 +37,19 @@ def __init__(self, exchange_manager):
self.markets_manager: markets_manager.MarketsManager = markets_manager.MarketsManager()
+ def initialize_from_exchange_data(
+ self, exchange_data: "exchange_data_import.ExchangeData", price_by_symbol: dict[str, float]
+ ) -> None:
+ """
+ Initialize prices from exchange data.
+ """
+ for market in exchange_data.markets:
+ price = price_by_symbol.get(market.symbol)
+ if price is not None:
+ self.get_exchange_symbol_data(market.symbol).prices_manager.set_mark_price(
+ decimal.Decimal(str(price)), enums.MarkPriceSources.EXCHANGE_MARK_PRICE.value
+ )
+
async def stop(self):
self.exchange_manager = None # type: ignore
self.exchange = None # type: ignore
diff --git a/packages/trading/octobot_trading/exchange_data/ticker/__init__.py b/packages/trading/octobot_trading/exchange_data/ticker/__init__.py
index b6e40cad9..0a2c48d35 100644
--- a/packages/trading/octobot_trading/exchange_data/ticker/__init__.py
+++ b/packages/trading/octobot_trading/exchange_data/ticker/__init__.py
@@ -28,6 +28,9 @@
MiniTickerProducer,
MiniTickerChannel,
)
+from octobot_trading.exchange_data.ticker.ticker_cache import (
+ TickerCache,
+)
__all__ = [
"TickerManager",
@@ -37,4 +40,5 @@
"MiniTickerProducer",
"MiniTickerChannel",
"TickerUpdaterSimulator",
+ "TickerCache",
]
diff --git a/packages/trading/octobot_trading/exchange_data/ticker/ticker_cache.py b/packages/trading/octobot_trading/exchange_data/ticker/ticker_cache.py
new file mode 100644
index 000000000..3482eaf24
--- /dev/null
+++ b/packages/trading/octobot_trading/exchange_data/ticker/ticker_cache.py
@@ -0,0 +1,90 @@
+import typing
+import cachetools
+
+import octobot_commons.constants
+import octobot_commons.symbols
+import octobot_commons.logging
+
+
+class TickerCache:
+
+ def __init__(self, ttl: float, maxsize: int):
+ # direct cache
+ self._ALL_TICKERS_BY_EXCHANGE_KEY: cachetools.TTLCache[str, dict[str, dict[str, float]]] = cachetools.TTLCache(
+ maxsize=maxsize, ttl=ttl
+ )
+
+ # indirect caches:
+ # - synchronized with _ALL_TICKERS_BY_EXCHANGE_KEY
+ # BTCUSDT => BTC/USDT
+ self._ALL_PARSED_SYMBOLS_BY_MERGED_SYMBOLS_BY_EXCHANGE_KEY: dict[str, dict[str, octobot_commons.symbols.Symbol]] = {}
+ # BTCUSDT => BTC/USDT + BTCUSDT => BTC/USDT:USDT
+ self._ALL_PARSED_SYMBOLS_BY_FUTURE_MERGED_SYMBOLS_BY_EXCHANGE_KEY: dict[str, dict[str, octobot_commons.symbols.Symbol]] = {}
+
+ def is_valid_symbol(self, exchange_name: str, exchange_type: str, sandboxed: bool, symbol: str) -> bool:
+ try:
+ # will raise if symbol is missing (therefore invalid)
+ self._ALL_TICKERS_BY_EXCHANGE_KEY[ # pylint: disable=expression-not-assigned
+ self.get_exchange_key(exchange_name, exchange_type, sandboxed)
+ ][symbol]
+ return True
+ except KeyError:
+ return False
+
+ def get_all_tickers(
+ self, exchange_name: str, exchange_type: str, sandboxed: bool,
+ default: typing.Optional[dict[str, dict[str, float]]] = None
+ ) -> typing.Optional[dict[str, dict[str, float]]]:
+ return self._ALL_TICKERS_BY_EXCHANGE_KEY.get(self.get_exchange_key(exchange_name, exchange_type, sandboxed), default)
+
+ def has_ticker_data(self, exchange_name: str, exchange_type: str, sandboxed: bool) -> bool:
+ return self.get_exchange_key(exchange_name, exchange_type, sandboxed) in self._ALL_TICKERS_BY_EXCHANGE_KEY
+
+ def get_all_parsed_symbols_by_merged_symbols(
+ self, exchange_name: str, exchange_type: str, sandboxed: bool, default=None
+ ) -> typing.Optional[dict[str, octobot_commons.symbols.Symbol]]:
+ # populated by set_all_tickers
+ # WARNING: does not expire when tickers expire: use has_ticker_data to check if cache is up-to-date
+ if exchange_type == octobot_commons.constants.CONFIG_EXCHANGE_FUTURE:
+ return self._ALL_PARSED_SYMBOLS_BY_FUTURE_MERGED_SYMBOLS_BY_EXCHANGE_KEY.get(
+ self.get_exchange_key(exchange_name, exchange_type, sandboxed), default
+ )
+ return self._ALL_PARSED_SYMBOLS_BY_MERGED_SYMBOLS_BY_EXCHANGE_KEY.get(
+ self.get_exchange_key(exchange_name, exchange_type, sandboxed), default
+ )
+
+ def set_all_tickers(
+ self, exchange_name: str, exchange_type: str, sandboxed: bool, tickers: dict, replace_all: bool = True
+ ):
+ sandbox = " sandbox" if sandboxed else ""
+ key = self.get_exchange_key(exchange_name, exchange_type, sandboxed)
+ merged_tickers = tickers if replace_all else {
+ **self._ALL_TICKERS_BY_EXCHANGE_KEY.get(key, {}), **tickers
+ }
+ octobot_commons.logging.get_logger(self.__class__.__name__).info(
+ f"Refreshed {len(tickers)} ({len(tickers)})/{len(merged_tickers)}) tickers cache for {exchange_name} {exchange_type}{sandbox}"
+ )
+ self._ALL_TICKERS_BY_EXCHANGE_KEY[key] = merged_tickers
+ self._ALL_PARSED_SYMBOLS_BY_MERGED_SYMBOLS_BY_EXCHANGE_KEY[key] = {
+ octobot_commons.symbols.parse_symbol(symbol).merged_str_symbol(market_separator=""):
+ octobot_commons.symbols.parse_symbol(symbol)
+ for symbol in merged_tickers
+ }
+ if exchange_type == octobot_commons.constants.CONFIG_EXCHANGE_FUTURE:
+ self._ALL_PARSED_SYMBOLS_BY_FUTURE_MERGED_SYMBOLS_BY_EXCHANGE_KEY[key] = {
+ **self._ALL_PARSED_SYMBOLS_BY_MERGED_SYMBOLS_BY_EXCHANGE_KEY[key],
+ **{
+ octobot_commons.symbols.parse_symbol(symbol).merged_str_base_and_quote_only_symbol(market_separator=""):
+ octobot_commons.symbols.parse_symbol(symbol)
+ for symbol in merged_tickers
+ }
+ }
+
+ def reset_all_tickers_cache(self):
+ self._ALL_TICKERS_BY_EXCHANGE_KEY.clear()
+ self._ALL_PARSED_SYMBOLS_BY_MERGED_SYMBOLS_BY_EXCHANGE_KEY.clear()
+ self._ALL_PARSED_SYMBOLS_BY_FUTURE_MERGED_SYMBOLS_BY_EXCHANGE_KEY.clear()
+
+ @staticmethod
+ def get_exchange_key(exchange_name: str, exchange_type: str, sandboxed: bool) -> str:
+ return f"{exchange_name}_{exchange_type or octobot_commons.constants.CONFIG_EXCHANGE_SPOT}_{sandboxed}"
diff --git a/packages/trading/octobot_trading/exchanges/__init__.py b/packages/trading/octobot_trading/exchanges/__init__.py
index 26fe16a48..3e6a6d2ad 100644
--- a/packages/trading/octobot_trading/exchanges/__init__.py
+++ b/packages/trading/octobot_trading/exchanges/__init__.py
@@ -83,6 +83,12 @@
is_proxy_config_compatible_with_websocket_connector,
search_websocket_class,
supports_websocket,
+ force_set_mark_price,
+ get_traded_assets,
+)
+from octobot_trading.exchanges import market_filters
+from octobot_trading.exchanges.market_filters.market_filter_factory import (
+ create_market_filter,
)
from octobot_trading.exchanges import exchange_websocket_factory
from octobot_trading.exchanges.exchange_websocket_factory import (
@@ -170,6 +176,8 @@
"get_auto_filled_exchange_names",
"get_exchange_details",
"is_error_on_this_type",
+ "force_set_mark_price",
+ "get_traded_assets",
"AbstractExchange",
"is_channel_managed_by_websocket",
"is_channel_fully_managed_by_websocket",
@@ -202,4 +210,5 @@
"ExchangeSimulatorAdapter",
"retried_failed_network_request",
"ExchangeDetails",
+ "create_market_filter",
]
diff --git a/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_client_util.py b/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_client_util.py
index 3b66f33d7..a6e2b7618 100644
--- a/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_client_util.py
+++ b/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_client_util.py
@@ -24,7 +24,6 @@ class ProxyConnectionError(Exception):
import os
import ssl
import aiohttp
-import copy
import logging
import typing
import ccxt
@@ -114,24 +113,6 @@ def create_client(
async def close_client(client):
await client.close()
- client.markets = {}
- client.markets_by_id = {}
- client.ids = []
- client.last_json_response = {}
- client.last_http_response = ""
- client.last_response_headers = {}
- client.markets_loading = None
- client.currencies = {}
- client.baseCurrencies = {}
- client.quoteCurrencies = {}
- client.currencies_by_id = {}
- client.codes = []
- client.symbols = {}
- client.accounts = []
- client.accounts_by_id = {}
- client.ohlcvs = {}
- client.trades = {}
- client.orderbooks = {}
def get_unauthenticated_exchange(
@@ -196,23 +177,18 @@ async def _filted_fetched_markets(*args, **kwargs):
client.fetch_markets = origin_fetch_markets
-def load_markets_from_cache(client, authenticated_cache: bool, market_filter: typing.Union[None, typing.Callable[[dict], bool]] = None):
+def load_markets_from_cache(client: async_ccxt.Exchange, authenticated_cache: bool, market_filter: typing.Union[None, typing.Callable[[dict], bool]] = None):
client_key = ccxt_clients_cache.get_client_key(client, authenticated_cache)
- client.set_markets(
- market
- for market in ccxt_clients_cache.get_exchange_parsed_markets(client_key)
- if market_filter is None or market_filter(market)
- )
+ ccxt_clients_cache.apply_exchange_markets_cache(client_key, client, market_filter)
if time_difference := ccxt_clients_cache.get_exchange_time_difference(client_key):
- client.options[ccxt_constants.CCXT_TIME_DIFFERENCE] = time_difference
+ if client.options:
+ client.options[ccxt_constants.CCXT_TIME_DIFFERENCE] = time_difference
-def set_markets_cache(client, authenticated_cache: bool):
+def set_ccxt_client_cache(client: async_ccxt.Exchange, authenticated_cache: bool):
if client.markets:
client_key = ccxt_clients_cache.get_client_key(client, authenticated_cache)
- ccxt_clients_cache.set_exchange_parsed_markets(
- client_key, copy.deepcopy(list(client.markets.values()))
- )
+ ccxt_clients_cache.set_exchange_markets_cache(client_key, client)
if time_difference := client.options.get(ccxt_constants.CCXT_TIME_DIFFERENCE):
ccxt_clients_cache.set_exchange_time_difference(client_key, time_difference)
diff --git a/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_clients_cache.py b/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_clients_cache.py
index 2e4270ef6..2829fdef2 100644
--- a/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_clients_cache.py
+++ b/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_clients_cache.py
@@ -15,24 +15,28 @@
# License along with this library.
import cachetools
import json
+import copy
import typing
import contextlib
+import ccxt.async_support as async_ccxt
import octobot_commons.constants as commons_constants
-
+import octobot_trading.constants as trading_constants
# To avoid side effects related to a cache refresh at a fix time of the day every day,
# cache should not be refreshed at the same time every day.
# Use 30h and 18min as a period. It could be anything else as long as it doesn't make it so
# that cache ends up refreshed approximately at the same time of the day
_CACHE_TIME = commons_constants.HOURS_TO_SECONDS * 30 + commons_constants.MINUTE_TO_SECONDS * 18
-_MARKETS_BY_EXCHANGE = cachetools.TTLCache(maxsize=50, ttl=_CACHE_TIME)
+_MARKETS_BY_EXCHANGE: cachetools.TTLCache[str, list[dict]] = cachetools.TTLCache(maxsize=50, ttl=_CACHE_TIME)
+_SHARED_MARKETS_EXCHANGE_BY_EXCHANGE: cachetools.TTLCache[str, async_ccxt.Exchange] = cachetools.TTLCache(maxsize=50, ttl=_CACHE_TIME)
# Time difference between system clock and exchange server clock, fetched when needed when loading market statuses
_TIME_DIFFERENCE_BY_EXCHANGE: dict[str, float] = {}
# use short cache time for authenticated markets to avoid caching them for too long
_AUTH_CACHE_TIME = 15 * commons_constants.MINUTE_TO_SECONDS
-_AUTH_MARKETS_BY_EXCHANGE = cachetools.TTLCache(maxsize=50, ttl=_AUTH_CACHE_TIME)
+_AUTH_MARKETS_BY_EXCHANGE: cachetools.TTLCache[str, list[dict]] = cachetools.TTLCache(maxsize=50, ttl=_AUTH_CACHE_TIME)
+_AUTH_SHARED_MARKETS_EXCHANGE_BY_EXCHANGE: cachetools.TTLCache[str, async_ccxt.Exchange] = cachetools.TTLCache(maxsize=50, ttl=_AUTH_CACHE_TIME)
_UNAUTHENTICATED_SUFFIX = "unauthenticated"
@@ -41,6 +45,29 @@ def get_client_key(client, authenticated_cache: bool) -> str:
return f"{client.__class__.__name__}:{json.dumps(client.urls.get('api'))}:{suffix}"
+def set_exchange_markets_cache(client_key: str, client: async_ccxt.Exchange):
+ if trading_constants.USE_CCXT_SHARED_MARKETS_CACHE:
+ set_cached_shared_markets_exchange(client_key, client)
+ else:
+ set_exchange_parsed_markets(
+ client_key, copy.deepcopy(list(client.markets.values()))
+ )
+
+
+def apply_exchange_markets_cache(
+ client_key: str, client: async_ccxt.Exchange,
+ market_filter: typing.Union[None, typing.Callable[[dict], bool]] = None
+):
+ if trading_constants.USE_CCXT_SHARED_MARKETS_CACHE:
+ client.set_markets_from_exchange(get_cached_shared_markets_exchange(client_key))
+ else:
+ client.set_markets(
+ market
+ for market in get_exchange_parsed_markets(client_key)
+ if market_filter is None or market_filter(market)
+ )
+
+
def get_exchange_parsed_markets(client_key: str):
return _get_cached_markets(client_key)[client_key]
@@ -49,12 +76,28 @@ def set_exchange_parsed_markets(client_key: str, markets):
_get_cached_markets(client_key)[client_key] = markets
-def _get_cached_markets(client_key: str) -> cachetools.TTLCache:
+def _get_cached_markets(client_key: str) -> cachetools.TTLCache[str, list[dict]]:
+ # used when USE_CCXT_SHARED_MARKETS_CACHE is False
if _is_authenticated_cache(client_key):
return _AUTH_MARKETS_BY_EXCHANGE
return _MARKETS_BY_EXCHANGE
+def get_cached_shared_markets_exchange(client_key: str) -> async_ccxt.Exchange:
+ return _get_shared_markets_exchange_cache(client_key)[client_key]
+
+
+def set_cached_shared_markets_exchange(client_key: str, exchange: async_ccxt.Exchange):
+ _get_shared_markets_exchange_cache(client_key)[client_key] = exchange
+
+
+def _get_shared_markets_exchange_cache(client_key: str) -> cachetools.TTLCache[str, async_ccxt.Exchange]:
+ # used when USE_CCXT_SHARED_MARKETS_CACHE is True
+ if _is_authenticated_cache(client_key):
+ return _AUTH_SHARED_MARKETS_EXCHANGE_BY_EXCHANGE
+ return _SHARED_MARKETS_EXCHANGE_BY_EXCHANGE
+
+
def get_exchange_time_difference(client_key: str) -> typing.Optional[float]:
return _TIME_DIFFERENCE_BY_EXCHANGE.get(client_key, None)
diff --git a/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_connector.py b/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_connector.py
index e2adef644..03b84397c 100644
--- a/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_connector.py
+++ b/packages/trading/octobot_trading/exchanges/connectors/ccxt/ccxt_connector.py
@@ -248,7 +248,7 @@ async def load_symbol_markets(
)
try:
await self._load_markets(self.client, reload, market_filter=market_filter)
- ccxt_client_util.set_markets_cache(self.client, authenticated_cache)
+ ccxt_client_util.set_ccxt_client_cache(self.client, authenticated_cache)
except (
ccxt.AuthenticationError, ccxt.ArgumentsRequired, ccxt.static_dependencies.ecdsa.der.UnexpectedDER,
binascii.Error, AssertionError, IndexError
@@ -287,7 +287,7 @@ async def load_symbol_markets(
try:
unauth_client = self._client_factory(True)[0]
await self._load_markets(unauth_client, reload, market_filter=market_filter)
- ccxt_client_util.set_markets_cache(unauth_client, False)
+ ccxt_client_util.set_ccxt_client_cache(unauth_client, False)
# apply markets to target client
ccxt_client_util.load_markets_from_cache(self.client, False, market_filter=market_filter)
self.logger.debug(
diff --git a/packages/trading/octobot_trading/exchanges/exchange_builder.py b/packages/trading/octobot_trading/exchanges/exchange_builder.py
index 5d4a14f0e..300edde47 100644
--- a/packages/trading/octobot_trading/exchanges/exchange_builder.py
+++ b/packages/trading/octobot_trading/exchanges/exchange_builder.py
@@ -258,14 +258,6 @@ def use_cached_markets(self, use_cached_markets: bool):
def use_market_filter(self, market_filter: typing.Union[None, typing.Callable[[dict], bool]]):
self.exchange_manager.market_filter = market_filter
return self
-
- def set_rest_exchange(self, rest_exchange: typing.Optional["exchanges.RestExchange"]):
- self.exchange_manager.preconfigured_exchange = rest_exchange
- return self
-
- def leave_rest_exchange_open(self, leave_rest_exchange_open: bool):
- self.exchange_manager.leave_rest_exchange_open = leave_rest_exchange_open
- return self
def is_ignoring_config(self, ignore_config=True):
self.exchange_manager.ignore_config = ignore_config
diff --git a/packages/trading/octobot_trading/exchanges/exchange_factory.py b/packages/trading/octobot_trading/exchanges/exchange_factory.py
index 486e95d94..bd7b27ed7 100644
--- a/packages/trading/octobot_trading/exchanges/exchange_factory.py
+++ b/packages/trading/octobot_trading/exchanges/exchange_factory.py
@@ -54,14 +54,9 @@ async def create_real_exchange(exchange_manager, exchange_config_by_exchange: ty
:param exchange_manager: the related exchange manager
:param exchange_config_by_exchange: optional exchange configurations
"""
- if exchange_manager.preconfigured_exchange:
- exchange_manager.exchange = exchange_manager.preconfigured_exchange
- exchange_manager.exchange.exchange_manager = exchange_manager
- else:
- await _create_rest_exchange(exchange_manager, exchange_config_by_exchange)
+ await _create_rest_exchange(exchange_manager, exchange_config_by_exchange)
try:
- if exchange_manager.preconfigured_exchange is None:
- await exchange_manager.exchange.initialize()
+ await exchange_manager.exchange.initialize()
_create_exchange_backend(exchange_manager)
if exchange_manager.exchange_only:
return
diff --git a/packages/trading/octobot_trading/exchanges/exchange_manager.py b/packages/trading/octobot_trading/exchanges/exchange_manager.py
index ab0aca394..81f4c5673 100644
--- a/packages/trading/octobot_trading/exchanges/exchange_manager.py
+++ b/packages/trading/octobot_trading/exchanges/exchange_manager.py
@@ -24,7 +24,7 @@
import octobot_trading.exchange_channel as exchange_channel
import octobot_trading.exchanges as exchanges
import octobot_trading.personal_data as personal_data
-import octobot_trading.exchange_data as exchange_data
+import octobot_trading.exchange_data
import octobot_trading.constants as constants
import octobot_trading.enums as enums
import octobot_trading.util as util
@@ -34,6 +34,9 @@
import trading_backend.exchanges
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
class ExchangeManager(util.Initializable):
def __init__(self, config, exchange_class_string):
super().__init__()
@@ -74,8 +77,6 @@ def __init__(self, config, exchange_class_string):
self.trader: exchanges.Trader = None # type: ignore
self.exchange: exchanges.RestExchange = None # type: ignore
- self.preconfigured_exchange: typing.Optional[exchanges.RestExchange] = None
- self.leave_rest_exchange_open: bool = False
self.exchange_backend: trading_backend.exchanges.Exchange = None # type: ignore
self.is_broker_enabled: bool = False
self.trading_modes: list = []
@@ -88,7 +89,9 @@ def __init__(self, config, exchange_class_string):
self.storage_manager: storage.StorageManager = storage.StorageManager(self)
self.exchange_config: exchanges.ExchangeConfig = exchanges.ExchangeConfig(self)
self.exchange_personal_data: personal_data.ExchangePersonalData = personal_data.ExchangePersonalData(self)
- self.exchange_symbols_data: exchange_data.ExchangeSymbolsData = exchange_data.ExchangeSymbolsData(self)
+ self.exchange_symbols_data: octobot_trading.exchange_data.ExchangeSymbolsData = (
+ octobot_trading.exchange_data.ExchangeSymbolsData(self)
+ )
self.debug_info: dict[str, typing.Any] = {}
@@ -134,26 +137,24 @@ async def stop(self, warning_on_missing_elements=True, enable_logs=True):
# stop exchange channels
if enable_logs:
self.logger.debug(f"Stopping exchange channels for exchange_id: {self.id} ...")
- if self.exchange is not None and not self.leave_rest_exchange_open:
- try:
- exchange_channel.get_exchange_channels(self.id)
- await exchange_channel.stop_exchange_channels(self, should_warn=warning_on_missing_elements)
- except KeyError:
- # no exchange channel to stop
- pass
- except Exception as err:
- self.logger.exception(err, True, f"Error when stopping exchange channels: {err}")
+ try:
+ exchange_channel.get_exchange_channels(self.id)
+ await exchange_channel.stop_exchange_channels(self, should_warn=warning_on_missing_elements)
+ except KeyError:
+ # no exchange channel to stop
+ pass
+ except Exception as err:
+ self.logger.exception(err, True, f"Error when stopping exchange channels: {err}")
+ if self.exchange is not None:
+ # ensure self.exchange still exists as await self.exchange.stop()
+ # internally uses asyncio.sleep within ccxt
+ exchanges.Exchanges.instance().del_exchange(
+ self.exchange.name, self.id, should_warn=warning_on_missing_elements
+ )
try:
await self.exchange.stop()
except Exception as err:
self.logger.exception(err, True, f"Error when stopping exchange: {err}")
- if self.exchange is not None:
- # ensure self.exchange still exists as await self.exchange.stop()
- # internally uses asyncio.sleep within ccxt
- exchanges.Exchanges.instance().del_exchange(
- self.exchange.name, self.id, should_warn=warning_on_missing_elements
- )
- self.exchange.exchange_manager = None # type: ignore
self.exchange = None # type: ignore
if self.exchange_personal_data is not None:
try:
@@ -206,6 +207,38 @@ async def register_trader(self, trader):
await self.exchange_personal_data.initialize()
await self.exchange_config.initialize()
+ async def initialize_from_exchange_data(
+ self,
+ exchange_data: "exchange_data_import.ExchangeData",
+ price_by_symbol: dict[str, float],
+ ignore_orders_and_trades: bool,
+ lock_chained_orders_funds: bool,
+ as_simulator: bool,
+ ) -> None:
+ """
+ Initialize trader positions and orders from exchange data by delegating to all relevant managers.
+ """
+ await self.trader.initialize()
+ self.exchange_symbols_data.initialize_from_exchange_data(exchange_data, price_by_symbol)
+ self.exchange_personal_data.portfolio_manager.portfolio_value_holder.initialize_from_exchange_data(
+ exchange_data, price_by_symbol
+ )
+ if not ignore_orders_and_trades:
+ if exchange_data.trades:
+ self.exchange_personal_data.trades_manager.initialize_from_exchange_data(exchange_data)
+ if (
+ exchange_data.orders_details.open_orders
+ and exchange_data.orders_details.open_orders[0]
+ .get(constants.STORAGE_ORIGIN_VALUE, {})
+ .get(enums.ExchangeConstantsOrderColumns.TYPE.value)
+ ):
+ await self.exchange_personal_data.orders_manager.initialize_from_exchange_data(exchange_data)
+ if lock_chained_orders_funds:
+ await self.exchange_personal_data.portfolio_manager.initialize_from_exchange_data(exchange_data)
+ self.exchange_personal_data.positions_manager.initialize_from_exchange_data(
+ exchange_data, exclusively_use_exchange_position_details=not as_simulator
+ )
+
def load_constants(self):
if not self.is_backtesting:
self._load_config_symbols_and_time_frames()
@@ -220,7 +253,7 @@ def need_user_stream(self):
return self.config[common_constants.CONFIG_TRADER][common_constants.CONFIG_ENABLED_OPTION]
def reset_exchange_symbols_data(self):
- self.exchange_symbols_data = exchange_data.ExchangeSymbolsData(self)
+ self.exchange_symbols_data = octobot_trading.exchange_data.ExchangeSymbolsData(self)
def reset_exchange_personal_data(self):
self.exchange_personal_data = personal_data.ExchangePersonalData(self)
diff --git a/packages/trading/octobot_trading/exchanges/market_filters/__init__.py b/packages/trading/octobot_trading/exchanges/market_filters/__init__.py
new file mode 100644
index 000000000..d01670038
--- /dev/null
+++ b/packages/trading/octobot_trading/exchanges/market_filters/__init__.py
@@ -0,0 +1,5 @@
+from octobot_trading.exchanges.market_filters.market_filter_factory import create_market_filter
+
+__all__ = [
+ "create_market_filter",
+]
\ No newline at end of file
diff --git a/packages/trading/octobot_trading/exchanges/market_filters/market_filter_factory.py b/packages/trading/octobot_trading/exchanges/market_filters/market_filter_factory.py
new file mode 100644
index 000000000..9354455ff
--- /dev/null
+++ b/packages/trading/octobot_trading/exchanges/market_filters/market_filter_factory.py
@@ -0,0 +1,52 @@
+import typing
+
+import octobot_commons.constants as common_constants
+
+import octobot_trading.enums as trading_enums
+import octobot_trading.exchanges.util.exchange_data_util as exchange_data_util
+
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+
+def create_market_filter(
+ exchange_data: typing.Optional["exchange_data_import.ExchangeData"],
+ to_keep_quote: typing.Optional[str],
+ to_keep_symbols: typing.Optional[typing.Iterable[str]] = None,
+ to_keep_quotes: typing.Optional[typing.Iterable[str]] = None,
+ force_usd_like_markets: bool = True,
+) -> typing.Callable[[dict], bool]:
+ relevant_symbols_to_keep = set(to_keep_symbols or []) # forced symbols
+ if exchange_data:
+ relevant_symbols_to_keep.update(exchange_data_util.get_orders_and_positions_symbols(exchange_data)) # orders/positions symbols
+        relevant_symbols_to_keep.update(market.symbol for market in exchange_data.markets) # always keep symbols present in markets
+ merged_to_keep_quotes = set(to_keep_quotes or [])
+ if to_keep_quote:
+ merged_to_keep_quotes.add(to_keep_quote)
+
+ def market_filter(market: dict) -> bool:
+ if market[trading_enums.ExchangeConstantsMarketStatusColumns.SYMBOL.value] in relevant_symbols_to_keep:
+ return True
+ base = market[trading_enums.ExchangeConstantsMarketStatusColumns.CURRENCY.value]
+ quote = market[trading_enums.ExchangeConstantsMarketStatusColumns.MARKET.value]
+ return (
+ (
+ # 1. all "X/to_keep_quote" markets
+ # => always required to run the strategy
+ quote in merged_to_keep_quotes or
+ # 2. all "to_keep_quote/X" markets
+ # => used in portfolio optimization. Ex: to buy BTC from USDT when BTC is the "to_keep_quote",
+ # BTC/USD-like market is required
+ base in merged_to_keep_quotes or
+ # 3. all USD-like/X markets
+ # => used in portfolio optimization. Ex: to be able to convert USD like currencies into the
+ # same USD-like currency
+ (force_usd_like_markets and base in common_constants.USD_LIKE_COINS)
+ )
+ and (
+ market[trading_enums.ExchangeConstantsMarketStatusColumns.TYPE.value] ==
+ trading_enums.ExchangeTypes.SPOT.value
+ )
+ )
+
+ return market_filter
diff --git a/packages/trading/octobot_trading/exchanges/traders/trader.py b/packages/trading/octobot_trading/exchanges/traders/trader.py
index d4b20d976..e5a57812f 100644
--- a/packages/trading/octobot_trading/exchanges/traders/trader.py
+++ b/packages/trading/octobot_trading/exchanges/traders/trader.py
@@ -15,6 +15,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import decimal
+import uuid
import typing
import asyncio
@@ -1172,3 +1173,6 @@ def _has_open_position(self, symbol):
"""
return len(self.exchange_manager.exchange_personal_data.positions_manager.get_symbol_positions(
symbol=symbol)) != 0
+
+ def generate_random_order_id(self) -> str:
+ return str(uuid.uuid4())
diff --git a/packages/trading/octobot_trading/exchanges/traders/trader_simulator.py b/packages/trading/octobot_trading/exchanges/traders/trader_simulator.py
index af6879e54..d0a08ddae 100644
--- a/packages/trading/octobot_trading/exchanges/traders/trader_simulator.py
+++ b/packages/trading/octobot_trading/exchanges/traders/trader_simulator.py
@@ -15,7 +15,6 @@
# License along with this library.
import decimal
import time
-import uuid
import octobot_trading.constants
import octobot_trading.enums as enums
@@ -59,7 +58,7 @@ async def _withdraw_on_exchange(
self, asset: str, amount: decimal.Decimal, network: str, address: str, tag: str = "", params: dict = None
) -> dict:
deposit_address = await self.get_deposit_address(asset)
- transaction_id = str(uuid.uuid4())
+ transaction_id = self.generate_random_order_id()
return {
enums.ExchangeConstantsTransactionColumns.TXID.value: transaction_id,
enums.ExchangeConstantsTransactionColumns.TIMESTAMP.value: time.time(),
diff --git a/packages/trading/octobot_trading/exchanges/util/__init__.py b/packages/trading/octobot_trading/exchanges/util/__init__.py
index 84caff0b4..8f3f1fc34 100644
--- a/packages/trading/octobot_trading/exchanges/util/__init__.py
+++ b/packages/trading/octobot_trading/exchanges/util/__init__.py
@@ -46,6 +46,8 @@
get_auto_filled_exchange_names,
get_exchange_details,
is_error_on_this_type,
+ force_set_mark_price,
+ get_traded_assets,
)
from octobot_trading.exchanges.util import websockets_util
from octobot_trading.exchanges.util.websockets_util import (
@@ -86,4 +88,6 @@
"is_proxy_config_compatible_with_websocket_connector",
"search_websocket_class",
"supports_websocket",
+ "force_set_mark_price",
+ "get_traded_assets",
]
diff --git a/packages/trading/octobot_trading/util/test_tools/exchange_data.py b/packages/trading/octobot_trading/exchanges/util/exchange_data.py
similarity index 96%
rename from packages/trading/octobot_trading/util/test_tools/exchange_data.py
rename to packages/trading/octobot_trading/exchanges/util/exchange_data.py
index 707ee417c..27fde7180 100644
--- a/packages/trading/octobot_trading/util/test_tools/exchange_data.py
+++ b/packages/trading/octobot_trading/exchanges/util/exchange_data.py
@@ -19,8 +19,7 @@
import octobot_commons.dataclasses
import octobot_commons.enums as common_enums
-import octobot_trading.exchanges
-
+import octobot_trading.exchanges.util.symbol_details as symbol_details_import
@dataclasses.dataclass
class IncompatibleAssetDetails(
@@ -62,8 +61,8 @@ class ExchangeDetails(octobot_commons.dataclasses.FlexibleDataclass, octobot_com
class MarketDetails(octobot_commons.dataclasses.FlexibleDataclass, octobot_commons.dataclasses.UpdatableDataclass):
id: str = ""
symbol: str = ""
- details: octobot_trading.exchanges.SymbolDetails = \
- dataclasses.field(default_factory=octobot_trading.exchanges.SymbolDetails)
+ details: symbol_details_import.SymbolDetails = \
+ dataclasses.field(default_factory=symbol_details_import.SymbolDetails)
time_frame: str = ""
close: list[float] = dataclasses.field(default_factory=list)
open: list[float] = dataclasses.field(default_factory=list)
diff --git a/packages/trading/octobot_trading/exchanges/util/exchange_data_util.py b/packages/trading/octobot_trading/exchanges/util/exchange_data_util.py
new file mode 100644
index 000000000..0e3219753
--- /dev/null
+++ b/packages/trading/octobot_trading/exchanges/util/exchange_data_util.py
@@ -0,0 +1,54 @@
+# Drakkar-Software OctoBot-Trading
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+
+import octobot_trading.constants as constants
+import octobot_trading.enums as enums
+
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+
+def _get_positions_symbols(exchange_data: "exchange_data_import.ExchangeData") -> set[str]:
+ return set(get_positions_by_symbol(exchange_data))
+
+
+def _get_orders_symbols(exchange_data: "exchange_data_import.ExchangeData") -> set[str]:
+ return set(
+ order[constants.STORAGE_ORIGIN_VALUE][enums.ExchangeConstantsOrderColumns.SYMBOL.value]
+ for order in exchange_data.orders_details.open_orders + exchange_data.orders_details.missing_orders
+ if order.get(constants.STORAGE_ORIGIN_VALUE, {}).get(
+ enums.ExchangeConstantsOrderColumns.SYMBOL.value
+ )
+ )
+
+
+def get_orders_and_positions_symbols(exchange_data: "exchange_data_import.ExchangeData") -> set[str]:
+ return _get_orders_symbols(exchange_data).union(_get_positions_symbols(exchange_data))
+
+
+def get_positions_by_symbol(exchange_data: "exchange_data_import.ExchangeData") -> dict[str, list[dict]]:
+ return {
+ position_details.position[enums.ExchangeConstantsPositionColumns.SYMBOL.value]:
+ [
+ symbol_position_details.position
+ for symbol_position_details in exchange_data.positions
+ if symbol_position_details.position.get(enums.ExchangeConstantsPositionColumns.SYMBOL.value) ==
+ position_details.position[enums.ExchangeConstantsPositionColumns.SYMBOL.value]
+ ]
+ for position_details in exchange_data.positions
+ if enums.ExchangeConstantsPositionColumns.SYMBOL.value in position_details.position
+ }
diff --git a/packages/trading/octobot_trading/exchanges/util/exchange_util.py b/packages/trading/octobot_trading/exchanges/util/exchange_util.py
index 9279a90e6..be5a3b6ca 100644
--- a/packages/trading/octobot_trading/exchanges/util/exchange_util.py
+++ b/packages/trading/octobot_trading/exchanges/util/exchange_util.py
@@ -16,6 +16,7 @@
import contextlib
import typing
import ccxt
+import decimal
import trading_backend
import octobot_commons.logging as logging
@@ -36,6 +37,8 @@
import octobot_trading.exchanges.exchange_details as exchange_details
import octobot_trading.exchanges.exchange_builder as exchange_builder
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.exchange_manager
def get_rest_exchange_class(
exchange_name: str, tentacles_setup_config, exchange_config_by_exchange: typing.Optional[dict[str, dict]]
@@ -222,8 +225,6 @@ async def get_local_exchange_manager(
is_broker_enabled: bool = False, exchange_config_by_exchange: typing.Optional[dict[str, dict]] = None,
disable_unauth_retry: bool = False,
market_filter: typing.Union[None, typing.Callable[[dict], bool]] = None,
- rest_exchange: typing.Optional[exchanges_types.RestExchange] = None,
- leave_rest_exchange_open: bool = False,
):
exchange_type = exchange_config.get(common_constants.CONFIG_EXCHANGE_TYPE, get_default_exchange_type(exchange_name))
builder = builder or exchange_builder.ExchangeBuilder(
@@ -241,8 +242,6 @@ async def get_local_exchange_manager(
.is_broker_enabled(is_broker_enabled) \
.use_cached_markets(use_cached_markets) \
.use_market_filter(market_filter) \
- .set_rest_exchange(rest_exchange) \
- .leave_rest_exchange_open(leave_rest_exchange_open) \
.is_ignoring_config(ignore_config) \
.disable_trading_mode() \
.build()
@@ -486,3 +485,23 @@ def is_error_on_this_type(error: BaseException, descriptions: typing.List[typing
if all(identifier in lower_error for identifier in identifiers):
return True
return False
+
+
+def get_traded_assets(exchange_manager: "octobot_trading.exchanges.exchange_manager.ExchangeManager") -> list:
+ # use list to maintain order
+ assets = []
+ for symbol in exchange_manager.exchange_config.traded_symbols:
+ if symbol.base not in assets:
+ assets.append(symbol.base)
+ if symbol.quote not in assets:
+ assets.append(symbol.quote)
+ return assets
+
+
+def force_set_mark_price(
+ exchange_manager: "octobot_trading.exchanges.exchange_manager.ExchangeManager",
+ symbol: str,
+ price: typing.Union[float, decimal.Decimal]
+) -> None:
+ exchange_manager.exchange_symbols_data.get_exchange_symbol_data(symbol).prices_manager.\
+ set_mark_price(decimal.Decimal(str(price)), enums.MarkPriceSources.EXCHANGE_MARK_PRICE.value)
diff --git a/packages/trading/octobot_trading/modes/channel/abstract_mode_consumer.py b/packages/trading/octobot_trading/modes/channel/abstract_mode_consumer.py
index b39943443..0936dc6e6 100644
--- a/packages/trading/octobot_trading/modes/channel/abstract_mode_consumer.py
+++ b/packages/trading/octobot_trading/modes/channel/abstract_mode_consumer.py
@@ -289,31 +289,17 @@ async def register_chained_order(
self, main_order, price, order_type, side, quantity=None, allow_bundling=True, tag=None, reduce_only=False,
update_with_triggering_order_fees=None
) -> tuple:
- chained_order = personal_data.create_order_instance(
- trader=self.exchange_manager.trader,
- order_type=order_type,
- symbol=main_order.symbol,
- current_price=price,
- quantity=quantity or main_order.origin_quantity,
- price=price,
- side=side,
- associated_entry_id=main_order.order_id,
- reduce_only=reduce_only,
+ return await personal_data.create_and_register_chained_order_on_base_order(
+ main_order,
+ price,
+ order_type,
+ side,
+ quantity=quantity,
+ allow_bundling=allow_bundling,
tag=tag,
+ reduce_only=reduce_only,
+ update_with_triggering_order_fees=update_with_triggering_order_fees
)
- params = {}
- # do not reduce chained order amounts to account for fees when trading futures
- if update_with_triggering_order_fees is None:
- update_with_triggering_order_fees = not self.exchange_manager.is_future
- if allow_bundling:
- params = await self.exchange_manager.trader.bundle_chained_order_with_uncreated_order(
- main_order, chained_order, update_with_triggering_order_fees
- )
- else:
- await self.exchange_manager.trader.chain_order(
- main_order, chained_order, update_with_triggering_order_fees, False
- )
- return params, chained_order
def check_factor(min_val, max_val, factor):
diff --git a/packages/trading/octobot_trading/modes/script_keywords/__init__.py b/packages/trading/octobot_trading/modes/script_keywords/__init__.py
index f9cd4c031..87f13a391 100644
--- a/packages/trading/octobot_trading/modes/script_keywords/__init__.py
+++ b/packages/trading/octobot_trading/modes/script_keywords/__init__.py
@@ -51,6 +51,7 @@
from octobot_trading.modes.script_keywords.context_management import (
get_base_context,
get_full_context,
+ get_base_context_from_exchange_manager,
Context,
)
@@ -83,5 +84,6 @@
"set_plot_orders",
"get_base_context",
"get_full_context",
+ "get_base_context_from_exchange_manager",
"Context",
]
diff --git a/packages/trading/octobot_trading/modes/script_keywords/context_management.py b/packages/trading/octobot_trading/modes/script_keywords/context_management.py
index 3a010966a..f9da903b6 100644
--- a/packages/trading/octobot_trading/modes/script_keywords/context_management.py
+++ b/packages/trading/octobot_trading/modes/script_keywords/context_management.py
@@ -38,6 +38,28 @@ def get_base_context(trading_mode, symbol=None, init_call=False):
return get_full_context(trading_mode, None, None, symbol, None, None, None, None, None, init_call=init_call)
+def get_base_context_from_exchange_manager(exchange_manager, symbol):
+ context = Context(
+ modes.AbstractTradingMode,
+ exchange_manager,
+ exchange_manager.trader,
+ exchange_manager.exchange_name,
+ symbol,
+ None,
+ None,
+ None,
+ exchange_manager.logger,
+ modes.AbstractTradingMode,
+ None,
+ None,
+ None,
+ None,
+ None,
+ )
+ context.enable_trading = True
+ return context
+
+
def get_full_context(trading_mode, matrix_id, cryptocurrency, symbol, time_frame, trigger_source, trigger_cache_timestamp,
candle, kline, init_call=False):
context = Context(
diff --git a/packages/trading/octobot_trading/personal_data/__init__.py b/packages/trading/octobot_trading/personal_data/__init__.py
index cd73fd9fa..14f93115a 100644
--- a/packages/trading/octobot_trading/personal_data/__init__.py
+++ b/packages/trading/octobot_trading/personal_data/__init__.py
@@ -54,6 +54,8 @@
generate_order_id,
wait_for_order_fill,
get_short_order_summary,
+ get_enriched_orders_by_exchange_id,
+ get_symbol_count,
apply_order_storage_details_if_any,
create_orders_storage_related_elements,
create_missing_virtual_orders_from_storage_order_groups,
@@ -83,6 +85,7 @@
ActiveOrderSwapStrategy,
StopFirstActiveOrderSwapStrategy,
TakeProfitFirstActiveOrderSwapStrategy,
+ create_active_order_swap_strategy,
BaseTrigger,
PriceTrigger,
OrdersUpdater,
@@ -111,6 +114,9 @@
create_order_instance,
create_order_from_dict,
create_order_from_order_storage_details,
+ create_order_from_order_raw_in_storage_details_without_related_elements,
+ OrderFactory,
+ create_and_register_chained_order_on_base_order,
OrdersProducer,
OrdersChannel,
OrdersManager,
@@ -328,6 +334,8 @@
"generate_order_id",
"wait_for_order_fill",
"get_short_order_summary",
+ "get_enriched_orders_by_exchange_id",
+ "get_symbol_count",
"apply_order_storage_details_if_any",
"create_orders_storage_related_elements",
"create_missing_virtual_orders_from_storage_order_groups",
@@ -357,6 +365,7 @@
"ActiveOrderSwapStrategy",
"StopFirstActiveOrderSwapStrategy",
"TakeProfitFirstActiveOrderSwapStrategy",
+ "create_active_order_swap_strategy",
"BaseTrigger",
"PriceTrigger",
"OrdersUpdater",
@@ -385,6 +394,9 @@
"create_order_instance",
"create_order_from_dict",
"create_order_from_order_storage_details",
+ "create_order_from_order_raw_in_storage_details_without_related_elements",
+ "OrderFactory",
+ "create_and_register_chained_order_on_base_order",
"OrdersProducer",
"OrdersChannel",
"OrdersManager",
diff --git a/packages/trading/octobot_trading/personal_data/orders/__init__.py b/packages/trading/octobot_trading/personal_data/orders/__init__.py
index 876d8991f..3c23ca853 100644
--- a/packages/trading/octobot_trading/personal_data/orders/__init__.py
+++ b/packages/trading/octobot_trading/personal_data/orders/__init__.py
@@ -44,6 +44,7 @@
ActiveOrderSwapStrategy,
StopFirstActiveOrderSwapStrategy,
TakeProfitFirstActiveOrderSwapStrategy,
+ create_active_order_swap_strategy,
)
from octobot_trading.personal_data.orders import cancel_policies
from octobot_trading.personal_data.orders.cancel_policies import (
@@ -114,7 +115,7 @@
get_fees_for_currency,
get_order_locked_amount,
get_orders_locked_amounts_by_asset,
- parse_raw_fees,
+ parse_raw_fees,
parse_order_status,
parse_is_cancelled,
parse_is_pending_cancel,
@@ -142,6 +143,9 @@
generate_order_id,
wait_for_order_fill,
get_short_order_summary,
+ create_and_register_chained_order_on_base_order,
+ get_enriched_orders_by_exchange_id,
+ get_symbol_count,
)
from octobot_trading.personal_data.orders import orders_storage_operations
from octobot_trading.personal_data.orders.orders_storage_operations import (
@@ -181,6 +185,8 @@
create_order_instance,
create_order_from_dict,
create_order_from_order_storage_details,
+ create_order_from_order_raw_in_storage_details_without_related_elements,
+ OrderFactory,
)
__all__ = [
@@ -224,6 +230,7 @@
"generate_order_id",
"wait_for_order_fill",
"get_short_order_summary",
+ "create_and_register_chained_order_on_base_order",
"apply_order_storage_details_if_any",
"create_missing_virtual_orders_from_storage_order_groups",
"is_associated_pending_order",
@@ -241,6 +248,7 @@
"TrailingProfileTypes",
"create_trailing_profile",
"create_filled_take_profit_trailing_profile",
+ "create_active_order_swap_strategy",
"ActiveOrderSwapStrategy",
"StopFirstActiveOrderSwapStrategy",
"TakeProfitFirstActiveOrderSwapStrategy",
@@ -275,6 +283,8 @@
"create_order_instance",
"create_order_from_dict",
"create_order_from_order_storage_details",
+ "create_order_from_order_raw_in_storage_details_without_related_elements",
+ "OrderFactory",
"OrdersProducer",
"OrdersChannel",
"OrdersManager",
@@ -300,4 +310,6 @@
"TakeProfitLimitOrder",
"TrailingStopOrder",
"TrailingStopLimitOrder",
+ "get_enriched_orders_by_exchange_id",
+ "get_symbol_count",
]
diff --git a/packages/trading/octobot_trading/personal_data/orders/active_order_swap_strategies/__init__.py b/packages/trading/octobot_trading/personal_data/orders/active_order_swap_strategies/__init__.py
index 5ea40e35d..e71985a89 100644
--- a/packages/trading/octobot_trading/personal_data/orders/active_order_swap_strategies/__init__.py
+++ b/packages/trading/octobot_trading/personal_data/orders/active_order_swap_strategies/__init__.py
@@ -18,6 +18,10 @@
from octobot_trading.personal_data.orders.active_order_swap_strategies.active_order_swap_strategy import (
ActiveOrderSwapStrategy,
)
+from octobot_trading.personal_data.orders.active_order_swap_strategies import active_order_swap_strategy_factory
+from octobot_trading.personal_data.orders.active_order_swap_strategies.active_order_swap_strategy_factory import (
+ create_active_order_swap_strategy,
+)
from octobot_trading.personal_data.orders.active_order_swap_strategies import stop_first_active_order_swap_strategy
from octobot_trading.personal_data.orders.active_order_swap_strategies.stop_first_active_order_swap_strategy import (
@@ -34,4 +38,5 @@
"ActiveOrderSwapStrategy",
"StopFirstActiveOrderSwapStrategy",
"TakeProfitFirstActiveOrderSwapStrategy",
+ "create_active_order_swap_strategy",
]
diff --git a/packages/trading/octobot_trading/personal_data/orders/active_order_swap_strategies/active_order_swap_strategy_factory.py b/packages/trading/octobot_trading/personal_data/orders/active_order_swap_strategies/active_order_swap_strategy_factory.py
new file mode 100644
index 000000000..02ad6b665
--- /dev/null
+++ b/packages/trading/octobot_trading/personal_data/orders/active_order_swap_strategies/active_order_swap_strategy_factory.py
@@ -0,0 +1,34 @@
+# Drakkar-Software OctoBot-Trading
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import typing
+
+import octobot_trading.constants
+import octobot_trading.personal_data.orders.active_order_swap_strategies.stop_first_active_order_swap_strategy
+import octobot_trading.personal_data.orders.active_order_swap_strategies.take_profit_first_active_order_swap_strategy
+
+if typing.TYPE_CHECKING:
+ import octobot_trading.personal_data
+
+
+def create_active_order_swap_strategy(
+ strategy_type: str, **kwargs
+) -> "octobot_trading.personal_data.ActiveOrderSwapStrategy":
+ if strategy_type == octobot_trading.personal_data.StopFirstActiveOrderSwapStrategy.__name__:
+ return octobot_trading.personal_data.StopFirstActiveOrderSwapStrategy(**kwargs)
+ elif strategy_type == octobot_trading.personal_data.TakeProfitFirstActiveOrderSwapStrategy.__name__:
+ return octobot_trading.personal_data.TakeProfitFirstActiveOrderSwapStrategy(**kwargs)
+ else:
+ raise ValueError(f"Invalid active order swap strategy type: {strategy_type}")
diff --git a/packages/trading/octobot_trading/personal_data/orders/order.py b/packages/trading/octobot_trading/personal_data/orders/order.py
index 50d3b3ce6..7c2e8a52a 100644
--- a/packages/trading/octobot_trading/personal_data/orders/order.py
+++ b/packages/trading/octobot_trading/personal_data/orders/order.py
@@ -159,8 +159,8 @@ def update(
fee=None, total_cost=constants.ZERO, timestamp=None,
order_type=None, reduce_only=None, close_position=None, position_side=None, fees_currency_side=None,
group=None, tag=None, quantity_currency=None, exchange_creation_params=None,
- associated_entry_id=None, trigger_above=None, trailing_profile: trailing_profiles.TrailingProfile=None,
- is_active=None, active_trigger: base_trigger_import.BaseTrigger = None,
+ associated_entry_id=None, trigger_above=None, trailing_profile: typing.Optional[trailing_profiles.TrailingProfile]=None,
+ is_active=None, active_trigger: typing.Optional[base_trigger_import.BaseTrigger] = None,
cancel_policy: typing.Optional[order_cancel_policy_import.OrderCancelPolicy] = None,
) -> bool:
changed: bool = False
@@ -494,7 +494,7 @@ async def set_as_inactive(self, active_trigger: base_trigger_import.BaseTrigger)
await self._ensure_inactive_order_watcher()
def should_become_active(self, price_time: float, current_price: decimal.Decimal) -> bool:
- if self.is_active:
+ if self.is_active or self.active_trigger is None:
return False
if price_time >= self.creation_time:
return self.active_trigger.triggers(current_price)
@@ -1137,7 +1137,7 @@ def is_synchronization_enabled(self):
self.exchange_manager.exchange_personal_data.orders_manager.enable_order_auto_synchronization
)
- def to_dict(self):
+ def to_dict(self) -> dict[str, typing.Any]:
filled_price = self.filled_price if self.filled_price > 0 else self.origin_price
return {
enums.ExchangeConstantsOrderColumns.ID.value: self.order_id,
diff --git a/packages/trading/octobot_trading/personal_data/orders/order_factory.py b/packages/trading/octobot_trading/personal_data/orders/order_factory.py
index 5998f02c6..942aa787f 100644
--- a/packages/trading/octobot_trading/personal_data/orders/order_factory.py
+++ b/packages/trading/octobot_trading/personal_data/orders/order_factory.py
@@ -13,21 +13,42 @@
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
+import decimal
+import typing
+import dataclasses
+
import octobot_commons.logging as logging
+import octobot_commons.signals as commons_signals
import octobot_trading.personal_data as personal_data
import octobot_trading.enums as enums
import octobot_trading.constants as constants
+import octobot_trading.errors as trading_errors
+import octobot_trading.modes.script_keywords as script_keywords
+
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges
+ import octobot_trading.modes
-def create_order_from_raw(trader, raw_order):
+def create_order_from_raw(
+ trader: "octobot_trading.exchanges.Trader",
+ raw_order: dict,
+) -> "personal_data.Order":
_, order_type = personal_data.parse_order_type(raw_order)
+ if not order_type:
+ logging.get_logger(__name__).error(
+ f"Unhandled order type: {raw_order.get(enums.ExchangeConstantsOrderColumns.TYPE.value)} ({raw_order=})"
+ )
return create_order_from_type(trader, order_type)
def create_order_instance_from_raw(
- trader, raw_order, force_open_or_pending_creation=False, has_just_been_created=False
-):
+ trader: "octobot_trading.exchanges.Trader",
+ raw_order: dict,
+ force_open_or_pending_creation: bool = False,
+ has_just_been_created: bool = False,
+) -> "personal_data.Order":
try:
order = create_order_from_raw(trader, raw_order)
order.update_from_raw(raw_order)
@@ -45,47 +66,49 @@ def create_order_instance_from_raw(
raise
-def create_order_from_type(trader, order_type, side=None):
+def create_order_from_type(
+ trader: "octobot_trading.exchanges.Trader",
+ order_type: enums.TraderOrderType,
+ side: typing.Optional[enums.TradeOrderSide] = None,
+) -> "personal_data.Order":
if side is None:
return personal_data.TraderOrderTypeClasses[order_type](trader)
return personal_data.TraderOrderTypeClasses[order_type](trader, side=side)
def create_order_instance(
- trader,
- order_type,
- symbol,
- current_price,
- quantity,
- price=constants.ZERO,
- stop_price=constants.ZERO,
+ trader: "octobot_trading.exchanges.Trader",
+ order_type: enums.TraderOrderType,
+ symbol: str,
+ current_price: decimal.Decimal,
+ quantity: decimal.Decimal,
+ price: decimal.Decimal = constants.ZERO,
+ stop_price: decimal.Decimal = constants.ZERO,
status: enums.OrderStatus = enums.OrderStatus.OPEN,
- order_id=None,
- exchange_order_id=None,
- filled_price=constants.ZERO,
- average_price=constants.ZERO,
- quantity_filled=constants.ZERO,
- total_cost=constants.ZERO,
- timestamp=0,
- side=None,
- trigger_above=None,
- fees_currency_side=None,
- group=None,
- tag=None,
- reduce_only=None,
- quantity_currency=None,
- close_position=False,
- exchange_creation_params=None,
- associated_entry_id=None,
- trailing_profile=None,
- is_active=None,
- active_trigger_price=None,
- active_trigger_above=None,
- cancel_policy=None,
-):
- order = create_order_from_type(trader=trader,
- order_type=order_type,
- side=side)
+ order_id: typing.Optional[str] = None,
+ exchange_order_id: typing.Optional[str] = None,
+ filled_price: decimal.Decimal = constants.ZERO,
+ average_price: decimal.Decimal = constants.ZERO,
+ quantity_filled: decimal.Decimal = constants.ZERO,
+ total_cost: decimal.Decimal = constants.ZERO,
+ timestamp: int = 0,
+ side: typing.Optional[enums.TradeOrderSide] = None,
+ trigger_above: typing.Optional[bool] = None,
+ fees_currency_side: typing.Optional[str] = None,
+ group: typing.Optional[str] = None,
+ tag: typing.Optional[str] = None,
+ reduce_only: typing.Optional[bool] = None,
+ quantity_currency: typing.Optional[str] = None,
+ close_position: bool = False,
+ exchange_creation_params: typing.Optional[dict] = None,
+ associated_entry_id: typing.Optional[str] = None,
+ trailing_profile: typing.Optional["personal_data.TrailingProfile"] = None,
+ is_active: typing.Optional[bool] = None,
+ active_trigger_price: typing.Optional[decimal.Decimal] = None,
+ active_trigger_above: typing.Optional[bool] = None,
+ cancel_policy: typing.Optional["personal_data.OrderCancelPolicy"] = None,
+) -> "personal_data.Order":
+ order = create_order_from_type(trader, order_type, side=side)
order.update(
order_type=order_type,
symbol=symbol,
@@ -120,7 +143,10 @@ def create_order_instance(
return order
-def create_order_from_dict(trader, order_dict):
+def create_order_from_dict(
+ trader: "octobot_trading.exchanges.Trader",
+ order_dict: dict,
+) -> "personal_data.Order":
"""
:param trader: the trader to associate the order to
:param order_dict: a dict formatted as from order.to_dict()
@@ -146,7 +172,25 @@ def create_order_from_dict(trader, order_dict):
)
-async def create_order_from_order_storage_details(order_storage_details, exchange_manager, pending_groups: dict):
+def create_order_from_order_raw_in_storage_details_without_related_elements(
+ exchange_manager: "octobot_trading.exchanges.ExchangeManager",
+ order_details: dict
+) -> "personal_data.Order":
+ """
+ unlike create_order_from_order_storage_details, will not create related elements and will
+ parse order from raw dict
+ """
+ order_dict = order_details[constants.STORAGE_ORIGIN_VALUE]
+ order = personal_data.create_order_instance_from_raw(exchange_manager.trader, order_dict)
+ order.update_from_storage_order_details(order_details)
+ return order
+
+
+async def create_order_from_order_storage_details(
+ order_storage_details: dict,
+ exchange_manager: "octobot_trading.exchanges.ExchangeManager",
+ pending_groups: dict,
+) -> "personal_data.Order":
order = create_order_from_dict(
exchange_manager.trader,
order_storage_details[constants.STORAGE_ORIGIN_VALUE]
@@ -158,7 +202,12 @@ async def create_order_from_order_storage_details(order_storage_details, exchang
return order
-async def restore_chained_orders_from_storage_order_details(order, order_details, exchange_manager, pending_groups):
+async def restore_chained_orders_from_storage_order_details(
+ order: "personal_data.Order",
+ order_details: dict,
+ exchange_manager: "octobot_trading.exchanges.ExchangeManager",
+ pending_groups: dict,
+) -> None:
chained_orders = order_details.get(enums.StoredOrdersAttr.CHAINED_ORDERS.value, None)
if chained_orders:
for chained_order in chained_orders:
@@ -174,3 +223,302 @@ async def restore_chained_orders_from_storage_order_details(order, order_details
)
order.add_chained_order(chained_order_inst)
logging.get_logger(order.get_logger_name()).debug(f"Restored chained order: {chained_order_inst}")
+
+
+@dataclasses.dataclass
+class _OrderDetails:
+    # user-provided take profit price and the ratio of the base order quantity it consumes
+    input_price: decimal.Decimal
+    input_quantity_ratio: decimal.Decimal
+
+
+class OrderFactory:
+ def __init__(
+ self,
+ exchange_manager: typing.Optional["octobot_trading.exchanges.ExchangeManager"],
+ trading_mode: typing.Optional["octobot_trading.modes.AbstractTradingMode"],
+ dependencies: typing.Optional[commons_signals.SignalDependencies],
+ wait_for_creation: bool,
+ try_to_handle_unconfigured_symbol: bool,
+ ):
+ self.exchange_manager: "octobot_trading.exchanges.ExchangeManager" = exchange_manager # type: ignore
+ self.trading_mode: typing.Optional["octobot_trading.modes.AbstractTradingMode"] = trading_mode
+ self.dependencies: typing.Optional[commons_signals.SignalDependencies] = dependencies
+ self.wait_for_creation: bool = wait_for_creation
+ self.try_to_handle_unconfigured_symbol: bool = try_to_handle_unconfigured_symbol
+
+ def validate(self) -> None:
+ if self.exchange_manager is None:
+ raise ValueError(
+ f"exchange_manager is required to use {self.__class__.__name__}"
+ )
+
+ def _get_validated_amounts_and_prices(
+ self, symbol: str, amount: decimal.Decimal, price: decimal.Decimal, symbol_market
+ ) -> list[tuple[decimal.Decimal, decimal.Decimal]]:
+ quantities_and_prices = personal_data.decimal_check_and_adapt_order_details_if_necessary(
+ amount, price, symbol_market
+ )
+ if len(quantities_and_prices) == 0:
+ min_amount = personal_data.get_minimal_order_amount(symbol_market)
+ if amount < min_amount:
+ raise trading_errors.MissingMinimalExchangeTradeVolume(
+ f"An order amount of {amount} is too small to trade {symbol} on {self.exchange_manager.exchange_name}. Minimum amount is {min_amount}."
+ )
+ cost = price * amount
+ min_cost = personal_data.get_minimal_order_cost(symbol_market)
+ if cost < min_cost:
+ raise trading_errors.MissingMinimalExchangeTradeVolume(
+ f"An order cost of {cost} is too small to trade {symbol} on {self.exchange_manager.exchange_name}. Minimum cost is {min_cost}."
+ )
+ return quantities_and_prices
+
+ async def _get_computed_price(self, ctx: script_keywords.Context, order_price: str) -> decimal.Decimal:
+ return await script_keywords.get_price_with_offset(ctx, order_price, use_delta_type_as_flat_value=True)
+
+ async def _get_computed_quantity(
+ self, ctx: script_keywords.Context, input_amount: str,
+ side: enums.TradeOrderSide, target_price: decimal.Decimal,
+ reduce_only: bool, allow_holdings_adaptation: bool
+ ):
+ if not input_amount or input_amount == "0":
+ return constants.ZERO
+ return await script_keywords.get_amount_from_input_amount(
+ context=ctx,
+ input_amount=input_amount,
+ side=side.value,
+ reduce_only=reduce_only,
+ is_stop_order=False,
+ use_total_holding=False,
+ target_price=target_price,
+ # raise when not enough funds to create an order according to user input
+ allow_holdings_adaptation=allow_holdings_adaptation,
+ )
+
+ def _ensure_supported_order_type(self, order_type: enums.TraderOrderType):
+ if not self.exchange_manager.exchange.is_supported_order_type(order_type):
+ raise trading_errors.NotSupportedOrderTypeError(
+ f"{order_type.name} orders are not supported on {self.exchange_manager.exchange_name}",
+ order_type
+ )
+
+ async def _create_stop_orders(
+ self, ctx: script_keywords.Context,
+ base_order: "personal_data.Order",
+ symbol_market: dict,
+ params: dict, chained_orders: list["personal_data.Order"],
+ stop_loss_price: typing.Optional[decimal.Decimal] = None,
+ ):
+ if not stop_loss_price:
+ return
+ self._ensure_supported_order_type(enums.TraderOrderType.STOP_LOSS)
+ computed_stop_price = await self._get_computed_price(ctx, stop_loss_price)
+ adapted_stop_price = personal_data.decimal_adapt_price(
+ symbol_market, computed_stop_price
+ )
+ exit_side = (
+ enums.TradeOrderSide.SELL
+ if base_order.side == enums.TradeOrderSide.BUY else enums.TradeOrderSide.BUY
+ )
+ param_update, chained_order = await personal_data.create_and_register_chained_order_on_base_order(
+ base_order, adapted_stop_price, enums.TraderOrderType.STOP_LOSS, exit_side,
+ quantity=base_order.origin_quantity, tag=base_order.tag,
+ reduce_only=self.exchange_manager.is_future,
+ )
+ params.update(param_update)
+ chained_orders.append(chained_order)
+
+ async def _create_take_profit_orders(
+ self,
+ ctx: script_keywords.Context,
+ base_order: "personal_data.Order",
+ symbol_market: dict,
+ params: dict,
+ chained_orders: list["personal_data.Order"],
+ take_profit_prices: typing.Optional[list[decimal.Decimal]] = None,
+ take_profit_volume_percents: typing.Optional[list[decimal.Decimal]] = None,
+ ):
+ if not take_profit_prices:
+ return
+ take_profit_volume_percents = take_profit_volume_percents or []
+ if len(take_profit_volume_percents) not in (0, len(take_profit_prices)):
+ raise trading_errors.InvalidArgumentError(
+                "There must be either 0 or as many take profit volume percents as take profit prices"
+ )
+ exit_side = (
+ enums.TradeOrderSide.SELL
+ if base_order.side == enums.TradeOrderSide.BUY else enums.TradeOrderSide.BUY
+ )
+ total_take_profit_volume_percent = decimal.Decimal(str(sum(
+ float(v) for v in take_profit_volume_percents)
+ ))
+ take_profit_order_details = [
+ _OrderDetails(
+ take_profit_price,
+ (decimal.Decimal(str(take_profit_volume_percents[i])) / total_take_profit_volume_percent) if total_take_profit_volume_percent else (
+ constants.ONE / len(take_profit_prices)
+ )
+ )
+ for i, take_profit_price in enumerate(take_profit_prices)
+ ]
+ for index, take_profits_detail in enumerate(take_profit_order_details):
+ is_last = index == len(take_profit_order_details) - 1
+ price = await self._get_computed_price(ctx, take_profits_detail.input_price)
+ quantity = personal_data.decimal_adapt_quantity(
+ symbol_market, base_order.origin_quantity * take_profits_detail.input_quantity_ratio
+ )
+ order_type = self.exchange_manager.trader.get_take_profit_order_type(
+ base_order,
+ enums.TraderOrderType.SELL_LIMIT if exit_side is enums.TradeOrderSide.SELL
+ else enums.TraderOrderType.BUY_LIMIT
+ )
+ param_update, chained_order = await personal_data.create_and_register_chained_order_on_base_order(
+ base_order, price, order_type, exit_side,
+ quantity=quantity, tag=base_order.tag, reduce_only=self.exchange_manager.is_future,
+                # only the last take profit order accounts for the triggering order's fees
+ update_with_triggering_order_fees=is_last and not self.exchange_manager.is_future
+ )
+ params.update(param_update)
+ chained_orders.append(chained_order)
+
+ def _create_active_order_swap_strategy(
+ self,
+ active_order_swap_strategy_type: typing.Optional[str] = None,
+ active_order_swap_strategy_params: typing.Optional[dict] = None
+ ) -> "personal_data.ActiveOrderSwapStrategy":
+ return personal_data.create_active_order_swap_strategy(
+ active_order_swap_strategy_type, **(active_order_swap_strategy_params or {})
+ )
+
+ async def _create_base_order_associated_elements(
+ self,
+ base_order: "personal_data.Order",
+ ctx: script_keywords.Context,
+ symbol_market: dict,
+ stop_loss_price: typing.Optional[decimal.Decimal] = None,
+ take_profit_prices: typing.Optional[list[decimal.Decimal]] = None,
+ take_profit_volume_percents: typing.Optional[list[decimal.Decimal]] = None,
+ trailing_profile_type: typing.Optional[str] = None,
+ active_order_swap_strategy_type: typing.Optional[str] = None,
+ active_order_swap_strategy_params: typing.Optional[dict] = None,
+ ) -> None:
+ # create chained orders
+ params = {}
+ chained_orders = []
+ await self._create_stop_orders(
+ ctx, base_order, symbol_market, params, chained_orders, stop_loss_price
+ )
+ await self._create_take_profit_orders(
+ ctx, base_order, symbol_market, params, chained_orders, take_profit_prices, take_profit_volume_percents
+ )
+ stop_orders = [o for o in chained_orders if personal_data.is_stop_order(o.order_type)]
+ tp_orders = [o for o in chained_orders if not personal_data.is_stop_order(o.order_type)]
+ if stop_orders and tp_orders:
+
+ active_order_swap_strategy = self._create_active_order_swap_strategy(
+ active_order_swap_strategy_type, active_order_swap_strategy_params
+ )
+ if len(stop_orders) == len(tp_orders):
+ group_type = personal_data.OneCancelsTheOtherOrderGroup
+ elif trailing_profile_type == personal_data.TrailingProfileTypes.FILLED_TAKE_PROFIT.value:
+ group_type = personal_data.TrailingOnFilledTPBalancedOrderGroup
+ entry_price = base_order.origin_price
+ for stop_order in stop_orders:
+ # register trailing profile in stop orders
+ stop_order.trailing_profile = personal_data.create_filled_take_profit_trailing_profile(
+ entry_price, tp_orders
+ )
+ else:
+ group_type = personal_data.BalancedTakeProfitAndStopOrderGroup
+ oco_group = self.exchange_manager.exchange_personal_data.orders_manager.create_group(
+ group_type, active_order_swap_strategy=active_order_swap_strategy
+ )
+ for order in chained_orders:
+ order.add_to_order_group(oco_group)
+ # in futures, inactive orders are not necessary
+ if self.exchange_manager.trader.enable_inactive_orders and not self.exchange_manager.is_future:
+ await oco_group.active_order_swap_strategy.apply_inactive_orders(chained_orders)
+
+
+ async def create_base_orders_and_associated_elements(
+ self,
+ order_type: enums.TraderOrderType,
+ symbol: str,
+ side: enums.TradeOrderSide,
+ amount: str,
+ price: typing.Optional[decimal.Decimal] = None,
+ reduce_only: bool = False,
+ allow_holdings_adaptation: bool = False,
+ tag: typing.Optional[str] = None,
+ exchange_creation_params: typing.Optional[dict] = None,
+ cancel_policy: typing.Optional["personal_data.OrderCancelPolicy"] = None,
+ stop_loss_price: typing.Optional[decimal.Decimal] = None,
+ take_profit_prices: typing.Optional[list[decimal.Decimal]] = None,
+ take_profit_volume_percents: typing.Optional[list[decimal.Decimal]] = None,
+ trailing_profile_type: typing.Optional[str] = None,
+ active_order_swap_strategy_type: typing.Optional[str] = None,
+ active_order_swap_strategy_params: typing.Optional[dict] = None,
+ ) -> list["personal_data.Order"]:
+ if symbol not in self.exchange_manager.exchange_symbols_data.exchange_symbol_data:
+ if self.try_to_handle_unconfigured_symbol:
+ raise NotImplementedError("try_to_handle_unconfigured_symbol is not yet implemented")
+ else:
+ raise trading_errors.UnSupportedSymbolError(
+ f"Symbol {symbol} not found in exchange traded symbols. Available symbols: "
+ f"{', '.join(self.exchange_manager.exchange_symbols_data.exchange_symbol_data)}"
+ )
+ current_price = await personal_data.get_up_to_date_price(
+ self.exchange_manager, symbol=symbol, timeout=constants.ORDER_DATA_FETCHING_TIMEOUT
+ )
+ symbol_market = self.exchange_manager.exchange.get_market_status(symbol, with_fixer=False)
+ ctx = script_keywords.get_base_context_from_exchange_manager(self.exchange_manager, symbol)
+ # market orders have no price
+ computed_price = current_price if price is None else await self._get_computed_price(ctx, price)
+ computed_amount = await self._get_computed_quantity(
+ ctx, amount, side, computed_price, reduce_only, allow_holdings_adaptation=allow_holdings_adaptation
+ )
+ valid_amount_and_prices = self._get_validated_amounts_and_prices(
+ symbol, computed_amount, computed_price, symbol_market
+ )
+ base_orders = []
+ for valid_amount, valid_price in valid_amount_and_prices:
+ base_order = personal_data.create_order_instance(
+ trader=self.exchange_manager.trader,
+ order_type=order_type,
+ symbol=symbol,
+ current_price=current_price,
+ quantity=valid_amount,
+ price=valid_price,
+ side=side,
+ tag=tag,
+ reduce_only=reduce_only,
+ exchange_creation_params=exchange_creation_params,
+ cancel_policy=cancel_policy,
+ )
+ await self._create_base_order_associated_elements(
+ base_order,
+ ctx,
+ symbol_market,
+ stop_loss_price,
+ take_profit_prices,
+ take_profit_volume_percents,
+ trailing_profile_type,
+ active_order_swap_strategy_type,
+ active_order_swap_strategy_params,
+ )
+ base_orders.append(base_order)
+ return base_orders
+
+ async def create_order_on_exchange(
+ self, order: "personal_data.Order",
+ ) -> typing.Optional["personal_data.Order"]:
+ return (
+ await self.trading_mode.create_order(
+ order, dependencies=self.dependencies, wait_for_creation=self.wait_for_creation
+ ) if self.trading_mode else (
+ await self.exchange_manager.trader.create_order(order, wait_for_creation=self.wait_for_creation)
+ )
+ )
+
+ def _logger(self) -> logging.BotLogger:
+ return logging.get_logger(f"{self.__class__.__name__} | {self.exchange_manager.exchange_name}")
diff --git a/packages/trading/octobot_trading/personal_data/orders/order_util.py b/packages/trading/octobot_trading/personal_data/orders/order_util.py
index bf684fce4..5ad9670e2 100644
--- a/packages/trading/octobot_trading/personal_data/orders/order_util.py
+++ b/packages/trading/octobot_trading/personal_data/orders/order_util.py
@@ -19,6 +19,7 @@
import contextlib
import uuid
import typing
+import collections
import octobot_commons.symbols as symbol_util
import octobot_commons.constants as commons_constants
@@ -30,11 +31,15 @@
import octobot_trading.personal_data.orders.decimal_order_adapter as decimal_order_adapter
import octobot_trading.personal_data.orders.states.fill_order_state as fill_order_state
import octobot_trading.personal_data.orders.order as order_import
+import octobot_trading.personal_data.orders.order_factory as order_factory
import octobot_trading.personal_data.orders.triggers.price_trigger as price_trigger
import octobot_trading.exchanges.util.exchange_market_status_fixer as exchange_market_status_fixer
import octobot_trading.signals as signals
from octobot_trading.enums import ExchangeConstantsMarketStatusColumns as Ecmsc
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges
+
LOGGER_NAME = "order_util"
@@ -799,6 +804,41 @@ async def adapt_chained_order_before_creation(base_order, chained_order):
return can_be_created
+async def create_and_register_chained_order_on_base_order(
+ base_order: order_import.Order,
+ price: decimal.Decimal, order_type: enums.TraderOrderType, side: enums.TradeOrderSide,
+ quantity: typing.Optional[decimal.Decimal] = None, allow_bundling: bool = True,
+ tag: typing.Optional[str] = None, reduce_only: bool = False,
+ update_with_triggering_order_fees: typing.Optional[bool] = None
+) -> tuple[dict, order_import.Order]:
+ exchange_manager = base_order.exchange_manager
+ chained_order = order_factory.create_order_instance(
+ trader=exchange_manager.trader,
+ order_type=order_type,
+ symbol=base_order.symbol,
+ current_price=price,
+ quantity=quantity or base_order.origin_quantity,
+ price=price,
+ side=side,
+ associated_entry_id=base_order.order_id,
+ reduce_only=reduce_only,
+ tag=tag,
+ )
+ params = {}
+ # do not reduce chained order amounts to account for fees when trading futures
+ if update_with_triggering_order_fees is None:
+ update_with_triggering_order_fees = not exchange_manager.is_future
+ if allow_bundling:
+ params = await exchange_manager.trader.bundle_chained_order_with_uncreated_order(
+ base_order, chained_order, update_with_triggering_order_fees
+ )
+ else:
+ await exchange_manager.trader.chain_order(
+ base_order, chained_order, update_with_triggering_order_fees, False
+ )
+ return params, chained_order
+
+
async def wait_for_order_fill(order, timeout, wait_for_portfolio_update):
if order.is_open():
if order.state is None:
@@ -842,3 +882,21 @@ def get_short_order_summary(order: typing.Union[dict, order_import.Order]) -> st
f"{order[enums.ExchangeConstantsOrderColumns.TYPE.value]} {order[enums.ExchangeConstantsOrderColumns.AMOUNT.value]}{filled} "
f"{order[enums.ExchangeConstantsOrderColumns.SYMBOL.value]} at {order[enums.ExchangeConstantsOrderColumns.PRICE.value]} cost: {round(cost, 8)}"
)
+
+
+def get_enriched_orders_by_exchange_id(enriched_orders: list[dict]) -> dict[str, dict]:
+ return {
+ order_details[constants.STORAGE_ORIGIN_VALUE][
+ enums.ExchangeConstantsOrderColumns.EXCHANGE_ID.value
+ ]: order_details
+ for order_details in enriched_orders
+ }
+
+
+def get_symbol_count(raw_trades_or_raw_orders: list[dict]) -> dict[str, int]:
+ return dict(
+ collections.Counter(
+ element[enums.ExchangeConstantsOrderColumns.SYMBOL.value]
+ for element in raw_trades_or_raw_orders
+ )
+ )
diff --git a/packages/trading/octobot_trading/personal_data/orders/orders_manager.py b/packages/trading/octobot_trading/personal_data/orders/orders_manager.py
index 8d64f0d97..6361c19c8 100644
--- a/packages/trading/octobot_trading/personal_data/orders/orders_manager.py
+++ b/packages/trading/octobot_trading/personal_data/orders/orders_manager.py
@@ -27,12 +27,17 @@
import octobot_trading.personal_data.orders.order as order_class
import octobot_trading.personal_data.orders.order_factory as order_factory
import octobot_trading.personal_data.orders.order_util as order_util
+import octobot_trading.personal_data.orders.orders_storage_operations as orders_storage_operations
import octobot_trading.exchanges
import octobot_trading.personal_data.orders.active_order_swap_strategies.active_order_swap_strategy as \
active_order_swap_strategy_import
import octobot_trading.personal_data.orders.order_group as order_group_import
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+
class OrdersManager(util.Initializable):
MAX_ORDERS_COUNT = 0
@@ -256,6 +261,25 @@ def disabled_order_auto_synchronization(self):
finally:
self.enable_order_auto_synchronization = True
+ async def initialize_from_exchange_data(self, exchange_data: "exchange_data_import.ExchangeData") -> None:
+ """
+ Initialize orders from exchange data by parsing open orders and adding them to this manager.
+ """
+ exchange_manager = self.trader.exchange_manager
+ pending_groups = {}
+ for order_details in exchange_data.orders_details.open_orders:
+ if constants.STORAGE_ORIGIN_VALUE in order_details:
+ order = order_factory.create_order_from_order_raw_in_storage_details_without_related_elements(
+ exchange_manager, order_details
+ )
+ await orders_storage_operations.create_orders_storage_related_elements(
+ order, order_details, exchange_manager, pending_groups
+ )
+ else:
+ # simple order dict (order just fetched from exchange)
+ order = order_factory.create_order_instance_from_raw(self.trader, order_details)
+ await self.upsert_order_instance(order)
+
# private methods
def _reset_orders(self):
self.orders_initialized = False
diff --git a/packages/trading/octobot_trading/personal_data/portfolios/portfolio_manager.py b/packages/trading/octobot_trading/personal_data/portfolios/portfolio_manager.py
index bc55d5ff1..2d686ef0d 100644
--- a/packages/trading/octobot_trading/personal_data/portfolios/portfolio_manager.py
+++ b/packages/trading/octobot_trading/personal_data/portfolios/portfolio_manager.py
@@ -25,14 +25,17 @@
import octobot_trading.exchange_channel as exchange_channel
import octobot_trading.constants as constants
+import octobot_trading.enums as enums
import octobot_trading.errors as errors
import octobot_trading.personal_data as personal_data
+import octobot_trading.storage as storage
import octobot_trading.util as util
import octobot_trading.enums as enums
import octobot_trading.personal_data.portfolios.update_events as update_events
if typing.TYPE_CHECKING:
import octobot_trading.exchanges
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
class PortfolioManager(util.Initializable):
@@ -340,6 +343,32 @@ def refresh_portfolio_available_from_order(self, order, is_new_order):
if self.enable_portfolio_available_update_from_order:
self.portfolio.update_portfolio_available(order, is_new_order=is_new_order)
+ async def initialize_from_exchange_data(self, exchange_data: "exchange_data_import.ExchangeData") -> None:
+ """
+ Lock funds for chained orders from missing orders in portfolio.
+ """
+ groups = {}
+ for base_order in exchange_data.orders_details.missing_orders:
+ for chained_order_dict in base_order.get(enums.StoredOrdersAttr.CHAINED_ORDERS.value, []):
+ chained_order = await personal_data.create_order_from_order_storage_details(
+ storage.orders_storage.from_order_document(chained_order_dict),
+ self.exchange_manager,
+ groups,
+ )
+ if chained_order.update_with_triggering_order_fees and (
+ base_order_exchange_id := base_order.get(constants.STORAGE_ORIGIN_VALUE, {}).get(
+ enums.ExchangeConstantsOrderColumns.EXCHANGE_ID.value
+ )
+ ):
+ trade = personal_data.aggregate_trades_by_exchange_order_id(
+ self.exchange_manager.exchange_personal_data.trades_manager.get_trades(
+ exchange_order_id=base_order_exchange_id
+ )
+ ).get(base_order_exchange_id)
+ if trade:
+ chained_order.update_quantity_with_order_fees(trade)
+ self.portfolio.update_portfolio_available(chained_order, is_new_order=True)
+
def _load_portfolio(self, reset_from_config):
"""
Load simulated portfolio from config if required
diff --git a/packages/trading/octobot_trading/personal_data/portfolios/portfolio_value_holder.py b/packages/trading/octobot_trading/personal_data/portfolios/portfolio_value_holder.py
index 8ee5d68a8..e2db9741f 100644
--- a/packages/trading/octobot_trading/personal_data/portfolios/portfolio_value_holder.py
+++ b/packages/trading/octobot_trading/personal_data/portfolios/portfolio_value_holder.py
@@ -21,12 +21,16 @@
import octobot_commons.symbols as symbol_util
import octobot_trading.constants as constants
+import octobot_trading.exchanges as exchanges
import octobot_trading.errors as errors
import octobot_trading.enums as enums
import octobot_trading.personal_data.portfolios.value_converter as value_converter
import octobot_trading.personal_data.portfolios
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
class PortfolioValueHolder:
"""
PortfolioValueHolder calculates the current and the origin portfolio value in reference market for each updates
@@ -57,6 +61,49 @@ def reset_portfolio_values(self):
self.origin_crypto_currencies_values = {}
self.current_crypto_currencies_values = {}
+ def initialize_from_exchange_data(
+ self, exchange_data: "exchange_data_import.ExchangeData", price_by_symbol: dict[str, float]
+ ) -> None:
+ """
+ Initialize prices and portfolio values from exchange data.
+ """
+ self.value_converter.initialize_from_exchange_data(exchange_data, price_by_symbol)
+ self._sync_portfolio_current_value_if_necessary()
+
+ def _sync_portfolio_current_value_if_necessary(self) -> None:
+ if not self.portfolio_manager.portfolio.portfolio:
+ # portfolio is not initialized, skip portfolio values initialization
+ return
+ try:
+ self._sync_portfolio_current_value_using_available_currencies_values(init_price_fetchers=False)
+ portfolio_value = self.portfolio_current_value
+ if not portfolio_value or portfolio_value <= constants.ZERO:
+ if self._should_have_initialized_portfolio_values():
+ # should not happen (if it does, holding ratios using portfolio_value can't
+ # be computed)
+ # This is not critical but should be fixed if seen
+ self.logger.error(
+ f"[{self.portfolio_manager.exchange_manager.exchange_name}] Portfolio current value "
+ f"can't be initialized: {portfolio_value=}"
+ )
+ else:
+ self.logger.info(
+ f"[{self.portfolio_manager.exchange_manager.exchange_name}] Portfolio current value "
+ f"not initialized: no traded asset holdings in portfolio"
+ )
+ except Exception as err:
+ self.logger.exception(err, True, f"Error when initializing trading portfolio values: {err}")
+
+ def _should_have_initialized_portfolio_values(self) -> bool:
+ portfolio_assets = [
+ asset
+ for asset, values in self.portfolio_manager.portfolio.portfolio.items()
+ if values.total > constants.ZERO
+ ]
+ if any(coin in portfolio_assets for coin in exchanges.get_traded_assets(self.portfolio_manager.exchange_manager)):
+ return True
+ return False
+
def update_origin_crypto_currencies_values(self, symbol, mark_price):
"""
Update origin cryptocurrencies value
@@ -106,7 +153,7 @@ def get_current_crypto_currencies_values(self):
:return: the current crypto-currencies values
"""
if not self.current_crypto_currencies_values:
- self.sync_portfolio_current_value_using_available_currencies_values()
+ self._sync_portfolio_current_value_using_available_currencies_values()
return self.current_crypto_currencies_values
def get_current_holdings_values(self):
@@ -171,7 +218,7 @@ def handle_profitability_recalculation(self, force_recompute_origin_portfolio):
Initialize values required by portfolio profitability to perform its profitability calculation
:param force_recompute_origin_portfolio: when True, force origin portfolio computation
"""
- self.sync_portfolio_current_value_using_available_currencies_values()
+ self._sync_portfolio_current_value_using_available_currencies_values()
self._init_portfolio_values_if_necessary(force_recompute_origin_portfolio)
def get_origin_portfolio_current_value(self, refresh_values=False):
@@ -248,7 +295,7 @@ def _fill_currencies_values(self, currencies_values):
if currency not in currencies_values
})
- def sync_portfolio_current_value_using_available_currencies_values(self, init_price_fetchers=True):
+ def _sync_portfolio_current_value_using_available_currencies_values(self, init_price_fetchers=True):
"""
:param init_price_fetchers: When True, can init price using fetchers
Update the portfolio current value with the current portfolio instance
diff --git a/packages/trading/octobot_trading/personal_data/portfolios/value_converter.py b/packages/trading/octobot_trading/personal_data/portfolios/value_converter.py
index 155b92c93..ddc1b3d7c 100644
--- a/packages/trading/octobot_trading/personal_data/portfolios/value_converter.py
+++ b/packages/trading/octobot_trading/personal_data/portfolios/value_converter.py
@@ -15,6 +15,7 @@
# License along with this library.
import asyncio
import decimal
+import typing
import octobot_commons.logging as logging
import octobot_commons.symbols as symbol_util
@@ -23,7 +24,10 @@
import octobot_trading.constants as constants
import octobot_trading.errors as errors
+import octobot_trading.exchanges as exchanges
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
class ValueConverter:
"""
@@ -50,6 +54,31 @@ def __init__(self, portfolio_manager):
self._price_bridge_by_symbol = {}
self._missing_price_bridges = set()
+ def initialize_from_exchange_data(
+ self, exchange_data: "exchange_data_import.ExchangeData", price_by_symbol: dict[str, float]
+ ) -> None:
+ """
+ Initialize value converter last prices from exchange data.
+ """
+ added_symbols = set()
+ for market in exchange_data.markets:
+ price = price_by_symbol.get(market.symbol)
+ if price is not None:
+ self.update_last_price(market.symbol, decimal.Decimal(str(price)))
+ added_symbols.add(market.symbol)
+ ref_market = self.portfolio_manager.reference_market
+ for asset, value in exchange_data.portfolio_details.asset_values.items():
+ if asset == ref_market:
+ continue
+ # include fetched portfolio assets values to be able to value them in ref market in case they
+ # are not already added from traded pairs
+ value_symbol = symbol_util.merge_currencies(asset, ref_market)
+ decimal_value = decimal.Decimal(str(value))
+ if value_symbol not in added_symbols:
+ exchanges.force_set_mark_price(self.portfolio_manager.exchange_manager, value_symbol, decimal_value)
+ self.update_last_price(value_symbol, decimal_value)
+ added_symbols.add(value_symbol)
+
def update_last_price(self, symbol, price):
if symbol not in self.last_prices_by_trading_pair:
self.reset_missing_price_bridges()
diff --git a/packages/trading/octobot_trading/personal_data/positions/positions_manager.py b/packages/trading/octobot_trading/personal_data/positions/positions_manager.py
index a37b09768..fbb99b3a9 100644
--- a/packages/trading/octobot_trading/personal_data/positions/positions_manager.py
+++ b/packages/trading/octobot_trading/personal_data/positions/positions_manager.py
@@ -16,11 +16,13 @@
import collections
import contextlib
import typing
+import copy
import octobot_commons.logging as logging
import octobot_commons.enums as commons_enums
import octobot_commons.tree as commons_tree
+import octobot_trading.exchange_data.contracts.contract_factory as contract_factory
import octobot_trading.personal_data.positions.position_factory as position_factory
import octobot_trading.personal_data.positions.position as position_import
import octobot_trading.util as util
@@ -29,6 +31,9 @@
import octobot_trading.errors as errors
import octobot_trading.exchange_channel as exchange_channel
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
class PositionsManager(util.Initializable):
POSITION_ID_SEPARATOR = "_"
@@ -232,6 +237,31 @@ def disabled_positions_update_from_order(self):
finally:
self._enable_position_update_from_order = True
+ def initialize_from_exchange_data(
+ self, exchange_data: "exchange_data_import.ExchangeData",
+ exclusively_use_exchange_position_details: bool = False
+ ) -> None:
+ """
+ Initialize positions from exchange data by parsing position details and adding them to this manager.
+ """
+ exchange_manager = self.trader.exchange_manager
+ contract_factory.initialize_contracts_from_exchange_data(exchange_manager, exchange_data)
+ for position_details in exchange_data.positions:
+ if not self._is_cleared_position(position_details.position):
+ position = position_factory.create_position_instance_from_dict(
+ self.trader, copy.copy(position_details.position)
+ )
+ position.position_id = self.create_position_id(position)
+ self.add_position(position)
+ self.is_exclusively_using_exchange_position_details = exclusively_use_exchange_position_details
+
+ @staticmethod
+ def _is_cleared_position(position_dict: dict) -> bool:
+ for key in position_dict:
+ if key not in constants.MINIMAL_POSITION_IDENTIFICATION_DETAILS_KEYS:
+ return False
+ return True
+
# private
def _position_id_factory(
diff --git a/packages/trading/octobot_trading/personal_data/trades/trade.py b/packages/trading/octobot_trading/personal_data/trades/trade.py
index 797351623..6a8dd3901 100644
--- a/packages/trading/octobot_trading/personal_data/trades/trade.py
+++ b/packages/trading/octobot_trading/personal_data/trades/trade.py
@@ -47,10 +47,10 @@ def __init__(self, trader):
self.symbol: str = None # type: ignore
self.currency: typing.Optional[str] = None
self.market: typing.Optional[str] = None
- self.taker_or_maker: str = None # type: ignore
+ self.taker_or_maker: typing.Optional[str] = None
self.origin_price: decimal.Decimal = constants.ZERO
self.origin_quantity: decimal.Decimal = constants.ZERO
- self.trade_type: typing.Optional[enums.TradeOrderType] = None
+ self.trade_type: typing.Optional[enums.TraderOrderType] = None
self.side: enums.TradeOrderSide = None # type: ignore
self.executed_quantity: decimal.Decimal = constants.ZERO
self.canceled_time: float = 0
@@ -60,15 +60,21 @@ def __init__(self, trader):
self.trade_profitability: decimal.Decimal = constants.ZERO
self.total_cost: decimal.Decimal = constants.ZERO
self.reduce_only: bool = False
- self.tag: str = None # type: ignore
- self.quantity_currency: str = None # type: ignore
+ self.tag: typing.Optional[str] = None
+ self.quantity_currency: typing.Optional[str] = None
self.associated_entry_ids: typing.Optional[list[str]] = None
self.broker_applied: bool = False
# raw exchange trade type, used to create trade dict
self.exchange_trade_type: typing.Optional[enums.TradeOrderType] = None
- def update_from_order(self, order, creation_time=0, canceled_time=0, executed_time=0, exchange_trade_id=None):
+ def update_from_order(
+ self, order: "order_import.Order",
+ creation_time: float=0,
+ canceled_time: float=0,
+ executed_time: float=0,
+ exchange_trade_id: typing.Optional[str]=None
+ ) -> None:
self.currency = order.currency
self.market = order.market
self.taker_or_maker = order.taker_or_maker
diff --git a/packages/trading/octobot_trading/personal_data/trades/trades_manager.py b/packages/trading/octobot_trading/personal_data/trades/trades_manager.py
index 27ab11b52..fd2f729d9 100644
--- a/packages/trading/octobot_trading/personal_data/trades/trades_manager.py
+++ b/packages/trading/octobot_trading/personal_data/trades/trades_manager.py
@@ -26,6 +26,9 @@
import octobot_trading.personal_data.trades.trade_pnl as trade_pnl
import octobot_trading.util as util
+if typing.TYPE_CHECKING:
+ import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
class TradesManager(util.Initializable):
# memory usage for 100000 trades: approx 180 Mo
@@ -155,6 +158,15 @@ def get_trades(self, origin_order_id=None, exchange_order_id=None):
)
]
+ def initialize_from_exchange_data(self, exchange_data: "exchange_data_import.ExchangeData") -> None:
+ """
+ Initialize trades from exchange data by parsing trade dicts and adding them to this manager.
+ """
+ for trade_dict in exchange_data.trades:
+ trade = personal_data.create_trade_from_dict(self.trader, trade_dict)
+ trade.trade_id = trade.trade_id or self.trader.generate_random_order_id()
+ self.upsert_trade_instance(trade)
+
# private
def _check_trades_size(self):
if len(self.trades) > self.MAX_TRADES_COUNT:
diff --git a/packages/trading/octobot_trading/util/test_tools/exchanges_test_tools.py b/packages/trading/octobot_trading/util/test_tools/exchanges_test_tools.py
index 4964d3b6f..6729ba2a0 100644
--- a/packages/trading/octobot_trading/util/test_tools/exchanges_test_tools.py
+++ b/packages/trading/octobot_trading/util/test_tools/exchanges_test_tools.py
@@ -32,7 +32,7 @@
import octobot_trading.personal_data as personal_data
-import octobot_trading.util.test_tools.exchange_data as exchange_data_import
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
BASE_TIMEOUT = 10
diff --git a/packages/trading/tests/blockchain_wallets/test_wallet_factory.py b/packages/trading/tests/blockchain_wallets/test_wallet_factory.py
index 37f93e79a..301d2cb79 100644
--- a/packages/trading/tests/blockchain_wallets/test_wallet_factory.py
+++ b/packages/trading/tests/blockchain_wallets/test_wallet_factory.py
@@ -52,18 +52,6 @@ def test_create_blockchain_wallet_simulated(mock_trader_simulate, blockchain_des
assert wallet._trader == mock_trader_simulate
-def test_create_blockchain_wallet_simulated_wrong_network(mock_trader_simulate, blockchain_descriptor_real, wallet_descriptor):
- parameters = octobot_trading.blockchain_wallets.BlockchainWalletParameters(
- blockchain_descriptor=blockchain_descriptor_real,
- wallet_descriptor=wallet_descriptor
- )
-
- # Should raise BlockchainWalletConfigurationError when network is not SIMULATED
- with pytest.raises(errors.BlockchainWalletConfigurationError) as exc_info:
- octobot_trading.blockchain_wallets.create_blockchain_wallet(parameters, mock_trader_simulate)
- assert constants.SIMULATED_BLOCKCHAIN_NETWORK in str(exc_info.value)
-
-
def test_create_blockchain_wallet_real_trader_unsupported_blockchain(mock_trader_real, wallet_descriptor):
blockchain_descriptor = octobot_trading.blockchain_wallets.BlockchainDescriptor(
blockchain="unsupported_blockchain",
diff --git a/packages/trading/tests/exchange_data/ticker/test_ticker_cache.py b/packages/trading/tests/exchange_data/ticker/test_ticker_cache.py
new file mode 100644
index 000000000..333fa1baf
--- /dev/null
+++ b/packages/trading/tests/exchange_data/ticker/test_ticker_cache.py
@@ -0,0 +1,110 @@
+# Drakkar-Software OctoBot-Trading
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+
+import mock
+import pytest
+
+import octobot_commons.constants
+import octobot_commons.symbols
+import octobot_trading.exchange_data as exchange_data
+
+
+@pytest.fixture
+def ticker_cache():
+ cache = exchange_data.TickerCache(ttl=3600, maxsize=50)
+ yield cache
+ cache.reset_all_tickers_cache()
+
+
+SPOT_TICKERS = {
+ "BTC/USDT": mock.Mock(),
+ "ETH/USDT": mock.Mock(),
+ "SOL/USDT": mock.Mock(),
+}
+
+FUTURES_TICKERS = {
+ "BTC/USDT:USDT": mock.Mock(),
+ "ETH/USDT:USDT": mock.Mock(),
+ "SOL/USD:SOL": mock.Mock(),
+}
+
+
+def test_is_valid_symbol(ticker_cache):
+ ticker_cache.set_all_tickers("binance", "spot", False, SPOT_TICKERS)
+ assert ticker_cache.is_valid_symbol("binance", "spot", False, "BTC/USDT") is True
+ assert ticker_cache.is_valid_symbol("binance", "spot", False, "BTC2/USDT") is False
+ assert ticker_cache.is_valid_symbol("binance", "futures", False, "BTC/USDT:USDT") is False
+ ticker_cache.set_all_tickers("binance", "futures", False, FUTURES_TICKERS)
+ assert ticker_cache.is_valid_symbol("binance", "futures", False, "BTC/USDT:USDT") is True
+ ticker_cache.reset_all_tickers_cache()
+ assert ticker_cache.is_valid_symbol("binance", "futures", False, "BTC/USDT:USDT") is False
+
+
+def test_get_all_tickers(ticker_cache):
+ assert ticker_cache.get_all_tickers("binance", "spot", False) is None
+ assert ticker_cache.get_all_tickers("binance", "spot", False, "default") == "default"
+ ticker_cache.set_all_tickers("binance", "spot", False, SPOT_TICKERS)
+ assert ticker_cache.get_all_tickers("binance", "spot", False) == SPOT_TICKERS
+ assert ticker_cache.get_all_tickers("binance", "spot", True) is None
+ assert ticker_cache.get_all_tickers("binance", octobot_commons.constants.CONFIG_EXCHANGE_FUTURE, False) is None
+
+
+def test_has_ticker_data(ticker_cache):
+ assert ticker_cache.has_ticker_data("binance", "spot", False) is False
+ ticker_cache.set_all_tickers("binance", "spot", False, SPOT_TICKERS)
+ assert ticker_cache.has_ticker_data("binance", "spot", False) is True
+ assert ticker_cache.has_ticker_data("binance", "spot", True) is False
+
+ ticker_cache.reset_all_tickers_cache()
+ assert ticker_cache.has_ticker_data("binance", "spot", False) is False
+
+
+def test_get_all_parsed_symbols_by_merged_symbols(ticker_cache):
+ assert ticker_cache.get_all_parsed_symbols_by_merged_symbols("binance", "spot", False) is None
+ ticker_cache.set_all_tickers("binance", "spot", False, SPOT_TICKERS)
+ assert ticker_cache.get_all_parsed_symbols_by_merged_symbols("binance", "spot", False) == {
+ "BTCUSDT": octobot_commons.symbols.parse_symbol("BTC/USDT"),
+ "ETHUSDT": octobot_commons.symbols.parse_symbol("ETH/USDT"),
+ "SOLUSDT": octobot_commons.symbols.parse_symbol("SOL/USDT"),
+ }
+ assert ticker_cache.get_all_parsed_symbols_by_merged_symbols(
+ "binance", octobot_commons.constants.CONFIG_EXCHANGE_FUTURE, False
+ ) is None
+
+ ticker_cache.set_all_tickers(
+ "binance", octobot_commons.constants.CONFIG_EXCHANGE_FUTURE, False, FUTURES_TICKERS
+ )
+ assert ticker_cache.get_all_parsed_symbols_by_merged_symbols(
+ "binance", octobot_commons.constants.CONFIG_EXCHANGE_FUTURE, False
+ ) == {
+ "BTCUSDT": octobot_commons.symbols.parse_symbol("BTC/USDT:USDT"),
+ "BTCUSDT:USDT": octobot_commons.symbols.parse_symbol("BTC/USDT:USDT"),
+ "ETHUSDT": octobot_commons.symbols.parse_symbol("ETH/USDT:USDT"),
+ "ETHUSDT:USDT": octobot_commons.symbols.parse_symbol("ETH/USDT:USDT"),
+ "SOLUSD": octobot_commons.symbols.parse_symbol("SOL/USD:SOL"),
+ "SOLUSD:SOL": octobot_commons.symbols.parse_symbol("SOL/USD:SOL"),
+ }
+
+ assert ticker_cache.get_all_parsed_symbols_by_merged_symbols(
+ "binance", octobot_commons.constants.CONFIG_EXCHANGE_FUTURE, True
+ ) is None
+
+
+def test_get_exchange_key():
+ assert exchange_data.TickerCache.get_exchange_key("binance", "spot", True) == "binance_spot_True"
+ assert exchange_data.TickerCache.get_exchange_key("binance", "spot", False) == "binance_spot_False"
+ assert exchange_data.TickerCache.get_exchange_key("binance", "future", False) == "binance_future_False"
+ assert exchange_data.TickerCache.get_exchange_key("okx", "future", False) == "okx_future_False"
diff --git a/packages/trading/tests/exchanges/__init__.py b/packages/trading/tests/exchanges/__init__.py
index 556a8b150..d44d3e9e8 100644
--- a/packages/trading/tests/exchanges/__init__.py
+++ b/packages/trading/tests/exchanges/__init__.py
@@ -26,6 +26,7 @@
import octobot_backtesting.time as backtesting_time
from octobot_commons.asyncio_tools import wait_asyncio_next_cycle
from octobot_commons.enums import TimeFrames
+import octobot_trading.constants
import octobot_trading.exchanges.connectors.ccxt.ccxt_clients_cache as ccxt_clients_cache
from octobot_commons.tests.test_config import load_test_config
@@ -414,9 +415,15 @@ async def cached_markets_exchange_manager(config, exchange_name, exchange_only=F
def register_market_status_mocks(exchange_name):
+ cached_client = ccxt_client_util.ccxt_exchange_class_factory(exchange_name)()
+ client_key = ccxt_clients_cache.get_client_key(cached_client, False)
+ # save markets in cache
ccxt_clients_cache.set_exchange_parsed_markets(
- ccxt_clients_cache.get_client_key(
- ccxt_client_util.ccxt_exchange_class_factory(exchange_name)(), False
- ),
- mock_exchanges_data.MOCKED_EXCHANGE_SYMBOL_DETAILS[exchange_name]
+ client_key, mock_exchanges_data.MOCKED_EXCHANGE_SYMBOL_DETAILS[exchange_name]
)
+ with mock.patch.object(octobot_trading.constants, "USE_CCXT_SHARED_MARKETS_CACHE", False):
+ # apply markets from cache
+ ccxt_clients_cache.apply_exchange_markets_cache(client_key, cached_client)
+ # register cached_client as the cached shared-markets exchange for this key
+ ccxt_clients_cache.set_cached_shared_markets_exchange(client_key, cached_client)
+
diff --git a/packages/trading/tests/exchanges/market_filters/test_market_filter_factory.py b/packages/trading/tests/exchanges/market_filters/test_market_filter_factory.py
new file mode 100644
index 000000000..b2cc906c3
--- /dev/null
+++ b/packages/trading/tests/exchanges/market_filters/test_market_filter_factory.py
@@ -0,0 +1,140 @@
+# Drakkar-Software OctoBot-Trading
+# Copyright (c) Drakkar-Software, All rights reserved.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3.0 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library.
+import octobot_commons.symbols as commons_symbols
+import octobot_trading.constants as trading_constants
+import octobot_trading.enums as trading_enums
+import octobot_trading.exchanges as exchanges
+import octobot_trading.exchanges.util.exchange_data as exchange_data_import
+
+
+def _market(base, quote, m_type):
+ return {
+ trading_enums.ExchangeConstantsMarketStatusColumns.SYMBOL.value: commons_symbols.merge_currencies(
+ base, quote
+ ),
+ trading_enums.ExchangeConstantsMarketStatusColumns.CURRENCY.value: base,
+ trading_enums.ExchangeConstantsMarketStatusColumns.MARKET.value: quote,
+ trading_enums.ExchangeConstantsMarketStatusColumns.TYPE.value: m_type,
+ }
+
+
+def _get_market_symbols(markets):
+ return [
+ commons_symbols.merge_currencies(
+ m[trading_enums.ExchangeConstantsMarketStatusColumns.CURRENCY.value],
+ m[trading_enums.ExchangeConstantsMarketStatusColumns.MARKET.value],
+ )
+ for m in markets
+ ]
+
+
+MARKETS = [
+ _market(base, quote, m_type)
+ for base, quote, m_type in [
+ ("BTC", "USDT", trading_enums.ExchangeTypes.SPOT.value),
+ ("BTC", "USDC", trading_enums.ExchangeTypes.SPOT.value),
+ ("ETH", "USDT", trading_enums.ExchangeTypes.SPOT.value),
+ ("USDC", "USDT", trading_enums.ExchangeTypes.SPOT.value),
+ ("ETH", "BTC", trading_enums.ExchangeTypes.SPOT.value),
+ ("DAI", "USDT", trading_enums.ExchangeTypes.SPOT.value),
+ ("DAI", "BUSD", trading_enums.ExchangeTypes.SPOT.value),
+ ("ZEC", "ETH", trading_enums.ExchangeTypes.SPOT.value),
+ ("ZEC", "BTC", trading_enums.ExchangeTypes.SPOT.value),
+ ("USDT", "BNB", trading_enums.ExchangeTypes.SPOT.value),
+ ("XBY", "DAI", trading_enums.ExchangeTypes.SPOT.value),
+ ("NANO", "JPUSD", trading_enums.ExchangeTypes.SPOT.value),
+ ("NANO", "USDT", trading_enums.ExchangeTypes.SPOT.value),
+ ]
+]
+
+
+def test_create_market_filter():
+ empty_exchange_data = exchange_data_import.ExchangeData()
+
+ assert _get_market_symbols(
+ [m for m in MARKETS if exchanges.create_market_filter(empty_exchange_data, "BTC")(m)]
+ ) == ['BTC/USDT', 'BTC/USDC', 'USDC/USDT', 'ETH/BTC', 'DAI/USDT', 'DAI/BUSD', 'ZEC/BTC', 'USDT/BNB']
+
+ assert _get_market_symbols(
+ [m for m in MARKETS if exchanges.create_market_filter(empty_exchange_data, "USDT")(m)]
+ ) == ['BTC/USDT', 'ETH/USDT', 'USDC/USDT', 'DAI/USDT', 'DAI/BUSD', 'USDT/BNB', 'NANO/USDT']
+
+ exchange_data_with_orders = exchange_data_import.ExchangeData()
+ exchange_data_with_orders.orders_details.open_orders = [
+ {
+ trading_constants.STORAGE_ORIGIN_VALUE: {
+ trading_enums.ExchangeConstantsOrderColumns.SYMBOL.value: "XBY/DAI",
+ }
+ }
+ ]
+ exchange_data_with_orders.orders_details.missing_orders = [
+ {
+ trading_constants.STORAGE_ORIGIN_VALUE: {
+ trading_enums.ExchangeConstantsOrderColumns.SYMBOL.value: "NANO/JPUSD",
+ }
+ }
+ ]
+
+ assert _get_market_symbols(
+ [m for m in MARKETS if exchanges.create_market_filter(exchange_data_with_orders, "USDT")(m)]
+ ) == [
+ 'BTC/USDT', 'ETH/USDT', 'USDC/USDT', 'DAI/USDT', 'DAI/BUSD',
+ 'USDT/BNB', "XBY/DAI", "NANO/JPUSD", "NANO/USDT",
+ ]
+
+ assert _get_market_symbols(
+ [
+ m
+ for m in MARKETS
+ if exchanges.create_market_filter(
+ exchange_data_with_orders,
+ "USDT",
+ to_keep_symbols={"ZEC/BTC"},
+ )(m)
+ ]
+ ) == [
+ 'BTC/USDT', 'ETH/USDT', 'USDC/USDT', 'DAI/USDT', 'DAI/BUSD',
+ 'ZEC/BTC', 'USDT/BNB', "XBY/DAI", "NANO/JPUSD", "NANO/USDT",
+ ]
+
+ assert _get_market_symbols(
+ [
+ m
+ for m in MARKETS
+ if exchanges.create_market_filter(
+ exchange_data_with_orders,
+ "USDT",
+ to_keep_symbols={"ZEC/BTC"},
+ to_keep_quotes={"USDC"},
+ )(m)
+ ]
+ ) == [
+ 'BTC/USDT', 'BTC/USDC', 'ETH/USDT', 'USDC/USDT', 'DAI/USDT', 'DAI/BUSD',
+ 'ZEC/BTC', 'USDT/BNB', "XBY/DAI", "NANO/JPUSD", "NANO/USDT",
+ ]
+
+ exchange_data_with_markets = exchange_data_import.ExchangeData()
+ exchange_data_with_markets.markets = [
+ exchange_data_import.MarketDetails(symbol="ZEC/BTC"),
+ ]
+
+ assert _get_market_symbols(
+ [m for m in MARKETS if exchanges.create_market_filter(exchange_data_with_markets, "USDT")(m)]
+ ) == [
+ 'BTC/USDT', 'ETH/USDT', 'USDC/USDT', 'DAI/USDT', 'DAI/BUSD',
+ 'ZEC/BTC', # from markets
+ 'USDT/BNB', "NANO/USDT",
+ ]
diff --git a/packages/trading/tests/personal_data/orders/test_order_factory.py b/packages/trading/tests/personal_data/orders/test_order_factory.py
index d1760ba8a..b7c3da61c 100644
--- a/packages/trading/tests/personal_data/orders/test_order_factory.py
+++ b/packages/trading/tests/personal_data/orders/test_order_factory.py
@@ -14,18 +14,30 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
import decimal
+import mock
import pytest
from octobot_commons.tests.test_config import load_test_config
from tests import event_loop
import octobot_trading.personal_data as personal_data
+import octobot_trading.modes.script_keywords as script_keywords
+import octobot_trading.personal_data.orders.order_factory as order_factory_module
import octobot_trading.storage.orders_storage as orders_storage
+import octobot_trading.constants as constants
import octobot_trading.enums as enums
-from octobot_trading.enums import TradeOrderSide, TradeOrderType, TraderOrderType, StoredOrdersAttr
+import octobot_trading.errors as trading_errors
+from octobot_trading.enums import (
+ ExchangeConstantsMarketStatusColumns as Ecmsc,
+ TradeOrderSide,
+ TradeOrderType,
+ TraderOrderType,
+ StoredOrdersAttr,
+)
from octobot_trading.exchanges.exchange_manager import ExchangeManager
from octobot_trading.exchanges.traders.trader_simulator import TraderSimulator
from octobot_trading.api.exchange import cancel_ccxt_throttle_task
+from tests.personal_data.orders import created_order
pytestmark = pytest.mark.asyncio
@@ -406,3 +418,390 @@ async def test_create_order_from_order_storage_details_with_chained_orders_with_
assert second_level_chained_orders[0].trailing_profile is None
assert second_level_chained_orders[0].cancel_policy is None
await self.stop(exchange_manager)
+
+
+def _symbol_market():
+ return {
+ Ecmsc.LIMITS.value: {
+ Ecmsc.LIMITS_AMOUNT.value: {
+ Ecmsc.LIMITS_AMOUNT_MIN.value: 0.5,
+ Ecmsc.LIMITS_AMOUNT_MAX.value: 1000,
+ },
+ Ecmsc.LIMITS_COST.value: {
+ Ecmsc.LIMITS_COST_MIN.value: 1,
+ Ecmsc.LIMITS_COST_MAX.value: 2000000000,
+ },
+ Ecmsc.LIMITS_PRICE.value: {
+ Ecmsc.LIMITS_PRICE_MIN.value: 0.5,
+ Ecmsc.LIMITS_PRICE_MAX.value: 5000000,
+ },
+ },
+ Ecmsc.PRECISION.value: {
+ Ecmsc.PRECISION_PRICE.value: 8,
+ Ecmsc.PRECISION_AMOUNT.value: 8,
+ },
+ }
+
+
+class TestOrderFactoryClass:
+ DEFAULT_SYMBOL = "BTC/USDT"
+ EXCHANGE_MANAGER_CLASS_STRING = "binanceus"
+
+ @staticmethod
+ async def init_default():
+ config = load_test_config()
+ exchange_manager = ExchangeManager(config, TestOrderFactoryClass.EXCHANGE_MANAGER_CLASS_STRING)
+ await exchange_manager.initialize(exchange_config_by_exchange=None)
+ trader = TraderSimulator(config, exchange_manager)
+ await trader.initialize()
+ return config, exchange_manager, trader
+
+ @staticmethod
+ async def stop(exchange_manager):
+ cancel_ccxt_throttle_task()
+ await exchange_manager.stop()
+
+ def test_order_factory_validate_raises_when_exchange_manager_none(self):
+ factory = order_factory_module.OrderFactory(None, None, None, False, False)
+ with pytest.raises(ValueError) as exc_info:
+ factory.validate()
+ assert "exchange_manager is required" in str(exc_info.value)
+
+ def test_order_factory_validate_succeeds_when_exchange_manager_set(self):
+ exchange_manager = mock.Mock()
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ factory.validate()
+
+ def test_order_factory_get_validated_amounts_and_prices_returns_adapted(self):
+ exchange_manager = mock.Mock(exchange_name="test_exchange")
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ symbol_market = _symbol_market()
+ amount = decimal.Decimal("1")
+ price = decimal.Decimal("2")
+ result = factory._get_validated_amounts_and_prices(
+ "BTC/USDT", amount, price, symbol_market
+ )
+ assert result == [(decimal.Decimal("1"), decimal.Decimal("2"))]
+
+ def test_order_factory_get_validated_amounts_and_prices_raises_min_amount(self):
+ exchange_manager = mock.Mock(exchange_name="test_exchange")
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ symbol_market = _symbol_market()
+ amount = decimal.Decimal("0.1")
+ price = decimal.Decimal("100")
+ with pytest.raises(trading_errors.MissingMinimalExchangeTradeVolume) as exc_info:
+ factory._get_validated_amounts_and_prices(
+ "BTC/USDT", amount, price, symbol_market
+ )
+ assert "too small" in str(exc_info.value)
+ assert "amount" in str(exc_info.value).lower()
+
+ def test_order_factory_get_validated_amounts_and_prices_raises_min_cost(self):
+ exchange_manager = mock.Mock(exchange_name="test_exchange")
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ symbol_market = _symbol_market()
+ amount = decimal.Decimal("0.5")
+ price = decimal.Decimal("1")
+ with pytest.raises(trading_errors.MissingMinimalExchangeTradeVolume) as exc_info:
+ factory._get_validated_amounts_and_prices(
+ "BTC/USDT", amount, price, symbol_market
+ )
+ assert "cost" in str(exc_info.value).lower()
+
+ def test_order_factory_ensure_supported_order_type_raises(self):
+ exchange = mock.Mock(is_supported_order_type=mock.Mock(return_value=False))
+ exchange_manager = mock.Mock(exchange=exchange, exchange_name="test_exchange")
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ with pytest.raises(trading_errors.NotSupportedOrderTypeError) as exc_info:
+ factory._ensure_supported_order_type(enums.TraderOrderType.STOP_LOSS)
+ assert exc_info.value.order_type == enums.TraderOrderType.STOP_LOSS
+
+ def test_order_factory_ensure_supported_order_type_succeeds(self):
+ exchange = mock.Mock(is_supported_order_type=mock.Mock(return_value=True))
+ exchange_manager = mock.Mock(exchange=exchange, exchange_name="test_exchange")
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ factory._ensure_supported_order_type(enums.TraderOrderType.STOP_LOSS)
+
+ @pytest.mark.asyncio
+ async def test_order_factory_get_computed_price(self):
+ exchange_manager = mock.Mock(exchange_name="test_exchange")
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ ctx = mock.Mock()
+ order_price = "50000"
+ expected_price = decimal.Decimal("50100")
+ mock_get_price = mock.AsyncMock(return_value=expected_price)
+ with mock.patch.object(script_keywords, "get_price_with_offset", mock_get_price):
+ result = await factory._get_computed_price(ctx, order_price)
+ assert result == expected_price
+ mock_get_price.assert_called_once_with(
+ ctx, order_price, use_delta_type_as_flat_value=True
+ )
+
+ @pytest.mark.asyncio
+ async def test_order_factory_get_computed_quantity_returns_zero_for_empty_input(self):
+ exchange_manager = mock.Mock()
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ ctx = mock.Mock()
+ for input_amount in ("", "0"):
+ result = await factory._get_computed_quantity(
+ ctx, input_amount,
+ enums.TradeOrderSide.BUY,
+ decimal.Decimal("50000"),
+ reduce_only=False,
+ allow_holdings_adaptation=False,
+ )
+ assert result == constants.ZERO
+
+ @pytest.mark.asyncio
+ async def test_order_factory_get_computed_quantity_delegates_to_get_amount_from_input_amount(self):
+ exchange_manager = mock.Mock()
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ ctx = mock.Mock()
+ expected_amount = decimal.Decimal("2")
+ mock_get_amount = mock.AsyncMock(return_value=expected_amount)
+ with mock.patch.object(script_keywords, "get_amount_from_input_amount", mock_get_amount):
+ result = await factory._get_computed_quantity(
+ ctx, "2", enums.TradeOrderSide.BUY, decimal.Decimal("50000"),
+ reduce_only=False, allow_holdings_adaptation=False,
+ )
+ assert result == expected_amount
+ mock_get_amount.assert_called_once_with(
+ context=ctx,
+ input_amount="2",
+ side="buy",
+ reduce_only=False,
+ is_stop_order=False,
+ use_total_holding=False,
+ target_price=decimal.Decimal("50000"),
+ allow_holdings_adaptation=False,
+ )
+
+ @pytest.mark.asyncio
+ async def test_order_factory_create_stop_orders_early_return_when_no_stop_loss_price(self):
+ exchange_manager = mock.Mock()
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ ctx = mock.Mock()
+ base_order = mock.Mock()
+ symbol_market = _symbol_market()
+ params = {}
+ chained_orders = []
+ await factory._create_stop_orders(ctx, base_order, symbol_market, params, chained_orders)
+ assert chained_orders == []
+ assert params == {}
+
+ @pytest.mark.asyncio
+ async def test_order_factory_create_stop_orders_creates_chained_order_when_stop_loss_price_set(self):
+ _, exchange_manager, trader_inst = await self.init_default()
+ try:
+ base_order = created_order(
+ personal_data.BuyLimitOrder, enums.TraderOrderType.BUY_LIMIT, trader_inst,
+ )
+ base_order.update(
+ order_type=enums.TraderOrderType.BUY_LIMIT,
+ symbol=self.DEFAULT_SYMBOL,
+ current_price=decimal.Decimal("50000"),
+ quantity=decimal.Decimal("1"),
+ price=decimal.Decimal("50000"),
+ )
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ ctx = script_keywords.get_base_context_from_exchange_manager(exchange_manager, self.DEFAULT_SYMBOL)
+ symbol_market = _symbol_market()
+ params = {}
+ chained_orders = []
+ stop_loss_price = decimal.Decimal("45000")
+ adapted_price = decimal.Decimal("45000")
+ with mock.patch.object(
+ exchange_manager.exchange, "is_supported_order_type", mock.Mock(return_value=True)
+ ), mock.patch.object(factory, "_get_computed_price", mock.AsyncMock(return_value=adapted_price)):
+ await factory._create_stop_orders(
+ ctx, base_order, symbol_market, params, chained_orders,
+ stop_loss_price=stop_loss_price,
+ )
+ assert len(chained_orders) == 1
+ chained_order = chained_orders[0]
+ assert chained_order.order_type == enums.TraderOrderType.STOP_LOSS
+ assert chained_order.side == enums.TradeOrderSide.SELL
+ assert chained_order.origin_quantity == base_order.origin_quantity
+ assert chained_order.origin_price == adapted_price
+ assert chained_order.symbol == self.DEFAULT_SYMBOL
+ finally:
+ await self.stop(exchange_manager)
+
+ @pytest.mark.asyncio
+ async def test_order_factory_create_take_profit_orders_early_return_when_no_take_profit_prices(self):
+ exchange_manager = mock.Mock()
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ ctx = mock.Mock()
+ base_order = mock.Mock()
+ symbol_market = _symbol_market()
+ params = {}
+ chained_orders = []
+ await factory._create_take_profit_orders(ctx, base_order, symbol_market, params, chained_orders)
+ assert chained_orders == []
+ assert params == {}
+
+ @pytest.mark.asyncio
+ async def test_order_factory_create_take_profit_orders_invalid_volume_percents(self):
+ exchange_manager = mock.Mock(exchange_name="test_exchange")
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ ctx = mock.Mock()
+ base_order = mock.Mock(side=enums.TradeOrderSide.BUY, origin_quantity=decimal.Decimal("1"), tag="tag")
+ symbol_market = _symbol_market()
+ params = {}
+ chained_orders = []
+ take_profit_prices = [decimal.Decimal("1"), decimal.Decimal("2")]
+ take_profit_volume_percents = [decimal.Decimal("50")]
+ with pytest.raises(trading_errors.InvalidArgumentError) as exc_info:
+ await factory._create_take_profit_orders(
+ ctx, base_order, symbol_market, params, chained_orders,
+ take_profit_prices=take_profit_prices,
+ take_profit_volume_percents=take_profit_volume_percents,
+ )
+ assert "take profit volume percents" in str(exc_info.value)
+
+ def test_order_factory_create_active_order_swap_strategy(self):
+ exchange_manager = mock.Mock()
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ strategy_type = "StopFirstActiveOrderSwapStrategy"
+ strategy_params = {"swap_timeout": 123}
+ result_strategy = mock.Mock()
+ with mock.patch.object(
+ personal_data, "create_active_order_swap_strategy",
+ return_value=result_strategy,
+ ) as create_mock:
+ result = factory._create_active_order_swap_strategy(strategy_type, strategy_params)
+ assert result is result_strategy
+ create_mock.assert_called_once_with(strategy_type, **strategy_params)
+
+ def test_order_factory_create_active_order_swap_strategy_with_none_params(self):
+ exchange_manager = mock.Mock()
+ factory = order_factory_module.OrderFactory(exchange_manager, None, None, False, False)
+ result_strategy = mock.Mock()
+ with mock.patch.object(
+ personal_data, "create_active_order_swap_strategy",
+ return_value=result_strategy,
+ ) as create_mock:
+ result = factory._create_active_order_swap_strategy("SomeStrategy", None)
+ assert result is result_strategy
+ create_mock.assert_called_once_with("SomeStrategy", **{})
+
+ @pytest.mark.asyncio
+ async def test_order_factory_create_order_on_exchange_with_trading_mode(self):
+ order = mock.Mock()
+ created_order = mock.Mock()
+ trading_mode = mock.Mock()
+ trading_mode.create_order = mock.AsyncMock(return_value=created_order)
+ exchange_manager = mock.Mock()
+ dependencies = mock.Mock()
+ factory = order_factory_module.OrderFactory(
+ exchange_manager, trading_mode, dependencies, wait_for_creation=True, try_to_handle_unconfigured_symbol=False
+ )
+ result = await factory.create_order_on_exchange(order)
+ assert result is created_order
+ trading_mode.create_order.assert_called_once_with(
+ order, dependencies=dependencies, wait_for_creation=True
+ )
+
+ @pytest.mark.asyncio
+ async def test_order_factory_create_order_on_exchange_without_trading_mode(self):
+ order = mock.Mock()
+ created_order = mock.Mock()
+ trader = mock.Mock()
+ trader.create_order = mock.AsyncMock(return_value=created_order)
+ exchange_manager = mock.Mock(trader=trader)
+ factory = order_factory_module.OrderFactory(
+ exchange_manager, None, None, wait_for_creation=False, try_to_handle_unconfigured_symbol=False
+ )
+ result = await factory.create_order_on_exchange(order)
+ assert result is created_order
+ trader.create_order.assert_called_once_with(order, wait_for_creation=False)
+
+ @pytest.mark.asyncio
+ async def test_order_factory_create_base_orders_unsupported_symbol(self):
+ _, exchange_manager, _ = await self.init_default()
+ try:
+ factory = order_factory_module.OrderFactory(
+ exchange_manager, None, None, False, try_to_handle_unconfigured_symbol=False
+ )
+ with pytest.raises(trading_errors.UnSupportedSymbolError) as exc_info:
+ await factory.create_base_orders_and_associated_elements(
+ enums.TraderOrderType.BUY_MARKET,
+ "INVALID/XYZ",
+ enums.TradeOrderSide.BUY,
+ "1",
+ )
+ assert "INVALID/XYZ" in str(exc_info.value) or "not found" in str(exc_info.value).lower()
+ finally:
+ await self.stop(exchange_manager)
+
+ @pytest.mark.asyncio
+ async def test_order_factory_create_base_orders_try_to_handle_unconfigured_symbol(self):
+ _, exchange_manager, _ = await self.init_default()
+ try:
+ factory = order_factory_module.OrderFactory(
+ exchange_manager, None, None, False, try_to_handle_unconfigured_symbol=True
+ )
+ with pytest.raises(NotImplementedError) as exc_info:
+ await factory.create_base_orders_and_associated_elements(
+ enums.TraderOrderType.BUY_MARKET,
+ "INVALID/XYZ",
+ enums.TradeOrderSide.BUY,
+ "1",
+ )
+ assert "try_to_handle_unconfigured_symbol" in str(exc_info.value)
+ finally:
+ await self.stop(exchange_manager)
+
+ @pytest.mark.asyncio
+ async def test_order_factory_create_base_orders_and_associated_elements_success(self):
+ _, real_exchange_manager, _ = await self.init_default()
+ try:
+ symbol = "BTC/USDT"
+ symbol_market = _symbol_market()
+ current_price = decimal.Decimal("50000")
+ portfolio_manager = real_exchange_manager.exchange_personal_data.portfolio_manager
+ portfolio_manager.portfolio.portfolio["USDT"] = personal_data.SpotAsset(
+ name="USDT",
+ available=decimal.Decimal("100000"),
+ total=decimal.Decimal("100000"),
+ )
+ symbol_data = mock.Mock()
+ symbol_data.prices_manager.get_mark_price = mock.AsyncMock(return_value=float(current_price))
+ exchange_symbols_data = mock.Mock(
+ exchange_symbol_data={symbol: symbol_data},
+ get_exchange_symbol_data=mock.Mock(return_value=symbol_data),
+ )
+ exchange = mock.Mock(
+ get_market_status=mock.Mock(return_value=symbol_market),
+ get_exchange_current_time=mock.Mock(return_value=1234567890),
+ )
+ exchange_manager = mock.Mock(
+ bot_id=None,
+ is_margin=False,
+ exchange=exchange,
+ exchange_symbols_data=exchange_symbols_data,
+ trader=real_exchange_manager.trader,
+ exchange_name=real_exchange_manager.exchange_name,
+ is_future=real_exchange_manager.is_future,
+ logger=real_exchange_manager.logger,
+ exchange_personal_data=real_exchange_manager.exchange_personal_data,
+ exchange_config=real_exchange_manager.exchange_config,
+ )
+ factory = order_factory_module.OrderFactory(
+ exchange_manager, None, None, False, try_to_handle_unconfigured_symbol=False
+ )
+ result = await factory.create_base_orders_and_associated_elements(
+ enums.TraderOrderType.BUY_MARKET,
+ symbol,
+ enums.TradeOrderSide.BUY,
+ "1",
+ )
+ assert len(result) == 1
+ base_order = result[0]
+ assert base_order.order_type == enums.TraderOrderType.BUY_MARKET
+ assert base_order.symbol == symbol
+ assert base_order.origin_quantity == decimal.Decimal("1")
+ assert base_order.origin_price == decimal.Decimal("50000")
+ assert base_order.side == enums.TradeOrderSide.BUY
+ finally:
+ await self.stop(real_exchange_manager)
diff --git a/packages/trading/tests/personal_data/portfolios/test_portfolio_value_holder.py b/packages/trading/tests/personal_data/portfolios/test_portfolio_value_holder.py
index 0db51c657..a9dc7487a 100644
--- a/packages/trading/tests/personal_data/portfolios/test_portfolio_value_holder.py
+++ b/packages/trading/tests/personal_data/portfolios/test_portfolio_value_holder.py
@@ -166,7 +166,7 @@ def mock_create_symbol_position(symbol, position_id):
portfolio_value_holder.value_converter.last_prices_by_trading_pair["ETH/BTC"] = decimal.Decimal("50")
# Update current_crypto_currencies_values to include ETH with the calculated price
portfolio_value_holder.current_crypto_currencies_values["ETH"] = decimal.Decimal("50")
- portfolio_value_holder.sync_portfolio_current_value_using_available_currencies_values(init_price_fetchers=False)
+ portfolio_value_holder._sync_portfolio_current_value_using_available_currencies_values(init_price_fetchers=False)
assert portfolio_value_holder.get_current_holdings_values() == {
'BTC': decimal.Decimal("10"),
'ETH': decimal.Decimal("5000"),
@@ -255,19 +255,19 @@ async def test_update_origin_crypto_currencies_values(backtesting_trader):
is False
@pytest.mark.parametrize("backtesting_exchange_manager", ["spot", "margin", "futures", "options"], indirect=True)
-async def test_sync_portfolio_current_value_using_available_currencies_values(backtesting_trader):
+async def test__sync_portfolio_current_value_using_available_currencies_values(backtesting_trader):
config, exchange_manager, trader = backtesting_trader
portfolio_manager = exchange_manager.exchange_personal_data.portfolio_manager
portfolio_value_holder = portfolio_manager.portfolio_value_holder
assert portfolio_value_holder.portfolio_current_value == constants.ZERO
- portfolio_value_holder.sync_portfolio_current_value_using_available_currencies_values()
+ portfolio_value_holder._sync_portfolio_current_value_using_available_currencies_values()
assert portfolio_value_holder.portfolio_current_value == decimal.Decimal(str(10))
portfolio_value_holder.value_converter.missing_currency_data_in_exchange.clear()
exchange_manager.client_symbols.append("BTC/USDT")
portfolio_manager.handle_mark_price_update("BTC/USDT", decimal.Decimal(str(100)))
- portfolio_value_holder.sync_portfolio_current_value_using_available_currencies_values()
+ portfolio_value_holder._sync_portfolio_current_value_using_available_currencies_values()
assert portfolio_value_holder.portfolio_current_value == decimal.Decimal(str(20)) # now includes USDT
@pytest.mark.parametrize("backtesting_exchange_manager", ["spot", "futures"], indirect=True)
@@ -564,7 +564,7 @@ async def test_get_holdings_ratio_from_portfolio(backtesting_trader, currency, t
exchange_manager.client_symbols.append("BTC/USDT")
portfolio_value_holder.value_converter.last_prices_by_trading_pair["BTC/USDT"] = decimal.Decimal("1000")
portfolio_value_holder.value_converter.missing_currency_data_in_exchange.discard("USDT")
- portfolio_value_holder.sync_portfolio_current_value_using_available_currencies_values(init_price_fetchers=False)
+ portfolio_value_holder._sync_portfolio_current_value_using_available_currencies_values(init_price_fetchers=False)
result = portfolio_value_holder._get_holdings_ratio_from_portfolio(
currency, traded_symbols_only, include_assets_in_open_orders, coins_whitelist
@@ -598,7 +598,7 @@ async def test_get_total_holdings_value(backtesting_trader, coins_whitelist, tra
exchange_manager.client_symbols.append("BTC/USDT")
portfolio_value_holder.value_converter.last_prices_by_trading_pair["BTC/USDT"] = decimal.Decimal("1000")
portfolio_value_holder.value_converter.missing_currency_data_in_exchange.discard("USDT")
- portfolio_value_holder.sync_portfolio_current_value_using_available_currencies_values(init_price_fetchers=False)
+ portfolio_value_holder._sync_portfolio_current_value_using_available_currencies_values(init_price_fetchers=False)
result = portfolio_value_holder._get_total_holdings_value(coins_whitelist, traded_symbols_only)
diff --git a/pants.toml b/pants.toml
index 3e86af055..6169a4484 100644
--- a/pants.toml
+++ b/pants.toml
@@ -24,6 +24,7 @@ root_patterns = [
"/packages/commons",
"/packages/evaluators",
"/packages/node",
+ "/packages/flow",
"/packages/services",
"/packages/tentacles_manager",
"/packages/trading",