From ca655862862a9c990bfe5678143f130b86d32392 Mon Sep 17 00:00:00 2001 From: Peter Solnica Date: Sun, 6 Jul 2025 20:01:14 +0000 Subject: [PATCH 1/3] Fix handling of context in Telemetry --- lib/drops/operations/extensions/telemetry.ex | 34 +++- .../operations/extensions/telemetry_test.exs | 149 +++++++++++++++++- 2 files changed, 174 insertions(+), 9 deletions(-) diff --git a/lib/drops/operations/extensions/telemetry.ex b/lib/drops/operations/extensions/telemetry.ex index 86193dd..c5cf661 100644 --- a/lib/drops/operations/extensions/telemetry.ex +++ b/lib/drops/operations/extensions/telemetry.ex @@ -398,6 +398,7 @@ defmodule Drops.Operations.Extensions.Telemetry do def emit_operation_stop(operation_module, step, context, result, config) do {actual_step, identifier} = config.original_config duration = Drops.Operations.Trace.total_duration(config.trace) || 0 + current_context = extract_current_context(context, result, actual_step) case result do {:ok, _} -> @@ -406,7 +407,7 @@ defmodule Drops.Operations.Extensions.Telemetry do :telemetry.execute( [identifier, :operation, :stop], %{duration: duration}, - %{operation: operation_module, step: actual_step, context: context} + %{operation: operation_module, step: actual_step, context: current_context} ) end @@ -418,7 +419,7 @@ defmodule Drops.Operations.Extensions.Telemetry do %{ operation: operation_module, step: actual_step, - context: context, + context: current_context, kind: :error, reason: reason, stacktrace: [] @@ -433,6 +434,25 @@ defmodule Drops.Operations.Extensions.Telemetry do List.last(operation_module.__unit_of_work__().step_order) == step end + # Extracts the most current context from step results for telemetry + # This ensures telemetry events show the actual state after step execution + defp extract_current_context(input_context, result, _step) do + case result do + {:ok, new_context} when is_map(new_context) -> + # For successful steps, use the updated context + new_context + + {:error, %Ecto.Changeset{} = changeset} -> + # For validation failures with changesets, use context with the invalid changeset + # This shows the actual validation errors instead of the pre-validation state + Map.put(input_context, :changeset, changeset) + + {:error, _reason} -> + # For other errors, use the original input context + input_context + end + end + @doc false def emit_step_start(operation_module, _step, context, config) do {actual_step, identifier} = config.original_config @@ -451,15 +471,15 @@ defmodule Drops.Operations.Extensions.Telemetry do @doc false def emit_step_stop(operation_module, _step, context, result, config) do {actual_step, identifier} = config.original_config - duration = config.trace.step_timings[actual_step][:duration] + current_context = extract_current_context(context, result, actual_step) case result do {:ok, _} -> :telemetry.execute( [identifier, :operation, :step, :stop], %{duration: duration}, - %{operation: operation_module, step: actual_step, context: context} + %{operation: operation_module, step: actual_step, context: current_context} ) {:error, reason} -> @@ -469,7 +489,7 @@ defmodule Drops.Operations.Extensions.Telemetry do %{ operation: operation_module, step: actual_step, - context: context, + context: current_context, kind: :error, reason: reason, stacktrace: [] @@ -483,8 +503,8 @@ defmodule Drops.Operations.Extensions.Telemetry do @doc false def emit_step_error(operation_module, _step, context, result, config) do {actual_step, identifier} = config.original_config - duration = 
config.trace.step_timings[actual_step][:duration] + current_context = extract_current_context(context, result, actual_step) case result do {:error, reason} -> @@ -494,7 +514,7 @@ defmodule Drops.Operations.Extensions.Telemetry do %{ operation: operation_module, step: actual_step, - context: context, + context: current_context, kind: :error, reason: reason, stacktrace: [] diff --git a/test/drops/operations/extensions/telemetry_test.exs b/test/drops/operations/extensions/telemetry_test.exs index d405a55..6d881b9 100644 --- a/test/drops/operations/extensions/telemetry_test.exs +++ b/test/drops/operations/extensions/telemetry_test.exs @@ -96,8 +96,11 @@ defmodule Drops.Operations.Extensions.TelemetryTest do assert %{duration: _} = measurements - assert %{operation: TestOperationDefault, step: :execute, context: ^context} = + # Context should contain the updated result from execute step + assert %{operation: TestOperationDefault, step: :execute, context: updated_context} = metadata + + assert updated_context == %{executed: true, name: "test"} end test "emits events for specific steps when configured" do @@ -616,8 +619,11 @@ defmodule Drops.Operations.Extensions.TelemetryTest do assert %{duration: _} = measurements - assert %{operation: TestOperationCustomId, step: :execute, context: ^context} = + # Context should contain the updated result from execute step + assert %{operation: TestOperationCustomId, step: :execute, context: updated_context} = metadata + + assert updated_context == %{executed: true, name: "test"} end test "emits events with custom identifier for specific steps" do @@ -1026,5 +1032,144 @@ defmodule Drops.Operations.Extensions.TelemetryTest do refute_receive {:telemetry_event, [:drops, :operation, :exception], _, _} refute_receive {:telemetry_event, [:drops, :operation, :step, :exception], _, _} end + + test "telemetry events contain updated context for successful steps" do + defmodule TestOperationSuccessContext do + use Drops.Operations.Command, telemetry: [steps: [:prepare]] + + steps do + def prepare(context) do + {:ok, Map.put(context, :prepared, true)} + end + + @impl true + def execute(context) do + {:ok, context} + end + end + end + + context = %{params: %{name: "test"}} + {:ok, _result} = TestOperationSuccessContext.call(context) + + # Should receive step stop event with updated context + assert_receive {:telemetry_event, [:drops, :operation, :step, :stop], _, + %{step: :prepare, context: received_context}} + + # Context should contain the update from the prepare step + assert received_context.prepared == true + assert received_context.params == %{name: "test"} + end + + @tag ecto_schemas: [Test.Ecto.TestSchemas.UserSchema] + test "telemetry events contain invalid changeset context for validation failures" do + defmodule TestOperationChangesetValidation do + use Drops.Operations.Command, + repo: Drops.TestRepo, + telemetry: [steps: [:validate]], + telemetry_step_errors: [:validate] + + schema(Test.Ecto.TestSchemas.UserSchema) + + steps do + @impl true + def execute(%{changeset: changeset}) do + case insert(changeset) do + {:ok, user} -> {:ok, %{name: user.name}} + {:error, changeset} -> {:error, changeset} + end + end + end + + @impl true + def validate_changeset(%{changeset: changeset}) do + changeset + |> Ecto.Changeset.validate_required([:email]) + |> Ecto.Changeset.validate_length(:email, min: 1, message: "can't be blank") + end + end + + # Use empty email to trigger validation failure + context = %{params: %{name: "Jane Doe", email: ""}} + {:error, _changeset} = 
TestOperationChangesetValidation.call(context) + + # Should receive step exception event with invalid changeset in context + assert_receive {:telemetry_event, [:drops, :operation, :step, :exception], _, + %{step: :validate, context: received_context}} + + # Context should contain the invalid changeset with errors + assert %Ecto.Changeset{} = received_context.changeset + assert received_context.changeset.valid? == false + assert received_context.changeset.errors[:email] + end + + @tag ecto_schemas: [Test.Ecto.TestSchemas.UserSchema] + test "telemetry events preserve original context for non-changeset errors" do + defmodule TestOperationSimpleError do + use Drops.Operations.Command, + telemetry: [steps: [:validate]], + telemetry_step_errors: [:validate] + + steps do + def validate(_context) do + {:error, "simple validation error"} + end + + @impl true + def execute(context) do + {:ok, context} + end + end + end + + context = %{params: %{name: "test"}, original_data: "preserved"} + {:error, _reason} = TestOperationSimpleError.call(context) + + # Should receive step exception event with original context preserved + assert_receive {:telemetry_event, [:drops, :operation, :step, :exception], _, + %{step: :validate, context: received_context}} + + # Context should be the original input context + assert received_context.params == %{name: "test"} + assert received_context.original_data == "preserved" + end + + @tag ecto_schemas: [Test.Ecto.TestSchemas.UserSchema] + test "step exception events contain updated context for changeset failures" do + defmodule TestOperationExceptionContext do + use Drops.Operations.Command, repo: Drops.TestRepo, telemetry: true + + schema(Test.Ecto.TestSchemas.UserSchema) + + steps do + @impl true + def execute(%{changeset: changeset}) do + case insert(changeset) do + {:ok, user} -> {:ok, %{name: user.name}} + {:error, changeset} -> {:error, changeset} + end + end + end + + @impl true + def validate_changeset(%{changeset: changeset}) do + changeset + |> Ecto.Changeset.validate_required([:email]) + |> Ecto.Changeset.validate_length(:email, min: 1, message: "can't be blank") + end + end + + context = %{params: %{name: "Jane Doe", email: ""}} + {:error, _reason} = TestOperationExceptionContext.call(context) + + # Should receive operation exception event with invalid changeset in context + assert_receive {:telemetry_event, [:drops, :operation, :exception], _, + %{step: :validate, context: received_context}} + + # Context should contain the invalid changeset + assert %Ecto.Changeset{} = received_context.changeset + assert received_context.changeset.valid? 
== false + assert received_context.changeset.errors[:email] + end end end From 250f87fffd73441a1982e495d755610877d8ce65 Mon Sep 17 00:00:00 2001 From: Peter Solnica Date: Sun, 6 Jul 2025 10:44:03 +0000 Subject: [PATCH 2/3] Introduce Drops.Logger for Operations --- config/dev.exs | 19 ++ config/test.exs | 11 + lib/drops/application.ex | 5 + lib/drops/config.ex | 34 +++ lib/drops/logger.ex | 295 +++++++++++++++++++++ lib/drops/logger/filter.ex | 45 ++++ lib/drops/logger/formatter.ex | 77 ++++++ lib/drops/logger/formatter/string.ex | 134 ++++++++++ lib/drops/logger/formatter/structured.ex | 153 +++++++++++ lib/drops/logger/metadata_dumper.ex | 133 ++++++++++ lib/drops/logger/test_handler.ex | 122 +++++++++ mix.exs | 1 + test/drops/logger/formatter_test.exs | 311 +++++++++++++++++++++++ test/drops/logger_test.exs | 296 +++++++++++++++++++++ test/support/logger_case.ex | 211 +++++++++++++++ test/support/operation_case.ex | 1 + test/test_helper.exs | 1 + 17 files changed, 1849 insertions(+) create mode 100644 config/test.exs create mode 100644 lib/drops/logger.ex create mode 100644 lib/drops/logger/filter.ex create mode 100644 lib/drops/logger/formatter.ex create mode 100644 lib/drops/logger/formatter/string.ex create mode 100644 lib/drops/logger/formatter/structured.ex create mode 100644 lib/drops/logger/metadata_dumper.ex create mode 100644 lib/drops/logger/test_handler.ex create mode 100644 test/drops/logger/formatter_test.exs create mode 100644 test/drops/logger_test.exs create mode 100644 test/support/logger_case.ex diff --git a/config/dev.exs b/config/dev.exs index 10a2ce0..855e9d6 100644 --- a/config/dev.exs +++ b/config/dev.exs @@ -12,3 +12,22 @@ config :drops, Drops.TestRepo, # Configure Ecto repos config :drops, :ecto_repos, [Drops.TestRepo] + +# Configure debug logging for development +config :drops, :logger, + handler: :console, + level: :debug, + metadata: [ + :operation, + :step, + :context, + :duration_us, + :kind, + :reason, + :error_type, + :errors + ], + formatter: %{ + format: :string, + colorize: true + } diff --git a/config/test.exs b/config/test.exs new file mode 100644 index 0000000..9cd4216 --- /dev/null +++ b/config/test.exs @@ -0,0 +1,11 @@ +import Config + +# Suppress console log output during tests +config :logger, level: :warning + +# Configure the debug handler to use memory for testing +config :drops, :logger, + handler: :memory, + level: :debug, + format: "[$level] $message $metadata\n", + metadata: [:operation, :step] diff --git a/lib/drops/application.ex b/lib/drops/application.ex index 7a4a8f3..c42f9c5 100644 --- a/lib/drops/application.ex +++ b/lib/drops/application.ex @@ -11,6 +11,7 @@ defmodule Drops.Application do :ok = Config.persist(config) register_builtin_types() + initialize_logger() children = [] @@ -30,4 +31,8 @@ defmodule Drops.Application do Enum.each(builtin_types, &Drops.Type.register_type/1) end + + defp initialize_logger do + Drops.Logger.init() + end end diff --git a/lib/drops/config.ex b/lib/drops/config.ex index 6f532ad..d71b579 100644 --- a/lib/drops/config.ex +++ b/lib/drops/config.ex @@ -63,6 +63,40 @@ defmodule Drops.Config do config :drops, registered_types: [MyApp.Types.CustomString, MyApp.Types.Email] """ + ], + logger: [ + type: :keyword_list, + default: [], + type_doc: "keyword list", + doc: """ + Configuration for the Drops debug logger handler. + + ## Options + + * `:handler` - The handler type to use. Can be `:file`, `:memory`, or `:console`. Defaults to `:console`. 
+ * `:file` - When handler is `:file`, specifies the file path to log to. Defaults to "log/drops_debug.log". + * `:level` - The minimum log level for the handler. Defaults to `:debug`. + * `:format` - The log format string. Defaults to "[$level] $message $metadata\\n". + * `:metadata` - List of metadata keys to include in logs. Defaults to `[:operation, :step]`. + + ## Examples + + # Log to file + config :drops, :logger, + handler: :file, + file: "log/operations.log", + level: :debug + + # Log to memory (useful for testing) + config :drops, :logger, + handler: :memory, + level: :debug + + # Log to console (default) + config :drops, :logger, + handler: :console, + level: :info + """ ] ] diff --git a/lib/drops/logger.ex b/lib/drops/logger.ex new file mode 100644 index 0000000..568cc7a --- /dev/null +++ b/lib/drops/logger.ex @@ -0,0 +1,295 @@ +defmodule Drops.Logger do + @moduledoc """ + Logger utilities for Drops operations. + + This module provides functionality for setting up and managing debug logging + for Drops operations using built-in Erlang logger handlers and custom formatters. + + ## Configuration + + The debug handler can be configured via application environment: + + config :drops, :logger, + handler: :console, + file: "log/operations.log", + level: :debug, + formatter: {Drops.Logger.Formatter.String, [add_newline: true, colorize: true]}, + metadata: [:operation, :step] + + ## Handler Types + + * `:console` - Logs to standard output using built-in logger_std_h (default) + * `:file` - Logs to a specified file using built-in logger_std_h + * `:memory` - Captures logs in memory for testing purposes + + ## Formatter Types + + * `Drops.Logger.Formatter.String` - Human-readable string format with message and metadata (default) + * `Drops.Logger.Formatter.Structured` - JSON format with message and metadata as structured data + + ## Automatic Initialization + + The debug handler is automatically initialized when the Drops application starts. + This ensures that operation debug logs are captured from the moment the application + is running. + + ## Manual Management + + You can manually add or remove the handler if needed: + + # Add the handler + Drops.Logger.add_handler() + + # Remove the handler + Drops.Logger.remove_handler() + + ## Testing + + When using the `:memory` handler for testing, you can retrieve and clear logs: + + # Get all captured logs + logs = Drops.Logger.get_logs() + + # Clear captured logs + Drops.Logger.clear_logs() + """ + + alias Drops.Logger.TestHandler + + @default_config %{ + handler: :console, + file: "log/drops.log", + level: :debug, + formatter: {Drops.Logger.Formatter.String, [add_newline: true, colorize: true]}, + metadata: :all + } + + @doc """ + Initializes the debug logger during application startup. + + This function is called automatically by the Drops application and sets up + the debug handler with the configured settings. It ensures that operation + debug logs are captured exclusively by the Drops debug handler. + + ## Returns + + Returns `:ok` on success or `{:error, reason}` if initialization fails. + """ + @spec init() :: :ok | {:error, term()} + def init do + case should_initialize_handler?() do + true -> add_handler() + false -> :ok + end + end + + @doc """ + Adds the debug handler to the logger system. + + This function registers the appropriate handler (built-in or custom) with the logger system + using the configuration from the application environment. + + For console and file handlers, uses built-in `:logger_std_h` with custom formatters. 
+ For memory handler, uses custom `DebugHandler` for testing purposes. + + When adding the debug handler, this function also adds a filter to the default + handler to prevent operation logs from being duplicated. + + ## Returns + + Returns `:ok` on success, or `{:error, reason}` if the handler cannot be added. + If the handler already exists, returns `:ok`. + """ + @spec add_handler() :: :ok | {:error, term()} + def add_handler do + config = get_handler_config() + + result = + case config.handler do + :console -> + add_console_handler(config) + + :file -> + add_file_handler(config) + + :memory -> + add_memory_handler(config) + + _ -> + {:error, {:invalid_handler, config.handler}} + end + + case result do + :ok -> + # Add filter to default handler to prevent duplicate operation logs + add_operation_filter_to_default_handler() + :ok + + error -> + error + end + end + + @doc """ + Removes the debug handler from the logger system. + + When removing the debug handler, this function also removes the operation + filter from the default handler to restore normal logging behavior. + + ## Returns + + Returns `:ok` on success or `{:error, reason}` if removal fails. + If the handler doesn't exist, returns `:ok`. + """ + @spec remove_handler() :: :ok | {:error, term()} + def remove_handler do + result = + case :logger.remove_handler(:drops_handler) do + :ok -> :ok + {:error, {:not_found, _}} -> :ok + error -> error + end + + case result do + :ok -> + # Remove filter from default handler to restore normal logging + remove_operation_filter_from_default_handler() + :ok + + error -> + error + end + end + + @doc """ + Gets all captured logs when using memory handler. + + This is a convenience function that delegates to the TestHandler. + + ## Returns + + Returns a list of captured log messages when using the memory handler, + or an empty list if not using memory handler or no logs captured. + """ + @spec get_logs() :: [String.t()] + def get_logs do + TestHandler.get_logs() + end + + @doc """ + Clears all captured logs when using memory handler. + + This is a convenience function that delegates to the TestHandler. + + ## Returns + + Returns `:ok`. + """ + @spec clear_logs() :: :ok + def clear_logs do + TestHandler.clear_logs() + end + + # Private functions + + defp should_initialize_handler? 
do + # Only initialize if we have logger configuration + # This allows users to opt-out by not configuring :logger + logger_config = Application.get_env(:drops, :logger, []) + logger_config != [] + end + + defp get_handler_config do + logger_config = Application.get_env(:drops, :logger, []) + + Enum.reduce(logger_config, @default_config, fn {key, value}, acc -> + Map.put(acc, key, value) + end) + end + + defp add_console_handler(config) do + {formatter_module, formatter_config} = normalize_formatter_config(config.formatter) + + handler_config = %{ + level: config.level, + formatter: {formatter_module, formatter_config} + } + + case :logger.add_handler(:drops_handler, :logger_std_h, handler_config) do + :ok -> :ok + {:error, {:already_exist, _}} -> :ok + error -> error + end + end + + defp add_file_handler(config) do + # Ensure log directory exists + log_dir = Path.dirname(config.file) + File.mkdir_p!(log_dir) + + {formatter_module, formatter_config} = normalize_formatter_config(config.formatter) + + handler_config = %{ + config: %{file: String.to_charlist(config.file)}, + level: config.level, + formatter: {formatter_module, formatter_config} + } + + case :logger.add_handler(:drops_handler, :logger_std_h, handler_config) do + :ok -> :ok + {:error, {:already_exist, _}} -> :ok + error -> error + end + end + + defp add_memory_handler(config) do + handler_config = %{ + level: config.level + } + + case :logger.add_handler(:drops_handler, TestHandler, handler_config) do + :ok -> :ok + {:error, {:already_exist, _}} -> :ok + error -> error + end + end + + # Filter management for default handler + + defp add_operation_filter_to_default_handler do + # Add a filter to the default handler to prevent operation logs from being duplicated + filter_config = {&Drops.Logger.Filter.filter_operation_logs/2, :stop} + + case :logger.add_handler_filter(:default, :drops_operation_filter, filter_config) do + :ok -> :ok + {:error, {:already_exist, _}} -> :ok + error -> error + end + end + + defp remove_operation_filter_from_default_handler do + # Remove the operation filter from the default handler + case :logger.remove_handler_filter(:default, :drops_operation_filter) do + :ok -> :ok + {:error, {:not_found, _}} -> :ok + error -> error + end + end + + # Normalize formatter config to use specific formatter modules + defp normalize_formatter_config(formatter_config) do + case formatter_config do + # Direct module reference + module when is_atom(module) -> + {module, %Drops.Logger.Formatter.Config{}} + + # Module with options tuple + {module, opts} when is_atom(module) and is_list(opts) -> + {module, struct(Drops.Logger.Formatter.Config, opts)} + + # Default fallback to string formatter + _ -> + {Drops.Logger.Formatter.String, %Drops.Logger.Formatter.Config{}} + end + end +end diff --git a/lib/drops/logger/filter.ex b/lib/drops/logger/filter.ex new file mode 100644 index 0000000..9415df7 --- /dev/null +++ b/lib/drops/logger/filter.ex @@ -0,0 +1,45 @@ +defmodule Drops.Logger.Filter do + @moduledoc """ + Logger filters for Drops operations. + + This module provides filter functions that can be used with Erlang logger + to control which log messages are processed by different handlers. + """ + + @doc """ + Filter function to stop operation-related logs from being processed. + + This filter is designed to be used with the default logger handler to prevent + operation logs from being duplicated when the Drops debug handler is active. 
+ + ## Parameters + + - `log_event` - The log event map from the logger + - `_config` - Filter configuration (unused) + + ## Returns + + - `:stop` - If the log has operation-related metadata (should be filtered out) + - `:ignore` - If the log should be processed normally by this handler + + ## Usage + + This filter is automatically applied to the default handler when the Drops + debug handler is added to prevent duplicate log output. + """ + def filter_operation_logs(log_event, _config) do + if is_operation_log?(log_event) do + :stop + else + :ignore + end + end + + # Private functions + + defp is_operation_log?(log_event) do + meta = Map.get(log_event, :meta, %{}) + # Check if the log has operation-related metadata + Map.has_key?(meta, :operation) or Map.has_key?(meta, :step) + end +end diff --git a/lib/drops/logger/formatter.ex b/lib/drops/logger/formatter.ex new file mode 100644 index 0000000..b0ce86f --- /dev/null +++ b/lib/drops/logger/formatter.ex @@ -0,0 +1,77 @@ +defmodule Drops.Logger.Formatter.Config do + @moduledoc """ + Configuration struct for Drops logger formatters. + + This struct centralizes formatter configuration options that are shared + across different formatter implementations. + + ## Fields + + * `colorize` - Whether to apply ANSI colors to output (default: true) + * `add_newline` - Whether to add newlines to formatted output (default: true) + """ + + @type t :: %__MODULE__{ + colorize: boolean(), + add_newline: boolean() + } + + defstruct colorize: true, add_newline: true + + @doc """ + Creates a new formatter config with the given options. + + ## Examples + + iex> Drops.Logger.Formatter.Config.new() + %Drops.Logger.Formatter.Config{colorize: true, add_newline: true} + + iex> Drops.Logger.Formatter.Config.new(colorize: false) + %Drops.Logger.Formatter.Config{colorize: false, add_newline: true} + """ + @spec new(keyword()) :: t() + def new(opts \\ []) do + struct(__MODULE__, opts) + end +end + +defmodule Drops.Logger.Formatter do + @moduledoc """ + Custom formatters for Drops operations debug logging. + + This module provides formatters that work with built-in Erlang logger handlers + to format operation debug logs in different formats. + + ## Formatter Types + + * `Drops.Logger.Formatter.String` - Human-readable string format with message and metadata (default) + * `Drops.Logger.Formatter.Structured` - JSON format with message and metadata as structured data + + ## Configuration + + Formatters can be configured in two ways: + + # Simple module reference + formatter: Drops.Logger.Formatter.String + + # Module with options + formatter: {Drops.Logger.Formatter.String, colorize: false, add_newline: true} + + ## Usage + + These formatters are used automatically by the Drops logger system when + configuring built-in handlers for console and file logging. + """ + + @doc """ + Checks if a log event is operation-related. + + Returns true if the log event has operation metadata, false otherwise. + This is a common utility function used by all formatters. + """ + @spec is_operation_log?(map()) :: boolean() + def is_operation_log?(log_event) do + meta = Map.get(log_event, :meta, %{}) + Map.has_key?(meta, :operation) + end +end diff --git a/lib/drops/logger/formatter/string.ex b/lib/drops/logger/formatter/string.ex new file mode 100644 index 0000000..19b862e --- /dev/null +++ b/lib/drops/logger/formatter/string.ex @@ -0,0 +1,134 @@ +defmodule Drops.Logger.Formatter.String do + @moduledoc """ + String formatter for Drops operations debug logging. 
+ + This formatter produces human-readable string output with operation metadata. + Only processes logs that have operation-related metadata. + + ## Format + + The output format is: `[level] message metadata` + + Where metadata includes operation and step information when available. + + ## Configuration + + The formatter accepts a `Drops.Logger.Formatter.Config` struct with options: + + * `colorize` - Whether to apply ANSI colors to output (default: true) + * `add_newline` - Whether to add newlines to formatted output (default: true) + """ + + alias Drops.Logger.MetadataDumper + alias Drops.Logger.Formatter.Config + + @doc """ + Formats a log event as a human-readable string. + + ## Parameters + + * `log_event` - The log event map from the logger + * `config` - Formatter configuration struct + + ## Returns + + A formatted string if the log event has operation metadata, empty string otherwise. + """ + @spec format(map(), Config.t()) :: String.t() + def format(log_event, %Config{} = config) do + level = Map.get(log_event, :level, :debug) + message = extract_message(log_event) + + # Use configured metadata fields, defaulting to [:operation, :step] + app_config = Application.get_env(:drops, :logger, []) + metadata_fields = Keyword.get(app_config, :metadata, [:operation, :step]) + + # Only colorize if enabled in config AND ANSI is available + should_colorize = config.colorize && IO.ANSI.enabled?() + metadata = extract_metadata(log_event, metadata_fields, should_colorize) + + formatted = + if should_colorize do + format_with_colors(level, message, metadata) + else + "[#{level}] #{message}#{metadata}" + end + + if config.add_newline do + formatted <> "\n" + else + formatted + end + end + + defp extract_message(log_event) do + case Map.get(log_event, :msg) do + {:string, message} when is_binary(message) -> + message + + message when is_binary(message) -> + message + + message when is_list(message) -> + IO.iodata_to_binary(message) + + {format, args} when is_binary(format) and is_list(args) -> + try do + :io_lib.format(format, args) |> IO.iodata_to_binary() + rescue + _ -> inspect({format, args}) + end + + other -> + inspect(other) + end + end + + defp extract_metadata(log_event, metadata_fields, colorize) do + meta = Map.get(log_event, :meta, %{}) + + metadata_fields + |> Enum.filter(&Map.has_key?(meta, &1)) + |> Enum.map(fn field -> + value = Map.get(meta, field) + formatted_value = MetadataDumper.dump(value) + + # Apply colors to metadata when colorization is enabled + formatted_entry = + if colorize and should_colorize_as_error?(field) do + # Error metadata gets red color + "#{field}=#{IO.ANSI.red()}#{formatted_value}#{IO.ANSI.reset()}" + else + # Non-error metadata gets faint color when colorization is enabled + if colorize do + "#{IO.ANSI.faint()}#{field}=#{formatted_value}#{IO.ANSI.reset()}" + else + "#{field}=#{formatted_value}" + end + end + + formatted_entry + end) + |> case do + [] -> "" + metadata_parts -> " " <> Enum.join(metadata_parts, " ") + end + end + + defp should_colorize_as_error?(field) when is_atom(field) do + field |> Atom.to_string() |> String.starts_with?("error") + end + + defp should_colorize_as_error?(_field), do: false + + defp format_with_colors(level, message, metadata) do + level_color = get_level_color(level) + level_color <> "[#{level}]" <> IO.ANSI.reset() <> " #{message}#{metadata}" + end + + defp get_level_color(:debug), do: IO.ANSI.cyan() + defp get_level_color(:info), do: IO.ANSI.green() + defp get_level_color(:warning), do: IO.ANSI.yellow() + defp 
get_level_color(:error), do: IO.ANSI.red() + defp get_level_color(_), do: "" +end diff --git a/lib/drops/logger/formatter/structured.ex b/lib/drops/logger/formatter/structured.ex new file mode 100644 index 0000000..744383b --- /dev/null +++ b/lib/drops/logger/formatter/structured.ex @@ -0,0 +1,153 @@ +defmodule Drops.Logger.Formatter.Structured do + @moduledoc """ + Structured (JSON) formatter for Drops operations debug logging. + + This formatter produces JSON output with structured metadata. + Only processes logs that have operation-related metadata. + + ## Format + + The output is a JSON object with the following structure: + + { + "level": "debug", + "message": "Operation message", + "metadata": { + "operation": "OperationName", + "step": "step_name" + } + } + + Note: Timestamps are not generated by formatters as all log entries + already have timestamps available. + + ## JSON Backend + + The formatter uses a JSON backend in the following priority: + 1. Built-in JSON from Elixir (if available) + 2. Jason library (if available) + 3. Raises an error if neither is available + + ## Configuration + + The formatter accepts a `Drops.Logger.Formatter.Config` struct with options: + + * `colorize` - Not used for structured output (ignored) + * `add_newline` - Whether to add newlines to formatted output (default: true) + """ + + alias Drops.Logger.Formatter.Config + + @doc """ + Formats a log event as structured JSON. + + ## Parameters + + * `log_event` - The log event map from the logger + * `config` - Formatter configuration struct + + ## Returns + + A JSON string if the log event has operation metadata, empty string otherwise. + """ + @spec format(map(), Config.t()) :: String.t() + def format(log_event, %Config{} = config) do + level = Map.get(log_event, :level, :debug) + message = extract_message(log_event) + + # Use configured metadata fields, defaulting to [:operation, :step] + app_config = Application.get_env(:drops, :logger, []) + metadata_fields = Keyword.get(app_config, :metadata, [:operation, :step]) + metadata = extract_metadata_map(log_event, metadata_fields) + + # Use timestamp from log event if available, otherwise generate current timestamp + timestamp = + case Map.get(log_event, :time) do + nil -> System.system_time(:millisecond) + time_microseconds -> div(time_microseconds, 1000) + end + + json_data = %{ + level: level, + message: message, + metadata: metadata, + timestamp: timestamp + } + + result = encode!(json_data) + + if config.add_newline do + result <> "\n" + else + result + end + end + + @doc """ + Encodes data to JSON using the best available JSON backend. + + Uses built-in JSON from Elixir if available, falls back to Jason, + and raises an error if neither is available. + + ## Parameters + + * `data` - The data to encode as JSON + + ## Returns + + A JSON string representation of the data. 
+ + ## Raises + + * `RuntimeError` - If no JSON backend is available + """ + @spec encode!(term()) :: String.t() + def encode!(data) do + encode!(data, backend()) + end + + @spec encode!(term(), module()) :: String.t() + def encode!(data, backend) do + backend.encode!(data) + end + + if Code.ensure_loaded?(Jason) do + defp backend, do: Jason + else + defp backend, do: JSON + end + + defp extract_message(log_event) do + case Map.get(log_event, :msg) do + {:string, message} when is_binary(message) -> + message + + message when is_binary(message) -> + message + + message when is_list(message) -> + IO.iodata_to_binary(message) + + {format, args} when is_binary(format) and is_list(args) -> + try do + :io_lib.format(format, args) |> IO.iodata_to_binary() + rescue + _ -> inspect({format, args}) + end + + other -> + inspect(other) + end + end + + defp extract_metadata_map(log_event, metadata_fields) do + meta = Map.get(log_event, :meta, %{}) + + metadata_fields + |> Enum.filter(&Map.has_key?(meta, &1)) + |> Enum.into(%{}, fn field -> + value = Map.get(meta, field) + {field, value} + end) + end +end diff --git a/lib/drops/logger/metadata_dumper.ex b/lib/drops/logger/metadata_dumper.ex new file mode 100644 index 0000000..6503e4b --- /dev/null +++ b/lib/drops/logger/metadata_dumper.ex @@ -0,0 +1,133 @@ +defprotocol Drops.Logger.MetadataDumper do + @moduledoc """ + Protocol for dumping metadata values into logs. + + This protocol provides a way to customize how different types of values + are formatted when included in log metadata. It's particularly useful for + complex data structures that need special formatting for readability. + + ## Default Implementations + + The protocol provides implementations for common Elixir types: + + - Basic types (strings, numbers, atoms) are formatted as-is + - Lists are formatted with a maximum of 4 items, showing count for larger lists + - Maps are formatted with a maximum of 3 key-value pairs, showing count for larger maps + - Structs like `Ecto.Changeset` have special formatting + + ## Custom Implementations + + You can implement this protocol for your own types: + + defimpl Drops.Logger.MetadataDumper, for: MyStruct do + def dump(%MyStruct{name: name, status: status}) do + "MyStruct(name: \#{name}, status: \#{status})" + end + end + + ## Examples + + iex> alias Drops.Logger.MetadataDumper + iex> MetadataDumper.dump("hello") + "\\"hello\\"" + + iex> MetadataDumper.dump([1, 2, 3]) + "[1, 2, 3]" + + iex> MetadataDumper.dump([1, 2, 3, 4, 5]) + "[5 items]" + + iex> MetadataDumper.dump(%{a: 1, b: 2}) + "%{:a => 1, :b => 2}" + """ + + @fallback_to_any true + + @doc """ + Dumps a value into a string representation suitable for logging. + + Returns a string that represents the value in a concise, readable format + appropriate for log output. 
+ """ + @spec dump(term()) :: String.t() + def dump(value) +end + +defimpl Drops.Logger.MetadataDumper, for: BitString do + def dump(string) when is_binary(string) do + inspect(string) + end +end + +defimpl Drops.Logger.MetadataDumper, for: Atom do + def dump(atom) do + inspect(atom) + end +end + +defimpl Drops.Logger.MetadataDumper, for: Integer do + def dump(integer) do + to_string(integer) + end +end + +defimpl Drops.Logger.MetadataDumper, for: Float do + def dump(float) do + to_string(float) + end +end + +defimpl Drops.Logger.MetadataDumper, for: List do + alias Drops.Logger.MetadataDumper + + def dump(list) when length(list) <= 4 do + items = Enum.map(list, &MetadataDumper.dump/1) + "[#{Enum.join(items, ", ")}]" + end + + def dump(list) do + "[#{length(list)} items]" + end +end + +defimpl Drops.Logger.MetadataDumper, for: Map do + alias Drops.Logger.MetadataDumper + + def dump(map) when map_size(map) <= 3 do + pairs = + map + |> Enum.sort_by(fn {key, _value} -> key end) + |> Enum.map(fn {key, value} -> + "#{MetadataDumper.dump(key)} => #{MetadataDumper.dump(value)}" + end) + + "%{#{Enum.join(pairs, ", ")}}" + end + + def dump(map) do + "%{#{map_size(map)} keys}" + end +end + +defimpl Drops.Logger.MetadataDumper, for: Any do + def dump(value) do + inspect(value) + end +end + +if Code.ensure_loaded?(Ecto.Changeset) do + defimpl Drops.Logger.MetadataDumper, for: Ecto.Changeset do + def dump(%Ecto.Changeset{valid?: valid?, changes: changes, errors: errors}) do + status = if valid?, do: "valid", else: "invalid" + changes_count = map_size(changes) + errors_count = length(errors) + + case {changes_count, errors_count} do + {0, 0} -> "Ecto.Changeset(#{status})" + {c, 0} -> "Ecto.Changeset(#{status}), #{c} changes" + {0, e} -> "Ecto.Changeset(#{status}), #{e} errors" + {c, e} -> "Ecto.Changeset(#{status}), #{c} changes, #{e} errors" + end + end + end +end diff --git a/lib/drops/logger/test_handler.ex b/lib/drops/logger/test_handler.ex new file mode 100644 index 0000000..6ae5729 --- /dev/null +++ b/lib/drops/logger/test_handler.ex @@ -0,0 +1,122 @@ +defmodule Drops.Logger.TestHandler do + @moduledoc """ + Memory log handler for Drops operations debug logging in tests. + + This module provides a memory-based log handler for testing purposes only. + For console and file logging, Drops uses built-in Erlang logger handlers + with custom formatters for better performance and consistency. + + This handler uses the same formatters as other handlers and respects their + configuration (string vs JSON formatting). + + ## Memory Handler + + When using `:memory` handler, logs can be retrieved and cleared: + + # Get all captured logs + logs = Drops.Logger.TestHandler.get_logs() + + # Clear captured logs + Drops.Logger.TestHandler.clear_logs() + + This is particularly useful for testing where you want to assert on log content. + """ + + @doc """ + Gets all captured logs when using memory handler. + + Returns an empty list if not using memory handler or no logs captured. + """ + @spec get_logs() :: [String.t()] + def get_logs do + case :persistent_term.get({__MODULE__, :logs}, nil) do + nil -> [] + logs -> Enum.reverse(logs) + end + end + + @doc """ + Clears all captured logs when using memory handler. 
+ """ + @spec clear_logs() :: :ok + def clear_logs do + :persistent_term.put({__MODULE__, :logs}, []) + :ok + end + + # Logger handler callbacks for memory handler only + + def adding_handler(config) do + # Initialize memory storage for memory handler + :persistent_term.put({__MODULE__, :logs}, []) + {:ok, config} + end + + def removing_handler(_config) do + # Clean up memory storage + try do + :persistent_term.erase({__MODULE__, :logs}) + rescue + _ -> :ok + end + + :ok + end + + def log(log_event, _config) do + # Process all operation-related logs regardless of level + if Drops.Logger.Formatter.is_operation_log?(log_event) do + # Use the same formatter as other handlers + {formatter_module, formatter_config} = get_formatter_config() + formatted_message = formatter_module.format(log_event, formatter_config) + + # Only store if the formatter returned content (operation logs) + if formatted_message != "" do + current_logs = :persistent_term.get({__MODULE__, :logs}, []) + :persistent_term.put({__MODULE__, :logs}, [formatted_message | current_logs]) + end + end + + :ok + end + + def changing_config(_set_or_update, _old_config, new_config) do + # Re-validate configuration on changes + adding_handler(new_config) + end + + def filter_config(config) do + # Remove internal state from config when fetched + config + end + + # Private functions + + defp get_formatter_config do + # Get the formatter configuration from the application environment + logger_config = Application.get_env(:drops, :logger) + formatter_type = Keyword.get(logger_config, :formatter) + + # Use the same normalization logic as the main logger + normalize_formatter_config(formatter_type) + end + + # Use the same normalization logic as Drops.Logger + defp normalize_formatter_config(formatter_config) do + case formatter_config do + # Direct module reference + module when is_atom(module) -> + {module, %Drops.Logger.Formatter.Config{add_newline: false}} + + # Module with options tuple + {module, opts} when is_atom(module) and is_list(opts) -> + {module, + struct(Drops.Logger.Formatter.Config, Keyword.put(opts, :add_newline, false))} + + # Default fallback to string formatter + _ -> + {Drops.Logger.Formatter.String, + %Drops.Logger.Formatter.Config{add_newline: false}} + end + end +end diff --git a/mix.exs b/mix.exs index dd63d7f..2ba2a0b 100644 --- a/mix.exs +++ b/mix.exs @@ -111,6 +111,7 @@ defmodule Drops.MixProject do {:telemetry, "~> 1.0"}, {:ex_doc, ">= 0.0.0", only: :dev, runtime: false}, {:credo, "~> 1.6", only: [:dev, :test], runtime: false}, + {:jason, "~> 1.4", only: [:dev, :test], runtime: false}, {:doctor, "~> 0.21.0", only: :dev}, {:dialyxir, "~> 1.4", only: [:dev, :test], runtime: false}, {:ecto, "~> 3.10", optional: true}, diff --git a/test/drops/logger/formatter_test.exs b/test/drops/logger/formatter_test.exs new file mode 100644 index 0000000..1148bdc --- /dev/null +++ b/test/drops/logger/formatter_test.exs @@ -0,0 +1,311 @@ +defmodule Drops.Logger.FormatterTest do + use ExUnit.Case, async: true + use Drops.LoggerCase + + alias Drops.Logger.Formatter.String, as: StringFormatter + alias Drops.Logger.Formatter.Structured, as: StructuredFormatter + alias Drops.Logger.Formatter.Config + + # Test struct for JSON encoding error tests + defmodule TestStruct do + @derive Jason.Encoder + defstruct [:value] + end + + describe "String formatter" do + test "formats operation logs with string formatter" do + log_event = %{ + level: :debug, + msg: {:string, "Starting step execute"}, + meta: %{operation: "TestOp", step: :execute}, + 
time: System.os_time(:microsecond) + } + + config = %Config{colorize: false, add_newline: true} + result = StringFormatter.format(log_event, config) + + assert result =~ "[debug] Starting step execute" + assert result =~ "operation=\"TestOp\"" + assert result =~ "step=:execute" + end + + test "formats operation logs with binary message" do + log_event = %{ + level: :info, + msg: "Test message", + meta: %{operation: "TestOp"}, + time: System.os_time(:microsecond) + } + + config = %Config{colorize: false, add_newline: true} + result = StringFormatter.format(log_event, config) + + assert result =~ "[info] Test message" + assert result =~ "operation=\"TestOp\"" + end + + test "formats operation logs with format tuple message" do + log_event = %{ + level: :warning, + msg: {"Step ~s failed with ~p", ["execute", :timeout]}, + meta: %{operation: "TestOp", step: :execute}, + time: System.os_time(:microsecond) + } + + config = %Config{colorize: false, add_newline: true} + result = StringFormatter.format(log_event, config) + + assert result =~ "[warning] Step execute failed with timeout" + assert result =~ "operation=\"TestOp\"" + assert result =~ "step=:execute" + end + + test "handles logs with only operation metadata" do + log_event = %{ + level: :debug, + msg: {:string, "Operation started"}, + meta: %{operation: "TestOp"}, + time: System.os_time(:microsecond) + } + + config = %Config{colorize: false, add_newline: true} + result = StringFormatter.format(log_event, config) + + assert result =~ "[debug] Operation started" + assert result =~ "operation=\"TestOp\"" + refute result =~ "step=" + end + end + + describe "Structured formatter" do + test "formats operation logs as JSON" do + timestamp = System.os_time(:microsecond) + + log_event = %{ + level: :debug, + msg: {:string, "Starting step execute"}, + meta: %{operation: "TestOp", step: :execute}, + time: timestamp + } + + config = %Config{colorize: false, add_newline: true} + result = StructuredFormatter.format(log_event, config) + + # Parse JSON to verify structure + assert {:ok, json} = Jason.decode(result) + assert json["level"] == "debug" + assert json["message"] == "Starting step execute" + assert json["metadata"]["operation"] == "TestOp" + assert json["metadata"]["step"] == "execute" + assert json["timestamp"] == div(timestamp, 1000) + end + end + + describe "message extraction" do + test "extracts binary messages" do + log_event = %{ + level: :debug, + msg: "Test message", + meta: %{operation: "TestOp"}, + time: System.os_time(:microsecond) + } + + config = %Config{colorize: false, add_newline: true} + result = StringFormatter.format(log_event, config) + assert result =~ "Test message" + end + + test "extracts list messages" do + log_event = %{ + level: :debug, + msg: [~c"Test", ~c" message"], + meta: %{operation: "TestOp"}, + time: System.os_time(:microsecond) + } + + config = %Config{colorize: false, add_newline: true} + result = StringFormatter.format(log_event, config) + assert result =~ "Test message" + end + + test "extracts format tuple messages" do + log_event = %{ + level: :debug, + msg: {"Hello ~s", ["world"]}, + meta: %{operation: "TestOp"}, + time: System.os_time(:microsecond) + } + + config = %Config{colorize: false, add_newline: true} + result = StringFormatter.format(log_event, config) + assert result =~ "Hello world" + end + + test "extracts string tuple messages" do + log_event = %{ + level: :debug, + msg: {:string, "String tuple message"}, + meta: %{operation: "TestOp"}, + time: System.os_time(:microsecond) + } + + config 
= %Config{colorize: false, add_newline: true} + result = StringFormatter.format(log_event, config) + assert result =~ "String tuple message" + end + + test "handles unknown message formats" do + log_event = %{ + level: :debug, + msg: %{unknown: "format"}, + meta: %{operation: "TestOp"}, + time: System.os_time(:microsecond) + } + + config = %Config{colorize: false, add_newline: true} + result = StringFormatter.format(log_event, config) + assert result =~ "%{unknown: \"format\"}" + end + end + + describe "operation log filtering" do + test "identifies operation logs with operation metadata" do + log_event = %{ + level: :debug, + msg: {:string, "Test"}, + meta: %{operation: "TestOp"}, + time: System.os_time(:microsecond) + } + + config = %Config{colorize: false, add_newline: true} + refute StringFormatter.format(log_event, config) == "" + refute StructuredFormatter.format(log_event, config) == "" + end + end + + describe "error metadata coloring" do + @tag logger: [metadata: [:operation, :error_code, :error_message]] + test "applies red color to metadata values when key starts with 'error' and colorization is enabled" do + log_event = %{ + level: :debug, + msg: {:string, "Operation failed"}, + meta: %{ + operation: "TestOp", + error_code: 500, + error_message: "Internal server error" + }, + time: System.os_time(:microsecond) + } + + # Enable colorization in config + config = %Config{colorize: true, add_newline: true} + result = StringFormatter.format(log_event, config) + + # Check if ANSI is enabled in test environment + if IO.ANSI.enabled?() do + # Check that error_code and error_message values are colored red + red = IO.ANSI.red() + reset = IO.ANSI.reset() + + assert result =~ "error_code=#{red}500#{reset}" + assert result =~ "error_message=#{red}\"Internal server error\"#{reset}" + # Regular metadata should not be colored red + assert result =~ "operation=\"TestOp\"" + refute result =~ "operation=#{red}" + else + # If ANSI is disabled, should fall back to plain format + refute result =~ "\e[" + assert result =~ "error_code=500" + assert result =~ "error_message=\"Internal server error\"" + assert result =~ "operation=\"TestOp\"" + end + end + + @tag logger: [metadata: [:operation, :error_code]] + test "does not apply red color when colorization is disabled" do + log_event = %{ + level: :debug, + msg: {:string, "Operation failed"}, + meta: %{ + operation: "TestOp", + error_code: 500 + }, + time: System.os_time(:microsecond) + } + + # Disable colorization in config + config = %Config{colorize: false, add_newline: true} + result = StringFormatter.format(log_event, config) + + # Check that no ANSI color codes are present + refute result =~ IO.ANSI.red() + refute result =~ IO.ANSI.reset() + assert result =~ "error_code=500" + assert result =~ "operation=\"TestOp\"" + end + + @tag logger: [metadata: [:operation, :step, :status]] + test "does not apply red color to non-error metadata keys" do + log_event = %{ + level: :debug, + msg: {:string, "Operation completed"}, + meta: %{ + operation: "TestOp", + step: :execute, + status: "success" + }, + time: System.os_time(:microsecond) + } + + # Enable colorization in config + config = %Config{colorize: true, add_newline: true} + result = StringFormatter.format(log_event, config) + + # Check that no metadata values are colored red since no keys start with "error" + red = IO.ANSI.red() + refute result =~ "operation=#{red}" + refute result =~ "step=#{red}" + refute result =~ "status=#{red}" + end + + @tag logger: [metadata: [:operation, :step, :error_type, 
:errors]] + test "applies faint color to non-error metadata and red color to error metadata" do + log_event = %{ + level: :debug, + msg: {:string, "SaveUser.validate failed"}, + meta: %{ + operation: "SaveUser", + step: :validate, + error_type: "Ecto.Changeset", + errors: "name: can't be blank" + }, + time: System.os_time(:microsecond) + } + + # Enable colorization in config + config = %Config{colorize: true, add_newline: true} + result = StringFormatter.format(log_event, config) + + # Check if ANSI is enabled in test environment + if IO.ANSI.enabled?() do + # Check that error metadata values are colored red + red = IO.ANSI.red() + reset = IO.ANSI.reset() + assert result =~ "error_type=#{red}\"Ecto.Changeset\"#{reset}" + assert result =~ "errors=#{red}\"name: can't be blank\"#{reset}" + + # Check that non-error metadata is colored faint + faint = IO.ANSI.faint() + assert result =~ "#{faint}operation=\"SaveUser\"#{reset}" + assert result =~ "#{faint}step=:validate#{reset}" + else + # If ANSI is disabled, should fall back to plain format + refute result =~ "\e[" + assert result =~ "error_type=\"Ecto.Changeset\"" + assert result =~ "errors=\"name: can't be blank\"" + assert result =~ "operation=\"SaveUser\"" + assert result =~ "step=:validate" + end + end + end +end diff --git a/test/drops/logger_test.exs b/test/drops/logger_test.exs new file mode 100644 index 0000000..16b9ca5 --- /dev/null +++ b/test/drops/logger_test.exs @@ -0,0 +1,296 @@ +defmodule Drops.LoggerTest do + use ExUnit.Case, async: false + use Drops.LoggerCase + + require Logger, as: ElixirLogger + + alias Drops.Logger, as: DropsLogger + + describe "init/0" do + @describetag logger: false + + test "initializes handler when logger configuration is present" do + Application.put_env(:drops, :logger, + handler: :memory, + level: :debug + ) + + assert :ok = DropsLogger.init() + + # Verify handler was added + {:ok, _config} = :logger.get_handler_config(:drops_handler) + end + + test "does not initialize handler when no logger configuration" do + Application.put_env(:drops, :logger, []) + + assert :ok = DropsLogger.init() + + # Verify handler was not added + assert {:error, {:not_found, :drops_handler}} = + :logger.get_handler_config(:drops_handler) + end + end + + describe "add_handler/0" do + @describetag logger: false + + test "adds debug handler with console configuration" do + Application.put_env(:drops, :logger, + handler: :console, + level: :debug + ) + + assert :ok = DropsLogger.add_handler() + + # Verify handler was added + {:ok, _config} = :logger.get_handler_config(:drops_handler) + end + + @tag logger: [handler: :file, file: "test/tmp/test.log", level: :debug] + test "adds debug handler with file configuration" do + assert :ok = DropsLogger.add_handler() + + # Verify handler was added + {:ok, _config} = :logger.get_handler_config(:drops_handler) + end + + test "adds debug handler with memory configuration" do + Application.put_env(:drops, :logger, + handler: :memory, + level: :debug + ) + + assert :ok = DropsLogger.add_handler() + + # Verify handler was added + {:ok, _config} = :logger.get_handler_config(:drops_handler) + end + + test "returns ok when handler already exists" do + Application.put_env(:drops, :logger, + handler: :memory, + level: :debug + ) + + assert :ok = DropsLogger.add_handler() + # Second call should also return :ok + assert :ok = DropsLogger.add_handler() + end + + test "uses default configuration when no config provided" do + Application.put_env(:drops, :logger, []) + + assert :ok = 
DropsLogger.add_handler() + + # Verify handler was added with defaults + {:ok, _config} = :logger.get_handler_config(:drops_handler) + end + end + + describe "remove_handler/0" do + @describetag logger: false + + test "removes debug handler successfully" do + Application.put_env(:drops, :logger, + handler: :memory, + level: :debug + ) + + DropsLogger.add_handler() + + # Verify handler exists + {:ok, _config} = :logger.get_handler_config(:drops_handler) + + assert :ok = DropsLogger.remove_handler() + + # Verify handler was removed + assert {:error, {:not_found, :drops_handler}} = + :logger.get_handler_config(:drops_handler) + end + + test "returns ok when handler does not exist" do + assert :ok = DropsLogger.remove_handler() + end + end + + describe "memory handler operations" do + @describetag logger: [ + handler: :memory, + level: :debug, + formatter: + {Drops.Logger.Formatter.String, [add_newline: true, colorize: false]}, + metadata: [:operation, :step] + ] + + test "get_logs/0 returns captured logs" do + # Initially empty + assert DropsLogger.get_logs() == [] + + # Log a debug message with metadata + ElixirLogger.debug("Test message", operation: "TestOp", step: "test_step") + + # Give logger time to process + Process.sleep(50) + + logs = DropsLogger.get_logs() + assert length(logs) == 1 + assert hd(logs) =~ "Test message" + end + + test "clear_logs/0 clears captured logs" do + # Log a message + ElixirLogger.debug("Test message", operation: "TestOp", step: "test_step") + + # Give logger time to process + Process.sleep(10) + + # Verify log exists + assert length(DropsLogger.get_logs()) == 1 + + # Clear logs + assert :ok = DropsLogger.clear_logs() + + # Verify logs are cleared + assert DropsLogger.get_logs() == [] + end + + test "logs are captured for all levels with operation metadata" do + # Log at different levels + ElixirLogger.debug("Debug message", operation: "TestOp", step: "test_step") + ElixirLogger.info("Info message", operation: "TestOp", step: "test_step") + ElixirLogger.warning("Warning message", operation: "TestOp", step: "test_step") + + # Give logger time to process + Process.sleep(10) + + logs = DropsLogger.get_logs() + # All operation-related messages should be captured + assert length(logs) == 3 + assert Enum.any?(logs, &(&1 =~ "Debug message")) + assert Enum.any?(logs, &(&1 =~ "Info message")) + assert Enum.any?(logs, &(&1 =~ "Warning message")) + end + + test "non-operation logs are not captured" do + # Log at different levels + ElixirLogger.debug("Debug message") + ElixirLogger.info("Info message") + ElixirLogger.warning("Warning message") + + # Give logger time to process + Process.sleep(10) + + logs = DropsLogger.get_logs() + + assert logs == [] + end + end + + describe "formatter configuration" do + @tag logger: [handler: :console, level: :debug] + test "configures string formatter for console handler" do + assert :ok = DropsLogger.add_handler() + + # Verify handler was configured with string formatter + {:ok, config} = :logger.get_handler_config(:drops_handler) + + assert {Drops.Logger.Formatter.String, %Drops.Logger.Formatter.Config{}} = + config.formatter + + assert config.level == :debug + end + + @tag logger: [ + handler: :console, + formatter: + {Drops.Logger.Formatter.Structured, [add_newline: true, colorize: false]}, + level: :debug + ] + test "configures JSON formatter for console handler" do + assert :ok = DropsLogger.add_handler() + + # Verify handler was configured with JSON formatter + {:ok, config} = :logger.get_handler_config(:drops_handler) + + 
assert {Drops.Logger.Formatter.Structured, %Drops.Logger.Formatter.Config{}} = + config.formatter + + assert config.level == :debug + end + + @tag logger: [handler: :file, file: "test/tmp/formatter_test.log", level: :debug] + test "configures string formatter for file handler" do + assert :ok = DropsLogger.add_handler() + + # Verify handler was configured with string formatter + {:ok, config} = :logger.get_handler_config(:drops_handler) + + assert {Drops.Logger.Formatter.String, %Drops.Logger.Formatter.Config{}} = + config.formatter + + assert config.level == :debug + end + + @tag logger: [ + handler: :file, + file: "test/tmp/formatter_test.log", + formatter: + {Drops.Logger.Formatter.Structured, [add_newline: true, colorize: false]}, + level: :debug + ] + test "configures JSON formatter for file handler" do + assert :ok = DropsLogger.add_handler() + + # Verify handler was configured with JSON formatter + {:ok, config} = :logger.get_handler_config(:drops_handler) + + assert {Drops.Logger.Formatter.Structured, %Drops.Logger.Formatter.Config{}} = + config.formatter + + assert config.level == :debug + end + + @tag logger: [handler: :console, level: :debug] + test "defaults to string formatter when not specified" do + assert :ok = DropsLogger.add_handler() + + # Verify handler defaults to string formatter + {:ok, config} = :logger.get_handler_config(:drops_handler) + # The formatter config should be normalized to String formatter + assert {Drops.Logger.Formatter.String, %Drops.Logger.Formatter.Config{}} = + config.formatter + end + end + + describe "built-in handler integration" do + @tag logger: [handler: :console, level: :debug] + test "uses logger_std_h for console handler" do + assert :ok = DropsLogger.add_handler() + + # Verify handler uses built-in logger_std_h module + {:ok, config} = :logger.get_handler_config(:drops_handler) + assert config.module == :logger_std_h + assert config.config.type == :standard_io + end + + @tag logger: [handler: :file, file: "test/tmp/builtin_test.log", level: :debug] + test "uses logger_std_h for file handler" do + assert :ok = DropsLogger.add_handler() + + # Verify handler uses built-in logger_std_h module + {:ok, config} = :logger.get_handler_config(:drops_handler) + assert config.module == :logger_std_h + assert String.ends_with?(to_string(config.config.file), "test/tmp/builtin_test.log") + end + + @tag logger: [handler: :memory, level: :debug] + test "uses custom TestHandler for memory handler" do + assert :ok = DropsLogger.add_handler() + + # Verify handler uses custom DebugHandler module + {:ok, config} = :logger.get_handler_config(:drops_handler) + assert config.module == Drops.Logger.TestHandler + end + end +end diff --git a/test/support/logger_case.ex b/test/support/logger_case.ex new file mode 100644 index 0000000..e810c02 --- /dev/null +++ b/test/support/logger_case.ex @@ -0,0 +1,211 @@ +defmodule Drops.LoggerCase do + @moduledoc """ + Test case template for tests that need logger configuration. + + This module provides a clean way to configure Drops logger for tests using + ExUnit tags. It handles setup and cleanup of logger configuration automatically. 
+ + ## Usage + + Use this case template in your test modules: + + defmodule MyTest do + use Drops.LoggerCase + + @tag logger: [handler: :memory, level: :debug] + test "logs debug messages" do + # Your test code here + # Logger is automatically configured based on the tag + end + end + + ## Logger Tag Options + + The `@tag logger: opts` supports all standard Drops logger configuration options: + + * `:handler` - Handler type (`:console`, `:file`, `:memory`) - defaults to `:memory` + * `:level` - Log level (`:debug`, `:info`, `:warning`, `:error`) - defaults to `:debug` + * `:formatter` - Formatter configuration - defaults to string formatter without colors + * `:metadata` - Metadata fields to include - defaults to operation-related fields + * `:file` - File path when using `:file` handler + + ## Examples + + # Basic memory handler for testing + @tag logger: true + test "basic logging" do + # Uses default memory handler with debug level + end + + # Custom configuration + @tag logger: [handler: :memory, level: :info, formatter: {Drops.Logger.Formatter.Structured, []}] + test "structured logging" do + # Uses memory handler with info level and structured formatter + end + + # File logging + @tag logger: [handler: :file, file: "test/tmp/test.log"] + test "file logging" do + # Logs to specified file + end + + ## Automatic Cleanup + + The logger configuration is automatically restored to its original state + after each test, ensuring test isolation. + """ + + use ExUnit.CaseTemplate + + using do + quote do + alias Drops.Logger, as: DropsLogger + require Logger, as: ElixirLogger + end + end + + setup tags do + # Check if logger tag is present + case Map.get(tags, :logger) do + nil -> + # No logger configuration needed, but ensure clean state + setup_clean_logger_state() + + true -> + # Use default logger configuration + setup_logger([]) + + false -> + # Explicitly disable logger, but ensure clean state + setup_clean_logger_state() + + opts when is_list(opts) -> + # Use custom logger configuration + setup_logger(opts) + + opts -> + raise ArgumentError, """ + Invalid logger tag value: #{inspect(opts)} + + Expected one of: + - true (use defaults) + - false (disable) + - keyword list with options + + Example: @tag logger: [handler: :memory, level: :debug] + """ + end + end + + @default_logger_config [ + handler: :memory, + level: :debug, + formatter: {Drops.Logger.Formatter.String, [add_newline: true, colorize: false]}, + metadata: [ + :operation, + :step, + :context, + :reason, + :error_type, + :errors, + :kind, + :duration_us + ] + ] + + defp setup_clean_logger_state do + # Store all original configurations for cleanup + original_drops_config = Application.get_env(:drops, :logger, []) + + # Ensure handler is removed before each test + Drops.Logger.remove_handler() + + # Setup cleanup to restore original state + ExUnit.Callbacks.on_exit(fn -> + Drops.Logger.remove_handler() + Application.put_env(:drops, :logger, original_drops_config) + Drops.Logger.clear_logs() + end) + + :ok + end + + defp setup_logger(opts) do + # Store all original configurations + original_drops_config = Application.get_env(:drops, :logger, []) + original_logger_level = Application.get_env(:logger, :level) + original_primary_config = :logger.get_primary_config() + original_process_level = Logger.level() + + # Get original default handler config + original_default_level = + case :logger.get_handler_config(:default) do + {:ok, config} -> config.level + _ -> :all + end + + # Merge provided options with defaults, but allow complete 
override + logger_config = + if Keyword.keyword?(opts) and length(opts) > 0 do + # If specific options provided, merge with defaults + Keyword.merge(@default_logger_config, opts) + else + # Use defaults + @default_logger_config + end + + # Apply logger configuration + Application.put_env(:drops, :logger, logger_config) + + # Set logger configuration to allow debug logs + Application.put_env(:logger, :level, :debug) + :logger.set_primary_config(level: :debug) + + # Also set process-level logger to debug to override any process-level filtering + Logger.configure(level: :debug) + + # Suppress debug output from default console handler during tests + :logger.set_handler_config(:default, :level, :warning) + + # Remove any existing handler and add the new one + Drops.Logger.remove_handler() + Drops.Logger.add_handler() + + # Clear any existing logs + Drops.Logger.clear_logs() + + # Setup cleanup + ExUnit.Callbacks.on_exit(fn -> + # Clean up in reverse order + Drops.Logger.remove_handler() + Drops.Logger.clear_logs() + + # Clean up file if file handler was used + case Keyword.get(logger_config, :handler) do + :file -> + if file_path = Keyword.get(logger_config, :file) do + # Clean up the specific file and its directory if empty + File.rm_rf!(Path.dirname(file_path)) + end + + _ -> + :ok + end + + # Restore all original configurations + :logger.set_handler_config(:default, :level, original_default_level) + Application.put_env(:drops, :logger, original_drops_config) + + if original_logger_level do + Application.put_env(:logger, :level, original_logger_level) + else + Application.delete_env(:logger, :level) + end + + :logger.set_primary_config(original_primary_config) + Logger.configure(level: original_process_level) + end) + + :ok + end +end diff --git a/test/support/operation_case.ex b/test/support/operation_case.ex index 0b56093..5bd3e46 100644 --- a/test/support/operation_case.ex +++ b/test/support/operation_case.ex @@ -5,6 +5,7 @@ defmodule Drops.OperationCase do quote do use Drops.DataCase use Drops.DoctestCase + use Drops.LoggerCase import Drops.OperationCase import Drops.Test.Config end diff --git a/test/test_helper.exs b/test/test_helper.exs index d5f66ca..18e2f2b 100644 --- a/test/test_helper.exs +++ b/test/test_helper.exs @@ -39,6 +39,7 @@ end Code.require_file("support/test_config.ex", __DIR__) Code.require_file("support/doctest_case.ex", __DIR__) Code.require_file("support/data_case.ex", __DIR__) +Code.require_file("support/logger_case.ex", __DIR__) Code.require_file("support/operation_case.ex", __DIR__) Code.require_file("support/ecto/test_schemas.ex", __DIR__) Code.require_file("support/ecto/user_group_schemas.ex", __DIR__) From 77d48df2a0c668b2472cf0441e4d74ba647d37cc Mon Sep 17 00:00:00 2001 From: Peter Solnica Date: Sun, 6 Jul 2025 10:44:24 +0000 Subject: [PATCH 3/3] Introduce Logging operations extension --- examples/ecto/save-user-operation.exs | 2 +- lib/drops/operations/command.ex | 1 + lib/drops/operations/extensions/ecto.ex | 5 +- lib/drops/operations/extensions/logging.ex | 728 +++++++++++++++ .../operations/extensions/logging_test.exs | 836 ++++++++++++++++++ 5 files changed, 1570 insertions(+), 2 deletions(-) create mode 100644 lib/drops/operations/extensions/logging.ex create mode 100644 test/drops/operations/extensions/logging_test.exs diff --git a/examples/ecto/save-user-operation.exs b/examples/ecto/save-user-operation.exs index 3f4988a..d862355 100644 --- a/examples/ecto/save-user-operation.exs +++ b/examples/ecto/save-user-operation.exs @@ -11,7 +11,7 @@ 
Code.require_file("examples/setup.exs") _pid = ExampleSetup.setup_database([Test.Ecto.TestSchemas.UserSchema]) defmodule SaveUser do - use Drops.Operations.Command, repo: Drops.TestRepo + use Drops.Operations.Command, repo: Drops.TestRepo, debug: true schema(Test.Ecto.TestSchemas.UserSchema) diff --git a/lib/drops/operations/command.ex b/lib/drops/operations/command.ex index 173a41c..3562518 100644 --- a/lib/drops/operations/command.ex +++ b/lib/drops/operations/command.ex @@ -40,6 +40,7 @@ defmodule Drops.Operations.Command do Drops.Operations.Extensions.Command, Drops.Operations.Extensions.Params, Drops.Operations.Extensions.Ecto, + Drops.Operations.Extensions.Logging, Drops.Operations.Extensions.Telemetry ] end diff --git a/lib/drops/operations/extensions/ecto.ex b/lib/drops/operations/extensions/ecto.ex index 603608a..4022f94 100644 --- a/lib/drops/operations/extensions/ecto.ex +++ b/lib/drops/operations/extensions/ecto.ex @@ -198,7 +198,10 @@ defmodule Drops.Operations.Extensions.Ecto do end def validate(%{changeset: changeset} = context) do - case validate_changeset(%{context | changeset: %{changeset | action: :validate}}) do + validated_changeset = + validate_changeset(%{context | changeset: %{changeset | action: :validate}}) + + case validated_changeset do %{valid?: true} = changeset -> {:ok, %{context | changeset: %{changeset | action: nil}}} diff --git a/lib/drops/operations/extensions/logging.ex b/lib/drops/operations/extensions/logging.ex new file mode 100644 index 0000000..2fbcb05 --- /dev/null +++ b/lib/drops/operations/extensions/logging.ex @@ -0,0 +1,728 @@ +defmodule Drops.Operations.Extensions.Logging do + @moduledoc """ + Logging extension for Operations framework. + + This extension provides configurable logging for Operations by leveraging telemetry events + and a custom log handler. It supports both debug mode (detailed logging with context) and + info mode (basic success/error logging) with configurable step filtering. 
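+
+  With debug mode enabled, the emitted log lines look roughly like this
+  (illustrative output; the exact layout depends on the configured formatter
+  and metadata):
+
+      [debug] CreateUser.execute started operation="CreateUser" step=execute
+      [debug] CreateUser.execute succeeded in 1.2ms operation="CreateUser" step=execute duration_us=1200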
+ + ## Features + + - Configurable logging levels (debug mode vs info mode) + - Automatic operation-level logging (start/stop events) with step-level error logging + - Configurable step-level instrumentation + - Configurable log handler (console, file, or memory) + - Metadata includes operation module for all events, step name for step-level events, and execution context + - Built on top of the Telemetry extension + - Smart metadata dumping for complex types via MetadataDumper protocol + + ## Configuration + + The logging handler can be configured via application environment: + + config :drops, :logger, + handler: :file, + file: "log/operations.log", + level: :debug, + format: "[$level] $message $metadata\\n", + metadata: [ + :operation, # Operation module name (always included) + :step, # Step name (for step-level events) + :duration_us, # Duration in microseconds (for stop/exception events) + :system_time, # System time (for start events) + :context, # Execution context (debug mode and error events) + :kind, # Error kind (for exception events) + :reason, # Error reason (for exception events) + :error_type, # Error type (for structured errors like Ecto.Changeset) + :errors # Formatted errors (for structured errors) + ] + + ## Usage + + ### Enable Basic Logging + + Enable default logging with info level (no context included): + + defmodule CreateUser do + use Drops.Operations.Command, logging: true + + steps do + @impl true + def execute(%{params: params}) do + {:ok, create_user(params)} + end + end + end + + This will log operation start/stop events at info level for successful operations, + operation failure events at warning level with basic metadata (no context), and + step error events at error level only when they occur in the last step. + + ### Enable Debug Logging + + Enable debug logging with detailed context: + + defmodule CreateUser do + use Drops.Operations.Command, debug: true + + steps do + @impl true + def execute(%{params: params}) do + {:ok, create_user(params)} + end + end + end + + ### Step-Level Configuration + + Configure logging for specific steps: + + defmodule CreateUser do + use Drops.Operations.Command, logging: [steps: [:validate, :execute]] + + steps do + @impl true + def execute(%{params: params}) do + {:ok, create_user(params)} + end + end + end + + Configure logging for all steps: + + defmodule CreateUser do + use Drops.Operations.Command, logging: [steps: :all] + + steps do + @impl true + def execute(%{params: params}) do + {:ok, create_user(params)} + end + end + end + + ### Error Logging Configuration + + Control when step errors are logged: + + # Log all step errors (default behavior) + defmodule CreateUser do + use Drops.Operations.Command, logging: [log_all_errors: true] + end + + # Only log errors when operation fails (recommended) + defmodule CreateUser do + use Drops.Operations.Command, logging: [log_all_errors: false] + end + + When `log_all_errors: false` (default), step errors are only logged when they + occur in the last step of the operation pipeline. This reduces noise from step + errors in intermediate steps that are part of normal control flow. 
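+
+  For example, in a pipeline that ends with `:execute`, an `{:error, _}` returned
+  from an intermediate step such as `:validate` is not logged as a step error,
+  while the same failure returned from `:execute` (the last step) is.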
+ + ## Logging Behavior + + ### Debug Mode (debug: true) + + - Logs at debug level for successful operations/steps + - Logs at error level for failed operations/steps + - Includes full context in all log messages + - Logs all steps by default + + ### Info Mode (logging: true or logging: [steps: ...]) + + - Logs at info level for successful operations/steps + - Logs at error level for step errors in the last step (when log_all_errors: false) + - Logs at warning level for operation failures with basic metadata (reason, error_type, duration, kind) + - No context included for successful operations + - Context included for failed operations in debug mode only + - Configurable step filtering + - Configurable step error logging behavior via `log_all_errors` option (default: false) + + ## Implementation Details + + This extension works by: + + 1. Enabling telemetry with operation-level instrumentation and step-level error instrumentation + 2. Attaching telemetry handlers that log events using the custom logging handler + 3. Automatically cleaning up handlers when the operation module is unloaded + + The extension uses the Telemetry extension internally and attaches handlers + during module compilation. When `logging: true` is specified, operation boundaries + are instrumented for start/stop events. Step-level error instrumentation is + controlled by the `log_all_errors` setting (default: false). + """ + use Drops.Operations.Extension + + require Logger + + @depends_on [Drops.Operations.Extensions.Telemetry] + + @impl true + @spec enable?(keyword()) :: boolean() + def enable?(opts) do + # Enable if either logging or debug is configured + case {Keyword.get(opts, :logging, false), Keyword.get(opts, :debug, false)} do + {false, false} -> false + # debug: true takes precedence + {_, true} -> true + {_, config} when is_list(config) -> true + {true, _} -> true + {config, _} when is_list(config) -> true + _ -> false + end + end + + @impl true + @spec default_opts(keyword()) :: keyword() + def default_opts(opts) do + debug_config = Keyword.get(opts, :debug, false) + logging_config = Keyword.get(opts, :logging, false) + + cond do + debug_config != false -> + # Debug mode: enable telemetry with all steps instrumented + case debug_config do + true -> + [telemetry: [steps: :all]] + + config when is_list(config) -> + # Pass through custom identifier but ensure all steps are instrumented + identifier = Keyword.get(config, :identifier, :drops) + [telemetry: [identifier: identifier, steps: :all]] + end + + logging_config != false -> + # Logging mode: configure telemetry based on logging config + case logging_config do + true -> + # Default logging: instrument operation boundaries + all step errors + # Maintain backward compatibility by logging all step errors by default + [telemetry: true, telemetry_step_errors: :all] + + config when is_list(config) -> + # Custom logging configuration + identifier = Keyword.get(config, :identifier, :drops) + steps = Keyword.get(config, :steps, []) + log_all_errors = Keyword.get(config, :log_all_errors, false) + + telemetry_config = + case steps do + :all -> + [telemetry: [identifier: identifier, steps: :all]] + + [] -> + [telemetry: [identifier: identifier]] + + step_list when is_list(step_list) -> + [telemetry: [identifier: identifier, steps: step_list]] + end + + # Add step error instrumentation based on log_all_errors setting + if log_all_errors do + Keyword.put(telemetry_config, :telemetry_step_errors, :all) + else + telemetry_config + end + end + + true -> + [] + end + end + 
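+  # Illustrative mapping of operation options to the telemetry options produced
+  # above (a rough sketch; exact keyword ordering may differ):
+  #
+  #   default_opts(debug: true)
+  #   #=> [telemetry: [steps: :all]]
+  #
+  #   default_opts(logging: true)
+  #   #=> [telemetry: true, telemetry_step_errors: :all]
+  #
+  #   default_opts(logging: [steps: [:validate, :execute], log_all_errors: true])
+  #   #=> [telemetry: [identifier: :drops, steps: [:validate, :execute]],
+  #   #    telemetry_step_errors: :all]
+  #
+  # Note that :telemetry_step_errors only controls which step errors emit
+  # telemetry events; whether an error is actually logged is still decided by
+  # the log_all_errors setting (see get_log_all_errors_setting/1 below).
+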
+ @impl true + @spec unit_of_work(Drops.Operations.UnitOfWork.t(), keyword()) :: + Drops.Operations.UnitOfWork.t() + def unit_of_work(uow, opts) do + debug_config = Keyword.get(opts, :debug, false) + logging_config = Keyword.get(opts, :logging, false) + + # Determine which config to use (debug takes precedence) + config = + if debug_config != false, + do: {:debug, debug_config}, + else: {:logging, logging_config} + + case config do + {_, false} -> + uow + + {mode, config_value} -> + # Extract log_all_errors setting for later use + log_all_errors = extract_log_all_errors_setting(mode, config_value) + + # Store logging config in module attribute for later use in __before_compile__ + Module.put_attribute(uow.module, :drops_logging_config, {mode, config_value}) + + # Add a before callback to the first step to ensure handlers are attached + first_step = List.first(uow.step_order) + + if first_step do + Drops.Operations.UnitOfWork.register_before_callback( + uow, + first_step, + __MODULE__, + :ensure_logging_handlers_attached, + %{logging_config: {mode, config_value}, log_all_errors: log_all_errors} + ) + else + uow + end + end + end + + @impl true + @spec using() :: Macro.t() + def using do + quote do + # Ensure handlers are cleaned up when module is unloaded + @before_compile Drops.Operations.Extensions.Logging + end + end + + @impl true + @spec helpers() :: Macro.t() + def helpers do + quote do + # No additional helpers needed + end + end + + @impl true + @spec steps() :: Macro.t() + def steps do + quote do + # No additional steps needed + end + end + + defmacro __before_compile__(env) do + # Get the logging configuration from the module attributes + logging_config = Module.get_attribute(env.module, :drops_logging_config) + + if logging_config do + quote do + def __logging_handler_id__, do: "logging-#{__MODULE__}" + + # Clean up handlers when module is unloaded + @before_compile :__detach_logging_handlers__ + + def __detach_logging_handlers__ do + try do + :telemetry.detach(__logging_handler_id__()) + rescue + _ -> :ok + end + end + end + else + quote do + # No logging configuration, no handlers to manage + end + end + end + + # Private functions + + defp extract_log_all_errors_setting(mode, config_value) do + case {mode, config_value} do + {:debug, _} -> + # Debug mode always logs all errors + true + + {:logging, true} -> + # Default logging mode: only log errors from last step (log_all_errors: false) + false + + {:logging, config} when is_list(config) -> + # Custom logging config: check log_all_errors setting + Keyword.get(config, :log_all_errors, false) + + _ -> + false + end + end + + defp get_log_all_errors_setting(logging_config) do + case logging_config do + {:debug, _} -> + # Debug mode always logs all errors + true + + {:logging, true} -> + # Default logging mode: only log errors from last step (log_all_errors: false) + false + + {:logging, config} when is_list(config) -> + # Custom logging config: check log_all_errors setting + Keyword.get(config, :log_all_errors, false) + + _ -> + false + end + end + + defp should_log_step_error?(log_all_errors, metadata) do + if log_all_errors do + # Log all step errors when explicitly configured + true + else + # Only log errors from the last step in the UnitOfWork step_order + is_last_step_in_pipeline?(metadata.operation, metadata.step) + end + end + + defp is_last_step_in_pipeline?(operation_module, step) do + try do + # Get the UnitOfWork from the operation module + uow = operation_module.__unit_of_work__() + + # Check if this step is the last 
step in the step_order + last_step = List.last(uow.step_order) + step == last_step + rescue + # If we can't determine the step order, default to logging the error + _ -> true + end + end + + @doc false + def ensure_logging_handlers_attached(operation_module, _step, _context, config) do + logging_config = config.logging_config + + # Try to attach handlers if not already attached + try do + attach_logging_handlers(operation_module, logging_config) + rescue + _ -> :ok + end + + :ok + end + + @doc false + def attach_logging_handlers(operation_module, logging_config) do + identifier = get_identifier(logging_config) + handler_id = "logging-#{operation_module}" + + # Define the events we want to listen to + events = [ + [identifier, :operation, :start], + [identifier, :operation, :stop], + [identifier, :operation, :exception], + [identifier, :operation, :step, :start], + [identifier, :operation, :step, :stop], + [identifier, :operation, :step, :exception] + ] + + # Attach the handler + :telemetry.attach_many( + handler_id, + events, + &__MODULE__.handle_logging_event/4, + %{operation_module: operation_module, logging_config: logging_config} + ) + end + + defp get_identifier(logging_config) do + case logging_config do + {:debug, true} -> :drops + {:debug, config} when is_list(config) -> Keyword.get(config, :identifier, :drops) + {:logging, true} -> :drops + {:logging, config} when is_list(config) -> Keyword.get(config, :identifier, :drops) + true -> :drops + config when is_list(config) -> Keyword.get(config, :identifier, :drops) + _ -> :drops + end + end + + defp get_logging_config(logging_config) do + case logging_config do + {:debug, _} -> + # Debug mode: debug level for success, error level for failures, always include context + {:debug, true} + + {:logging, _} -> + # Info mode: info level for success, error level for failures, no context for success + {:info, false} + + _ -> + # Fallback to debug mode + {:debug, true} + end + end + + @doc false + def handle_logging_event( + [_identifier, :operation, :start], + measurements, + metadata, + config + ) do + operation_name = format_operation_name(metadata.operation) + {log_level, include_context} = get_logging_config(config.logging_config) + + log_metadata = [ + operation: operation_name, + system_time: measurements.system_time + ] + + log_metadata = + if include_context do + context = format_context_for_logging(metadata.context) + Keyword.put(log_metadata, :context, context) + else + log_metadata + end + + Logger.log(log_level, "#{operation_name} started", log_metadata) + end + + def handle_logging_event( + [_identifier, :operation, :stop], + measurements, + metadata, + config + ) do + operation_name = format_operation_name(metadata.operation) + duration_us = System.convert_time_unit(measurements.duration, :native, :microsecond) + duration_display = format_duration(duration_us) + {log_level, include_context} = get_logging_config(config.logging_config) + + log_metadata = [ + operation: operation_name, + duration_us: duration_us + ] + + log_metadata = + if include_context do + context = format_context_for_logging(metadata.context) + Keyword.put(log_metadata, :context, context) + else + log_metadata + end + + Logger.log( + log_level, + "#{operation_name} succeeded in #{duration_display}", + log_metadata + ) + end + + def handle_logging_event( + [_identifier, :operation, :exception], + measurements, + metadata, + config + ) do + operation_name = format_operation_name(metadata.operation) + duration_us = System.convert_time_unit(measurements.duration, 
:native, :microsecond) + duration_display = format_duration(duration_us) + reason_info = format_reason(metadata.reason) + {_log_level, include_context} = get_logging_config(config.logging_config) + + # Build log metadata based on debug mode + log_metadata = + if include_context do + # Debug mode: include full context and all metadata + context = format_context_for_logging(metadata.context) + + [ + operation: operation_name, + context: context, + duration_us: duration_us, + kind: metadata.kind + ] ++ reason_info + else + # Non-debug mode: include basic info but no context + basic_reason_info = extract_basic_reason_info(reason_info) + + [ + operation: operation_name, + duration_us: duration_us, + kind: metadata.kind + ] ++ basic_reason_info + end + + # Use warning level for operation failures in non-debug mode, error level in debug mode + if include_context do + Logger.error( + "#{operation_name} failed in #{duration_display}", + log_metadata + ) + else + Logger.warning( + "#{operation_name} failed in #{duration_display}", + log_metadata + ) + end + end + + def handle_logging_event( + [_identifier, :operation, :step, :start], + measurements, + metadata, + config + ) do + operation_name = format_operation_name(metadata.operation) + {log_level, include_context} = get_logging_config(config.logging_config) + + log_metadata = [ + operation: operation_name, + step: metadata.step, + system_time: measurements.system_time + ] + + log_metadata = + if include_context do + context = format_context_for_logging(metadata.context) + Keyword.put(log_metadata, :context, context) + else + log_metadata + end + + Logger.log(log_level, "#{operation_name}.#{metadata.step} started", log_metadata) + end + + def handle_logging_event( + [_identifier, :operation, :step, :stop], + measurements, + metadata, + config + ) do + operation_name = format_operation_name(metadata.operation) + duration_us = System.convert_time_unit(measurements.duration, :native, :microsecond) + duration_display = format_duration(duration_us) + {log_level, include_context} = get_logging_config(config.logging_config) + + log_metadata = [ + operation: operation_name, + step: metadata.step, + duration_us: duration_us + ] + + log_metadata = + if include_context do + context = format_context_for_logging(metadata.context) + Keyword.put(log_metadata, :context, context) + else + log_metadata + end + + Logger.log( + log_level, + "#{operation_name}.#{metadata.step} succeeded in #{duration_display}", + log_metadata + ) + end + + def handle_logging_event( + [_identifier, :operation, :step, :exception], + measurements, + metadata, + config + ) do + # Check if we should log this step error + log_all_errors = get_log_all_errors_setting(config.logging_config) + + if should_log_step_error?(log_all_errors, metadata) do + operation_name = format_operation_name(metadata.operation) + duration_us = System.convert_time_unit(measurements.duration, :native, :microsecond) + duration_display = format_duration(duration_us) + reason_info = format_reason(metadata.reason) + + # Always include context for error cases and always use error level + context = format_context_for_logging(metadata.context) + + log_metadata = + [ + operation: operation_name, + step: metadata.step, + context: context, + duration_us: duration_us, + kind: metadata.kind + ] ++ reason_info + + Logger.error( + "#{operation_name}.#{metadata.step} failed in #{duration_display}", + log_metadata + ) + end + end + + # Private helper functions + + defp format_operation_name(operation) when is_atom(operation) do + 
operation + |> to_string() + |> String.replace_prefix("Elixir.", "") + end + + defp format_operation_name(operation), do: to_string(operation) + + defp format_context_for_logging(context) do + # Pass the raw context to the logger formatter + # The formatter will handle pretty-printing using inspect with proper options + context + end + + defp format_metadata_value(value) do + try do + Drops.Logger.MetadataDumper.dump(value) + rescue + Protocol.UndefinedError -> + # Fallback to inspect for types without MetadataDumper implementation + inspect(value, limit: 50, printable_limit: 100) + end + end + + defp format_reason(reason) do + cond do + # Handle Ecto.Changeset errors specially + is_struct(reason, Ecto.Changeset) and Code.ensure_loaded?(Ecto.Changeset) -> + [ + reason: :validation, + error_type: "Ecto.Changeset", + errors: format_changeset_errors(reason.errors) + ] + + # Handle other structs with errors field + is_struct(reason) and Map.has_key?(reason, :errors) -> + struct_name = + reason.__struct__ |> to_string() |> String.replace_prefix("Elixir.", "") + + [ + reason: :error, + error_type: struct_name, + errors: format_metadata_value(reason.errors) + ] + + # Handle regular values + true -> + [reason: format_metadata_value(reason)] + end + end + + defp format_changeset_errors(errors) when is_list(errors) do + errors + |> Enum.map(fn {field, {message, _opts}} -> + "#{field}: #{message}" + end) + |> Enum.join(", ") + end + + defp format_changeset_errors(errors), do: format_metadata_value(errors) + + defp extract_basic_reason_info(reason_info) do + # Extract only reason and error_type from the full reason info + reason_info + |> Keyword.take([:reason, :error_type]) + end + + defp format_duration(duration_us) when duration_us < 1000, do: "#{duration_us}μs" + + defp format_duration(duration_us) when duration_us < 1_000_000, + do: "#{Float.round(duration_us / 1000, 2)}ms" + + defp format_duration(duration_us), do: "#{Float.round(duration_us / 1_000_000, 2)}s" +end diff --git a/test/drops/operations/extensions/logging_test.exs b/test/drops/operations/extensions/logging_test.exs new file mode 100644 index 0000000..34da803 --- /dev/null +++ b/test/drops/operations/extensions/logging_test.exs @@ -0,0 +1,836 @@ +defmodule Drops.Operations.Extensions.LoggingTest do + use Drops.OperationCase, async: false + + # Configure logger for all tests in this module + @moduletag logger: [ + handler: :memory, + level: :debug, + formatter: + {Drops.Logger.Formatter.String, [add_newline: true, colorize: false]}, + metadata: [ + :operation, + :step, + :context, + :reason, + :error_type, + :errors, + :kind, + :duration_us + ] + ] + + alias Drops.Logger + + defmodule TestHandler do + def handle_event(event, measurements, metadata, _config) do + send(self(), {:telemetry_event, event, measurements, metadata}) + end + end + + setup do + # Setup telemetry cleanup (LoggerCase handles logger setup/cleanup) + on_exit(fn -> + # Detach any telemetry handlers that might have been attached during tests + # This prevents interference between tests + detach_all_logging_handlers() + end) + + :ok + end + + # Helper function to detach all logging telemetry handlers + defp detach_all_logging_handlers do + # Get all attached telemetry handlers + handlers = :telemetry.list_handlers([]) + + # Find and detach all handlers that start with "logging-" + Enum.each(handlers, fn handler -> + if String.starts_with?(handler.id, "logging-") do + try do + :telemetry.detach(handler.id) + rescue + _ -> :ok + end + end + end) + end + + describe "when 
debug is enabled" do + operation type: :command, debug: true do + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def execute(context) do + {:ok, context} + end + end + end + + test "logging operation start/stop events by default", %{operation: operation} do + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:ok, _result} = operation.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. prefix + operation_name = operation |> to_string() |> String.replace_prefix("Elixir.", "") + + # Should log operation start/stop events (first and last steps) + assert log_output =~ "#{operation_name}.conform started" + assert log_output =~ "#{operation_name}.conform succeeded in" + assert log_output =~ "#{operation_name}.execute started" + assert log_output =~ "#{operation_name}.execute succeeded in" + end + + test "logging operation's step start/stop events by default", %{operation: operation} do + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:ok, _result} = operation.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. prefix + operation_name = operation |> to_string() |> String.replace_prefix("Elixir.", "") + + # Should log all step start/stop events + assert log_output =~ "#{operation_name}.conform started" + assert log_output =~ "#{operation_name}.conform succeeded in" + assert log_output =~ "#{operation_name}.execute started" + assert log_output =~ "#{operation_name}.execute succeeded in" + end + + test "logs duration with proper precision formatting", %{operation: operation} do + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:ok, _result} = operation.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Should contain duration information with proper units + # Fast operations should show microseconds (μs) + # Slower operations might show milliseconds (ms) or seconds (s) + duration_patterns = [ + # microseconds + ~r/succeeded in \d+μs/, + # milliseconds + ~r/succeeded in \d+\.\d+ms/, + # seconds + ~r/succeeded in \d+\.\d+s/ + ] + + # At least one duration pattern should match + assert Enum.any?(duration_patterns, fn pattern -> + Regex.match?(pattern, log_output) + end), + "Expected to find duration with proper unit formatting in: #{log_output}" + end + + test "duration formatting converts microseconds correctly" do + # Since format_duration is private, we'll test it through telemetry events + # Create mock telemetry events with known durations and verify the log output + + # Clear logs before test + Logger.clear_logs() + + # Test different duration ranges by simulating telemetry events + test_cases = [ + # microseconds + {500, "500μs"}, + # milliseconds + {1_500, "1.5ms"}, + # seconds (from your example) + {14_096_948, "14.1s"}, + # seconds (from your example) + {32_596_681, "32.6s"} + ] + + for {duration_us, _expected_display} <- test_cases do + # Convert from microseconds to native time units for the telemetry measurement + duration_native = System.convert_time_unit(duration_us, :microsecond, :native) + + # Simulate a telemetry event + measurements = %{duration: duration_native} + metadata = %{operation: TestOperation, step: :execute, context: %{}} 
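+        # A simplified stand-in: the extension itself attaches handlers with
+        # logging_config set to {:debug, opts} or {:logging, opts}; other shapes
+        # fall back to debug-mode formatting in get_logging_config/1.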
+ config = %{logging_config: %{level: :info, include_context: false}} + + Drops.Operations.Extensions.Logging.handle_logging_event( + [:test, :operation, :stop], + measurements, + metadata, + config + ) + end + + # Get captured logs and verify duration formatting + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Verify each expected duration appears in the logs + for {_duration_us, expected_display} <- test_cases do + assert String.contains?(log_output, "succeeded in #{expected_display}"), + "Expected to find '#{expected_display}' in logs: #{log_output}" + end + end + + test "logs contain proper metadata for operation filtering", %{operation: operation} do + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:ok, _result} = operation.call(context) + + # Get captured logs + logs = Logger.get_logs() + + # All logs should contain operation metadata (required for formatter filtering) + assert length(logs) > 0, "Expected debug logs to be captured" + + Enum.each(logs, fn log -> + # Each log should contain operation metadata (without Elixir. prefix) + operation_name = operation |> to_string() |> String.replace_prefix("Elixir.", "") + + # The operation name is quoted in the metadata + assert log =~ "operation=\"#{operation_name}\"", + "Expected log to contain operation metadata: #{log}" + + # Step-level logs (containing a dot) should contain step metadata + # Operation-level logs (without a dot) should not contain step metadata + if log =~ "started" or log =~ "succeeded" do + if String.contains?(log, ".") do + # This is a step-level log (e.g., "OperationName.step_name started") + assert log =~ "step=", + "Expected step-level log to contain step metadata: #{log}" + else + # This is an operation-level log (e.g., "OperationName started") + refute log =~ "step=", + "Expected operation-level log to NOT contain step metadata: #{log}" + end + end + end) + end + end + + describe "when debug is disabled" do + operation type: :command, debug: false do + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def execute(context) do + {:ok, context} + end + end + end + + test "does not log debug messages", %{operation: operation} do + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:ok, _result} = operation.call(context) + + # Get captured logs + logs = Logger.get_logs() + + # Should not contain debug logs from the Logging extension + assert logs == [] + end + end + + describe "when logging is enabled" do + operation type: :command, logging: true do + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def execute(context) do + {:ok, context} + end + end + end + + test "logs at info level without context for successful operations", %{ + operation: operation + } do + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:ok, _result} = operation.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. 
prefix + operation_name = operation |> to_string() |> String.replace_prefix("Elixir.", "") + + # Should log operation-level events at info level (operation boundaries) + assert log_output =~ "#{operation_name} started" + assert log_output =~ "#{operation_name} succeeded in" + + # Should not contain context in successful operations + refute log_output =~ "context=" + end + end + + describe "when logging is enabled with specific steps" do + operation type: :command, logging: [steps: [:execute]] do + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def execute(context) do + {:ok, context} + end + end + end + + test "logs only specified steps", %{operation: operation} do + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:ok, _result} = operation.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. prefix + operation_name = operation |> to_string() |> String.replace_prefix("Elixir.", "") + + # Should log execute step + assert log_output =~ "#{operation_name}.execute started" + assert log_output =~ "#{operation_name}.execute succeeded in" + + # Should not log conform step (not in specified steps) + refute log_output =~ "#{operation_name}.conform started" + end + end + + describe "when logging is enabled with all steps" do + operation type: :command, logging: [steps: :all] do + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def execute(context) do + {:ok, context} + end + end + end + + test "logs all steps at info level without context", %{operation: operation} do + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:ok, _result} = operation.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. prefix + operation_name = operation |> to_string() |> String.replace_prefix("Elixir.", "") + + # Should log all step start/stop events + assert log_output =~ "#{operation_name}.conform started" + assert log_output =~ "#{operation_name}.conform succeeded in" + assert log_output =~ "#{operation_name}.execute started" + assert log_output =~ "#{operation_name}.execute succeeded in" + + # Should not contain context in successful operations + refute log_output =~ "context=" + end + end + + describe "when debug is enabled with custom identifier" do + operation type: :command, debug: [identifier: :my_app] do + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def execute(context) do + {:ok, context} + end + end + end + + test "logs debug messages with custom identifier", %{operation: operation} do + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:ok, _result} = operation.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. 
prefix + operation_name = operation |> to_string() |> String.replace_prefix("Elixir.", "") + + # Should log all step start/stop events + assert log_output =~ "#{operation_name}.conform started" + assert log_output =~ "#{operation_name}.conform succeeded in" + assert log_output =~ "#{operation_name}.execute started" + assert log_output =~ "#{operation_name}.execute succeeded in" + end + end + + describe "formatter colorization" do + test "supports colorization option" do + log_event = %{ + level: :debug, + msg: {:string, "Test message"}, + meta: %{operation: "TestOp", step: "test"} + } + + # Test with colorization enabled + config = %Drops.Logger.Formatter.Config{colorize: true, add_newline: false} + colored_output = Drops.Logger.Formatter.String.format(log_event, config) + + # Check if ANSI is enabled in test environment + if IO.ANSI.enabled?() do + # Cyan color for debug level + assert String.contains?(colored_output, "\e[36m[debug]\e[0m") + # Faint color for metadata + assert String.contains?(colored_output, "\e[2m") + else + # If ANSI is disabled, should fall back to plain format + refute String.contains?(colored_output, "\e[") + end + + # Test with colorization disabled + config = %Drops.Logger.Formatter.Config{colorize: false, add_newline: false} + plain_output = Drops.Logger.Formatter.String.format(log_event, config) + # No ANSI escape codes + refute String.contains?(plain_output, "\e[") + assert String.contains?(plain_output, "[debug]") + end + + test "uses different colors for different log levels" do + # Skip this test if ANSI is not enabled + if IO.ANSI.enabled?() do + test_cases = [ + # cyan + {:debug, "\e[36m"}, + # normal + {:info, "\e[0m"}, + # yellow + {:warning, "\e[33m"}, + # red + {:error, "\e[31m"} + ] + + for {level, expected_color} <- test_cases do + log_event = %{ + level: level, + msg: {:string, "Test message"}, + meta: %{operation: "TestOp", step: "test"} + } + + config = %Drops.Logger.Formatter.Config{colorize: true, add_newline: false} + output = Drops.Logger.Formatter.String.format(log_event, config) + + assert String.contains?(output, expected_color), + "Expected #{level} to contain color code #{expected_color}" + end + end + end + end + + describe "error logging" do + @tag :error_logging + test "logs error events at error level with context for failed operations" do + defmodule TestErrorOperation do + use Drops.Operations.Command, logging: true + + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def execute(_context) do + {:error, "something went wrong"} + end + end + end + + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:error, _reason} = TestErrorOperation.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. 
prefix + operation_name = + TestErrorOperation |> to_string() |> String.replace_prefix("Elixir.", "") + + # Should log operation start event (operation-level telemetry) + assert log_output =~ "#{operation_name} started" + + # Should log step failure at error level with context (step-level error telemetry) + assert log_output =~ "#{operation_name}.execute failed in" + assert log_output =~ "reason=" + assert log_output =~ "kind=:error" + assert log_output =~ "context=" + end + + @tag :error_logging + test "does not log error events for non-last steps by default" do + defmodule TestValidateErrorOperation do + use Drops.Operations.Command, logging: true + + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def validate(_context) do + {:error, "validation failed"} + end + + @impl true + def execute(_context) do + {:ok, "should not reach here"} + end + end + end + + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:error, _reason} = TestValidateErrorOperation.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. prefix + operation_name = + TestValidateErrorOperation |> to_string() |> String.replace_prefix("Elixir.", "") + + # Should log operation start event (operation-level telemetry) + assert log_output =~ "#{operation_name} started" + + # Should NOT log step failure for non-last steps (validate is not the last step) + refute log_output =~ "#{operation_name}.validate failed in" + end + + @tag :error_logging + test "logs error events for non-last steps when log_all_errors is true" do + defmodule TestValidateErrorOperationWithAllErrors do + use Drops.Operations.Command, logging: [log_all_errors: true] + + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def validate(_context) do + {:error, "validation failed"} + end + + @impl true + def execute(_context) do + {:ok, "should not reach here"} + end + end + end + + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:error, _reason} = TestValidateErrorOperationWithAllErrors.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. 
prefix + operation_name = + TestValidateErrorOperationWithAllErrors + |> to_string() + |> String.replace_prefix("Elixir.", "") + + # Should log operation start event (operation-level telemetry) + assert log_output =~ "#{operation_name} started" + + # Should log step failure at error level with context and reason (step-level error telemetry) + assert log_output =~ "#{operation_name}.validate failed in" + assert log_output =~ "reason=" + assert log_output =~ "kind=:error" + assert log_output =~ "context=" + end + + @tag :error_logging + test "logs error events in debug mode with context" do + defmodule TestDebugErrorOperation do + use Drops.Operations.Command, debug: true + + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def execute(_context) do + {:error, "debug mode error"} + end + end + end + + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:error, _reason} = TestDebugErrorOperation.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. prefix + operation_name = + TestDebugErrorOperation |> to_string() |> String.replace_prefix("Elixir.", "") + + # Should log step-level events in debug mode (all steps instrumented) + assert log_output =~ "#{operation_name}.execute started" + assert log_output =~ "#{operation_name}.execute failed in" + + # Should log error with context and reason + assert log_output =~ "reason=" + assert log_output =~ "kind=:error" + assert log_output =~ "context=" + end + + @tag :error_logging + test "logs operation failure at warning level in non-debug mode with limited metadata" do + defmodule TestOperationFailureLogging do + use Drops.Operations.Command, logging: true + + schema do + %{ + required(:name) => string(), + required(:age) => integer() + } + end + + steps do + @impl true + def execute(_context) do + {:error, "operation failed"} + end + end + end + + context = %{params: %{name: "test", age: 30}} + + # Clear logs before test + Logger.clear_logs() + + {:error, _reason} = TestOperationFailureLogging.call(context) + + # Get captured logs + logs = Logger.get_logs() + log_output = Enum.join(logs, "") + + # Get operation name without Elixir. 
prefix + operation_name = + TestOperationFailureLogging |> to_string() |> String.replace_prefix("Elixir.", "") + + # Should log operation start event + assert log_output =~ "#{operation_name} started" + + # Find the operation failure log line (not the step failure line) + operation_failure_line = + logs + |> Enum.find(fn log -> + log =~ "#{operation_name} failed in" and not (log =~ ".execute failed in") + end) + + assert operation_failure_line, "Expected to find operation failure log line" + + # Operation failure should include reason, duration_us, and kind but not context in non-debug mode + assert operation_failure_line =~ "reason=" + assert operation_failure_line =~ "duration_us=" + assert operation_failure_line =~ "kind=" + refute operation_failure_line =~ "context=" + + # Should also log step error (since it's the last step) - this one should have full metadata + step_failure_line = + logs + |> Enum.find(fn log -> + log =~ "#{operation_name}.execute failed in" + end) + + assert step_failure_line, "Expected to find step failure log line" + assert step_failure_line =~ "duration_us=" + assert step_failure_line =~ "kind=" + end + end + + describe "MetadataDumper protocol" do + alias Drops.Logger.MetadataDumper + + test "dumps basic types correctly" do + assert MetadataDumper.dump("test") == "\"test\"" + assert MetadataDumper.dump(42) == "42" + assert MetadataDumper.dump(:atom) == ":atom" + assert MetadataDumper.dump([1, 2, 3]) == "[1, 2, 3]" + assert MetadataDumper.dump([1, 2, 3, 4, 5]) == "[5 items]" + assert MetadataDumper.dump(%{a: 1, b: 2}) == "%{:a => 1, :b => 2}" + assert MetadataDumper.dump(%{a: 1, b: 2, c: 3, d: 4}) == "%{4 keys}" + end + + test "dumps nested structures recursively" do + # Nested list with maps + nested = [%{name: "Alice"}, %{name: "Bob"}] + result = MetadataDumper.dump(nested) + assert result == "[%{:name => \"Alice\"}, %{:name => \"Bob\"}]" + + # Map with nested structures + nested_map = %{users: ["Alice", "Bob"], count: 2} + result = MetadataDumper.dump(nested_map) + assert result == "%{:count => 2, :users => [\"Alice\", \"Bob\"]}" + + # Complex nested structure (should truncate large collections) + large_list = Enum.to_list(1..10) + complex = %{data: large_list, meta: %{total: 10}} + result = MetadataDumper.dump(complex) + assert result == "%{:data => [10 items], :meta => %{:total => 10}}" + end + + test "handles deeply nested structures with mixed types" do + # Test with various nested types including atoms, strings, numbers + deep_structure = %{ + user: %{ + profile: %{name: "Alice", age: 30}, + settings: [:notifications, :dark_mode] + } + } + + result = MetadataDumper.dump(deep_structure) + + expected = + "%{:user => %{:profile => %{:age => 30, :name => \"Alice\"}, :settings => [:notifications, :dark_mode]}}" + + assert result == expected + end + + if Code.ensure_loaded?(Ecto.Changeset) do + test "dumps Ecto.Changeset correctly" do + # Valid changeset with changes + changeset = %Ecto.Changeset{ + valid?: true, + changes: %{name: "John", age: 30}, + errors: [] + } + + result = MetadataDumper.dump(changeset) + assert result == "Ecto.Changeset(valid), 2 changes" + + # Invalid changeset with errors + changeset = %Ecto.Changeset{ + valid?: false, + changes: %{name: "John"}, + errors: [age: {"is required", []}] + } + + result = MetadataDumper.dump(changeset) + assert result == "Ecto.Changeset(invalid), 1 changes, 1 errors" + + # Empty changeset + changeset = %Ecto.Changeset{ + valid?: true, + changes: %{}, + errors: [] + } + + result = 
MetadataDumper.dump(changeset) + assert result == "Ecto.Changeset(valid)" + end + end + end +end