diff --git a/.rubocop.yml b/.rubocop.yml new file mode 100644 index 0000000..fe4f815 --- /dev/null +++ b/.rubocop.yml @@ -0,0 +1,26 @@ +inherit_from: .rubocop_todo.yml + +inherit_gem: + rubocop-rock: defaults.yml + +inherit_mode: + merge: + - Exclude + - AllowedNames + +AllCops: + TargetRubyVersion: "2.5" + +Style/MultilineMemoization: + EnforcedStyle: braces + +Naming/MethodParameterName: + AllowedNames: + - as + - m + - kw + - x + - y + +Metrics/AbcSize: + Max: 17 \ No newline at end of file diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml new file mode 100644 index 0000000..402beaf --- /dev/null +++ b/.rubocop_todo.yml @@ -0,0 +1,334 @@ +# This configuration was generated by +# `rubocop --auto-gen-config --auto-gen-only-exclude --exclude-limit 1000000` +# on 2021-05-31 20:38:43 -0300 using RuboCop version 0.83.0. +# The point is for the user to remove these configuration records +# one by one as the offenses are removed from the code base. +# Note that changes in the inspected code, or installation of new +# versions of RuboCop, may require this file to be generated again. + +# Offense count: 2 +# Cop supports --auto-correct. +# Configuration parameters: EnforcedStyle. +# SupportedStyles: squiggly, active_support, powerpack, unindent +Layout/HeredocIndentation: + Exclude: + - 'test/cli/datastore_test.rb' + +# Offense count: 254 +# Cop supports --auto-correct. +# Configuration parameters: AutoCorrect, Max, AllowHeredoc, AllowURI, URISchemes, IgnoreCopDirectives, IgnoredPatterns. +# URISchemes: http, https +Layout/LineLength: + Exclude: + - 'lib/syskit/log/cli/datastore.rb' + - 'lib/syskit/log/daru/create_aligned_frame.rb' + - 'lib/syskit/log/daru/frame_builder.rb' + - 'lib/syskit/log/daru/path_builder.rb' + - 'lib/syskit/log/datastore.rb' + - 'lib/syskit/log/datastore/dataset.rb' + - 'lib/syskit/log/datastore/normalize.rb' + - 'lib/syskit/log/exceptions.rb' + - 'lib/syskit/log/extensions/configuration.rb' + - 'lib/syskit/log/extensions/deployment_group.rb' + - 'lib/syskit/log/lazy_data_stream.rb' + - 'lib/syskit/log/plugin.rb' + - 'lib/syskit/log/shell_interface.rb' + - 'syskit-log.gemspec' + - 'test/cli/datastore_test.rb' + - 'test/datastore/dataset_test.rb' + - 'test/datastore/import_test.rb' + - 'test/datastore/normalize_test.rb' + - 'test/datastore_test.rb' + - 'test/deployment_group_test.rb' + - 'test/deployment_test.rb' + - 'test/dsl_test.rb' + - 'test/models/deployment_test.rb' + - 'test/models/replay_task_context_test.rb' + - 'test/replay_manager_test.rb' + - 'test/rock_stream_matcher_test.rb' + - 'test/streams_test.rb' + - 'test/task_streams_test.rb' + - 'test/test_helper.rb' + +# Offense count: 33 +# Configuration parameters: IgnoredMethods, Max. +Metrics/AbcSize: + Exclude: + - 'lib/syskit/log/cli/datastore.rb' + - 'lib/syskit/log/cli/replay.rb' + - 'lib/syskit/log/daru/create_aligned_frame.rb' + - 'lib/syskit/log/datastore.rb' + - 'lib/syskit/log/datastore/dataset.rb' + - 'lib/syskit/log/datastore/import.rb' + - 'lib/syskit/log/datastore/index_build.rb' + - 'lib/syskit/log/datastore/normalize.rb' + - 'lib/syskit/log/dsl.rb' + - 'lib/syskit/log/exceptions.rb' + - 'lib/syskit/log/replay_manager.rb' + - 'lib/syskit/log/roby_sql_index/index.rb' + - 'lib/syskit/log/streams.rb' + +# Offense count: 14 +# Configuration parameters: CountComments, Max, ExcludedMethods. 
+# ExcludedMethods: refine +Metrics/BlockLength: + Exclude: + - '**/*.gemspec' + - 'lib/syskit/log/cli/datastore.rb' + - 'lib/syskit/log/daru/create_aligned_frame.rb' + - 'lib/syskit/log/daru/frame_builder.rb' + - 'lib/syskit/log/datastore/dataset.rb' + - 'lib/syskit/log/datastore/normalize.rb' + - 'test/cli/datastore_test.rb' + - 'test/deployment_group_test.rb' + - 'test/deployment_test.rb' + - 'test/dsl_test.rb' + - 'test/replay_manager_test.rb' + - 'test/task_streams_test.rb' + +# Offense count: 1 +# Configuration parameters: CountComments, Max. +Metrics/ClassLength: + Exclude: + - 'test/datastore/dataset_test.rb' + +# Offense count: 8 +# Configuration parameters: IgnoredMethods, Max. +Metrics/CyclomaticComplexity: + Exclude: + - 'lib/syskit/log/cli/datastore.rb' + - 'lib/syskit/log/daru/create_aligned_frame.rb' + - 'lib/syskit/log/datastore/dataset.rb' + - 'lib/syskit/log/dsl.rb' + - 'lib/syskit/log/streams.rb' + +# Offense count: 4 +# Configuration parameters: Max, CountKeywordArgs. +Metrics/ParameterLists: + Exclude: + - 'lib/syskit/log/datastore/normalize.rb' + - 'lib/syskit/log/extensions/deployment_group.rb' + - 'lib/syskit/log/lazy_data_stream.rb' + +# Offense count: 8 +# Configuration parameters: IgnoredMethods, Max. +Metrics/PerceivedComplexity: + Exclude: + - 'lib/syskit/log/cli/datastore.rb' + - 'lib/syskit/log/daru/create_aligned_frame.rb' + - 'lib/syskit/log/datastore/dataset.rb' + - 'lib/syskit/log/dsl.rb' + - 'lib/syskit/log/streams.rb' + +# Offense count: 1 +# Configuration parameters: ExpectMatchingDefinition, Regex, IgnoreExecutableScripts, AllowedAcronyms. +# AllowedAcronyms: CLI, DSL, ACL, API, ASCII, CPU, CSS, DNS, EOF, GUID, HTML, HTTP, HTTPS, ID, IP, JSON, LHS, QPS, RAM, RHS, RPC, SLA, SMTP, SQL, SSH, TCP, TLS, TTL, UDP, UI, UID, UUID, URI, URL, UTF8, VM, XML, XMPP, XSRF, XSS +Naming/FileName: + Exclude: + - 'lib/roby-syskit-log.rb' + +# Offense count: 4 +# Configuration parameters: ForbiddenDelimiters. +# ForbiddenDelimiters: (?-mix:(^|\s)(EO[A-Z]{1}|END)(\s|$)) +Naming/HeredocDelimiterNaming: + Exclude: + - 'test/cli/datastore_test.rb' + +# Offense count: 2 +# Configuration parameters: MinNameLength, AllowNamesEndingInNumbers, AllowedNames, ForbiddenNames. +# AllowedNames: io, id, to, by, on, in, at, ip, db, os, pp +Naming/MethodParameterName: + Exclude: + - 'lib/syskit/log/rock_stream_matcher.rb' + - 'lib/syskit/log/streams.rb' + +# Offense count: 3 +# Configuration parameters: EnforcedStyle. +# SupportedStyles: snake_case, normalcase, non_integer +Naming/VariableNumber: + Exclude: + - 'test/datastore/import_test.rb' + +# Offense count: 3 +# Cop supports --auto-correct. +Security/YAMLLoad: + Exclude: + - 'test/datastore/dataset_test.rb' + +# Offense count: 39 +# Cop supports --auto-correct. +# Configuration parameters: AutoCorrect, EnforcedStyle. 
+# SupportedStyles: nested, compact +Style/ClassAndModuleChildren: + Exclude: + - 'lib/syskit/log/cli/datastore.rb' + - 'lib/syskit/log/cli/replay.rb' + - 'lib/syskit/log/data_replay_task.rb' + - 'lib/syskit/log/datastore.rb' + - 'lib/syskit/log/datastore/dataset.rb' + - 'lib/syskit/log/datastore/import.rb' + - 'lib/syskit/log/datastore/index_build.rb' + - 'lib/syskit/log/datastore/normalize.rb' + - 'lib/syskit/log/deployment.rb' + - 'lib/syskit/log/exceptions.rb' + - 'lib/syskit/log/extensions.rb' + - 'lib/syskit/log/extensions/configuration.rb' + - 'lib/syskit/log/extensions/deployment_group.rb' + - 'lib/syskit/log/extensions/execution_engine.rb' + - 'lib/syskit/log/extensions/instance_requirements.rb' + - 'lib/syskit/log/lazy_data_stream.rb' + - 'lib/syskit/log/models/deployment.rb' + - 'lib/syskit/log/plugin.rb' + - 'lib/syskit/log/replay_manager.rb' + - 'lib/syskit/log/rock_stream_matcher.rb' + - 'lib/syskit/log/shell_interface.rb' + - 'lib/syskit/log/streams.rb' + - 'lib/syskit/log/task_streams.rb' + - 'test/cli/datastore_test.rb' + - 'test/datastore/dataset_test.rb' + - 'test/datastore/import_test.rb' + - 'test/datastore/index_build_test.rb' + - 'test/datastore/normalize_test.rb' + - 'test/datastore_test.rb' + - 'test/deployment_group_test.rb' + - 'test/deployment_test.rb' + - 'test/models/deployment_test.rb' + - 'test/models/replay_task_context_test.rb' + - 'test/replay_manager_test.rb' + - 'test/rock_stream_matcher_test.rb' + - 'test/shell_interface_test.rb' + - 'test/streams_test.rb' + - 'test/task_streams_test.rb' + - 'test/test_helper.rb' + +# Offense count: 1 +# Cop supports --auto-correct. +Style/ColonMethodCall: + Exclude: + - 'lib/syskit/log/dsl/periods.rb' + +# Offense count: 11 +Style/Documentation: + Exclude: + - 'spec/**/*' + - 'test/**/*' + - 'lib/syskit/log/cli/datastore.rb' + - 'lib/syskit/log/cli/replay.rb' + - 'lib/syskit/log/daru/create_aligned_frame.rb' + - 'lib/syskit/log/datastore/import.rb' + - 'lib/syskit/log/datastore/index_build.rb' + - 'lib/syskit/log/datastore/normalize.rb' + - 'lib/syskit/log/plugin.rb' + - 'lib/syskit/scripts/datastore.rb' + - 'lib/syskit/scripts/replay.rb' + +# Offense count: 1 +# Cop supports --auto-correct. +# Configuration parameters: EnforcedStyle. +# SupportedStyles: empty, nil, both +Style/EmptyElse: + Exclude: + - 'lib/syskit/log/daru/frame_builder.rb' + +# Offense count: 1 +# Cop supports --auto-correct. +Style/Encoding: + Exclude: + - 'syskit-log.gemspec' + +# Offense count: 1 +# Cop supports --auto-correct. +Style/ExpandPathArguments: + Exclude: + - 'syskit-log.gemspec' + +# Offense count: 7 +# Cop supports --auto-correct. +# Configuration parameters: EnforcedStyle. +# SupportedStyles: format, sprintf, percent +Style/FormatString: + Exclude: + - 'lib/syskit/log/cli/datastore.rb' + - 'lib/syskit/log/dsl.rb' + +# Offense count: 4 +# Configuration parameters: EnforcedStyle. +# SupportedStyles: annotated, template, unannotated +Style/FormatStringToken: + Exclude: + - 'lib/syskit/log/cli/datastore.rb' + +# Offense count: 7 +# Configuration parameters: MinBodyLength. +Style/GuardClause: + Exclude: + - 'lib/syskit/log/cli/datastore.rb' + - 'lib/syskit/log/daru/path_builder.rb' + - 'lib/syskit/log/datastore.rb' + - 'lib/syskit/log/exceptions.rb' + - 'lib/syskit/log/models/deployment.rb' + +# Offense count: 5 +# Cop supports --auto-correct. +Style/IfUnlessModifier: + Exclude: + - 'lib/syskit/log/daru/frame_builder.rb' + - 'lib/syskit/log/datastore.rb' + - 'lib/syskit/log/dsl.rb' + +# Offense count: 1 +# Cop supports --auto-correct. 
+# Configuration parameters: IgnoredMethods. +Style/MethodCallWithoutArgsParentheses: + Exclude: + - 'test/replay_manager_test.rb' + +# Offense count: 1 +Style/MissingRespondToMissing: + Exclude: + - 'lib/syskit/log/roby_sql_index/accessors.rb' + +# Offense count: 3 +# Cop supports --auto-correct. +# Configuration parameters: MinDigits, Strict. +Style/NumericLiterals: + Exclude: + - 'test/cli/datastore_test.rb' + - 'test/datastore/dataset_test.rb' + +# Offense count: 1 +# Cop supports --auto-correct. +Style/RedundantSelf: + Exclude: + - 'lib/syskit/log/task_streams.rb' + +# Offense count: 1 +# Cop supports --auto-correct. +# Configuration parameters: EnforcedStyle, AllowInnerSlashes. +# SupportedStyles: slashes, percent_r, mixed +Style/RegexpLiteral: + Exclude: + - 'test/streams_test.rb' + +# Offense count: 1 +# Cop supports --auto-correct. +# Configuration parameters: AllowAsExpressionSeparator. +Style/Semicolon: + Exclude: + - 'test/datastore/dataset_test.rb' + +# Offense count: 2 +# Cop supports --auto-correct. +# Configuration parameters: EnforcedStyle. +# SupportedStyles: single_quotes, double_quotes +Style/StringLiteralsInInterpolation: + Exclude: + - 'lib/syskit/log/cli/datastore.rb' + - 'lib/syskit/log/datastore.rb' + +# Offense count: 1 +# Cop supports --auto-correct. +# Configuration parameters: AllowNamedUnderscoreVariables. +Style/TrailingUnderscoreVariable: + Exclude: + - 'test/cli/datastore_test.rb' diff --git a/Gemfile b/Gemfile index b2674f2..f9aff80 100644 --- a/Gemfile +++ b/Gemfile @@ -1,4 +1,6 @@ -source 'https://rubygems.org' +# frozen_string_literal: true + +source "https://rubygems.org" # Specify your gem's dependencies in syskit-pocolog.gemspec gemspec diff --git a/Rakefile b/Rakefile index 00e4626..6d7f80a 100644 --- a/Rakefile +++ b/Rakefile @@ -1,11 +1,13 @@ +# frozen_string_literal: true + require "bundler/gem_tasks" require "rake/testtask" Rake::TestTask.new(:test) do |t| - t.libs << "test" - t.libs << "lib" - t.test_files = FileList['test/**/*_test.rb'] - t.warning = false + t.libs << "test" + t.libs << "lib" + t.test_files = FileList["test/**/*_test.rb"] + t.warning = false end task :default diff --git a/lib/roby-syskit-log.rb b/lib/roby-syskit-log.rb index b2a9f81..9a1e275 100644 --- a/lib/roby-syskit-log.rb +++ b/lib/roby-syskit-log.rb @@ -1,2 +1,4 @@ -require 'syskit/log' -Roby.app.register_plugin 'syskit-pocolog', Syskit::Log::Plugin +# frozen_string_literal: true + +require "syskit/log" +Roby.app.register_plugin "syskit-pocolog", Syskit::Log::Plugin diff --git a/lib/syskit/log/cli/datastore.rb b/lib/syskit/log/cli/datastore.rb index abd5fbb..3136477 100644 --- a/lib/syskit/log/cli/datastore.rb +++ b/lib/syskit/log/cli/datastore.rb @@ -1,19 +1,21 @@ -require 'roby' -require 'syskit' -require 'thor' - -require 'syskit/log' -require 'syskit/log/datastore/normalize' -require 'syskit/log/datastore/import' -require 'syskit/log/datastore/index_build' -require 'tty-progressbar' -require 'pocolog/cli/null_reporter' -require 'pocolog/cli/tty_reporter' +# frozen_string_literal: true + +require "roby" +require "syskit" +require "thor" + +require "syskit/log" +require "syskit/log/datastore/normalize" +require "syskit/log/datastore/import" +require "syskit/log/datastore/index_build" +require "tty-progressbar" +require "pocolog/cli/null_reporter" +require "pocolog/cli/tty_reporter" module Syskit::Log module CLI class Datastore < Thor - namespace 'datastore' + namespace "datastore" class_option :silent, type: :boolean, default: false class_option :colors, type: 
:boolean, default: TTY::Color.color? @@ -22,7 +24,7 @@ class Datastore < Thor no_commands do def create_reporter( - format = '', + format = "", progress: options[:progress], colors: options[:colors], silent: options[:silent], @@ -42,11 +44,11 @@ def create_pastel end def datastore_path - unless (path = options[:store] || ENV['SYSKIT_LOG_STORE']) + unless (path = options[:store] || ENV["SYSKIT_LOG_STORE"]) raise ArgumentError, - 'you must provide a path to a datastore either '\ - 'with the --store option or through the '\ - 'SYSKIT_LOG_STORE environment variable' + "you must provide a path to a datastore either "\ + "with the --store option or through the "\ + "SYSKIT_LOG_STORE environment variable" end Pathname.new(path) end @@ -61,7 +63,7 @@ def create_store def show_dataset(pastel, store, dataset, long_digest: false) description = dataset.metadata_fetch_all( - 'description', '' + "description", "" ) digest = store.short_digest(dataset) unless long_digest format = "% #{digest.size}s" @@ -70,7 +72,7 @@ def show_dataset(pastel, store, dataset, long_digest: false) end metadata = dataset.metadata metadata.each do |k, v| - next if k == 'description' + next if k == "description" if v.size == 1 puts " #{k}: #{v.first}" @@ -84,15 +86,15 @@ def show_dataset(pastel, store, dataset, long_digest: false) end def format_date(time) - time.strftime('%Y-%m-%d') + time.strftime("%Y-%m-%d") end def format_time(time) - time.strftime('%H:%M:%S.%6N %z') + time.strftime("%H:%M:%S.%6N %z") end def format_duration(time) - '%4i:%02i:%02i.%06i' % [ + "%4i:%02i:%02i.%06i" % [ Integer(time / 3600), Integer((time % 3600) / 60), Integer(time % 60), @@ -112,10 +114,10 @@ def show_task_objects(objects, name_field_size) end objects.each_with_index do |(name, stream), i| if stream.empty? - puts format % ["#{name}:", 'empty'] + puts format % ["#{name}:", "empty"] else interval_lg = stream.interval_lg.map do |t| - format_date(t) + ' ' + format_time(t) + format_date(t) + " " + format_time(t) end duration_lg = format_duration(stream.duration_lg) puts format % [ @@ -127,7 +129,7 @@ def show_task_objects(objects, name_field_size) end end - def show_dataset_pocolog(pastel, store, dataset) + def show_dataset_pocolog(dataset) tasks = dataset.each_task( load_models: false, skip_tasks_without_models: false ).to_a @@ -143,11 +145,11 @@ def show_dataset_pocolog(pastel, store, dataset) properties.map { |name, _| name.size } ).max unless ports.empty? - puts ' Ports:' + puts " Ports:" show_task_objects(ports, name_field_size) end unless properties.empty? 
- puts ' Properties:' + puts " Properties:" show_task_objects(properties, name_field_size) end end @@ -158,7 +160,7 @@ def show_dataset_pocolog(pastel, store, dataset) # Parse a metadata option such as --set some=value some-other=value def parse_metadata_option(hash) hash.each_with_object({}) do |arg, metadata| - key, value = arg.split('=') + key, value = arg.split("=") unless value raise ArgumentError, "metadata setters need to be specified as "\ @@ -325,23 +327,23 @@ def resolve_streams(datasets, *query) end end - desc 'normalize PATH [--out OUTPUT]', 'normalizes a data stream into a format that is suitable for the other log management commands to work' - method_option :out, desc: 'output directory (defaults to a normalized/ folder under the source folder)', - default: 'normalized' - method_option :override, desc: 'whether existing files in the output directory should be overriden', - type: :boolean, default: false + desc "normalize PATH [--out OUTPUT]", "normalizes a data stream into a format that is suitable for the other log management commands to work" + method_option :out, desc: "output directory (defaults to a normalized/ folder under the source folder)", + default: "normalized" + method_option :override, desc: "whether existing files in the output directory should be overriden", + type: :boolean, default: false def normalize(path) path = Pathname.new(path).realpath - output_path = Pathname.new(options['out']).expand_path(path) + output_path = Pathname.new(options["out"]).expand_path(path) output_path.mkpath paths = Syskit::Log.logfiles_in_dir(path) - bytes_total = paths.inject(0) do |total, path| - total + path.size + bytes_total = paths.inject(0) do |total, logfile_path| + total + logfile_path.size end reporter = create_reporter( - '|:bar| :current_byte/:total_byte :eta (:byte_rate/s)', + "|:bar| :current_byte/:total_byte :eta (:byte_rate/s)", total: bytes_total ) @@ -351,20 +353,20 @@ def normalize(path) end end - desc 'import PATH [DESCRIPTION]', - 'normalize and import a raw dataset into a syskit-pocolog datastore' - method_option :auto, desc: 'import all datasets under PATH', + desc "import PATH [DESCRIPTION]", + "normalize and import a raw dataset into a syskit-pocolog datastore" + method_option :auto, desc: "import all datasets under PATH", type: :boolean, default: false - method_option :force, desc: 'overwrite existing datasets', + method_option :force, desc: "overwrite existing datasets", type: :boolean, default: false - method_option :min_duration, desc: 'skip datasets whose duration is lower '\ - 'than this (in seconds)', + method_option :min_duration, desc: "skip datasets whose duration is lower "\ + "than this (in seconds)", type: :numeric, default: 60 - method_option :tags, desc: 'tags to be added to the dataset', + method_option :tags, desc: "tags to be added to the dataset", type: :array, default: [] - method_option :metadata, desc: 'metadata values as key=value pairs', + method_option :metadata, desc: "metadata values as key=value pairs", type: :array, default: [] - method_option :merge, desc: 'create a single dataset from multiple log dirs', + method_option :merge, desc: "create a single dataset from multiple log dirs", type: :boolean, default: false def import(root_path, description = nil) root_path = Pathname.new(root_path).realpath @@ -373,8 +375,8 @@ def import(root_path, description = nil) root_path.find do |p| is_raw_dataset = p.directory? && - Pathname.enum_for(:glob, p + '*-events.log').any? { true } && - Pathname.enum_for(:glob, p + '*.0.log').any? 
{ true } + Pathname.enum_for(:glob, p + "*-events.log").any? { true } && + Pathname.enum_for(:glob, p + "*.0.log").any? { true } if is_raw_dataset paths << p Find.prune @@ -399,9 +401,9 @@ def import(root_path, description = nil) end end - desc 'index [DATASETS]', 'refreshes or rebuilds (with --force) the datastore indexes' - method_option :force, desc: 'force rebuilding even indexes that look up-to-date', - type: :boolean, default: false + desc "index [DATASETS]", "refreshes or rebuilds (with --force) the datastore indexes" + method_option :force, desc: "force rebuilding even indexes that look up-to-date", + type: :boolean, default: false def index(*datasets) store = open_store datasets = resolve_datasets(store, *datasets) @@ -414,9 +416,9 @@ def index(*datasets) end end - desc 'path [QUERY]', 'list path to datasets' + desc "path [QUERY]", "list path to datasets" method_option :long_digests, - desc: 'display digests in full, instead of shortening them', + desc: "display digests in full, instead of shortening them", type: :boolean, default: false def path(*query) store = open_store @@ -433,17 +435,17 @@ def path(*query) end end - desc 'list [QUERY]', 'list datasets and their information' - method_option :digest, desc: 'only show the digest and no other information (for scripting)', - type: :boolean, default: false - method_option :long_digests, desc: 'display digests in full form, instead of shortening them', - type: :boolean, default: false - method_option :pocolog, desc: 'show detailed information about the pocolog streams in the dataset(s)', - type: :boolean, default: false - method_option :roby, desc: 'show detailed information about the Roby log in the dataset(s)', - type: :boolean, default: false - method_option :all, desc: 'show all available information (implies --pocolog and --roby)', - aliases: 'a', type: :boolean, default: false + desc "list [QUERY]", "list datasets and their information" + method_option :digest, desc: "only show the digest and no other information (for scripting)", + type: :boolean, default: false + method_option :long_digests, desc: "display digests in full form, instead of shortening them", + type: :boolean, default: false + method_option :pocolog, desc: "show detailed information about the pocolog streams in the dataset(s)", + type: :boolean, default: false + method_option :roby, desc: "show detailed information about the Roby log in the dataset(s)", + type: :boolean, default: false + method_option :all, desc: "show all available information (implies --pocolog and --roby)", + aliases: "a", type: :boolean, default: false def list(*query) store = open_store datasets = resolve_datasets(store, *query) @@ -462,20 +464,20 @@ def list(*query) show_dataset_roby(pastel, store, dataset) end if options[:all] || options[:pocolog] - show_dataset_pocolog(pastel, store, dataset) + show_dataset_pocolog(dataset) end end end end - desc 'metadata [QUERY] [--set=KEY=VALUE KEY=VALUE|--get=KEY]', - 'sets or gets metadata values for a dataset or datasets' - method_option :set, desc: 'the key=value associations to set', - type: :array - method_option :get, desc: 'the keys to get', - type: :array, lazy_default: [] - method_option :long_digest, desc: 'display digests in full form, instead of shortening them', - type: :boolean, default: false + desc "metadata [QUERY] [--set=KEY=VALUE KEY=VALUE|--get=KEY]", + "sets or gets metadata values for a dataset or datasets" + method_option :set, desc: "the key=value associations to set", + type: :array + method_option :get, desc: "the keys to 
get", + type: :array, lazy_default: [] + method_option :long_digest, desc: "display digests in full form, instead of shortening them", + type: :boolean, default: false def metadata(*query) if !options[:get] && !options[:set] raise ArgumentError, "provide either --get or --set" @@ -503,19 +505,19 @@ def metadata(*query) end elsif options[:get].empty? datasets.each do |set| - metadata = set.metadata.map { |k, v| [k, v.to_a.sort.join(",")] }. - sort_by(&:first). - map { |k, v| "#{k}=#{v}" }. - join(" ") + metadata = set.metadata.map { |k, v| [k, v.to_a.sort.join(",")] } + .sort_by(&:first) + .map { |k, v| "#{k}=#{v}" } + .join(" ") puts "#{digest_to_s[set]} #{metadata}" end else datasets.each do |set| - metadata = options[:get].map do |k, v| + metadata = options[:get].map do |k, _| [k, set.metadata_fetch_all(k, "")] end - metadata = metadata.map { |k, v| "#{k}=#{v.to_a.sort.join(",")}" }. - join(" ") + metadata = metadata.map { |k, v| "#{k}=#{v.to_a.sort.join(",")}" } + .join(" ") puts "#{digest_to_s[set]} #{metadata}" end end @@ -558,7 +560,7 @@ def find_streams(*query) streams = resolve_streams(datasets, *query) if streams.empty? - puts 'no streams match the query' + puts "no streams match the query" return end diff --git a/lib/syskit/log/cli/replay.rb b/lib/syskit/log/cli/replay.rb index 34301b7..4d62383 100644 --- a/lib/syskit/log/cli/replay.rb +++ b/lib/syskit/log/cli/replay.rb @@ -1,8 +1,10 @@ -require 'roby' -require 'syskit' -require 'roby/cli/base' +# frozen_string_literal: true -require 'syskit/log' +require "roby" +require "syskit" +require "roby/cli/base" + +require "syskit/log" module Syskit::Log class << self @@ -17,16 +19,16 @@ class Replay < Roby::CLI::Base no_commands do def setup_roby_for_running(run_controllers: false) super - app.using 'syskit' - app.using 'syskit-log' + app.using "syskit" + app.using "syskit-log" end end - desc 'start [SCRIPTS] [DATASETS]', - 'replays a data replay script. If no script is given, allows '\ - 'to replay streams using profile definitions' - option :robot, aliases: 'r', type: :string, - desc: 'the robot configuration to load' + desc "start [SCRIPTS] [DATASETS]", + "replays a data replay script. If no script is given, allows "\ + "to replay streams using profile definitions" + option :robot, aliases: "r", type: :string, + desc: "the robot configuration to load" def start(*path) paths = path.map { |p| Pathname.new(p) } if (non_existent = paths.find { |p| !p.exist? 
}) @@ -36,7 +38,7 @@ def start(*path) setup_common setup_roby_for_running(run_controllers: true) script_paths, dataset_paths = - paths.partition { |p| p.extname == '.rb' } + paths.partition { |p| p.extname == ".rb" } app.setup begin @@ -58,7 +60,6 @@ def start(*path) script_paths.each { |p| require p.to_s } end app.run - ensure Syskit::Log.streams = nil app.cleanup diff --git a/lib/syskit/log/daru.rb b/lib/syskit/log/daru.rb index 23caf74..de4a4d9 100644 --- a/lib/syskit/log/daru.rb +++ b/lib/syskit/log/daru.rb @@ -3,4 +3,4 @@ require "daru" require "syskit/log/daru/create_aligned_frame" require "syskit/log/daru/frame_builder" -require "syskit/log/daru/path_builder" \ No newline at end of file +require "syskit/log/daru/path_builder" diff --git a/lib/syskit/log/daru/create_aligned_frame.rb b/lib/syskit/log/daru/create_aligned_frame.rb index 93d6032..9f23ec0 100644 --- a/lib/syskit/log/daru/create_aligned_frame.rb +++ b/lib/syskit/log/daru/create_aligned_frame.rb @@ -4,7 +4,7 @@ module Syskit module Log module Daru def self.build_aligned_vectors(center_time, builders, joint_stream, size, - timeout: nil) + timeout: nil) current_row = Array.new(builders.size) initialized = false diff --git a/lib/syskit/log/data_replay_task.rb b/lib/syskit/log/data_replay_task.rb index cfaef20..dfe4520 100644 --- a/lib/syskit/log/data_replay_task.rb +++ b/lib/syskit/log/data_replay_task.rb @@ -1,6 +1,7 @@ +# frozen_string_literal: true + module Syskit::Log # A single task in the replay environment class DataReplayTask < Syskit::RubyTaskContext end end - diff --git a/lib/syskit/log/datastore.rb b/lib/syskit/log/datastore.rb index 1508b8b..c51ee10 100644 --- a/lib/syskit/log/datastore.rb +++ b/lib/syskit/log/datastore.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit::Log # Functionality related to building and using data stores # @@ -79,7 +81,8 @@ def has?(digest) # Enumerate the store's datasets def each_dataset_digest - return enum_for(__method__) if !block_given? + return enum_for(__method__) unless block_given? + core_path = (datastore_path + "core") core_path.each_entry do |dataset_path| if Dataset.dataset?(core_path + dataset_path) @@ -90,7 +93,8 @@ def each_dataset_digest # Enumerate the store's datasets def each_dataset - return enum_for(__method__) if !block_given? + return enum_for(__method__) unless block_given? 
+ each_dataset_digest do |digest| yield(get(digest)) end @@ -195,7 +199,6 @@ def in_incoming(keep: false) end end end - end end diff --git a/lib/syskit/log/datastore/dataset.rb b/lib/syskit/log/datastore/dataset.rb index ae39945..da3b31b 100644 --- a/lib/syskit/log/datastore/dataset.rb +++ b/lib/syskit/log/datastore/dataset.rb @@ -34,12 +34,12 @@ class NoValue < ArgumentError; end # The basename of the file that contains identifying metadata # # @see write_identifying_metadata - BASENAME_IDENTITY_METADATA = 'syskit-dataset.yml' + BASENAME_IDENTITY_METADATA = "syskit-dataset.yml" # The basename of the file that contains identifying metadata # # @see write_identifying_metadata - BASENAME_METADATA = 'syskit-metadata.yml' + BASENAME_METADATA = "syskit-metadata.yml" IdentityEntry = Struct.new :path, :size, :sha2 @@ -116,7 +116,7 @@ def compute_dataset_identity_from_files each_important_file.map do |path| sha2 = path.open do |io| # Pocolog files do not hash their prologue - if path.dirname.basename.to_s == 'pocolog' + if path.dirname.basename.to_s == "pocolog" io.seek(Pocolog::Format::Current::PROLOGUE_SIZE) end compute_file_sha2(io) @@ -156,7 +156,7 @@ def self.validate_encoded_sha2(sha2) raise InvalidDigest, "#{sha2} does not look like a valid SHA2 digest encoded "\ "with #{DIGEST_ENCODING_METHOD}. "\ - 'Expected characters in 0-9a-zA-Z+/' + "Expected characters in 0-9a-zA-Z+/" end sha2 end @@ -170,13 +170,13 @@ def self.validate_encoded_sha2(sha2) def read_dataset_identity_from_metadata_file metadata_path = (dataset_path + BASENAME_IDENTITY_METADATA) identity_metadata = (YAML.safe_load(metadata_path.read) || {}) - if identity_metadata['layout_version'] != LAYOUT_VERSION + if identity_metadata["layout_version"] != LAYOUT_VERSION raise InvalidLayoutVersion, "layout version in #{dataset_path} is "\ "#{identity_metadata['layout_version']}, "\ "expected #{LAYOUT_VERSION}" end - digests = identity_metadata['identity'] + digests = identity_metadata["identity"] if !digests raise InvalidIdentityMetadata, "no 'identity' field in #{metadata_path}" @@ -185,36 +185,36 @@ def read_dataset_identity_from_metadata_file "the 'identity' field in #{metadata_path} is not an array" end digests = digests.map do |path_info| - if !path_info['path'].respond_to?(:to_str) + if !path_info["path"].respond_to?(:to_str) raise InvalidIdentityMetadata, "found non-string value for field 'path' "\ "in #{metadata_path}" - elsif !path_info['size'].kind_of?(Integer) + elsif !path_info["size"].kind_of?(Integer) raise InvalidIdentityMetadata, "found non-integral value for field 'size' "\ "in #{metadata_path}" - elsif !path_info['sha2'].respond_to?(:to_str) + elsif !path_info["sha2"].respond_to?(:to_str) raise InvalidIdentityMetadata, "found non-string value for field 'sha2' "\ "in #{metadata_path}" end begin - self.class.validate_encoded_sha2(path_info['sha2']) + self.class.validate_encoded_sha2(path_info["sha2"]) rescue InvalidDigest => e raise InvalidIdentityMetadata, "value of field 'sha2' in #{metadata_path} does "\ "not look like a valid SHA2 digest: #{e.message}" end - path = Pathname.new(path_info['path'].to_str) - if path.each_filename.find { |p| p == '..' } + path = Pathname.new(path_info["path"].to_str) + if path.each_filename.find { |p| p == ".." 
} raise InvalidIdentityMetadata, "found path #{path} not within the dataset" end IdentityEntry.new( - dataset_path + path, Integer(path_info['size']), - path_info['sha2'].to_str + dataset_path + path, Integer(path_info["size"]), + path_info["sha2"].to_str ) end digests @@ -273,12 +273,12 @@ def validate_identity_metadata unless (metadata_entry = precomputed.delete(entry.path)) raise InvalidIdentityMetadata, "#{entry.path} is present on disk and "\ - 'missing in the metadata file' + "missing in the metadata file" end if metadata_entry != entry raise InvalidIdentityMetadata, - 'metadata mismatch between metadata file '\ + "metadata mismatch between metadata file "\ "(#{metadata_entry.to_h}) and state on-disk "\ "(#{entry.to_h})" end @@ -288,7 +288,7 @@ def validate_identity_metadata raise InvalidIdentityMetadata, "#{precomputed.size} files are listed in the dataset "\ - 'identity metadata, but are not present on disk: '\ + "identity metadata, but are not present on disk: "\ "#{precomputed.keys.map(&:to_s).join(', ')}" end @@ -313,7 +313,7 @@ def weak_validate_identity_metadata( unless (actual_size = important_files.delete(entry.path)) raise InvalidIdentityMetadata, "file #{entry.path} is listed in the identity metadata, "\ - 'but is not present on disk' + "but is not present on disk" end if actual_size != entry.size @@ -328,7 +328,7 @@ def weak_validate_identity_metadata( raise InvalidIdentityMetadata, "#{important_files.size} important files are present on disk "\ - 'but are not listed in the identity metadata: '\ + "but are not listed in the identity metadata: "\ "#{important_files.keys.sort.join(', ')}" end @@ -342,12 +342,12 @@ def write_dataset_identity_to_metadata_file( dataset_digest = compute_dataset_digest(dataset_identity) dataset_identity = dataset_identity.map do |entry| relative_path = entry.path.relative_path_from(dataset_path) - if relative_path.each_filename.find { |p| p == '..' } + if relative_path.each_filename.find { |p| p == ".." } raise InvalidIdentityMetadata, "found path #{entry.path} not within the dataset" end size = begin Integer(entry.size) - rescue ArgumentError => e + rescue ArgumentError raise InvalidIdentityMetadata, "#{entry.size} is not a valid file size" end @@ -361,17 +361,17 @@ def write_dataset_identity_to_metadata_file( "#{entry.sha2} is not a valid digest" end - Hash['path' => relative_path.to_s, - 'sha2' => sha2, - 'size' => size] + Hash["path" => relative_path.to_s, + "sha2" => sha2, + "size" => size] end metadata = Hash[ - 'layout_version' => LAYOUT_VERSION, - 'sha2' => dataset_digest, - 'identity' => dataset_identity + "layout_version" => LAYOUT_VERSION, + "sha2" => dataset_digest, + "identity" => dataset_identity ] - (dataset_path + BASENAME_IDENTITY_METADATA).open('w') do |io| + (dataset_path + BASENAME_IDENTITY_METADATA).open("w") do |io| YAML.dump metadata, io end @digest = dataset_digest @@ -408,7 +408,7 @@ def metadata_fetch(key, *default_value) [default_value] else raise ArgumentError, - 'expected zero or one default value, '\ + "expected zero or one default value, "\ "got #{default_value.size}" end @@ -470,7 +470,7 @@ def metadata_write_to_file dumped = metadata.inject({}) do |h, (k, v)| h.merge!(k => v.to_a) end - (dataset_path + BASENAME_METADATA).open('w') do |io| + (dataset_path + BASENAME_METADATA).open("w") do |io| YAML.dump(dumped, io) end end @@ -478,7 +478,7 @@ def metadata_write_to_file def each_pocolog_path return enum_for(__method__) unless block_given? 
- Pathname.glob(dataset_path + 'pocolog' + '*.log') do |logfile_path| + Pathname.glob(dataset_path + "pocolog" + "*.log") do |logfile_path| yield(logfile_path) end end @@ -490,7 +490,7 @@ def each_pocolog_path def each_pocolog_stream return enum_for(__method__) unless block_given? - pocolog_index_dir = (cache_path + 'pocolog').to_s + pocolog_index_dir = (cache_path + "pocolog").to_s each_pocolog_path do |logfile_path| logfile = Pocolog::Logfiles.open( logfile_path, index_dir: pocolog_index_dir, silent: true @@ -503,8 +503,8 @@ def each_pocolog_stream # # Load lazy data stream information from disk def read_lazy_data_streams - pocolog_index_dir = (cache_path + 'pocolog').to_s - Pathname.enum_for(:glob, dataset_path + 'pocolog' + '*.log').map do |logfile_path| + pocolog_index_dir = (cache_path + "pocolog").to_s + Pathname.enum_for(:glob, dataset_path + "pocolog" + "*.log").map do |logfile_path| index_path = Pocolog::Logfiles.default_index_filename( logfile_path.to_s, index_dir: pocolog_index_dir.to_s ) diff --git a/lib/syskit/log/datastore/import.rb b/lib/syskit/log/datastore/import.rb index 4d18229..27aa149 100644 --- a/lib/syskit/log/datastore/import.rb +++ b/lib/syskit/log/datastore/import.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true -require 'syskit/log/datastore/normalize' -require 'pocolog/cli/tty_reporter' +require "syskit/log/datastore/normalize" +require "pocolog/cli/tty_reporter" module Syskit::Log class Datastore @@ -13,7 +13,7 @@ def self.import(datastore, dataset_path, silent: false, force: false) class Import class DatasetAlreadyExists < RuntimeError; end - BASENAME_IMPORT_TAG = '.syskit-pocolog-import' + BASENAME_IMPORT_TAG = ".syskit-pocolog-import" attr_reader :datastore def initialize(datastore) @@ -35,7 +35,7 @@ def prepare_import(dir_path) end ignored.concat(roby_files.map { |p| p.sub(/-events.log$/, "-index.log") }) - all_files = Pathname.enum_for(:glob, dir_path + '*').to_a + all_files = Pathname.enum_for(:glob, dir_path + "*").to_a remaining = (all_files - pocolog_files - text_files - roby_files - ignored) [pocolog_files, text_files, roby_files, remaining] @@ -242,7 +242,7 @@ def normalize_pocolog_files( def copy_text_files(output_dir, files) return if files.empty? - out_text_dir = (output_dir + 'text') + out_text_dir = (output_dir + "text") out_text_dir.mkpath FileUtils.cp files, out_text_dir end @@ -280,7 +280,7 @@ def copy_roby_event_log(output_dir, event_log) def copy_ignored_entries(output_dir, paths) return if paths.empty? 
- out_ignored_dir = (output_dir + 'ignored') + out_ignored_dir = (output_dir + "ignored") out_ignored_dir.mkpath FileUtils.cp_r paths, out_ignored_dir end diff --git a/lib/syskit/log/datastore/index_build.rb b/lib/syskit/log/datastore/index_build.rb index ed768f9..7664295 100644 --- a/lib/syskit/log/datastore/index_build.rb +++ b/lib/syskit/log/datastore/index_build.rb @@ -1,11 +1,11 @@ # frozen_string_literal: true -require 'roby/droby/logfile/index' +require "roby/droby/logfile/index" module Syskit::Log class Datastore def self.index_build(datastore, dataset, - force: false, reporter: Pocolog::CLI::NullReporter.new) + force: false, reporter: Pocolog::CLI::NullReporter.new) IndexBuild.new(datastore, dataset) .rebuild(force: force, reporter: reporter) end @@ -39,11 +39,11 @@ def rebuild(force: false, reporter: Pocolog::CLI::NullReporter.new) def rebuild_pocolog_indexes( force: false, reporter: Pocolog::CLI::NullReporter.new ) - pocolog_index_dir = (dataset.cache_path + 'pocolog') + pocolog_index_dir = (dataset.cache_path + "pocolog") pocolog_index_dir.mkpath if force # Just delete pocolog/*.idx from the cache - Pathname.glob(pocolog_index_dir + '*.idx', &:unlink) + Pathname.glob(pocolog_index_dir + "*.idx", &:unlink) end dataset.each_pocolog_path do |logfile_path| diff --git a/lib/syskit/log/datastore/normalize.rb b/lib/syskit/log/datastore/normalize.rb index 9f77076..eb4dd97 100644 --- a/lib/syskit/log/datastore/normalize.rb +++ b/lib/syskit/log/datastore/normalize.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -require 'digest/sha2' +require "digest/sha2" module Syskit::Log class Datastore @@ -22,7 +22,7 @@ class Output attr_reader :path, :stream_info, :digest, :stream_block_pos, :index_map, :last_data_block_time, :tell - WRITE_BLOCK_SIZE = 1024 ** 2 + WRITE_BLOCK_SIZE = 1024**2 def initialize(path, wio, stream_info, digest, stream_block_pos) @path = path @@ -76,30 +76,39 @@ def write(string) wio.write string digest.update string end + def close wio.close end + def flush wio.flush end + def tell wio.tell end + def closed? wio.closed? 
end + def seek(pos) wio.seek(pos) end + def read(count) wio.read(count) end + def path wio.path end + def size wio.size end + def stat wio.stat end @@ -137,7 +146,7 @@ def normalize( raw_stream_info = Pocolog::IndexBuilderStreamInfo.new(output.stream_block_pos, output.index_map) stream_info = Pocolog.create_index_from_raw_info(block_stream, [raw_stream_info]) index_path = Pocolog::Logfiles.default_index_filename(output.path, index_dir: index_dir) - File.open(index_path, 'w') do |io| + File.open(index_path, "w") do |io| Pocolog::Format::Current.write_index(io, block_stream.io, stream_info) end end @@ -149,7 +158,6 @@ def normalize( else out_files.each_value.map(&:path) end - ensure out_files.each_value(&:close) end @@ -196,7 +204,6 @@ def validate_time_followup(stream_index, data_block_header) end end - # @api private # # Normalize a single logfile @@ -404,7 +411,7 @@ def initialize_out_file( output.write raw_header[4..-1] output.write raw_payload out_files[out_file_path] = output - rescue Exception => e # rubocop:disable Lint/RescueException + rescue Exception # rubocop:disable Lint/RescueException wio&.close out_file_path&.unlink raise diff --git a/lib/syskit/log/deployment.rb b/lib/syskit/log/deployment.rb index d27fd78..556fff6 100644 --- a/lib/syskit/log/deployment.rb +++ b/lib/syskit/log/deployment.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit::Log # Task supporting the replay process # @@ -36,7 +38,7 @@ def added_execution_agent_parent(executed_task, _info) executed_task.start_event.on do model.each_stream_mapping do |stream, model_port| orocos_port = model_port.bind(executed_task).to_orocos_port - unless orocos_port.name == 'state' + unless orocos_port.name == "state" stream_to_port[stream] = orocos_port end end diff --git a/lib/syskit/log/dsl.rb b/lib/syskit/log/dsl.rb index e29ecbe..6896a4d 100644 --- a/lib/syskit/log/dsl.rb +++ b/lib/syskit/log/dsl.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # frozen_string_literalr: true require "syskit/log" @@ -564,7 +566,7 @@ def daru_to_vega(frame) # to mark # @param [Symbol] time_field the field that holds the data's time, # relative to interval_start (as generated by e.g. 
{#to_daru_frame}) - def roby_vega_mark_tasks(field_name, data, *tasks, time_field: ) + def roby_vega_mark_tasks(field_name, data, *tasks, time_field:) tasks = tasks.flat_map do |t| if t.respond_to?(:each_task) t.each_task.to_a diff --git a/lib/syskit/log/exceptions.rb b/lib/syskit/log/exceptions.rb index 0714784..4b41ece 100644 --- a/lib/syskit/log/exceptions.rb +++ b/lib/syskit/log/exceptions.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit::Log # Exception raised in resolution methods when one match was expected but more # than one was found diff --git a/lib/syskit/log/extensions.rb b/lib/syskit/log/extensions.rb index ecf86ed..370ec99 100644 --- a/lib/syskit/log/extensions.rb +++ b/lib/syskit/log/extensions.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit::Log # Set of modules used to extend Roby and Syskit's own classes module Extensions @@ -6,10 +8,10 @@ module Extensions end end -require 'syskit/log/extensions/deployment_group' -require 'syskit/log/extensions/instance_requirements' -require 'syskit/log/extensions/configuration' -require 'syskit/log/extensions/execution_engine' +require "syskit/log/extensions/deployment_group" +require "syskit/log/extensions/instance_requirements" +require "syskit/log/extensions/configuration" +require "syskit/log/extensions/execution_engine" Syskit::Models::DeploymentGroup.class_eval do prepend Syskit::Log::Extensions::DeploymentGroup @@ -23,4 +25,3 @@ module Extensions Roby::ExecutionEngine.class_eval do prepend Syskit::Log::Extensions::ExecutionEngine end - diff --git a/lib/syskit/log/extensions/configuration.rb b/lib/syskit/log/extensions/configuration.rb index 20969a2..d15d886 100644 --- a/lib/syskit/log/extensions/configuration.rb +++ b/lib/syskit/log/extensions/configuration.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit::Log module Extensions # Extension of the Syskit configuration class to add APIs related to @@ -13,4 +15,3 @@ def use_pocolog_task(streams, **options) end end end - diff --git a/lib/syskit/log/extensions/deployment_group.rb b/lib/syskit/log/extensions/deployment_group.rb index 8deb2d5..cbc34e1 100644 --- a/lib/syskit/log/extensions/deployment_group.rb +++ b/lib/syskit/log/extensions/deployment_group.rb @@ -1,22 +1,24 @@ +# frozen_string_literal: true + module Syskit::Log module Extensions # Extension of the Syskit::Models::DeploymentGroup class to add APIs related to # replaying tasks module DeploymentGroup # Expose a given set of streams as a task context in Syskit - def use_pocolog_task(streams, name: streams.task_name, model: streams.replay_model, allow_missing: true, on: 'pocolog', process_managers: Syskit.conf) + def use_pocolog_task(streams, name: streams.task_name, model: streams.replay_model, allow_missing: true, on: "pocolog", process_managers: Syskit.conf) # Verify the process manager's availability process_managers.process_server_config_for(on) deployment_model = Deployment.for_streams( - streams, name: name, model: model, allow_missing: allow_missing) + streams, name: name, model: model, allow_missing: allow_missing + ) - configured_deployment = Syskit::Models::ConfiguredDeployment. 
- new(on, deployment_model, Hash[name => name], name, Hash.new) + configured_deployment = Syskit::Models::ConfiguredDeployment + .new(on, deployment_model, Hash[name => name], name, {}) register_configured_deployment(configured_deployment) configured_deployment end end end end - diff --git a/lib/syskit/log/extensions/execution_engine.rb b/lib/syskit/log/extensions/execution_engine.rb index b5e2da0..f1857fe 100644 --- a/lib/syskit/log/extensions/execution_engine.rb +++ b/lib/syskit/log/extensions/execution_engine.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit::Log module Extensions # Extension of the Roby::ExecutionEngine class to add the engine's @@ -10,4 +12,3 @@ def pocolog_replay_manager end end end - diff --git a/lib/syskit/log/extensions/instance_requirements.rb b/lib/syskit/log/extensions/instance_requirements.rb index 09d7e97..d0626a7 100644 --- a/lib/syskit/log/extensions/instance_requirements.rb +++ b/lib/syskit/log/extensions/instance_requirements.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit::Log module Extensions # Extension of the Syskit::InstanceRequirements class to add APIs related to @@ -13,4 +15,3 @@ def use_pocolog_task(streams, **options) end end end - diff --git a/lib/syskit/log/lazy_data_stream.rb b/lib/syskit/log/lazy_data_stream.rb index 139dce2..e6c5dc7 100644 --- a/lib/syskit/log/lazy_data_stream.rb +++ b/lib/syskit/log/lazy_data_stream.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit::Log # Placeholder for Pocolog::DataStream that does not load any actual data / # index from the stream. diff --git a/lib/syskit/log/models/deployment.rb b/lib/syskit/log/models/deployment.rb index d4d6504..5cb6313 100644 --- a/lib/syskit/log/models/deployment.rb +++ b/lib/syskit/log/models/deployment.rb @@ -92,13 +92,13 @@ def add_streams_from(streams, allow_missing: true) if ports.empty? Syskit::Log.warn( "no log stream available for #{p}, ignored "\ - 'as allow_missing is true (there are no log '\ - 'streams for the underlying task)' + "as allow_missing is true (there are no log "\ + "streams for the underlying task)" ) else Syskit::Log.warn( "no log stream available for #{p}, ignored "\ - 'as allow_missing is true, known ports are: '\ + "as allow_missing is true, known ports are: "\ "#{ports.sort.join(', ')}" ) end @@ -117,10 +117,10 @@ def add_streams_from(streams, allow_missing: true) # @raise MismatchingType if the port and stream have differing types def add_stream( stream, - port = task_model.port_by_name(stream.metadata['rock_task_object_name']) + port = task_model.port_by_name(stream.metadata["rock_task_object_name"]) ) if !port.output? 
- raise ArgumentError, 'cannot map a log stream to an input port' + raise ArgumentError, "cannot map a log stream to an input port" elsif port.component_model != task_model raise ArgumentError, "#{self} deploys #{task_model} but the stream mapping "\ diff --git a/lib/syskit/log/models/replay_task_context.rb b/lib/syskit/log/models/replay_task_context.rb index bc1b2ed..a5621de 100644 --- a/lib/syskit/log/models/replay_task_context.rb +++ b/lib/syskit/log/models/replay_task_context.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit module Log module Models diff --git a/lib/syskit/log/plugin.rb b/lib/syskit/log/plugin.rb index a2f1a72..995b504 100644 --- a/lib/syskit/log/plugin.rb +++ b/lib/syskit/log/plugin.rb @@ -1,11 +1,13 @@ -require 'syskit' +# frozen_string_literal: true + +require "syskit" module Syskit::Log module Plugin def self.setup(app) Pocolog.logger = Syskit::Log.logger manager = Orocos::RubyTasks::ProcessManager.new(app.default_loader) - Syskit.conf.register_process_server('pocolog', manager, app.log_dir) + Syskit.conf.register_process_server("pocolog", manager, app.log_dir) end # This hooks into the network generation to deploy all tasks using @@ -21,4 +23,3 @@ def self.override_all_deployments_by_replay_streams(streams) end end end - diff --git a/lib/syskit/log/register_plugin.rb b/lib/syskit/log/register_plugin.rb index dc1a9c8..0fc5d0c 100644 --- a/lib/syskit/log/register_plugin.rb +++ b/lib/syskit/log/register_plugin.rb @@ -1,4 +1,5 @@ -require 'syskit/log/plugin' -Roby::Application.register_plugin('syskit-log', Syskit::Log::Plugin) do -end +# frozen_string_literal: true +require "syskit/log/plugin" +Roby::Application.register_plugin("syskit-log", Syskit::Log::Plugin) do +end diff --git a/lib/syskit/log/registration_namespace.rb b/lib/syskit/log/registration_namespace.rb index 890636d..7d28ed1 100644 --- a/lib/syskit/log/registration_namespace.rb +++ b/lib/syskit/log/registration_namespace.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module OroGen # Namespace on which the generated task models get registered module Pocolog diff --git a/lib/syskit/log/replay_manager.rb b/lib/syskit/log/replay_manager.rb index 2a80ad8..6618bd2 100644 --- a/lib/syskit/log/replay_manager.rb +++ b/lib/syskit/log/replay_manager.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit::Log # The object that manages the replay itself # @@ -123,7 +125,7 @@ class StateMismatch < RuntimeError; end # Start replaying in realtime def start(replay_speed: 1) - raise StateMismatch, 'already running' if running? + raise StateMismatch, "already running" if running? reset_replay_base_times @handler_id = execution_engine.add_side_work_handler( @@ -132,7 +134,7 @@ def start(replay_speed: 1) end def stop - raise StateMismatch, 'not running' unless running? + raise StateMismatch, "not running" unless running? 
execution_engine.remove_side_work_handler(@handler_id) @handler_id = nil @@ -156,7 +158,7 @@ def end_of_current_engine_cycle # # Play samples required by the current execution engine's time def process_in_realtime(replay_speed, - limit_real_time: end_of_current_engine_cycle) + limit_real_time: end_of_current_engine_cycle) limit_logical_time = base_logical_time + (limit_real_time - base_real_time) * replay_speed diff --git a/lib/syskit/log/replay_task_context.rb b/lib/syskit/log/replay_task_context.rb index 040d9fe..824ce63 100644 --- a/lib/syskit/log/replay_task_context.rb +++ b/lib/syskit/log/replay_task_context.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit module Log # Tasks that replay data streams @@ -16,4 +18,3 @@ class ReplayTaskContext < TaskContext end end end - diff --git a/lib/syskit/log/roby_sql_index/accessors.rb b/lib/syskit/log/roby_sql_index/accessors.rb index d6f5635..f8ec05c 100644 --- a/lib/syskit/log/roby_sql_index/accessors.rb +++ b/lib/syskit/log/roby_sql_index/accessors.rb @@ -244,6 +244,7 @@ def method_missing(m, *args, **kw, &block) unless args.empty? && kw.empty? raise ArgumentError, "wrong number of arguments" end + event(m[0..-7]) else super diff --git a/lib/syskit/log/roby_sql_index/definitions.rb b/lib/syskit/log/roby_sql_index/definitions.rb index 7bf044e..18979ff 100644 --- a/lib/syskit/log/roby_sql_index/definitions.rb +++ b/lib/syskit/log/roby_sql_index/definitions.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit module Log module RobySQLIndex diff --git a/lib/syskit/log/roby_sql_index/entities.rb b/lib/syskit/log/roby_sql_index/entities.rb index b45b742..6814694 100644 --- a/lib/syskit/log/roby_sql_index/entities.rb +++ b/lib/syskit/log/roby_sql_index/entities.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit module Log module RobySQLIndex diff --git a/lib/syskit/log/roby_sql_index/index.rb b/lib/syskit/log/roby_sql_index/index.rb index 19d232b..63a3b65 100644 --- a/lib/syskit/log/roby_sql_index/index.rb +++ b/lib/syskit/log/roby_sql_index/index.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Syskit module Log module RobySQLIndex diff --git a/lib/syskit/log/rock_stream_matcher.rb b/lib/syskit/log/rock_stream_matcher.rb index 2d42b6c..308eefd 100644 --- a/lib/syskit/log/rock_stream_matcher.rb +++ b/lib/syskit/log/rock_stream_matcher.rb @@ -36,40 +36,40 @@ def add_regex(key, rx) # Match ports def ports - add_regex('rock_stream_type', /^port$/) + add_regex("rock_stream_type", /^port$/) end # Match properties def properties - add_regex('rock_stream_type', /^property$/) + add_regex("rock_stream_type", /^property$/) end # Match the object (port/property) name # # @param [String] name the object name to match def object_name(name) - add_regex('rock_task_object_name', name) + add_regex("rock_task_object_name", name) end # Match the task name # # @param [String] name the task name to match def task_name(name) - add_regex('rock_task_name', name) + add_regex("rock_task_name", name) end # Match the task model # # @param [Syskit::Models::TaskContext] model def task_model(model) - add_regex('rock_task_model', model.orogen_model.name) + add_regex("rock_task_model", model.orogen_model.name) end # Match the type name # # @param [String,Regexp] name def type(name) - add_regex('type', name) + add_regex("type", name) end # Tests whether a stream matches this query @@ -78,7 +78,7 @@ def type(name) # @return [Boolean] def ===(stream) query.all? 
do |key, matcher| - if key == 'type' + if key == "type" matcher === stream.type.name elsif (metadata = stream.metadata[key]) matcher === metadata diff --git a/lib/syskit/log/shell_interface.rb b/lib/syskit/log/shell_interface.rb index 615a899..43155f2 100644 --- a/lib/syskit/log/shell_interface.rb +++ b/lib/syskit/log/shell_interface.rb @@ -1,4 +1,6 @@ -require 'roby/interface' +# frozen_string_literal: true + +require "roby/interface" module Syskit::Log # Definition of the syskit-specific interface commands @@ -8,17 +10,15 @@ class ShellInterface < Roby::Interface::CommandLibrary def initialize(app) super @replay_manager = app.plan.execution_engine.pocolog_replay_manager - Orocos.load_typekit 'base' + Orocos.load_typekit "base" @time_channel = Orocos::RubyTasks::TaskContext end def time replay_manager.time end - command :time, 'the current replay time', advanced: true + command :time, "the current replay time", advanced: true end end -Roby::Interface::Interface.subcommand 'replay', Syskit::Log::ShellInterface, 'Commands specific to syskit-pocolog' - - +Roby::Interface::Interface.subcommand "replay", Syskit::Log::ShellInterface, "Commands specific to syskit-pocolog" diff --git a/lib/syskit/log/streams.rb b/lib/syskit/log/streams.rb index 42f2211..7118790 100644 --- a/lib/syskit/log/streams.rb +++ b/lib/syskit/log/streams.rb @@ -73,7 +73,7 @@ def each_task( ignored_streams = Hash.new { |h, k| h[k] = [] } empty_task_models = [] each_stream do |s| - next unless (task_model_name = s.metadata['rock_task_model']) + next unless (task_model_name = s.metadata["rock_task_model"]) if task_model_name.empty? empty_task_models << s @@ -82,7 +82,7 @@ def each_task( task_m = Syskit::TaskContext.find_model_from_orogen_name(task_model_name) if !task_m && load_models - orogen_project_name, *_tail = task_model_name.split('::') + orogen_project_name, *_tail = task_model_name.split("::") begin loader.project_model_from_name(orogen_project_name) rescue OroGen::ProjectNotFound @@ -97,7 +97,7 @@ def each_task( end if task_m || !skip_tasks_without_models - available_tasks[s.metadata['rock_task_name']] << s + available_tasks[s.metadata["rock_task_name"]] << s else ignored_streams[task_model_name] << s end @@ -141,13 +141,13 @@ def make_file_groups_in_dir(path) files_per_basename = Hash.new { |h, k| h[k] = [] } path.children.each do |file_or_dir| next unless file_or_dir.file? 
- next unless file_or_dir.extname == '.log' + next unless file_or_dir.extname == ".log" - base_filename = file_or_dir.sub_ext('') + base_filename = file_or_dir.sub_ext("") id = base_filename.extname[1..-1] next if id !~ /^\d+$/ - base_filename = base_filename.sub_ext('') + base_filename = base_filename.sub_ext("") files_per_basename[base_filename.to_s][Integer(id)] = file_or_dir end files_per_basename.values.map(&:compact) @@ -272,7 +272,7 @@ def find_task_by_name(name) def respond_to_missing?(m, include_private = false) MetaRuby::DSLs.has_through_method_missing?( self, m, - '_task' => 'find_task_by_name' + "_task" => "find_task_by_name" ) || super end @@ -280,7 +280,7 @@ def respond_to_missing?(m, include_private = false) def method_missing(m, *args, &block) MetaRuby::DSLs.find_through_method_missing( self, m, args, - '_task' => 'find_task_by_name' + "_task" => "find_task_by_name" ) || super end diff --git a/lib/syskit/log/task_streams.rb b/lib/syskit/log/task_streams.rb index adf6b42..3c0384d 100644 --- a/lib/syskit/log/task_streams.rb +++ b/lib/syskit/log/task_streams.rb @@ -17,14 +17,14 @@ def initialize(streams = [], task_name: nil) # # @raise (see unique_metadata) def task_name - @task_name ||= unique_metadata('rock_task_name') + @task_name ||= unique_metadata("rock_task_name") end # Returns the orogen model name for all streams in self # # @raise (see unique_metadata) def orogen_model_name - @orogen_model_name ||= unique_metadata('rock_task_model') + @orogen_model_name ||= unique_metadata("rock_task_model") end # Returns the Syskit model for the orogen model name in @@ -53,8 +53,8 @@ def each_port_stream return enum_for(__method__) unless block_given? streams.each do |s| - if (s.metadata['rock_stream_type'] == 'port') && - (port_name = s.metadata['rock_task_object_name']) + if (s.metadata["rock_stream_type"] == "port") && + (port_name = s.metadata["rock_task_object_name"]) yield(port_name, s) end end @@ -68,8 +68,8 @@ def each_property_stream return enum_for(__method__) unless block_given? streams.each do |s| - if (s.metadata['rock_stream_type'] == 'property') && - (port_name = s.metadata['rock_task_object_name']) + if (s.metadata["rock_stream_type"] == "property") && + (port_name = s.metadata["rock_task_object_name"]) yield(port_name, s) end end @@ -98,8 +98,8 @@ def find_property_by_name(name) def respond_to_missing?(m, include_private = true) MetaRuby::DSLs.has_through_method_missing?( self, m, - '_port' => 'find_port_by_name', - '_property' => 'find_property_by_name' + "_port" => "find_port_by_name", + "_property" => "find_property_by_name" ) || super end @@ -108,8 +108,8 @@ def respond_to_missing?(m, include_private = true) def method_missing(m, *args) MetaRuby::DSLs.find_through_method_missing( self, m, args, - '_port' => 'find_port_by_name', - '_property' => 'find_property_by_name' + "_port" => "find_port_by_name", + "_property" => "find_property_by_name" ) || super end @@ -123,7 +123,7 @@ def method_missing(m, *args) # @raise Ambiguous if some streams have different values for the # metadata def unique_metadata(metadata_name) - raise Unknown, 'no streams' if streams.empty? + raise Unknown, "no streams" if streams.empty? 
model_name = nil streams.each do |s| @@ -136,7 +136,7 @@ def unique_metadata(metadata_name) model_name ||= name if model_name != name raise Ambiguous, - 'streams declare more than one value for '\ + "streams declare more than one value for "\ "#{metadata_name}: #{model_name} and #{name}" end end @@ -154,4 +154,3 @@ def as_plan end end end - diff --git a/lib/syskit/log/version.rb b/lib/syskit/log/version.rb index 1a780ed..4ec5813 100644 --- a/lib/syskit/log/version.rb +++ b/lib/syskit/log/version.rb @@ -2,6 +2,6 @@ module Syskit module Log - VERSION = '0.1.0' + VERSION = "0.1.0" end end diff --git a/lib/syskit/scripts/datastore.rb b/lib/syskit/scripts/datastore.rb index 1561be5..131dcad 100644 --- a/lib/syskit/scripts/datastore.rb +++ b/lib/syskit/scripts/datastore.rb @@ -1,13 +1,15 @@ -require 'syskit/log/cli/datastore' +# frozen_string_literal: true + +require "syskit/log/cli/datastore" class CLI < Thor - desc 'datastore', 'data management' - subcommand 'datastore', Syskit::Log::CLI::Datastore + desc "datastore", "data management" + subcommand "datastore", Syskit::Log::CLI::Datastore - desc 'ds', 'data management' - subcommand 'ds', Syskit::Log::CLI::Datastore + desc "ds", "data management" + subcommand "ds", Syskit::Log::CLI::Datastore end Roby.display_exception do - CLI.start(['datastore', *ARGV]) + CLI.start(["datastore", *ARGV]) end diff --git a/lib/syskit/scripts/ds.rb b/lib/syskit/scripts/ds.rb index 0d4e054..c091619 100644 --- a/lib/syskit/scripts/ds.rb +++ b/lib/syskit/scripts/ds.rb @@ -1,2 +1,4 @@ -require 'syskit/log/cli/datastore' -require 'syskit/scripts/datastore' +# frozen_string_literal: true + +require "syskit/log/cli/datastore" +require "syskit/scripts/datastore" diff --git a/lib/syskit/scripts/replay.rb b/lib/syskit/scripts/replay.rb index 188423e..ea34364 100644 --- a/lib/syskit/scripts/replay.rb +++ b/lib/syskit/scripts/replay.rb @@ -1,11 +1,12 @@ -require 'syskit/log/cli/replay' +# frozen_string_literal: true + +require "syskit/log/cli/replay" class CLI < Thor - desc 'replay', 'replaying of log data' - subcommand 'replay', Syskit::Log::CLI::Replay + desc "replay", "replaying of log data" + subcommand "replay", Syskit::Log::CLI::Replay end Roby.display_exception do - CLI.start(['replay', *ARGV]) + CLI.start(["replay", *ARGV]) end - diff --git a/syskit-log.gemspec b/syskit-log.gemspec index 69e666e..7fb6cdc 100644 --- a/syskit-log.gemspec +++ b/syskit-log.gemspec @@ -1,27 +1,29 @@ # coding: utf-8 -lib = File.expand_path('../lib', __FILE__) +# frozen_string_literal: true + +lib = File.expand_path("../lib", __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) -require 'syskit/log/version' +require "syskit/log/version" Gem::Specification.new do |spec| - spec.name = "syskit-pocolog" - spec.version = Syskit::Log::VERSION - spec.authors = ["Sylvain Joyeux"] - spec.email = ["sylvain.joyeux@m4x.org"] + spec.name = "syskit-pocolog" + spec.version = Syskit::Log::VERSION + spec.authors = ["Sylvain Joyeux"] + spec.email = ["sylvain.joyeux@m4x.org"] - spec.summary = "A Syskit plugin that allows to replay log files generated by pocolog" - spec.description = "Adds the APIs necessary to transform component networks to replay log files" - spec.homepage = "https://github.com/rock-core/syskit-pocolog" - spec.license = "MIT" + spec.summary = "A Syskit plugin that allows to replay log files generated by pocolog" + spec.description = "Adds the APIs necessary to transform component networks to replay log files" + spec.homepage = "https://github.com/rock-core/syskit-pocolog" + 
spec.license = "MIT" - spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) } - spec.bindir = "exe" - spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } - spec.require_paths = ["lib"] + spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) } + spec.bindir = "exe" + spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } + spec.require_paths = ["lib"] - spec.add_dependency 'syskit' - spec.add_dependency 'metaruby' - spec.add_development_dependency "bundler", "~> 1.11" - spec.add_development_dependency "rake", "~> 10.0" - spec.add_development_dependency "minitest", "~> 5.0" + spec.add_dependency "metaruby" + spec.add_dependency "syskit" + spec.add_development_dependency "bundler", "~> 1.11" + spec.add_development_dependency "minitest", "~> 5.0" + spec.add_development_dependency "rake", "~> 10.0" end diff --git a/test/cli/datastore_test.rb b/test/cli/datastore_test.rb index 386c99d..2061452 100644 --- a/test/cli/datastore_test.rb +++ b/test/cli/datastore_test.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true -require 'test_helper' -require 'syskit/log/cli/datastore' +require "test_helper" +require "syskit/log/cli/datastore" module Syskit::Log module CLI @@ -34,27 +34,27 @@ def capture_io # Helper method to call a CLI subcommand def call_cli(*args, silent: true) extra_args = [] - extra_args << '--colors=f' << '--progress=f' - extra_args << '--silent' if silent + extra_args << "--colors=f" << "--progress=f" + extra_args << "--silent" if silent Datastore.start([*args, *extra_args], debug: true) end - describe '#import' do - it 'imports a single dataset into the store' do - incoming_path = datastore_path + 'incoming' + '0' + describe "#import" do + it "imports a single dataset into the store" do + incoming_path = datastore_path + "incoming" + "0" flexmock(datastore_m::Import) .new_instances.should_receive(:normalize_dataset) .with( - [logfile_pathname], incoming_path + 'core', + [logfile_pathname], incoming_path + "core", on do |h| - h[:cache_path] == incoming_path + 'cache' && + h[:cache_path] == incoming_path + "cache" && h[:reporter].kind_of?(Pocolog::CLI::NullReporter) end ) .once.pass_thru expected_dataset = lambda do |s| - assert_equal incoming_path + 'core', s.dataset_path - assert_equal incoming_path + 'cache', s.cache_path + assert_equal incoming_path + "core", s.dataset_path + assert_equal incoming_path + "cache", s.cache_path true end flexmock(datastore_m::Import) @@ -62,47 +62,47 @@ def call_cli(*args, silent: true) .with(expected_dataset) .once.pass_thru - call_cli('import', '--min-duration=0', - '--store', datastore_path.to_s, logfile_pathname.to_s, + call_cli("import", "--min-duration=0", + "--store", datastore_path.to_s, logfile_pathname.to_s, silent: true) end - it 'optionally sets tags, description and arbitraty metadata' do - call_cli('import', '--min-duration=0', - '--store', datastore_path.to_s, logfile_pathname.to_s, - 'some description', '--tags', 'test', 'tags', - '--metadata', 'key0=value0a', 'key0=value0b', 'key1=value1', + it "optionally sets tags, description and arbitraty metadata" do + call_cli("import", "--min-duration=0", + "--store", datastore_path.to_s, logfile_pathname.to_s, + "some description", "--tags", "test", "tags", + "--metadata", "key0=value0a", "key0=value0b", "key1=value1", silent: true) dataset = Syskit::Log::Datastore.new(datastore_path) .each_dataset.first - assert_equal ['some description'], - 
dataset.metadata_fetch_all('description').to_a + assert_equal ["some description"], + dataset.metadata_fetch_all("description").to_a assert_equal %w[test tags], - dataset.metadata_fetch_all('tags').to_a + dataset.metadata_fetch_all("tags").to_a assert_equal %w[value0a value0b], - dataset.metadata_fetch_all('key0').to_a + dataset.metadata_fetch_all("key0").to_a assert_equal %w[value1], - dataset.metadata_fetch_all('key1').to_a + dataset.metadata_fetch_all("key1").to_a end - describe '--auto' do + describe "--auto" do it "creates the datastore path" do datastore_path.rmtree - call_cli('import', '--auto', '--store', datastore_path.to_s, + call_cli("import", "--auto", "--store", datastore_path.to_s, root_path.to_s) assert datastore_path.exist? end it "auto-imports any directory that looks like a raw dataset" do - create_logfile('test.0.log') {} - FileUtils.touch logfile_path('test-events.log') - incoming_path = datastore_path + 'incoming' + '0' + create_logfile("test.0.log") {} + FileUtils.touch logfile_path("test-events.log") + incoming_path = datastore_path + "incoming" + "0" flexmock(datastore_m::Import) .new_instances.should_receive(:normalize_dataset) .with( - [logfile_pathname], incoming_path + 'core', + [logfile_pathname], incoming_path + "core", on do |h| - h[:cache_path] == incoming_path + 'cache' && + h[:cache_path] == incoming_path + "cache" && h[:reporter].kind_of?(Pocolog::CLI::NullReporter) end ) @@ -118,21 +118,21 @@ def call_cli(*args, silent: true) .with(expected_dataset) .once.pass_thru - call_cli('import', '--auto', '--min-duration=0', - '--store', datastore_path.to_s, + call_cli("import", "--auto", "--min-duration=0", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: true) digest, = datastore_m::Import.find_import_info(logfile_pathname) assert datastore.has?(digest) end - it 'ignores datasets that have already been imported' do - create_logfile('test.0.log') do - create_logfile_stream 'test', metadata: Hash['rock_task_name' => 'task', 'rock_task_object_name' => 'port'] + it "ignores datasets that have already been imported" do + create_logfile("test.0.log") do + create_logfile_stream "test", metadata: Hash["rock_task_name" => "task", "rock_task_object_name" => "port"] write_logfile_sample Time.now, Time.now, 10 write_logfile_sample Time.now + 10, Time.now + 1, 20 end - FileUtils.touch logfile_path('test-events.log') - call_cli('import', '--auto', '--min-duration=0', - '--store', datastore_path.to_s, + FileUtils.touch logfile_path("test-events.log") + call_cli("import", "--auto", "--min-duration=0", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: true) flexmock(datastore_m::Import) .new_instances.should_receive(:normalize_dataset) @@ -141,22 +141,22 @@ def call_cli(*args, silent: true) .new_instances.should_receive(:move_dataset_to_store) .never out, = capture_io do - call_cli('import', '--auto', '--min-duration=0', - '--store', datastore_path.to_s, + call_cli("import", "--auto", "--min-duration=0", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: false) end - assert_match /#{logfile_pathname} already seem to have been imported as .*Give --force/, - out + assert_match(/#{logfile_pathname} already seem to have been imported as .*Give --force/, + out) end it "processes datasets that have already been imported if --force is given" do - create_logfile('test.0.log') do - create_logfile_stream 'test', metadata: Hash['rock_task_name' => 'task', 'rock_task_object_name' => 'port'] + create_logfile("test.0.log") 
do + create_logfile_stream "test", metadata: Hash["rock_task_name" => "task", "rock_task_object_name" => "port"] write_logfile_sample Time.now, Time.now, 10 write_logfile_sample Time.now + 10, Time.now + 1, 20 end - FileUtils.touch logfile_path('test-events.log') - call_cli('import', '--auto', '--min-duration=0', - '--store', datastore_path.to_s, + FileUtils.touch logfile_path("test-events.log") + call_cli("import", "--auto", "--min-duration=0", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: true) flexmock(datastore_m::Import) .new_instances.should_receive(:normalize_dataset) @@ -165,20 +165,20 @@ def call_cli(*args, silent: true) .new_instances.should_receive(:move_dataset_to_store) . once.pass_thru capture_io do - call_cli('import', '--auto', '--min-duration=0', '--force', - '--store', datastore_path.to_s, + call_cli("import", "--auto", "--min-duration=0", "--force", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: false) end end it "ignores datasets that do not seem to be already imported, but are" do - create_logfile('test.0.log') do - create_logfile_stream 'test', metadata: Hash['rock_task_name' => 'task', 'rock_task_object_name' => 'port'] + create_logfile("test.0.log") do + create_logfile_stream "test", metadata: Hash["rock_task_name" => "task", "rock_task_object_name" => "port"] write_logfile_sample Time.now, Time.now, 10 write_logfile_sample Time.now + 10, Time.now + 1, 20 end - FileUtils.touch logfile_path('test-events.log') - call_cli('import', '--auto', '--min-duration=0', - '--store', datastore_path.to_s, + FileUtils.touch logfile_path("test-events.log") + call_cli("import", "--auto", "--min-duration=0", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: true) (logfile_pathname + datastore_m::Import::BASENAME_IMPORT_TAG) .unlink @@ -189,22 +189,22 @@ def call_cli(*args, silent: true) .new_instances.should_receive(:move_dataset_to_store) .never out, = capture_io do - call_cli('import', '--auto', '--min-duration=0', - '--store', datastore_path.to_s, + call_cli("import", "--auto", "--min-duration=0", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: false) end - assert_match /#{logfile_pathname} already seem to have been imported as .*Give --force/, - out + assert_match(/#{logfile_pathname} already seem to have been imported as .*Give --force/, + out) end it "imports datasets that do not seem to be already imported, but are if --force is given" do - create_logfile('test.0.log') do - create_logfile_stream 'test', metadata: Hash['rock_task_name' => 'task', 'rock_task_object_name' => 'port'] + create_logfile("test.0.log") do + create_logfile_stream "test", metadata: Hash["rock_task_name" => "task", "rock_task_object_name" => "port"] write_logfile_sample Time.now, Time.now, 10 write_logfile_sample Time.now + 10, Time.now + 1, 20 end - FileUtils.touch logfile_path('test-events.log') - call_cli('import', '--auto', '--min-duration=0', - '--store', datastore_path.to_s, + FileUtils.touch logfile_path("test-events.log") + call_cli("import", "--auto", "--min-duration=0", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: true) digest, _ = datastore_m::Import.find_import_info(logfile_pathname) marker_path = datastore.core_path_of(digest) + "marker" @@ -217,17 +217,17 @@ def call_cli(*args, silent: true) .new_instances.should_receive(:move_dataset_to_store) .once.pass_thru out, = capture_io do - call_cli('import', '--auto', '--force', '--min-duration=0', - '--store', 
datastore_path.to_s, + call_cli("import", "--auto", "--force", "--min-duration=0", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: false) end - assert_match /Replacing existing dataset #{digest} with new one/, out + assert_match(/Replacing existing dataset #{digest} with new one/, out) refute marker_path.exist? end it "ignores an empty dataset after normalization if --min-duration "\ "is non-zero" do - create_logfile('test.0.log') {} - FileUtils.touch logfile_path('test-events.log') + create_logfile("test.0.log") {} + FileUtils.touch logfile_path("test-events.log") flexmock(datastore_m::Import) .new_instances.should_receive(:normalize_dataset) .once.pass_thru @@ -235,21 +235,21 @@ def call_cli(*args, silent: true) .new_instances.should_receive(:move_dataset_to_store) .never - call_cli('import', '--auto', '--min-duration=1', - '--store', datastore_path.to_s, + call_cli("import", "--auto", "--min-duration=1", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: true) end it "ignores datasets whose logical duration is "\ "lower than --min-duration" do - create_logfile('test.0.log') do + create_logfile("test.0.log") do create_logfile_stream( - 'test', metadata: { 'rock_task_name' => 'task', - 'rock_task_object_name' => 'port' } + "test", metadata: { "rock_task_name" => "task", + "rock_task_object_name" => "port" } ) write_logfile_sample Time.now, Time.now, 10 write_logfile_sample Time.now + 10, Time.now + 1, 20 end - FileUtils.touch logfile_path('test-events.log') + FileUtils.touch logfile_path("test-events.log") flexmock(datastore_m::Import) .new_instances.should_receive(:normalize_dataset) .once.pass_thru @@ -258,33 +258,33 @@ def call_cli(*args, silent: true) .never out, = capture_io do - call_cli('import', '--auto', '--min-duration=5', - '--store', datastore_path.to_s, + call_cli("import", "--auto", "--min-duration=5", + "--store", datastore_path.to_s, logfile_pathname.dirname.to_s, silent: false) end - assert_match /#{logfile_pathname} lasts only 1.0s, ignored/, out + assert_match(/#{logfile_pathname} lasts only 1.0s, ignored/, out) end end end describe "#normalize" do it "normalizes the logfiles in the input directory into the directory provided as 'out'" do - create_logfile('test.0.log') {} + create_logfile("test.0.log") {} out_path = root_path + "normalized" - flexmock(Syskit::Log::Datastore).should_receive(:normalize). - with([logfile_pathname('test.0.log')], hsh(output_path: out_path)). - once.pass_thru - call_cli('normalize', logfile_pathname.to_s, "--out=#{out_path}", silent: true) + flexmock(Syskit::Log::Datastore).should_receive(:normalize) + .with([logfile_pathname("test.0.log")], hsh(output_path: out_path)) + .once.pass_thru + call_cli("normalize", logfile_pathname.to_s, "--out=#{out_path}", silent: true) end it "reports progress without --silent" do - create_logfile('test.0.log') {} + create_logfile("test.0.log") {} out_path = root_path + "normalized" - flexmock(Syskit::Log::Datastore).should_receive(:normalize). - with([logfile_pathname('test.0.log')], hsh(output_path: out_path)). 
- once.pass_thru + flexmock(Syskit::Log::Datastore).should_receive(:normalize) + .with([logfile_pathname("test.0.log")], hsh(output_path: out_path)) + .once.pass_thru capture_io do - call_cli('normalize', logfile_pathname.to_s, "--out=#{out_path}", silent: false) + call_cli("normalize", logfile_pathname.to_s, "--out=#{out_path}", silent: false) end end end @@ -292,10 +292,10 @@ def call_cli(*args, silent: true) describe "#index" do before do create_dataset "a" do - create_logfile('test.0.log') {} + create_logfile("test.0.log") {} end create_dataset "b" do - create_logfile('test.0.log') {} + create_logfile("test.0.log") {} end end @@ -308,22 +308,22 @@ def expected_dataset(digest) end it "runs the indexer on all datasets of the store if none are provided on the command line" do - flexmock(Syskit::Log::Datastore). - should_receive(:index_build). - with(expected_store, expected_dataset('a'), Hash).once. - pass_thru - flexmock(Syskit::Log::Datastore). - should_receive(:index_build). - with(expected_store, expected_dataset('b'), Hash).once. - pass_thru - call_cli('index', '--store', datastore_path.to_s) + flexmock(Syskit::Log::Datastore) + .should_receive(:index_build) + .with(expected_store, expected_dataset("a"), Hash).once + .pass_thru + flexmock(Syskit::Log::Datastore) + .should_receive(:index_build) + .with(expected_store, expected_dataset("b"), Hash).once + .pass_thru + call_cli("index", "--store", datastore_path.to_s) end it "runs the indexer on the datasets of the store specified on the command line" do - flexmock(Syskit::Log::Datastore). - should_receive(:index_build). - with(expected_store, expected_dataset('a'), Hash).once. - pass_thru - call_cli('index', '--store', datastore_path.to_s, 'a') + flexmock(Syskit::Log::Datastore) + .should_receive(:index_build) + .with(expected_store, expected_dataset("a"), Hash).once + .pass_thru + call_cli("index", "--store", datastore_path.to_s, "a") end end @@ -368,24 +368,24 @@ def expected_dataset(digest) attr_reader :show_a0ea, :show_a0fa, :base_time before do @base_time = Time.at(34200, 234) - create_dataset "a0ea", metadata: Hash['description' => 'first', 'test' => ['2'], 'array_test' => ['a', 'b']] do - create_logfile('test.0.log') do - create_logfile_stream 'test', metadata: Hash['rock_stream_type' => 'port', 'rock_task_name' => 'task0', 'rock_task_object_name' => 'port0', 'rock_task_model' => 'test::Task'] + create_dataset "a0ea", metadata: Hash["description" => "first", "test" => ["2"], "array_test" => %w[a b]] do + create_logfile("test.0.log") do + create_logfile_stream "test", metadata: Hash["rock_stream_type" => "port", "rock_task_name" => "task0", "rock_task_object_name" => "port0", "rock_task_model" => "test::Task"] write_logfile_sample base_time, base_time, 0 write_logfile_sample base_time + 1, base_time + 10, 1 end - create_logfile('test_property.0.log') do - create_logfile_stream 'test_property', metadata: Hash['rock_stream_type' => 'property', 'rock_task_name' => 'task0', 'rock_task_object_name' => 'property0', 'rock_task_model' => 'test::Task'] + create_logfile("test_property.0.log") do + create_logfile_stream "test_property", metadata: Hash["rock_stream_type" => "property", "rock_task_name" => "task0", "rock_task_object_name" => "property0", "rock_task_model" => "test::Task"] write_logfile_sample base_time, base_time + 1, 2 write_logfile_sample base_time + 1, base_time + 9, 3 end end - create_dataset "a0fa", metadata: Hash['test' => ['1'], 'array_test' => ['c', 'd']] do - create_logfile('test.0.log') do - create_logfile_stream 
'test', metadata: Hash['rock_stream_type' => 'port', 'rock_task_name' => 'task0', 'rock_task_object_name' => 'port0', 'rock_task_model' => 'test::Task'] + create_dataset "a0fa", metadata: Hash["test" => ["1"], "array_test" => %w[c d]] do + create_logfile("test.0.log") do + create_logfile_stream "test", metadata: Hash["rock_stream_type" => "port", "rock_task_name" => "task0", "rock_task_object_name" => "port0", "rock_task_model" => "test::Task"] end - create_logfile('test_property.0.log') do - create_logfile_stream 'test_property', metadata: Hash['rock_stream_type' => 'property', 'rock_task_name' => 'task0', 'rock_task_object_name' => 'property0', 'rock_task_model' => 'test::Task'] + create_logfile("test_property.0.log") do + create_logfile_stream "test_property", metadata: Hash["rock_stream_type" => "property", "rock_task_name" => "task0", "rock_task_object_name" => "property0", "rock_task_model" => "test::Task"] end end @show_a0ea = <<-EOF @@ -406,49 +406,49 @@ def expected_dataset(digest) it "raises if the query is invalid" do assert_raises(Syskit::Log::Datastore::Dataset::InvalidDigest) do - call_cli('list', '--store', datastore_path.to_s, - 'not_a_sha', silent: false) + call_cli("list", "--store", datastore_path.to_s, + "not_a_sha", silent: false) end end it "lists all datasets if given only the datastore path" do out, _err = capture_io do - call_cli('list', '--store', datastore_path.to_s, silent: false) + call_cli("list", "--store", datastore_path.to_s, silent: false) end assert_equal [show_a0ea, show_a0fa].join, out end it "lists only the short digests if --digest is given" do out, _err = capture_io do - call_cli('list', '--store', datastore_path.to_s, - '--digest', silent: false) + call_cli("list", "--store", datastore_path.to_s, + "--digest", silent: false) end assert_equal "a0ea\na0fa\n", out end it "lists only the short digests if --digest --long-digests are given" do out, _err = capture_io do - call_cli('list', '--store', datastore_path.to_s, - '--digest', '--long-digests', silent: false) + call_cli("list", "--store", datastore_path.to_s, + "--digest", "--long-digests", silent: false) end assert_equal "a0ea\na0fa\n", out end it "accepts a digest prefix as argument" do out, _err = capture_io do - call_cli('list', '--store', datastore_path.to_s, - 'a0e', silent: false) + call_cli("list", "--store", datastore_path.to_s, + "a0e", silent: false) end assert_equal show_a0ea, out end it "can match metadata exactly" do out, _err = capture_io do - call_cli('list', '--store', datastore_path.to_s, - 'test=1', silent: false) + call_cli("list", "--store", datastore_path.to_s, + "test=1", silent: false) end assert_equal show_a0fa, out end it "can match metadata with a regexp" do out, _err = capture_io do - call_cli('list', '--store', datastore_path.to_s, - 'array_test~[ac]', silent: false) + call_cli("list", "--store", datastore_path.to_s, + "array_test~[ac]", silent: false) end assert_equal [show_a0ea, show_a0fa].join, out end @@ -456,10 +456,10 @@ def expected_dataset(digest) describe "--pocolog" do it "shows the pocolog stream information" do out, _err = capture_io do - call_cli('list', '--store', datastore_path.to_s, - 'a0e', '--pocolog', silent: false) + call_cli("list", "--store", datastore_path.to_s, + "a0e", "--pocolog", silent: false) end - pocolog_info =<<-EOF + pocolog_info = <<-EOF 1 oroGen tasks in 2 streams task0[test::Task]: 1 ports and 1 properties Ports: @@ -471,10 +471,10 @@ def expected_dataset(digest) end it "handles empty streams gracefully" do out, _err = capture_io do - 
call_cli('list', '--store', datastore_path.to_s, - 'a0f', '--pocolog', silent: false) + call_cli("list", "--store", datastore_path.to_s, + "a0f", "--pocolog", silent: false) end - pocolog_info =<<-EOF + pocolog_info = <<-EOF 1 oroGen tasks in 2 streams task0[test::Task]: 1 ports and 1 properties Ports: @@ -489,82 +489,82 @@ def expected_dataset(digest) describe "#metadata" do before do - create_dataset "a0ea", metadata: Hash['test' => ['a']] do - create_logfile('test.0.log') {} + create_dataset "a0ea", metadata: Hash["test" => ["a"]] do + create_logfile("test.0.log") {} end - create_dataset "a0fa", metadata: Hash['test' => ['b']] do - create_logfile('test.0.log') {} + create_dataset "a0fa", metadata: Hash["test" => ["b"]] do + create_logfile("test.0.log") {} end end it "raises if the query is invalid" do assert_raises(Syskit::Log::Datastore::Dataset::InvalidDigest) do - call_cli('metadata', '--store', datastore_path.to_s, - 'not_a_sha', '--get', silent: false) + call_cli("metadata", "--store", datastore_path.to_s, + "not_a_sha", "--get", silent: false) end end - describe '--set' do + describe "--set" do it "sets metadata on the given dataset" do - call_cli('metadata', '--store', datastore_path.to_s, - 'a0e', '--set', 'debug=true', silent: false) - assert_equal Set['true'], datastore.get('a0ea').metadata['debug'] - assert_nil datastore.get('a0fa').metadata['debug'] + call_cli("metadata", "--store", datastore_path.to_s, + "a0e", "--set", "debug=true", silent: false) + assert_equal Set["true"], datastore.get("a0ea").metadata["debug"] + assert_nil datastore.get("a0fa").metadata["debug"] end it "sets metadata on matching datasets" do - call_cli('metadata', '--store', datastore_path.to_s, 'test=b', '--set', 'debug=true', silent: false) - assert_nil datastore.get('a0ea').metadata['debug'] - assert_equal Set['true'], datastore.get('a0fa').metadata['debug'] + call_cli("metadata", "--store", datastore_path.to_s, "test=b", "--set", "debug=true", silent: false) + assert_nil datastore.get("a0ea").metadata["debug"] + assert_equal Set["true"], datastore.get("a0fa").metadata["debug"] end it "sets metadata on all datasets if no query is given" do - call_cli('metadata', '--store', datastore_path.to_s, '--set', 'debug=true', silent: false) - assert_equal Set['true'], datastore.get('a0ea').metadata['debug'] - assert_equal Set['true'], datastore.get('a0fa').metadata['debug'] + call_cli("metadata", "--store", datastore_path.to_s, "--set", "debug=true", silent: false) + assert_equal Set["true"], datastore.get("a0ea").metadata["debug"] + assert_equal Set["true"], datastore.get("a0fa").metadata["debug"] end it "collects all set arguments with the same key" do - call_cli('metadata', '--store', datastore_path.to_s, '--set', 'test=a', 'test=b', 'test=c', silent: false) - assert_equal Set['a', 'b', 'c'], datastore.get('a0ea').metadata['test'] + call_cli("metadata", "--store", datastore_path.to_s, "--set", "test=a", "test=b", "test=c", silent: false) + assert_equal Set["a", "b", "c"], datastore.get("a0ea").metadata["test"] end it "raises if the argument to set is not a key=value association" do assert_raises(ArgumentError) do - call_cli('metadata', '--store', datastore_path.to_s, 'a0ea', '--set', 'debug', silent: false) + call_cli("metadata", "--store", datastore_path.to_s, "a0ea", "--set", "debug", silent: false) end end end - describe '--get' do + describe "--get" do it "lists all metadata on all datasets if no query is given" do - call_cli('metadata', '--store', datastore_path.to_s, 'a0ea', '--set', 'test=a,b', 
silent: false) + call_cli("metadata", "--store", datastore_path.to_s, "a0ea", "--set", "test=a,b", silent: false) out, _err = capture_io do - call_cli('metadata', '--store', datastore_path.to_s, '--get', silent: false) + call_cli("metadata", "--store", datastore_path.to_s, "--get", silent: false) end assert_equal "a0ea test=a,b\na0fa test=b\n", out end it "displays the short digest by default" do - flexmock(Syskit::Log::Datastore).new_instances.should_receive(:short_digest). - and_return { |dataset| dataset.digest[0, 3] } + flexmock(Syskit::Log::Datastore).new_instances.should_receive(:short_digest) + .and_return { |dataset| dataset.digest[0, 3] } out, _err = capture_io do - call_cli('metadata', '--store', datastore_path.to_s, '--get', silent: false) + call_cli("metadata", "--store", datastore_path.to_s, "--get", silent: false) end assert_equal "a0e test=a\na0f test=b\n", out end it "displays the long digest if --long-digest is given" do flexmock(datastore).should_receive(:short_digest).never out, _err = capture_io do - call_cli('metadata', '--store', datastore_path.to_s, '--get', '--long-digest', silent: false) + call_cli("metadata", "--store", datastore_path.to_s, "--get", "--long-digest", silent: false) end assert_equal "a0ea test=a\na0fa test=b\n", out end it "lists the requested metadata of the matching datasets" do - call_cli('metadata', '--store', datastore_path.to_s, 'a0ea', '--set', 'test=a,b', 'debug=true', silent: false) + call_cli("metadata", "--store", datastore_path.to_s, "a0ea", "--set", "test=a,b", "debug=true", silent: false) out, _err = capture_io do - call_cli('metadata', '--store', datastore_path.to_s, 'a0ea', '--get', 'test', silent: false) + call_cli("metadata", "--store", datastore_path.to_s, "a0ea", "--get", "test", silent: false) end assert_equal "a0ea test=a,b\n", out end it "replaces requested metadata that are unset by " do out, _err = capture_io do - call_cli('metadata', '--store', datastore_path.to_s, 'a0ea', '--get', 'debug', silent: false) + call_cli("metadata", "--store", datastore_path.to_s, "a0ea", "--get", "debug", silent: false) end assert_equal "a0ea debug=\n", out end @@ -572,13 +572,13 @@ def expected_dataset(digest) it "raises if both --get and --set are provided" do assert_raises(ArgumentError) do - call_cli('metadata', '--store', datastore_path.to_s, 'a0ea', '--get', 'debug', '--set', 'test=10', silent: false) + call_cli("metadata", "--store", datastore_path.to_s, "a0ea", "--get", "debug", "--set", "test=10", silent: false) end end it "raises if neither --get nor --set are provided" do assert_raises(ArgumentError) do - call_cli('metadata', '--store', datastore_path.to_s, 'a0ea', silent: false) + call_cli("metadata", "--store", datastore_path.to_s, "a0ea", silent: false) end end end @@ -587,24 +587,24 @@ def expected_dataset(digest) attr_reader :show_a0ea, :show_a0fa, :base_time before do @base_time = Time.at(34200, 234) - create_dataset "a0ea", metadata: Hash['description' => 'first', 'test' => ['2'], 'array_test' => ['a', 'b']] do - create_logfile('test.0.log') do - create_logfile_stream 'test', metadata: Hash['rock_stream_type' => 'port', 'rock_task_name' => 'task0', 'rock_task_object_name' => 'port0', 'rock_task_model' => 'test::Task'] + create_dataset "a0ea", metadata: Hash["description" => "first", "test" => ["2"], "array_test" => %w[a b]] do + create_logfile("test.0.log") do + create_logfile_stream "test", metadata: Hash["rock_stream_type" => "port", "rock_task_name" => "task0", "rock_task_object_name" => "port0", "rock_task_model" => 
"test::Task"] write_logfile_sample base_time, base_time, 0 write_logfile_sample base_time + 1, base_time + 10, 1 end - create_logfile('test_property.0.log') do - create_logfile_stream 'test_property', metadata: Hash['rock_stream_type' => 'property', 'rock_task_name' => 'task0', 'rock_task_object_name' => 'property0', 'rock_task_model' => 'test::Task'] + create_logfile("test_property.0.log") do + create_logfile_stream "test_property", metadata: Hash["rock_stream_type" => "property", "rock_task_name" => "task0", "rock_task_object_name" => "property0", "rock_task_model" => "test::Task"] write_logfile_sample base_time, base_time + 1, 2 write_logfile_sample base_time + 1, base_time + 9, 3 end end - create_dataset "a0fa", metadata: Hash['test' => ['1'], 'array_test' => ['c', 'd']] do - create_logfile('test.0.log') do - create_logfile_stream 'test', metadata: Hash['rock_stream_type' => 'port', 'rock_task_name' => 'task0', 'rock_task_object_name' => 'port0', 'rock_task_model' => 'test::Task'] + create_dataset "a0fa", metadata: Hash["test" => ["1"], "array_test" => %w[c d]] do + create_logfile("test.0.log") do + create_logfile_stream "test", metadata: Hash["rock_stream_type" => "port", "rock_task_name" => "task0", "rock_task_object_name" => "port0", "rock_task_model" => "test::Task"] end - create_logfile('test_property.0.log') do - create_logfile_stream 'test_property', metadata: Hash['rock_stream_type' => 'property', 'rock_task_name' => 'task0', 'rock_task_object_name' => 'property0', 'rock_task_model' => 'test::Task'] + create_logfile("test_property.0.log") do + create_logfile_stream "test_property", metadata: Hash["rock_stream_type" => "property", "rock_task_name" => "task0", "rock_task_object_name" => "property0", "rock_task_model" => "test::Task"] end end end @@ -658,4 +658,3 @@ def expected_dataset(digest) end end end - diff --git a/test/daru/path_builder_test.rb b/test/daru/path_builder_test.rb index 789e15a..bb426e0 100644 --- a/test/daru/path_builder_test.rb +++ b/test/daru/path_builder_test.rb @@ -142,7 +142,6 @@ module Daru it "raises if trying to call a field method with arguments" do assert_raises(ArgumentError) { @path_builder.field(1) } end - end describe "when on a container type" do diff --git a/test/datastore/dataset_test.rb b/test/datastore/dataset_test.rb index 4928dbb..5240a69 100644 --- a/test/datastore/dataset_test.rb +++ b/test/datastore/dataset_test.rb @@ -1,5 +1,7 @@ -require 'test_helper' -require 'tmpdir' +# frozen_string_literal: true + +require "test_helper" +require "tmpdir" module Syskit::Log class Datastore @@ -14,23 +16,23 @@ def dataset_pathname(*names) before do @root_path = Pathname.new(Dir.mktmpdir) @store = Datastore.new(root_path) - @dataset_path = store.core_path_of('dataset') - (dataset_path + 'pocolog').mkpath - (dataset_path + 'text').mkpath - (dataset_path + 'ignored').mkpath - @cache_path = store.cache_path_of('dataset') + @dataset_path = store.core_path_of("dataset") + (dataset_path + "pocolog").mkpath + (dataset_path + "text").mkpath + (dataset_path + "ignored").mkpath + @cache_path = store.cache_path_of("dataset") @dataset = Dataset.new(dataset_path, cache: cache_path) move_logfile_path (dataset_path + "pocolog").to_s - create_logfile 'task0::port.0.log' do - create_logfile_stream 'test', - metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'] - end - FileUtils.touch dataset_pathname('text', 'test.txt') - dataset_pathname('roby-events.0.log').open('w') { |io| io.write "ROBY" } - FileUtils.touch dataset_pathname('ignored', 
'not_recognized_file') - dataset_pathname('ignored', 'not_recognized_dir').mkpath - FileUtils.touch dataset_pathname('ignored', 'not_recognized_dir', 'test') + create_logfile "task0::port.0.log" do + create_logfile_stream "test", + metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"] + end + FileUtils.touch dataset_pathname("text", "test.txt") + dataset_pathname("roby-events.0.log").open("w") { |io| io.write "ROBY" } + FileUtils.touch dataset_pathname("ignored", "not_recognized_file") + dataset_pathname("ignored", "not_recognized_dir").mkpath + FileUtils.touch dataset_pathname("ignored", "not_recognized_dir", "test") end after do root_path.rmtree @@ -58,8 +60,8 @@ def dataset_pathname(*names) it "lists the full paths to the pocolog and roby files" do files = dataset.each_important_file.to_set expected = [ - dataset_pathname('roby-events.0.log'), - dataset_pathname('pocolog', 'task0::port.0.log') + dataset_pathname("roby-events.0.log"), + dataset_pathname("pocolog", "task0::port.0.log") ].to_set assert_equal expected, files end @@ -79,7 +81,7 @@ def dataset_pathname(*names) end end it "raises if the string contains invalid characters for base64" do - sha2[3, 1] = '_' + sha2[3, 1] = "_" assert_raises(Dataset::InvalidDigest) do Dataset.validate_encoded_short_digest(sha2) end @@ -105,7 +107,7 @@ def dataset_pathname(*names) end end it "raises if the string contains invalid characters for base64" do - sha2[3, 1] = '_' + sha2[3, 1] = "_" assert_raises(Dataset::InvalidDigest) do Dataset.validate_encoded_sha2(sha2) end @@ -117,11 +119,12 @@ def dataset_pathname(*names) describe "#compute_dataset_identity_from_files" do it "returns a list of entries with full path, size and sha256 digest" do - roby_path = dataset_pathname('roby-events.0.log') + roby_path = dataset_pathname("roby-events.0.log") roby_digest = Digest::SHA256.hexdigest(roby_path.read) - pocolog_path = dataset_pathname('pocolog', 'task0::port.0.log') + pocolog_path = dataset_pathname("pocolog", "task0::port.0.log") pocolog_digest = Digest::SHA256.hexdigest( - pocolog_path.read[Pocolog::Format::Current::PROLOGUE_SIZE..-1]) + pocolog_path.read[Pocolog::Format::Current::PROLOGUE_SIZE..-1] + ) expected = Set[ Dataset::IdentityEntry.new(roby_path, roby_path.size, roby_digest), Dataset::IdentityEntry.new(pocolog_path, pocolog_path.size, pocolog_digest)] @@ -132,43 +135,43 @@ def dataset_pathname(*names) it "saves and loads the identity information in the dataset" do dataset.write_dataset_identity_to_metadata_file assert_equal dataset.compute_dataset_identity_from_files.to_set, - dataset.read_dataset_identity_from_metadata_file.to_set + dataset.read_dataset_identity_from_metadata_file.to_set end describe "#write_dataset_identity_to_metadata_file" do it "validates that the provided identity entries have paths within the dataset" do - entry = Dataset::IdentityEntry.new(Pathname.new('/'), 10, Digest::SHA256.hexdigest('')) + entry = Dataset::IdentityEntry.new(Pathname.new("/"), 10, Digest::SHA256.hexdigest("")) assert_raises(Dataset::InvalidIdentityMetadata) do dataset.write_dataset_identity_to_metadata_file([entry]) end end it "validates that the provided identity entries have sizes that are integers" do - entry = Dataset::IdentityEntry.new(dataset_path + "file", 'not_a_number', Digest::SHA256.hexdigest('')) + entry = Dataset::IdentityEntry.new(dataset_path + "file", "not_a_number", Digest::SHA256.hexdigest("")) assert_raises(Dataset::InvalidIdentityMetadata) do dataset.write_dataset_identity_to_metadata_file([entry]) 
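# --- Editor's illustrative sketch, not part of the patch ---
# The identity tests above pair each dataset file with its size and SHA256 digest,
# and for pocolog files the digest deliberately skips the fixed-size prologue
# (Pocolog::Format::Current::PROLOGUE_SIZE) so only the payload is hashed. A
# minimal sketch of that idea, with a hypothetical prologue size:

require "digest"

PROLOGUE_SIZE = 64 # hypothetical; the real value comes from pocolog's format definition

def payload_digest(path, prologue_size: PROLOGUE_SIZE)
  data = File.binread(path)
  Digest::SHA256.hexdigest(data[prologue_size..-1] || "")
end

# payload_digest("task0::port.0.log") # => 64-character hex digest of the payload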
end end it "validates that the provided identity entries have sizes that are positive" do - entry = Dataset::IdentityEntry.new(dataset_path + "file", -20, Digest::SHA256.hexdigest('')) + entry = Dataset::IdentityEntry.new(dataset_path + "file", -20, Digest::SHA256.hexdigest("")) assert_raises(Dataset::InvalidIdentityMetadata) do dataset.write_dataset_identity_to_metadata_file([entry]) end end it "validates that the provided identity entries have valid-looking sha256 digests" do - entry = Dataset::IdentityEntry.new(dataset_path + "file", 10, 'invalid_digest') + entry = Dataset::IdentityEntry.new(dataset_path + "file", 10, "invalid_digest") assert_raises(Dataset::InvalidIdentityMetadata) do dataset.write_dataset_identity_to_metadata_file([entry]) end end it "saves the result to the identity file" do - file_digest = Digest::SHA256.hexdigest('file') - dataset_digest = Digest::SHA256.hexdigest('dataset') + file_digest = Digest::SHA256.hexdigest("file") + dataset_digest = Digest::SHA256.hexdigest("dataset") entry = Dataset::IdentityEntry.new(dataset_path + "file", 10, file_digest) flexmock(dataset).should_receive(:compute_dataset_digest).with([entry]).and_return(dataset_digest) dataset.write_dataset_identity_to_metadata_file([entry]) data = YAML.load((dataset_path + Dataset::BASENAME_IDENTITY_METADATA).read) - expected = Hash['layout_version' => Dataset::LAYOUT_VERSION, 'sha2' => dataset_digest, - 'identity' => [Hash['sha2' => file_digest, 'size' => 10, 'path' => 'file']]] + expected = Hash["layout_version" => Dataset::LAYOUT_VERSION, "sha2" => dataset_digest, + "identity" => [Hash["sha2" => file_digest, "size" => 10, "path" => "file"]]] assert_equal expected, data end end @@ -183,7 +186,7 @@ def write_metadata( "sha2" => Digest::SHA2.hexdigest("") }.merge(overrides) - (dataset_path + Dataset::BASENAME_IDENTITY_METADATA).open('w') do |io| + (dataset_path + Dataset::BASENAME_IDENTITY_METADATA).open("w") do |io| io.write YAML.dump( { "layout_version" => layout_version, "identity" => [metadata] } @@ -199,64 +202,64 @@ def write_metadata( end end it "sets the entry's path to the file's absolute path" do - write_metadata({ 'path' => 'test' }) + write_metadata({ "path" => "test" }) entry = dataset.read_dataset_identity_from_metadata_file.first - assert_equal (dataset_path + 'test'), entry.path + assert_equal (dataset_path + "test"), entry.path end it "validates that the paths are within the dataset" do - write_metadata({ 'path' => '../test' }) + write_metadata({ "path" => "../test" }) assert_raises(Dataset::InvalidIdentityMetadata) do dataset.read_dataset_identity_from_metadata_file end end it "sets the entry's size" do - write_metadata({ 'size' => 20 }) + write_metadata({ "size" => 20 }) entry = dataset.read_dataset_identity_from_metadata_file.first assert_equal 20, entry.size end it "sets the entry's size" do - write_metadata({ 'sha2' => Digest::SHA2.hexdigest('test') }) + write_metadata({ "sha2" => Digest::SHA2.hexdigest("test") }) entry = dataset.read_dataset_identity_from_metadata_file.first - assert_equal Digest::SHA2.hexdigest('test'), entry.sha2 + assert_equal Digest::SHA2.hexdigest("test"), entry.sha2 end it "validates that the file's has an 'identity' field" do - (dataset_path + Dataset::BASENAME_IDENTITY_METADATA).open('w') do |io| - io.write YAML.dump(Hash['layout_version' => Dataset::LAYOUT_VERSION]) + (dataset_path + Dataset::BASENAME_IDENTITY_METADATA).open("w") do |io| + io.write YAML.dump(Hash["layout_version" => Dataset::LAYOUT_VERSION]) end e = 
assert_raises(Dataset::InvalidIdentityMetadata) do dataset.read_dataset_identity_from_metadata_file end - assert_match /no 'identity' field/, e.message + assert_match(/no 'identity' field/, e.message) end it "validates that the file's 'identity' field is an array" do - (dataset_path + Dataset::BASENAME_IDENTITY_METADATA).open('w') do |io| - io.write YAML.dump(Hash['layout_version' => Dataset::LAYOUT_VERSION, 'identity' => Hash.new]) + (dataset_path + Dataset::BASENAME_IDENTITY_METADATA).open("w") do |io| + io.write YAML.dump(Hash["layout_version" => Dataset::LAYOUT_VERSION, "identity" => {}]) end e = assert_raises(Dataset::InvalidIdentityMetadata) do dataset.read_dataset_identity_from_metadata_file end - assert_match /the 'identity' field.*is not an array/, e.message + assert_match(/the 'identity' field.*is not an array/, e.message) end it "validates that the 'path' field contains a string" do - write_metadata({ 'path' => 10 }) + write_metadata({ "path" => 10 }) assert_raises(Dataset::InvalidIdentityMetadata) do dataset.read_dataset_identity_from_metadata_file end end it "validates that the 'size' field is an integer" do - write_metadata({ 'size' => 'not_a_number' }) + write_metadata({ "size" => "not_a_number" }) assert_raises(Dataset::InvalidIdentityMetadata) do dataset.read_dataset_identity_from_metadata_file end end it "validates that the 'sha2' field contains a string" do - write_metadata({ 'sha2' => 10 }) + write_metadata({ "sha2" => 10 }) assert_raises(Dataset::InvalidIdentityMetadata) do dataset.read_dataset_identity_from_metadata_file end end it "validates that the 'path' field contains a valid hash" do - write_metadata({ 'sha2' => 'aerpojapoj' }) + write_metadata({ "sha2" => "aerpojapoj" }) assert_raises(Dataset::InvalidIdentityMetadata) do dataset.read_dataset_identity_from_metadata_file end @@ -281,32 +284,33 @@ def write_metadata( entries = dataset.compute_dataset_identity_from_files entries[0].size += 10 refute_equal dataset.compute_dataset_digest, - dataset.compute_dataset_digest(entries) + dataset.compute_dataset_digest(entries) end it "changes if the sha2 of one of the files change" do entries = dataset.compute_dataset_identity_from_files - entries[0].sha2[10] = '0' + entries[0].sha2[10] = "0" refute_equal dataset.compute_dataset_digest, - dataset.compute_dataset_digest(entries) + dataset.compute_dataset_digest(entries) end it "changes if a new entry is added" do entries = dataset.compute_dataset_identity_from_files entries << Dataset::IdentityEntry.new( - root_path + 'new_file', 10, Digest::SHA2.hexdigest('test')) + root_path + "new_file", 10, Digest::SHA2.hexdigest("test") + ) refute_equal dataset.compute_dataset_digest, - dataset.compute_dataset_digest(entries) + dataset.compute_dataset_digest(entries) end it "changes if an entry is removed" do entries = dataset.compute_dataset_identity_from_files entries.pop refute_equal dataset.compute_dataset_digest, - dataset.compute_dataset_digest(entries) + dataset.compute_dataset_digest(entries) end it "is not sensitive to the identity entries order" do entries = dataset.compute_dataset_identity_from_files entries = [entries[1], entries[0]] assert_equal dataset.compute_dataset_digest, - dataset.compute_dataset_digest(entries) + dataset.compute_dataset_digest(entries) end end describe "weak_validate_identity_metadata" do @@ -335,7 +339,7 @@ def write_metadata( end end it "raises if a file size mismatches" do - dataset_pathname("roby-events.0.log").open('a') { |io| io.write('10') } + dataset_pathname("roby-events.0.log").open("a") { 
|io| io.write("10") } assert_raises(Dataset::InvalidIdentityMetadata) do dataset.weak_validate_identity_metadata end @@ -368,13 +372,13 @@ def write_metadata( end end it "raises if a file size mismatches" do - dataset_pathname("roby-events.0.log").open('a') { |io| io.write('10') } + dataset_pathname("roby-events.0.log").open("a") { |io| io.write("10") } assert_raises(Dataset::InvalidIdentityMetadata) do dataset.validate_identity_metadata end end it "raises if the contents of a file changed" do - dataset_pathname("roby-events.0.log").open('a') { |io| io.seek(5); io.write('0') } + dataset_pathname("roby-events.0.log").open("a") { |io| io.seek(5); io.write("0") } assert_raises(Dataset::InvalidIdentityMetadata) do dataset.validate_identity_metadata end @@ -383,14 +387,14 @@ def write_metadata( describe "#metadata_reset" do before do - (dataset_path + Dataset::BASENAME_METADATA).open('w') do |io| - YAML.dump(Hash['test' => [10]], io) + (dataset_path + Dataset::BASENAME_METADATA).open("w") do |io| + YAML.dump(Hash["test" => [10]], io) end end it "empties the metadata" do dataset.metadata dataset.metadata_reset - assert_equal Hash.new, dataset.metadata + assert_equal({}, dataset.metadata) end it "does not cause a read from disk if called first" do flexmock(dataset).should_receive(:metadata_read_from_file).never @@ -401,13 +405,13 @@ def write_metadata( describe "#metadata" do it "loads the data from file" do - (dataset_path + Dataset::BASENAME_METADATA).open('w') do |io| - YAML.dump(Hash['test' => [10]], io) + (dataset_path + Dataset::BASENAME_METADATA).open("w") do |io| + YAML.dump(Hash["test" => [10]], io) end - assert_equal Hash['test' => Set[10]], dataset.metadata + assert_equal Hash["test" => Set[10]], dataset.metadata end it "sets the metadata to an empty hash if there is no file" do - assert_equal Hash.new, dataset.metadata + assert_equal({}, dataset.metadata) end it "loads the metadata only once" do metadata_hash = dataset.metadata @@ -418,65 +422,65 @@ def write_metadata( describe "#metadata_set" do it "creates a new key->values mapping" do dataset.metadata_set("test", 10, 20) - assert_equal Hash['test' => Set[10, 20]], dataset.metadata + assert_equal Hash["test" => Set[10, 20]], dataset.metadata end it "resets existing values with the new ones" do dataset.metadata_add("test", 10, 20) dataset.metadata_set("test", 30, 40) - assert_equal Hash['test' => Set[30, 40]], dataset.metadata + assert_equal Hash["test" => Set[30, 40]], dataset.metadata end end describe "#metadata_add" do it "creates a new key->values mapping" do dataset.metadata_add("test", 10, 20) - assert_equal Hash['test' => Set[10, 20]], dataset.metadata + assert_equal Hash["test" => Set[10, 20]], dataset.metadata end it "merges new values to existing ones" do dataset.metadata_add("test", 10, 20) dataset.metadata_add("test", 10, 30) - assert_equal Hash['test' => Set[10, 20, 30]], dataset.metadata + assert_equal Hash["test" => Set[10, 20, 30]], dataset.metadata end end describe "#metadata_fetch" do it "returns a single value" do - dataset.metadata_add 'test', 10 - assert_equal 10, dataset.metadata_fetch('test') + dataset.metadata_add "test", 10 + assert_equal 10, dataset.metadata_fetch("test") end it "raises ArgumentError if more than one default value is given" do assert_raises(ArgumentError) do - dataset.metadata_fetch('test', 10, 20) + dataset.metadata_fetch("test", 10, 20) end end it "raises NoValue if there are none" do assert_raises(Dataset::NoValue) do - dataset.metadata_fetch('test') + dataset.metadata_fetch("test") end 
end it "raises MultipleValues if there is more than one" do - dataset.metadata_add 'test', 10, 20 + dataset.metadata_add "test", 10, 20 assert_raises(Dataset::MultipleValues) do - dataset.metadata_fetch('test') + dataset.metadata_fetch("test") end end it "returns the default if there is no value for the key" do - assert_equal 10, dataset.metadata_fetch('test', 10) + assert_equal 10, dataset.metadata_fetch("test", 10) end end describe "#metadata_fetch_all" do it "returns all values for the key" do - dataset.metadata_add 'test', 10, 20 - assert_equal Set[10, 20], dataset.metadata_fetch_all('test') + dataset.metadata_add "test", 10, 20 + assert_equal Set[10, 20], dataset.metadata_fetch_all("test") end it "raises NoValue if there are none and no defaults are given" do assert_raises(Dataset::NoValue) do - dataset.metadata_fetch_all('test') + dataset.metadata_fetch_all("test") end end it "returns the default if there is no value for the key" do - assert_equal Set[10, 20], dataset.metadata_fetch_all('test', 10, 20) + assert_equal Set[10, 20], dataset.metadata_fetch_all("test", 10, 20) end end @@ -484,32 +488,32 @@ def write_metadata( it "writes an empty metadata hash if there is no metadata" do dataset.metadata_write_to_file assert_equal Hash[], - YAML.load((dataset_path + Dataset::BASENAME_METADATA).read) + YAML.load((dataset_path + Dataset::BASENAME_METADATA).read) end it "writes the metadata to file" do - dataset.metadata_add 'test', 10, 20 + dataset.metadata_add "test", 10, 20 dataset.metadata_write_to_file - assert_equal Hash['test' => [10, 20]], - YAML.load((dataset_path + Dataset::BASENAME_METADATA).read) + assert_equal Hash["test" => [10, 20]], + YAML.load((dataset_path + Dataset::BASENAME_METADATA).read) end end describe "#each_pocolog_path" do it "enumerates the pocolog files in the dataset" do paths = dataset.each_pocolog_path.to_a - assert_equal [logfile_pathname('task0::port.0.log')], paths + assert_equal [logfile_pathname("task0::port.0.log")], paths end end describe "#each_pocolog_stream" do it "expects the pocolog cache files in the dataset's cache directory" do cache_path.mkpath - open_logfile logfile_path('task0::port.0.log'), index_dir: (cache_path + "pocolog").to_s - flexmock(Pocolog::Logfiles).new_instances. - should_receive(:rebuild_and_load_index). 
- never + open_logfile logfile_path("task0::port.0.log"), index_dir: (cache_path + "pocolog").to_s + flexmock(Pocolog::Logfiles).new_instances + .should_receive(:rebuild_and_load_index) + .never streams = dataset.each_pocolog_stream.to_a - assert_equal ['test'], streams.map(&:name) + assert_equal ["test"], streams.map(&:name) end end @@ -518,35 +522,35 @@ def write_metadata( before do @base_time = Time.at(342983, 3219) registry = Typelib::Registry.new - @double_t = registry.create_numeric '/double', 8, :float - create_logfile 'task0::port.0.log' do - create_logfile_stream 'test', - metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'] + @double_t = registry.create_numeric "/double", 8, :float + create_logfile "task0::port.0.log" do + create_logfile_stream "test", + metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"] write_logfile_sample base_time, base_time + 10, 1 write_logfile_sample base_time + 1, base_time + 20, 2 end - create_logfile 'task0::other.0.log' do - create_logfile_stream 'other_test', - metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'other'], - type: double_t + create_logfile "task0::other.0.log" do + create_logfile_stream "other_test", + metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "other"], + type: double_t write_logfile_sample base_time + 100, base_time + 300, 3 end cache_path.mkpath - open_logfile logfile_path('task0::port.0.log'), index_dir: (cache_path + "pocolog").to_s - open_logfile logfile_path('task0::other.0.log'), index_dir: (cache_path + "pocolog").to_s + open_logfile logfile_path("task0::port.0.log"), index_dir: (cache_path + "pocolog").to_s + open_logfile logfile_path("task0::other.0.log"), index_dir: (cache_path + "pocolog").to_s end it "loads stream information and returns LazyDataStream objects" do streams = dataset.read_lazy_data_streams.sort_by(&:name).reverse - assert_equal ['test', 'other_test'], streams.map(&:name) + assert_equal %w[test other_test], streams.map(&:name) assert_equal [int32_t, double_t], streams.map(&:type) - assert_equal [Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'], - Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'other']], - streams.map(&:metadata) + assert_equal [Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"], + Hash["rock_task_name" => "task0", "rock_task_object_name" => "other"]], + streams.map(&:metadata) assert_equal [[base_time, base_time + 1], [base_time + 100, base_time + 100]], - streams.map(&:interval_rt) + streams.map(&:interval_rt) assert_equal [[base_time + 10, base_time + 20], [base_time + 300, base_time + 300]], - streams.map(&:interval_lg) + streams.map(&:interval_lg) assert_equal [2, 1], streams.map(&:size) end @@ -557,15 +561,15 @@ def write_metadata( .should_receive(:rebuild_and_load_index).never streams = lazy_streams.map(&:syskit_eager_load).sort_by(&:name) .reverse - assert_equal ['test', 'other_test'], streams.map(&:name) + assert_equal %w[test other_test], streams.map(&:name) assert_equal [int32_t, double_t], streams.map(&:type) - assert_equal [Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'], - Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'other']], - streams.map(&:metadata) + assert_equal [Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"], + Hash["rock_task_name" => "task0", "rock_task_object_name" => "other"]], + streams.map(&:metadata) assert_equal [[base_time, base_time + 1], [base_time + 
100, base_time + 100]], - streams.map(&:interval_rt) + streams.map(&:interval_rt) assert_equal [[base_time + 10, base_time + 20], [base_time + 300, base_time + 300]], - streams.map(&:interval_lg) + streams.map(&:interval_lg) assert_equal [2, 1], streams.map(&:size) end end diff --git a/test/datastore/import_test.rb b/test/datastore/import_test.rb index 648a148..61bc353 100644 --- a/test/datastore/import_test.rb +++ b/test/datastore/import_test.rb @@ -1,7 +1,9 @@ -require 'test_helper' -require 'syskit/log/datastore/import' -require 'tmpdir' -require 'timecop' +# frozen_string_literal: true + +require "test_helper" +require "syskit/log/datastore/import" +require "tmpdir" +require "timecop" module Syskit::Log class Datastore @@ -10,7 +12,7 @@ class Datastore before do @root_path = Pathname.new(Dir.mktmpdir) - @datastore_path = root_path + 'datastore' + @datastore_path = root_path + "datastore" datastore_path.mkpath @datastore = Datastore.create(datastore_path) @import = Import.new(datastore) @@ -21,104 +23,104 @@ class Datastore describe "#prepare_import" do it "lists the pocolog files that should be copied, in normalized order" do - FileUtils.touch(file0_1 = logfile_pathname('file0.1.log')) - FileUtils.touch(file0_0 = logfile_pathname('file0.0.log')) - FileUtils.touch(file1_0 = logfile_pathname('file1.0.log')) + FileUtils.touch(file0_1 = logfile_pathname("file0.1.log")) + FileUtils.touch(file0_0 = logfile_pathname("file0.0.log")) + FileUtils.touch(file1_0 = logfile_pathname("file1.0.log")) assert_equal [[file0_0, file0_1, file1_0], [], [], []], import.prepare_import(logfile_pathname) end it "lists the test files that should be copied" do - FileUtils.touch(path = logfile_pathname('file0.txt')) + FileUtils.touch(path = logfile_pathname("file0.txt")) assert_equal [[], [path], [], []], import.prepare_import(logfile_pathname) end it "lists the Roby log files that should be copied" do - FileUtils.touch(path = logfile_pathname('test-events.log')) + FileUtils.touch(path = logfile_pathname("test-events.log")) assert_equal [[], [], [path], []], import.prepare_import(logfile_pathname) end it "raises if more than one file looks like a roby log file" do - FileUtils.touch(logfile_pathname('test-events.log')) - FileUtils.touch(logfile_pathname('test2-events.log')) + FileUtils.touch(logfile_pathname("test-events.log")) + FileUtils.touch(logfile_pathname("test2-events.log")) e = assert_raises(ArgumentError) do import.prepare_import(logfile_pathname) end assert_match "more than one Roby event log found", e.message end it "ignores pocolog's index files" do - FileUtils.touch(path = logfile_pathname('file0.1.log')) - FileUtils.touch(logfile_pathname('file0.1.idx')) + FileUtils.touch(path = logfile_pathname("file0.1.log")) + FileUtils.touch(logfile_pathname("file0.1.idx")) assert_equal [[path], [], [], []], import.prepare_import(logfile_pathname) end it "ignores Roby index files" do - FileUtils.touch(path = logfile_pathname('test-events.log')) - FileUtils.touch(logfile_pathname('test-index.log')) + FileUtils.touch(path = logfile_pathname("test-events.log")) + FileUtils.touch(logfile_pathname("test-index.log")) assert_equal [[], [], [path], []], import.prepare_import(logfile_pathname) end it "lists unrecognized files" do - FileUtils.touch(path = logfile_pathname('not_matching')) + FileUtils.touch(path = logfile_pathname("not_matching")) assert_equal [[], [], [], [path]], import.prepare_import(logfile_pathname) end it "lists unrecognized directories" do - (path = logfile_pathname('not_matching')).mkpath + (path = 
logfile_pathname("not_matching")).mkpath assert_equal [[], [], [], [path]], import.prepare_import(logfile_pathname) end end describe "#import" do before do - create_logfile 'test.0.log' do - create_logfile_stream 'test', - metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'] + create_logfile "test.0.log" do + create_logfile_stream "test", + metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"] end - FileUtils.touch logfile_pathname('test.txt') - FileUtils.touch logfile_pathname('test-events.log') - FileUtils.touch logfile_pathname('not_recognized_file') - logfile_pathname('not_recognized_dir').mkpath - FileUtils.touch logfile_pathname('not_recognized_dir', 'test') + FileUtils.touch logfile_pathname("test.txt") + FileUtils.touch logfile_pathname("test-events.log") + FileUtils.touch logfile_pathname("not_recognized_file") + logfile_pathname("not_recognized_dir").mkpath + FileUtils.touch logfile_pathname("not_recognized_dir", "test") end def tty_reporter - Pocolog::CLI::TTYReporter.new('', color: false, progress: false) + Pocolog::CLI::TTYReporter.new("", color: false, progress: false) end - it 'can import an empty folder' do + it "can import an empty folder" do Dir.mktmpdir do |dir| import.import([Pathname.new(dir)]) end end it "moves the results under the dataset's ID" do - flexmock(Dataset).new_instances.should_receive(:compute_dataset_digest). - and_return('ABCDEF') + flexmock(Dataset).new_instances.should_receive(:compute_dataset_digest) + .and_return("ABCDEF") import_dir = import.import([logfile_pathname]).dataset_path - assert_equal(datastore_path + 'core' + 'ABCDEF', import_dir) + assert_equal(datastore_path + "core" + "ABCDEF", import_dir) end - it 'raises if the target dataset ID already exists' do - flexmock(Dataset).new_instances.should_receive(:compute_dataset_digest). - and_return('ABCDEF') - (datastore_path + 'core' + 'ABCDEF').mkpath + it "raises if the target dataset ID already exists" do + flexmock(Dataset).new_instances.should_receive(:compute_dataset_digest) + .and_return("ABCDEF") + (datastore_path + "core" + "ABCDEF").mkpath assert_raises(Import::DatasetAlreadyExists) do import.import([logfile_pathname]) end end it "replaces the current dataset by the new one if the ID already exists but 'force' is true" do - digest = 'ABCDEF' + digest = "ABCDEF" flexmock(Dataset) .new_instances.should_receive(:compute_dataset_digest) .and_return(digest) - (datastore_path + 'core' + digest).mkpath - FileUtils.touch (datastore_path + 'core' + digest + 'file') + (datastore_path + "core" + digest).mkpath + FileUtils.touch(datastore_path + "core" + digest + "file") out, = capture_io do import.import( [logfile_pathname], reporter: tty_reporter, force: true ) end - assert_match /Replacing existing dataset #{digest} with new one/, out - assert !(datastore_path + digest + 'file').exist? + assert_match(/Replacing existing dataset #{digest} with new one/, out) + assert !(datastore_path + digest + "file").exist? end - it 'reports its progress' do + it "reports its progress" do # This is not really a unit test. 
It just exercises the code # path that reports progress, but checks nothing except the lack # of exceptions @@ -126,50 +128,51 @@ def tty_reporter import.import([logfile_pathname]) end end - it 'normalizes the pocolog logfiles' do + it "normalizes the pocolog logfiles" do expected_normalize_args = hsh( - output_path: datastore_path + 'incoming' + '0' + 'core' + 'pocolog', - index_dir: datastore_path + 'incoming' + '0' + 'cache' + 'pocolog') + output_path: datastore_path + "incoming" + "0" + "core" + "pocolog", + index_dir: datastore_path + "incoming" + "0" + "cache" + "pocolog" + ) - flexmock(Syskit::Log::Datastore).should_receive(:normalize). - with([logfile_pathname('test.0.log')], expected_normalize_args).once. - pass_thru + flexmock(Syskit::Log::Datastore).should_receive(:normalize) + .with([logfile_pathname("test.0.log")], expected_normalize_args).once + .pass_thru dataset = import.import([logfile_pathname]) - assert (dataset.dataset_path + 'pocolog' + 'task0::port.0.log').exist? + assert (dataset.dataset_path + "pocolog" + "task0::port.0.log").exist? end it "copies the text files" do import_dir = import.import([logfile_pathname]).dataset_path - assert logfile_pathname('test.txt').exist? - assert (import_dir + 'text' + 'test.txt').exist? + assert logfile_pathname("test.txt").exist? + assert (import_dir + "text" + "test.txt").exist? end it "copies the roby log files into roby-events.N.log" do import_dir = import.import([logfile_pathname]).dataset_path - assert logfile_pathname('test-events.log').exist? - assert (import_dir + 'roby-events.0.log').exist? + assert logfile_pathname("test-events.log").exist? + assert (import_dir + "roby-events.0.log").exist? end it "copies the unrecognized files" do import_dir = import.import([logfile_pathname]).dataset_path - assert logfile_pathname('not_recognized_file').exist? - assert logfile_pathname('not_recognized_dir').exist? - assert logfile_pathname('not_recognized_dir', 'test').exist? + assert logfile_pathname("not_recognized_file").exist? + assert logfile_pathname("not_recognized_dir").exist? + assert logfile_pathname("not_recognized_dir", "test").exist? - assert (import_dir + 'ignored' + 'not_recognized_file').exist? - assert (import_dir + 'ignored' + 'not_recognized_dir').exist? - assert (import_dir + 'ignored' + 'not_recognized_dir' + 'test').exist? + assert (import_dir + "ignored" + "not_recognized_file").exist? + assert (import_dir + "ignored" + "not_recognized_dir").exist? + assert (import_dir + "ignored" + "not_recognized_dir" + "test").exist? 
end it "imports the Roby metadata" do - roby_metadata = Array[Hash['app_name' => 'test']] - logfile_pathname("info.yml").open('w') do |io| + roby_metadata = Array[Hash["app_name" => "test"]] + logfile_pathname("info.yml").open("w") do |io| YAML.dump(roby_metadata, io) end dataset = import.import([logfile_pathname]) - assert_equal({ 'roby:app_name' => Set['test'] }, dataset.metadata) - assert_equal({ 'roby:app_name' => Set['test'] }, + assert_equal({ "roby:app_name" => Set["test"] }, dataset.metadata) + assert_equal({ "roby:app_name" => Set["test"] }, Dataset.new(dataset.dataset_path).metadata) end it "ignores the Roby metadata if it cannot be loaded" do - logfile_pathname("info.yml").open('w') do |io| + logfile_pathname("info.yml").open("w") do |io| io.write "%invalid_yaml" end @@ -177,7 +180,7 @@ def tty_reporter _out, err = capture_io do imported = import.import([logfile_pathname]) end - assert_match /failed to load Roby metadata/, err + assert_match(/failed to load Roby metadata/, err) assert_equal({}, imported.metadata) assert_equal({}, Dataset.new(imported.dataset_path).metadata) end diff --git a/test/datastore/index_build_test.rb b/test/datastore/index_build_test.rb index 01639b1..93a605c 100644 --- a/test/datastore/index_build_test.rb +++ b/test/datastore/index_build_test.rb @@ -1,16 +1,16 @@ # frozen_string_literal: true -require 'test_helper' -require 'roby/test/droby_log_helpers' -require 'syskit/log/datastore/index_build' +require "test_helper" +require "roby/test/droby_log_helpers" +require "syskit/log/datastore/index_build" module Syskit::Log class Datastore describe IndexBuild do attr_reader :datastore, :dataset, :index_build before do - @datastore = Datastore.create(logfile_pathname('datastore')) - @dataset = create_dataset 'TEST' + @datastore = Datastore.create(logfile_pathname("datastore")) + @dataset = create_dataset "TEST" @index_build = IndexBuild.new(datastore, dataset) end @@ -22,31 +22,31 @@ def cache_path dataset.cache_path end - describe '#rebuild_pocolog_indexes' do + describe "#rebuild_pocolog_indexes" do before do - create_logfile 'task::port.0.log' do + create_logfile "task::port.0.log" do create_logfile_stream( - 'test', metadata: { 'rock_task_name' => 'task', - 'rock_task_object_name' => 'port' } + "test", metadata: { "rock_task_name" => "task", + "rock_task_object_name" => "port" } ) write_logfile_sample Time.now, Time.now, 10 end end - it 'does nothing if the dataset has no pocolog streams' do - logfile_pathname('task::port.0.log').unlink + it "does nothing if the dataset has no pocolog streams" do + logfile_pathname("task::port.0.log").unlink index_build.rebuild_pocolog_indexes end - it 'creates the cache dir if it is missing' do + it "creates the cache dir if it is missing" do index_build.rebuild_pocolog_indexes assert( - (dataset.cache_path + 'pocolog').directory? + (dataset.cache_path + "pocolog").directory? 
) end - it 'does nothing if a valid index file exists' do - pocolog_index_dir = (dataset.cache_path + 'pocolog') - open_logfile('task::port.0.log', index_dir: pocolog_index_dir).close - index_contents = (pocolog_index_dir + 'task::port.0.idx').read + it "does nothing if a valid index file exists" do + pocolog_index_dir = (dataset.cache_path + "pocolog") + open_logfile("task::port.0.log", index_dir: pocolog_index_dir).close + index_contents = (pocolog_index_dir + "task::port.0.idx").read flexmock(Pocolog::Format::Current) .should_receive(:rebuild_index_file) .never @@ -54,12 +54,12 @@ def cache_path index_build.rebuild_pocolog_indexes assert_equal( index_contents, - (dataset.cache_path + 'pocolog' + 'task::port.0.idx').read + (dataset.cache_path + "pocolog" + "task::port.0.idx").read ) end - it 'rebuilds the index if the file is not valid' do - pocolog_index_dir = (dataset.cache_path + 'pocolog') - open_logfile('task::port.0.log', index_dir: pocolog_index_dir).close + it "rebuilds the index if the file is not valid" do + pocolog_index_dir = (dataset.cache_path + "pocolog") + open_logfile("task::port.0.log", index_dir: pocolog_index_dir).close flexmock(Pocolog::Format::Current) .should_receive(:read_index_stream_info).once .and_raise(Pocolog::InvalidIndex) @@ -70,9 +70,9 @@ def cache_path index_build.rebuild_pocolog_indexes end it "forces index rebuilding if 'force' is true" do - pocolog_index_dir = (dataset.cache_path + 'pocolog') - open_logfile('task::port.0.log', index_dir: pocolog_index_dir).close - index_contents = (pocolog_index_dir + 'task::port.0.idx').read + pocolog_index_dir = (dataset.cache_path + "pocolog") + open_logfile("task::port.0.log", index_dir: pocolog_index_dir).close + index_contents = (pocolog_index_dir + "task::port.0.idx").read flexmock(Pocolog::Format::Current) .should_receive(:rebuild_index_file) .once.pass_thru @@ -80,70 +80,70 @@ def cache_path index_build.rebuild_pocolog_indexes(force: true) assert_equal( index_contents, - (dataset.cache_path + 'pocolog' + 'task::port.0.idx').read + (dataset.cache_path + "pocolog" + "task::port.0.idx").read ) end - it 'creates a new index file if none exists' do + it "creates a new index file if none exists" do flexmock(Pocolog::Format::Current) .should_receive(:rebuild_index_file) .once.pass_thru index_build.rebuild_pocolog_indexes(force: true) assert( - (dataset.cache_path + 'pocolog' + 'task::port.0.idx').exist? + (dataset.cache_path + "pocolog" + "task::port.0.idx").exist? ) end end - describe '#rebuild_roby_index' do + describe "#rebuild_roby_index" do include Roby::Test::DRobyLogHelpers before do - droby_create_event_log((dataset_path + 'roby-events.0.log').to_s) do + droby_create_event_log((dataset_path + "roby-events.0.log").to_s) do droby_write_event :test, 10 end end - it 'does nothing if there are no roby indexes' do - (dataset_path + 'roby-events.0.log').unlink + it "does nothing if there are no roby indexes" do + (dataset_path + "roby-events.0.log").unlink index_build.rebuild_roby_index end - it 'creates the cache dir if it is missing' do + it "creates the cache dir if it is missing" do index_build.rebuild_roby_index assert cache_path.directory? 
end - it 'does nothing if a valid index file exists' do + it "does nothing if a valid index file exists" do cache_path.mkpath Roby::DRoby::Logfile::Index.rebuild_file( - dataset_path + 'roby-events.0.log', - cache_path + 'roby-events.0.idx' + dataset_path + "roby-events.0.log", + cache_path + "roby-events.0.idx" ) flexmock(Roby::DRoby::Logfile::Index) .should_receive(:rebuild_file).never index_build.rebuild_roby_index end - it 'rebuilds if a valid index file exists but force is true' do + it "rebuilds if a valid index file exists but force is true" do cache_path.mkpath Roby::DRoby::Logfile::Index.rebuild_file( - dataset_path + 'roby-events.0.log', - cache_path + 'roby-events.0.idx' + dataset_path + "roby-events.0.log", + cache_path + "roby-events.0.idx" ) flexmock(Roby::DRoby::Logfile::Index) .should_receive(:rebuild_file).once.pass_thru index_build.rebuild_roby_index(force: true) end - it 'rebuilds if no index file exists' do + it "rebuilds if no index file exists" do flexmock(Roby::DRoby::Logfile::Index) .should_receive(:rebuild_file) .once.pass_thru index_build.rebuild_roby_index assert Roby::DRoby::Logfile::Index.valid_file?( - dataset_path + 'roby-events.0.log', - cache_path + 'roby-events.0.idx' + dataset_path + "roby-events.0.log", + cache_path + "roby-events.0.idx" ) end - it 'skips the roby file if its format is not current' do - (dataset_path + 'roby-events.0.log').open('w') do |io| + it "skips the roby file if its format is not current" do + (dataset_path + "roby-events.0.log").open("w") do |io| Roby::DRoby::Logfile.write_header(io, version: 0) end reporter = flexmock(Pocolog::CLI::NullReporter.new) diff --git a/test/datastore/normalize_test.rb b/test/datastore/normalize_test.rb index 057b094..145503b 100644 --- a/test/datastore/normalize_test.rb +++ b/test/datastore/normalize_test.rb @@ -1,5 +1,7 @@ -require 'test_helper' -require 'syskit/log/datastore/normalize' +# frozen_string_literal: true + +require "test_helper" +require "syskit/log/datastore/normalize" module Syskit::Log class Datastore @@ -12,18 +14,18 @@ class Datastore describe "#normalize" do before do - create_logfile 'file0.0.log' do - create_logfile_stream 'stream0', metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'] + create_logfile "file0.0.log" do + create_logfile_stream "stream0", metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"] write_logfile_sample base_time + 2, base_time + 20, 2 - create_logfile_stream 'stream1', metadata: Hash['rock_task_name' => 'task1', 'rock_task_object_name' => 'port'] + create_logfile_stream "stream1", metadata: Hash["rock_task_name" => "task1", "rock_task_object_name" => "port"] write_logfile_sample base_time + 1, base_time + 10, 1 end end it "splits the file into a one-file-per-stream scheme" do - logfile_pathname('normalized').mkdir - normalize.normalize([logfile_pathname('file0.0.log')]) - normalized_dir = logfile_pathname('normalized') + logfile_pathname("normalized").mkdir + normalize.normalize([logfile_pathname("file0.0.log")]) + normalized_dir = logfile_pathname("normalized") stream = open_logfile_stream( normalized_dir + "task0::port.0.log", "task0.port" ) @@ -36,149 +38,151 @@ class Datastore stream.samples.to_a end it "generates valid index files for the normalized streams" do - logfile_pathname('normalized').mkdir - normalize.normalize([logfile_pathname('file0.0.log')]) - flexmock(Pocolog::Logfiles).new_instances. - should_receive(:rebuild_and_load_index). 
- never - normalized_dir = logfile_pathname('normalized') - open_logfile_stream (normalized_dir + "task0::port.0.log"), 'task0.port' - open_logfile_stream (normalized_dir + "task1::port.0.log"), 'task1.port' + logfile_pathname("normalized").mkdir + normalize.normalize([logfile_pathname("file0.0.log")]) + flexmock(Pocolog::Logfiles).new_instances + .should_receive(:rebuild_and_load_index) + .never + normalized_dir = logfile_pathname("normalized") + open_logfile_stream (normalized_dir + "task0::port.0.log"), "task0.port" + open_logfile_stream (normalized_dir + "task1::port.0.log"), "task1.port" end it "allows to specify the cache directory" do - logfile_pathname('normalized').mkdir + logfile_pathname("normalized").mkdir index_dir = logfile_pathname("cache") - normalize.normalize([logfile_pathname('file0.0.log')], index_dir: index_dir) - flexmock(Pocolog::Logfiles).new_instances. - should_receive(:rebuild_and_load_index). - never - normalized_dir = logfile_pathname('normalized') - open_logfile_stream (normalized_dir + "task0::port.0.log"), 'task0.port', index_dir: index_dir - open_logfile_stream (normalized_dir + "task1::port.0.log"), 'task1.port', index_dir: index_dir + normalize.normalize([logfile_pathname("file0.0.log")], index_dir: index_dir) + flexmock(Pocolog::Logfiles).new_instances + .should_receive(:rebuild_and_load_index) + .never + normalized_dir = logfile_pathname("normalized") + open_logfile_stream (normalized_dir + "task0::port.0.log"), "task0.port", index_dir: index_dir + open_logfile_stream (normalized_dir + "task1::port.0.log"), "task1.port", index_dir: index_dir end describe "digest generation" do it "optionally computes the sha256 digest of the generated file, without the prologue" do - logfile_pathname('normalized').mkdir - result = normalize.normalize([logfile_pathname('file0.0.log')], compute_sha256: true) + logfile_pathname("normalized").mkdir + result = normalize.normalize([logfile_pathname("file0.0.log")], compute_sha256: true) - path = logfile_pathname('normalized', 'task0::port.0.log') + path = logfile_pathname("normalized", "task0::port.0.log") expected = Digest::SHA256.hexdigest(path.read[Pocolog::Format::Current::PROLOGUE_SIZE..-1]) assert_equal expected, result[path].hexdigest end it "generates valid index files for the normalized streams" do - logfile_pathname('normalized').mkdir - normalize.normalize([logfile_pathname('file0.0.log')], compute_sha256: true) - flexmock(Pocolog::Logfiles).new_instances. - should_receive(:rebuild_and_load_index). 
- never - normalized_dir = logfile_pathname('normalized') - open_logfile_stream (normalized_dir + "task0::port.0.log"), 'task0.port' - open_logfile_stream (normalized_dir + "task1::port.0.log"), 'task1.port' + logfile_pathname("normalized").mkdir + normalize.normalize([logfile_pathname("file0.0.log")], compute_sha256: true) + flexmock(Pocolog::Logfiles).new_instances + .should_receive(:rebuild_and_load_index) + .never + normalized_dir = logfile_pathname("normalized") + open_logfile_stream (normalized_dir + "task0::port.0.log"), "task0.port" + open_logfile_stream (normalized_dir + "task1::port.0.log"), "task1.port" end end it "detects followup streams" do - create_logfile 'file0.1.log' do - create_logfile_stream 'stream0', metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'] + create_logfile "file0.1.log" do + create_logfile_stream "stream0", metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"] write_logfile_sample base_time + 3, base_time + 30, 3 end - normalize.normalize([logfile_pathname('file0.0.log'), logfile_pathname('file0.1.log')]) - normalized_dir = logfile_pathname('normalized') - stream = open_logfile_stream (normalized_dir + "task0::port.0.log"), 'task0.port' + normalize.normalize([logfile_pathname("file0.0.log"), logfile_pathname("file0.1.log")]) + normalized_dir = logfile_pathname("normalized") + stream = open_logfile_stream (normalized_dir + "task0::port.0.log"), "task0.port" assert_equal [[base_time + 2, base_time + 20, 2], [base_time + 3, base_time + 30, 3]], stream.samples.to_a end it "raises if a potential followup stream has an non-matching realtime range" do - create_logfile 'file0.1.log' do - create_logfile_stream 'stream0', metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'] + create_logfile "file0.1.log" do + create_logfile_stream "stream0", metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"] write_logfile_sample base_time + 1, base_time + 30, 3 end capture_io do assert_raises(Normalize::InvalidFollowupStream) do - normalize.normalize([logfile_pathname('file0.0.log'), logfile_pathname('file0.1.log')]) + normalize.normalize([logfile_pathname("file0.0.log"), logfile_pathname("file0.1.log")]) end end end it "raises if a potential followup stream has an non-matching logical time range" do - create_logfile 'file0.1.log' do - create_logfile_stream 'stream0', metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'] + create_logfile "file0.1.log" do + create_logfile_stream "stream0", metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"] write_logfile_sample base_time + 3, base_time + 10, 3 end capture_io do assert_raises(Normalize::InvalidFollowupStream) do - normalize.normalize([logfile_pathname('file0.0.log'), logfile_pathname('file0.1.log')]) + normalize.normalize([logfile_pathname("file0.0.log"), logfile_pathname("file0.1.log")]) end end end it "raises if a potential followup stream has an non-matching type" do - create_logfile 'file0.1.log' do - stream_t = Typelib::Registry.new.create_numeric '/test_t', 8, :sint - create_logfile_stream 'stream0', - type: stream_t, - metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'] + create_logfile "file0.1.log" do + stream_t = Typelib::Registry.new.create_numeric "/test_t", 8, :sint + create_logfile_stream "stream0", + type: stream_t, + metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"] write_logfile_sample base_time + 3, 
base_time + 30, 3 end capture_io do assert_raises(Normalize::InvalidFollowupStream) do - normalize.normalize([logfile_pathname('file0.0.log'), logfile_pathname('file0.1.log')]) + normalize.normalize([logfile_pathname("file0.0.log"), logfile_pathname("file0.1.log")]) end end end it "deletes newly created files if the initialization of a new file fails" do - create_logfile 'file0.1.log' do - create_logfile_stream 'stream0', - metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'] + create_logfile "file0.1.log" do + create_logfile_stream "stream0", + metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"] write_logfile_sample base_time + 3, base_time + 30, 3 end - error_class = Class.new(Exception) + error_class = Class.new(RuntimeError) flexmock(File).new_instances.should_receive(:write).and_raise(error_class) - _out, err = capture_io do + _out, = capture_io do assert_raises(error_class) do - normalize.normalize([logfile_pathname('file0.0.log'), logfile_pathname('file0.1.log')]) + normalize.normalize([logfile_pathname("file0.0.log"), logfile_pathname("file0.1.log")]) end end - normalized_dir = logfile_pathname('normalized') + normalized_dir = logfile_pathname("normalized") refute (normalized_dir + "task0::port.0.log").exist? end end describe "#normalize_logfile" do it "skips invalid files" do - logfile_pathname('file0.0.log').open('w') do |io| + logfile_pathname("file0.0.log").open("w") do |io| io.write "INVALID" end reporter = flexmock(Pocolog::CLI::NullReporter.new) flexmock(reporter).should_receive(:current).and_return(10) - reporter.should_receive(:warn). - with("file0.0.log does not seem to be a valid pocolog file, skipping"). - once + reporter.should_receive(:warn) + .with("file0.0.log does not seem to be a valid pocolog file, skipping") + .once reporter.should_receive(:current=).with(17).once - assert_equal [nil, Array.new], normalize.normalize_logfile( - logfile_pathname('file0.0.log'), - logfile_pathname('normalized'), reporter: reporter) + assert_equal [nil, []], normalize.normalize_logfile( + logfile_pathname("file0.0.log"), + logfile_pathname("normalized"), reporter: reporter + ) end it "handles truncated files" do - create_logfile 'file0.0.log' do - create_logfile_stream 'stream0', - metadata: Hash['rock_task_name' => 'task0', 'rock_task_object_name' => 'port'] + create_logfile "file0.0.log" do + create_logfile_stream "stream0", + metadata: Hash["rock_task_name" => "task0", "rock_task_object_name" => "port"] write_logfile_sample base_time + 3, base_time + 30, 3 end - file0_path = logfile_pathname('file0.0.log') + file0_path = logfile_pathname("file0.0.log") file0_size = file0_path.stat.size - logfile_pathname('file0.0.log').open('a') do |io| + logfile_pathname("file0.0.log").open("a") do |io| io.truncate(file0_size - 1) end - logfile_pathname('normalized').mkpath + logfile_pathname("normalized").mkpath reporter = flexmock(Pocolog::CLI::NullReporter.new) flexmock(reporter).should_receive(:current).and_return(10) - reporter.should_receive(:warn). - with(/^file0.0.log looks truncated/). 
- once + reporter.should_receive(:warn) + .with(/^file0.0.log looks truncated/) + .once reporter.should_receive(:current=).with(10 + file0_size - 1).once error, ios = normalize.normalize_logfile( - logfile_pathname('file0.0.log'), - logfile_pathname('normalized'), reporter: reporter) + logfile_pathname("file0.0.log"), + logfile_pathname("normalized"), reporter: reporter + ) assert_nil error - assert_equal logfile_pathname('normalized', 'task0::port.0.log'), ios[0].path + assert_equal logfile_pathname("normalized", "task0::port.0.log"), ios[0].path end end end diff --git a/test/datastore_test.rb b/test/datastore_test.rb index 67120a9..56e0f90 100644 --- a/test/datastore_test.rb +++ b/test/datastore_test.rb @@ -1,4 +1,6 @@ -require 'test_helper' +# frozen_string_literal: true + +require "test_helper" module Syskit::Log describe Datastore do @@ -6,7 +8,7 @@ module Syskit::Log before do @root_path = Pathname.new(Dir.mktmpdir) - @datastore_path = root_path + 'datastore' + @datastore_path = root_path + "datastore" datastore_path.mkpath @datastore = Datastore.new(datastore_path) @@ -41,33 +43,33 @@ module Syskit::Log describe "#in_incoming" do it "creates an incoming directory in the datastore and yields it" do datastore.in_incoming do |core_path, cache_path| - assert_equal (datastore_path + 'incoming' + '0' + "core"), core_path + assert_equal (datastore_path + "incoming" + "0" + "core"), core_path assert core_path.directory? - assert_equal (datastore_path + 'incoming' + '0' + "cache"), cache_path + assert_equal (datastore_path + "incoming" + "0" + "cache"), cache_path assert cache_path.directory? end end it "handles having another process create a path concurrently" do (datastore_path + "incoming").mkpath called = false - flexmock(Pathname).new_instances.should_receive(:mkdir). - and_return do - if !called - called = true - raise Errno::EEXIST - end + flexmock(Pathname).new_instances.should_receive(:mkdir) + .and_return do + unless called + called = true + raise Errno::EEXIST end + end datastore.in_incoming do |core_path, cache_path| - assert_equal (datastore_path + 'incoming' + '1' + "core"), core_path - assert_equal (datastore_path + 'incoming' + '1' + "cache"), cache_path + assert_equal (datastore_path + "incoming" + "1" + "core"), core_path + assert_equal (datastore_path + "incoming" + "1" + "cache"), cache_path end end it "ignores existing paths" do (datastore_path + "incoming" + "0").mkpath datastore.in_incoming do |core_path, cache_path| - assert_equal (datastore_path + 'incoming' + '1' + "core"), core_path - assert_equal (datastore_path + 'incoming' + '1' + "cache"), cache_path + assert_equal (datastore_path + "incoming" + "1" + "core"), core_path + assert_equal (datastore_path + "incoming" + "1" + "cache"), cache_path end end it "deletes the created paths if they still exist at the end of the block" do @@ -89,12 +91,12 @@ module Syskit::Log describe "#has?" 
do attr_reader :digest before do - @digest = Datastore::Dataset.string_digest('exists') - (datastore_path + 'core' + digest).mkpath + @digest = Datastore::Dataset.string_digest("exists") + (datastore_path + "core" + digest).mkpath end it "returns false if there is no folder with the dataset digest in the store" do - refute datastore.has?(Datastore::Dataset.string_digest('does_not_exist')) + refute datastore.has?(Datastore::Dataset.string_digest("does_not_exist")) end it "returns true if there is a folder with the dataset digest in the store" do assert datastore.has?(digest) @@ -104,7 +106,7 @@ module Syskit::Log describe "#delete" do attr_reader :digest, :dataset_path, :cache_path before do - @digest = Datastore::Dataset.string_digest('exists') + @digest = Datastore::Dataset.string_digest("exists") @dataset_path = datastore.core_path_of(digest) dataset_path.mkpath @cache_path = datastore.cache_path_of(digest) @@ -129,7 +131,7 @@ module Syskit::Log describe "#get" do attr_reader :digest, :dataset_path before do - @digest = Datastore::Dataset.string_digest('exists') + @digest = Datastore::Dataset.string_digest("exists") @dataset_path = datastore.core_path_of(digest) dataset_path.mkpath dataset = Datastore::Dataset.new(dataset_path) @@ -162,8 +164,8 @@ module Syskit::Log create_dataset("a0fa") {} end it "returns a dataset whose digest starts with the given string" do - assert_equal datastore.core_path_of('a0ea'), - datastore.find_dataset_from_short_digest("a0e").dataset_path + assert_equal datastore.core_path_of("a0ea"), + datastore.find_dataset_from_short_digest("a0e").dataset_path end it "returns nil if nothing matches" do assert_nil datastore.find_dataset_from_short_digest("b") diff --git a/test/deployment_group_test.rb b/test/deployment_group_test.rb index 390e4d0..19eb3b2 100644 --- a/test/deployment_group_test.rb +++ b/test/deployment_group_test.rb @@ -1,68 +1,69 @@ -require 'test_helper' +# frozen_string_literal: true + +require "test_helper" module Syskit::Log describe Extensions::Configuration do attr_reader :group, :streams, :double_t before do @group = Syskit::Models::DeploymentGroup.new - double_t = Roby.app.default_loader.registry.get '/double' + double_t = Roby.app.default_loader.registry.get "/double" - create_logfile 'test.0.log' do - create_logfile_stream '/port0', type: double_t, - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object0', - 'rock_task_model' => 'task::Model', - 'rock_stream_type' => 'port'] - create_logfile_stream '/port1_1', type: double_t, - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object1', - 'rock_task_model' => 'task::Model', - 'rock_stream_type' => 'port'] - create_logfile_stream '/port1_2', type: double_t, - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object1', - 'rock_task_model' => 'task::Model', - 'rock_stream_type' => 'port'] - create_logfile_stream '/property0', type: double_t, - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object0', - 'rock_task_model' => 'task::Model', - 'rock_stream_type' => 'property'] - create_logfile_stream '/property1_1', type: double_t, - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object1', - 'rock_task_model' => 'task::Model', - 'rock_stream_type' => 'property'] - create_logfile_stream '/property1_2', type: double_t, - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object1', - 'rock_task_model' => 'task::Model', - 'rock_stream_type' => 'property'] + 
create_logfile "test.0.log" do + create_logfile_stream "/port0", type: double_t, + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object0", + "rock_task_model" => "task::Model", + "rock_stream_type" => "port"] + create_logfile_stream "/port1_1", type: double_t, + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object1", + "rock_task_model" => "task::Model", + "rock_stream_type" => "port"] + create_logfile_stream "/port1_2", type: double_t, + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object1", + "rock_task_model" => "task::Model", + "rock_stream_type" => "port"] + create_logfile_stream "/property0", type: double_t, + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object0", + "rock_task_model" => "task::Model", + "rock_stream_type" => "property"] + create_logfile_stream "/property1_1", type: double_t, + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object1", + "rock_task_model" => "task::Model", + "rock_stream_type" => "property"] + create_logfile_stream "/property1_2", type: double_t, + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object1", + "rock_task_model" => "task::Model", + "rock_stream_type" => "property"] end streams = Streams.from_dir(logfile_path) - @streams = streams.find_task_by_name('task') + @streams = streams.find_task_by_name("task") end describe "#use_pocolog_task" do it "registers the stream-to-port mappings for the matching ports on the deployment model" do task_m = Syskit::TaskContext.new_submodel deployment_m = Deployment.new_submodel - flexmock(Syskit::Log::Deployment). - should_receive(:for_streams). - with(streams, ->(h) { h[:model] == task_m && h[:name] == 'test' }). 
- and_return(mock = flexmock(deployment_m)) + flexmock(Syskit::Log::Deployment) + .should_receive(:for_streams) + .with(streams, ->(h) { h[:model] == task_m && h[:name] == "test" }) + .and_return(mock = flexmock(deployment_m)) - configured_deployment = group.use_pocolog_task(streams, name: 'test', model: task_m, allow_missing: true) + configured_deployment = group.use_pocolog_task(streams, name: "test", model: task_m, allow_missing: true) assert_equal mock, configured_deployment.model end # This really is a synthetic test it "allows for the deployment of a stream task" do - task_m = Syskit::TaskContext.new_submodel(orogen_model_name: 'task::Model') + task_m = Syskit::TaskContext.new_submodel(orogen_model_name: "task::Model") req = task_m.to_instance_requirements.use_deployment_group(streams) syskit_deploy(req) end end end end - diff --git a/test/deployment_test.rb b/test/deployment_test.rb index 25ed1ed..10ab7ab 100644 --- a/test/deployment_test.rb +++ b/test/deployment_test.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -require 'test_helper' +require "test_helper" module Syskit::Log describe Deployment do @@ -8,43 +8,43 @@ module Syskit::Log attr_reader :replay_manager, :streams, :port_stream, :task_m, :deployment_m attr_reader :subject before do - double_t = Roby.app.default_loader.registry.get '/double' + double_t = Roby.app.default_loader.registry.get "/double" - create_logfile 'test.0.log' do + create_logfile "test.0.log" do stream0 = create_logfile_stream( - '/port0', + "/port0", type: double_t, - metadata: { 'rock_task_name' => 'task', - 'rock_task_object_name' => 'out', - 'rock_stream_type' => 'port' } + metadata: { "rock_task_name" => "task", + "rock_task_object_name" => "out", + "rock_stream_type" => "port" } ) stream1 = create_logfile_stream( - '/port1', + "/port1", type: double_t, - metadata: { 'rock_task_name' => 'task', - 'rock_task_object_name' => 'other_out', - 'rock_stream_type' => 'port' } + metadata: { "rock_task_name" => "task", + "rock_task_object_name" => "other_out", + "rock_stream_type" => "port" } ) stream0.write Time.at(0), Time.at(0), 0 stream1.write Time.at(1), Time.at(1), 1 end @streams = Streams.from_dir(logfile_path) - .find_task_by_name('task') - @port_stream = streams.find_port_by_name('out') + .find_task_by_name("task") + @port_stream = streams.find_port_by_name("out") @task_m = Syskit::TaskContext.new_submodel do - output_port 'out', double_t + output_port "out", double_t end @replay_manager = execution_engine.pocolog_replay_manager @deployment_m = Syskit::Log::Deployment.for_streams( - streams, model: task_m, name: 'task' + streams, model: task_m, name: "task" ) plan.add_permanent_task( - @subject = deployment_m.new(process_name: 'test', on: 'pocolog') + @subject = deployment_m.new(process_name: "test", on: "pocolog") ) end - it 'gets notified of new samples when running' do + it "gets notified of new samples when running" do expect_execution { subject.start! } .to { emit subject.ready_event } flexmock(subject) @@ -54,14 +54,14 @@ module Syskit::Log replay_manager.step end - it 'does nothing if the streams are eof?' do + it "does nothing if the streams are eof?" do expect_execution { subject.start! } .to { emit subject.ready_event } replay_manager.step replay_manager.step end - describe 'dynamic stream addition and removal' do + describe "dynamic stream addition and removal" do attr_reader :other_deployment before do expect_execution { subject.start! 
} @@ -69,56 +69,56 @@ module Syskit::Log replay_manager.step other_task_m = Syskit::TaskContext.new_submodel do - output_port 'other_out', '/double' + output_port "other_out", "/double" end other_deployment_m = Syskit::Log::Deployment.for_streams( - streams, model: other_task_m, name: 'task' + streams, model: other_task_m, name: "task" ) @other_deployment = other_deployment_m.new( - process_name: 'other_test', on: 'pocolog' + process_name: "other_test", on: "pocolog" ) plan.add_permanent_task(other_deployment) end - it 'does not skip a sample when eof? and a new stream is added to the alignment' do + it "does not skip a sample when eof? and a new stream is added to the alignment" do replay_manager.step expect_execution { other_deployment.start! } .to { emit other_deployment.ready_event } flexmock(other_deployment) .should_receive(:process_sample) - .with(streams.find_port_by_name('other_out'), Time.at(1), 1) + .with(streams.find_port_by_name("other_out"), Time.at(1), 1) .once replay_manager.step end - it 'does not skip a sample when the current sample is from a stream that has been removed' do + it "does not skip a sample when the current sample is from a stream that has been removed" do expect_execution { other_deployment.start! } .to { emit other_deployment.ready_event } expect_execution { subject.stop! } .to { emit subject.stop_event } flexmock(other_deployment) .should_receive(:process_sample) - .with(streams.find_port_by_name('other_out'), Time.at(1), 1) + .with(streams.find_port_by_name("other_out"), Time.at(1), 1) .once replay_manager.step end end - it 'does not get notified if pending' do + it "does not get notified if pending" do flexmock(subject).should_receive(:process_sample).never replay_manager.step end - it 'does not get notified if stopped' do + it "does not get notified if stopped" do expect_execution { subject.start! }.to { emit subject.ready_event } expect_execution { subject.stop! }.to { emit subject.stop_event } flexmock(subject).should_receive(:process_sample).never replay_manager.step end - it 'forwards the samples to an existing, running, deployed task' do - plan.add(task = subject.task('task')) + it "forwards the samples to an existing, running, deployed task" do + plan.add(task = subject.task("task")) syskit_configure_and_start(task) reader = Orocos.allow_blocking_calls { task.orocos_task.out.reader } subject.process_sample(port_stream, Time.now, 1) @@ -126,15 +126,15 @@ module Syskit::Log assert_equal 1, sample end - it 'does not forward the samples to a configured task' do - plan.add(task = subject.task('task')) + it "does not forward the samples to a configured task" do + plan.add(task = subject.task("task")) syskit_configure(task) flexmock(task.orocos_task.out).should_receive(:write).never subject.process_sample(port_stream, Time.now, 1) end - it 'does not forward the samples to a finished task' do - plan.add(task = subject.task('task')) + it "does not forward the samples to a finished task" do + plan.add(task = subject.task("task")) syskit_configure_and_start(task) expect_execution { task.stop! 
} .to { emit task.stop_event } diff --git a/test/models/deployment_test.rb b/test/models/deployment_test.rb index 0daa9d8..c8e97e1 100644 --- a/test/models/deployment_test.rb +++ b/test/models/deployment_test.rb @@ -1,25 +1,27 @@ -require 'test_helper' +# frozen_string_literal: true + +require "test_helper" module Syskit::Log module Models describe Deployment do attr_reader :streams before do - double_t = Roby.app.default_loader.registry.get '/double' - mismatch_t = Typelib::Registry.new.create_numeric '/double', 4, :sint + double_t = Roby.app.default_loader.registry.get "/double" + mismatch_t = Typelib::Registry.new.create_numeric "/double", 4, :sint - create_logfile 'test.0.log' do - create_logfile_stream '/port0', type: double_t, - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object0', - 'rock_stream_type' => 'port'] - create_logfile_stream '/port1', type: mismatch_t, - metadata: Hash['rock_task_name' => "task_with_mismatching_type", - 'rock_task_object_name' => 'port_with_mismatching_type', - 'rock_stream_type' => 'port'] + create_logfile "test.0.log" do + create_logfile_stream "/port0", type: double_t, + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object0", + "rock_stream_type" => "port"] + create_logfile_stream "/port1", type: mismatch_t, + metadata: Hash["rock_task_name" => "task_with_mismatching_type", + "rock_task_object_name" => "port_with_mismatching_type", + "rock_stream_type" => "port"] end - @streams = Streams.from_dir(logfile_path). - find_task_by_name('task') + @streams = Streams.from_dir(logfile_path) + .find_task_by_name("task") Syskit::Log.logger.level = Logger::FATAL end @@ -33,53 +35,53 @@ module Models attr_reader :deployment_m, :task_m before do @task_m = task_m = Syskit::TaskContext.new_submodel do - input_port 'in', '/double' - output_port 'out', '/double' + input_port "in", "/double" + output_port "out", "/double" end - @deployment_m = Syskit::Log::Deployment.for_streams(TaskStreams.new, model: task_m, name: 'task') + @deployment_m = Syskit::Log::Deployment.for_streams(TaskStreams.new, model: task_m, name: "task") end it "raises ArgumentError if the port is not a port of the deployment's task model" do other_task_m = Syskit::TaskContext.new_submodel do - output_port 'out', '/double' + output_port "out", "/double" end assert_raises(ArgumentError) do - deployment_m.add_stream(streams.find_task_by_name('object0'), other_task_m.out_port) + deployment_m.add_stream(streams.find_task_by_name("object0"), other_task_m.out_port) end end it "raises MismatchingType if the stream and port have different types" do - streams = Streams.from_dir(logfile_path). 
- find_task_by_name('task_with_mismatching_type') + streams = Streams.from_dir(logfile_path) + .find_task_by_name("task_with_mismatching_type") replay_task_m = Syskit::Log::ReplayTaskContext.model_for(task_m.orogen_model) assert_raises(MismatchingType) do - deployment_m.add_stream(streams.find_port_by_name('port_with_mismatching_type'), replay_task_m.out_port) + deployment_m.add_stream(streams.find_port_by_name("port_with_mismatching_type"), replay_task_m.out_port) end end it "raises ArgumentError if the port is an input port" do assert_raises(ArgumentError) do - deployment_m.add_stream(streams.find_task_by_name('object0'), task_m.in_port) + deployment_m.add_stream(streams.find_task_by_name("object0"), task_m.in_port) end end end it "uses the task's port with the same name by default" do task_m = Syskit::TaskContext.new_submodel do - output_port 'object0', '/double' + output_port "object0", "/double" end replay_task_m = Syskit::Log::ReplayTaskContext.model_for(task_m.orogen_model) - deployment_m = Syskit::Log::Deployment.for_streams(TaskStreams.new, model: task_m, name: 'task') - deployment_m.add_stream(port_stream = streams.find_port_by_name('object0')) + deployment_m = Syskit::Log::Deployment.for_streams(TaskStreams.new, model: task_m, name: "task") + deployment_m.add_stream(port_stream = streams.find_port_by_name("object0")) assert_equal Hash[port_stream => replay_task_m.object0_port], - deployment_m.streams_to_port + deployment_m.streams_to_port end end describe "#add_streams_from" do it "issues a warning if allow_missing is true and some output ports do not have a matching stream" do task_m = Syskit::TaskContext.new_submodel do - output_port 'unknown_port', '/double' + output_port "unknown_port", "/double" end - deployment_m = Syskit::Log::Deployment.for_streams(TaskStreams.new, name: 'test', model: task_m) + deployment_m = Syskit::Log::Deployment.for_streams(TaskStreams.new, name: "test", model: task_m) flexmock(Syskit::Log, :strict).should_receive(:warn).with(/state/).once flexmock(Syskit::Log, :strict).should_receive(:warn).with(/unknown_port/).once deployment_m.add_streams_from(streams, allow_missing: true) @@ -87,9 +89,9 @@ module Models it "raises MissingStream if allow_missing is false and some output ports do not have a matching stream" do task_m = Syskit::TaskContext.new_submodel do - output_port 'unknown_port', '/double' + output_port "unknown_port", "/double" end - deployment_m = Syskit::Log::Deployment.for_streams(TaskStreams.new, name: 'test', model: task_m) + deployment_m = Syskit::Log::Deployment.for_streams(TaskStreams.new, name: "test", model: task_m) assert_raises(MissingStream) do deployment_m.add_streams_from(streams, allow_missing: false) end @@ -97,17 +99,16 @@ module Models it "adds the matching streams" do task_m = Syskit::TaskContext.new_submodel do - output_port 'object0', '/double' + output_port "object0", "/double" end replay_task_m = Syskit::Log::ReplayTaskContext.model_for(task_m.orogen_model) - deployment_m = Syskit::Log::Deployment.for_streams(TaskStreams.new, name: 'test', model: task_m) - flexmock(deployment_m).should_receive(:add_stream). - with(streams.find_port_by_name('object0'), replay_task_m.object0_port). 
- once + deployment_m = Syskit::Log::Deployment.for_streams(TaskStreams.new, name: "test", model: task_m) + flexmock(deployment_m).should_receive(:add_stream) + .with(streams.find_port_by_name("object0"), replay_task_m.object0_port) + .once deployment_m.add_streams_from(streams) end end end end end - diff --git a/test/models/replay_task_context_test.rb b/test/models/replay_task_context_test.rb index 86cf625..c0928c9 100644 --- a/test/models/replay_task_context_test.rb +++ b/test/models/replay_task_context_test.rb @@ -1,4 +1,6 @@ -require 'test_helper' +# frozen_string_literal: true + +require "test_helper" module Syskit::Log module Models describe ReplayTaskContext do @@ -19,7 +21,7 @@ module Models end it "sets the new model's name and registers it under OroGen::Pocolog" do task_m = Syskit::TaskContext.new_submodel( - orogen_model_name: 'project::Task' + orogen_model_name: "project::Task" ) replay_task_m = subject.model_for(task_m.orogen_model) assert_same ::OroGen::Pocolog.project.Task, replay_task_m @@ -33,7 +35,7 @@ module Models end it "copies the data services from the plain task model" do srv_m = Syskit::DataService.new_submodel - task_m.provides srv_m, as: 'test' + task_m.provides srv_m, as: "test" replay_task_m = subject.model_for(task_m.orogen_model) srv = replay_task_m.test_srv refute_nil srv @@ -42,18 +44,18 @@ module Models end it "copies the dynamic data services from the plain task model" do srv_m = Syskit::DataService.new_submodel do - output_port 'out', '/double' + output_port "out", "/double" end task_m = Syskit::TaskContext.new_submodel do - dynamic_output_port /^out_\w+$/, '/double' + dynamic_output_port(/^out_\w+$/, "/double") end - task_m.dynamic_service srv_m, as: 'test' do - provides srv_m, as: name, 'out' => "out_#{name}" + task_m.dynamic_service srv_m, as: "test" do + provides srv_m, as: name, "out" => "out_#{name}" end replay_task_m = subject.model_for(task_m.orogen_model) replay_task_m = replay_task_m.specialize - replay_task_m.require_dynamic_service 'test', as: 'dyn' + replay_task_m.require_dynamic_service "test", as: "dyn" srv = replay_task_m.dyn_srv assert_equal srv.out_port.to_component_port, replay_task_m.out_dyn_port end @@ -69,4 +71,3 @@ module Models end end end - diff --git a/test/replay_manager_test.rb b/test/replay_manager_test.rb index 7c681d9..aa15ce9 100644 --- a/test/replay_manager_test.rb +++ b/test/replay_manager_test.rb @@ -1,32 +1,34 @@ -require 'test_helper' +# frozen_string_literal: true + +require "test_helper" module Syskit::Log describe ReplayManager do attr_reader :subject, :streams, :port_stream, :task_m, :deployment_m before do - double_t = Roby.app.default_loader.registry.get '/double' + double_t = Roby.app.default_loader.registry.get "/double" - create_logfile 'test.0.log' do + create_logfile "test.0.log" do create_logfile_stream( - '/port0', + "/port0", type: double_t, - metadata: { 'rock_task_name' => 'task', - 'rock_task_object_name' => 'out', - 'rock_stream_type' => 'port' } + metadata: { "rock_task_name" => "task", + "rock_task_object_name" => "out", + "rock_stream_type" => "port" } ) end @streams = Streams.from_dir(logfile_pathname) - .find_task_by_name('task') - @port_stream = streams.find_port_by_name('out') + .find_task_by_name("task") + @port_stream = streams.find_port_by_name("out") @task_m = Syskit::TaskContext.new_submodel do - output_port 'out', double_t + output_port "out", double_t end plan = Roby::ExecutablePlan.new @subject = ReplayManager.new(plan.execution_engine) @deployment_m = Syskit::Log::Deployment - 
.for_streams(streams, model: task_m, name: 'task') + .for_streams(streams, model: task_m, name: "task") end describe "#register" do @@ -47,7 +49,7 @@ module Syskit::Log deployment_task = deployment_m.new flexmock(subject.stream_aligner) .should_receive(:add_streams) - .with(streams.find_port_by_name('out')) + .with(streams.find_port_by_name("out")) .once subject.register(deployment_task) end @@ -68,10 +70,10 @@ module Syskit::Log it "removes the streams that are managed by the deployment task from the aligner" do deployment_task = deployment_m.new subject.register(deployment_task) - flexmock(subject.stream_aligner). - should_receive(:remove_streams). - with(streams.find_port_by_name('out')). - once + flexmock(subject.stream_aligner) + .should_receive(:remove_streams) + .with(streams.find_port_by_name("out")) + .once subject.deregister(deployment_task) end it "does not deregister streams that are still in use by another deployment" do @@ -80,9 +82,9 @@ module Syskit::Log other_task = deployment_m.new subject.register(other_task) - flexmock(subject.stream_aligner). - should_receive(:remove_streams). - with().once.pass_thru + flexmock(subject.stream_aligner) + .should_receive(:remove_streams) + .with().once.pass_thru subject.deregister(other_task) end it "deregisters the deployment task from the targets for the stream" do @@ -133,17 +135,17 @@ module Syskit::Log end end - describe '#process_in_realtime' do + describe "#process_in_realtime" do before do - double_t = Roby.app.default_loader.registry.get '/double' + double_t = Roby.app.default_loader.registry.get "/double" - create_logfile 'test.0.log' do + create_logfile "test.0.log" do stream0 = create_logfile_stream( - '/port0', + "/port0", type: double_t, - metadata: { 'rock_task_name' => 'task', - 'rock_task_object_name' => 'out', - 'rock_stream_type' => 'port' } + metadata: { "rock_task_name" => "task", + "rock_task_object_name" => "out", + "rock_stream_type" => "port" } ) stream0.write Time.at(0), Time.at(0), 0 stream0.write Time.at(0), Time.at(1), 1 @@ -151,14 +153,14 @@ module Syskit::Log end streams = Streams.from_dir(logfile_pathname) - .find_task_by_name('task') + .find_task_by_name("task") task_m = Syskit::TaskContext.new_submodel do - output_port 'out', double_t + output_port "out", double_t end deployment_m = Syskit::Log::Deployment - .for_streams(streams, model: task_m, name: 'task') - deployment = deployment_m.new(process_name: 'test', on: 'pocolog') + .for_streams(streams, model: task_m, name: "task") + deployment = deployment_m.new(process_name: "test", on: "pocolog") plan.add_permanent_task(deployment) expect_execution { deployment.start! } .to { emit deployment.ready_event } @@ -172,10 +174,10 @@ module Syskit::Log subject.should_receive(:sleep) realtime = subject.base_real_time flexmock(Time).should_receive(:now).and_return { realtime } - subject.should_receive(:dispatch).once.with(0, Time.at(0)). - and_return { realtime += 1 } - subject.should_receive(:dispatch).once.with(0, Time.at(1)). - and_return { realtime += 1 } + subject.should_receive(:dispatch).once.with(0, Time.at(0)) + .and_return { realtime += 1 } + subject.should_receive(:dispatch).once.with(0, Time.at(1)) + .and_return { realtime += 1 } subject.process_in_realtime(1, limit_real_time: realtime + 1.1) end @@ -183,10 +185,10 @@ module Syskit::Log subject.should_receive(:sleep) realtime = subject.base_real_time flexmock(Time).should_receive(:now).and_return { realtime } - subject.should_receive(:dispatch).once.with(0, Time.at(0)). 
- and_return { realtime += 1 } - subject.should_receive(:dispatch).once.with(0, Time.at(1)). - and_return { realtime += 1 } + subject.should_receive(:dispatch).once.with(0, Time.at(0)) + .and_return { realtime += 1 } + subject.should_receive(:dispatch).once.with(0, Time.at(1)) + .and_return { realtime += 1 } subject.process_in_realtime(2, limit_real_time: realtime + 0.55) end @@ -208,10 +210,10 @@ module Syskit::Log realtime = subject.base_real_time flexmock(Time).should_receive(:now).and_return { realtime } subject.should_receive(:dispatch).with(0, Time.at(0)).globally.ordered - subject.should_receive(:sleep).explicitly.with(1).once.globally.ordered. - and_return { realtime += 1 } - subject.should_receive(:dispatch).with(0, Time.at(1)).globally.ordered. - and_return { realtime += 1 } + subject.should_receive(:sleep).explicitly.with(1).once.globally.ordered + .and_return { realtime += 1 } + subject.should_receive(:dispatch).with(0, Time.at(1)).globally.ordered + .and_return { realtime += 1 } subject.process_in_realtime(1, limit_real_time: realtime + 1.1) end @@ -219,8 +221,8 @@ module Syskit::Log realtime = subject.base_real_time flexmock(Time).should_receive(:now).and_return { realtime } subject.should_receive(:dispatch) - subject.should_receive(:sleep).explicitly.with(0.5).once.globally.ordered. - and_return { realtime += 0.5 } + subject.should_receive(:sleep).explicitly.with(0.5).once.globally.ordered + .and_return { realtime += 0.5 } subject.process_in_realtime(2, limit_real_time: realtime + 0.55) end @@ -234,4 +236,3 @@ module Syskit::Log end end end - diff --git a/test/roby-log-generation/scripts/accessors.rb b/test/roby-log-generation/scripts/accessors.rb index 053eabe..e5b5603 100644 --- a/test/roby-log-generation/scripts/accessors.rb +++ b/test/roby-log-generation/scripts/accessors.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # A single task model module Namespace class M < Roby::Task @@ -19,8 +21,7 @@ class Submodel < Roby::Task Roby.plan.add_permanent_task(task = Namespace::M.new) task.start! - task.stop_event.on do |ev| + task.stop_event.on do |_| Roby.app.quit end end - diff --git a/test/roby-log-generation/scripts/event_emission.rb b/test/roby-log-generation/scripts/event_emission.rb index f046b9f..4d8a039 100644 --- a/test/roby-log-generation/scripts/event_emission.rb +++ b/test/roby-log-generation/scripts/event_emission.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # A single task model class M < Roby::Task terminates @@ -10,7 +12,7 @@ class M < Roby::Task Robot.controller do Roby.plan.add_permanent_task(task = M.new) task.start! 
- task.stop_event.on do |ev| + task.stop_event.on do |_| Roby.app.quit end end diff --git a/test/roby-log-generation/scripts/model_registration.rb b/test/roby-log-generation/scripts/model_registration.rb index ed93fec..56a1e83 100644 --- a/test/roby-log-generation/scripts/model_registration.rb +++ b/test/roby-log-generation/scripts/model_registration.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Namespace task_service "ParentTaskService" class ParentModel < Roby::Task diff --git a/test/rock_stream_matcher_test.rb b/test/rock_stream_matcher_test.rb index d0ed71b..97f090b 100644 --- a/test/rock_stream_matcher_test.rb +++ b/test/rock_stream_matcher_test.rb @@ -1,20 +1,22 @@ -require 'test_helper' +# frozen_string_literal: true + +require "test_helper" module Syskit::Log describe RockStreamMatcher do attr_reader :streams before do - double_t = Typelib::Registry.new.create_numeric '/double', 8, :float - create_logfile 'test.0.log' do - create_logfile_stream 'task.port', type: double_t, metadata: Hash[rock_stream_type: 'port', rock_task_object_name: 'port', rock_task_name: 'task'] - create_logfile_stream 'task.property', metadata: Hash[rock_stream_type: 'property', rock_task_object_name: 'property', rock_task_name: 'task'] - create_logfile_stream 'stream_without_properties' - create_logfile_stream 'other_task.port', metadata: Hash[rock_task_name: 'other_task'] - create_logfile_stream 'stream_with_task_model', metadata: Hash[rock_task_model: 'orogen_model::Test'] + double_t = Typelib::Registry.new.create_numeric "/double", 8, :float + create_logfile "test.0.log" do + create_logfile_stream "task.port", type: double_t, metadata: Hash[rock_stream_type: "port", rock_task_object_name: "port", rock_task_name: "task"] + create_logfile_stream "task.property", metadata: Hash[rock_stream_type: "property", rock_task_object_name: "property", rock_task_name: "task"] + create_logfile_stream "stream_without_properties" + create_logfile_stream "other_task.port", metadata: Hash[rock_task_name: "other_task"] + create_logfile_stream "stream_with_task_model", metadata: Hash[rock_task_model: "orogen_model::Test"] end @streams = Streams.new - streams.add_file logfile_pathname('test.0.log') + streams.add_file logfile_pathname("test.0.log") end subject { RockStreamMatcher.new } @@ -24,49 +26,48 @@ def assert_finds_streams(query, *stream_names) describe "matching the stream type" do it "matches against ports" do - assert_finds_streams subject.ports, 'task.port' + assert_finds_streams subject.ports, "task.port" end it "matches against properties" do - assert_finds_streams subject.properties, 'task.property' + assert_finds_streams subject.properties, "task.property" end it "never matches streams that do not have the property" do end it "ORs ports and properties if both are specified" do - assert_finds_streams subject.ports.properties, 'task.port', 'task.property' + assert_finds_streams subject.ports.properties, "task.port", "task.property" end end describe "matching the task name" do it "matches tasks that have the name" do - assert_finds_streams subject.task_name('task'), 'task.port', 'task.property' + assert_finds_streams subject.task_name("task"), "task.port", "task.property" end it "ORs the different names" do - assert_finds_streams subject.task_name("task").task_name("other_task"), 'task.port', 'task.property', 'other_task.port' + assert_finds_streams subject.task_name("task").task_name("other_task"), "task.port", "task.property", "other_task.port" end end describe "matching the object name" do it 
"matches objects that have the name" do - assert_finds_streams subject.object_name('port'), 'task.port' + assert_finds_streams subject.object_name("port"), "task.port" end it "ORs the different names" do - assert_finds_streams subject.object_name("port").object_name("property"), 'task.port', 'task.property' + assert_finds_streams subject.object_name("port").object_name("property"), "task.port", "task.property" end end describe "matching the task model" do it "matches the task model by name" do task_m = Syskit::TaskContext.new_submodel - flexmock(task_m.orogen_model, :strict, name: 'orogen_model::Test') - assert_finds_streams subject.task_model(task_m), 'stream_with_task_model' + flexmock(task_m.orogen_model, :strict, name: "orogen_model::Test") + assert_finds_streams subject.task_model(task_m), "stream_with_task_model" end end describe "matching the type" do it "matches the type by name" do - assert_finds_streams subject.type(/double/), 'task.port' + assert_finds_streams subject.type(/double/), "task.port" end end end end - diff --git a/test/shell_interface_test.rb b/test/shell_interface_test.rb index b729d8f..7603562 100644 --- a/test/shell_interface_test.rb +++ b/test/shell_interface_test.rb @@ -1,4 +1,6 @@ -require 'test_helper' +# frozen_string_literal: true + +require "test_helper" module Syskit::Log describe ShellInterface do diff --git a/test/streams_test.rb b/test/streams_test.rb index 71157de..ffe3092 100644 --- a/test/streams_test.rb +++ b/test/streams_test.rb @@ -1,4 +1,6 @@ -require 'test_helper' +# frozen_string_literal: true + +require "test_helper" module Syskit::Log describe Streams do @@ -6,60 +8,60 @@ module Syskit::Log describe "#add_file" do it "adds the file's streams to the object" do - create_logfile 'test.0.log' do - create_logfile_stream '/task.file' + create_logfile "test.0.log" do + create_logfile_stream "/task.file" end - subject.add_file(logfile_pathname('test.0.log')) - assert_equal ['/task.file'], subject.each_stream.map(&:name) + subject.add_file(logfile_pathname("test.0.log")) + assert_equal ["/task.file"], subject.each_stream.map(&:name) end it "raises ENOENT if the file does not exist" do - assert_raises(Errno::ENOENT) { subject.add_file(Pathname('does_not_exist')) } + assert_raises(Errno::ENOENT) { subject.add_file(Pathname("does_not_exist")) } end end describe ".from_dir" do it "creates a new streams object and adds the dir converted to pathname" do - flexmock(Streams).new_instances.should_receive(:add_dir).once.with(Pathname.new('test')) - assert_kind_of Streams, Streams.from_dir('test') + flexmock(Streams).new_instances.should_receive(:add_dir).once.with(Pathname.new("test")) + assert_kind_of Streams, Streams.from_dir("test") end end describe ".from_file" do it "creates a new streams object and adds the file converted to pathname" do - flexmock(Streams).new_instances.should_receive(:add_file).once.with(Pathname.new('test.0.log')) - assert_kind_of Streams, Streams.from_file('test.0.log') + flexmock(Streams).new_instances.should_receive(:add_file).once.with(Pathname.new("test.0.log")) + assert_kind_of Streams, Streams.from_file("test.0.log") end end describe "#add_file_group" do it "adds the group's streams to self" do - create_logfile 'test0.0.log' do - create_logfile_stream '/stream0' - create_logfile_stream '/stream1' + create_logfile "test0.0.log" do + create_logfile_stream "/stream0" + create_logfile_stream "/stream1" end - create_logfile 'test1.0.log' do - create_logfile_stream '/stream0' - create_logfile_stream '/stream1' - create_logfile_stream 
'/stream2' + create_logfile "test1.0.log" do + create_logfile_stream "/stream0" + create_logfile_stream "/stream1" + create_logfile_stream "/stream2" end - flexmock(subject).should_receive(:add_stream). - with(->(s) { s.name == '/stream0' }).once - flexmock(subject).should_receive(:add_stream). - with(->(s) { s.name == '/stream1' }).once - flexmock(subject).should_receive(:add_stream). - with(->(s) { s.name == '/stream2' }).once - subject.add_file_group([logfile_pathname('test0.0.log'), logfile_pathname('test1.0.log')]) + flexmock(subject).should_receive(:add_stream) + .with(->(s) { s.name == "/stream0" }).once + flexmock(subject).should_receive(:add_stream) + .with(->(s) { s.name == "/stream1" }).once + flexmock(subject).should_receive(:add_stream) + .with(->(s) { s.name == "/stream2" }).once + subject.add_file_group([logfile_pathname("test0.0.log"), logfile_pathname("test1.0.log")]) end end - describe '#add_stream' do - describe 'sanitize metadata' do - it 'removes an empty rock_task_model' do - create_logfile 'test.0.log' do + describe "#add_stream" do + describe "sanitize metadata" do + it "removes an empty rock_task_model" do + create_logfile "test.0.log" do create_logfile_stream( - '/stream0', - metadata: { 'rock_task_model' => '' } + "/stream0", + metadata: { "rock_task_model" => "" } ) end flexmock(Syskit::Log) @@ -67,21 +69,21 @@ module Syskit::Log .with("removing empty metadata property 'rock_task_model' "\ "from /stream0") .once - stream = open_logfile_stream('test.0.log', '/stream0') + stream = open_logfile_stream("test.0.log", "/stream0") subject.add_stream(stream) - refute stream.metadata['rock_task_model'] + refute stream.metadata["rock_task_model"] end - it 'removes the nameservice prefix' do - create_logfile 'test0.0.log' do + it "removes the nameservice prefix" do + create_logfile "test0.0.log" do create_logfile_stream( - '/stream0', - metadata: { 'rock_task_name' => 'localhost/task' } + "/stream0", + metadata: { "rock_task_name" => "localhost/task" } ) end - stream = open_logfile_stream('test0.0.log', '/stream0') + stream = open_logfile_stream("test0.0.log", "/stream0") subject.add_stream(stream) - assert_equal 'task', stream.metadata['rock_task_name'] + assert_equal "task", stream.metadata["rock_task_name"] end end end @@ -96,40 +98,40 @@ module Syskit::Log subject.add_dir(logfile_pathname) end it "adds files that match the .NUM.log pattern" do - create_logfile('test0.0.log') {} - create_logfile('test1.0.log') {} - create_logfile('test2.0.log') {} - flexmock(subject).should_receive(:add_file_group). - with([logfile_pathname + 'test0.0.log']).once - flexmock(subject).should_receive(:add_file_group). - with([logfile_pathname + 'test1.0.log']).once - flexmock(subject).should_receive(:add_file_group). - with([logfile_pathname + 'test2.0.log']).once + create_logfile("test0.0.log") {} + create_logfile("test1.0.log") {} + create_logfile("test2.0.log") {} + flexmock(subject).should_receive(:add_file_group) + .with([logfile_pathname + "test0.0.log"]).once + flexmock(subject).should_receive(:add_file_group) + .with([logfile_pathname + "test1.0.log"]).once + flexmock(subject).should_receive(:add_file_group) + .with([logfile_pathname + "test2.0.log"]).once subject.add_dir(logfile_pathname) end it "opens files that belong together, together" do - create_logfile('test0.0.log') {} - create_logfile('test0.1.log') {} - create_logfile('test1.0.log') {} - flexmock(subject).should_receive(:add_file_group). 
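# The two "sanitize metadata" tests above pin down what Streams#add_stream is
# expected to do to a stream's metadata. A rough standalone sketch of that
# contract (an illustration only, not the library's implementation):
def sanitize_stream_metadata(metadata)
  metadata = metadata.dup
  # an empty rock_task_model carries no information, so it is dropped
  metadata.delete("rock_task_model") if metadata["rock_task_model"] == ""
  # task names may carry a name-service prefix ("localhost/task");
  # keep only the bare task name
  if (task_name = metadata["rock_task_name"])
    metadata["rock_task_name"] = task_name.split("/").last
  end
  metadata
end

p sanitize_stream_metadata(
  "rock_task_model" => "", "rock_task_name" => "localhost/task"
)
# => {"rock_task_name"=>"task"}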
- with([logfile_pathname + 'test0.0.log', logfile_pathname + "test0.1.log"]).once - flexmock(subject).should_receive(:add_file_group). - with([logfile_pathname + 'test1.0.log']).once + create_logfile("test0.0.log") {} + create_logfile("test0.1.log") {} + create_logfile("test1.0.log") {} + flexmock(subject).should_receive(:add_file_group) + .with([logfile_pathname + "test0.0.log", logfile_pathname + "test0.1.log"]).once + flexmock(subject).should_receive(:add_file_group) + .with([logfile_pathname + "test1.0.log"]).once subject.add_dir(logfile_pathname) end end describe "#make_file_groups_in_dir" do it "groups files that have the same basename together" do - create_logfile('test0.0.log') {} - create_logfile('test0.1.log') {} - create_logfile('test0.2.log') {} - create_logfile('test1.0.log') {} + create_logfile("test0.0.log") {} + create_logfile("test0.1.log") {} + create_logfile("test0.2.log") {} + create_logfile("test1.0.log") {} groups = subject.make_file_groups_in_dir(logfile_pathname) expected = [ - [(logfile_pathname + 'test0.0.log'), (logfile_pathname + 'test0.1.log'), (logfile_pathname + 'test0.2.log')], - [logfile_pathname + 'test1.0.log'] + [(logfile_pathname + "test0.0.log"), (logfile_pathname + "test0.1.log"), (logfile_pathname + "test0.2.log")], + [logfile_pathname + "test1.0.log"] ] assert_equal expected, groups end @@ -137,48 +139,48 @@ module Syskit::Log describe "#find_all_streams" do it "returns the streams that match the object" do - create_logfile 'test.0.log' do - create_logfile_stream '/task.file' - create_logfile_stream '/other.task.file' - create_logfile_stream '/does.not.match' + create_logfile "test.0.log" do + create_logfile_stream "/task.file" + create_logfile_stream "/other.task.file" + create_logfile_stream "/does.not.match" end subject.add_dir(logfile_pathname) streams = subject.streams query = flexmock - query.should_receive(:===). - with(->(s) { streams.include?(s) }). 
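# The #add_dir and #make_file_groups_in_dir tests above rely on pocolog's file
# naming: "<basename>.<index>.log" parts belong to the same logical logfile and
# must be opened together. A standalone sketch of that grouping rule (again an
# illustration of the expected behaviour, not the library's code):
require "pathname"

paths = %w[test0.0.log test0.1.log test0.2.log test1.0.log].map { |p| Pathname(p) }

groups = paths
         .group_by { |path| path.basename.to_s[/\A(.*)\.\d+\.log\z/, 1] }
         .values
         .map { |files| files.sort_by { |f| Integer(f.to_s[/\.(\d+)\.log\z/, 1]) } }

p groups.map { |g| g.map(&:to_s) }
# => [["test0.0.log", "test0.1.log", "test0.2.log"], ["test1.0.log"]]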
- and_return { |s| s != streams[2] } + query.should_receive(:===) + .with(->(s) { streams.include?(s) }) + .and_return { |s| s != streams[2] } assert_equal streams[0, 2], subject.find_all_streams(query) end end describe "#find_task_by_name" do before do - create_logfile 'test.0.log' do - create_logfile_stream '/test0', metadata: Hash['rock_task_name' => "task"] - create_logfile_stream '/test1', metadata: Hash['rock_task_name' => "task"] - create_logfile_stream '/does.not.match', metadata: Hash['rock_task_name' => 'another_task'] + create_logfile "test.0.log" do + create_logfile_stream "/test0", metadata: Hash["rock_task_name" => "task"] + create_logfile_stream "/test1", metadata: Hash["rock_task_name" => "task"] + create_logfile_stream "/does.not.match", metadata: Hash["rock_task_name" => "another_task"] end subject.add_dir(logfile_pathname) end it "returns nil if there are no matching tasks" do - assert !subject.find_task_by_name('does_not_exist') + assert !subject.find_task_by_name("does_not_exist") end it "returns a TaskStreams object with the matching streams" do - streams = subject.find_task_by_name('task') + streams = subject.find_task_by_name("task") assert_kind_of TaskStreams, streams - assert_equal Set['/test0', '/test1'], streams.each_stream.map(&:name).to_set + assert_equal Set["/test0", "/test1"], streams.each_stream.map(&:name).to_set end describe "method_missing accessor" do it "returns the streams" do streams = subject.task_task assert_kind_of TaskStreams, streams - assert_equal Set['/test0', '/test1'], streams.each_stream.map(&:name).to_set + assert_equal Set["/test0", "/test1"], streams.each_stream.map(&:name).to_set end it "raises NoMethodError if no task exists" do assert_raises(NoMethodError) do @@ -190,11 +192,11 @@ module Syskit::Log describe "#each_task" do before do - create_logfile 'test.0.log' do - create_logfile_stream '/test0', metadata: Hash['rock_task_model' => 'project::Task', 'rock_task_name' => "task"] - create_logfile_stream '/test1', metadata: Hash['rock_task_model' => 'project::Task', 'rock_task_name' => "task"] - create_logfile_stream '/other_project', metadata: Hash['rock_task_model' => 'other_project::Task', 'rock_task_name' => 'other_task'] - create_logfile_stream '/not_task_model', metadata: Hash['rock_task_name' => 'task_without_model'] + create_logfile "test.0.log" do + create_logfile_stream "/test0", metadata: Hash["rock_task_model" => "project::Task", "rock_task_name" => "task"] + create_logfile_stream "/test1", metadata: Hash["rock_task_model" => "project::Task", "rock_task_name" => "task"] + create_logfile_stream "/other_project", metadata: Hash["rock_task_model" => "other_project::Task", "rock_task_name" => "other_task"] + create_logfile_stream "/not_task_model", metadata: Hash["rock_task_name" => "task_without_model"] end subject.add_dir(logfile_pathname) end @@ -205,17 +207,20 @@ def should_warn(matcher) flexmock(Syskit::Log).should_receive(:warn).with(matcher).once end - it "ignores streams without a task model" do - task_m = Syskit::TaskContext.new_submodel orogen_model_name: 'project::Task' - other_task_m = Syskit::TaskContext.new_submodel orogen_model_name: 'other_project::Task' - assert_equal ['task', 'other_task'], subject.each_task.map(&:task_name) + it "does load tasks without a model by default" do + assert_equal %w[task other_task], subject.each_task.map(&:task_name) + end + + it "ignores tasks without a task model if configured to do so" do + assert subject.each_task(skip_tasks_without_models: true) + .map(&:task_name).empty? 
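# The surrounding each_task tests document the keywords the suite now relies
# on: tasks whose streams carry no rock_task_model are returned by default,
# skipped with skip_tasks_without_models: true, and oroGen projects are only
# loaded when asked to via load_models:. A hedged usage sketch; the dataset
# directory is made up:
require "syskit/log"

streams = Syskit::Log::Streams.from_dir(Pathname("/path/to/dataset"))

every_task    = streams.each_task.to_a
modelled_only = streams.each_task(skip_tasks_without_models: true).to_a
resolved      = streams.each_task(load_models: true).to_a # may load oroGen projects

puts every_task.map(&:task_name).inspect
puts modelled_only.map(&:task_name).inspect
puts resolved.map(&:task_name).inspect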
end it "does not attempt to load the model's project if the task model is known" do - Syskit::TaskContext.new_submodel orogen_model_name: 'project::Task' - Syskit::TaskContext.new_submodel orogen_model_name: 'other_project::Task' + Syskit::TaskContext.new_submodel orogen_model_name: "project::Task" + Syskit::TaskContext.new_submodel orogen_model_name: "other_project::Task" flexmock(app).should_receive(:using_task_library).never - subject.each_task.to_a + subject.each_task(load_models: true).to_a end it "ignores streams that have a malformed rock_task_model name" do @@ -227,7 +232,7 @@ def should_warn(matcher) ) end - should_warn /removing empty metadata property.*test1/ + should_warn(/removing empty metadata property.*test1/) streams = Streams.new streams.add_file Pathname(path) flexmock(app).should_receive(:using_task_library).never @@ -236,8 +241,8 @@ def should_warn(matcher) it "does not attempt to load the model's project if load_models is false" do flexmock(app).should_receive(:using_task_library).never - should_warn /ignored 2 streams.*project::Task.*\/test0, \/test1/ - should_warn /ignored.*other_project::Task.*other_project/ + should_warn(/ignored 2 streams.*project::Task.*\/test0, \/test1/) + should_warn(/ignored.*other_project::Task.*other_project/) tasks = subject.each_task( load_models: false, skip_tasks_without_models: true @@ -268,7 +273,7 @@ def should_warn(matcher) project_m.task_context "Task" loader.register_project_model(project_m) - should_warn /ignored 1 stream.*other_project::Task.*other_project/ + should_warn(/ignored 1 stream.*other_project::Task.*other_project/) tasks = subject.each_task( load_models: true, skip_tasks_without_models: true, loader: loader ) @@ -296,11 +301,9 @@ def should_warn(matcher) end it "groups the streams per task name" do - task_m = Syskit::TaskContext.new_submodel orogen_model_name: 'project::Task' - other_task_m = Syskit::TaskContext.new_submodel orogen_model_name: 'other_project::Task' task, other_task = subject.each_task.to_a - assert_equal ['/test0', '/test1'], task.streams.map(&:name) - assert_equal ['/other_project'], other_task.streams.map(&:name) + assert_equal ["/test0", "/test1"], task.streams.map(&:name) + assert_equal ["/other_project"], other_task.streams.map(&:name) end end diff --git a/test/task_streams_test.rb b/test/task_streams_test.rb index c8e38c7..6a7abc0 100644 --- a/test/task_streams_test.rb +++ b/test/task_streams_test.rb @@ -1,57 +1,59 @@ -require 'test_helper' +# frozen_string_literal: true + +require "test_helper" module Syskit::Log describe TaskStreams do attr_reader :subject before do - create_logfile 'test.0.log' do - create_logfile_stream '/port0', - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object0', - 'rock_stream_type' => 'port'] - create_logfile_stream '/port1_1', - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object1', - 'rock_stream_type' => 'port'] - create_logfile_stream '/port1_2', - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object1', - 'rock_stream_type' => 'port'] - create_logfile_stream '/property0', - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object0', - 'rock_stream_type' => 'property'] - create_logfile_stream '/property1_1', - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object1', - 'rock_stream_type' => 'property'] - create_logfile_stream '/property1_2', - metadata: Hash['rock_task_name' => "task", - 'rock_task_object_name' => 'object1', - 
'rock_stream_type' => 'property'] + create_logfile "test.0.log" do + create_logfile_stream "/port0", + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object0", + "rock_stream_type" => "port"] + create_logfile_stream "/port1_1", + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object1", + "rock_stream_type" => "port"] + create_logfile_stream "/port1_2", + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object1", + "rock_stream_type" => "port"] + create_logfile_stream "/property0", + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object0", + "rock_stream_type" => "property"] + create_logfile_stream "/property1_1", + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object1", + "rock_stream_type" => "property"] + create_logfile_stream "/property1_2", + metadata: Hash["rock_task_name" => "task", + "rock_task_object_name" => "object1", + "rock_stream_type" => "property"] end streams = Streams.from_dir(logfile_pathname) - @subject = streams.find_task_by_name('task') + @subject = streams.find_task_by_name("task") end describe "#find_port_by_name" do it "returns nil if there are no matches" do - assert !subject.find_port_by_name('does_not_exist') + assert !subject.find_port_by_name("does_not_exist") end it "returns the matching port stream" do - object = subject.find_port_by_name('object0') + object = subject.find_port_by_name("object0") assert_kind_of ::Pocolog::DataStream, object - assert_equal '/port0', object.name + assert_equal "/port0", object.name end it "raises Ambiguous if there are more than one port with the given name" do assert_raises(Ambiguous) do - subject.find_port_by_name('object1') + subject.find_port_by_name("object1") end end describe "access through #method_missing" do it "returns a single match if there is one" do - assert_equal '/port0', subject.object0_port.name + assert_equal "/port0", subject.object0_port.name end it "raises Ambiguous for multiple matches" do assert_raises(Ambiguous) do @@ -68,22 +70,22 @@ module Syskit::Log describe "#find_property_by_name" do it "returns nil if there are no matches" do - assert !subject.find_property_by_name('does_not_exist') + assert !subject.find_property_by_name("does_not_exist") end it "returns the matching port stream" do - object = subject.find_property_by_name('object0') + object = subject.find_property_by_name("object0") assert_kind_of ::Pocolog::DataStream, object - assert_equal '/property0', object.name + assert_equal "/property0", object.name end it "raises Ambiguous if there are more than one port with the given name" do assert_raises(Ambiguous) do - subject.find_property_by_name('object1') + subject.find_property_by_name("object1") end end describe "access through #method_missing" do it "returns a single match if there is one" do - assert_equal '/property0', subject.object0_property.name + assert_equal "/property0", subject.object0_property.name end it "raises Ambiguous for multiple matches" do assert_raises(Ambiguous) do @@ -115,30 +117,30 @@ module Syskit::Log describe "models are declared" do before do subject.streams.each do |s| - s.metadata['rock_task_model'] = 'orogen::Model' + s.metadata["rock_task_model"] = "orogen::Model" end end it "raises Unknown if some streams do not have a declared model" do - subject.streams.first.metadata.delete('rock_task_model') + subject.streams.first.metadata.delete("rock_task_model") assert_raises(Unknown) do subject.orogen_model_name end end it "raises Ambiguous 
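# The find_port_by_name / find_property_by_name tests above fix the accessor
# contract: nil when nothing matches, the single matching Pocolog::DataStream
# otherwise, and Ambiguous when several streams share the object name; the
# object0_port / object0_property method_missing sugar follows the same rules.
# A hedged usage sketch; the dataset directory is made up and referring to the
# error as Syskit::Log::Ambiguous is an assumption about where the class lives:
require "syskit/log"

task = Syskit::Log::Streams
       .from_dir(Pathname("/path/to/dataset"))
       .find_task_by_name("task")

puts task.find_port_by_name("object0").name           # => "/port0" in the fixture above
puts task.find_port_by_name("does_not_exist").inspect # => nil

begin
  task.object1_port # the fixture defines two port streams for object1
rescue Syskit::Log::Ambiguous => e
  puts "ambiguous: #{e.class}"
end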
if the streams declare multiple models" do - subject.streams.first.metadata['rock_task_model'] = 'orogen::AnotherModel' + subject.streams.first.metadata["rock_task_model"] = "orogen::AnotherModel" assert_raises(Ambiguous) do subject.orogen_model_name end end it "returns the model if there is only one" do - assert_equal 'orogen::Model', subject.orogen_model_name + assert_equal "orogen::Model", subject.orogen_model_name end describe "#model" do it "returns the resolved model" do - task_m = Syskit::TaskContext.new_submodel(name: 'orogen::Model') - flexmock(task_m.orogen_model).should_receive(:name).and_return('orogen::Model') + task_m = Syskit::TaskContext.new_submodel(name: "orogen::Model") + flexmock(task_m.orogen_model).should_receive(:name).and_return("orogen::Model") assert_equal task_m, subject.model end it "raises Unknown if the model cannot be resolved" do @@ -152,27 +154,26 @@ module Syskit::Log describe "#each_port_stream" do it "enumerates the streams that are a task's port" do - ports = subject.each_port_stream. - map { |name, stream| [name, stream.name] }.to_set + ports = subject.each_port_stream + .map { |name, stream| [name, stream.name] }.to_set expected = Set[ - ['object0', '/port0'], - ['object1', '/port1_1'], - ['object1', '/port1_2']] + ["object0", "/port0"], + ["object1", "/port1_1"], + ["object1", "/port1_2"]] assert_equal expected, ports end end describe "#each_property_stream" do it "enumerates the streams that are a task's property" do - ports = subject.each_property_stream. - map { |name, stream| [name, stream.name] }.to_set + ports = subject.each_property_stream + .map { |name, stream| [name, stream.name] }.to_set expected = Set[ - ['object0', '/property0'], - ['object1', '/property1_1'], - ['object1', '/property1_2']] + ["object0", "/property0"], + ["object1", "/property1_1"], + ["object1", "/property1_2"]] assert_equal expected, ports end end end end - diff --git a/test/test_helper.rb b/test/test_helper.rb index 93fbaba..17375e2 100644 --- a/test/test_helper.rb +++ b/test/test_helper.rb @@ -1,10 +1,12 @@ -$LOAD_PATH.unshift File.expand_path('../lib', __dir__) -require 'syskit/test/self' -require 'syskit/log' -require 'pocolog' -require 'pocolog/test_helpers' -require 'minitest/autorun' -require 'syskit/log/datastore/index_build' +# frozen_string_literal: true + +$LOAD_PATH.unshift File.expand_path("../lib", __dir__) +require "syskit/test/self" +require "syskit/log" +require "pocolog" +require "pocolog/test_helpers" +require "minitest/autorun" +require "syskit/log/datastore/index_build" module Syskit::Log module Test @@ -13,8 +15,8 @@ module Test def setup @pocolog_log_level = Pocolog.logger.level Pocolog.logger.level = Logger::WARN - unless Roby.app.loaded_plugin?('syskit-log') - Roby.app.add_plugin('syskit-log', Syskit::Log::Plugin) + unless Roby.app.loaded_plugin?("syskit-log") + Roby.app.add_plugin("syskit-log", Syskit::Log::Plugin) end super @@ -33,8 +35,8 @@ def create_datastore(path) @datastore = Datastore.create(path) end - def create_dataset(digest, metadata: Hash.new) - if !@datastore + def create_dataset(digest, metadata: {}) + unless @datastore raise ArgumentError, "must call #create_datastore before #create_dataset" end @@ -58,22 +60,6 @@ def create_dataset(digest, metadata: Hash.new) end end - # Create a stream in a normalized dataset - def create_normalized_stream(name, type: int32_t, metadata: Hash.new) - logfile_basename = name.gsub('/', ':').gsub(/^:/, '') + ".0.log" - logfile_path = Pathname.new(logfile_path(logfile_basename)) - create_logfile 
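# The tests above describe how a TaskStreams object resolves its model from the
# rock_task_model stream metadata: #orogen_model_name raises Unknown when a
# stream lacks a model and Ambiguous when streams disagree, otherwise it
# returns the single name, and #model maps that name to the registered Syskit
# task context model. #each_port_stream / #each_property_stream yield
# (object_name, stream) pairs. A hedged usage sketch; the dataset directory is
# made up:
require "syskit/log"

task = Syskit::Log::Streams
       .from_dir(Pathname("/path/to/dataset"))
       .find_task_by_name("task")

puts task.orogen_model_name # e.g. "orogen::Model" with the metadata used above
task.each_port_stream do |object_name, stream|
  puts "port #{object_name}: #{stream.name}"
end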
logfile_basename do - stream = create_logfile_stream(name, type: type, metadata: metadata) - yield if block_given? - flush_logfile - registry_checksum = Streams.save_registry_in_normalized_dataset(logfile_path, stream) - Streams.update_normalized_metadata(logfile_pathname) do |metadata| - metadata << Streams.create_metadata_entry(logfile_path, stream, registry_checksum) - end - end - return logfile_path - end - def roby_log_path(name) Pathname(__dir__) + "roby-logs" + "#{name}-events.log" end
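# Two of the mechanical rewrites in the helper above deserve a caveat even
# though they are behaviour preserving here: "metadata: Hash.new" became
# "metadata: {}" and "if !@datastore" became "unless @datastore" (presumably
# RuboCop's Style/EmptyLiteral and Style/NegatedIf). Hash.new is only
# interchangeable with {} when no default value or block is involved:
plain    = {}
explicit = Hash.new
puts plain == explicit            # => true, both are empty hashes

with_default = Hash.new { |h, k| h[k] = [] } # NOT replaceable by {}
with_default[:streams] << "/port0"
puts with_default.inspect         # => {:streams=>["/port0"]}

datastore = nil
puts "create the datastore first" unless datastore # reads as: if !datastore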