diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5f1199a3d8a..850839e94f4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -239,6 +239,7 @@ jobs: cargo install --locked --debug --path ./forc-plugins/forc-doc cargo install --locked --debug --path ./forc-plugins/forc-tx cargo install --locked --debug --path ./forc-plugins/forc-crypto + cargo install --locked --debug --path ./forc-plugins/forc-migrate cargo install --locked --debug forc-explore - name: Install mdbook-forc-documenter run: cargo install --locked --debug --path ./scripts/mdbook-forc-documenter diff --git a/Cargo.lock b/Cargo.lock index d4e1fd6c522..c012ee3100a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2859,6 +2859,26 @@ dependencies = [ "tokio", ] +[[package]] +name = "forc-migrate" +version = "0.66.5" +dependencies = [ + "anyhow", + "clap", + "forc-pkg", + "forc-tracing 0.66.5", + "forc-util", + "itertools 0.13.0", + "num-bigint", + "sha2 0.10.8", + "sway-ast", + "sway-core", + "sway-error", + "sway-features", + "sway-types", + "swayfmt", +] + [[package]] name = "forc-pkg" version = "0.66.5" diff --git a/Cargo.toml b/Cargo.toml index abfff09eedb..18b09366ca1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,6 +9,7 @@ members = [ "forc-plugins/forc-doc", "forc-plugins/forc-fmt", "forc-plugins/forc-lsp", + "forc-plugins/forc-migrate", "forc-plugins/forc-tx", "forc-test", "forc-tracing", diff --git a/docs/book/spell-check-custom-words.txt b/docs/book/spell-check-custom-words.txt index 3b51147b9ee..3f082eb0d5f 100644 --- a/docs/book/spell-check-custom-words.txt +++ b/docs/book/spell-check-custom-words.txt @@ -231,4 +231,9 @@ fmt deallocated deallocate destructors -destructor \ No newline at end of file +destructor +semiautomatically +FuelLabs +github +toml +hardcoded \ No newline at end of file diff --git a/docs/book/src/SUMMARY.md b/docs/book/src/SUMMARY.md index 262437ea2cc..afef1b6d6e0 100644 --- a/docs/book/src/SUMMARY.md +++ b/docs/book/src/SUMMARY.md @@ -105,3 +105,4 @@ - [forc explore](./forc/plugins/forc_explore.md) - [forc fmt](./forc/plugins/forc_fmt.md) - [forc lsp](./forc/plugins/forc_lsp.md) + - [forc migrate](./forc/plugins/forc_migrate.md) diff --git a/docs/book/src/forc/plugins/forc_migrate.md b/docs/book/src/forc/plugins/forc_migrate.md new file mode 100644 index 00000000000..fed1c5ac884 --- /dev/null +++ b/docs/book/src/forc/plugins/forc_migrate.md @@ -0,0 +1 @@ +# forc migrate diff --git a/forc-plugins/forc-doc/src/cli.rs b/forc-plugins/forc-doc/src/cli.rs index dff10298122..050c934df43 100644 --- a/forc-plugins/forc-doc/src/cli.rs +++ b/forc-plugins/forc-doc/src/cli.rs @@ -8,7 +8,7 @@ forc_util::cli_examples! { [ Build the docs for a project in the current path and open it in the browser => "forc doc --open" ] [ Build the docs for a project located in another path => "forc doc --path {path}" ] [ Build the docs for the current project exporting private types => "forc doc --document-private-items" ] - [ Build the docs offline without downloading any dependency from the network => "forc doc --offline" ] + [ Build the docs offline without downloading any dependencies => "forc doc --offline" ] } } @@ -35,11 +35,8 @@ pub struct Command { /// Meaning it will only try to use previously downloaded dependencies. #[clap(long = "offline")] pub offline: bool, - /// Silent mode. Don't output any warnings or errors to the command line. - #[clap(long = "silent", short = 's')] - pub silent: bool, /// Requires that the Forc.lock file is up-to-date. 
If the lock file is missing, or it - /// needs to be updated, Forc will exit with an error + /// needs to be updated, Forc will exit with an error. #[clap(long)] pub locked: bool, /// Do not build documentation for dependencies. @@ -50,10 +47,11 @@ pub struct Command { /// Possible values: PUBLIC, LOCAL, #[clap(long)] pub ipfs_node: Option, - #[cfg(test)] pub(crate) doc_path: Option, - #[clap(flatten)] pub experimental: sway_features::CliFields, + /// Silent mode. Don't output any warnings or errors to the command line. + #[clap(long = "silent", short = 's')] + pub silent: bool, } diff --git a/forc-plugins/forc-fmt/src/main.rs b/forc-plugins/forc-fmt/src/main.rs index 331c3e51373..c7a981bbf10 100644 --- a/forc-plugins/forc-fmt/src/main.rs +++ b/forc-plugins/forc-fmt/src/main.rs @@ -7,7 +7,7 @@ use forc_pkg::{ WorkspaceManifestFile, }; use forc_tracing::{init_tracing_subscriber, println_error, println_green, println_red}; -use forc_util::fs_locking::PidFileLocking; +use forc_util::fs_locking::is_file_dirty; use prettydiff::{basic::DiffOp, diff_lines}; use std::{ default::Default, @@ -101,15 +101,6 @@ fn run() -> Result<()> { Ok(()) } -/// Checks if the specified file is marked as "dirty". -/// This is used to prevent formatting files that are currently open in an editor -/// with unsaved changes. -/// -/// Returns `true` if a corresponding "dirty" flag file exists, `false` otherwise. -fn is_file_dirty>(path: X) -> bool { - PidFileLocking::lsp(path.as_ref()).is_locked() -} - /// Recursively get a Vec of subdirectories that contains a Forc.toml. fn get_sway_dirs(workspace_dir: PathBuf) -> Vec { let mut dirs_to_format = vec![]; diff --git a/forc-plugins/forc-migrate/Cargo.toml b/forc-plugins/forc-migrate/Cargo.toml new file mode 100644 index 00000000000..c26988e84c6 --- /dev/null +++ b/forc-plugins/forc-migrate/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "forc-migrate" +version.workspace = true +description = "Migrate Sway projects to the next breaking change version of Sway." +authors.workspace = true +edition.workspace = true +homepage.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +anyhow.workspace = true +clap = { workspace = true, features = ["derive"] } +forc-pkg.workspace = true +forc-tracing.workspace = true +forc-util.workspace = true +itertools.workspace = true +num-bigint.workspace = true +sha2.workspace = true +sway-ast.workspace = true +sway-core.workspace = true +sway-error.workspace = true +sway-features.workspace = true +sway-types.workspace = true +swayfmt.workspace = true \ No newline at end of file diff --git a/forc-plugins/forc-migrate/src/cli/commands/check.rs b/forc-plugins/forc-migrate/src/cli/commands/check.rs new file mode 100644 index 00000000000..f99a2b67845 --- /dev/null +++ b/forc-plugins/forc-migrate/src/cli/commands/check.rs @@ -0,0 +1,88 @@ +use clap::Parser; + +use crate::{ + cli::{ + self, + shared::{ + compile_package, create_migration_diagnostic, print_features_and_migration_steps, + }, + }, + get_migration_steps_or_return, + migrations::{DryRun, MigrationStepKind}, +}; +use anyhow::{Ok, Result}; +use forc_util::format_diagnostic; +use itertools::Itertools; +use sway_core::Engines; + +forc_util::cli_examples! { + crate::cli::Opt { + [ Check the project in the current path => "forc migrate check"] + [ Check the project located in another path => "forc migrate check --path {path}" ] + } +} + +/// Check the project for code that needs to be migrated. 
+/// +/// Dry-runs the migration steps and prints places in code that need to be reviewed or changed. +#[derive(Debug, Parser)] +pub(crate) struct Command { + #[clap(flatten)] + pub check: cli::shared::Compile, +} + +pub(crate) fn exec(command: Command) -> Result<()> { + let migration_steps = get_migration_steps_or_return!(); + let engines = Engines::default(); + let build_instructions = command.check; + + let mut program_info = compile_package(&engines, &build_instructions)?; + + // Dry-run all the migration steps. + let mut check_result = vec![]; + for (feature, migration_steps) in migration_steps.iter() { + for migration_step in migration_steps.iter() { + let migration_point_spans = match migration_step.kind { + MigrationStepKind::Instruction(instruction) => instruction(&program_info)?, + MigrationStepKind::CodeModification(modification, _) => { + modification(&mut program_info.as_mut(), DryRun::Yes)? + } + MigrationStepKind::Interaction(instruction, _, _) => instruction(&program_info)?, + }; + + check_result.push((feature, migration_step, migration_point_spans)); + } + } + + // For every migration step, display the found occurrences in code that require migration effort, if any. + for (feature, migration_step, occurrences_spans) in check_result.iter() { + if let Some(diagnostic) = + create_migration_diagnostic(engines.se(), feature, migration_step, occurrences_spans) + { + format_diagnostic(&diagnostic); + } + } + + // Display the summary of the migration effort. + let features_and_migration_steps = check_result + .iter() + .chunk_by(|(feature, _, _)| feature) + .into_iter() + .map(|(key, chunk)| { + ( + **key, + chunk + .map(|(_, migration_step, migration_point_spans)| { + (*migration_step, Some(migration_point_spans.len())) + }) + .collect::>(), + ) + }) + .collect::>(); + + println!("Migration effort:"); + println!(); + print_features_and_migration_steps(&features_and_migration_steps); + + Ok(()) +} diff --git a/forc-plugins/forc-migrate/src/cli/commands/mod.rs b/forc-plugins/forc-migrate/src/cli/commands/mod.rs new file mode 100644 index 00000000000..3017d098ac7 --- /dev/null +++ b/forc-plugins/forc-migrate/src/cli/commands/mod.rs @@ -0,0 +1,3 @@ +pub(crate) mod check; +pub(crate) mod run; +pub(crate) mod show; diff --git a/forc-plugins/forc-migrate/src/cli/commands/run.rs b/forc-plugins/forc-migrate/src/cli/commands/run.rs new file mode 100644 index 00000000000..64329a9ddb9 --- /dev/null +++ b/forc-plugins/forc-migrate/src/cli/commands/run.rs @@ -0,0 +1,440 @@ +use std::{ + collections::HashSet, + path::{Path, PathBuf}, +}; + +use anyhow::{bail, Ok, Result}; +use clap::Parser; +use forc_tracing::{println_action_green, println_action_yellow, println_yellow_bold}; +use forc_util::{format_diagnostic, fs_locking::is_file_dirty}; +use itertools::Itertools; +use sway_ast::{attribute::Annotated, Module}; +use sway_core::{ + language::lexed::{LexedModule, LexedProgram}, + Engines, +}; +use sway_error::formatting::*; +use sway_features::Feature; +use sway_types::{SourceEngine, Span}; +use swayfmt::Formatter; + +use crate::{ + cli::{ + self, + shared::{ + compile_package, create_migration_diagnostic, detailed_migration_guide_msg, + max_feature_name_len, PROJECT_IS_COMPATIBLE, + }, + }, + get_migration_steps_or_return, instructive_error, + migrations::{DryRun, MigrationStep, MigrationStepKind, MigrationSteps, ProgramInfo}, +}; + +forc_util::cli_examples! 
{ + crate::cli::Opt { + [ Migrate the project in the current path => "forc migrate run"] + [ Migrate the project located in another path => "forc migrate run --path {path}" ] + [ Migrate the project offline without downloading any dependencies => "forc migrate run --offline" ] + } +} + +/// Migrate the project. +/// +/// Runs the migration steps and and guides you through the migration process. +#[derive(Debug, Parser)] +pub(crate) struct Command { + #[clap(flatten)] + pub run: cli::shared::Compile, +} + +/// Contains information about lexed [Module]s that are modified +/// during a migration step. +struct ModifiedModules<'a> { + source_engine: &'a SourceEngine, + modified_modules_paths: HashSet, +} + +impl<'a> ModifiedModules<'a> { + fn new(source_engine: &'a SourceEngine, occurrences_spans: &[Span]) -> Self { + Self { + source_engine, + modified_modules_paths: occurrences_spans + .iter() + .filter_map(|span| span.source_id().copied()) + .filter(|source_id| !source_engine.is_source_id_autogenerated(source_id)) + .map(|source_id| source_engine.get_path(&source_id)) + .collect(), + } + } + + /// Returns the `module`s path, if the `module` was modified. + fn get_path_if_modified(&self, module: &Module) -> Option { + module.source_id().and_then(|source_id| { + let path = self.source_engine.get_path(&source_id); + if self.modified_modules_paths.contains(&path) { + Some(path) + } else { + None + } + }) + } + + /// Returns the paths of modified modules, that are at the same + /// time marked as "dirty", means in-use by some other programs + /// like IDEs. + fn get_dirty_modified_modules_paths(&self) -> Vec<&PathBuf> { + self.modified_modules_paths + .iter() + .filter(|path| is_file_dirty(path)) + .collect() + } +} + +pub(crate) fn exec(command: Command) -> Result<()> { + let migration_steps = get_migration_steps_or_return!(); + let engines = Engines::default(); + let build_instructions = command.run; + + let mut program_info = compile_package(&engines, &build_instructions)?; + + // For migrations, we go with the following workflow. + // We have three possible situations: + // - we skip a migration step if it doesn't have any occurrences in code. + // We say that the step is *checked*. + // - we *check* an instruction migration step if it does have occurrences in code. + // We print those occurrences. + // - we *migrate* a code transformation step if it does have changes in code. + // We rewrite original code files with the changed code. + // We print just the number of the applied transformations. + // + // Skipping (checked) and checking will move to the next migration step. + // + // Migrating will stop the further execution of migration steps **if there are manual migration actions** + // to be done by developers. In that case, it will ask for manual action and instruct developers to review + // the changes before continuing migration. + // + // Migrating **without manual migration actions** will move to the next migration step **in the same feature**. + // If that was the last migration step in the feature, the migration will stop, and instruct the developer + // to review the migrations done in that feature, before continuing to migrate the next experimental feature. 
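    // An illustrative walk-through of the rules above (hypothetical features and steps):
    // with breaking change features `F1` and `F2`, where `F1` contains one automatic
    // code-modification step that finds occurrences and requires no manual actions,
    // the run rewrites the code for `F1`, finishes the remaining `F1` steps, and then
    // stops for a review before `F2`; re-running `forc migrate run` continues with `F2`.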
+ + print_migrating_action(migration_steps); + + let max_len = max_feature_name_len(migration_steps); + let last_migration_feature = migration_steps + .last() + .expect( + "`get_migration_steps_or_return!` guarantees that the `migration_steps` are not empty", + ) + .0; + let mut current_feature_migration_has_code_changes = false; + for (feature, migration_steps) in migration_steps.iter() { + for migration_step in migration_steps.iter() { + match migration_step.kind { + MigrationStepKind::Instruction(instruction) => { + let occurrences_spans = instruction(&program_info)?; + + print_instruction_result( + &engines, + max_len, + feature, + migration_step, + &occurrences_spans, + ); + + if !occurrences_spans.is_empty() { + println_yellow_bold("If you've already reviewed the above points, you can ignore this info."); + } + } + MigrationStepKind::CodeModification(modification, manual_migration_actions) => { + let occurrences_spans = modification(&mut program_info.as_mut(), DryRun::No)?; + + output_modified_modules( + &build_instructions.manifest_dir()?, + &program_info, + &occurrences_spans, + )?; + + let stop_migration_process = print_modification_result( + max_len, + feature, + migration_step, + manual_migration_actions, + &occurrences_spans, + &mut current_feature_migration_has_code_changes, + ); + if stop_migration_process == StopMigrationProcess::Yes { + return Ok(()); + } + } + MigrationStepKind::Interaction( + instruction, + interaction, + manual_migration_actions, + ) => { + let instruction_occurrences_spans = instruction(&program_info)?; + + print_instruction_result( + &engines, + max_len, + feature, + migration_step, + &instruction_occurrences_spans, + ); + + // We have occurrences, let's continue with the interaction. + if !instruction_occurrences_spans.is_empty() { + let interaction_occurrences_spans = + interaction(&mut program_info.as_mut())?; + + output_modified_modules( + &build_instructions.manifest_dir()?, + &program_info, + &interaction_occurrences_spans, + )?; + + let stop_migration_process = print_modification_result( + max_len, + feature, + migration_step, + manual_migration_actions, + &interaction_occurrences_spans, + &mut current_feature_migration_has_code_changes, + ); + if stop_migration_process == StopMigrationProcess::Yes { + return Ok(()); + } + } + } + }; + } + + // If there were code changes and this is not the last feature, + // stop for a review before continuing with the next feature. + if current_feature_migration_has_code_changes { + if *feature == last_migration_feature { + print_migration_finished_action(); + } else { + print_continue_migration_action("Review the changed code"); + } + + return Ok(()); + } + } + + // We've run through all the migration steps. + // Print the confirmation message, even if there were maybe infos + // displayed for manual reviews. + print_migration_finished_action(); + + Ok(()) +} + +#[derive(PartialEq, Eq)] +enum StopMigrationProcess { + Yes, + No, +} + +fn print_modification_result( + max_len: usize, + feature: &Feature, + migration_step: &MigrationStep, + manual_migration_actions: &[&str], + occurrences_spans: &[Span], + current_feature_migration_has_code_changes: &mut bool, +) -> StopMigrationProcess { + if occurrences_spans.is_empty() { + print_checked_action(max_len, feature, migration_step); + StopMigrationProcess::No + } else { + print_changing_code_action(max_len, feature, migration_step); + + // Print the confirmation. 
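        // E.g., for three changed occurrences this prints (illustrative output):
        // "Source code successfully changed (3 changes)."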
+ println!( + "Source code successfully changed ({} change{}).", + occurrences_spans.len(), + plural_s(occurrences_spans.len()) + ); + + // Check if we can proceed with the next migration step or break for manual action. + if !migration_step.has_manual_actions() { + // Mark the feature as having made code changes in the migration, and proceed with the + // next migration step *within the same feature*, if any. + *current_feature_migration_has_code_changes = true; + + StopMigrationProcess::No + } else { + // Display the manual migration actions and stop the further execution of the migration steps. + println!(); + println!("You still need to manually:"); + manual_migration_actions + .iter() + .for_each(|help| println!("- {help}")); + println!(); + println!("{}", detailed_migration_guide_msg(feature)); + print_continue_migration_action("Do the above manual changes"); + + StopMigrationProcess::Yes + } + } +} + +fn print_instruction_result( + engines: &Engines, + max_len: usize, + feature: &Feature, + migration_step: &MigrationStep, + occurrences_spans: &[Span], +) { + if occurrences_spans.is_empty() { + print_checked_action(max_len, feature, migration_step); + } else { + print_review_action(max_len, feature, migration_step); + + if let Some(diagnostic) = + create_migration_diagnostic(engines.se(), feature, migration_step, occurrences_spans) + { + format_diagnostic(&diagnostic); + } + } +} + +/// Outputs modified modules, if any, to their original files. +/// +/// A module is considered modified, if any of the [Span]s in `occurrences_spans` +/// has that module as its source. +fn output_modified_modules( + manifest_dir: &Path, + program_info: &ProgramInfo, + occurrences_spans: &[Span], +) -> Result<()> { + if occurrences_spans.is_empty() { + return Ok(()); + } + + let modified_modules = ModifiedModules::new(program_info.engines.se(), occurrences_spans); + + check_that_modified_modules_are_not_dirty(&modified_modules)?; + + output_changed_lexed_program(manifest_dir, &modified_modules, &program_info.lexed_program)?; + + Ok(()) +} + +fn check_that_modified_modules_are_not_dirty(modified_modules: &ModifiedModules) -> Result<()> { + let dirty_modules = modified_modules.get_dirty_modified_modules_paths(); + if !dirty_modules.is_empty() { + bail!(instructive_error("Files cannot be changed, because they are open in an editor and contain unsaved changes.", + &[ + "The below files are open in an editor and contain unsaved changes:".to_string(), + ] + .into_iter() + .chain(dirty_modules.iter().map(|file| format!(" - {}", file.display()))) + .chain(vec!["Please save the open files before running the migrations.".to_string()]) + .collect::>() + )); + } + Ok(()) +} + +fn output_changed_lexed_program( + manifest_dir: &Path, + modified_modules: &ModifiedModules, + lexed_program: &LexedProgram, +) -> Result<()> { + fn output_modules_rec( + manifest_dir: &Path, + modified_modules: &ModifiedModules, + lexed_module: &LexedModule, + ) -> Result<()> { + if let Some(path) = modified_modules.get_path_if_modified(&lexed_module.tree) { + let mut formatter = Formatter::from_dir(manifest_dir)?; + + let annotated_module = Annotated { + // TODO: Handle annotations instead of stripping them. 
+ // See: https://github.com/FuelLabs/sway/issues/6802 + attribute_list: vec![], + value: lexed_module.tree.clone(), + }; + + let code = formatter.format_module(&annotated_module)?; + + std::fs::write(path, code)?; + } + + for (_, lexed_submodule) in lexed_module.submodules.iter() { + output_modules_rec(manifest_dir, modified_modules, &lexed_submodule.module)?; + } + + Ok(()) + } + + output_modules_rec(manifest_dir, modified_modules, &lexed_program.root) +} + +fn print_migrating_action(migration_steps: MigrationSteps) { + println_action_green( + "Migrating", + &format!( + "Breaking change feature{} {}", + plural_s(migration_steps.len()), + sequence_to_str( + &migration_steps + .iter() + .map(|(feature, _)| feature.name()) + .collect_vec(), + Enclosing::None, + 4 + ), + ), + ); +} + +fn print_changing_code_action(max_len: usize, feature: &Feature, migration_step: &MigrationStep) { + println_action_yellow( + "Changing", + &full_migration_step_title(max_len, feature, migration_step), + ); +} + +fn print_checked_action(max_len: usize, feature: &Feature, migration_step: &MigrationStep) { + println_action_green( + "Checked", + &full_migration_step_title(max_len, feature, migration_step), + ); +} + +fn print_review_action(max_len: usize, feature: &Feature, migration_step: &MigrationStep) { + println_action_yellow( + "Review", + &full_migration_step_title(max_len, feature, migration_step), + ); +} + +fn print_migration_finished_action() { + println_action_green("Finished", PROJECT_IS_COMPATIBLE); +} + +fn print_continue_migration_action(txt: &str) { + println_action_yellow( + "Continue", + &format!( + "{} and re-run `forc migrate` to finish the migration process", + txt + ), + ); +} + +/// Returns the [MigrationStep::title] prefixed by its [Feature::name]. +fn full_migration_step_title( + max_len: usize, + feature: &Feature, + migration_step: &MigrationStep, +) -> String { + let feature_name_len = max_len + 2; + format!( + "{: "forc migrate show"] + } +} + +/// Show the upcoming breaking change features and their migration steps. +#[derive(Debug, Parser)] +pub(crate) struct Command {} + +pub(crate) fn exec(_command: Command) -> Result<()> { + let migration_steps = get_migration_steps_or_return!(); + + let feature_name_len = max_feature_name_len(migration_steps); + + // Convert migration steps to form suitable for printing (adding `None` for time estimates.) + let migration_steps = migration_steps + .iter() + .map(|(feature, steps)| { + ( + *feature, + steps.iter().map(|step| (step, None)).collect::>(), + ) + }) + .collect::>(); + + // Print the list of breaking change features. + println!("Breaking change features:"); + println!( + "{}", + sequence_to_list( + &migration_steps + .iter() + .map(|(feature, _)| format!( + "{:feature_name_len$} ({})", + feature.name(), + feature.url() + )) + .collect_vec(), + Indent::Single, + usize::MAX + ) + .join("\n") + ); + println!(); + + // Print migration steps. 
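    // The heading lists the number of steps per execution kind,
    // e.g., something like "Migration steps (1 manual, 2 automatic):".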
+ let mut num_of_steps_per_execution_kind = HashMap::::new(); + migration_steps + .iter() + .flat_map(|(_, steps)| steps) + .for_each(|(step, _)| { + *num_of_steps_per_execution_kind + .entry(step.execution()) + .or_insert(0) += 1 + }); + let num_of_steps_per_execution_kind = num_of_steps_per_execution_kind + .into_iter() + .filter(|(_, count)| *count > 0) + .sorted_by_key(|(execution, _)| *execution) + .map(|(execution, count)| { + format!( + "{count} {}", + match execution { + MigrationStepExecution::Manual => "manual", + MigrationStepExecution::Semiautomatic => "semiautomatic", + MigrationStepExecution::Automatic => "automatic", + }, + ) + }) + .collect_vec(); + println!( + "Migration steps ({}):", + sequence_to_str( + &num_of_steps_per_execution_kind, + Enclosing::None, + usize::MAX + ) + ); + print_features_and_migration_steps(&migration_steps); + + // Print experimental feature flags. + let features = migration_steps.iter().map(|(feature, _)| feature.name()); + + println!("Experimental feature flags:"); + println!( + "- for Forc.toml: experimental = {{ {} }}", + features + .clone() + .map(|feature| format!("{feature} = true")) + .collect::>() + .join(", ") + ); + println!( + "- for CLI: --experimental {}", + features.collect::>().join(",") + ); + + Ok(()) +} diff --git a/forc-plugins/forc-migrate/src/cli/mod.rs b/forc-plugins/forc-migrate/src/cli/mod.rs new file mode 100644 index 00000000000..2933350dc73 --- /dev/null +++ b/forc-plugins/forc-migrate/src/cli/mod.rs @@ -0,0 +1,75 @@ +//! The command line interface for `forc migrate`. +mod commands; +mod shared; + +use anyhow::Result; +use clap::{Parser, Subcommand}; +use forc_tracing::{init_tracing_subscriber, LevelFilter, TracingSubscriberOptions}; + +use self::commands::{check, run, show}; + +use check::Command as CheckCommand; +use run::Command as RunCommand; +use show::Command as ShowCommand; + +fn help() -> &'static str { + Box::leak( + format!( + "Examples:\n{}{}{}", + show::examples(), + check::examples(), + run::examples(), + ) + .trim_end() + .to_string() + .into_boxed_str(), + ) +} + +/// Forc plugin for migrating Sway projects to the next breaking change version of Sway. 
+#[derive(Debug, Parser)] +#[clap( + name = "forc-migrate", + after_help = help(), + version +)] +pub(crate) struct Opt { + /// The command to run + #[clap(subcommand)] + command: ForcMigrate, +} + +impl Opt { + fn silent(&self) -> bool { + match &self.command { + ForcMigrate::Show(_) => true, + ForcMigrate::Check(command) => command.check.silent, + ForcMigrate::Run(command) => command.run.silent, + } + } +} + +#[derive(Subcommand, Debug)] +enum ForcMigrate { + Show(ShowCommand), + Check(CheckCommand), + Run(RunCommand), +} + +pub fn run_cli() -> Result<()> { + let opt = Opt::parse(); + + let tracing_options = TracingSubscriberOptions { + silent: Some(opt.silent()), + log_level: Some(LevelFilter::INFO), + ..Default::default() + }; + + init_tracing_subscriber(tracing_options); + + match opt.command { + ForcMigrate::Show(command) => show::exec(command), + ForcMigrate::Check(command) => check::exec(command), + ForcMigrate::Run(command) => run::exec(command), + } +} diff --git a/forc-plugins/forc-migrate/src/cli/shared.rs b/forc-plugins/forc-migrate/src/cli/shared.rs new file mode 100644 index 00000000000..b3cb0abd83d --- /dev/null +++ b/forc-plugins/forc-migrate/src/cli/shared.rs @@ -0,0 +1,303 @@ +use std::path::PathBuf; + +use anyhow::{bail, Ok, Result}; +use clap::Parser; +use forc_pkg as pkg; +use forc_pkg::{ + manifest::{GenericManifestFile, ManifestFile}, + source::IPFSNode, +}; +use forc_tracing::println_action_green; +use sway_core::{BuildTarget, Engines}; +use sway_error::diagnostic::*; +use sway_features::Feature; +use sway_types::{SourceEngine, Span}; + +use crate::migrations::{MigrationStepKind, MigrationStepsWithOccurrences}; +use crate::{ + instructive_error, + migrations::{MigrationStep, MigrationStepExecution, ProgramInfo}, +}; + +/// Args that can be shared between all commands that `compile` a package. E.g. `check`, `run`. +#[derive(Debug, Default, Parser)] +pub(crate) struct Compile { + /// Path to the project. + /// + /// If not specified, current working directory will be used. + #[clap(short, long)] + pub path: Option, + /// Offline mode, prevents Forc from using the network when managing dependencies. + /// Meaning it will only try to use previously downloaded dependencies. + #[clap(long = "offline")] + pub offline: bool, + /// Requires that the Forc.lock file is up-to-date. If the lock file is missing, or it + /// needs to be updated, Forc will exit with an error. + #[clap(long)] + pub locked: bool, + /// The IPFS Node to use for fetching IPFS sources. + /// + /// Possible values: PUBLIC, LOCAL, + #[clap(long)] + pub ipfs_node: Option, + #[clap(flatten)] + pub experimental: sway_features::CliFields, + /// Silent mode. Don't output any warnings or errors to the command line. + #[clap(long = "silent", short = 's')] + pub silent: bool, +} + +impl Compile { + /// Returns the [Compile::path] if provided, otherwise the current directory. + pub(crate) fn manifest_dir(&self) -> std::io::Result { + if let Some(path) = &self.path { + std::result::Result::Ok(PathBuf::from(path)) + } else { + std::env::current_dir() + } + } +} + +// Clippy issue. It erroneously assumes that `vec!`s in `instructive_error` calls are not needed. 
+#[allow(clippy::useless_vec)] +pub(crate) fn compile_package<'a>( + engines: &'a Engines, + build_instructions: &Compile, +) -> Result> { + let manifest_dir = build_instructions.manifest_dir()?; + let manifest = ManifestFile::from_dir(manifest_dir.clone())?; + let ManifestFile::Package(pkg_manifest) = &manifest else { + bail!(instructive_error( + "`forc migrate` does not support migrating workspaces.", + &vec![ + &format!("\"{}\" is a workspace.", manifest.dir().to_string_lossy()), + "Please migrate each workspace member individually.", + ] + )); + }; + + println_action_green( + "Compiling", + &format!( + "{} ({})", + pkg_manifest.project_name(), + manifest.dir().to_string_lossy() + ), + ); + + let member_manifests = manifest.member_manifests()?; + let lock_path = manifest.lock_path()?; + + let ipfs_node = build_instructions.ipfs_node.clone().unwrap_or_default(); + let plan = pkg::BuildPlan::from_lock_and_manifests( + &lock_path, + &member_manifests, + build_instructions.locked, + build_instructions.offline, + &ipfs_node, + )?; + + let include_tests = true; // We want to migrate the tests as well. + let mut compile_results = pkg::check( + &plan, + BuildTarget::default(), + build_instructions.silent, + None, + include_tests, + engines, + None, + &build_instructions.experimental.experimental, + &build_instructions.experimental.no_experimental, + )?; + + let Some(programs) = + compile_results + .pop() + .and_then(|(programs, handler)| if handler.has_errors() { None } else { programs }) + else { + bail!(instructive_compilation_error( + &pkg_manifest.path().to_string_lossy() + )); + }; + + let core::result::Result::Ok(ty_program) = programs.typed else { + bail!(instructive_compilation_error( + &pkg_manifest.path().to_string_lossy() + )); + }; + + return Ok(ProgramInfo { + lexed_program: programs.lexed, + ty_program, + engines, + }); + + fn instructive_compilation_error(manifest_dir: &str) -> String { + instructive_error("The Sway project cannot be compiled.", &vec![ + &format!("`forc migrate` could not compile the Sway project located at \"{manifest_dir}\"."), + "To see the compilation errors, run `forc build` on the project.", + "Did you maybe forget to specify experimental features?", + "If the project uses experimental features, they need to be specified when running `forc migrate`.", + "E.g.:", + " forc migrate run --experimental ,", + ]) + } +} + +pub(crate) const PROJECT_IS_COMPATIBLE: &str = + "Project is compatible with the next breaking change version of Sway"; + +pub(crate) fn print_features_and_migration_steps( + features_and_migration_steps: MigrationStepsWithOccurrences, +) { + let show_migration_effort = features_and_migration_steps + .iter() + .flat_map(|(_, steps)| steps.iter().map(|step| step.1)) + .all(|occurrences| occurrences.is_some()); + + let mut total_migration_effort = 0; + for (feature, migration_steps) in features_and_migration_steps { + println!("{}", feature.name()); + for (migration_step, occurrence) in migration_steps.iter() { + println!( + " {} {}", + match migration_step.execution() { + MigrationStepExecution::Manual => "[M]", + MigrationStepExecution::Semiautomatic => "[S]", + MigrationStepExecution::Automatic => "[A]", + }, + migration_step.title + ); + + if show_migration_effort { + let count = occurrence + .expect("if the `show_migration_effort` is true, all occurrences are `Some`"); + // For automatic steps **that have occurrences**, plan ~10 minutes + // for the review of the automatically changed code. 
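                // Illustrative estimates: an automatic step (duration 0) that has occurrences
                // is estimated at a flat 10 minutes of review, while a step with `duration` 5
                // and 3 occurrences is estimated at 3 * 5 = 15 minutes.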
+ let migration_effort_in_mins = if migration_step.duration == 0 && count > 0 { + 10 + } else { + // Otherwise, a very simple linear calculation will give + // a decent and useful rough estimate. + count * migration_step.duration + }; + println!( + " Occurrences: {count:>5} Migration effort (hh::mm): ~{}\n", + duration_to_str(migration_effort_in_mins) + ); + total_migration_effort += migration_effort_in_mins; + } + } + + if !show_migration_effort { + println!(); + } + } + + if show_migration_effort { + println!( + "Total migration effort (hh::mm): ~{}", + duration_to_str(total_migration_effort) + ); + + // If there are no occurrences in code that require migration, + // inform that the project is compatible with the next breaking change version of Sway. + let num_of_occurrences = features_and_migration_steps + .iter() + .flat_map(|(_, steps)| steps.iter().map(|step| step.1.unwrap_or(0))) + .sum::(); + if num_of_occurrences == 0 { + println!(); + println!("{PROJECT_IS_COMPATIBLE}."); + } + } +} + +/// Creates a single migration [Diagnostic] that shows **all the occurrences** in code +/// that require migration effort expected by the `migration_step`. +/// +/// Returns `None` if the migration step is not necessary, in other words, if there +/// are no occurrences in code that require this particular migration. +pub(crate) fn create_migration_diagnostic( + source_engine: &SourceEngine, + feature: &Feature, + migration_step: &MigrationStep, + occurrences_spans: &[Span], +) -> Option { + if occurrences_spans.is_empty() { + return None; + } + + let description = format!("[{}] {}", feature.name(), migration_step.title); + Some(Diagnostic { + reason: Some(Reason::new(Code::migrations(1), description)), + issue: Issue::info(source_engine, occurrences_spans[0].clone(), "".into()), + hints: occurrences_spans + .iter() + .skip(1) + .map(|span| Hint::info(source_engine, span.clone(), "".into())) + .collect(), + help: migration_step + .help + .iter() + .map(|help| help.to_string()) + .chain(if migration_step.help.is_empty() { + vec![] + } else { + vec![Diagnostic::help_empty_line()] + }) + .chain(match migration_step.kind { + MigrationStepKind::Instruction(_) => vec![], + MigrationStepKind::CodeModification(_, []) => vec![], + MigrationStepKind::CodeModification(_, manual_migration_actions) => { + get_manual_migration_actions_help(manual_migration_actions) + } + MigrationStepKind::Interaction(_, _, []) => vec![ + "This migration step will interactively modify the code, based on your input." + .to_string(), + Diagnostic::help_empty_line(), + ], + MigrationStepKind::Interaction(_, _, manual_migration_actions) => vec![ + "This migration step will interactively modify the code, based on your input." 
+ .to_string(), + Diagnostic::help_empty_line(), + ] + .into_iter() + .chain(get_manual_migration_actions_help(manual_migration_actions)) + .collect(), + }) + .chain(vec![detailed_migration_guide_msg(feature)]) + .collect(), + }) +} + +fn get_manual_migration_actions_help(manual_migration_actions: &[&str]) -> Vec { + ["After the migration, you will still need to:".to_string()] + .into_iter() + .chain( + manual_migration_actions + .iter() + .map(|help| format!("- {help}")) + .chain(vec![Diagnostic::help_empty_line()]), + ) + .collect() +} + +pub(crate) fn detailed_migration_guide_msg(feature: &Feature) -> String { + format!("For a detailed migration guide see: {}", feature.url()) +} + +fn duration_to_str(duration_in_mins: usize) -> String { + let hours = duration_in_mins / 60; + let minutes = duration_in_mins % 60; + + format!("{hours:#02}:{minutes:#02}") +} + +pub(crate) fn max_feature_name_len(features: &[(Feature, T)]) -> usize { + features + .iter() + .map(|(feature, _)| feature.name().len()) + .max() + .unwrap_or_default() +} diff --git a/forc-plugins/forc-migrate/src/lib.rs b/forc-plugins/forc-migrate/src/lib.rs new file mode 100644 index 00000000000..ab8272a07d8 --- /dev/null +++ b/forc-plugins/forc-migrate/src/lib.rs @@ -0,0 +1,57 @@ +pub mod cli; +#[macro_use] +mod migrations; +mod matching; +mod modifying; + +use std::fmt::Display; +use std::io::{self, Write}; + +/// Returns a single error string formed of the `error` and `instructions`. +/// The returned string is formatted to be used as an error message in the [anyhow::bail] macro. +fn instructive_error(error: E, instructions: &[I]) -> String { + let mut error_message = vec![format!("{error}")]; + instructions + .iter() + .map(|inst| format!(" {inst}")) + .for_each(|inst| error_message.push(inst)); + error_message.join("\n") +} + +/// Returns a single error string representing an internal error. +/// The returned string is formatted to be used as an error message in the [anyhow::bail] macro. +fn internal_error(error: E) -> String { + instructive_error(error, &[ + "This is an internal error and signifies a bug in the `forc migrate` tool.", + "Please report this error by filing an issue at https://github.com/FuelLabs/sway/issues/new?template=bug_report.yml.", + ]) +} + +/// Prints a menu containing numbered `options` and asks to choose one of them. +/// Returns zero-indexed index of the chosen option. +fn print_single_choice_menu + Display>(options: &[S]) -> usize { + assert!( + options.len() > 1, + "There must be at least two options to choose from." + ); + + for (i, option) in options.iter().enumerate() { + println!("{}. 
{option}", i + 1); + } + + let mut choice = usize::MAX; + while choice == 0 || choice > options.len() { + print!("Enter your choice [1..{}]: ", options.len()); + io::stdout().flush().unwrap(); + let mut input = String::new(); + choice = match std::io::stdin().read_line(&mut input) { + Ok(_) => match input.trim().parse() { + Ok(choice) => choice, + Err(_) => continue, + }, + Err(_) => continue, + } + } + + choice - 1 +} diff --git a/forc-plugins/forc-migrate/src/main.rs b/forc-plugins/forc-migrate/src/main.rs new file mode 100644 index 00000000000..3bf4542b4df --- /dev/null +++ b/forc-plugins/forc-migrate/src/main.rs @@ -0,0 +1,5 @@ +use anyhow::Result; + +fn main() -> Result<()> { + forc_migrate::cli::run_cli() +} diff --git a/forc-plugins/forc-migrate/src/matching/lexed_tree.rs b/forc-plugins/forc-migrate/src/matching/lexed_tree.rs new file mode 100644 index 00000000000..25dce32ba6e --- /dev/null +++ b/forc-plugins/forc-migrate/src/matching/lexed_tree.rs @@ -0,0 +1,155 @@ +//! This module contains helper functions for matching elements within a lexed program. + +use super::{any_mut, LexedElementsMatcher, LexedElementsMatcherDeep}; +use sway_ast::{ItemKind, ItemStorage, StorageEntry, StorageField}; +use sway_core::language::lexed::{LexedModule, LexedProgram}; + +impl LexedElementsMatcher for LexedProgram { + fn match_elems<'a, F>(&'a mut self, predicate: F) -> impl Iterator + where + F: Fn(&&'a mut ItemStorage) -> bool + Clone + 'a, + ItemStorage: 'a, + { + // Storage can be declared only in the root of a contract. + self.root.match_elems(predicate) + } +} + +impl LexedElementsMatcher for LexedModule { + fn match_elems<'a, F>(&'a mut self, predicate: F) -> impl Iterator + where + F: Fn(&&'a mut ItemStorage) -> bool + Clone + 'a, + ItemStorage: 'a, + { + self.tree + .items + .iter_mut() + .map(|annotated_item| &mut annotated_item.value) + .filter_map(move |decl| match decl { + ItemKind::Storage(ref mut item_storage) => { + if predicate(&item_storage) { + Some(item_storage) + } else { + None + } + } + _ => None, + }) + } +} + +impl LexedElementsMatcher for ItemStorage { + fn match_elems<'a, F>(&'a mut self, predicate: F) -> impl Iterator + where + F: Fn(&&'a mut StorageField) -> bool + Clone + 'a, + StorageField: 'a, + { + self.entries + .inner + .iter_mut() + .map(|annotated_item| &mut annotated_item.value) + .filter_map(move |storage_entry| { + storage_entry.field.as_mut().filter(|sf| predicate(sf)) + }) + } +} + +impl LexedElementsMatcherDeep for ItemStorage { + fn match_elems_deep<'a, F>(&'a mut self, predicate: F) -> Vec<&'a mut StorageField> + where + F: Fn(&&'a mut StorageField) -> bool + Clone + 'a, + StorageField: 'a, + { + fn recursively_collect_storage_fields_in_storage_entry<'a, P>( + result: &mut Vec<&'a mut StorageField>, + predicate: P, + storage_entry: &'a mut StorageEntry, + ) where + P: Fn(&&'a mut StorageField) -> bool + Clone + 'a, + { + if let Some(ref mut sf) = storage_entry.field { + if predicate(&sf) { + result.push(sf) + } + } + + if let Some(ref mut namespace) = storage_entry.namespace { + namespace + .inner + .iter_mut() + .map(|annotated_item| &mut annotated_item.value) + .for_each(|storage_entry| { + recursively_collect_storage_fields_in_storage_entry( + result, + predicate.clone(), + storage_entry.as_mut(), + ) + }); + } + } + + let mut result = vec![]; + self.entries + .inner + .iter_mut() + .map(|annotated_item| &mut annotated_item.value) + .for_each(|storage_entry| { + recursively_collect_storage_fields_in_storage_entry( + &mut result, + predicate.clone(), + 
storage_entry, + ) + }); + + result + } +} + +pub mod matchers { + use super::*; + + pub(crate) fn storage_decl
<P>
(parent: &mut P) -> Option<&mut ItemStorage> + where + P: LexedElementsMatcher, + { + parent.match_elems(any_mut).next() + } + + #[allow(dead_code)] + pub(crate) fn storage_fields<'a, P, F>( + parent: &'a mut P, + predicate: F, + ) -> impl Iterator + where + F: Fn(&&'a mut StorageField) -> bool + Clone + 'a, + P: LexedElementsMatcher, + { + parent.match_elems(predicate) + } + + pub(crate) fn storage_fields_deep<'a, S, F>( + scope: &'a mut S, + predicate: F, + ) -> Vec<&'a mut StorageField> + where + F: Fn(&&'a mut StorageField) -> bool + Clone + 'a, + S: LexedElementsMatcherDeep, + { + scope.match_elems_deep(predicate) + } +} + +pub mod predicates { + pub mod lexed_storage_field { + use super::super::*; + + #[allow(dead_code)] + pub(crate) fn with_in_keyword(storage_field: &&mut StorageField) -> bool { + storage_field.key_expr.is_some() + } + + pub(crate) fn without_in_keyword(storage_field: &&mut StorageField) -> bool { + storage_field.key_expr.is_none() + } + } +} diff --git a/forc-plugins/forc-migrate/src/matching/mod.rs b/forc-plugins/forc-migrate/src/matching/mod.rs new file mode 100644 index 00000000000..989d1398544 --- /dev/null +++ b/forc-plugins/forc-migrate/src/matching/mod.rs @@ -0,0 +1,251 @@ +//! This module contains common API for matching elements +//! within a lexed or a typed tree. +//! +//! A typical migration will search for certain elements in the +//! lexed or typed tree and modify them within the lexed tree. +//! +//! In the long term we want to have advanced infrastructure for both +//! matching and modifying parts of the trees, as discussed in +//! https://github.com/FuelLabs/sway/issues/6836. +//! +//! Currently, we will start (very) small, by providing reusable +//! module functions for matching parts of the trees. +//! +//! For concrete examples, see the match functions and trait impls +//! implemented in the sub-modules. +//! +//! ## Design decisions +//! +//! The goal was pragmatic. To create a simple to develop and extend API that +//! will offer easy discoverability of provided functions and methods, all in +//! order to move cumbersome and error-prone matching code out of the migration +//! logic. +//! +//! Migrations will use module level match functions to either search directly +//! within a parent or recursively (deep) within a scope. Match functions can +//! accept predicates to filter the searched elements. The predicates deliberately +//! accept `&&TElement` or `&&mut TElement` so that can be easily passed to +//! [Iterator::filter] function. +//! +//! ## Matching elements in trees +//! +//! Functions matching on lexed tree require mutable references as +//! input and return mutable references as output. This is according +//! to the premise that the non-code-modifying analysis will be done +//! on typed trees, while the code-modifying will be done on the +//! mutable lexed tree, as well as the typed tree. +//! +//! Matching can be done either directly within a parent, or recursively +//! within a scope. E.g., we can match for `StorageField`s that are +//! directly under the `storage` declaration, or for all `StorageField`s +//! that are in the `storage` declaration, in any of the namespaces, +//! recursively. +//! +//! Searching for elements "in-between", e.g., `StorageField`s in a particular +//! sub-namespace, is currently not supported, and must be done manually +//! within a migration. +//! +//! Matching is done on lexical or typed elements like, e.g., `StorageField`, +//! 
or `TyStorageField`, without any more convenient abstraction provided for +//! matching. This is also a simple beginning. A better matching framework +//! would expose a stable higher level abstraction for matching and modifying. +//! +//! ## Locating equivalent elements across trees +//! +//! Often we will find an element in the lexed tree, e.g., a `StorageField` in +//! order to change it, but will need additional information from its typed tree +//! counterpart, `TyStorageField`, or vice versa. The [TyLocate] trait offers +//! the [TyLocate::locate] method for finding a typed equivalent of a lexed +//! element. The [LexedLocate] does the opposite. +//! +//! Locating an equivalent will in most of the cases be implemented via equality +//! of spans. Locating can also cause multiple traversals of the same part of +//! a tree. For migrations, this will not cause a performance problem. + +mod lexed_tree; +mod typed_tree; + +pub(crate) use typed_tree::matchers as ty_match; +pub(crate) use typed_tree::predicates::ty_storage_field; + +pub(crate) use lexed_tree::matchers as lexed_match; +pub(crate) use lexed_tree::predicates::lexed_storage_field; + +/// Matches for typed tree elements of type `T` located **directly** within +/// the typed tree element `self`. +/// +/// The matched elements must satisfy the `predicate`. +pub(crate) trait TyElementsMatcher { + fn match_elems<'a, P>(&'a self, predicate: P) -> impl Iterator + where + P: Fn(&&'a T) -> bool + Clone + 'a, + T: 'a; +} + +/// Matches for typed tree elements of type `T` located **recursively** within +/// the typed tree element `self` or any of its children. The meaning of a +/// "child" depends on the exact tree element `self`. +/// +/// The matched elements must satisfy the `predicate`. +pub(crate) trait TyElementsMatcherDeep { + fn match_elems_deep<'a, F>(&'a self, predicate: F) -> Vec<&'a T> + where + F: Fn(&&'a T) -> bool + Clone + 'a, + T: 'a; +} + +/// Within a typed tree element `self`, locates and returns the element of type `Ty`, +/// that is the typed equivalent of the `lexed_element`. +pub(crate) trait TyLocate { + fn locate(&self, lexed_element: &Lexed) -> Option<&Ty>; +} + +/// Matches for lexed tree elements of type `T` located **directly** within +/// the lexed tree element `self`. +/// +/// The matched elements must satisfy the `predicate`. +pub(crate) trait LexedElementsMatcher { + fn match_elems<'a, F>(&'a mut self, predicate: F) -> impl Iterator + where + F: Fn(&&'a mut T) -> bool + Clone + 'a, + T: 'a; +} + +/// Matches for lexed tree elements of type `T` located **recursively** within +/// the lexed tree element `self` or any of its children. The meaning of a +/// "child" depends on the exact tree element `self`. +/// +/// The matched elements must satisfy the `predicate`. +pub(crate) trait LexedElementsMatcherDeep { + fn match_elems_deep<'a, F>(&'a mut self, predicate: F) -> Vec<&'a mut T> + where + F: Fn(&&'a mut T) -> bool + Clone + 'a, + T: 'a; +} + +/// Within a lexed tree element `self`, locates and returns the element of type `Lexed`, +/// that is the lexed equivalent of the `ty_element`. +#[allow(dead_code)] +pub(crate) trait LexedLocate { + fn locate(&mut self, ty_element: &Ty) -> Option<&mut Lexed>; +} + +/// A predicate that returns true for any input. +/// Convenient to use in [TyElementsMatcher] and [TyElementsMatcherDeep]. +pub(crate) fn any(_t: &&T) -> bool { + true +} + +/// A predicate that returns true for any input. +/// Convenient to use in [LexedElementsMatcher] and [LexedElementsMatcherDeep]. 
+pub(crate) fn any_mut(_t: &&mut T) -> bool { + true +} + +/// Returns a predicate that evaluates to true if all the predicates passed +/// as arguments evaluate to true. +#[macro_export] +macro_rules! all_of { + ($($i:expr),+) => { + $crate::matching::all_of([$($i, )*].as_slice()) + }; +} + +/// Returns a predicate that evaluates to true if all the `predicates` +/// evaluate to true. +/// +/// Not intended to be used directly. Use [all_of!] macro instead. +#[allow(dead_code)] +pub(crate) fn all_of(predicates: &[P]) -> impl Fn(&&T) -> bool + Clone + '_ +where + P: Fn(&&T) -> bool + Clone, +{ + move |t: &&T| { + let mut res = true; + for predicate in predicates { + res &= predicate(t); + } + res + } +} + +/// Returns a predicate that evaluates to true if all the predicates passed +/// as arguments evaluate to true. +#[macro_export] +macro_rules! all_of_mut { + ($($i:expr),+) => { + $crate::matching::all_of_mut([$($i, )*].as_slice()) + }; +} + +/// Returns a predicate that evaluates to true if all the `predicates` +/// evaluate to true. +/// +/// Not intended to be used directly. Use [all_of_mut!] macro instead. +#[allow(dead_code)] +pub(crate) fn all_of_mut(predicates: &[P]) -> impl Fn(&&mut T) -> bool + Clone + '_ +where + P: Fn(&&mut T) -> bool + Clone, +{ + move |t: &&mut T| { + let mut res = true; + for predicate in predicates { + res &= predicate(t); + } + res + } +} + +/// Returns a predicate that evaluates to true if any of the predicates passed +/// as arguments evaluate to true. +#[macro_export] +macro_rules! any_of { + ($($i:expr),+) => { + $crate::matching::any_of([$($i, )*].as_slice()) + }; +} + +/// Returns a predicate that evaluates to true if any of the `predicates` +/// evaluate to true. +/// +/// Not intended to be used directly. Use [any_of!] macro instead. +#[allow(dead_code)] +pub(crate) fn any_of(predicates: &[P]) -> impl Fn(&&T) -> bool + Clone + '_ +where + P: Fn(&&T) -> bool + Clone, +{ + move |t: &&T| { + let mut res = false; + for predicate in predicates { + res |= predicate(t); + } + res + } +} + +/// Returns a predicate that evaluates to true if any of the predicates passed +/// as arguments evaluate to true. +#[macro_export] +macro_rules! any_of_mut { + ($($i:expr),+) => { + $crate::matching::any_of_mut([$($i, )*].as_slice()) + }; +} + +/// Returns a predicate that evaluates to true if any of the `predicates` +/// evaluate to true. +/// +/// Not intended to be used directly. Use [any_of_mut!] macro instead. +#[allow(dead_code)] +pub(crate) fn any_of_mut(predicates: &[P]) -> impl Fn(&&mut T) -> bool + Clone + '_ +where + P: Fn(&&mut T) -> bool + Clone, +{ + move |t: &&mut T| { + let mut res = false; + for predicate in predicates { + res |= predicate(t); + } + res + } +} diff --git a/forc-plugins/forc-migrate/src/matching/typed_tree.rs b/forc-plugins/forc-migrate/src/matching/typed_tree.rs new file mode 100644 index 00000000000..a15387c6c6a --- /dev/null +++ b/forc-plugins/forc-migrate/src/matching/typed_tree.rs @@ -0,0 +1,123 @@ +//! This module contains helper functions for matching elements within a typed program. 
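//!
//! A minimal usage sketch (illustrative; `storage_decl` is assumed to be a `&TyStorageDecl`
//! obtained elsewhere):
//!
//! ```ignore
//! use crate::matching::{ty_match, ty_storage_field};
//!
//! // All typed storage fields declared without the `in` keyword,
//! // anywhere in the storage declaration (including namespaces).
//! let fields = ty_match::storage_fields_deep(storage_decl, ty_storage_field::without_in_keyword);
//! ```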
+ +use super::{any, TyElementsMatcher, TyElementsMatcherDeep, TyLocate}; +use sway_ast::StorageField; +use sway_core::{ + decl_engine::id::DeclId, + language::ty::{TyAstNodeContent, TyDecl, TyModule, TyProgram, TyStorageDecl, TyStorageField}, +}; +use sway_types::Spanned; + +impl TyElementsMatcher> for TyProgram { + fn match_elems<'a, F>(&'a self, predicate: F) -> impl Iterator> + where + F: Fn(&&'a DeclId) -> bool + Clone + 'a, + DeclId: 'a, + { + // Storage can be declared only in the root of a contract. + self.root_module.match_elems(predicate) + } +} + +impl TyElementsMatcher> for TyModule { + fn match_elems<'a, F>(&'a self, predicate: F) -> impl Iterator> + where + F: Fn(&&'a DeclId) -> bool + Clone + 'a, + DeclId: 'a, + { + self.all_nodes + .iter() + .filter_map(move |decl| match &decl.content { + TyAstNodeContent::Declaration(TyDecl::StorageDecl(storage_decl)) => { + if predicate(&&storage_decl.decl_id) { + Some(&storage_decl.decl_id) + } else { + None + } + } + _ => None, + }) + } +} + +impl TyElementsMatcher for TyStorageDecl { + fn match_elems<'a, F>(&'a self, predicate: F) -> impl Iterator + where + F: Fn(&&'a TyStorageField) -> bool + Clone + 'a, + TyStorageField: 'a, + { + self.fields + .iter() + // In the `TyStorageDecl`, all the fields are flattened. + // But we need to preserve the semantics of non-deep matching + // and return only those that are directly under the storage. + .filter(|sf| sf.full_name().starts_with("storage.")) + .filter(predicate) + } +} + +impl TyElementsMatcherDeep for TyStorageDecl { + fn match_elems_deep<'a, F>(&'a self, predicate: F) -> Vec<&'a TyStorageField> + where + F: Fn(&&'a TyStorageField) -> bool + Clone + 'a, + TyStorageField: 'a, + { + self.fields.iter().filter(predicate).collect() + } +} + +impl TyLocate for TyStorageDecl { + fn locate(&self, lexed_element: &StorageField) -> Option<&TyStorageField> { + self.fields + .iter() + .find(|field| field.name.span() == lexed_element.name.span()) + } +} + +pub mod matchers { + use super::*; + + pub(crate) fn storage_decl
<P>
(parent: &P) -> Option> + where + P: TyElementsMatcher>, + { + parent.match_elems(any).next().copied() + } + + #[allow(dead_code)] + pub(crate) fn storage_fields<'a, P, F>( + parent: &'a P, + predicate: F, + ) -> impl Iterator + where + F: Fn(&&'a TyStorageField) -> bool + Clone + 'a, + P: TyElementsMatcher, + { + parent.match_elems(predicate) + } + + pub(crate) fn storage_fields_deep<'a, S, F>( + scope: &'a S, + predicate: F, + ) -> Vec<&'a TyStorageField> + where + F: Fn(&&'a TyStorageField) -> bool + Clone + 'a, + S: TyElementsMatcherDeep, + { + scope.match_elems_deep(predicate) + } +} + +pub mod predicates { + pub mod ty_storage_field { + use super::super::*; + + pub(crate) fn with_in_keyword(storage_field: &&TyStorageField) -> bool { + storage_field.key_expression.is_some() + } + + pub(crate) fn without_in_keyword(storage_field: &&TyStorageField) -> bool { + storage_field.key_expression.is_none() + } + } +} diff --git a/forc-plugins/forc-migrate/src/migrations/demo.rs b/forc-plugins/forc-migrate/src/migrations/demo.rs new file mode 100644 index 00000000000..6c48b74ab09 --- /dev/null +++ b/forc-plugins/forc-migrate/src/migrations/demo.rs @@ -0,0 +1,120 @@ +//! This module contains demo migrations used for learning and testing the migration tool. + +use std::vec; + +use crate::migrations::{visit_lexed_modules_mut, MutProgramInfo}; +use anyhow::{Ok, Result}; +use sway_ast::{ + attribute::Annotated, + keywords::{FnToken, Keyword}, + Braces, CodeBlockContents, FnSignature, ItemFn, Module, Parens, Punctuated, +}; +use sway_core::Engines; +use sway_types::{Ident, Span, Spanned}; + +use super::{DryRun, MigrationStep, MigrationStepKind}; + +#[allow(dead_code)] +pub(super) const INSERT_EMPTY_FUNCTION_STEP: MigrationStep = MigrationStep { + title: "Insert `empty_function` at the end of every module", + duration: 0, + kind: MigrationStepKind::CodeModification(insert_empty_function_step, &[]), + help: &[ + "Migration will insert an empty function named `empty_function`", + "at the end of every module, unless the function with the same", + "name already exists in the module.", + "E.g., `fn empty_function() {}`.", + ], +}; + +fn insert_empty_function_step( + program_info: &mut MutProgramInfo, + dry_run: DryRun, +) -> Result> { + fn insert_empty_function_step_impl( + _engines: &Engines, + module: &mut Module, + dry_run: DryRun, + ) -> Result> { + // TODO: Simplify this demo migration by using matchers and modifiers. + let mut result = vec![]; + + // Code transformations must be idempotent. In this demo, if the function + // with the name `empty_function` already exists, we do not insert it. + let existing_empty_function = module + .items + .iter() + .map(|annotated| &annotated.value) + .filter_map(|decl| match decl { + sway_ast::ItemKind::Fn(module_fn) => Some(module_fn), + _ => None, + }) + .find(|module_fn| module_fn.fn_signature.name.as_str() == "empty_function"); + + if existing_empty_function.is_some() { + return Ok(result); + } + + // If the module is empty, insert right after the module kind, + // otherwise, after the last item. + let result_span = match module.items.last() { + Some(annotated_item) => annotated_item.span(), + None => module.semicolon_token.span(), + }; + + result.push(result_span.clone()); + + if matches!(dry_run, DryRun::Yes) { + return Ok(result); + } + + // Not a dry-run, proceed with the code change. + + let insert_span = Span::empty_at_end(&result_span); + + // Construct the `empty_function`. + // Note that we are using the `insert_span` for all the required spans. 
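        // When the modified module is later formatted and written back, the inserted
        // item renders as `fn empty_function() {}` (see this step's help text above).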
+ let empty_function = sway_ast::ItemKind::Fn(ItemFn { + fn_signature: FnSignature { + visibility: None, + fn_token: FnToken::new(insert_span.clone()), + name: Ident::new_with_override("empty_function".into(), insert_span.clone()), + generics: None, + arguments: Parens { + inner: sway_ast::FnArgs::Static(Punctuated { + value_separator_pairs: vec![], + final_value_opt: None, + }), + span: insert_span.clone(), + }, + return_type_opt: None, + where_clause_opt: None, + }, + body: Braces { + inner: CodeBlockContents { + statements: vec![], + final_expr_opt: None, + span: insert_span.clone(), + }, + span: insert_span, + }, + }); + + // Add the constructed `empty_function` to the module items. + module.items.push(Annotated { + attribute_list: vec![], + value: empty_function, + }); + + Ok(result) + } + + let res = visit_lexed_modules_mut( + program_info.engines, + program_info.lexed_program, + dry_run, + insert_empty_function_step_impl, + )?; + + Ok(res.into_iter().flatten().collect()) +} diff --git a/forc-plugins/forc-migrate/src/migrations/mod.rs b/forc-plugins/forc-migrate/src/migrations/mod.rs new file mode 100644 index 00000000000..df060920728 --- /dev/null +++ b/forc-plugins/forc-migrate/src/migrations/mod.rs @@ -0,0 +1,355 @@ +//! This module contains common API for defining and implementing individual +//! [MigrationStep]s. +//! +//! Migration steps are defined in the submodules. Every submodule has the name +//! of the corresponding breaking change Sway feature and contains all the +//! migration steps needed to migrate that feature. +//! +//! The special [demo] submodule contains demo migrations used for learning and testing +//! the migration tool. + +mod demo; +mod references; +mod storage_domains; + +use std::collections::HashSet; + +use anyhow::Result; +use sway_ast::Module; +use sway_core::{ + language::{ + lexed::{LexedModule, LexedProgram}, + ty::TyProgram, + }, + Engines, +}; +use sway_features::Feature; +use sway_types::Span; + +pub(crate) struct ProgramInfo<'a> { + pub lexed_program: LexedProgram, + pub ty_program: TyProgram, + pub engines: &'a Engines, +} + +/// Wrapper over [ProgramInfo] that provides write access +/// to the [LexedProgram], but only read access to the +/// [TyProgram] and the [Engines]. It is used in migrations +/// that modify the source code by altering the lexed program. +pub(crate) struct MutProgramInfo<'a> { + pub lexed_program: &'a mut LexedProgram, + #[allow(dead_code)] + pub ty_program: &'a TyProgram, + pub engines: &'a Engines, +} + +impl<'a> ProgramInfo<'a> { + pub(crate) fn as_mut(&mut self) -> MutProgramInfo { + MutProgramInfo { + lexed_program: &mut self.lexed_program, + ty_program: &self.ty_program, + engines: self.engines, + } + } +} + +/// A single migration step in the overall migration process. +pub(crate) struct MigrationStep { + /// Migration step unique title. + /// + /// Formulated as a continuation of a suggestion to a developer: You should \. + /// + /// Titles are short, start with a capital letter and do not end in punctuation. + /// + /// E.g.: Replace `ref mut` function parameters with `&mut` + /// + /// In particular, titles of the manual migration steps start with "Review". + pub title: &'static str, + /// An estimated time (in minutes) needed for the manual part of migrating + /// a single typical occurrence of the change represented by this step. + /// + /// The estimate includes **all** the manual effort. 
+ /// + /// E.g., to replace a single `ref mut` function parameter with `&mut`, the migration + /// will change the function signature. The manual part of the effort will be changing + /// the callers and eventually adding dereferencing in the function body. + /// + /// Fully automated migration steps, and only them, can have `duration` set to zero. + pub duration: usize, + pub kind: MigrationStepKind, + /// A short help for the migration step. + /// + /// If the `kind` is a [MigrationStepKind::CodeModification], start the help + /// with "Migration will", to point out that the migration is a (semi)automatic one + /// and causes changes in the source file. + /// + /// E.g.: Migration will replace `ref mut` function parameters with `&mut`. + /// + /// It is advisable to provide the short help, but it is not mandatory. + /// Every migration step will have an automatic help line that points to + /// the detailed migration guide provided in the feature tracking issue. + pub help: &'static [&'static str], +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] +pub(crate) enum MigrationStepExecution { + Manual, + Semiautomatic, + Automatic, +} + +impl MigrationStep { + pub(crate) fn execution(&self) -> MigrationStepExecution { + use MigrationStepExecution::*; + match self.kind { + MigrationStepKind::Instruction(_) => Manual, + MigrationStepKind::CodeModification(_, manual_migration_actions) + if !manual_migration_actions.is_empty() => + { + Semiautomatic + } + MigrationStepKind::CodeModification(_, _) => Automatic, + MigrationStepKind::Interaction(_, _, _) => Semiautomatic, + } + } + + pub(crate) fn has_manual_actions(&self) -> bool { + match self.kind { + MigrationStepKind::Instruction(_) => true, + MigrationStepKind::CodeModification(_, []) => false, + MigrationStepKind::CodeModification(_, _) => true, + MigrationStepKind::Interaction(_, _, []) => false, + MigrationStepKind::Interaction(_, _, _) => true, + } + } +} + +/// Denotes that a migration step that changes the source code should +/// be executed in a dry-run mode, means just returning the places in code +/// to be changed, but without performing the actual change. +#[derive(Clone, Copy)] +pub(crate) enum DryRun { + Yes, + No, +} + +/// A function that analyses a program given by the [ProgramInfo] and returns +/// the [Span]s of all the places in the program code that need to be addressed +/// during a manual migration step. +/// +/// The function does not modify the original program, and can use either the +/// [ProgramInfo::lexed_program] or the [ProgramInfo::ty_program], or both, +/// to perform the analysis. +type InstructionFn = for<'a> fn(&'a ProgramInfo<'a>) -> Result>; + +/// A function that analyses a program given by the [MutProgramInfo] and returns +/// the [Span]s of all the places in the **original** program code that will be changed +/// during an automatic or semiautomatic migration step. +/// +/// The function modifies the [LexedProgram] to perform the required code change, +/// unless the [DryRun] parameter is set to [DryRun::Yes]. +type CodeModificationFn = for<'a> fn(&'a mut MutProgramInfo<'a>, DryRun) -> Result>; + +/// A function that interacts with the developer, eventually modifying the original +/// program given by [MutProgramInfo]. The developer's input decides if the modification +/// will happen or not. +/// +/// Returns the [Span]s of all the places in the **original** program code that are +/// changed during the interaction. 
+type InteractionFn = for<'a> fn(&'a mut MutProgramInfo<'a>) -> Result>; + +/// A function that visits the [Module], potentially alters it, and returns a +/// [Result] containing related information about the [Module]. +/// +/// For its usages, see [visit_lexed_modules_mut]. +type ModuleVisitorFn = for<'a> fn(&'a Engines, &'a mut Module, DryRun) -> Result; + +pub(crate) enum MigrationStepKind { + /// A migration step that provides instructions to developers, + /// and explains a manual action they should take. + Instruction(InstructionFn), + /// A migration step that automatically modifies the original source code, + /// and optionally gives additional instructions to developers + /// for manual post-migration actions. + /// + /// The [CodeModificationFn] modifies and overwrites the original source code. + /// The second parameter contains the _manual migration actions_. + /// Those actions need to be done by developers after the automatic part + /// of the migration is executed. + /// + /// Manual migration actions start with a lowercase letter and end with a dot. + /// + /// E.g.: change function callers, by adding `&mut` to passed parameters. + /// + /// **If a [MigrationStepKind::CodeModification] does not have + /// _manual migration actions_, it is considered to be a fully automated migration, + /// after which the migration process can safely continue.** + CodeModification(CodeModificationFn, &'static [&'static str]), + /// A migration step that first provides instructions to developers, + /// and afterwards interacts with them, giving additional instructions + /// and asking for additional input. + /// + /// Based on the input received during the interaction, the [InteractionFn] + /// can modify the original source code. + /// + /// The second parameter contains the _manual migration actions_. + /// Those actions still need to be done by developers after the automatic part + /// of the migration is executed during the interaction. + /// + /// Manual migration actions start with a lowercase letter and end with a dot. + /// + /// E.g.: change function callers, by adding `&mut` to passed parameters. + /// + /// **If a [MigrationStepKind::Interaction] does not have + /// _manual migration actions_, it is considered to be finished after the interaction, + /// after which the migration process can safely continue.** + /// + /// Note that, in the general case, the [InstructionFn] and the [InteractionFn] + /// can return different [Span]s. E.g., during the instruction a single + /// span can be returned pointing to a module in which the change needs + /// to be done, while the interaction will return the actual places in the + /// module that were modified. + Interaction(InstructionFn, InteractionFn, &'static [&'static str]), +} + +/// A convenient method for visiting all the [LexedModule]s within a [LexedProgram]. +/// The `visitor` will be called for every module, and the method will return the +/// [Vec] containing the results of all the visitor calls. +/// +/// The `visitor` can mutate the modules.
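+///
+/// For illustration only, a hypothetical visitor that merely counts the items in every
+/// module could be passed to this method like so (`count_items` is not part of the
+/// actual migration tool; this is a sketch):
+///
+/// ```ignore
+/// fn count_items(_engines: &Engines, module: &mut Module, _dry_run: DryRun) -> Result<usize> {
+///     Ok(module.items.len())
+/// }
+///
+/// let items_per_module =
+///     visit_lexed_modules_mut(engines, &mut lexed_program, DryRun::Yes, count_items)?;
+/// ```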
+pub(crate) fn visit_lexed_modules_mut( + engines: &Engines, + lexed_program: &mut LexedProgram, + dry_run: DryRun, + visitor: ModuleVisitorFn, +) -> Result> { + fn visit_modules_rec( + engines: &Engines, + lexed_module: &mut LexedModule, + dry_run: DryRun, + visitor: ModuleVisitorFn, + result: &mut Vec, + ) -> Result<()> { + let visitor_result = visitor(engines, &mut lexed_module.tree, dry_run)?; + result.push(visitor_result); + for (_, lexed_submodule) in lexed_module.submodules.iter_mut() { + visit_modules_rec( + engines, + &mut lexed_submodule.module, + dry_run, + visitor, + result, + )?; + } + Ok(()) + } + + let mut result = vec![]; + visit_modules_rec( + engines, + &mut lexed_program.root, + dry_run, + visitor, + &mut result, + )?; + Ok(result) +} + +/// Registered [MigrationStep]s. +pub(crate) type MigrationSteps = &'static [(Feature, &'static [MigrationStep])]; + +/// Keeps the number of occurrences of each [MigrationStep] +/// after the analysis is executed. +pub(crate) type MigrationStepsWithOccurrences<'a> = + &'a [(Feature, Vec<(&'a MigrationStep, Option)>)]; + +/// Returns a non-empty set of consistent migration steps. +/// +/// All the CLI commands require at least one migration step. +/// This macro conveniently short-circuits and returns, +/// if there are no migration steps defined. +/// +/// Panics if the migration steps are not consistent. +#[macro_export] +macro_rules! get_migration_steps_or_return { + () => {{ + let migration_steps = $crate::migrations::get_migration_steps(); + + if migration_steps.is_empty() { + println!("There are currently no migration steps defined for the upcoming breaking change version of Sway."); + return Ok(()); + } + + migration_steps + }}; +} + +pub(crate) fn get_migration_steps() -> MigrationSteps { + assert_migration_steps_consistency(MIGRATION_STEPS); + MIGRATION_STEPS +} + +/// Panics if the migration steps are not consistent. +fn assert_migration_steps_consistency(migration_steps: MigrationSteps) { + if migration_steps.is_empty() { + return; + } + + // Each experimental feature can appear only once in the migration steps. + let num_of_features_in_migration_steps = migration_steps.len(); + let num_of_unique_features_in_migration_steps = migration_steps + .iter() + .map(|(feature, _)| feature) + .collect::>() + .len(); + if num_of_features_in_migration_steps != num_of_unique_features_in_migration_steps { + panic!("Inconsistent migration steps: each experimental feature can appear only once in the migration steps."); + } + + // Migration step titles must be unique. + let num_of_migration_steps = migration_steps + .iter() + .map(|(_, steps)| steps.len()) + .sum::(); + let num_of_migration_steps_with_unique_title = migration_steps + .iter() + .flat_map(|(_, steps)| steps.iter().map(|step| step.title)) + .collect::>() + .len(); + if num_of_migration_steps != num_of_migration_steps_with_unique_title { + panic!("Inconsistent migration steps: migration step titles must be unique."); + } + + // Only fully automatic steps can have duration set to zero. 
+ let has_non_automatic_steps_with_zero_duration = migration_steps + .iter() + .flat_map(|(_, steps)| { + steps.iter().map(|step| { + ( + matches!(step.execution(), MigrationStepExecution::Automatic), + step.duration, + ) + }) + }) + .any(|(is_automatic, duration)| !is_automatic && duration == 0); + if has_non_automatic_steps_with_zero_duration { + panic!("Inconsistent migration steps: only fully automatic steps can have duration set to zero."); + } +} + +/* + ------------------------------ Migration Steps ------------------------------- + Below are the actual migration steps. Change those steps for every new + breaking change version of Sway, by removing the previous steps and adding the + ones relevant for the next breaking change version. +*/ + +/// The list of the migration steps, grouped by the Sway features that cause +/// the breaking changes behind the migration steps. +const MIGRATION_STEPS: MigrationSteps = &[( + Feature::StorageDomains, + &[ + self::storage_domains::REVIEW_STORAGE_SLOT_KEYS_STEP, + self::storage_domains::DEFINE_BACKWARD_COMPATIBLE_STORAGE_SLOT_KEYS_STEP, + ], +)]; diff --git a/forc-plugins/forc-migrate/src/migrations/references.rs b/forc-plugins/forc-migrate/src/migrations/references.rs new file mode 100644 index 00000000000..9be3432ee5d --- /dev/null +++ b/forc-plugins/forc-migrate/src/migrations/references.rs @@ -0,0 +1,138 @@ +use std::vec; + +use crate::migrations::{visit_lexed_modules_mut, MutProgramInfo}; +use anyhow::{Ok, Result}; +use itertools::Itertools; +use sway_ast::{ + keywords::{AmpersandToken, Keyword, MutToken, Token}, + Module, +}; +use sway_core::Engines; +use sway_types::{Span, Spanned}; + +use super::{DryRun, MigrationStep, MigrationStepKind}; + +#[allow(dead_code)] +pub(super) const REPLACE_REF_MUT_FN_PARAMETERS_STEP: MigrationStep = MigrationStep { + title: "Replace `ref mut` function parameters with `&mut`", + duration: 5, + kind: MigrationStepKind::CodeModification( + replace_ref_mut_fn_parameters_step, + &[ + "change function callers, by adding `&mut` to passed parameters.", + "change function bodies, by dereferencing (`*`) parameters where needed.", + ], + ), + help: &[ + "Migration will replace `ref mut` function parameters with `&mut`.", + "E.g., `ref mut x: u64` will become `x: &mut u64`.", + ], +}; + +// TODO: This is an incomplete implementation of the migration step. +// It does not search for all possible occurrences of `ref mut`. +// It is provided as an example of how complex migrations that +// transform code can be written. The complete implementation +// will be provided by the time the "references" experimental +// feature get out of the experimental phase. +// +// Also, this migration step will be disabled for the next +// breaking change version of Sway. It is currently enabled for +// the sake of testing and trying out the `forc migrate` tool. + +// TODO: Simplify this migration by using matchers and modifiers. +fn replace_ref_mut_fn_parameters_step( + program_info: &mut MutProgramInfo, + dry_run: DryRun, +) -> Result> { + fn replace_ref_mut_fn_parameters_step_impl( + _engines: &Engines, + module: &mut Module, + dry_run: DryRun, + ) -> Result> { + let mut result = vec![]; + + // TODO: Current implementation inspects only module functions. Extend it + // to cover all functions (in traits, self-impls, trait-impls, etc.). 
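+        // For every such function, the span of the original `ref mut <name>` part is collected, and,
+        // when this is not a dry-run, the parameter is rewritten in place to `<name>: &mut <type>`.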
+ + for module_fn in module + .items + .iter_mut() + .map(|annotated| &mut annotated.value) + .filter_map(|decl| match decl { + sway_ast::ItemKind::Fn(module_fn) => Some(module_fn), + _ => None, + }) + { + let fn_args = &mut module_fn.fn_signature.arguments.inner; + + let fn_args = match fn_args { + sway_ast::FnArgs::Static(punctuated) => punctuated, + sway_ast::FnArgs::NonStatic { .. } => unreachable!( + "Module functions are always static and cannot have the `self` argument." + ), + }; + + let mut fn_args = fn_args.iter_mut().collect_vec(); + + if fn_args.is_empty() { + continue; + } + + for fn_arg in fn_args.iter_mut() { + match &mut fn_arg.pattern { + sway_ast::Pattern::Var { + reference: ref_opt @ Some(_), + mutable: mut_opt @ Some(_), + name, + } => { + // Note that we cannot bind inside the `Some`s, because we would be mutably borrowing twice, + // once in, e.g., `ref_opt` and once in the `Some` for its content. + // That's why, unfortunately, the `expect`. + let result_span = Span::join( + ref_opt + .as_ref() + .expect("`ref_opt` is `Some` in the match arm pattern") + .span(), + &name.span(), + ); + result.push(result_span); + + // Replace `ref mut` with `&mut` if it is not a dry-run. + if matches!(dry_run, DryRun::No) { + *ref_opt = None; + *mut_opt = None; + + // We will insert the `&` and `mut` tokens right before the existing argument type. + let insert_span = Span::empty_at_start(&fn_arg.ty.span()); + + // Change the original type into a reference to it. + fn_arg.ty = sway_ast::Ty::Ref { + ampersand_token: AmpersandToken::new(insert_span.clone()), + mut_token: Some(MutToken::new(insert_span)), + ty: Box::new(fn_arg.ty.clone()), + }; + } + + // TODO: Find the usages of the function and add `&mut` to the passed parameters. + + // TODO: Dereference the parameters in the function body.
+ } + _ => continue, + } + } + } + + Ok(result) + } + + let res = visit_lexed_modules_mut( + program_info.engines, + program_info.lexed_program, + dry_run, + replace_ref_mut_fn_parameters_step_impl, + )?; + + Ok(res.into_iter().flatten().collect()) + // Ok(res) +} diff --git a/forc-plugins/forc-migrate/src/migrations/storage_domains.rs b/forc-plugins/forc-migrate/src/migrations/storage_domains.rs new file mode 100644 index 00000000000..470863653c9 --- /dev/null +++ b/forc-plugins/forc-migrate/src/migrations/storage_domains.rs @@ -0,0 +1,263 @@ +use std::collections::HashSet; + +use super::{MigrationStep, MigrationStepKind, MutProgramInfo}; +use crate::{ + internal_error, + matching::{ + lexed_match, lexed_storage_field, ty_match, + ty_storage_field::{with_in_keyword, without_in_keyword}, + TyLocate, + }, + migrations::ProgramInfo, + modifying::Modifier, + print_single_choice_menu, +}; +use anyhow::{bail, Ok, Result}; +use itertools::Itertools; +use num_bigint::BigUint; +use sha2::{Digest, Sha256}; +use sway_core::language::{ + ty::{TyExpressionVariant, TyStorageField}, + CallPath, CallPathType, Literal, +}; +use sway_error::formatting::{self, sequence_to_list}; +use sway_types::{Ident, Span, Spanned}; + +pub(super) const REVIEW_STORAGE_SLOT_KEYS_STEP: MigrationStep = MigrationStep { + title: "Review explicitly defined slot keys in storage declarations (`in` keywords)", + duration: 2, + kind: MigrationStepKind::Instruction(review_storage_slot_keys_step), + help: &[ + "If the slot keys used in `in` keywords represent keys generated for `storage` fields", + "by the Sway compiler, those keys might need to be recalculated.", + " ", + "The previous formula for calculating storage keys was: `sha256(\"storage.\")`.", + "The new formula is: `sha256((0u8, \"storage.\"))`.", + ], +}; + +pub(super) const DEFINE_BACKWARD_COMPATIBLE_STORAGE_SLOT_KEYS_STEP: MigrationStep = MigrationStep { + title: "Explicitly define storage slot keys if they need to be backward compatible", + // We will be pointing to the storage declaration and offer automatic migration. + // In case of a suggestion the manual effort will be reviewing the purpose of the + // contract, which we will approximate with 10 minutes. 
+ duration: 10, + kind: MigrationStepKind::Interaction( + define_backward_compatible_storage_slot_keys_step_instruction, + define_backward_compatible_storage_slot_keys_step_interaction, + &[], + ), + help: &[ + "If the contract owning this storage is behind a proxy, or for any other reason needs", + "to use previous storage slot keys, those keys must be explicitly assigned to the", + "storage fields by using the `in` keyword.", + " ", + "E.g.:", + " storage {", + " field in <slot key>: u64 = 0,", + " }", + " ", + "The previous formula for calculating storage keys was: `sha256(\"storage.<field name>\")`.", + "The new formula is: `sha256((0u8, \"storage.<field name>\"))`.", + ], +}; + +fn review_storage_slot_keys_step(program_info: &ProgramInfo) -> Result> { + let mut res = vec![]; + + let Some(storage_decl_id) = ty_match::storage_decl(&program_info.ty_program) else { + return Ok(res); + }; + + let storage_decl = &*program_info.engines.de().get_storage(&storage_decl_id); + + let well_known_slot_keys = get_well_known_slot_keys(); + let well_known_slot_keys_constants = get_well_known_slot_keys_constants(); + + for (storage_field, key_expression) in + ty_match::storage_fields_deep(storage_decl, with_in_keyword) + .iter() + .map(|sf| { + ( + sf, + sf.key_expression + .as_ref() + .expect("storage key has in keyword"), + ) + }) + { + // If the key expression represents a well-known slot defined in + // Sway Standards or Sway Libraries, do not suggest checking it. + let is_well_known_slot_key = match &key_expression.expression { + TyExpressionVariant::Literal(Literal::B256(slot_key)) => { + well_known_slot_keys.contains(&BigUint::from_bytes_be(slot_key.as_slice())) + } + TyExpressionVariant::ConstantExpression { + call_path: Some(call_path), + .. + } => well_known_slot_keys_constants.contains(call_path), + _ => false, + }; + if is_well_known_slot_key { + continue; + } + + // If the storage fields are behind a proxy and must remain backward compatible, + // the next migration will assign them the slots calculated by the previous algorithm. + // + // If we see that the `in` keyword assigns a literal that corresponds to the slot calculated + // by the previous algorithm, we recognize it as backward compatibility and do not suggest + // reviewing the slot. + let is_backward_compatibility_slot_key = match &key_expression.expression { + TyExpressionVariant::Literal(Literal::B256(slot_key)) => { + slot_key == &get_previous_slot_key(storage_field) + } + _ => false, + }; + if is_backward_compatibility_slot_key { + continue; + } + + res.push(key_expression.span.clone()); + } + + Ok(res) +} + +fn define_backward_compatible_storage_slot_keys_step_instruction( + program_info: &ProgramInfo, +) -> Result> { + let mut res = vec![]; + + let Some(storage_decl_id) = ty_match::storage_decl(&program_info.ty_program) else { + return Ok(res); + }; + + let storage_decl = &*program_info.engines.de().get_storage(&storage_decl_id); + + // It is hard to have any better heuristic here. Essentially, every contract + // could be behind a proxy and we do not have a means to detect that. + // So, we will provide the suggestion if the storage has any fields without the `in` keyword. + // The suggestion is shown only once on the entire `storage` declaration, + // to avoid cluttering. The interaction part of the step will then provide + // more detailed information and guide the developers.
+ if !ty_match::storage_fields_deep(storage_decl, without_in_keyword).is_empty() { + res.push(storage_decl.span.clone()); + } + + Ok(res) +} + +fn define_backward_compatible_storage_slot_keys_step_interaction( + program_info: &mut MutProgramInfo, +) -> Result> { + let mut res = vec![]; + let Some(storage_decl_id) = ty_match::storage_decl(program_info.ty_program) else { + return Ok(res); + }; + + let storage_decl = &*program_info.engines.de().get_storage(&storage_decl_id); + + let storage_fields_without_in_keyword = + ty_match::storage_fields_deep(storage_decl, without_in_keyword); + + println!( + "The following storage fields will have slot keys calculated by using the new formula:" + ); + sequence_to_list( + &storage_fields_without_in_keyword + .iter() + .map(|field| field.full_name()) + .collect_vec(), + formatting::Indent::Single, + 10, + ) + .iter() + .for_each(|field_full_name| println!("{field_full_name}")); + println!(); + println!("Do you want these fields to have backward compatible storage slot keys, calculated"); + println!("by using the previous formula?"); + println!(); + println!("If yes, this migration step will insert `in` keywords to all of the above fields,"); + println!("and calculate the storage slot keys by using the previous formula."); + println!(); + + if print_single_choice_menu(&[ + "Yes, assign the backward compatible storage slot keys.", + "No, this contract does not require backwards compatibility.", + ]) == 0 + { + let Some(storage_declaration) = lexed_match::storage_decl(program_info.lexed_program) + else { + bail!(internal_error( + "Lexical storage declaration cannot be found." + )); + }; + + for lexed_storage_field in lexed_match::storage_fields_deep( + storage_declaration, + lexed_storage_field::without_in_keyword, + ) { + let Some(ty_storage_field) = storage_decl.locate(lexed_storage_field) else { + bail!(internal_error(format!( + "Typed storage field \"{}\" cannot be found.", + lexed_storage_field.name + ))); + }; + + res.push(ty_storage_field.name.span()); + + let mut storage_key_modifier = Modifier::new(lexed_storage_field); + storage_key_modifier.with_in_key(BigUint::from_bytes_be( + get_previous_slot_key(ty_storage_field).as_slice(), + )); + } + } + + Ok(res) +} + +/// Returns storage slot keys defined in Sway Standards and Sway Libraries, +/// as [BigUint]s that represents `b256` storage addresses. +fn get_well_known_slot_keys() -> HashSet { + // For SRC14 well-known slot keys see: https://docs.fuel.network/docs/sway-libs/upgradability/#upgradability-library + let src14_target = BigUint::parse_bytes( + b"7bb458adc1d118713319a5baa00a2d049dd64d2916477d2688d76970c898cd55", + 16, + ) + .unwrap(); + let src14_proxy_owner = BigUint::parse_bytes( + b"bb79927b15d9259ea316f2ecb2297d6cc8851888a98278c0a2e03e1a091ea754", + 16, + ) + .unwrap(); + + HashSet::from_iter(vec![src14_target, src14_proxy_owner]) +} + +/// Returns [CallPath]s of constants that hold storage slot keys +/// defined in Sway Standards and Sway Libraries. 
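+/// E.g., `sway_libs::upgradability::PROXY_OWNER_STORAGE` or `standards::src14::SRC14_TARGET_STORAGE`.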
+fn get_well_known_slot_keys_constants() -> HashSet { + let slot_keys_constants = vec![ + // For SRC14 well-known slot keys see: https://docs.fuel.network/docs/sway-libs/upgradability/#upgradability-library + ("sway_libs", "upgradability", "PROXY_OWNER_STORAGE"), + ("standards", "src14", "SRC14_TARGET_STORAGE"), + ] + .into_iter() + .map(|path_parts| CallPath { + prefixes: vec![ + Ident::new_no_span(path_parts.0.into()), + Ident::new_no_span(path_parts.1.into()), + ], + suffix: Ident::new_no_span(path_parts.2.into()), + callpath_type: CallPathType::Full, + }); + + HashSet::from_iter(slot_keys_constants) +} + +fn get_previous_slot_key(storage_field: &TyStorageField) -> [u8; 32] { + let mut hasher = Sha256::new(); + hasher.update(storage_field.full_name()); + hasher.finalize().into() +} diff --git a/forc-plugins/forc-migrate/src/modifying/mod.rs b/forc-plugins/forc-migrate/src/modifying/mod.rs new file mode 100644 index 00000000000..c2d9a1d6229 --- /dev/null +++ b/forc-plugins/forc-migrate/src/modifying/mod.rs @@ -0,0 +1,14 @@ +//! This module contains common API for modifying elements within a lexed tree. + +mod storage_field; + +/// A wrapper around a lexed tree element that will be modified. +pub(crate) struct Modifier<'a, T> { + element: &'a mut T, +} + +impl<'a, T> Modifier<'a, T> { + pub(crate) fn new(element: &'a mut T) -> Self { + Self { element } + } +} diff --git a/forc-plugins/forc-migrate/src/modifying/storage_field.rs b/forc-plugins/forc-migrate/src/modifying/storage_field.rs new file mode 100644 index 00000000000..bb172e701a2 --- /dev/null +++ b/forc-plugins/forc-migrate/src/modifying/storage_field.rs @@ -0,0 +1,64 @@ +use num_bigint::BigUint; +use sway_ast::{ + keywords::{InToken, Keyword}, + Expr, LitInt, StorageField, +}; +use sway_types::{Span, Spanned}; + +use super::Modifier; + +pub(crate) trait ToInKey { + fn to_in_key(self, span: Span) -> Expr; +} + +impl ToInKey for BigUint { + fn to_in_key(self, span: Span) -> Expr { + Expr::Literal(sway_ast::Literal::Int(LitInt { + span, + parsed: self, + ty_opt: None, + is_generated_b256: true, + })) + } +} + +impl ToInKey for Expr { + fn to_in_key(self, _span: Span) -> Expr { + // TODO: Provide infrastructure for replacing spans on the elements + // of a lexed tree. This will be useful in modifications in + // which we generate new tree elements by copying existing. + // + // Until then, in this demo on how to develop `Modifier`s, + // just return `self`, without the spans replaced. + self + } +} + +impl<'a> Modifier<'a, StorageField> { + pub(crate) fn with_in_key(&mut self, key: K) -> &mut Self { + // If the `in` token already exists, just replace the key and leave the `in` + // token as is. Place the key after the `in` token. + let insert_span = if let Some(in_token) = &self.element.in_token { + Span::empty_at_end(&in_token.span()) + } else { + // Otherwise, place the `in` token after the name. 
+ Span::empty_at_end(&self.element.name.span()) + }; + + if self.element.in_token.is_none() { + self.element.in_token = Some(InToken::new(insert_span.clone())); + } + + self.element.key_expr = Some(key.to_in_key(insert_span)); + + self + } + + #[allow(dead_code)] + pub(crate) fn without_in_key(&mut self) -> &mut Self { + self.element.in_token = None; + self.element.key_expr = None; + + self + } +} diff --git a/forc-tracing/src/lib.rs b/forc-tracing/src/lib.rs index c331b0a8a71..e4c6c860ac4 100644 --- a/forc-tracing/src/lib.rs +++ b/forc-tracing/src/lib.rs @@ -23,20 +23,24 @@ fn get_action_indentation(action: &str) -> String { /// Prints an action message with a green-bold prefix like " Compiling ". pub fn println_action_green(action: &str, txt: &str) { - tracing::info!( - "{}{} {}", - get_action_indentation(action), - Colour::Green.bold().paint(action), - txt - ); + println_action(action, txt, Colour::Green); } /// Prints an action message with a red-bold prefix like " Removing ". pub fn println_action_red(action: &str, txt: &str) { + println_action(action, txt, Colour::Red); +} + +/// Prints an action message with a yellow-bold prefix like " Finished ". +pub fn println_action_yellow(action: &str, txt: &str) { + println_action(action, txt, Colour::Yellow); +} + +fn println_action(action: &str, txt: &str, color: Colour) { tracing::info!( "{}{} {}", get_action_indentation(action), - Colour::Red.bold().paint(action), + color.bold().paint(action), txt ); } @@ -64,6 +68,18 @@ pub fn println_green(txt: &str) { println_std_out(txt, Colour::Green); } +pub fn println_yellow(txt: &str) { + println_std_out(txt, Colour::Yellow); +} + +pub fn println_green_bold(txt: &str) { + tracing::info!("{}", Colour::Green.bold().paint(txt)); +} + +pub fn println_yellow_bold(txt: &str) { + tracing::info!("{}", Colour::Yellow.bold().paint(txt)); +} + pub fn println_yellow_err(txt: &str) { println_std_err(txt, Colour::Yellow); } diff --git a/forc-util/src/fs_locking.rs b/forc-util/src/fs_locking.rs index 8e6f438337b..3a769f7c5d5 100644 --- a/forc-util/src/fs_locking.rs +++ b/forc-util/src/fs_locking.rs @@ -138,6 +138,15 @@ impl PidFileLocking { } } +/// Checks if the specified file is marked as "dirty". +/// This is used to prevent changing files that are currently open in an editor +/// with unsaved changes. +/// +/// Returns `true` if a corresponding "dirty" flag file exists, `false` otherwise. +pub fn is_file_dirty>(path: X) -> bool { + PidFileLocking::lsp(path.as_ref()).is_locked() +} + #[cfg(test)] mod test { use super::PidFileLocking; diff --git a/forc-util/src/lib.rs b/forc-util/src/lib.rs index 03775dcf8f1..eb1ede2d6b3 100644 --- a/forc-util/src/lib.rs +++ b/forc-util/src/lib.rs @@ -448,7 +448,7 @@ pub fn create_diagnostics_renderer() -> Renderer { ) } -fn format_diagnostic(diagnostic: &Diagnostic) { +pub fn format_diagnostic(diagnostic: &Diagnostic) { /// Temporary switch for testing the feature. /// Keep it false until we decide to fully support the diagnostic codes. 
const SHOW_DIAGNOSTIC_CODE: bool = false; @@ -500,6 +500,7 @@ fn format_diagnostic(diagnostic: &Diagnostic) { let renderer = create_diagnostics_renderer(); match diagnostic.level() { + Level::Info => tracing::info!("{}\n____\n", renderer.render(snippet)), Level::Warning => tracing::warn!("{}\n____\n", renderer.render(snippet)), Level::Error => tracing::error!("{}\n____\n", renderer.render(snippet)), } @@ -561,6 +562,7 @@ fn format_diagnostic(diagnostic: &Diagnostic) { fn diagnostic_level_to_annotation_type(level: Level) -> AnnotationType { match level { + Level::Info => AnnotationType::Info, Level::Warning => AnnotationType::Warning, Level::Error => AnnotationType::Error, } diff --git a/scripts/mdbook-forc-documenter/examples/forc_migrate.md b/scripts/mdbook-forc-documenter/examples/forc_migrate.md new file mode 100644 index 00000000000..d1f4972e002 --- /dev/null +++ b/scripts/mdbook-forc-documenter/examples/forc_migrate.md @@ -0,0 +1,188 @@ + + +# Migrating Sway projects + +`forc-migrate` guides you through breaking changes between Sway versions. It fully or semiautomatically adapts your code, making it compatible with the next breaking change version of Sway. + +`forc-migrate` migrates the code to the _next_ breaking change version of Sway. That means, if you want to migrate to, e.g., Sway v0.**67**.0, you will need to use the _latest v0.**66**.x_ version of the `forc-migrate`. + +For example, let's say that your Sway project is on version _v0.66.1_, and that the latest v0.66 version is _v0.66.42_. You should first update your Fuel toolchain to version _v0.66.42_ of `forc`, and compile your project with that version: + +```text +fuelup component add forc@0.66.42 +``` + +Sway guarantees that all the versions with the same minor version, _0.66_ in the above example, are compatible. That means that the latest patch version, _0.66.42_ in the example, will correctly compile your project. + +## Showing the breaking changes + +Once you've installed the latest non-breaking version of `forc-migrate`, use the `show` command to make yourself familiar with the upcoming breaking changes: + +```text +forc migrate show +``` + +A typical output of the `show` command will look like this: + +```text +Breaking change features: + - storage_domains (https://github.com/FuelLabs/sway/issues/6701) + - references (https://github.com/FuelLabs/sway/issues/5063) + +Migration steps (1 manual and 1 semiautomatic): +storage_domains + [M] Review explicitly defined slot keys in storage declarations (`in` keywords) + +references + [S] Replace `ref mut` function parameters with `&mut` + +Experimental feature flags: +- for Forc.toml: experimental = { storage_domains = true, references = true } +- for CLI: --experimental storage_domains,references +``` + +The output will contain: + +- the upcoming breaking change features, `storage_domains` and `references` in this example, +- their tracking issues on GitHub, with detailed migration guides, +- and the migration steps potentially required to migrate existing code. + +The migration steps can be _manual_, _semiautomatic_, or fully _automatic_. They are marked in the output with `[M]`, `[S]`, and `[A]`, respectively. + +The `show` command will also provide experimental feature flags that will be needed during the migration, as explained in the next chapter. + +## Migrating a single Sway project + +Let's assume that we want to migrate a Sway project called `my_project` that depends on `std` and a `third_party_lib`. 
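+
+For orientation, such a project typically has a layout along the following lines (the tree is illustrative; `Forc.lock` appears after the first build, and a library would use `src/lib.sw` instead of `src/main.sw`):
+
+```text
+my_project/
+├── Forc.toml
+├── Forc.lock
+└── src/
+    └── main.sw
+```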
+ +First, we will go to the `my_project` folder, e.g.: `cd my_project`. All of the upcoming CLI commands assume that we are running the `forc-migrate` tool within the `my_project` folder. + +Before migrating the code, make sure that the project builds without any errors by running: + +```text +forc build +``` + +### Check the migration summary + +Next, let's `check` the project. The `check` command will dry-run the migration steps. It will not make any changes to the code, but will provide detailed information about all the places in the code that need to be either reviewed or changed during the migration process. The `check` command will also provide a rough time estimate for the migration. + +```text +forc migrate check +``` + +The output of the `check` command will end with a summary of the migration effort, containing: + +- the number of occurrences of a particular migration step in the project's code, +- the rough migration effort estimate for each migration step, +- and the rough total migration effort. + +```text +Migration effort: + +storage_domains + [M] Review explicitly defined slot keys in storage declarations (`in` keywords) + Occurrences: 3 Migration effort (hh::mm): ~00:06 + +references + [S] Replace `ref mut` function parameters with `&mut` + Occurrences: 18 Migration effort (hh::mm): ~01:30 + +Total migration effort (hh::mm): ~01:36 +``` + +Before the summary, instructions will be shown for each migration step. A typical instruction output for a single migration step will contain: + +- the name of the step, +- the places in code affected by the migration step, +- and a short help text with a link to the detailed migration guide. + +```text +info: [references] Replace `ref mut` function parameters with `&mut` + --> my_project/src/main.sw:30:51 + | +... +30 | fn ref_mut_fn(ref mut x: u64) {} + | --------- +... +35 | fn another_ref_mut_fn(ref mut arg: S) {} + | ----------- + | + = help: Migration will replace `ref mut` function parameters with `&mut`. + = help: E.g., `ref mut x: u64` will become `x: &mut u64`. + = help: + = help: After the migration, you will still need to: + = help: - change function callers, by adding `&mut` to passed parameters. + = help: - change function bodies, by dereferencing (`*`) parameters where needed. + = help: + = help: For a detailed migration guide see: https://github.com/FuelLabs/sway/issues/5063 +``` + +### Update dependencies + +Before running the migrations on the project itself, **first update the project dependencies to the versions that use the next Sway breaking change version**. + +In our example, `my_project`'s `Forc.toml` file will have a `[dependencies]` section similar to this one: + +```toml +[dependencies] +std = { git = "https://github.com/FuelLabs/sway", tag = "v0.66.1" } +third_party_lib = { git = "https://github.com/ThirdParty/swaylib", tag = "v1.0.0" } +``` + +Assuming that the `third_party_lib` version compatible with Sway v0.67.0 is v2.0.0, we end up with the following changes: + +```toml +[dependencies] +# Changed v0.66.1 -> v0.67.0 +std = { git = "https://github.com/FuelLabs/sway", tag = "v0.67.0" } +# Changed v1.0.0 -> v2.0.0 +third_party_lib = { git = "https://github.com/ThirdParty/swaylib", tag = "v2.0.0" } +``` + +Run `forc build` to make sure that the project still compiles.
**At this point, it is very likely that you will need to compile the project with the experimental features turned on.** The reason is that the new `std`, or the updated `third_party_lib`, will very likely already use the new Sway features. + +To compile the project with experimental features, you can take the feature flags from the `forc migrate show` output and place them either in the `[build-profile]` section of the project's `Forc.toml` file, or pass them to `forc build` via the command line. + +```text +Experimental feature flags: +- for Forc.toml: experimental = { storage_domains = true, references = true } +- for CLI: --experimental storage_domains,references +``` + +In the remaining part of this tutorial, we will be passing the feature flags via the command line. E.g.: + +```text +forc build --experimental storage_domains,references +``` + +### Run the migrations + +Once `my_project` successfully builds with the updated dependencies, we can `run` the migration steps on it. E.g.: + +```text +forc migrate run --experimental storage_domains,references +``` + +The `run` command will execute the migration steps and guide you through the migration process. For each migration step, the output of the step can be one of the following: + +| Step output | Meaning | +| ----------- | ------- | +| Checked | The step was executed and does not require any changes in the code. No action needed. | +| Review | The step suggests a manual code review. | +| Changing | The step is automatically changing the code. There might be additional manual actions needed. | + +At the end of the `run`, the migration will either: + +- guide you to `Continue` the migration process by performing the manual actions and re-running `forc migrate run` afterwards, +- or mark the migration process as `Finished`. At this point, your project will be compatible with the next breaking change version of Sway. + +## Migrating workspaces + +To migrate a workspace, you will need to migrate each workspace member separately, following the above procedure. The projects should be migrated in the order of their dependencies. + +## Additional after-migration steps + +There are some additional manual steps that might be needed after the migration. + +E.g., if tests use hardcoded contract IDs, those need to be changed, because the new version of Sway will very likely produce different bytecode. diff --git a/sway-ast/src/literal.rs b/sway-ast/src/literal.rs index 0b175235506..0e560519a06 100644 --- a/sway-ast/src/literal.rs +++ b/sway-ast/src/literal.rs @@ -17,6 +17,18 @@ pub struct LitInt { pub span: Span, pub parsed: BigUint, pub ty_opt: Option<(LitIntType, Span)>, + /// True if this [LitInt] represents a `b256` hex literal + /// in a manually generated lexed tree. + /// + /// `b256` hex literals are not explicitly modeled in the + /// [Literal]. During parsing, they are parsed as [LitInt] + /// with [LitInt::ty_opt] set to `None`. + /// + /// To properly render manually created `b256` hex literals + /// that are not backed by a [Span] in the source code, + /// we need this additional information to distinguish + /// them from `u256` hex literals.
+ pub is_generated_b256: bool, } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Hash, Serialize, Deserialize)] diff --git a/sway-ast/src/module.rs b/sway-ast/src/module.rs index 41c8c7a2909..31278b36c13 100644 --- a/sway-ast/src/module.rs +++ b/sway-ast/src/module.rs @@ -1,3 +1,5 @@ +use sway_types::SourceId; + use crate::priv_prelude::*; #[derive(Clone, Debug, Serialize)] @@ -17,6 +19,10 @@ impl Module { } }) } + + pub fn source_id(&self) -> Option { + self.kind.span().source_id().copied() + } } impl Spanned for Module { diff --git a/sway-ast/src/punctuated.rs b/sway-ast/src/punctuated.rs index af6cf423f89..94db8e000c6 100644 --- a/sway-ast/src/punctuated.rs +++ b/sway-ast/src/punctuated.rs @@ -21,6 +21,20 @@ impl Punctuated { } } + pub fn iter(&self) -> impl Iterator { + self.value_separator_pairs + .iter() + .map(|(t, _)| t) + .chain(self.final_value_opt.iter().map(|t| &**t)) + } + + pub fn iter_mut(&mut self) -> impl Iterator { + self.value_separator_pairs + .iter_mut() + .map(|(t, _)| t) + .chain(self.final_value_opt.iter_mut().map(|t| &mut **t)) + } + /// Returns true if the [Punctuated] ends with the punctuation token. /// E.g., `fn fun(x: u64, y: u64,)`. pub fn has_trailing_punctuation(&self) -> bool { diff --git a/sway-ast/src/token.rs b/sway-ast/src/token.rs index 31161a5f862..4419e8969aa 100644 --- a/sway-ast/src/token.rs +++ b/sway-ast/src/token.rs @@ -43,7 +43,6 @@ pub enum CommentKind { /// /// ```sway /// pub fn main() -> bool { - /// /// // Newlined comment /// true /// } diff --git a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs index c8f3a492732..5615851fb60 100644 --- a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs +++ b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs @@ -3415,6 +3415,7 @@ fn literal_to_literal( parsed, ty_opt, span, + is_generated_b256: _, } = lit_int; match ty_opt { None => { diff --git a/sway-error/src/diagnostic.rs b/sway-error/src/diagnostic.rs index 504734309e8..ee23bdde377 100644 --- a/sway-error/src/diagnostic.rs +++ b/sway-error/src/diagnostic.rs @@ -5,10 +5,10 @@ use sway_types::{SourceEngine, Span}; /// Provides detailed, rich description of a compile error or warning. #[derive(Debug, Default)] pub struct Diagnostic { - pub(crate) reason: Option, // TODO: Make mandatory once we remove all old-style warnings and errors. - pub(crate) issue: Issue, - pub(crate) hints: Vec, - pub(crate) help: Vec, + pub reason: Option, // TODO: Make mandatory once we remove all old-style warnings and errors. 
+ pub issue: Issue, + pub hints: Vec, + pub help: Vec, } impl Diagnostic { @@ -23,7 +23,8 @@ impl Diagnostic { match self.issue.label_type { LabelType::Error => Level::Error, LabelType::Warning => Level::Warning, - _ => unreachable!("The diagnostic level can be only Error or Warning, and this is enforced via Diagnostics API.") + LabelType::Info => Level::Info, + _ => unreachable!("The diagnostic level can be only Error, Warning, or Info, and this is enforced via Diagnostics API.") } } @@ -122,6 +123,7 @@ impl Diagnostic { #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] pub enum Level { + Info, Warning, #[default] Error, @@ -250,6 +252,12 @@ impl Issue { label: Label::error(source_engine, span, text), } } + + pub fn info(source_engine: &SourceEngine, span: Span, text: String) -> Self { + Self { + label: Label::info(source_engine, span, text), + } + } } impl Default for Issue { @@ -377,6 +385,7 @@ pub enum DiagnosticArea { TypeChecking, SemanticAnalysis, Warnings, + Migrations, } impl DiagnosticArea { @@ -388,6 +397,7 @@ impl DiagnosticArea { Self::TypeChecking => "E3", Self::SemanticAnalysis => "E4", Self::Warnings => "W0", + Self::Migrations => "M0", } } } @@ -424,6 +434,10 @@ impl Code { Self::new(DiagnosticArea::Warnings, number) } + pub fn migrations(number: u16) -> Code { + Self::new(DiagnosticArea::Migrations, number) + } + fn new(area: DiagnosticArea, number: u16) -> Self { debug_assert!( 0 < number && number < 999, diff --git a/sway-error/src/formatting.rs b/sway-error/src/formatting.rs index b4483424d6d..c7616fb342b 100644 --- a/sway-error/src/formatting.rs +++ b/sway-error/src/formatting.rs @@ -1,5 +1,4 @@ -//! This module contains various helper functions for easier formatting and creation of user-friendly -//! diagnostic messages. +//! This module contains various helper functions for easier formatting and creation of user-friendly messages. use std::{ borrow::Cow, @@ -12,10 +11,7 @@ use sway_types::{SourceEngine, SourceId}; /// Returns the file name (with extension) for the provided `source_id`, /// or `None` if the `source_id` is `None` or the file name cannot be /// obtained. -pub(crate) fn get_file_name( - source_engine: &SourceEngine, - source_id: Option<&SourceId>, -) -> Option { +pub fn get_file_name(source_engine: &SourceEngine, source_id: Option<&SourceId>) -> Option { match source_id { Some(source_id) => source_engine.get_file_name(source_id), None => None, @@ -24,7 +20,7 @@ pub(crate) fn get_file_name( /// Returns reading-friendly textual representation for `number` smaller than or equal to 10 /// or its numeric representation if it is greater than 10. -pub(crate) fn number_to_str(number: usize) -> String { +pub fn number_to_str(number: usize) -> String { match number { 0 => "zero".to_string(), 1 => "one".to_string(), @@ -41,7 +37,7 @@ pub(crate) fn number_to_str(number: usize) -> String { } } -pub(crate) enum Enclosing { +pub enum Enclosing { #[allow(dead_code)] None, DoubleQuote, @@ -60,7 +56,7 @@ impl Display for Enclosing { } } -pub(crate) enum Indent { +pub enum Indent { #[allow(dead_code)] None, Single, @@ -87,14 +83,14 @@ impl Display for Indent { /// with the text "and more". 
/// /// E.g.: -/// [a] => "a" -/// [a, b] => "a" and "b" -/// [a, b, c] => "a", "b" and "c" -/// [a, b, c, d] => "a", "b", "c" and one more -/// [a, b, c, d, e] => "a", "b", "c" and two more +/// - \[a\] => "a" +/// - \[a, b\] => "a" and "b" +/// - \[a, b, c\] => "a", "b" and "c" +/// - \[a, b, c, d\] => "a", "b", "c" and one more +/// - \[a, b, c, d, e\] => "a", "b", "c" and two more /// /// Panics if the `sequence` is empty, or `max_items` is zero. -pub(crate) fn sequence_to_str(sequence: &[T], enclosing: Enclosing, max_items: usize) -> String +pub fn sequence_to_str(sequence: &[T], enclosing: Enclosing, max_items: usize) -> String where T: Display, { @@ -107,18 +103,14 @@ where /// with the text "or more". /// /// E.g.: -/// [a] => "a" -/// [a, b] => "a" or "b" -/// [a, b, c] => "a", "b" or "c" -/// [a, b, c, d] => "a", "b", "c" or one more -/// [a, b, c, d, e] => "a", "b", "c" or two more +/// - \[a\] => "a" +/// - \[a, b\] => "a" or "b" +/// - \[a, b, c\] => "a", "b" or "c" +/// - \[a, b, c, d\] => "a", "b", "c" or one more +/// - \[a, b, c, d, e\] => "a", "b", "c" or two more /// /// Panics if the `sequence` is empty, or `max_items` is zero. -pub(crate) fn sequence_to_str_or( - sequence: &[T], - enclosing: Enclosing, - max_items: usize, -) -> String +pub fn sequence_to_str_or(sequence: &[T], enclosing: Enclosing, max_items: usize) -> String where T: Display, { @@ -195,18 +187,18 @@ where /// with the text "and more". /// /// E.g.: -/// * [a] => +/// * \[a\] => /// - a -/// * [a, b] => +/// * \[a, b\] => /// - a /// - b -/// * [a, b, c, d, e] => +/// * \[a, b, c, d, e\] => /// - a /// - b /// - and three more /// /// Panics if the `sequence` is empty, or `max_items` is zero. -pub(crate) fn sequence_to_list(sequence: &[T], indent: Indent, max_items: usize) -> Vec +pub fn sequence_to_list(sequence: &[T], indent: Indent, max_items: usize) -> Vec where T: Display, { @@ -238,7 +230,7 @@ where /// Returns "s" if `count` is different than 1, otherwise empty string. /// Convenient for building simple plural of words. -pub(crate) fn plural_s(count: usize) -> &'static str { +pub fn plural_s(count: usize) -> &'static str { if count == 1 { "" } else { @@ -247,7 +239,7 @@ pub(crate) fn plural_s(count: usize) -> &'static str { } /// Returns "is" if `count` is 1, otherwise "are". -pub(crate) fn is_are(count: usize) -> &'static str { +pub fn is_are(count: usize) -> &'static str { if count == 1 { "is" } else { @@ -256,7 +248,7 @@ pub(crate) fn is_are(count: usize) -> &'static str { } /// Returns `singular` if `count` is 1, otherwise `plural`. -pub(crate) fn singular_plural<'a>(count: usize, singular: &'a str, plural: &'a str) -> &'a str { +pub fn singular_plural<'a>(count: usize, singular: &'a str, plural: &'a str) -> &'a str { if count == 1 { singular } else { @@ -274,7 +266,7 @@ pub(crate) fn singular_plural<'a>(count: usize, singular: &'a str, plural: &'a s /// SomeName -> SomeName /// std::ops::Eq -> Eq /// some_lib::Struct -> Struct -pub(crate) fn call_path_suffix_with_args(call_path: &String) -> Cow { +pub fn call_path_suffix_with_args(call_path: &String) -> Cow { match call_path.rfind(':') { Some(index) if index < call_path.len() - 1 => { Cow::Owned(call_path.split_at(index + 1).1.to_string()) @@ -290,7 +282,7 @@ pub(crate) fn call_path_suffix_with_args(call_path: &String) -> Cow { /// `word` is in singular. /// /// If an article is returned, it is followed by a space, e.g. "a ". 
-pub(crate) fn a_or_an(word: &'static str) -> &'static str { +pub fn a_or_an(word: &'static str) -> &'static str { let is_a = in_definite::is_an(word); match is_a { in_definite::Is::An => "an ", @@ -300,7 +292,7 @@ pub(crate) fn a_or_an(word: &'static str) -> &'static str { } /// Returns `text` with the first character turned into ASCII uppercase. -pub(crate) fn ascii_sentence_case(text: &String) -> Cow { +pub fn ascii_sentence_case(text: &String) -> Cow { if text.is_empty() || text.chars().next().unwrap().is_uppercase() { Cow::Borrowed(text) } else { @@ -325,7 +317,7 @@ pub(crate) fn ascii_sentence_case(text: &String) -> Cow { /// } /// the returned value, with ellipses, will be: /// if x {... -pub(crate) fn first_line(text: &str, with_ellipses: bool) -> Cow { +pub fn first_line(text: &str, with_ellipses: bool) -> Cow { if !text.contains('\n') { Cow::Borrowed(text) } else { @@ -340,11 +332,7 @@ pub(crate) fn first_line(text: &str, with_ellipses: bool) -> Cow { /// at most `max_num_of_suggestions` elements. /// /// The implementation is taken and adapted from the [Clap project](https://github.com/clap-rs/clap/blob/50f7646cf72dd7d4e76d9284d76bdcdaceb7c049/clap_builder/src/parser/features/suggestions.rs#L11). -pub(crate) fn did_you_mean( - v: &str, - possible_values: I, - max_num_of_suggestions: usize, -) -> Vec +pub fn did_you_mean(v: &str, possible_values: I, max_num_of_suggestions: usize) -> Vec where T: AsRef, I: IntoIterator, diff --git a/sway-features/src/lib.rs b/sway-features/src/lib.rs index fdff644741a..953827a30e5 100644 --- a/sway-features/src/lib.rs +++ b/sway-features/src/lib.rs @@ -11,7 +11,7 @@ macro_rules! features { )* ]; - #[derive(Copy, Clone, Debug, ValueEnum)] + #[derive(Copy, Clone, Debug, ValueEnum, PartialEq, Eq, Hash)] #[value(rename_all = "snake")] pub enum Feature { $( @@ -19,6 +19,28 @@ macro_rules! features { )* } + impl Feature { + pub fn name(&self) -> &'static str { + match self { + $( + Feature::[<$name:camel>] => { + stringify!([<$name:snake>]) + }, + )* + } + } + + pub fn url(&self) -> &'static str { + match self { + $( + Feature::[<$name:camel>] => { + $url + }, + )* + } + } + } + impl std::str::FromStr for Feature { type Err = Error; @@ -133,6 +155,8 @@ features! { "https://github.com/FuelLabs/sway/issues/5727", storage_domains = false, "https://github.com/FuelLabs/sway/issues/6701", + references = true, + "https://github.com/FuelLabs/sway/issues/5063", } #[derive(Clone, Debug, Default, Parser)] diff --git a/sway-parse/src/expr/mod.rs b/sway-parse/src/expr/mod.rs index c6c3329946d..aef6aca38ba 100644 --- a/sway-parse/src/expr/mod.rs +++ b/sway-parse/src/expr/mod.rs @@ -603,6 +603,7 @@ fn parse_projection(parser: &mut Parser, ctx: ParseExprCtx) -> ParseResult span, parsed, ty_opt, + is_generated_b256: _, } = lit_int; if ty_opt.is_some() { return Err( diff --git a/sway-parse/src/token.rs b/sway-parse/src/token.rs index bf307f328fc..3436e017d1b 100644 --- a/sway-parse/src/token.rs +++ b/sway-parse/src/token.rs @@ -736,6 +736,7 @@ fn lex_int_lit( span: span(l, index, end_opt.unwrap_or(l.src.len())), parsed: big_uint, ty_opt, + is_generated_b256: false, }); Ok(Some(CommentedTokenTree::Tree(literal.into()))) diff --git a/sway-types/src/span.rs b/sway-types/src/span.rs index 5ea94b3c2a0..21e2b75213f 100644 --- a/sway-types/src/span.rs +++ b/sway-types/src/span.rs @@ -94,6 +94,32 @@ impl Span { }) } + /// Creates an empty [Span], means a span whose [Span::start] and [Span::end] are the same. 
+ /// The resulting empty [Span] will point to the start of the provided `span` and + /// be in the same file. + pub fn empty_at_start(span: &Span) -> Span { + Span::new( + span.src().clone(), + span.start(), + span.start(), + span.source_id().copied(), + ) + .expect("the existing `span` is a valid `Span`") + } + + /// Creates an empty [Span], means a span whose [Span::start] and [Span::end] are the same. + /// The resulting empty [Span] will point to the end of the provided `span` and + /// be in the same file. + pub fn empty_at_end(span: &Span) -> Span { + Span::new( + span.src().clone(), + span.end(), + span.end(), + span.source_id().copied(), + ) + .expect("the existing `span` is a valid `Span`") + } + pub fn from_string(source: String) -> Span { let len = source.len(); Span::new(Arc::from(source), 0, len, None).unwrap() diff --git a/swayfmt/src/utils/language/literal.rs b/swayfmt/src/utils/language/literal.rs index 8fecedae9eb..a2530204d15 100644 --- a/swayfmt/src/utils/language/literal.rs +++ b/swayfmt/src/utils/language/literal.rs @@ -32,18 +32,31 @@ impl Format for Literal { // and use the actual spans to obtain the strings. if lit_int.span.is_empty() { - write!(formatted_code, "{}", lit_int.parsed)?; + // Format `u256` and `b256` as hex literals. + if lit_int.is_generated_b256 + || matches!(&lit_int.ty_opt, Some((LitIntType::U256, _))) + { + write!(formatted_code, "0x{:064x}", lit_int.parsed)?; + } else { + write!(formatted_code, "{}", lit_int.parsed)?; + } if let Some((int_type, _)) = &lit_int.ty_opt { let int_type = match int_type { - LitIntType::U8 => "u8", - LitIntType::U16 => "u16", - LitIntType::U32 => "u32", - LitIntType::U64 => "u64", - LitIntType::U256 => "u256", - LitIntType::I8 => "i8", - LitIntType::I16 => "i16", - LitIntType::I32 => "i32", - LitIntType::I64 => "i64", + LitIntType::U8 => "_u8", + LitIntType::U16 => "_u16", + LitIntType::U32 => "_u32", + LitIntType::U64 => "_u64", + LitIntType::U256 => { + if lit_int.is_generated_b256 { + "" + } else { + "_u256" + } + } + LitIntType::I8 => "_i8", + LitIntType::I16 => "_i16", + LitIntType::I32 => "_i32", + LitIntType::I64 => "_i64", }; write!(formatted_code, "{}", int_type)?; } diff --git a/test/src/e2e_vm_tests/test_programs/should_fail/invalid_cfg_arg/stdout.snap b/test/src/e2e_vm_tests/test_programs/should_fail/invalid_cfg_arg/stdout.snap index 84c0fa58caf..993f90f8407 100644 --- a/test/src/e2e_vm_tests/test_programs/should_fail/invalid_cfg_arg/stdout.snap +++ b/test/src/e2e_vm_tests/test_programs/should_fail/invalid_cfg_arg/stdout.snap @@ -11,7 +11,7 @@ warning | 1 | predicate; 2 | #[cfg(c)] a - | --- Unexpected attribute value: "c" for attribute: "cfg" expected value "target" or "program_type" or "experimental_new_encoding" or "experimental_storage_domains" + | --- Unexpected attribute value: "c" for attribute: "cfg" expected value "target" or "program_type" or "experimental_new_encoding" or "experimental_storage_domains" or "experimental_references" | ____ diff --git a/test/src/sdk-harness/test_projects/tx_fields/mod.rs b/test/src/sdk-harness/test_projects/tx_fields/mod.rs index f7e7308737d..b732ccab1f0 100644 --- a/test/src/sdk-harness/test_projects/tx_fields/mod.rs +++ b/test/src/sdk-harness/test_projects/tx_fields/mod.rs @@ -708,6 +708,208 @@ mod tx { assert_eq!(predicate_balance, 0); } } + + #[tokio::test] + async fn can_get_tx_blob() { + // Prepare wallet and provider + let mut wallet = WalletUnlocked::new_random(None); + let num_coins = 100; + let coins = setup_single_asset_coins( + wallet.address(), + 
+            AssetId::zeroed(),
+            num_coins,
+            DEFAULT_COIN_AMOUNT,
+        );
+
+        let provider = setup_test_provider(coins, vec![], None, None)
+            .await
+            .unwrap();
+        wallet.set_provider(provider.clone());
+
+        // Get the predicate
+        let predicate_data = TestTxTypePredicateEncoder::default()
+            .encode_data(SwayTransaction::Blob)
+            .unwrap();
+        let predicate: Predicate = Predicate::load_from(TX_TYPE_PREDICATE_BYTECODE_PATH)
+            .unwrap()
+            .with_provider(provider.clone())
+            .with_data(predicate_data);
+        let predicate_coin_amount = 100;
+
+        // Predicate has no funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance, 0);
+
+        // Transfer enough funds to the predicate
+        wallet
+            .transfer(
+                predicate.address(),
+                predicate_coin_amount,
+                *provider.base_asset_id(),
+                TxPolicies::default(),
+            )
+            .await
+            .unwrap();
+
+        // Predicate has funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance as usize, predicate_coin_amount as usize);
+
+        // Prepare blobs
+        let max_words_per_blob = 10_000;
+        let blobs = Contract::load_from(TX_CONTRACT_BYTECODE_PATH, LoadConfiguration::default())
+            .unwrap()
+            .convert_to_loader(max_words_per_blob)
+            .unwrap()
+            .blobs()
+            .to_vec();
+
+        let blob = blobs[0].clone();
+        // Inputs for predicate
+        let predicate_input = predicate
+            .get_asset_inputs_for_amount(*provider.base_asset_id(), 1, None)
+            .await
+            .unwrap();
+
+        // Outputs for predicate
+        let predicate_output =
+            wallet.get_asset_outputs_for_amount(&wallet.address(), *provider.base_asset_id(), 1);
+
+        let mut builder = BlobTransactionBuilder::default().with_blob(blob);
+
+        // Append the predicate to the transaction
+        builder.inputs.push(predicate_input.get(0).unwrap().clone());
+        builder
+            .outputs
+            .push(predicate_output.get(0).unwrap().clone());
+
+        wallet.adjust_for_fee(&mut builder, 0).await.unwrap();
+        wallet.add_witnesses(&mut builder).unwrap();
+
+        let tx = builder.build(&provider).await.unwrap();
+        provider
+            .send_transaction_and_await_commit(tx)
+            .await
+            .unwrap()
+            .check(None)
+            .unwrap();
+
+        // The predicate has spent its funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance, 0);
+    }
+
+    #[tokio::test]
+    async fn can_get_witness_in_tx_blob() {
+        // Prepare wallet and provider
+        let mut wallet = WalletUnlocked::new_random(None);
+        let num_coins = 100;
+        let coins = setup_single_asset_coins(
+            wallet.address(),
+            AssetId::zeroed(),
+            num_coins,
+            DEFAULT_COIN_AMOUNT,
+        );
+
+        let provider = setup_test_provider(coins, vec![], None, None)
+            .await
+            .unwrap();
+        wallet.set_provider(provider.clone());
+
+        // Prepare blobs
+        let max_words_per_blob = 10_000;
+        let blobs = Contract::load_from(TX_CONTRACT_BYTECODE_PATH, LoadConfiguration::default())
+            .unwrap()
+            .convert_to_loader(max_words_per_blob)
+            .unwrap()
+            .blobs()
+            .to_vec();
+
+        let blob = blobs[0].clone();
+
+        let mut builder = BlobTransactionBuilder::default().with_blob(blob.clone());
+
+        // Prepare the predicate
+        let witnesses = builder.witnesses().clone();
+        let predicate_data = TestTxWitnessPredicateEncoder::new(EncoderConfig {
+            max_depth: 10,
+            max_tokens: 100_000,
+        })
+        .encode_data( // Blob and witnesses are just wrappers for Vec, and function the same in case of Transaction::Blob, so using blobs here instead of witnesses
+            0,
+            blobs.len() as u64 + 1,
+            blob.len() as u64,
+            blob.bytes()[0..64].try_into().unwrap(),
+        )
+        .unwrap();
+        let predicate: Predicate = Predicate::load_from(TX_WITNESS_PREDICATE_BYTECODE_PATH)
+            .unwrap()
+            .with_provider(provider.clone())
+            .with_data(predicate_data);
+        let predicate_coin_amount = 100;
+
+        // Predicate has no funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance, 0);
+        wallet
+            .transfer(
+                predicate.address(),
+                predicate_coin_amount,
+                *provider.base_asset_id(),
+                TxPolicies::default(),
+            )
+            .await
+            .unwrap();
+
+        // Predicate has funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance as usize, predicate_coin_amount as usize);
+
+        // Inputs for predicate
+        let predicate_input = predicate
+            .get_asset_inputs_for_amount(*provider.base_asset_id(), 1, None)
+            .await
+            .unwrap();
+
+        // Outputs for predicate
+        let predicate_output =
+            wallet.get_asset_outputs_for_amount(&wallet.address(), *provider.base_asset_id(), 1);
+
+        // Append the predicate to the transaction
+        builder.inputs.push(predicate_input.get(0).unwrap().clone());
+        builder
+            .outputs
+            .push(predicate_output.get(0).unwrap().clone());
+
+        wallet.add_witnesses(&mut builder).unwrap();
+        wallet.adjust_for_fee(&mut builder, 0).await.unwrap();
+
+        let tx = builder.build(provider.clone()).await.unwrap();
+
+        provider.send_transaction(tx).await.unwrap();
+
+        // The predicate has spent its funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance, 0);
+    }
 }
 
 mod inputs {
@@ -935,6 +1137,103 @@ mod inputs {
         }
     }
 
+    #[tokio::test]
+    async fn can_get_input_count_in_tx_blob() {
+        // Prepare wallet and provider
+        let mut wallet = WalletUnlocked::new_random(None);
+        let num_coins = 100;
+        let coins = setup_single_asset_coins(
+            wallet.address(),
+            AssetId::zeroed(),
+            num_coins,
+            DEFAULT_COIN_AMOUNT,
+        );
+        let provider = setup_test_provider(coins, vec![], None, None)
+            .await
+            .unwrap();
+        wallet.set_provider(provider.clone());
+
+        // Prepare blobs
+        let max_words_per_blob = 10_000;
+        let blobs =
+            Contract::load_from(TX_CONTRACT_BYTECODE_PATH, LoadConfiguration::default())
+                .unwrap()
+                .convert_to_loader(max_words_per_blob)
+                .unwrap()
+                .blobs()
+                .to_vec();
+
+        let blob = blobs[0].clone();
+
+        let mut builder = BlobTransactionBuilder::default().with_blob(blob);
+
+        // Prepare the predicate
+        let predicate_data = TestTxInputCountPredicateEncoder::default()
+            .encode_data(builder.inputs().len() as u16 + 1u16) // Add one for this predicate
+            .unwrap();
+        let predicate: Predicate = Predicate::load_from(TX_INPUT_COUNT_PREDICATE_BYTECODE_PATH)
+            .unwrap()
+            .with_provider(provider.clone())
+            .with_data(predicate_data);
+        let predicate_coin_amount = 100;
+
+        // Predicate has no funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance, 0);
+        wallet
+            .transfer(
+                predicate.address(),
+                predicate_coin_amount,
+                *provider.base_asset_id(),
+                TxPolicies::default(),
+            )
+            .await
+            .unwrap();
+
+        // Predicate has funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance as usize, predicate_coin_amount as usize);
+
+        // Inputs for predicate
+        let predicate_input = predicate
+            .get_asset_inputs_for_amount(*provider.base_asset_id(), 1, None)
+            .await
+            .unwrap();
+
+        // Outputs for predicate
+        let predicate_output = wallet.get_asset_outputs_for_amount(
+            &wallet.address(),
+            *provider.base_asset_id(),
+            1,
+        );
+
+        // Append the predicate to the transaction
+        builder.inputs.push(predicate_input.get(0).unwrap().clone());
+        builder
+            .outputs
+            .push(predicate_output.get(0).unwrap().clone());
+
+        wallet.add_witnesses(&mut builder).unwrap();
+        wallet.adjust_for_fee(&mut builder, 0).await.unwrap();
+
+        // Submit the transaction
+        let tx = builder.build(&provider).await.unwrap();
+        provider.send_transaction(tx).await.unwrap();
+
+        // The predicate has spent its funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance, 0);
+    }
+
     mod message {
         use fuels::types::{coin_type::CoinType, transaction_builders::TransactionBuilder};
@@ -1688,6 +1987,104 @@ mod outputs {
         }
     }
 
+    #[tokio::test]
+    async fn can_get_output_count_in_tx_blob() {
+        // Prepare wallet and provider
+        let mut wallet = WalletUnlocked::new_random(None);
+        let num_coins = 100;
+        let coins = setup_single_asset_coins(
+            wallet.address(),
+            AssetId::zeroed(),
+            num_coins,
+            DEFAULT_COIN_AMOUNT,
+        );
+        let provider = setup_test_provider(coins, vec![], None, None)
+            .await
+            .unwrap();
+        wallet.set_provider(provider.clone());
+
+        // Prepare blobs
+        let max_words_per_blob = 10_000;
+        let blobs =
+            Contract::load_from(TX_CONTRACT_BYTECODE_PATH, LoadConfiguration::default())
+                .unwrap()
+                .convert_to_loader(max_words_per_blob)
+                .unwrap()
+                .blobs()
+                .to_vec();
+
+        let blob = blobs[0].clone();
+
+        let mut builder = BlobTransactionBuilder::default().with_blob(blob);
+
+        // Prepare the predicate
+        let predicate_data = TestTxOutputCountPredicateEncoder::default()
+            .encode_data(builder.inputs().len() as u16 + 1u16) // Add one for this predicate
+            .unwrap();
+        let predicate: Predicate =
+            Predicate::load_from(TX_OUTPUT_COUNT_PREDICATE_BYTECODE_PATH)
+                .unwrap()
+                .with_provider(provider.clone())
+                .with_data(predicate_data);
+        let predicate_coin_amount = 100;
+
+        // Predicate has no funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance, 0);
+        wallet
+            .transfer(
+                predicate.address(),
+                predicate_coin_amount,
+                *provider.base_asset_id(),
+                TxPolicies::default(),
+            )
+            .await
+            .unwrap();
+
+        // Predicate has funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance as usize, predicate_coin_amount as usize);
+
+        // Inputs for predicate
+        let predicate_input = predicate
+            .get_asset_inputs_for_amount(*provider.base_asset_id(), 1, None)
+            .await
+            .unwrap();
+
+        // Outputs for predicate
+        let predicate_output = wallet.get_asset_outputs_for_amount(
+            &wallet.address(),
+            *provider.base_asset_id(),
+            1,
+        );
+
+        // Append the predicate to the transaction
+        builder.inputs.push(predicate_input.get(0).unwrap().clone());
+        builder
+            .outputs
+            .push(predicate_output.get(0).unwrap().clone());
+
+        wallet.add_witnesses(&mut builder).unwrap();
+        wallet.adjust_for_fee(&mut builder, 0).await.unwrap();
+
+        // Submit the transaction
+        let tx = builder.build(&provider).await.unwrap();
+        provider.send_transaction(tx).await.unwrap();
+
+        // The predicate has spent its funds
+        let predicate_balance = predicate
+            .get_asset_balance(&provider.base_asset_id())
+            .await
+            .unwrap();
+        assert_eq!(predicate_balance, 0);
+    }
+
     #[tokio::test]
     async fn can_get_tx_output_change_details() {
         // Prepare predicate
@@ -1708,7 +2105,6 @@ mod outputs {
             .unwrap();
         let instance = TxOutputContract::new(contract_id.clone(), wallet.clone());
 
-        // Send tokens to the contract
         let _ = wallet
             .force_transfer_to_contract(&contract_id, 10, asset_id, TxPolicies::default())
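The two `Span` helpers added in `sway-types/src/span.rs` above both produce zero-length spans (start equal to end) that stay in the same source file as the input span, differing only in whether they anchor at its start or its end. A minimal usage sketch, not part of the patch, assuming the usual `sway_types::Span` re-export; the source string and the assertions are illustrative:

use sway_types::Span;

fn empty_span_helpers_sketch() {
    // Span::from_string builds a span covering the whole (hypothetical) source string.
    let whole = Span::from_string("let x = 42;".to_string());

    // Zero-length spans anchored at the start and at the end of `whole`.
    let at_start = Span::empty_at_start(&whole);
    let at_end = Span::empty_at_end(&whole);

    assert!(at_start.is_empty() && at_end.is_empty());
    assert_eq!(at_start.start(), whole.start());
    assert_eq!(at_end.end(), whole.end());
}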