diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..3550a30 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use flake diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.yml b/.github/ISSUE_TEMPLATE/BUG_REPORT.yml new file mode 100644 index 0000000..ecd0b1d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.yml @@ -0,0 +1,49 @@ +name: Bug Report +description: Report unexpected behavior or crashes +title: "[BUG] " +labels: ["bug-report", "triage"] +body: + - type: checkboxes + attributes: + label: Pre-submission Checklist + options: + - label: I've checked existing issues and pull requests + required: true + - label: I've read the [Code of Conduct](https://github.com/FlakySL/translatable/blob/main/CODE_OF_CONDUCT.md) + required: true + - label: Are you using the latest translatable version? + required: true + + - type: dropdown + attributes: + label: Component + options: + - Core library + - Macros crate + - Documentation + validations: + required: true + + - type: input + attributes: + label: Rust Version + placeholder: Output of `rustc --version` + validations: + required: true + + - type: textarea + attributes: + label: Reproduction Steps + description: Step-by-step instructions to reproduce the issue + validations: + required: true + + - type: textarea + attributes: + label: Expected vs Actual Behavior + description: What you expected to happen vs what actually happened + + - type: textarea + attributes: + label: Additional Context + description: Logs, screenshots, or code samples diff --git a/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.yml b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.yml new file mode 100644 index 0000000..8f32766 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.yml @@ -0,0 +1,37 @@ +name: Feature Request +description: Suggest an idea for Translatable +title: "[FEATURE] " +labels: ["feature-request", "triage"] +body: + - type: checkboxes + attributes: + label: Pre-submission Checklist + options: + - label: I've checked existing issues and pull requests + required: true + - label: I've read the [Code of Conduct](https://github.com/FlakySL/translatable/blob/main/CODE_OF_CONDUCT.md) + required: true + + - type: textarea + attributes: + label: Problem Description + description: What problem are you trying to solve? + validations: + required: true + + - type: textarea + attributes: + label: Proposed Solution + description: How should Translatable address this problem? + validations: + required: true + + - type: textarea + attributes: + label: Alternatives Considered + description: Other ways this could potentially be solved + + - type: textarea + attributes: + label: Additional Context + description: Potential disadvantages, edge cases, or examples diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..3c25e40 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,26 @@ +blank_issues_enabled: false + +contact_links: + - name: "💬 Community Help (Discord)" + url: https://discord.gg/AJWFyps23a + about: | + For general questions, discussion, or brainstorming: + - Get real-time help from maintainers + - Discuss potential features + - Chat with other contributors + *Please check existing issues first!* + + - name: "⚖️ Code of Conduct" + url: https://github.com/FlakySL/translatable/blob/main/CODE_OF_CONDUCT.md + about: | + All community interactions must follow the project + code of conduct, which is based on the contributor covenant. 
+ *Required reading before participating* + + - name: "🚨 Moderation Contact" + url: mailto:moderation@flaky.es + about: | + For urgent moderation issues: + - Code of Conduct violations + - Community safety concerns + - Escalation requests diff --git a/.github/SUPPORT.md b/.github/SUPPORT.md new file mode 100644 index 0000000..d195d31 --- /dev/null +++ b/.github/SUPPORT.md @@ -0,0 +1,3 @@ +# Useful resources + +- [Discord server](https://discord.gg/AJWFyps23a) diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..201a8df --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,7 @@ +version: 2 +updates: + - package-ecosystem: "cargo" + directory: "/" + schedule: + interval: "weekly" + diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..9c1d668 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,15 @@ + +## Pre-submission Checklist + +- [ ] I've checked existing issues and pull requests +- [ ] I've read the [Code of Conduct](https://github.com/FlakySL/translatable/blob/main/CODE_OF_CONDUCT.md) +- [ ] I've [implemented tests](https://github.com/FlakySL/translatable/blob/main/translatable/tests/README.md) for my changes +- [ ] I've listed all my changes in the `Changes` section + +## Changes + +- + +## Linked Issues + +- fixes # diff --git a/.github/workflows/overall-coverage.yml b/.github/workflows/overall-coverage.yml new file mode 100644 index 0000000..3251bf5 --- /dev/null +++ b/.github/workflows/overall-coverage.yml @@ -0,0 +1,56 @@ +name: tests + +on: + push: + branches: [main] + pull_request: + +jobs: + coverage: + runs-on: ubuntu-latest + permissions: + contents: read + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set-up Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + components: llvm-tools-preview + + - name: Install cargo-binstall@latest + uses: cargo-bins/cargo-binstall@main + + - name: Install cargo-llvm-cov + run: | + cargo binstall cargo-llvm-cov + + - name: Generate coverage and get percentage + id: coverage + run: | + set -o pipefail + make cov export-lcov=1 | tee output.log + coverage=$(grep 'Total Coverage: ' output.log | awk '{print $3}') + echo "coverage_percentage=${coverage%\%}" >> $GITHUB_OUTPUT + echo "Detected coverage: ${coverage}" + + - name: Fail if overall coverage is below 80% + run: | + if (( $(echo "${{ steps.coverage.outputs.coverage_percentage }} < 80" | bc -l) )); then + echo "❌ Coverage is below 80% (${{ steps.coverage.outputs.coverage_percentage }}%)" + exit 1 + else + echo "✅ Coverage meets requirement (${{ steps.coverage.outputs.coverage_percentage }}%)" + fi + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v5 + with: + file: coverage.lcov + fail_ci_if_error: true + token: ${{ secrets.CODECOV_TOKEN }} + slug: FlakySL/translatable + verbose: true diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..7eab478 --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,75 @@ +name: publish to crates.io + +on: + workflow_run: + workflows: ["tests"] + types: + - completed + +jobs: + publish: + runs-on: ubuntu-latest + permissions: + contents: write + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set-up Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + + - name: Set-up semver + run: | + curl -fsSL https://deb.nodesource.com/setup_lts.x | sudo 
-E bash - + sudo apt install -y nodejs + npm install -g semver + + - name: Get crate version + id: crate_version + run: | + VERSION=$(cargo metadata --no-deps --format-version 1 | jq -r '.packages[0].version') + echo "version=$VERSION" >> $GITHUB_OUTPUT + + - name: Get latest git tag + id: latest_tag + run: | + TAG=$(git describe --tags --abbrev=0 || echo "") + echo "tag=$TAG" >> $GITHUB_OUTPUT + + - name: Compare versions + id: should_publish + run: | + VERSION="${{ steps.crate_version.outputs.version }}" + TAG="${{ steps.latest_tag.outputs.tag }}" + + if [ -z "$TAG" ]; then + TAG="0.0.0" + fi + + if semver -r "> $TAG" "$VERSION"; then + echo "publish=true" >> $GITHUB_OUTPUT + else + echo "publish=false" >> $GITHUB_OUTPUT + fi + + - name: Publish to crates.io + if: steps.should_publish.outputs.publish == 'true' + run: | + cargo publish -p translatable_shared + cargo publish -p translatable_proc + cargo publish -p translatable + env: + CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} + + - name: Crate and push new git tag + if: steps.should_publish.outputs.publish == 'true' + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git tag ${{ steps.crate_version.outputs.version }} + git push origin ${{ steps.crate_version.outputs.version }} diff --git a/.gitignore b/.gitignore index ea8c4bf..e594a4d 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,5 @@ /target +.bacon-locations +.direnv + +**/*.lcov diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..b371ad6 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Translatable Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +education, socio-economic status, nationality, personal appearance, race, +religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. 
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at [`moderation@flaky.es`](mailto:moderation@flaky.es).
+All complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 1.4, available at [contributor covenant].
+
+[homepage]: https://www.contributor-covenant.org
+[contributor covenant]: https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..7331b59
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,56 @@
+# Contributing to Translatable
+
+In Translatable, we welcome contributions from everyone, including bug reports,
+pull requests, and feedback. This document serves as guidance if you are
+considering submitting any of the above.
+
+## Submitting Bug Reports and Feature Requests
+
+To submit a bug report or feature request, you can open an issue in this
+repository: [`FlakySL/translatable`](https://github.com/FlakySL/translatable).
+
+When reporting a bug or requesting help, please include sufficient details
+to allow others to reproduce the behavior you're encountering. For guidance on
+how to approach this, read about [How to Create a Minimal, Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example).
+
+When making a feature request, please clearly explain:
+
+1. The problem you want to solve
+2. How Translatable could help address this problem
+3. Any potential alternatives
+4. Possible disadvantages of your proposal
+
+Before submitting, please verify that no existing issue addresses your specific
+problem/request. If you want to elaborate on a problem or discuss it further,
+you can use our [Discord channel](https://discord.gg/AJWFyps23a) at Flaky.
+
+We recommend using the issue templates provided in this repository.
+
+## Making Pull Requests
+
+Before working on a new feature, please open an issue so it can be discussed
+first. We appreciate the time and effort our contributors put in and we don't
+want it to go to waste, so we'd rather talk a feature over before you start
+implementing it.
+
+When submitting a pull request, make sure the code you added is tested and
+documented; if it isn't, you will be asked to document and test it before it
+can be merged.
+
+To add tests, please refer to the [testing documentation] in the tests folder
+of the `translatable` crate.
+
+## Running Tests and Compiling the Project
+
+This project uses GNU [make](https://www.gnu.org/software/make/).
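+A typical local invocation looks like the sketch below (an illustration, not
+project policy; it assumes GNU make, a recent Rust toolchain, and
+`cargo-llvm-cov` for the coverage target are installed):
+
+```bash
+# Run the test suite of the main crate
+make test
+
+# Print a coverage summary and write an LCOV report to coverage.lcov
+make cov export-lcov=1
+```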
+ +- Run tests using `make test`. +- Compile the project using `make build`. + +## Code of Conduct + +The Translatable community follows the [Rust Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct). +For moderation issues or escalation, please contact Esteve or Luis at +[moderation@flaky.es](mailto:moderation@flaky.es) rather than the Rust +moderation team. + +[testing documentation]: ./translatable/tests/README.md diff --git a/Cargo.lock b/Cargo.lock index 017ecf1..19d1486 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,6 +8,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" +[[package]] +name = "glob" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" + [[package]] name = "hashbrown" version = "0.15.2" @@ -22,14 +28,20 @@ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "indexmap" -version = "2.7.1" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" +checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058" dependencies = [ "equivalent", "hashbrown", ] +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + [[package]] name = "memchr" version = "2.7.4" @@ -38,48 +50,66 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "proc-macro2" -version = "1.0.93" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] [[package]] name = "rustversion" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" +checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "serde" -version = "1.0.218" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.218" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", "syn", 
] +[[package]] +name = "serde_json" +version = "1.0.140" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + [[package]] name = "serde_spanned" version = "0.6.8" @@ -113,29 +143,44 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.98" +version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "target-triple" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ac9aa371f599d22256307c24a9d748c041e548cbf599f35d890f9d365361790" + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + [[package]] name = "thiserror" -version = "2.0.11" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "2.0.11" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", @@ -144,61 +189,227 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" +checksum = "900f6c86a685850b1bc9f6223b20125115ee3f31e01207d81655bbcc0aea9231" dependencies = [ "serde", "serde_spanned", - "toml_datetime", - "toml_edit", + "toml_datetime 0.6.9", + "toml_edit 0.22.26", ] [[package]] name = "toml_datetime" -version = "0.6.8" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.22.24" +version = "0.22.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" +checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e" dependencies = [ "indexmap", "serde", "serde_spanned", - "toml_datetime", + "toml_datetime 0.6.9", + "toml_write", "winnow", ] +[[package]] +name = "toml_edit" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f23a5f4511b296579b6c83e437fe85fa7ece22e3ec44e45ddb975bcf57c3dd" +dependencies = [ + "indexmap", + "toml_datetime 0.7.0", + "toml_parser", + "toml_writer", + 
"winnow", +] + +[[package]] +name = "toml_parser" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97200572db069e74c512a14117b296ba0a80a30123fbbb5aa1f4a348f639ca30" +dependencies = [ + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076" + +[[package]] +name = "toml_writer" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64" + [[package]] name = "translatable" -version = "0.1.0" +version = "1.0.0" dependencies = [ "quote", - "serde", + "thiserror", + "toml_edit 0.23.1", + "translatable_proc", + "translatable_shared", + "trybuild", +] + +[[package]] +name = "translatable_proc" +version = "1.0.0" +dependencies = [ + "proc-macro2", + "quote", "strum", "syn", "thiserror", + "toml_edit 0.23.1", + "translatable_shared", +] + +[[package]] +name = "translatable_shared" +version = "1.0.0" +dependencies = [ + "proc-macro2", + "quote", + "strum", + "syn", + "thiserror", + "toml_edit 0.23.1", +] + +[[package]] +name = "trybuild" +version = "1.0.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c9bf9513a2f4aeef5fdac8677d7d349c79fdbcc03b9c86da6e9d254f1e43be2" +dependencies = [ + "glob", + "serde", + "serde_derive", + "serde_json", + "target-triple", + "termcolor", "toml", ] [[package]] name = "unicode-ident" -version = "1.0.17" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" 
+version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.7.3" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" +checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" dependencies = [ "memchr", ] diff --git a/Cargo.toml b/Cargo.toml index 9444e29..bfc9e5b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,15 +1,3 @@ -[package] -name = "translatable" -version = "0.1.0" -edition = "2024" - -[lib] -proc-macro = true - -[dependencies] -quote = "1.0.38" -serde = { version = "1.0.218", features = ["derive"] } -strum = { version = "0.27.1", features = ["derive"] } -syn = { version = "2.0.98", features = ["full"] } -thiserror = "2.0.11" -toml = "0.8.20" +[workspace] +resolver = "2" +members = ["translatable", "translatable_proc", "translatable_shared"] diff --git a/GOVERNANCE.md b/GOVERNANCE.md new file mode 100644 index 0000000..80956b3 --- /dev/null +++ b/GOVERNANCE.md @@ -0,0 +1,9 @@ +# Governance and Moderation + +This project is mainly maintained by the authors + +- Esteve Autet `esteve.autet@flaky.es` +- Chiko `luis.degnan@flaky.es` + +There is no hierarchy established (yet) but this might be subject to +change soon. For any inquiries you can contact any of the emails listed above. diff --git a/GPLv3-LICENSE b/GPLv3-LICENSE new file mode 100644 index 0000000..281d399 --- /dev/null +++ b/GPLv3-LICENSE @@ -0,0 +1,619 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..0d012be --- /dev/null +++ b/LICENSE @@ -0,0 +1,28 @@ +This software is dual-licensed: + +1. GNU GENERAL PUBLIC LICENSE Version 3 (GPLv3) + You can redistribute and/or modify this software under the terms + of the GPLv3 as published by the Free Software Foundation. + + You should have received a copy of the GNU General Public License + along with this program (see `GPLv3-LICENSE`). If not, + see . + +2. Commercial License + Closed-source, proprietary, or commercial use of this software + requires a commercial license. + + If you want to use this software in a closed-source or proprietary + project, or if your project is not licensed under the GPLv3, please contact: + + Flaky, Sl. + licensing@flaky.es + + for a custom license for your use case. + +--- + +Copyright (c) 2025-2030 Flaky, Sl. + +This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty +of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..1250935 --- /dev/null +++ b/Makefile @@ -0,0 +1,16 @@ +LCOV_FILE ?= coverage.lcov + +test: + cargo test -p translatable -- --nocapture --color=always --test-threads=1 + +cov: +ifdef export-lcov + @echo "Generating LCOV report..." + @coverage=$$(cargo llvm-cov -- --nocapture --test-threads=1 --color=never | grep '^TOTAL' | awk '{print $$10}'); \ + cargo llvm-cov --lcov -- --nocapture --test-threads=1 --color=always > $(LCOV_FILE); \ + echo "LCOV report saved to $(LCOV_FILE)"; \ + echo "Total Coverage: $$coverage%" +else + @coverage=$$(cargo llvm-cov -- --nocapture --test-threads=1 --color=never | grep '^TOTAL' | awk '{print $$10}'); \ + echo "Total Coverage: $$coverage%" +endif diff --git a/README-MACROS.md b/README-MACROS.md new file mode 100644 index 0000000..8981d82 --- /dev/null +++ b/README-MACROS.md @@ -0,0 +1,14 @@ +# Translatable Macros + +This crate exists solely to provide macros for the [Translatable] +crate. Using this crate without the main Translatable crate is +**not supported**, and any support requests or bug reports regarding +standalone usage will be redirected to the +[Translatable] crate. + +## Licensing + +This crate shares the same licensing terms as [Translatable], +as these crates are essentially part of the same ecosystem. + +[translatable]: https://crates.io/crates/translatable diff --git a/README-SHARED.md b/README-SHARED.md new file mode 100644 index 0000000..334354a --- /dev/null +++ b/README-SHARED.md @@ -0,0 +1,14 @@ +# Translatable Shared + +This crate exists solely to provide utilities for the [Translatable] +crate. Using this crate without the main Translatable crate is +**not supported**, and any support requests or bug reports regarding +standalone usage will be redirected to the +[Translatable] crate. + +## Licensing + +This crate shares the same licensing terms as [Translatable], +as these crates are essentially part of the same ecosystem. 
+
+[translatable]: https://crates.io/crates/translatable
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..a06b12a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,222 @@
+![translatable-readme](https://github.com/user-attachments/assets/4994514f-bbcc-48ea-a086-32e684adcd3a)
+
+[![Crates.io](https://badges.ws/crates/v/translatable)](https://crates.io/crates/translatable)
+[![License](https://badges.ws/crates/l/translatable)](https://docs.rs/translatable)
+[![Docs.rs](https://badges.ws/crates/docs/translatable)](https://docs.rs/translatable)
+[![Downloads](https://badges.ws/crates/dt/translatable)](https://docs.rs/translatable)
+[![Codecov](https://img.shields.io/codecov/c/github/FlakySL/translatable)](https://app.codecov.io/gh/FlakySL/translatable)
+![tests](https://github.com/FlakySL/translatable/actions/workflows/overall-coverage.yml/badge.svg)
+[![discord](https://badges.ws/discord/online/1344769456731197450)](https://discord.gg/AJWFyps23a)
+
+A robust internationalization solution for Rust featuring compile-time validation, ISO 639-1 compliance, and TOML-based translation management.
+
+**This library prioritizes ergonomics over raw performance.**
+Our goal is not to be *blazingly fast* but to provide the most user-friendly experience for
+implementing translations, whether you're a first-time user or an experienced developer. If you
+require maximum performance, consider alternative libraries, a custom implementation, or even
+hard-coded values on the stack.
+
+## Table of Contents 📖
+
+- [Features](#features-)
+- [Use Cases](#use-cases-)
+- [Installation](#installation-)
+- [Usage](#usage-%EF%B8%8F)
+- [Example implementation](#example-implementation-)
+- [Licensing](#license-)
+
+## Features 🚀
+
+- **ISO 639-1 Standard**: Full support for 180+ language codes/names.
+- **Adaptive optimizations**: The generated code is optimized depending on how dynamic the call is.
+- **Translation templating**: Replace templates in your translations out of the box.
+- **Compile-time validation**: Errors for static parameters are reported at compile time and surface in *rust-analyzer*.
+- **Custom file structure**: Translatable uses a walkdir implementation, so you can organize your translations folder however you like.
+- **Conflict resolution**: Define translation processing rules with a `translatable.toml` file in the root directory.
+
+## Use Cases 🔍
+
+You may use translatable to write responses in back-end applications. Here is
+an example of how you can integrate it with [actix-web](https://actix.rs/).
+
+```rust
+use actix_web::{HttpRequest, HttpResponse, Responder, get};
+use translatable::{translation, Language};
+
+#[get("/echo")]
+pub async fn get_echo(req: HttpRequest) -> impl Responder {
+    let language = req
+        .headers()
+        .get("Accept-Language")
+        .and_then(|v| v.to_str().ok())
+        .and_then(|v| v.parse::<Language>().ok())
+        .unwrap_or(Language::EN);
+
+    HttpResponse::Ok()
+        .body(
+            match translation!(language, static routes::responses::get_echo) {
+                Ok(t) => t,
+                Err(err) => format!("Translation error: {err}")
+            }
+        )
+}
+```
+
+Or use it for front-end with [Leptos](https://leptos.dev/).
+
+```rust
+use leptos::prelude::*;
+use translatable::{translation, Language};
+
+#[component]
+pub fn Greeting(language: Language) -> impl IntoView {
+    let message = match translation!(language, static pages::misc::greeting) {
+        Ok(t) => t,
+        Err(err) => {
+            log::error!("Translation error {err:#}");
+            "Translation error.".into()
+        }
+    };
+
+    view! {
+        <p>
+            { message }
+        </p>
+    }
+}
+```
+
+## Installation 📦
+
+Add the following to your `Cargo.toml` under the `dependencies` section:
+
+```toml
+translatable = "1.0.0"
+```
+
+## Usage 🛠️
+
+### Configuration
+
+You can configure how translations are loaded by creating a `translatable.toml` file in the root
+of the project with the following configuration values.
+
+| Key         | Value type                             | Description                                                                                                                                      |
+|-------------|----------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|
+| `path`      | `String`                               | Where the translation files are stored. Non-translation files in that folder will cause errors.                                                 |
+| `seek_mode` | `"alphabetical"` \| `"unalphabetical"` | The order, by file name, in which the translation files are searched.                                                                           |
+| `overlap`   | `"overwrite"` \| `"ignore"`            | When a path is found in more than one file, `"overwrite"` keeps searching and takes the last match, while `"ignore"` preserves the first match. |
+
+`seek_mode` and `overlap` are applied once, by ordering (or reversing) the list of translation
+files up front, so the work doesn't get repeated every time a translation is loaded.
+
+### Translation file format
+
+All translation files are loaded from the path specified in the configuration. That path may only
+contain TOML files and sub-folders; a `walk_dir` algorithm loads every translation file found
+recursively.
+
+The translation files follow three rules:
+- Objects can only contain objects or translations. The top level can only contain objects.
+- If an object contains another object, it can only contain objects (known as a nested object).
+- If an object contains a string, it can only contain strings (known as a translation object).
+
+Translation strings can contain templates: add a pair of braces with a key inside to the string,
+and the macro replaces them when the translation is loaded.
+
+### Loading translations
+
+The load configuration such as `seek_mode` and `overlap` is not relevant here; as previously
+explained, those values are applied only once by ordering the translations conveniently.
+
+To load translations you use the `translatable::translation` macro, which requires at least two
+parameters.
+
+The first parameter is the language. It can be passed dynamically as a variable or an expression
+that resolves to a `translatable::Language`, or statically as a `&'static str` literal. Static
+values must comply with the `ISO 639-1` standard, as they are parsed into a
+`translatable::Language` at compile time.
+
+The second parameter is the path. It can be passed dynamically as a variable or an expression
+that resolves to a `Vec` containing each path section, or statically with the following syntax:
+`static path::to::translation`.
+
+The remaining parameters are `meta-variable patterns`, also known as `key = value` parameters or
+key-value pairs. These are processed as replacements, *or as a format if the call is all-static*.
+When a template (`{}`) containing the name of a key is found, it is replaced with the value's
+`Display` implementation, which means the value must always implement `Display`. If you want a
+literal `{}` inside your translation, you can escape it the same way `format!` does, with `{{}}`.
+Just like struct construction in Rust, a parameter like `x = x` can be shortened to `x`. The keys
+inside the braces are XID-validated, so they must be valid identifiers.
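+
+As a quick illustration of these parameter rules, here is a minimal sketch. It assumes the
+`common.greeting` translation with its `{name}` template from the
+[example implementation](#example-implementation-) further below.
+
+```rust
+use translatable::translation;
+
+fn main() {
+    let name = "john";
+
+    // `name` on its own is shorthand for `name = name`.
+    let greeting = translation!("es", static common::greeting, name);
+    assert_eq!(greeting, "¡Hola john!");
+
+    // Any value that implements `Display` works as a template replacement.
+    let greeting = translation!("en", static common::greeting, name = 42);
+    assert_eq!(greeting, "Hello 42!");
+}
+```
+
+Because both the language and the path are static in this sketch, path existence and language
+validity are checked at compile time and the calls return a plain `String` (see the behavior
+matrix below).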
+
+Keep in mind that templates are specific to each translation: each language can contain its own
+set of templates. If the language is dynamic, it is recommended to pass every template key the
+translation might use. Templates are not validated; they are simply replaced when found, and if a
+template is not found the original placeholder remains untouched.
+
+Depending on whether the parameters are static or dynamic, the macro behaves differently and the
+checks move between compile time and run time. The following table is the macro behavior matrix.
+
+| Parameters                                          | Compile-time checks               | Return type             |
+|-----------------------------------------------------|-----------------------------------|-------------------------|
+| `static language` + `static path` (most optimized)  | Path existence, Language validity | `String`                |
+| `dynamic language` + `dynamic path`                 | None                              | `Result<String, Error>` |
+| `static language` + `dynamic path`                  | Language validity                 | `Result<String, Error>` |
+| `dynamic language` + `static path` (commonly used)  | Path existence                    | `Result<String, Error>` |
+
+- For error handling, if you want to integrate this with `thiserror` you can use
+`#[from] translatable::Error` as a nested error; all the errors implement `Display`.
+
+- The runtime errors implement a `cause()` method that returns a heap-allocated `String` with the
+error reason, essentially the error's `Display` output. That method is marked `#[cold]`, so use it
+in paths that are rarely evaluated, and prefer lazy combinators such as `or_else` over their eager
+`or` counterparts.
+
+## Example implementation 📂
+
+The following example shows a possible application structure for a real project.
+
+### Example application tree
+
+```plain
+project-root/
+├── Cargo.toml
+├── translatable.toml
+├── translations/
+│   └── app.toml
+└── src/
+    └── main.rs
+```
+
+### Example translation file (translations/app.toml)
+
+Notice how `common.greeting` has a template named `name`.
+
+```toml
+[welcome_message]
+en = "Welcome to our app!"
+es = "¡Bienvenido a nuestra aplicación!"
+
+[common.greeting]
+en = "Hello {name}!"
+es = "¡Hola {name}!"
+```
+
+### Example application usage
+
+Notice how the `{name}` template is replaced by the `name = "john"` key-value pair passed as the
+third parameter.
+
+```rust
+use translatable::{translation, Language};
+
+fn main() {
+    let dynamic_lang = "es".parse::<Language>().unwrap();
+    let dynamic_path = vec!["common", "greeting"];
+
+    assert_eq!(translation!("es", static common::greeting, name = "john"), "¡Hola john!");
+    assert_eq!(translation!("es", dynamic_path, name = "john").unwrap(), "¡Hola john!");
+    assert_eq!(translation!(dynamic_lang, static common::greeting, name = "john").unwrap(), "¡Hola john!");
+    assert_eq!(translation!(dynamic_lang, dynamic_path, name = "john").unwrap(), "¡Hola john!");
+}
+```
+
+## License 📜
+
+This repository is dual-licensed. In short: if your project is open source, the library is free to
+use; otherwise, contact [licensing@flaky.es](mailto:licensing@flaky.es) for a custom license for
+your use case.
+
+For more information read the [license](./LICENSE) file.
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..46fc58b
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,19 @@
+# Security Vulnerabilities
+
+**If you find any security issues, please reach out to any of the maintainers
+listed in our [governance.md].** We take all security reports seriously and
+will get back to you as soon as possible.
+ +We also have security measures in place by using automated tools for managing dependencies. +Our project **strongly** relies on [dependabot] to: + +- Check for security vulnerabilities +- Update dependencies when needed +- Maintain all dependencies up to date + +This automated system helps us apply security patches regularly, reducing the +need for manual checks on dependencies and ensuring that we are using the +latest versions of libraries to prevent security issues. + +[dependabot]: https://docs.github.com/en/code-security/dependabot +[governance.md]: GOVERNANCE.md diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000..df894dd --- /dev/null +++ b/codecov.yml @@ -0,0 +1,8 @@ +coverage: + status: + project: + default: + target: 80% + patch: + default: + target: 80% diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..6df32df --- /dev/null +++ b/flake.lock @@ -0,0 +1,130 @@ +{ + "nodes": { + "crane": { + "locked": { + "lastModified": 1745454774, + "narHash": "sha256-oLvmxOnsEKGtwczxp/CwhrfmQUG2ym24OMWowcoRhH8=", + "owner": "ipetkov", + "repo": "crane", + "rev": "efd36682371678e2b6da3f108fdb5c613b3ec598", + "type": "github" + }, + "original": { + "owner": "ipetkov", + "repo": "crane", + "type": "github" + } + }, + "fenix": { + "inputs": { + "nixpkgs": "nixpkgs", + "rust-analyzer-src": "rust-analyzer-src" + }, + "locked": { + "lastModified": 1745649180, + "narHash": "sha256-3Ptviong+IYr9y3W6ddJMQDn/VpnTQHgwGU3i022HtA=", + "owner": "nix-community", + "repo": "fenix", + "rev": "585fc772cd167cad7d30222b2eb5f5e4bb2166b9", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "fenix", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1745526057, + "narHash": "sha256-ITSpPDwvLBZBnPRS2bUcHY3gZSwis/uTe255QgMtTLA=", + "owner": "nixos", + "repo": "nixpkgs", + "rev": "f771eb401a46846c1aebd20552521b233dd7e18b", + "type": "github" + }, + "original": { + "owner": "nixos", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs_2": { + "locked": { + "lastModified": 1745377448, + "narHash": "sha256-jhZDfXVKdD7TSEGgzFJQvEEZ2K65UMiqW5YJ2aIqxMA=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "507b63021ada5fee621b6ca371c4fca9ca46f52c", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixpkgs-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "crane": "crane", + "fenix": "fenix", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs_2" + } + }, + "rust-analyzer-src": { + "flake": false, + "locked": { + "lastModified": 1745591749, + "narHash": "sha256-ynI1QfQEMHHuO+hJ8RLzhCo31XLm86vI7zRjKMQ45BQ=", + "owner": "rust-lang", + "repo": "rust-analyzer", + "rev": "df594ba8f4f72064002a4170eea031ba4300f087", + "type": "github" + }, + "original": { + "owner": "rust-lang", + "ref": "nightly", + "repo": "rust-analyzer", + "type": "github" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": 
"da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..66b8dcf --- /dev/null +++ b/flake.nix @@ -0,0 +1,44 @@ +{ + description = "Flake configuration file for translatable development."; + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; + crane.url = "github:ipetkov/crane"; + fenix.url = "github:nix-community/fenix"; + flake-utils.url = "github:numtide/flake-utils"; + }; + + outputs = { nixpkgs, flake-utils, fenix, ... }@inputs: + flake-utils.lib.eachDefaultSystem (system: + let + pkgs = nixpkgs.legacyPackages.${system}; + crane = inputs.crane.mkLib pkgs; + toolchainToml = ./rust-toolchain.toml; + + # Determine the Rust toolchain + toolchain = with fenix.packages.${system}; + (if builtins.pathExists toolchainToml then + fromToolchainFile { + file = toolchainToml; + sha256 = "sha256-X/4ZBHO3iW0fOenQ3foEvscgAPJYl2abspaBThDOukI="; + } + else + combine [ + stable.rustc + stable.rust-src + stable.cargo + complete.rustfmt + stable.clippy + stable.rust-analyzer + stable.llvm-tools-preview + ]); + + # Override the toolchain in crane + craneLib = crane.overrideToolchain toolchain; + in { + devShells.default = craneLib.devShell { + packages = with pkgs; [ toolchain gnumake cargo-llvm-cov ]; + + env = { LAZYVIM_RUST_DIAGNOSTICS = "bacon-ls"; }; + }; + }); +} diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 0000000..fb606ef --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,11 @@ +[toolchain] +channel = "stable" +profile = "minimal" +components = [ + "rustfmt", + "clippy", + "rust-analyzer", + "rustc", + "rust-src", + "llvm-tools-preview", +] diff --git a/rustfmt.toml b/rustfmt.toml new file mode 100644 index 0000000..66f96ac --- /dev/null +++ b/rustfmt.toml @@ -0,0 +1,38 @@ +# Formatting width settings +max_width = 100 +use_small_heuristics = "Max" + +# Indentation & spacing +tab_spaces = 4 +hard_tabs = false +newline_style = "Unix" + +# Function & struct formatting +fn_single_line = false +struct_lit_width = 40 +struct_variant_width = 40 + +# Imports & ordering +imports_granularity = "Module" +group_imports = "StdExternalCrate" +reorder_imports = true +reorder_modules = true +imports_layout = "HorizontalVertical" + +# Wrapping & line breaking +wrap_comments = true +format_strings = true +match_arm_leading_pipes = "Preserve" +match_block_trailing_comma = true +trailing_comma = "Vertical" +chain_width = 0 + +# Miscellaneous +condense_wildcard_suffixes = true +force_explicit_abi = true +merge_derives = true +normalize_comments = true +normalize_doc_attributes = true +use_field_init_shorthand = true +use_try_shorthand = true +edition = "2024" diff --git a/src/config.rs b/src/config.rs deleted file mode 100644 index 1c6995a..0000000 --- a/src/config.rs +++ /dev/null @@ -1,79 +0,0 @@ -use std::{fs::read_to_string, io::Error as IoError, sync::OnceLock}; -use serde::Deserialize; -use thiserror::Error; -use toml::{from_str as toml_from_str, de::Error as TomlError}; - -#[derive(Error, Debug)] -pub enum ConfigError { - #[error("An IO error occurred: {0:#}")] - Io(#[from] IoError), - - #[error( - "Toml parse error '{}'{}", - .0.message(), - .0.span().map(|l| format!(" in ./translatable.toml:{}:{}", l.start, l.end)).unwrap_or("".into()) - )] - ParseToml(#[from] TomlError) -} - -#[derive(Deserialize)] -#[serde(rename = "snake_case")] -pub enum 
SeekMode { - Alphabetical, - Unalphabetical -} - -#[derive(Deserialize)] -#[serde(rename = "snake_case")] -pub enum TranslationOverlap { - Overwrite, - Ignore -} - -// tracking issue: https://github.com/serde-rs/serde/issues/1030 -#[doc(hidden)] -fn __d_path() -> String { "./translations".into() } -#[doc(hidden)] -fn __d_seek_mode() -> SeekMode { SeekMode::Alphabetical } -#[doc(hidden)] -fn __d_overlap() -> TranslationOverlap { TranslationOverlap::Overwrite } - -#[derive(Deserialize)] -pub struct TranslatableConfig { - #[serde(default = "__d_path")] - path: String, - #[serde(default = "__d_seek_mode")] - seek_mode: SeekMode, - #[serde(default = "__d_overlap")] - overlap: TranslationOverlap -} - -impl TranslatableConfig { - pub fn path(&self) -> &str { - &self.path - } - - pub fn seek_mode(&self) -> &SeekMode { - &self.seek_mode - } - - pub fn overlap(&self) -> &TranslationOverlap { - &self.overlap - } -} - -static TRANSLATABLE_CONFIG: OnceLock = OnceLock::new(); - -pub fn load_config() -> Result<&'static TranslatableConfig, ConfigError> { - if let Some(config) = TRANSLATABLE_CONFIG.get() { - return Ok(config); - } - - let config = toml_from_str( - read_to_string("./translatable.toml") - .unwrap_or("".into()) // if no config file is found use defaults. - .as_str() - )?; - - Ok(TRANSLATABLE_CONFIG.get_or_init(|| config)) -} diff --git a/src/languages.rs b/src/languages.rs deleted file mode 100644 index 008092d..0000000 --- a/src/languages.rs +++ /dev/null @@ -1,156 +0,0 @@ -use strum::EnumIter; -use strum::IntoEnumIterator; - -#[derive(Debug, EnumIter)] -pub enum Iso639a { - AB, - AA, - AF, - SQ, - AM, - AR, - HY, - AS, - AY, - AZ, - BA, - EU, - BN, - DZ, - BH, - BI, - BR, - BG, - MY, - BE, - KM, - CA, - ZH, - CO, - HR, - CS, - DA, - NL, - EN, - EO, - ET, - FO, - FJ, - FI, - FR, - FY, - GL, - KA, - DE, - EL, - KL, - GN, - GU, - HA, - HE, - HI, - HU, - IS, - ID, - IA, - IE, - IU, - IK, - GA, - IT, - JA, - JV, - KN, - KS, - KK, - RW, - KY, - RN, - KO, - KU, - LO, - LA, - LV, - LN, - LT, - MK, - MG, - MS, - ML, - MT, - MI, - MR, - MO, - MN, - NA, - NE, - NO, - OC, - OR, - PS, - PL, - PT, - PA, - QU, - RM, - RO, - RU, - SM, - SG, - SA, - SR, - SH, - ST, - TN, - SN, - SD, - SI, - SS, - SK, - SL, - SO, - ES, - SU, - SW, - SV, - TL, - TG, - TA, - TT, - TE, - TH, - BO, - TI, - TO, - TS, - TR, - TK, - TW, - UG, - UK, - UR, - UZ, - VI, - VO, - CY, - WO, - XH, - YI, - YO, - ZA, - ZU -} - -impl Iso639a { - pub fn is_valid(lang: &str) -> bool { - Self::languages() - .iter() - .any(|valid_lang| valid_lang == lang) - } - - pub fn languages() -> Vec { - Self::iter() - .map(|lang| format!("{:?}", lang).to_lowercase()) - .collect() - } -} diff --git a/src/lib.rs b/src/lib.rs deleted file mode 100644 index d97ee3b..0000000 --- a/src/lib.rs +++ /dev/null @@ -1,16 +0,0 @@ -use macros::{translation_macro, RawTranslationArgs}; -use proc_macro::TokenStream; -use syn::parse_macro_input; - -mod config; -mod macros; -mod translations; -mod languages; - -#[proc_macro] -pub fn translation(input: TokenStream) -> TokenStream { - translation_macro( - parse_macro_input!(input as RawTranslationArgs) - .into() - ) -} diff --git a/src/macros.rs b/src/macros.rs deleted file mode 100644 index 28a7226..0000000 --- a/src/macros.rs +++ /dev/null @@ -1,115 +0,0 @@ -use proc_macro::TokenStream; -use syn::{parse::{Parse, ParseStream}, Expr, ExprLit, ExprPath, Lit, Result as SynResult, token::Static, Token}; -use quote::quote; -use crate::translations::load_translation_static; - -pub struct RawTranslationArgs { - language: Expr, - 
_comma: Token![,], - static_marker: Option, - path: Expr -} - -pub enum TranslationPathType { - OnScopeExpression(TokenStream), - CompileTimePath(String) -} - -pub enum TranslationLanguageType { - OnScopeExpression(TokenStream), - CompileTimeLiteral(String) -} - -pub struct TranslationArgs { - language: TranslationLanguageType, - path: TranslationPathType -} - -impl Parse for RawTranslationArgs { - fn parse(input: ParseStream) -> SynResult { - Ok(RawTranslationArgs { - language: input.parse()?, - _comma: input.parse()?, - static_marker: input.parse()?, - path: input.parse()? - }) - } -} - -impl TranslationPathType { - pub fn dynamic(self) -> TokenStream { - match self { - Self::OnScopeExpression(tokens) => tokens, - Self::CompileTimePath(cmp_val) => quote!(#cmp_val).into(), - } - } -} - -impl TranslationLanguageType { - pub fn dynamic(self) -> TokenStream { - match self { - Self::OnScopeExpression(tokens) => tokens, - Self::CompileTimeLiteral(cmp_val) => quote!(#cmp_val).into(), - } - } -} - -impl Into for RawTranslationArgs { - fn into(self) -> TranslationArgs { - let is_path_static = self.static_marker.is_some(); - - TranslationArgs { - language: match self.language { - Expr::Lit(ExprLit { lit: Lit::Str(lit_str), .. }) => { - TranslationLanguageType::CompileTimeLiteral(lit_str.value()) - }, - other => { - TranslationLanguageType::OnScopeExpression(quote!(#other).into()) - } - }, - - path: match self.path { - Expr::Path(ExprPath { path, .. }) if is_path_static => { - TranslationPathType::CompileTimePath( - path - .segments - .iter() - .map(|s| s - .ident - .to_string() - ) - .collect::>() - .join(".") - .to_string() - ) - }, - - path => { - TranslationPathType::OnScopeExpression(quote!(#path).into()) - } - } - } - } -} - -pub fn translation_macro(args: TranslationArgs) -> TokenStream { - if let TranslationPathType::CompileTimePath(path) = args.path { - if let TranslationLanguageType::CompileTimeLiteral(lang) = args.language { - return match load_translation_static(&lang, &path) { - Ok(Some(translation)) => quote!(#translation).into(), - - Ok(None) => { - let error_fmt = format!("The language \'{lang}\' is not available for \'{path}\'"); - quote!(compile_error!(#error_fmt)).into() - }, - - Err(err) => { - let error_fmt = err.to_string(); - quote!(compile_error!(#error_fmt)).into() - } - } - } - } - - quote!("").into() -} diff --git a/src/translations.rs b/src/translations.rs deleted file mode 100644 index ead8c71..0000000 --- a/src/translations.rs +++ /dev/null @@ -1,117 +0,0 @@ -use std::{fs::{read_dir, read_to_string}, io::Error as IoError, sync::OnceLock}; -use proc_macro::TokenStream; -use thiserror::Error; -use toml::{Table, de::Error as TomlError}; -use crate::{config::{load_config, ConfigError, SeekMode, TranslationOverlap}, macros::{TranslationLanguageType, TranslationPathType}, languages::Iso639a}; - -#[derive(Error, Debug)] -pub enum TranslationError { - #[error("{0}")] - Config(#[from] ConfigError), - - #[error("An IO Error occurred: {0:#}")] - Io(#[from] IoError), - - #[error("The path contains invalid unicode characters.")] - InvalidUnicode, - - #[error( - "Toml parse error '{}'{}", - .0.message(), - .0.span().map(|l| format!(" in {}:{}:{}", .1, l.start, l.end)).unwrap_or("".into()) - )] - ParseToml(TomlError, String), - - #[error( - "'{0}' is not valid ISO 639-1, valid languages include: {valid}", - valid = Iso639a::languages().join(", ") - )] - InvalidLangauge(String) -} - -static TRANSLATIONS: OnceLock> = OnceLock::new(); - -fn walk_dir(path: &str) -> Result, 
TranslationError> { - let directory = read_dir(path)? - .into_iter() - .collect::, _>>()?; - - let mut result = Vec::new(); - - for path in directory { - let path = path.path(); - - if path.is_dir() { - result.extend(walk_dir( - path - .to_str() - .ok_or(TranslationError::InvalidUnicode)? - )?); - } else { - result.push( - path - .to_string_lossy() - .to_string() - ); - } - } - - Ok(result) -} - -fn load_translations() -> Result<&'static Vec, TranslationError> { - if let Some(translations) = TRANSLATIONS.get() { - return Ok(translations); - } - - let config = load_config()?; - - let mut translation_paths = walk_dir(config.path())?; - translation_paths.sort_by_key(|path| path.to_lowercase()); - - if let SeekMode::Unalphabetical = config.seek_mode() { - translation_paths.reverse(); - } - - let translations = translation_paths - .iter() - .map(|path| Ok( - read_to_string(&path)? - .parse::
() - .map_err(|err| TranslationError::ParseToml(err, path.clone()))? - )) - .collect::, TranslationError>>()?; - - Ok(TRANSLATIONS.get_or_init(|| translations)) -} - -pub fn load_translation_static(lang: &str, path: &str) -> Result, TranslationError> { - let translations = load_translations()?; - let config = load_config()?; - - if !Iso639a::is_valid(lang) { - return Err(TranslationError::InvalidLangauge(lang.into())) - } - - let mut choosen_translation = None; - for translation in translations { - choosen_translation = path - .split('.') - .fold(Some(translation), |acc, key| acc?.get(key)?.as_table()) - .and_then(|translation| translation.get(lang)) - .map(|translation| translation.to_string()); - - if choosen_translation.is_some() && matches!(config.overlap(), TranslationOverlap::Ignore) { - break; - } - } - - Ok(choosen_translation) -} - -pub fn load_translation_dynamic(lang: TranslationLanguageType, path: TranslationPathType) -> TokenStream { - let lang = lang.dynamic(); - let path = path.dynamic(); - - todo!() -} diff --git a/tests/static.rs b/tests/static.rs deleted file mode 100644 index f9e231a..0000000 --- a/tests/static.rs +++ /dev/null @@ -1,6 +0,0 @@ -use translatable::translation; - -#[test] -fn get_salutation() { - translation!("es", static salutation::test); -} diff --git a/translatable.toml b/translatable.toml new file mode 100644 index 0000000..a95aec2 --- /dev/null +++ b/translatable.toml @@ -0,0 +1,2 @@ + +path = "./translatable/tests/environments/everything_valid/translations/" diff --git a/translatable/Cargo.toml b/translatable/Cargo.toml new file mode 100644 index 0000000..a54d232 --- /dev/null +++ b/translatable/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "translatable" +description = "A robust internationalization solution for Rust featuring compile-time validation, ISO 639-1 compliance, and TOML-based translation management. " +repository = "https://github.com/FlakySL/translatable" +license = "GPL-3.0" +readme = "../README.md" +version = "1.0.0" +edition = "2024" +authors = ["Esteve Autet ", "Chiko "] +keywords = [ + "i18n", + "translations", + "idioms", + "languages", + "internazionalization", +] + +[dependencies] +thiserror = "2.0.12" +translatable_proc = { version = "1", path = "../translatable_proc" } +translatable_shared = { version = "1", path = "../translatable_shared/" } + +[dev-dependencies] +quote = "1.0.40" +toml_edit = "0.23.1" +trybuild = "1.0.105" diff --git a/translatable/src/error.rs b/translatable/src/error.rs new file mode 100644 index 0000000..3552de3 --- /dev/null +++ b/translatable/src/error.rs @@ -0,0 +1,102 @@ +//! Runtime error module. +//! +//! This module contains all the runtime +//! errors that could be generated by +//! macro calls or user-facing helper +//! method invocations. + +use thiserror::Error; +use translatable_shared::misc::language::Language; +use translatable_shared::translations::node::TranslationNodeError; + +/// Macro runtime error handling. +/// +/// Used in [`translation`] invocations for non +/// compile-time validations and errors. +/// +/// Use the [`Display`] implementation to obtain the +/// error message, [`Self::cause`] is available as +/// a helper method for such purpose. Read it's +/// documentation before using. +/// +/// [`translation`]: crate::translation +/// [`Display`]: std::fmt::Display +#[derive(Error, Debug)] +pub enum RuntimeError { + /// Translation node error derivations. + /// + /// [`TranslationNode`] construction + /// failure, usually nesting missmatch, invalid + /// template validation... 
+ /// + /// [`Display`] directly forwards the inner + /// error [`Display`] value. + /// + /// The enum implements + /// [`From`] to allow + /// conversion from + /// [`TranslationNodeError`]. + /// + /// **Parameters** + /// * `0` - The [`TranslationNodeError`] derivation. + /// + /// [`TranslationNode`]: crate::shared::translations::node::TranslationNode + /// [`TranslationNodeError`]: crate::shared::translations::node::TranslationNodeError + /// [`Display`]: std::fmt::Display + #[error("{0:#}")] + TranslationNode(#[from] TranslationNodeError), + + /// Dynamic path resolve error. + /// + /// The specified path may not be found + /// in any of the translation files. + /// + /// This is not related to runtime language + /// validity, check [`LanguageNotAvailable`] + /// for that purpose. + /// + /// **Parameters** + /// * `0` - The path that could not be found + /// appended with it's separator. + /// + /// [`LanguageNotAvailable`]: crate::Error::LanguageNotAvailable + #[error("The path '{0}' could not be found")] + PathNotFound(String), + + /// Dynamic language obtention error. + /// + /// This specifically happens when a language + /// is not available for a specific translation. + /// + /// Language parsing is delegated to the user, + /// the language parameter must be a [`Language`], + /// if it's a &[`str`] the validation is made in compile + /// time. In that case we don't reach runtime. + /// + /// **Parameters** + /// * `0` - The language that is not available. + /// * `1` - The path for which the language is not available + /// appended with it's separator. + #[error("The language '{0:?}' ('{0:#}') is not available for the path '{1}'")] + LanguageNotAvailable(Language, String), +} + +impl RuntimeError { + /// Runtime error display helper. + /// + /// This method is marked as `#[cold]` + /// so it should be called lazily with + /// monads such as [`ok_or_else`] or any + /// other `or_else` method. + /// + /// **Returns** + /// A heap allocated [`String`] containing + /// the cause of the error. + /// + /// [`ok_or_else`]: std::option::Option::ok_or_else + #[cold] + #[inline] + pub fn cause(&self) -> String { + format!("{self:#}") + } +} diff --git a/translatable/src/lib.rs b/translatable/src/lib.rs new file mode 100644 index 0000000..76397e7 --- /dev/null +++ b/translatable/src/lib.rs @@ -0,0 +1,43 @@ +//! # Translatable +//! +//! A robust internationalization solution for +//! Rust featuring compile-time validation, +//! ISO 639-1 compliance, and TOML-based +//! translation management. + +#![warn(missing_docs)] + +mod error; + +/// Runtime error re-export. +/// +/// This `use` statement renames +/// the run time error as a common +/// error by rust practice and exports +/// it. +#[rustfmt::skip] +pub use error::RuntimeError as Error; + +/// Macro re-exports. +/// +/// This `use` statement re-exports +/// all the macros on `translatable_proc` +/// which only work if included from +/// this module due to path generation. +#[rustfmt::skip] +pub use translatable_proc::translation; + +#[rustfmt::skip] +pub use translatable_proc::translation_context; + +/// Language enum re-export. +/// +/// This `use` statement re-exports +/// from the hidden shared re-export +/// for user convenience on parsing. 
+
+#[rustfmt::skip]
+pub use shared::misc::language::Language;
+
+#[doc(hidden)]
+#[rustfmt::skip]
+pub use translatable_shared as shared;
diff --git a/translatable/tests/README.md b/translatable/tests/README.md
new file mode 100644
index 0000000..0bc1714
--- /dev/null
+++ b/translatable/tests/README.md
@@ -0,0 +1,37 @@
+# Tests
+
+First of all, thanks for your interest in contributing to this project.
+
+In this crate we aim for stability and ease of use for all the macros the crate
+declares; we want to be helpful, not a burden. To accomplish this we need to test
+every part of the crate.
+
+There are two types of tests declared in this crate.
+
+## Integration Testing
+
+Integration testing helps us test the user experience: which errors the user should
+receive on misuse of a macro, whether they happen at compile time or at runtime.
+
+Integration tests that are expected to pass should be prefixed with `pass_`, otherwise
+with `fail_`. The test structure is separated by parameter, so there are `language/`,
+`path/` and `templates/` parameter folders. The `environments/` folder is meant to
+simulate misconfiguration and the respective errors it should produce.
+
+The tests that pass should also be exercised at runtime, so add them to the mod file as
+modules and annotate them conditionally with `#[cfg(test)] #[test]`.
+
+## Unitary Testing
+
+Unitary testing is simpler, as it only exercises individual functions, usually from
+`translatable::shared`. Each module should have its own file, and every function
+in the module should be tested.
+
+## Running the tests
+
+This project uses `make` for some command recipes. You can run `make test` and it will
+test the application with the correct parameters.
+
+If you are using `cargo test` directly, make sure to run the tests with `--test-threads=1`.
+There are locks in place so nothing should break, but you should do that anyway to be safe.
diff --git a/translatable/tests/environments/everything_valid/translations/test.toml b/translatable/tests/environments/everything_valid/translations/test.toml
new file mode 100644
index 0000000..6c88b20
--- /dev/null
+++ b/translatable/tests/environments/everything_valid/translations/test.toml
@@ -0,0 +1,16 @@
+
+# test no templates in string.
+[greetings.formal]
+es = "Bueno conocerte."
+en = "Nice to meet you."
+
+# test single template in string.
+[greetings.informal]
+es = "Hey {user}, todo bien?"
+en = "What's good {user}?"
+
+# test multiple templates in same string.
+[auditory.actions.delete_user]
+es = "{author} ha borrado al usuario {target}."
+en = "{author} deleted the user {target}."
+
diff --git a/translatable/tests/environments/translations_malformed/translations/test.toml b/translatable/tests/environments/translations_malformed/translations/test.toml
new file mode 100644
index 0000000..ba40268
--- /dev/null
+++ b/translatable/tests/environments/translations_malformed/translations/test.toml
@@ -0,0 +1,3 @@
+
+[some.translation]
+value = 1
diff --git a/translatable/tests/integration/config/fail_config_invalid_enums.rs b/translatable/tests/integration/config/fail_config_invalid_enums.rs
new file mode 100644
index 0000000..9ba836e
--- /dev/null
+++ b/translatable/tests/integration/config/fail_config_invalid_enums.rs
@@ -0,0 +1,9 @@
+// the macro isn't filled because the expected
+// failure is on configuration.
+ +#[allow(unused_imports)] +use translatable::{translation, Language}; + +fn main() { + let _ = translation!(Language::ES, vec![""]); +} diff --git a/translatable/tests/integration/config/fail_config_invalid_enums.stderr b/translatable/tests/integration/config/fail_config_invalid_enums.stderr new file mode 100644 index 0000000..c985428 --- /dev/null +++ b/translatable/tests/integration/config/fail_config_invalid_enums.stderr @@ -0,0 +1,7 @@ +error: Couldn't parse configuration entry '49854835093459fjkdjfkj' for 'overlap' + --> tests/integration/config/fail_config_invalid_enums.rs + | + | let _ = translation!(Language::ES, vec![""]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: this error originates in the macro `translation` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/translatable/tests/integration/config/fail_config_path_missmatch.rs b/translatable/tests/integration/config/fail_config_path_missmatch.rs new file mode 100644 index 0000000..9ba836e --- /dev/null +++ b/translatable/tests/integration/config/fail_config_path_missmatch.rs @@ -0,0 +1,9 @@ +// the macro isn't filled because the expected +// failure is on configuration. + +#[allow(unused_imports)] +use translatable::{translation, Language}; + +fn main() { + let _ = translation!(Language::ES, vec![""]); +} diff --git a/translatable/tests/integration/config/fail_config_path_missmatch.stderr b/translatable/tests/integration/config/fail_config_path_missmatch.stderr new file mode 100644 index 0000000..9a6e191 --- /dev/null +++ b/translatable/tests/integration/config/fail_config_path_missmatch.stderr @@ -0,0 +1,7 @@ +error: IO Error: "No such file or directory (os error 2)". Please check the specified path in your configuration file. + --> tests/integration/config/fail_config_path_missmatch.rs + | + | let _ = translation!(Language::ES, vec![""]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: this error originates in the macro `translation` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/translatable/tests/integration/config/fail_translations_malformed.rs b/translatable/tests/integration/config/fail_translations_malformed.rs new file mode 100644 index 0000000..9ba836e --- /dev/null +++ b/translatable/tests/integration/config/fail_translations_malformed.rs @@ -0,0 +1,9 @@ +// the macro isn't filled because the expected +// failure is on configuration. 
+ +#[allow(unused_imports)] +use translatable::{translation, Language}; + +fn main() { + let _ = translation!(Language::ES, vec![""]); +} diff --git a/translatable/tests/integration/config/fail_translations_malformed.stderr b/translatable/tests/integration/config/fail_translations_malformed.stderr new file mode 100644 index 0000000..4da0199 --- /dev/null +++ b/translatable/tests/integration/config/fail_translations_malformed.stderr @@ -0,0 +1,7 @@ +error: A nesting can only contain translation objects or other nestings + --> tests/integration/config/fail_translations_malformed.rs + | + | let _ = translation!(Language::ES, vec![""]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: this error originates in the macro `translation` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/translatable/tests/integration/context/fail_disallowed_type.rs b/translatable/tests/integration/context/fail_disallowed_type.rs new file mode 100644 index 0000000..dc80d25 --- /dev/null +++ b/translatable/tests/integration/context/fail_disallowed_type.rs @@ -0,0 +1,12 @@ +#[allow(unused_imports)] // trybuild +use ::{std::collections::HashMap, translatable::translation_context}; + +#[translation_context(base_path = greetings)] +struct Context { + formal: i32, + informal: String, +} + +#[allow(unused)] +fn main() {} // trybuild + diff --git a/translatable/tests/integration/context/fail_disallowed_type.stderr b/translatable/tests/integration/context/fail_disallowed_type.stderr new file mode 100644 index 0000000..d0af50b --- /dev/null +++ b/translatable/tests/integration/context/fail_disallowed_type.stderr @@ -0,0 +1,7 @@ +error: Only String' and '&str' is allowed for translation contexts + --> tests/integration/context/fail_disallowed_type.rs:4:1 + | +4 | #[translation_context(base_path = greetings)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: this error originates in the attribute macro `translation_context` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/translatable/tests/integration/context/fail_fallback_is_raw.rs b/translatable/tests/integration/context/fail_fallback_is_raw.rs new file mode 100644 index 0000000..6a422ff --- /dev/null +++ b/translatable/tests/integration/context/fail_fallback_is_raw.rs @@ -0,0 +1,13 @@ +use std::collections::HashMap; +use translatable::{translation_context, Language}; + +#[translation_context(base_path = greetings, fallback_language = "en")] +struct Context { + formal: String, + informal: String +} + +fn main() { + let ctx = Context::load_translations(Language::ES, &HashMap::new()); + assert!(ctx.is_ok()); // invalid call +} diff --git a/translatable/tests/integration/context/fail_fallback_is_raw.stderr b/translatable/tests/integration/context/fail_fallback_is_raw.stderr new file mode 100644 index 0000000..0126b0e --- /dev/null +++ b/translatable/tests/integration/context/fail_fallback_is_raw.stderr @@ -0,0 +1,8 @@ +error[E0599]: no method named `is_ok` found for struct `Context` in the current scope + --> tests/integration/context/fail_fallback_is_raw.rs:12:17 + | +4 | #[translation_context(base_path = greetings, fallback_language = "en")] + | ----------------------------------------------------------------------- method `is_ok` not found for this struct +... 
+12 | assert!(ctx.is_ok()); // invalid call + | ^^^^^ method not found in `Context` diff --git a/translatable/tests/integration/context/fail_invalid_base_path.rs b/translatable/tests/integration/context/fail_invalid_base_path.rs new file mode 100644 index 0000000..e91dfd4 --- /dev/null +++ b/translatable/tests/integration/context/fail_invalid_base_path.rs @@ -0,0 +1,9 @@ +use translatable::translation_context; + +#[translation_context(base_path = hello)] +struct Context { + formal: String, + informal: String +} + +fn main() {} diff --git a/translatable/tests/integration/context/fail_invalid_base_path.stderr b/translatable/tests/integration/context/fail_invalid_base_path.stderr new file mode 100644 index 0000000..aedd58d --- /dev/null +++ b/translatable/tests/integration/context/fail_invalid_base_path.stderr @@ -0,0 +1,7 @@ +error: A translation with the path 'hello::formal' could not be found + --> tests/integration/context/fail_invalid_base_path.rs:3:1 + | +3 | #[translation_context(base_path = hello)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: this error originates in the attribute macro `translation_context` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/translatable/tests/integration/context/fail_invalid_fallback.rs b/translatable/tests/integration/context/fail_invalid_fallback.rs new file mode 100644 index 0000000..5aa1353 --- /dev/null +++ b/translatable/tests/integration/context/fail_invalid_fallback.rs @@ -0,0 +1,9 @@ +use translatable::translation_context; + +#[translation_context(base_path = greetings, fallback_language = "invalid")] +struct Context { + formal: String, + informal: String +} + +fn main() {} diff --git a/translatable/tests/integration/context/fail_invalid_fallback.stderr b/translatable/tests/integration/context/fail_invalid_fallback.stderr new file mode 100644 index 0000000..98180be --- /dev/null +++ b/translatable/tests/integration/context/fail_invalid_fallback.stderr @@ -0,0 +1,5 @@ +error: Invalid language literal 'invalid' is not a valid ISO-639-1 language + --> tests/integration/context/fail_invalid_fallback.rs:3:66 + | +3 | #[translation_context(base_path = greetings, fallback_language = "invalid")] + | ^^^^^^^^^ diff --git a/translatable/tests/integration/context/fail_no_fallback_is_result.rs b/translatable/tests/integration/context/fail_no_fallback_is_result.rs new file mode 100644 index 0000000..5abd09f --- /dev/null +++ b/translatable/tests/integration/context/fail_no_fallback_is_result.rs @@ -0,0 +1,13 @@ +use std::collections::HashMap; +use translatable::{translation_context, Language}; + +#[translation_context(base_path = greetings)] +struct Context { + formal: String, + informal: String +} + +fn main() { + let ctx = Context::load_translations(Language::ES, &HashMap::new()); + assert!(ctx.formal); // invalid call +} diff --git a/translatable/tests/integration/context/fail_no_fallback_is_result.stderr b/translatable/tests/integration/context/fail_no_fallback_is_result.stderr new file mode 100644 index 0000000..be42190 --- /dev/null +++ b/translatable/tests/integration/context/fail_no_fallback_is_result.stderr @@ -0,0 +1,10 @@ +error[E0609]: no field `formal` on type `Result` + --> tests/integration/context/fail_no_fallback_is_result.rs:12:17 + | +12 | assert!(ctx.formal); // invalid call + | ^^^^^^ unknown field + | +help: one of the expressions' fields has a field of the same name + | +12 | assert!(ctx.unwrap().formal); // invalid call + | +++++++++ diff --git a/translatable/tests/integration/context/mod.rs 
b/translatable/tests/integration/context/mod.rs new file mode 100644 index 0000000..acef2bf --- /dev/null +++ b/translatable/tests/integration/context/mod.rs @@ -0,0 +1,3 @@ +pub mod pass_fallback_catch; +pub mod pass_invalid_runtime_language; +pub mod pass_without_params; diff --git a/translatable/tests/integration/context/pass_fallback_catch.rs b/translatable/tests/integration/context/pass_fallback_catch.rs new file mode 100644 index 0000000..a220a1a --- /dev/null +++ b/translatable/tests/integration/context/pass_fallback_catch.rs @@ -0,0 +1,22 @@ +#[allow(unused_imports)] // trybuild +use ::{std::collections::HashMap, translatable::translation_context}; + +#[translation_context(base_path = greetings, fallback_language = "en")] +struct Context { + formal: String, + informal: String, +} + +#[test] +fn pass_fallback_catch() { + let translations = + Context::load_translations(translatable::Language::AA, &HashMap::from([ + ("user", "John") + ])); + + assert_eq!(translations.formal, "Nice to meet you."); + assert_eq!(translations.informal, "What's good John?"); +} + +#[allow(unused)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/context/pass_invalid_runtime_language.rs b/translatable/tests/integration/context/pass_invalid_runtime_language.rs new file mode 100644 index 0000000..85acd33 --- /dev/null +++ b/translatable/tests/integration/context/pass_invalid_runtime_language.rs @@ -0,0 +1,21 @@ +#![allow(dead_code)] + +#[allow(unused_imports)] // trybuild +use ::{std::collections::HashMap, translatable::translation_context}; + +#[translation_context(base_path = greetings)] +struct Context { + formal: String, + informal: String, +} + +#[test] +fn pass_invalid_runtime_language() { + let translations = + Context::load_translations(translatable::Language::AA, &HashMap::::new()); + + assert!(translations.is_err()); +} + +#[allow(unused)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/context/pass_without_params.rs b/translatable/tests/integration/context/pass_without_params.rs new file mode 100644 index 0000000..cbf2f50 --- /dev/null +++ b/translatable/tests/integration/context/pass_without_params.rs @@ -0,0 +1,18 @@ +#[allow(unused_imports)] // trybuild +use translatable::translation_context; + +#[translation_context] +struct Context { + #[path(greetings::formal)] + formal: String, + #[path(greetings::informal)] + informal: String, +} + +#[test] +fn pass_without_params() { + +} + +#[allow(unused)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/mod.rs b/translatable/tests/integration/mod.rs new file mode 100644 index 0000000..f374719 --- /dev/null +++ b/translatable/tests/integration/mod.rs @@ -0,0 +1,2 @@ +pub mod context; +pub mod translation; diff --git a/translatable/tests/integration/translation/language/fail_static_invalid.rs b/translatable/tests/integration/translation/language/fail_static_invalid.rs new file mode 100644 index 0000000..4bd4c76 --- /dev/null +++ b/translatable/tests/integration/translation/language/fail_static_invalid.rs @@ -0,0 +1,6 @@ +#[allow(unused_imports)] +use translatable::translation; + +fn main() { + translation!("xx", static greetings::formal); +} diff --git a/translatable/tests/integration/translation/language/fail_static_invalid.stderr b/translatable/tests/integration/translation/language/fail_static_invalid.stderr new file mode 100644 index 0000000..9f5a77f --- /dev/null +++ b/translatable/tests/integration/translation/language/fail_static_invalid.stderr @@ -0,0 +1,5 @@ +error: The literal 'xx' 
is an invalid ISO 639-1 string, and cannot be parsed + --> tests/integration/translation/language/fail_static_invalid.rs:5:18 + | +5 | translation!("xx", static greetings::formal); + | ^^^^ diff --git a/translatable/tests/integration/translation/language/mod.rs b/translatable/tests/integration/translation/language/mod.rs new file mode 100644 index 0000000..0676883 --- /dev/null +++ b/translatable/tests/integration/translation/language/mod.rs @@ -0,0 +1,5 @@ +pub mod pass_dynamic_enum; +pub mod pass_dynamic_expr; +pub mod pass_dynamic_invalid_runtime; +pub mod pass_static_lowercase; +pub mod pass_static_uppercase; diff --git a/translatable/tests/integration/translation/language/pass_dynamic_enum.rs b/translatable/tests/integration/translation/language/pass_dynamic_enum.rs new file mode 100644 index 0000000..537165d --- /dev/null +++ b/translatable/tests/integration/translation/language/pass_dynamic_enum.rs @@ -0,0 +1,14 @@ +#[allow(unused_imports)] // trybuild +use translatable::{Language, translation}; + +#[cfg(test)] +#[test] +pub fn pass_dynamic_enum() { + let translation = translation!(Language::ES, static greetings::formal) + .expect("Expected translation generation to be OK"); + + assert_eq!(translation, "Bueno conocerte."); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/translation/language/pass_dynamic_expr.rs b/translatable/tests/integration/translation/language/pass_dynamic_expr.rs new file mode 100644 index 0000000..d5afe75 --- /dev/null +++ b/translatable/tests/integration/translation/language/pass_dynamic_expr.rs @@ -0,0 +1,17 @@ +#[allow(unused_imports)] // trybuild +use translatable::translation; + +#[cfg(test)] +#[test] +pub fn pass_dynamic_expr() { + let translation = translation!( + "es".parse().expect("Expected language parsing to be OK"), + static greetings::formal + ) + .expect("Expected translation generation to be OK"); + + assert_eq!(translation, "Bueno conocerte."); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/translation/language/pass_dynamic_invalid_runtime.rs b/translatable/tests/integration/translation/language/pass_dynamic_invalid_runtime.rs new file mode 100644 index 0000000..8e8dd02 --- /dev/null +++ b/translatable/tests/integration/translation/language/pass_dynamic_invalid_runtime.rs @@ -0,0 +1,13 @@ +#[allow(unused_imports)] // trybuild +use translatable::{Language, translation}; + +#[cfg(test)] +#[test] +pub fn pass_dynamic_invalid_runtime() { + let language = "invalid".parse::(); + + assert!(language.is_err()); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/translation/language/pass_static_lowercase.rs b/translatable/tests/integration/translation/language/pass_static_lowercase.rs new file mode 100644 index 0000000..c18a052 --- /dev/null +++ b/translatable/tests/integration/translation/language/pass_static_lowercase.rs @@ -0,0 +1,13 @@ +#[allow(unused_imports)] // trybuild +use translatable::translation; + +#[cfg(test)] +#[test] +pub fn pass_static_lowercase() { + let translation = translation!("es", static greetings::formal); + + assert_eq!(translation, "Bueno conocerte."); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/translation/language/pass_static_uppercase.rs b/translatable/tests/integration/translation/language/pass_static_uppercase.rs new file mode 100644 index 0000000..710e048 --- /dev/null +++ 
b/translatable/tests/integration/translation/language/pass_static_uppercase.rs @@ -0,0 +1,13 @@ +#[allow(unused_imports)] // trybuild +use translatable::translation; + +#[cfg(test)] +#[test] +pub fn pass_static_uppercase() { + let translation = translation!("ES", static greetings::formal); + + assert_eq!(translation, "Bueno conocerte."); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/translation/mod.rs b/translatable/tests/integration/translation/mod.rs new file mode 100644 index 0000000..b600327 --- /dev/null +++ b/translatable/tests/integration/translation/mod.rs @@ -0,0 +1,3 @@ +pub mod language; +pub mod path; +pub mod templates; diff --git a/translatable/tests/integration/translation/path/fail_generic_arguments.rs b/translatable/tests/integration/translation/path/fail_generic_arguments.rs new file mode 100644 index 0000000..4562b19 --- /dev/null +++ b/translatable/tests/integration/translation/path/fail_generic_arguments.rs @@ -0,0 +1,6 @@ +#[allow(unused_imports)] +use translatable::translation; + +fn main() { + translation!("es", static greetings::formal); +} diff --git a/translatable/tests/integration/translation/path/fail_generic_arguments.stderr b/translatable/tests/integration/translation/path/fail_generic_arguments.stderr new file mode 100644 index 0000000..7a83806 --- /dev/null +++ b/translatable/tests/integration/translation/path/fail_generic_arguments.stderr @@ -0,0 +1,5 @@ +error: A translation path can't contain generic arguments. + --> tests/integration/translation/path/fail_generic_arguments.rs:5:48 + | +5 | translation!("es", static greetings::formal); + | ^^^^^^^^ diff --git a/translatable/tests/integration/translation/path/fail_static_nonexistent.rs b/translatable/tests/integration/translation/path/fail_static_nonexistent.rs new file mode 100644 index 0000000..51a6865 --- /dev/null +++ b/translatable/tests/integration/translation/path/fail_static_nonexistent.rs @@ -0,0 +1,6 @@ +#[allow(unused_imports)] +use translatable::translation; + +fn main() { + translation!("es", static non::existing::path); +} diff --git a/translatable/tests/integration/translation/path/fail_static_nonexistent.stderr b/translatable/tests/integration/translation/path/fail_static_nonexistent.stderr new file mode 100644 index 0000000..128ba6f --- /dev/null +++ b/translatable/tests/integration/translation/path/fail_static_nonexistent.stderr @@ -0,0 +1,7 @@ +error: The path 'non::existing::path' could not be found + --> tests/integration/translation/path/fail_static_nonexistent.rs:5:5 + | +5 | translation!("es", static non::existing::path); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: this error originates in the macro `translation` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/translatable/tests/integration/translation/path/mod.rs b/translatable/tests/integration/translation/path/mod.rs new file mode 100644 index 0000000..c4c603d --- /dev/null +++ b/translatable/tests/integration/translation/path/mod.rs @@ -0,0 +1,2 @@ +pub mod pass_dynamic_expr; +pub mod pass_static_existing; diff --git a/translatable/tests/integration/translation/path/pass_dynamic_expr.rs b/translatable/tests/integration/translation/path/pass_dynamic_expr.rs new file mode 100644 index 0000000..ada7c14 --- /dev/null +++ b/translatable/tests/integration/translation/path/pass_dynamic_expr.rs @@ -0,0 +1,19 @@ +#[allow(unused_imports)] // trybuild +use translatable::translation; + +#[cfg(test)] +#[test] +pub fn pass_dynamic_expr() { + 
let translation = translation!( + "es", + "greetings.formal" + .split(".") + .collect() + ) + .expect("Expected translation generation to be OK"); + + assert_eq!(translation, "Bueno conocerte."); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/translation/path/pass_static_existing.rs b/translatable/tests/integration/translation/path/pass_static_existing.rs new file mode 100644 index 0000000..d37c4ab --- /dev/null +++ b/translatable/tests/integration/translation/path/pass_static_existing.rs @@ -0,0 +1,13 @@ +#[allow(unused_imports)] // trybuild +use translatable::translation; + +#[cfg(test)] +#[test] +pub fn pass_static_existing() { + let translation = translation!("es", static greetings::formal); + + assert_eq!(translation, "Bueno conocerte."); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/translation/templates/fail_invalid_ident.rs b/translatable/tests/integration/translation/templates/fail_invalid_ident.rs new file mode 100644 index 0000000..9542925 --- /dev/null +++ b/translatable/tests/integration/translation/templates/fail_invalid_ident.rs @@ -0,0 +1,6 @@ +#[allow(unused_imports)] +use translatable::translation; + +fn main() { + translation!("es", static greetings::informal, %%$invalid = $ident); +} diff --git a/translatable/tests/integration/translation/templates/fail_invalid_ident.stderr b/translatable/tests/integration/translation/templates/fail_invalid_ident.stderr new file mode 100644 index 0000000..dd964ea --- /dev/null +++ b/translatable/tests/integration/translation/templates/fail_invalid_ident.stderr @@ -0,0 +1,5 @@ +error: expected identifier + --> tests/integration/translation/templates/fail_invalid_ident.rs:5:52 + | +5 | translation!("es", static greetings::informal, %%$invalid = $ident); + | ^ diff --git a/translatable/tests/integration/translation/templates/fail_not_display.rs b/translatable/tests/integration/translation/templates/fail_not_display.rs new file mode 100644 index 0000000..d089999 --- /dev/null +++ b/translatable/tests/integration/translation/templates/fail_not_display.rs @@ -0,0 +1,8 @@ +#[allow(unused_imports)] +use translatable::translation; + +struct NotDisplay; + +fn main() { + translation!("es", static greetings::informal, user = NotDisplay); +} diff --git a/translatable/tests/integration/translation/templates/fail_not_display.stderr b/translatable/tests/integration/translation/templates/fail_not_display.stderr new file mode 100644 index 0000000..0a78c8d --- /dev/null +++ b/translatable/tests/integration/translation/templates/fail_not_display.stderr @@ -0,0 +1,22 @@ +error[E0599]: `NotDisplay` doesn't implement `std::fmt::Display` + --> tests/integration/translation/templates/fail_not_display.rs:7:5 + | +4 | struct NotDisplay; + | ----------------- method `to_string` not found for this struct because it doesn't satisfy `NotDisplay: ToString` or `NotDisplay: std::fmt::Display` +... 
+7 | translation!("es", static greetings::informal, user = NotDisplay); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `NotDisplay` cannot be formatted with the default formatter + | + = note: the following trait bounds were not satisfied: + `NotDisplay: std::fmt::Display` + which is required by `NotDisplay: ToString` + = note: in format strings you may be able to use `{:?}` (or {:#?} for pretty-print) instead +note: the trait `std::fmt::Display` must be implemented + --> $RUST/core/src/fmt/mod.rs + | + | pub trait Display { + | ^^^^^^^^^^^^^^^^^ + = help: items from traits can only be used if the trait is implemented and in scope + = note: the following trait defines an item `to_string`, perhaps you need to implement it: + candidate #1: `ToString` + = note: this error originates in the macro `translation` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/translatable/tests/integration/translation/templates/fail_value_not_found.rs b/translatable/tests/integration/translation/templates/fail_value_not_found.rs new file mode 100644 index 0000000..2f665db --- /dev/null +++ b/translatable/tests/integration/translation/templates/fail_value_not_found.rs @@ -0,0 +1,8 @@ +#[allow(unused_imports)] +use translatable::translation; + +struct NotDisplay; + +fn main() { + translation!("es", static greetings::informal, user); +} diff --git a/translatable/tests/integration/translation/templates/fail_value_not_found.stderr b/translatable/tests/integration/translation/templates/fail_value_not_found.stderr new file mode 100644 index 0000000..1ed020e --- /dev/null +++ b/translatable/tests/integration/translation/templates/fail_value_not_found.stderr @@ -0,0 +1,5 @@ +error[E0425]: cannot find value `user` in this scope + --> tests/integration/translation/templates/fail_value_not_found.rs:7:52 + | +7 | translation!("es", static greetings::informal, user); + | ^^^^ not found in this scope diff --git a/translatable/tests/integration/translation/templates/mod.rs b/translatable/tests/integration/translation/templates/mod.rs new file mode 100644 index 0000000..306f03a --- /dev/null +++ b/translatable/tests/integration/translation/templates/mod.rs @@ -0,0 +1,4 @@ +pub mod pass_ident_ref; +pub mod pass_multiple_templates; +pub mod pass_trailing_comma; +pub mod pass_trailing_comma_no_args; diff --git a/translatable/tests/integration/translation/templates/pass_ident_ref.rs b/translatable/tests/integration/translation/templates/pass_ident_ref.rs new file mode 100644 index 0000000..34fdf84 --- /dev/null +++ b/translatable/tests/integration/translation/templates/pass_ident_ref.rs @@ -0,0 +1,15 @@ +#[allow(unused_imports)] // trybuild +use translatable::translation; + +#[cfg(test)] +#[test] +pub fn pass_dynamic_expr() { + let user = "Juan"; + + let translation = translation!("es", static greetings::informal, user); + + assert_eq!(translation, "Hey Juan, todo bien?"); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/translation/templates/pass_multiple_templates.rs b/translatable/tests/integration/translation/templates/pass_multiple_templates.rs new file mode 100644 index 0000000..c59b18f --- /dev/null +++ b/translatable/tests/integration/translation/templates/pass_multiple_templates.rs @@ -0,0 +1,16 @@ +#[allow(unused_imports)] // trybuild +use translatable::translation; + +#[cfg(test)] +#[test] +pub fn pass_dynamic_expr() { + let author = "Juan"; + let target = "Pepito"; + + let translation = translation!("es", static 
auditory::actions::delete_user, author, target); + + assert_eq!(translation, "Juan ha borrado al usuario Pepito."); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/translation/templates/pass_trailing_comma.rs b/translatable/tests/integration/translation/templates/pass_trailing_comma.rs new file mode 100644 index 0000000..c153c0f --- /dev/null +++ b/translatable/tests/integration/translation/templates/pass_trailing_comma.rs @@ -0,0 +1,16 @@ +#[allow(unused_imports)] // trybuild +use translatable::translation; + +#[cfg(test)] +#[test] +pub fn pass_dynamic_expr() { + let author = "Juan"; + let target = "Pepito"; + + let translation = translation!("es", static auditory::actions::delete_user, author, target,); + + assert_eq!(translation, "Juan ha borrado al usuario Pepito."); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration/translation/templates/pass_trailing_comma_no_args.rs b/translatable/tests/integration/translation/templates/pass_trailing_comma_no_args.rs new file mode 100644 index 0000000..849de90 --- /dev/null +++ b/translatable/tests/integration/translation/templates/pass_trailing_comma_no_args.rs @@ -0,0 +1,13 @@ +#[allow(unused_imports)] // trybuild +use translatable::translation; + +#[cfg(test)] +#[test] +pub fn pass_static_existing() { + let translation = translation!("es", static greetings::formal,); + + assert_eq!(translation, "Bueno conocerte."); +} + +#[allow(dead_code)] +fn main() {} // trybuild diff --git a/translatable/tests/integration_tests.rs b/translatable/tests/integration_tests.rs new file mode 100644 index 0000000..7b0fc89 --- /dev/null +++ b/translatable/tests/integration_tests.rs @@ -0,0 +1,106 @@ +use std::env::{remove_var, set_var}; +use std::fs::canonicalize; +use std::sync::Mutex; + +use trybuild::TestCases; + +mod integration; + +const PATH_ENV: &str = "TRANSLATABLE_LOCALES_PATH"; +const OVERLAP_ENV: &str = "TRANSLATABLE_OVERLAP"; + +static ENV_MUTEX: Mutex<()> = Mutex::new(()); + +macro_rules! lock_env { + () => { + let _env_guard = ENV_MUTEX.lock(); + }; +} + +#[inline] +unsafe fn set_default_env() { + unsafe { + set_locales_env("everything_valid"); + remove_var(OVERLAP_ENV); + } +} + +#[inline] +unsafe fn set_locales_env(env: &str) { + unsafe { + set_var( + PATH_ENV, + canonicalize(format!("./tests/environments/{env}/translations/")).unwrap(), + ); + } +} + +#[test] +fn valid_environment() { + unsafe { + let t = TestCases::new(); + + lock_env!(); + + set_default_env(); + set_locales_env("everything_valid"); + + t.pass("./tests/integration/translation/language/pass*.rs"); + t.compile_fail("./tests/integration/translation/language/fail*.rs"); + + t.pass("./tests/integration/translation/path/pass*.rs"); + t.compile_fail("./tests/integration/translation/path/fail*.rs"); + + t.pass("./tests/integration/translation/templates/pass*.rs"); + t.compile_fail("./tests/integration/translation/templates/fail*.rs"); + + t.pass("./tests/integration/context/pass*.rs"); + t.compile_fail("./tests/integration/context/fail*.rs"); + } +} + +#[test] +fn invalid_tests_path() { + unsafe { + let t = TestCases::new(); + + lock_env!(); + + set_default_env(); + set_var(PATH_ENV, "something_invalid"); + + // invalid path in configuration. 
+ t.compile_fail("./tests/integration/config/fail_config_path_missmatch.rs"); + } +} + +#[test] +fn invalid_config_value() { + unsafe { + let t = TestCases::new(); + + lock_env!(); + + set_default_env(); + set_locales_env("everything_valid"); + set_var(OVERLAP_ENV, "49854835093459fjkdjfkj"); + + // invalid enum value in configuration. + t.compile_fail("./tests/integration/config/fail_config_invalid_enums.rs"); + } +} + +#[test] +fn translations_malformed() { + unsafe { + let t = TestCases::new(); + + lock_env!(); + + set_default_env(); + set_locales_env("translations_malformed"); + + // translation file rule broken. + t.compile_fail("./tests/integration/config/fail_translations_malformed.rs"); + } +} diff --git a/translatable/tests/unitary/collection_generation.rs b/translatable/tests/unitary/collection_generation.rs new file mode 100644 index 0000000..40b2f83 --- /dev/null +++ b/translatable/tests/unitary/collection_generation.rs @@ -0,0 +1,38 @@ +use std::collections::HashMap; + +use quote::quote; +use translatable_shared::macros::collections::{map_to_tokens, map_transform_to_tokens}; + +#[test] +pub fn map_to_tokens_has_literals() { + let tokens = map_to_tokens(&{ + let mut map = HashMap::new(); + map.insert("key1", 1); + map.insert("key2", 2); + + map + }) + .to_string(); + + assert!(tokens.contains("\"key1\"")); + assert!(tokens.contains("1")); + assert!(tokens.contains("\"key2\"")); + assert!(tokens.contains("2")); +} + +#[test] +pub fn map_transform_to_tokens_has_literals() { + let tokens = map_transform_to_tokens( + &{ + let mut map = HashMap::new(); + map.insert("key1", 1i32); + + map + }, + |key, value| quote! { (#key, #value.to_string()) }, + ) + .to_string() + .replace(" ", ""); // normalize + + assert!(tokens.contains("vec![(\"key1\",1i32.to_string())]")); +} diff --git a/translatable/tests/unitary/display_to_error_tokens.rs b/translatable/tests/unitary/display_to_error_tokens.rs new file mode 100644 index 0000000..a6f4000 --- /dev/null +++ b/translatable/tests/unitary/display_to_error_tokens.rs @@ -0,0 +1,15 @@ +use translatable_shared::macros::errors::IntoCompileError; + +#[test] +pub fn display_to_error_tokens() { + let display = "test".to_string(); + + let to_out_compile_error = display + .to_out_compile_error() + .to_string() + .replace(" ", ""); // normalize + + assert!(to_out_compile_error.contains("fn")); + assert!(to_out_compile_error.contains("__()")); + assert!(to_out_compile_error.contains("std::compile_error!")); +} diff --git a/translatable/tests/unitary/language_enum.rs b/translatable/tests/unitary/language_enum.rs new file mode 100644 index 0000000..ffd40bd --- /dev/null +++ b/translatable/tests/unitary/language_enum.rs @@ -0,0 +1,26 @@ +use quote::ToTokens; +use translatable::Language; + +#[test] +pub fn language_enum_parsing_case_insensitive() { + let language_lower = "es".parse::(); + let language_upper = "ES".parse::(); + + assert!(language_lower.is_ok()); + assert!(language_upper.is_ok()); +} + +#[test] +pub fn language_enum_to_tokens() { + let language_tokens = Language::ES + .into_token_stream() + .to_string() + .replace(" ", ""); // normalize the path. 
+ + assert!(language_tokens.contains("translatable::shared::misc::language::Language::ES")); +} + +#[test] +pub fn display_matches() { + assert_eq!(Language::ES.to_string(), "Spanish"); +} diff --git a/translatable/tests/unitary/mod.rs b/translatable/tests/unitary/mod.rs new file mode 100644 index 0000000..336c0f5 --- /dev/null +++ b/translatable/tests/unitary/mod.rs @@ -0,0 +1,6 @@ +pub mod collection_generation; +pub mod language_enum; +pub mod runtime_error; +pub mod templating; +pub mod translation_collection; +pub mod display_to_error_tokens; diff --git a/translatable/tests/unitary/runtime_error.rs b/translatable/tests/unitary/runtime_error.rs new file mode 100644 index 0000000..7cab73a --- /dev/null +++ b/translatable/tests/unitary/runtime_error.rs @@ -0,0 +1,14 @@ +use translatable::{Error, Language}; + +#[test] +pub fn runtime_error_outputs() { + assert_eq!( + Error::PathNotFound("path::to::translation".into()).cause(), + "The path 'path::to::translation' could not be found" + ); + + assert_eq!( + Error::LanguageNotAvailable(Language::ES, "path::to::translation".into()).cause(), + "The language 'ES' ('Spanish') is not available for the path 'path::to::translation'" + ) +} diff --git a/translatable/tests/unitary/templating.rs b/translatable/tests/unitary/templating.rs new file mode 100644 index 0000000..b633ce8 --- /dev/null +++ b/translatable/tests/unitary/templating.rs @@ -0,0 +1,67 @@ +use std::collections::HashMap; +use std::str::FromStr; + +use translatable_shared::misc::templating::FormatString; + +#[test] +pub fn does_not_replace_not_found() { + let result = FormatString::from_str("Hello {name}") + .expect("Format string to be valid.") + .replace_with(&HashMap::new()); + + assert_eq!(result, "Hello {name}"); +} + +#[test] +pub fn replaces_single_template() { + let result = FormatString::from_str("Hello {name}") + .expect("Format string to be valid.") + .replace_with(&HashMap::from([("name".into(), "Josh".into())])); + + assert_eq!(result, "Hello Josh"); +} + +#[test] +pub fn replaces_multiple_templates() { + let result = FormatString::from_str("Hello {name} how are you doing {day}?") + .expect("Format string to be valid.") + .replace_with(&HashMap::from([ + ("name".into(), "Josh".into()), + ("day".into(), "today".into()), + ])); + + assert_eq!(result, "Hello Josh how are you doing today?"); +} + +#[test] +pub fn replaces_mix_found_not_found() { + let result = FormatString::from_str("Hello {name} how are you doing {day}?") + .expect("Format string to be valid.") + .replace_with(&HashMap::from([("name".into(), "Josh".into())])); + + assert_eq!(result, "Hello Josh how are you doing {day}?"); +} + +#[test] +pub fn fails_unclosed_template() { + let result = FormatString::from_str("Hello {"); + + assert!(result.is_err()); +} + +#[test] +pub fn escapes_templates() { + let result = FormatString::from_str("You write escaped templates like {{ this }}.") + .expect("Format string to be valid.") + .replace_with(&HashMap::from([("this".into(), "not replaced".into())])); + + assert_eq!(result, "You write escaped templates like {{ this }}.") +} + +#[test] +pub fn gives_original_string() { + let result = FormatString::from_str("Hello {name} how are you doing {day}?") + .expect("Format string to be valid."); + + assert_eq!(result.original(), "Hello {name} how are you doing {day}?"); +} diff --git a/translatable/tests/unitary/translation_collection.rs b/translatable/tests/unitary/translation_collection.rs new file mode 100644 index 0000000..33740d8 --- /dev/null +++ 
b/translatable/tests/unitary/translation_collection.rs @@ -0,0 +1,57 @@ +use std::collections::HashMap; + +use toml_edit::DocumentMut; +use translatable::Language; +use translatable_shared::translations::collection::TranslationNodeCollection; +use translatable_shared::translations::node::TranslationNode; + +const FILE_1: &str = r#" +[greetings.formal] +es = "Hola" +en = "Hello" +"#; + +const FILE_2: &str = r#" +[greetings.informal] +es = "Que haces?" +en = "Wyd?" +"#; + +#[test] +pub fn loads_and_finds_collection() { + let collection = TranslationNodeCollection::new(HashMap::from([ + ( + "a".into(), + TranslationNode::try_from( + FILE_1 + .parse::() + .expect("TOML to be parsed correctly.") + .as_table(), + ) + .expect("TOML to follow the translation rules."), + ), + ( + "b".into(), + TranslationNode::try_from( + FILE_2 + .parse::() + .expect("TOML to be parsed correctly.") + .as_table(), + ) + .expect("TOML to follow the translation rules."), + ), + ])); + + let translation = collection + .find_path( + &"greetings.formal" + .split(".") + .collect(), + ) + .expect("Translation to be found.") + .get(&Language::ES) + .expect("Language to be available.") + .replace_with(&HashMap::new()); + + assert_eq!(translation, "Hola"); +} diff --git a/translatable/tests/unitary_tests.rs b/translatable/tests/unitary_tests.rs new file mode 100644 index 0000000..9f69699 --- /dev/null +++ b/translatable/tests/unitary_tests.rs @@ -0,0 +1,3 @@ +// Errors are not tested. + +mod unitary; diff --git a/translatable_proc/Cargo.toml b/translatable_proc/Cargo.toml new file mode 100644 index 0000000..4b8bb26 --- /dev/null +++ b/translatable_proc/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "translatable_proc" +description = "Proc macro crate for the translatable library." +repository = "https://github.com/FlakySL/translatable" +license = "GPL-3.0" +readme = "../README-MACROS.md" +version = "1.0.0" +edition = "2024" +authors = ["Esteve Autet ", "Chiko "] + +[lib] +proc-macro = true + +[dependencies] +proc-macro2 = "1.0.95" +quote = "1.0.38" +strum = { version = "0.27.1", features = ["derive"] } +syn = { version = "2.0.98", features = ["full"] } +thiserror = "2.0.11" +toml_edit = "0.23.1" +translatable_shared = { version = "1", path = "../translatable_shared/" } diff --git a/translatable_proc/src/data/config.rs b/translatable_proc/src/data/config.rs new file mode 100644 index 0000000..c11b367 --- /dev/null +++ b/translatable_proc/src/data/config.rs @@ -0,0 +1,265 @@ +//! User configuration module. +//! +//! This module defines the structures and +//! helper functions for parsing and loading +//! user configuration files. + +use std::env::var; +use std::fs::read_to_string; +use std::io::Error as IoError; +use std::sync::OnceLock; + +use strum::EnumString; +use thiserror::Error; +use toml_edit::{DocumentMut, TomlError}; + +/// Configuration error enum. +/// +/// Used for compile-time configuration +/// errors, such as errors while opening +/// files or parsing a file format. +/// +/// The errors from this enum are directly +/// shown in rust-analyzer. +#[derive(Error, Debug)] +pub enum ConfigError { + /// IO error derivations. + /// + /// Usually errors while interacting + /// with the file system. + /// + /// [`Display`] forwards the inner error [`Display`] + /// value with some prefix text. + /// + /// The enum implements [`From`] to + /// allow conversion from [`std::io::Error`]. + /// + /// **Parameters** + /// * `0` - The IO error derivation. 
+ /// + /// [`Display`]: std::fmt::Display + /// [`From`]: std::io::Error + #[error("IO error reading configuration: {0:#}")] + Io(#[from] IoError), + + /// TOML deserialization error derivations. + /// + /// The configuration file contents could + /// not be parsed as TOML. + /// + /// The error is formatted displaying + /// the file name hardcoded as `./translatable.toml` + /// and appended with the line and character. + /// + /// The enum implements [`From`] to + /// allow conversion from [`toml::de::Error`] + /// + /// **Parameters** + /// * `0` - The TOML deserialization error derivation. + /// + /// [`From`]: toml::de::Error + #[error( + "TOML parse error '{}'{}", + .0.message(), + .0.span() + .map(|l| format!(" in ./translatable.toml:{}:{}", l.start, l.end)) + .unwrap_or_else(|| "".into()) + )] + ParseToml(#[from] TomlError), + + /// Parse value error. + /// + /// There was an error while parsing + /// a specific configuration entry, + /// since these are mapped to enums in + /// most cases. + /// + /// The error has a custom format + /// displaying the key and value + /// that should have been parsed. + /// + /// **Parameters** + /// * `0` - The configuration key for which the entry + /// could not be parsed. + /// * `1` - The configuration value that couldn't be + /// parsed. + #[error("Couldn't parse configuration entry '{1}' for '{0}'")] + InvalidValue(String, String), +} + +/// Defines the search strategy for configuration files. +/// +/// Represents the possible values of the parsed `seek_mode` +/// field, which determine the order in which file paths +/// are considered when opening configuration files. +#[derive(Default, Clone, Copy, EnumString)] +pub enum SeekMode { + /// Alphabetical order (default) + #[default] + Alphabetical, + + /// Reverse alphabetical order + Unalphabetical, +} + +/// Strategy for resolving translation conflicts. +/// +/// This enum defines how overlapping translations +/// are handled when multiple sources provide values +/// for the same key. The selected strategy determines +/// whether newer translations replace existing ones or +/// if the first encountered translation is preserved. +#[derive(Default, Clone, Copy, EnumString)] +pub enum TranslationOverlap { + /// Last found translation overwrites previous ones (default) + #[default] + Overwrite, + + /// First found translation is preserved + Ignore, +} + +/// Main configuration structure for the translation system. +/// +/// Holds all the core parameters used to control how translation files are +/// located, processed, and how conflicts are resolved between overlapping +/// translations. +pub struct MacroConfig { + /// Path to the directory containing translation files. + /// + /// Specifies the base location where the system will search for + /// translation files. + /// + /// # Example + /// ```toml + /// path = "./locales" + /// ``` + path: String, + + /// File processing order strategy. + /// + /// Defines the order in which translation files are processed. + /// Default: alphabetical order. + seek_mode: SeekMode, + + /// Translation conflict resolution strategy. + /// + /// Determines the behavior when multiple files contain the same + /// translation key. + overlap: TranslationOverlap, +} + +impl MacroConfig { + /// Get reference to the configured locales path. + /// + /// **Returns** + /// The path to the directory where translation files are expected + /// to be located. + pub fn path(&self) -> &str { + &self.path + } + + /// Get the current seek mode strategy. 
+ /// + /// **Returns** + /// The configured strategy used to determine the order in which + /// translation files are processed. + pub fn seek_mode(&self) -> SeekMode { + self.seek_mode + } + + /// Get the current overlap resolution strategy. + /// + /// **Returns** + /// The configured strategy for resolving translation conflicts + /// when multiple files define the same key. + pub fn overlap(&self) -> TranslationOverlap { + self.overlap + } +} + +/// Global configuration cache. +/// +/// Stores the initialized [`MacroConfig`] instance, which holds the +/// configuration for the translation system. The [`OnceLock`] ensures the +/// configuration is initialized only once and can be safely accessed across +/// multiple threads after that initialization. +static TRANSLATABLE_CONFIG: OnceLock = OnceLock::new(); + +/// Load the global translation configuration. +/// +/// Initializes and returns a reference to the shared [`MacroConfig`] instance. +/// Configuration values are loaded in the following priority order: +/// environment variables override `translatable.toml`, and missing values fall +/// back to hardcoded defaults. +/// +/// The configuration is cached after the first successful load, and reused on +/// subsequent calls. +/// +/// **Returns** +/// A `Result` containing either: +/// * [`Ok(&MacroConfig)`] — The loaded configuration as a reference to the +/// cached macro configuration. +/// * [`Err(ConfigError)`] — An error because environment couldn't be read or +/// `translatable.toml` couldn't be read. +/// +/// [`Ok(&MacroConfig)`]: MacroConfig +/// [`Err(ConfigError)`]: ConfigError +pub fn load_config() -> Result<&'static MacroConfig, ConfigError> { + if let Some(config) = TRANSLATABLE_CONFIG.get() { + return Ok(config); + } + + let toml_content = read_to_string("./translatable.toml") + .unwrap_or_default() + .parse::()?; + + macro_rules! config_value { + ($env_var:expr, $key:expr, $default:expr) => { + var($env_var) + .ok() + .or_else(|| { + toml_content + .get($key) + .and_then(|v| v.as_str()) + .map(|v| v.to_string()) + }) + .unwrap_or_else(|| $default.into()) + }; + + (parse($env_var:expr, $key:expr, $default:expr)) => {{ + let value = var($env_var) + .ok() + .or_else(|| { + toml_content + .get($key) + .and_then(|v| v.as_str()) + .map(|v| v.to_string()) + }); + + if let Some(value) = value { + value + .parse() + .map_err(|_| ConfigError::InvalidValue($key.into(), value.into())) + } else { + Ok($default) + } + }}; + } + + let config = MacroConfig { + path: config_value!("TRANSLATABLE_LOCALES_PATH", "path", "./translations"), + overlap: config_value!(parse( + "TRANSLATABLE_OVERLAP", + "overlap", + TranslationOverlap::Ignore + ))?, + seek_mode: config_value!(parse( + "TRANSLATABLE_SEEK_MODE", + "seek_mode", + SeekMode::Alphabetical + ))?, + }; + + Ok(TRANSLATABLE_CONFIG.get_or_init(|| config)) +} diff --git a/translatable_proc/src/data/mod.rs b/translatable_proc/src/data/mod.rs new file mode 100644 index 0000000..af291da --- /dev/null +++ b/translatable_proc/src/data/mod.rs @@ -0,0 +1,13 @@ +//! External data obtention module. +//! +//! This module contains the sub-modules +//! to obtain the translation data and +//! related configuration. +//! +//! The only thing that should possibly +//! be used outside is the [`translations`] +//! module, as the config is mostly +//! to read the translations from the files. 
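For orientation, `load_config` above resolves each setting in priority order: the `TRANSLATABLE_LOCALES_PATH`, `TRANSLATABLE_SEEK_MODE` and `TRANSLATABLE_OVERLAP` environment variables first, then the matching key in `./translatable.toml`, then a hardcoded default. A minimal configuration file consistent with those keys could look like the sketch below; the exact spelling accepted for the enum values depends on strum's `EnumString` derive, so treat the quoted values as assumptions rather than documented syntax.

```toml
# Hypothetical ./translatable.toml; keys mirror MacroConfig above.
path = "./translations"      # overridden by TRANSLATABLE_LOCALES_PATH
seek_mode = "Alphabetical"   # or "Unalphabetical"; overridden by TRANSLATABLE_SEEK_MODE
overlap = "Ignore"           # or "Overwrite"; overridden by TRANSLATABLE_OVERLAP
```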
+ +pub mod config; +pub mod translations; diff --git a/translatable_proc/src/data/translations.rs b/translatable_proc/src/data/translations.rs new file mode 100644 index 0000000..6998af4 --- /dev/null +++ b/translatable_proc/src/data/translations.rs @@ -0,0 +1,219 @@ +//! Translation obtention module. +//! +//! This module is used to obtain +//! translations from their respective files. +//! +//! This module uses `crate::data::config` to +//! to load the translations and order them +//! based on the configuration provided +//! by the module. + +use std::fs::{read_dir, read_to_string}; +use std::io::Error as IoError; +use std::sync::OnceLock; + +use thiserror::Error; +use toml_edit::{DocumentMut, TomlError}; +use translatable_shared::translations::collection::TranslationNodeCollection; +use translatable_shared::translations::node::{TranslationNode, TranslationNodeError}; + +use super::config::{ConfigError, SeekMode, TranslationOverlap, load_config}; + +/// Translation retrieval error enum. +/// +/// Represents errors that can occur during compile-time translation +/// retrieval. This includes I/O issues, configuration loading failures, +/// TOML deserialization errors, and translation node parsing errors. +/// +/// The errors from this enum are directly surfaced in `rust-analyzer` +/// to assist with early detection and debugging. +#[derive(Error, Debug)] +pub enum TranslationDataError { + /// I/O error derivation. + /// + /// Raised when an I/O operation fails during translation + /// retrieval, typically caused by filesystem-level issues. + /// + /// [`Display`] will forward the inner [`std::io::Error`] + /// representation prefixed with additional context. + /// + /// The enum implements [`From`] to allow + /// automatic conversion from `IoError`. + /// + /// **Parameters** + /// * `0` — The underlying I/O error. + /// + /// [`From`]: std::io::Error + /// [`Display`]: std::fmt::Display + #[error("IO Error: \"{0:#}\". Please check the specified path in your configuration file.")] + Io(#[from] IoError), + + /// Configuration loading failure. + /// + /// Raised when the translation configuration cannot be loaded + /// successfully, typically due to invalid values or missing + /// configuration data. + /// + /// [`Display`] will forward the inner [`ConfigError`] message. + /// + /// The enum implements [`From`] to allow automatic + /// conversion from the underlying error. + /// + /// **Parameters** + /// * `0` — The configuration error encountered. + /// + /// [`Display`]: std::fmt::Display + #[error("{0:#}")] + LoadConfig(#[from] ConfigError), + + /// Invalid Unicode path. + /// + /// Raised when a filesystem path cannot be processed due to + /// invalid Unicode characters. + /// + /// This error signals that the translation system cannot proceed + /// with a non-Unicode-compatible path. + #[error("Couldn't open path, found invalid unicode characters")] + InvalidUnicode, + + /// TOML deserialization failure. + /// + /// Raised when the contents of a translation file cannot be + /// parsed as valid TOML data. + /// + /// The formatted error message includes the deserialization reason, + /// the location within the file (if available), and the file path. + /// + /// **Parameters** + /// * `0` — The [`toml::de::Error`] carrying the underlying deserialization + /// error. + /// * `1` — The file path of the TOML file being parsed. 
+ #[error( + "TOML Deserialization error '{reason}' {span} in {1}", + reason = _0.message(), + span = _0 + .span() + .map(|range| format!("on {}:{}", range.start, range.end)) + .unwrap_or_else(String::new) + )] + ParseToml(TomlError, String), + + /// Translation node parsing failure. + /// + /// Raised when the translation system cannot correctly parse + /// a translation node, typically due to invalid formatting + /// or missing expected data. + /// + /// The enum implements [`From`] for + /// seamless conversion. + /// + /// **Parameters** + /// * `0` — The translation node error encountered. + #[error("{0:#}")] + Node(#[from] TranslationNodeError), +} + +/// Global thread-safe cache for loaded translations. +/// +/// Stores all parsed translations in memory after the first +/// successful load. Uses [`OnceLock`] to ensure that the translation +/// data is initialized only once in a thread-safe manner. +static TRANSLATIONS: OnceLock = OnceLock::new(); + +/// Recursively walks the target directory to discover all translation files. +/// +/// Uses an iterative traversal strategy to avoid recursion depth limitations. +/// Paths are returned as [`String`] values, ready for processing. +/// +/// Any filesystem errors, invalid paths, or read failures are reported +/// via `TranslationDataError`. +/// +/// **Arguments** +/// * `path` — Root directory to scan for translation files. +/// +/// **Returns** +/// A `Result` containing either: +/// * [`Ok(Vec)`] — A flat list of discovered file paths. +/// * [`Err(TranslationDataError)`] — If traversal fails at any point. +/// +/// [`Ok(Vec)`]: std::vec::Vec +/// [`Err(TranslationDataError)`]: TranslationDataError +fn walk_dir(path: &str) -> Result, TranslationDataError> { + let mut stack = vec![path.to_string()]; + let mut result = Vec::new(); + + while let Some(current_path) = stack.pop() { + let directory = read_dir(¤t_path)?.collect::, _>>()?; + + for entry in directory { + let path = entry.path(); + if path.is_dir() { + stack.push( + path.to_str() + .ok_or(TranslationDataError::InvalidUnicode)? + .to_string(), + ); + } else { + result.push( + path.to_string_lossy() + .to_string(), + ); + } + } + } + + Ok(result) +} + +/// Loads and caches translations from the configured directory. +/// +/// On the first invocation, this function: +/// - Reads the translation directory path from the loaded configuration. +/// - Recursively walks the directory to discover all translation files. +/// - Sorts the file list according to the configured `seek_mode`. +/// - Parses each file and validates its content. +/// +/// Once successfully loaded, the parsed translations are stored +/// in a global [`OnceLock`]-backed cache and reused for the lifetime +/// of the process. +/// +/// This function will return a reference to the cached translations +/// on every subsequent call. +/// +/// **Returns** +/// A [`Result`] containing either: +/// * [`Ok(&TranslationNodeCollection)`] — The parsed and cached translations. +/// * [`Err(TranslationDataError)`] — An error because any of the translation +/// files couldn't be read. 
+/// +/// [`Ok(&TranslationNodeCollection)`]: TranslationNodeCollection +/// [`Err(TranslationDataError)`]: TranslationDataError +pub fn load_translations() -> Result<&'static TranslationNodeCollection, TranslationDataError> { + if let Some(translations) = TRANSLATIONS.get() { + return Ok(translations); + } + + let config = load_config()?; + let mut translation_paths = walk_dir(config.path())?; + + // Apply sorting based on configuration + translation_paths.sort_by_key(|path| path.to_lowercase()); + if matches!(config.seek_mode(), SeekMode::Unalphabetical) + || matches!(config.overlap(), TranslationOverlap::Overwrite) + { + translation_paths.reverse(); + } + + let translations = translation_paths + .iter() + .map(|path| { + let table = read_to_string(path)? + .parse::() + .map_err(|err| TranslationDataError::ParseToml(err, path.clone()))?; + + Ok((path.clone(), TranslationNode::try_from(table.as_table())?)) + }) + .collect::>()?; + + Ok(TRANSLATIONS.get_or_init(|| translations)) +} diff --git a/translatable_proc/src/lib.rs b/translatable_proc/src/lib.rs new file mode 100644 index 0000000..547b82f --- /dev/null +++ b/translatable_proc/src/lib.rs @@ -0,0 +1,107 @@ +//! Macro declarations for the `translatable` crate. +//! +//! This crate shouldn't be used by itself, +//! since the macros generate code with the context +//! of the `translatable` library. +//! +//! The `translatable` library re-exports the macros +//! declared in this crate. + +#![warn(missing_docs)] + +use macro_generation::context::context_macro; +use macro_generation::translation::translation_macro; +use macro_input::context::{ContextMacroArgs, ContextMacroStruct}; +use macro_input::translation::TranslationMacroArgs; +use proc_macro::TokenStream; +use syn::parse_macro_input; + +mod data; +mod macro_generation; +mod macro_input; + +/// # Translation obtention macro. +/// +/// This macro generates the way to obtain a translation +/// from the translation files in the directory defined +/// in the `translatable.toml` file. +/// +/// **Parameters** +/// * `language` - A string literal for static inference or an instance of +/// `translatable::Language` for dynamic inference. +/// * `path` - A pat prefixed with `static` for static inference or a `Vec` +/// for dynamic inference. +/// * `replacements` - Arguments similar to python's `kwargs` for the +/// translation replacements. +/// +/// This macro provides optimizations depending on the dynamism +/// of the parameters while calling the macro. +/// +/// The optimizations are described the following way +/// - If path is static, no runtime lookup will be required +/// - If the path is dynamic, the file structure will be hardcoded. +/// +/// - If the language is static, the validation will be reported by +/// `rust-analyzer`. +/// - If the language is dynamic the validation will be reported in runtime in +/// the `Err` branch. +/// +/// - If both are dynamic a single [`String`] will be generated. +/// +/// Independently of any other parameter, the `replacements` parameter +/// is always dynamic (context based). +/// +/// You can shorten it's invocation if a similar identifier is on scope, +/// for example `x = x` can be shortened with `x`. +/// +/// Replacement parameters are not validated, if a parameter exists it will be +/// replaced otherwise it won't. +/// +/// **Returns** +/// A `Result` containing either: +/// * `Ok(String)` - If the invocation is successful. +/// * `Err(translatable::Error)` - If the invocation fails with a runtime error. 
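Read together with the integration tests earlier in this diff, the documentation above implies the following call shapes. The `greetings` paths are the fixture entries used by the test suite, and the return types follow what the tests exercise: the fully static form is used without unwrapping, while any dynamic argument goes through a `Result`.

```rust
use translatable::{translation, Language};

fn demo() -> Result<(), translatable::Error> {
    // Fully static: string literal language plus a `static` path, resolved at expansion time.
    let formal: String = translation!("es", static greetings::formal);

    // Dynamic language expression, static path: validated at runtime, hence the `?`.
    let formal_dyn = translation!(Language::ES, static greetings::formal)?;

    // Dynamic path expression (any collected Vec), static language.
    let formal_path = translation!("es", "greetings.formal".split(".").collect())?;

    // Template replacements; `user` is shorthand for `user = user`.
    let user = "Juan";
    let informal: String = translation!("es", static greetings::informal, user);

    let _ = (formal, formal_dyn, formal_path, informal);
    Ok(())
}
```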
+#[proc_macro] +pub fn translation(input: TokenStream) -> TokenStream { + translation_macro(parse_macro_input!(input as TranslationMacroArgs).into()).into() +} + +/// # Translation context macro +/// +/// This macro converts a struct into a translation context. +/// +/// By definition that struct shouldn't be used for anything else, +/// but nothing stops you from doing so. +/// +/// This macro applies a rule to the struct. All fields must be +/// a `String` or `&str`. +/// +/// You can configure some parameters as a punctuated [`MetaNameValue`], +/// these are +/// - `base_path`: A path that gets prepended to all fields. +/// - `fallback_language`: A language that must be available for all +/// paths and changes the return type of the `load_translations` method. +/// +/// All the fields on the struct now point to paths in your translation +/// files, you can extend these paths applying the `#[path()]` attribute +/// with a [`TranslationPath`]. Otherwise the path will be appended as +/// the field identifier. +/// +/// The field and struct visibility are kept as original. +/// +/// This macro also generates a method called `load_translations` dynamically +/// that loads all translations and returns an instance of the struct, +/// optionally wrapped on a result depending on the `fallback_language` +/// parameter value. +/// +/// [`MetaNameValue`]: syn::MetaNameValue +/// [`TranslationPath`]: macro_input::utils::translation_path::TranslationPath +#[proc_macro_attribute] +pub fn translation_context(attr: TokenStream, item: TokenStream) -> TokenStream { + context_macro( + parse_macro_input!(attr as ContextMacroArgs), + parse_macro_input!(item as ContextMacroStruct), + ) + .into() +} diff --git a/translatable_proc/src/macro_generation/context.rs b/translatable_proc/src/macro_generation/context.rs new file mode 100644 index 0000000..d78e6f6 --- /dev/null +++ b/translatable_proc/src/macro_generation/context.rs @@ -0,0 +1,178 @@ +//! [`#\[translation_context\]`] macro output module. +//! +//! This module contains the required for +//! the generation of the [`#\[translation_context\]`] macro tokens +//! with intrinsics from `macro_input::context`. +//! +//! [`#\[translation_context\]`]: crate::translation_context + +use proc_macro2::TokenStream as TokenStream2; +use quote::{ToTokens, quote}; +use thiserror::Error; +use translatable_shared::handle_macro_result; +use translatable_shared::macros::collections::map_to_tokens; + +use crate::data::translations::load_translations; +use crate::macro_input::context::{ContextMacroArgs, ContextMacroStruct}; + +/// Macro compile-time translation resolution error. +/// +/// Represents errors that can occur while compiling the +/// [`#\[translation_context\]`] macro. This includes cases where a translation +/// path cannot be found or fallback is not available for all the translations +/// in the context. +/// +/// These errors are reported at compile-time by `rust-analyzer` +/// for immediate feedback while invoking the [`#\[translation_context\]`] +/// macro. +/// +/// [`#\[translation_context\]`]: crate::translation_context +#[derive(Error, Debug)] +enum MacroCompileError { + /// The requested translation path could not be found. + /// + /// **Parameters** + /// * `0` — The translation path, displayed in `::` notation. + #[error("A translation with the path '{0}' could not be found")] + TranslationNotFound(String), + + /// A fallback is not available for a specified translation path. 
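As a usage sketch of the attribute documented above: the struct below points at the same `greetings` entries the test suite uses, and relies on the re-export mentioned in the crate docs. The exact token forms accepted for `base_path` and `#[path(...)]`, and the concrete type of the `replacements` argument, are assumptions based on the parsing and generation code later in this diff rather than documented syntax.

```rust
use std::collections::HashMap;
use translatable::{translation_context, Language};

// Hypothetical translation context over the test suite's `greetings` entries.
#[translation_context(base_path = greetings, fallback_language = "es")]
pub struct Greetings {
    // No #[path(...)]: the field identifier is appended to base_path (greetings::formal).
    pub formal: String,
    // Explicit #[path(...)]: the given path extends base_path (greetings::informal).
    #[path(informal)]
    pub informal: String,
}

fn demo() {
    // With fallback_language set, load_translations returns Self instead of a Result.
    let replacements: HashMap<String, String> = HashMap::new();
    let greetings = Greetings::load_translations(Language::ES, &replacements);
    println!("{}", greetings.formal);
}
```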
+ #[error("One of the translations doesn't have the fallback language available")] + FallbackNotAvailable, + + /// One of the fields type is not a &str or String. + #[error("Only String' and '&str' is allowed for translation contexts")] + TypeNotAllowed, +} + +/// [`#\[translation_context\]`] macro output generation. +/// +/// Expands into a struct that implements structured translation +/// loading. +/// +/// If there is a fallback language configured, this is checked +/// with all the paths and then the `load_translations` generated +/// method will return the same structure instead of a Result. +/// +/// **Arguments** +/// * `macro_args` - The parsed arguments for the macro invocation. +/// * `macro_input` - The parsed macro tokens themselves. +/// +/// **Returns** +/// A TokenStream representing the implementation. +/// +/// [`#\[translation_context\]`]: crate::translation_context +pub fn context_macro( + macro_args: ContextMacroArgs, + macro_input: ContextMacroStruct, +) -> TokenStream2 { + let translations = handle_macro_result!(out load_translations()); + let base_path = macro_args.base_path(); + + let struct_pub = macro_input.visibility(); + let struct_ident = macro_input.ident(); + + let struct_fields = handle_macro_result!(out + macro_input + .fields() + .iter() + .map(|field| { + let field_ty = field.ty().to_token_stream().to_string(); + if matches!(field_ty.as_str(), "String" | "&str") { + Ok(field) + } else { + Err(MacroCompileError::TypeNotAllowed) + } + }) + .collect::, _>>() + ); + + let loadable_translations = handle_macro_result!(out + macro_input + .fields() + .iter() + .map(|field| { + let path_segments = base_path + .merge(&field.path()); + + let path_segments_display = path_segments + .join("::"); + + let translation = translations + .find_path(&path_segments) + .ok_or(MacroCompileError::TranslationNotFound(path_segments.join("::")))?; + + let translation_tokens = map_to_tokens(translation); + let ident = field.ident(); + + let handler = if let Some(fallback_language) = macro_args.fallback_language() { + if let Some(translation) = translation.get(&fallback_language) { + quote! { + .unwrap_or(&#translation) + } + } else { + return Err(MacroCompileError::FallbackNotAvailable); + } + } else { + quote! { + .ok_or_else(|| translatable::Error::LanguageNotAvailable( + language.clone(), + #path_segments_display.to_string() + ))? + } + }; + + Ok(quote! { + #ident: #translation_tokens + .get(&language) + #handler + .replace_with(&replacements) + }) + }) + .collect::, MacroCompileError>>() + ); + + let is_lang_some = macro_args + .fallback_language() + .is_some(); + + let load_ret_ty = if is_lang_some { + quote! { Self } + } else { + quote! { Result } + }; + + let load_ret_stmnt = if is_lang_some { + quote! { + Self { + #(#loadable_translations),* + } + } + } else { + quote! { + Ok(Self { + #(#loadable_translations),* + }) + } + }; + + quote! { + #struct_pub struct #struct_ident { + #(#struct_fields),* + } + + impl #struct_ident { + #struct_pub fn load_translations( + language: translatable::Language, + replacements: &std::collections::HashMap + ) -> #load_ret_ty { + let replacements = replacements + .iter() + .map(|(key, value)| (key.to_string(), value.to_string())) + .collect::>(); + + #load_ret_stmnt + } + } + } +} diff --git a/translatable_proc/src/macro_generation/mod.rs b/translatable_proc/src/macro_generation/mod.rs new file mode 100644 index 0000000..eeddfdf --- /dev/null +++ b/translatable_proc/src/macro_generation/mod.rs @@ -0,0 +1,14 @@ +//! Macro generation module. 
+//! +//! This module contains the sub-modules +//! to generate any kind of macro, in the +//! `lib.rs` file, a call to any of this +//! modules may be issued with intrinsics +//! from the [`macro_input`] module. +//! +//! Each module represents a single macro. +//! +//! [`macro_input`]: crate::macro_input + +pub mod context; +pub mod translation; diff --git a/translatable_proc/src/macro_generation/translation.rs b/translatable_proc/src/macro_generation/translation.rs new file mode 100644 index 0000000..21263a5 --- /dev/null +++ b/translatable_proc/src/macro_generation/translation.rs @@ -0,0 +1,163 @@ +//! [`translation!()`] macro output module. +//! +//! This module contains the required for +//! the generation of the [`translation!()`] macro tokens +//! with intrinsics from [`macro_input::translation`]. +//! +//! [`translation!()`]: crate::translation +//! [`macro_input::translation`]: super::super::macro_input::translation + +use proc_macro2::TokenStream as TokenStream2; +use quote::{ToTokens, quote}; +use thiserror::Error; +use translatable_shared::handle_macro_result; +use translatable_shared::macros::collections::{map_to_tokens, map_transform_to_tokens}; +use translatable_shared::misc::language::Language; + +use crate::data::translations::load_translations; +use crate::macro_input::translation::TranslationMacroArgs; +use crate::macro_input::utils::input_type::InputType; + +/// Macro compile-time translation resolution error. +/// +/// Represents errors that can occur while compiling the [`translation!()`] +/// macro. This includes cases where a translation path cannot be found or +/// a language variant is unavailable at the specified path. +/// +/// These errors are reported at compile-time by `rust-analyzer` +/// for immediate feedback while invoking the [`translation!()`] macro. +/// +/// [`translation!()`]: crate::translation +#[derive(Error, Debug)] +enum MacroCompileError { + /// The requested translation path could not be found. + /// + /// **Parameters** + /// * `0` — The translation path, displayed in `::` notation. + #[error("The path '{0}' could not be found")] + PathNotFound(String), + + /// The requested language is not available for the provided translation + /// path. + /// + /// **Parameters** + /// * `0` — The requested `Language`. + /// * `1` — The translation path where the language was expected. + #[error("The language '{0:?}' ('{0:#}') is not available for the path '{1}'")] + LanguageNotAvailable(Language, String), +} + +/// [`translation!()`] macro output generation. +/// +/// Expands into code that resolves a translation string based on the input +/// language and translation path, performing placeholder substitutions +/// if applicable. +/// +/// If the language and path are fully static, the translation will be resolved +/// during macro expansion. Otherwise, the generated code will include runtime +/// resolution logic. +/// +/// If the path or language is invalid at compile time, an appropriate +/// `MacroCompileError` will be reported. +/// +/// **Arguments** +/// * `input` — Structured arguments defining the translation path, language, +/// and any placeholder replacements obtained from [`macro_input::translation`]. +/// +/// **Returns** +/// Generated `TokenStream2` representing the resolved translation string or +/// runtime lookup logic. 
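In caller terms, the split described above means static mistakes surface as compile errors, while dynamic arguments defer validation to the generated runtime lookup, which reports failures through `translatable::Error`. The sketch below handles those runtime variants; the variant shapes and the `cause()` accessor are the ones exercised by the `runtime_error` unit test earlier in this diff, and `greetings::formal` is again the test fixture path.

```rust
use translatable::{translation, Error, Language};

// Dynamic language: the macro expands to a runtime lookup returning a Result,
// so both error variants can be handled at the call site.
fn greet(language: Language) -> String {
    match translation!(language, static greetings::formal) {
        Ok(text) => text,
        Err(Error::LanguageNotAvailable(_, path)) => format!("no translation for '{path}'"),
        Err(other) => other.cause().to_string(),
    }
}
```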
+/// +/// [`macro_input::translation`]: super::super::macro_input::translation +/// [`translation!()`]: crate::translation +pub fn translation_macro(input: TranslationMacroArgs) -> TokenStream2 { + let translations = handle_macro_result!(load_translations()); + + let template_replacements = map_transform_to_tokens( + input.replacements(), + |key, value| quote! { (stringify!(#key).to_string(), #value.to_string()) }, + ); + + if let InputType::Static(language) = input.language() { + if let InputType::Static(path) = input.path() { + let path_segments = path.segments(); + let static_path_display = path_segments.join("::"); + + let translation_object = translations + .find_path(path_segments) + .ok_or_else(|| MacroCompileError::PathNotFound(static_path_display.clone())); + + let translation = handle_macro_result!( + handle_macro_result!(translation_object) + .get(language) + .ok_or_else(|| { + MacroCompileError::LanguageNotAvailable( + language.clone(), + static_path_display.clone(), + ) + }) + ); + + return quote! { + #translation + .replace_with(&#template_replacements) + }; + } + } + + let language = match input.language() { + InputType::Static(language) => language + .clone() + .to_token_stream(), + InputType::Dynamic(language) => quote! { + translatable::shared::misc::language::Language::from(#language) + }, + }; + + let translation_object = match input.path() { + InputType::Static(path) => { + let path_segments = path.segments(); + let static_path_display = path_segments.join("::"); + + let translation_object = translations + .find_path(path_segments) + .ok_or_else(|| MacroCompileError::PathNotFound(static_path_display.clone())); + + let translations_tokens = map_to_tokens(handle_macro_result!(translation_object)); + + quote! { + #[doc(hidden)] + let path: Vec<_> = vec![#(#path_segments.to_string()),*]; + + #translations_tokens + } + }, + + InputType::Dynamic(path) => { + let translations_tokens = translations.to_token_stream(); + + quote! { + #[doc(hidden)] + let path: Vec<_> = #path; + + #translations_tokens + .find_path(&path) + .ok_or_else(|| translatable::Error::PathNotFound(path.join("::")))? + } + }, + }; + + quote! { + (|| -> Result { + std::result::Result::Ok({ + #[doc(hidden)] + let language = #language; + + #translation_object + .get(&language) + .ok_or_else(|| translatable::Error::LanguageNotAvailable(language, path.join("::")))? + .replace_with(&#template_replacements) + }) + })() + } +} diff --git a/translatable_proc/src/macro_input/context.rs b/translatable_proc/src/macro_input/context.rs new file mode 100644 index 0000000..f36e6a2 --- /dev/null +++ b/translatable_proc/src/macro_input/context.rs @@ -0,0 +1,393 @@ +//! [`#\[translation_context\]`] input parsing module. +//! +//! This module declares a structure that implements +//! [`Parse`] for it to be used with [`parse_macro_input`]. +//! +//! [`#\[translation_context\]`]: crate::translation_context +//! [`parse_macro_input`]: syn::parse_macro_input + +use std::str::FromStr; + +use proc_macro2::TokenStream; +use quote::{ToTokens, TokenStreamExt, quote}; +use syn::parse::{Parse, ParseStream}; +use syn::{ + Error as SynError, + Expr, + ExprLit, + Field, + Ident, + ItemStruct, + Lit, + MetaNameValue, + Result as SynResult, + Token, + Type, + Visibility, + parse2, +}; +use thiserror::Error; +use translatable_shared::macros::errors::IntoCompileError; +use translatable_shared::misc::language::Language; + +use super::utils::translation_path::TranslationPath; + +/// Parse error for [`ContextMacroArgs`] and [`ContextMacroStruct`]. 
+/// +/// Represents errors that can occur while parsing the +/// [`#\[translation_context\]`] macro input. This error is only used while +/// parsing compile-time input, as runtime input is validated in runtime. +/// +/// [`#\[translation_context\]`]: crate::translation_context +#[derive(Error, Debug)] +enum MacroArgsError { + /// Invalid field type error. + /// + /// Usually from using an invalid struct type, such + /// as tuple or unit. + #[error("Only named fields are allowed")] + InvalidFieldType, + + /// Invalid language parameter for fallback. + /// + /// Fallback only supports static language, same + /// as the [`translation!()`] macro static language + /// parameter. + /// + /// [`translation!()`]: crate::translation + #[error("Only a language literal is allowed")] + OnlyLangLiteralAllowed, + + /// Invalid ISO-639-1 language literal. + /// + /// Language literals must be ISO-639-1 compliant. + /// + /// **Parameters** + /// * `0` - The invalid language literal. + #[error("Invalid language literal '{0}' is not a valid ISO-639-1 language")] + InvalidLanguageLiteral(String), + + /// Invalid macro parameter. + /// + /// **Parameters** + /// * `0` - The unknown parameter key. + #[error("Unknown key '{0}', allowed keys are 'fallback_language' and 'base_path'")] + UnknownKey(String), +} + +/// The arguments passed to the context macro. +/// +/// These arguments are passed literally as a punctuated +/// [`MetaNameValue`] separated by `Token![,]`. +/// +/// These act as configuration overrides for each context +/// struct. +pub struct ContextMacroArgs { + /// Field base path. + /// + /// A base path to be prepended to all + /// field paths. + base_path: TranslationPath, + + /// Context fallback language. + /// + /// The fallback should be available + /// in all the specified paths, removes + /// the need to handle errors if a language + /// is not available for a specific translation. + fallback_language: Option, +} + +/// A field inside a translation context struct. +/// +/// Fields are parsed independently and moved +/// to a [`ContextMacroStruct`], this contains +/// data about how to load a translation. +pub struct ContextMacroField { + /// The translation path. + /// + /// This path is appended to the + /// path passed to the struct configuration. + path: Option, + + /// The field visibility. + /// + /// This gets literally rendered as is. + visibility: Visibility, + + /// The field name. + /// + /// This gets literally rendered as is. + ident: Ident, + + /// The field type. + /// + /// Validated but rendered as is. + ty: Type, +} + +/// Translation context struct data. +/// +/// This parses the struct necessary data +/// to re-generate it preparated to load +/// translations, loading [`ContextMacroField`]s +/// too. +pub struct ContextMacroStruct { + /// The struct visibility. + /// + /// This gets literally rendered as is. + visibility: Visibility, + + /// The struct name. + /// + /// This gets literally rendered as is. + ident: Ident, + + /// The struct fields. + /// + /// Get rendered as specified in the + /// [`ContextMacroField::to_tokens`] implementation. + fields: Vec, +} + +impl ContextMacroArgs { + /// Base path getter. + /// + /// **Returns** + /// A reference to the `base_path`. + #[inline] + #[allow(unused)] + pub fn base_path(&self) -> &TranslationPath { + &self.base_path + } + + /// Fallback language getter. + /// + /// **Returns** + /// A reference o the `fallback_language`. 
+ #[inline] + #[allow(unused)] + pub fn fallback_language(&self) -> Option { + self.fallback_language + .clone() + } +} + +/// [`Parse`] implementation for [`ContextMacroArgs`]. +/// +/// This implementation is to be used within [`parse_macro_input!()`] +/// and parses the macro arguments to modify the macro behavior. +/// +/// [`parse_macro_input!()`]: syn::parse_macro_input +impl Parse for ContextMacroArgs { + fn parse(input: ParseStream) -> SynResult { + let values = input.parse_terminated(MetaNameValue::parse, Token![,])?; + let mut base_path = None; + let mut fallback_language = None; + + for kvp in values { + let key = kvp + .path + .to_token_stream() + .to_string(); + + match key.as_str() { + "base_path" => { + base_path = Some(parse2::( + kvp.value + .to_token_stream(), + )?); + }, + + "fallback_language" => { + if let Expr::Lit(ExprLit { lit: Lit::Str(lit), .. }) = kvp.value { + fallback_language = Some( + Language::from_str( + lit.value() + .as_str(), + ) + .map_err(|_| { + MacroArgsError::InvalidLanguageLiteral(lit.value()) + .to_syn_error(lit) + })?, + ); + } else { + return Err(MacroArgsError::OnlyLangLiteralAllowed.to_syn_error(kvp.value)); + } + }, + + key => { + return Err(MacroArgsError::UnknownKey(key.to_string()).to_syn_error(kvp.path)); + }, + } + } + + let base_path = base_path.unwrap_or_else(|| TranslationPath::default()); + + Ok(Self { base_path, fallback_language }) + } +} + +impl ContextMacroField { + /// Path getter. + /// + /// The path specified in the attribute + /// otherwise a path with a single segment + /// as the attribute ident. Alternative lazily + /// evaluated. + /// + /// **Returns** + /// The corresponding translation path for the field. + #[inline] + #[allow(unused)] + pub fn path(&self) -> TranslationPath { + self.path + .clone() + .unwrap_or_else(|| { + TranslationPath::new( + vec![ + self.ident + .to_string(), + ], + self.ident + .span(), + ) + }) + } + + /// Visibility getter. + /// + /// **Returns** + /// A reference to this field's visibility. + #[inline] + #[allow(unused)] + pub fn visibility(&self) -> &Visibility { + &self.visibility + } + + /// Identifier getter. + /// + /// **Returns** + /// A reference to this field's identifier. + #[inline] + #[allow(unused)] + pub fn ident(&self) -> &Ident { + &self.ident + } + + /// Type getter. + /// + /// **Returns** + /// A reference to this field's type. + #[inline] + #[allow(unused)] + pub fn ty(&self) -> &Type { + &self.ty + } +} + +/// [`ToTokens`] implementation for [`ContextMacroField`]. +/// +/// This implementation is used to convert the +/// data stored in this struct to the tokens +/// it represnets. +impl ToTokens for ContextMacroField { + fn to_tokens(&self, tokens: &mut TokenStream) { + let visibility = self.visibility(); + let ident = self.ident(); + let ty = self.ty(); + + tokens.append_all(quote! { + #visibility #ident: #ty + }); + } +} + +/// [`TryFrom`] implementation for [`ContextMacroField`]. +/// +/// This implementation is used to parse +/// the custom metadata from a struct field. 
+impl TryFrom for ContextMacroField { + type Error = SynError; + + fn try_from(field: Field) -> Result { + let path = field + .attrs + .iter() + .find(|field| { + field + .path() + .is_ident("path") + }) + .map(|field| field.parse_args::()) + .transpose()?; + + let is_pub = field + .vis + .clone(); + + let ident = field + .ident + .clone() + .ok_or(MacroArgsError::InvalidFieldType.to_syn_error(&field))?; + + let ty = field.ty; + + Ok(Self { path, visibility: is_pub, ident, ty }) + } +} + +impl ContextMacroStruct { + /// Visibility getter. + /// + /// **Returns** + /// A reference to this struct's visibility. + #[inline] + #[allow(unused)] + pub fn visibility(&self) -> &Visibility { + &self.visibility + } + + /// Identifier getter. + /// + /// **Returns** + /// A reference o this idenitifer visibility. + #[inline] + #[allow(unused)] + pub fn ident(&self) -> &Ident { + &self.ident + } + + /// Fields getter. + /// + /// **Returns** + /// A slice to all the fields in this struct. + #[inline] + #[allow(unused)] + pub fn fields(&self) -> &[ContextMacroField] { + &self.fields + } +} + +/// [`Parse`] implementation for [`ContextMacroStruct`]. +/// +/// This implementation is used to parse the struct +/// trough [`parse_macro_input!()`]. +/// +/// [`parse_macro_input!()`]: syn::parse_macro_input +impl Parse for ContextMacroStruct { + fn parse(input: ParseStream) -> SynResult { + let structure = input.parse::()?; + + let is_pub = structure.vis; + let ident = structure.ident; + + let fields = structure + .fields + .into_iter() + .map(|field| ContextMacroField::try_from(field)) + .collect::, _>>()?; + + Ok(Self { visibility: is_pub, ident, fields }) + } +} diff --git a/translatable_proc/src/macro_input/mod.rs b/translatable_proc/src/macro_input/mod.rs new file mode 100644 index 0000000..1ff13d5 --- /dev/null +++ b/translatable_proc/src/macro_input/mod.rs @@ -0,0 +1,15 @@ +//! Macro input parsing module. +//! +//! This module contains the sub-modules +//! to parse macro input for specific +//! macros, the parsed input is usually +//! fed to [`macro_generation`] as intrinsics. +//! +//! Each sub-module represents a different macro, +//! except for separated utils. +//! +//! [`macro_generation`]: crate::macro_generation + +pub mod context; +pub mod translation; +pub mod utils; diff --git a/translatable_proc/src/macro_input/translation.rs b/translatable_proc/src/macro_input/translation.rs new file mode 100644 index 0000000..7b93b6c --- /dev/null +++ b/translatable_proc/src/macro_input/translation.rs @@ -0,0 +1,169 @@ +//! [`translation!()`] input parsing module. +//! +//! This module declares a structure that implements +//! [`Parse`] for it to be used with [`parse_macro_input`]. +//! +//! [`translation!()`]: crate::translation +//! [`parse_macro_input`]: syn::parse_macro_input + +use std::collections::HashMap; + +use proc_macro2::TokenStream as TokenStream2; +use quote::ToTokens; +use syn::parse::{Parse, ParseStream}; +use syn::token::Static; +use syn::{Expr, ExprLit, Ident, Lit, Result as SynResult, Token}; +use thiserror::Error; +use translatable_shared::macros::errors::IntoCompileError; +use translatable_shared::misc::language::Language; + +use super::utils::input_type::InputType; +use super::utils::translation_path::TranslationPath; + +/// Parse error for [`TranslationMacroArgs`]. +/// +/// Represents errors that can occur while parsing the [`translation!()`] +/// macro input. This error is only used while parsing compile-time input, +/// as runtime input is validated in runtime. 
+/// +/// [`translation!()`]: crate::translation +#[derive(Error, Debug)] +enum MacroArgsError { + /// An error while parsing a compile-time String value + /// was found. + #[error("The literal '{0}' is an invalid ISO 639-1 string, and cannot be parsed")] + InvalidIsoLiteral(String), +} + +/// [`translation!()`] macro input arguments. +/// +/// This structure implements [`Parse`] to parse +/// [`translation!()`] macro arguments using +/// [`parse_macro_input`], to later be used +/// in the [`translation_macro`] function. +/// +/// [`translation!()`]: crate::translation +/// [`parse_macro_input`]: syn::parse_macro_input +/// [`translation_macro`]: crate::macro_generation::translation::translation_macro +pub struct TranslationMacroArgs { + /// Represents the user specified language + /// which may be static if the specified language + /// is a string literal or a `Language` enum tagged + /// union instance, otherwise dynamic and represented + /// as a `TokenStream`. + language: InputType, + + /// Represents a toml path to find the translation + /// object in the previously parsed TOML from the + /// translation files, this can be static if specified + /// as `static path::to::translation` or dynamic if + /// it's another expression, this way represented as a + /// [`TokenStream2`]. + path: InputType, + + /// Stores the replacement arguments for the translation + /// templates such as `Hello {name}` if found on a translation. + /// + /// If a call such as `a` is found, it will be implicitly + /// converted to `a = a` thus stored like so in the hash map. + replacements: HashMap, +} + +/// [`translation!()`] macro args parsing implementation. +/// +/// This implementation's purpose is to parse [`TokenStream`] +/// with the [`parse_macro_input`] macro. +impl Parse for TranslationMacroArgs { + fn parse(input: ParseStream) -> SynResult { + let parsed_language_arg = + match input.parse::()? { + Expr::Lit(ExprLit { lit: Lit::Str(literal), .. }) => { + match literal + .value() + .parse::() + { + Ok(language) => InputType::Static(language), + + Err(_) => Err(MacroArgsError::InvalidIsoLiteral(literal.value()) + .to_syn_error(literal))?, + } + }, + + other => InputType::Dynamic(other.into_token_stream()), + }; + + input.parse::()?; + + let parsed_path_arg = match input.parse::() { + Ok(_) => InputType::Static(input.parse::()?), + + Err(_) => InputType::Dynamic( + input + .parse::()? + .to_token_stream(), + ), + }; + + let mut replacements = HashMap::new(); + if input.peek(Token![,]) { + while !input.is_empty() { + input.parse::()?; + + if input.is_empty() { + break; + } + + let key = input.parse::()?; + let value = match input.parse::() { + Ok(_) => input + .parse::()? + .to_token_stream(), + + Err(_) => key + .clone() + .into_token_stream(), + }; + + replacements.insert(key, value); + } + } + + Ok(Self { + language: parsed_language_arg, + path: parsed_path_arg, + replacements, + }) + } +} + +impl TranslationMacroArgs { + /// `self.language` reference getter. + /// + /// **Returns** + /// A reference to `self.language` as [`InputType`]. + #[inline] + #[allow(unused)] + pub fn language(&self) -> &InputType { + &self.language + } + + /// `self.path` reference getter. + /// + /// **Returns** + /// A reference to `self.path` as [`InputType>`] + #[inline] + #[allow(unused)] + pub fn path(&self) -> &InputType { + &self.path + } + + /// `self.replacements` reference getter. 
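// ---------------------------------------------------------------------------
// Editor's illustrative sketch (not part of this patch): the replacement-list
// grammar described above, where `, name` is shorthand for `, name = name`.
// A standalone approximation of that loop, assuming syn 2.x with the "full"
// feature; the sample input (`user.name()`, `count`) is invented.
use std::collections::HashMap;

use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::parse::{Parse, ParseStream};
use syn::{Expr, Ident, Token};

struct Replacements(HashMap<Ident, TokenStream>);

impl Parse for Replacements {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let mut map = HashMap::new();

        while !input.is_empty() {
            input.parse::<Token![,]>()?;
            if input.is_empty() {
                break;
            }

            let key = input.parse::<Ident>()?;
            let value = if input.peek(Token![=]) {
                input.parse::<Token![=]>()?;
                input.parse::<Expr>()?.to_token_stream()
            } else {
                // Shorthand: a bare `name` expands to `name = name`.
                key.to_token_stream()
            };

            map.insert(key, value);
        }

        Ok(Self(map))
    }
}

fn main() -> syn::Result<()> {
    let parsed: Replacements = syn::parse_str(", name = user.name(), count")?;
    for (key, value) in &parsed.0 {
        println!("{key} => {value}");
    }
    Ok(())
}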
+ /// + /// **Returns** + /// A reference to `self.replacements` as [`HashMap`] + #[inline] + #[allow(unused)] + pub fn replacements(&self) -> &HashMap { + &self.replacements + } +} diff --git a/translatable_proc/src/macro_input/utils/input_type.rs b/translatable_proc/src/macro_input/utils/input_type.rs new file mode 100644 index 0000000..2754ac2 --- /dev/null +++ b/translatable_proc/src/macro_input/utils/input_type.rs @@ -0,0 +1,61 @@ +//! Input type abstraction for macro argument separation. +//! +//! This module defines the [`InputType`] enum, +//! which is used to distinguish between static +//! and dynamic values during macro input parsing. + +use proc_macro2::TokenStream as TokenStream2; +use quote::ToTokens; + +/// Input type differentiation enum. +/// +/// Represents whether an input is a static, +/// compile-time known value or a dynamic, +/// runtime expression. This differentiation +/// allows the translation system to apply +/// optimizations based on the input nature. +pub enum InputType { + /// Statically known value. + /// + /// The input is fully resolved at compile time, which allows + /// the macro system to optimize for constant substitution and + /// code simplification. + /// + /// **Parameters** + /// * `0` — The static value. + Static(T), + + /// Dynamically evaluated input. + /// + /// The input is represented as a [`TokenStream2`] expression, + /// which is evaluated at runtime rather than compile time. + /// + /// **Parameters** + /// * `0` — The dynamic [`TokenStream2`] expression. + Dynamic(TokenStream2), +} + +/// [`InputType`] runtime normalization implementation. +/// +/// This implementation is used to convert [`InputType`] +/// into normalized runtime values in many aspects, only +/// if T implements [`ToTokens`]. +impl InputType { + /// [`InputType`] to [`TokenStream2`] conversion. + /// + /// This method takes an [`InputType`] and converts + /// any of it's branches to a [`TokenStream2`] if + /// available. + /// + /// **Returns** + /// A [`TokenStream2`] representation of whatever the value + /// is in the [`InputType`]. + #[inline] + #[allow(unused)] + fn dynamic(self) -> TokenStream2 { + match self { + Self::Static(value) => value.to_token_stream(), + Self::Dynamic(value) => value, + } + } +} diff --git a/translatable_proc/src/macro_input/utils/mod.rs b/translatable_proc/src/macro_input/utils/mod.rs new file mode 100644 index 0000000..d4a7ad5 --- /dev/null +++ b/translatable_proc/src/macro_input/utils/mod.rs @@ -0,0 +1,2 @@ +pub mod input_type; +pub mod translation_path; diff --git a/translatable_proc/src/macro_input/utils/translation_path.rs b/translatable_proc/src/macro_input/utils/translation_path.rs new file mode 100644 index 0000000..560ca30 --- /dev/null +++ b/translatable_proc/src/macro_input/utils/translation_path.rs @@ -0,0 +1,152 @@ +//! [`TranslationPath`] module. +//! +//! This module declares an abstraction +//! to parse [`syn::Path`] disallowing +//! generic type arguments. +//! +//! This module doesn't have anything +//! to do with [`std::path`]. + +use proc_macro2::Span; +use syn::parse::{Parse, ParseStream}; +use syn::spanned::Spanned; +use syn::{Error as SynError, Path, PathArguments, Result as SynResult}; + +/// Static translation path parser. +/// +/// This parser structure is an abstraction +/// of [`syn::Path`] but disallowing generic +/// types. +/// +/// The structure is spanned preserving +/// the original path unless defaulted, otherwise +/// the span is callsite. +/// +/// The structure is completly immutable. 
+#[derive(Clone)] +pub struct TranslationPath { + /// The path segments. + /// + /// The segments are translated + /// from a `syn::Path` as + /// x::y -> vec!["x", "y"]. + segments: Vec, + + /// The path original span + /// unless default, then empty. + span: Span, +} + +/// [`TranslationPath`] macro parsing implementation. +/// +/// Used to parse arguments with [`parse2`] or [`parse_macro_input!`] +/// within attribute arguments. +/// +/// [`parse2`]: syn::parse2 +/// [`parse_macro_input!`]: syn::parse_macro_input +impl Parse for TranslationPath { + fn parse(input: ParseStream) -> SynResult { + let path = input.parse::()?; + + let span = path.span(); + let segments = path + .segments + .into_iter() + .map(|segment| match segment.arguments { + PathArguments::None => Ok(segment + .ident + .to_string()), + + error => Err(SynError::new_spanned( + error, + "A translation path can't contain generic arguments.", + )), + }) + .collect::>()?; + + Ok(Self { segments, span }) + } +} + +/// Default implementation for [`TranslationPath`]. +/// +/// Used to create empty translation paths usually +/// for fallbacks with `Option::::unwrap_or_else()`. +/// +/// The span generated for a [`TranslationPath::default`] call is +/// [`Span::call_site`]. +impl Default for TranslationPath { + fn default() -> Self { + Self { + segments: Vec::new(), + span: Span::call_site(), + } + } +} + +impl TranslationPath { + /// Constructor function for [`TranslationPath`]. + /// + /// This constructor function should be called with + /// partial arguments from another function. Nothing + /// happens if it's not. + /// + /// **Arguments** + /// * `segments` - The segments this path is made of x::y -> vec!["x", "y"]. + /// * `span` - The original location or where this path should return errors + /// if it may. + /// + /// **Returns** + /// A constructed instance of [`TranslationPath`]. + #[inline] + pub fn new(segments: Vec, span: Span) -> Self { + Self { segments, span } + } + + /// Path merging helper method. + /// + /// This method takes both internal path segments and appends + /// both making a vector out of the merge. + /// + /// Since spans cannot be split or we may not have multiple + /// spans without having a complex structure then the span + /// is directly not preserved. + /// + /// **Arguments** + /// * `other` - The path this instance should be merged with. + /// + /// **Returns** + /// A single vector with both internal paths merged. + pub fn merge(&self, other: &Self) -> Vec { + // TODO: merge spans (not yet in #19) + [ + self.segments() + .to_vec(), + other + .segments() + .to_vec(), + ] + .concat() + } + + /// Internal segments getter. + /// + /// **Returns** + /// The internal segments. + #[inline] + #[allow(unused)] + pub fn segments(&self) -> &Vec { + &self.segments + } + + /// Internal span getter. + /// + /// **Returns** + /// The internal span. + #[inline] + #[allow(unused)] + pub fn span(&self) -> Span { + // TODO: possibly implement Spanned + self.span + } +} diff --git a/translatable_shared/Cargo.toml b/translatable_shared/Cargo.toml new file mode 100644 index 0000000..4af74fa --- /dev/null +++ b/translatable_shared/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "translatable_shared" +description = "Shared dependencies crate for translatable." 
+repository = "https://github.com/FlakySL/translatable" +license = "GPL-3.0" +readme = "../README-SHARED.md" +version = "1.0.0" +edition = "2024" +authors = ["Esteve Autet ", "Chiko "] + +[dependencies] +proc-macro2 = "1.0.95" +quote = "1.0.40" +strum = { version = "0.27.1", features = ["derive", "strum_macros"] } +syn = { version = "2.0.100", features = ["full"] } +thiserror = "2.0.12" +toml_edit = "0.23.1" diff --git a/translatable_shared/src/lib.rs b/translatable_shared/src/lib.rs new file mode 100644 index 0000000..ff03a65 --- /dev/null +++ b/translatable_shared/src/lib.rs @@ -0,0 +1,15 @@ +//! Shared util declarations for `translatable` and `translatable_proc` +//! +//! This crate shouldn't be used by itself, +//! since it contains macro generation code which +//! relies on references from the `translatable` library. +//! +//! The `translatable` library re-exports the utils +//! declared in this crate and exposes the necessary +//! ones. + +#![warn(missing_docs)] + +pub mod macros; +pub mod misc; +pub mod translations; diff --git a/translatable_shared/src/macros/collections.rs b/translatable_shared/src/macros/collections.rs new file mode 100644 index 0000000..c1b09b3 --- /dev/null +++ b/translatable_shared/src/macros/collections.rs @@ -0,0 +1,79 @@ +//! Shared collection utils module. +//! +//! This module declares functions used by `translatable_proc` +//! and `translatable_shared` together, mostly used to convert +//! compile-time structures into runtime representations of +//! the same structures. + +use std::collections::HashMap; + +use proc_macro2::TokenStream as TokenStream2; +use quote::{ToTokens, quote}; + +/// [`HashMap`] runtime conversion. +/// +/// This function converts a [`HashMap`] into a [`TokenStream2`] +/// that when generated on a macro contains the same values as the initial +/// map. +/// +/// The type of the keys and values of the map must implement [`ToTokens`]. +/// +/// **Parameters** +/// * `map` - The map to convert into tokens. +/// +/// **Returns** +/// The provided `map` parameter represented as [`TokenStream2`]. +#[inline] +pub fn map_to_tokens(map: &HashMap) -> TokenStream2 { + let map = map + .iter() + .map(|(key, value)| { + let key = key.into_token_stream(); + let value = value.into_token_stream(); + + quote! { (#key, #value) } + }); + + quote! { + vec![#(#map),*] + .into_iter() + .collect::>() + } +} + +/// [`HashMap`] runtime conversion and mapping. +/// +/// Similarly to [`map_to_tokens`] this function converts a [`HashMap`] +/// into a [`TokenStream2`] that when generated on a macro contains the same +/// values as the original map. The difference is that in this function the keys +/// and values types don't need to implement [`ToTokens`], as this takes a +/// predicate which lets you modify values before converting it to tokens. +/// +/// The predicate must return a [`TokenStream2`] containing tuples, the internal +/// conversion is as `vec![$($converted),*]` collected into a [`HashMap`] +/// in runtime. +/// +/// **Parameters** +/// * `map` - The map to convert into tokens. +/// * `predicate` - A predicate taking a key and a value that should return a +/// [`TokenStream2`] +/// containing a tuple of the key and the value transformed in any way. +/// +/// **Returns** +/// The provided `map` parameter mutated with the `predicate` and converted to a +/// [`TokenStream2`]. 
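// ---------------------------------------------------------------------------
// Editor's illustrative sketch (not part of this patch): what the
// `map_to_tokens` helper above produces for a small map. The generic bounds
// written here (`K: ToTokens, V: ToTokens` and the `HashMap<_, _>` turbofish)
// are a reconstruction; the rendered diff appears to have dropped the
// angle-bracketed parameters.
use std::collections::HashMap;

use proc_macro2::TokenStream;
use quote::{quote, ToTokens};

fn map_to_tokens<K: ToTokens, V: ToTokens>(map: &HashMap<K, V>) -> TokenStream {
    let entries = map.iter().map(|(key, value)| quote! { (#key, #value) });

    quote! {
        vec![#(#entries),*]
            .into_iter()
            .collect::<std::collections::HashMap<_, _>>()
    }
}

fn main() {
    let mut map = HashMap::new();
    map.insert("en", "Hello");
    map.insert("es", "Hola");

    // Expands to a `vec![...]` of tuples collected back into a HashMap at runtime.
    println!("{}", map_to_tokens(&map));
}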
+#[inline] +pub fn map_transform_to_tokens(map: &HashMap, predicate: F) -> TokenStream2 +where + F: Fn(&K, &V) -> TokenStream2, +{ + let processed = map + .iter() + .map(|(key, value)| predicate(key, value)); + + quote! { + vec![#(#processed),*] + .into_iter() + .collect::>() + } +} diff --git a/translatable_shared/src/macros/errors.rs b/translatable_shared/src/macros/errors.rs new file mode 100644 index 0000000..73be469 --- /dev/null +++ b/translatable_shared/src/macros/errors.rs @@ -0,0 +1,111 @@ +//! Error utils module. +//! +//! This module declares blanket implementations +//! for error utils such as conversion to tokens +//! or other errors. + +use std::fmt::Display; + +use proc_macro2::TokenStream as TokenStream2; +use quote::{ToTokens, quote}; +use syn::Error as SynError; + +/// Error implementations for macro outputs. +/// +/// This trait is meant to be implemented +/// as a blanket where every type that +/// implements [`Display`] can be converted +/// or either to a compile error or a [`SynError`]. +pub trait IntoCompileError +where + Self: Display + Sized, +{ + /// Convert error reference to runtime. + /// + /// Transforms the value into a string + /// and wraps [`compile_error!`] into it + /// for it to be returned when an error + /// happens. + /// + /// The invocation happens inside a method + /// for compatibility in both outside and + /// inside functions. + /// + /// **Returns** + /// A [`compile_error!`] wrapped `&str`. + #[cold] + fn to_compile_error(&self) -> TokenStream2 { + let message = self.to_string(); + quote! { std::compile_error!(#message) } + } + + /// Convert error reference to runtime. + /// + /// Calls [`to_compile_error`] but wraps that + /// invocation in a function, so errors outside + /// functions only target that specific error. + /// + /// **Returns** + /// A `fn __() {}` wrapped [`to_compile_error`] invocation. + /// + /// [`to_compile_error`]: IntoCompileError::to_compile_error + fn to_out_compile_error(&self) -> TokenStream2 { + let invocation = self.to_compile_error(); + quote! { fn __() { #invocation } } + } + + /// Convert error reference to a spanned [`SynError`]. + /// + /// Transforms the value into a string + /// and creates a spanned [`SynError`] + /// with the user provided span. + /// + /// **Parameters** + /// * `span` - the error span for the `rust-analyzer` report. + /// + /// **Returns** + /// A [`SynError`] with the value as a message and the provided `span`. + #[cold] + fn to_syn_error(&self, span: T) -> SynError { + SynError::new_spanned(span, self.to_string()) + } +} + +/// [`IntoCompileError`] blanket implementation +/// for values that implement [`Display`]. +impl IntoCompileError for T {} + +/// [`to_compile_error`] conversion helper macro. +/// +/// This macro takes a [`Result`] where +/// `E` implements [`Display`] and generates +/// a match branch which directly returns the error +/// as a compile error. +/// +/// This macro is meant to be called from a macro +/// generation function. +/// +/// If you prepend `out` to the value this will +/// call [`to_out_compile_error`] instead. +/// +/// [`to_compile_error`]: IntoCompileError::to_compile_error +/// [`to_out_compile_error`]: IntoCompileError::to_out_compile_error +#[macro_export] +macro_rules! 
handle_macro_result { + ($method:ident; $val:expr) => {{ + use $crate::macros::errors::IntoCompileError; + + match $val { + std::result::Result::Ok(value) => value, + std::result::Result::Err(error) => return error.$method(), + } + }}; + + ($val:expr) => { + $crate::handle_macro_result!(to_compile_error; $val) + }; + + (out $val:expr) => { + $crate::handle_macro_result!(to_out_compile_error; $val) + }; +} diff --git a/translatable_shared/src/macros/mod.rs b/translatable_shared/src/macros/mod.rs new file mode 100644 index 0000000..885c4fc --- /dev/null +++ b/translatable_shared/src/macros/mod.rs @@ -0,0 +1,13 @@ +//! Macro helpers module. +//! +//! This module contains sub-modules +//! which or either help converting +//! compile-time structures into their +//! runtime representations with [`TokenStream2`] +//! or any other utils to generate +//! runtime code. +//! +//! [`TokenStream2`]: proc_macro2::TokenStream + +pub mod collections; +pub mod errors; diff --git a/translatable_shared/src/misc/language.rs b/translatable_shared/src/misc/language.rs new file mode 100644 index 0000000..d7fc925 --- /dev/null +++ b/translatable_shared/src/misc/language.rs @@ -0,0 +1,583 @@ +//! [`Language`] declaration module. +//! +//! This module declares all the implementations +//! required for parsing and validating ISO-639-1 +//! language strings from user input. + +use proc_macro2::{Span, TokenStream as TokenStream2}; +use quote::{ToTokens, TokenStreamExt, quote}; +use strum::{Display, EnumIter, EnumString}; +use syn::Ident; + +/// This implementation converts the tagged union +/// to an equivalent call from the runtime context. +/// +/// This is exclusively meant to be used from the +/// macro generation context. +impl ToTokens for Language { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let ident = Ident::new(&format!("{self:?}"), Span::call_site()); + + tokens.append_all(quote! 
{ translatable::shared::misc::language::Language::#ident }) + } +} + +/// ISO 639-1 language code implementation with validation +/// +/// Provides two-way mapping between language codes and names with: +/// - Case-insensitive parsing +/// - Strict validation +/// - Complete ISO 639-1 coverage +#[derive(Debug, Clone, EnumIter, Display, EnumString, Eq, Hash, PartialEq)] +#[strum(ascii_case_insensitive)] +pub enum Language { + #[allow(missing_docs)] + #[strum(serialize = "Abkhazian", serialize = "ab")] + AB, + #[allow(missing_docs)] + #[strum(serialize = "Afar", serialize = "aa")] + AA, + #[allow(missing_docs)] + #[strum(serialize = "Afrikaans", serialize = "af")] + AF, + #[allow(missing_docs)] + #[strum(serialize = "Akan", serialize = "ak")] + AK, + #[allow(missing_docs)] + #[strum(serialize = "Albanian", serialize = "sq")] + SQ, + #[allow(missing_docs)] + #[strum(serialize = "Amharic", serialize = "am")] + AM, + #[allow(missing_docs)] + #[strum(serialize = "Arabic", serialize = "ar")] + AR, + #[allow(missing_docs)] + #[strum(serialize = "Aragonese", serialize = "an")] + AN, + #[allow(missing_docs)] + #[strum(serialize = "Armenian", serialize = "hy")] + HY, + #[allow(missing_docs)] + #[strum(serialize = "Assamese", serialize = "as")] + AS, + #[allow(missing_docs)] + #[strum(serialize = "Avaric", serialize = "av")] + AV, + #[allow(missing_docs)] + #[strum(serialize = "Avestan", serialize = "ae")] + AE, + #[allow(missing_docs)] + #[strum(serialize = "Aymara", serialize = "ay")] + AY, + #[allow(missing_docs)] + #[strum(serialize = "Azerbaijani", serialize = "az")] + AZ, + #[allow(missing_docs)] + #[strum(serialize = "Bambara", serialize = "bm")] + BM, + #[allow(missing_docs)] + #[strum(serialize = "Bashkir", serialize = "ba")] + BA, + #[allow(missing_docs)] + #[strum(serialize = "Basque", serialize = "eu")] + EU, + #[allow(missing_docs)] + #[strum(serialize = "Belarusian", serialize = "be")] + BE, + #[allow(missing_docs)] + #[strum(serialize = "Bengali", serialize = "bn")] + BN, + #[allow(missing_docs)] + #[strum(serialize = "Bislama", serialize = "bi")] + BI, + #[allow(missing_docs)] + #[strum(serialize = "Bosnian", serialize = "bs")] + BS, + #[allow(missing_docs)] + #[strum(serialize = "Breton", serialize = "br")] + BR, + #[allow(missing_docs)] + #[strum(serialize = "Bulgarian", serialize = "bg")] + BG, + #[allow(missing_docs)] + #[strum(serialize = "Burmese", serialize = "my")] + MY, + #[allow(missing_docs)] + #[strum(serialize = "Catalan", serialize = "ca")] + CA, + #[allow(missing_docs)] + #[strum(serialize = "Chamorro", serialize = "ch")] + CH, + #[allow(missing_docs)] + #[strum(serialize = "Chechen", serialize = "ce")] + CE, + #[allow(missing_docs)] + #[strum(serialize = "Chichewa", serialize = "ny")] + NY, + #[allow(missing_docs)] + #[strum(serialize = "Chinese", serialize = "zh")] + ZH, + #[allow(missing_docs)] + #[strum(serialize = "Church Slavonic", serialize = "cu")] + CU, + #[allow(missing_docs)] + #[strum(serialize = "Chuvash", serialize = "cv")] + CV, + #[allow(missing_docs)] + #[strum(serialize = "Cornish", serialize = "kw")] + KW, + #[allow(missing_docs)] + #[strum(serialize = "Corsican", serialize = "co")] + CO, + #[allow(missing_docs)] + #[strum(serialize = "Cree", serialize = "cr")] + CR, + #[allow(missing_docs)] + #[strum(serialize = "Croatian", serialize = "hr")] + HR, + #[allow(missing_docs)] + #[strum(serialize = "Czech", serialize = "cs")] + CS, + #[allow(missing_docs)] + #[strum(serialize = "Danish", serialize = "da")] + DA, + #[allow(missing_docs)] + 
#[strum(serialize = "Divehi", serialize = "dv")] + DV, + #[allow(missing_docs)] + #[strum(serialize = "Dutch", serialize = "nl")] + NL, + #[allow(missing_docs)] + #[strum(serialize = "Dzongkha", serialize = "dz")] + DZ, + #[allow(missing_docs)] + #[strum(serialize = "English", serialize = "en")] + EN, + #[allow(missing_docs)] + #[strum(serialize = "Esperanto", serialize = "eo")] + EO, + #[allow(missing_docs)] + #[strum(serialize = "Estonian", serialize = "et")] + ET, + #[allow(missing_docs)] + #[strum(serialize = "Ewe", serialize = "ee")] + EE, + #[allow(missing_docs)] + #[strum(serialize = "Faroese", serialize = "fo")] + FO, + #[allow(missing_docs)] + #[strum(serialize = "Fijian", serialize = "fj")] + FJ, + #[allow(missing_docs)] + #[strum(serialize = "Finnish", serialize = "fi")] + FI, + #[allow(missing_docs)] + #[strum(serialize = "French", serialize = "fr")] + FR, + #[allow(missing_docs)] + #[strum(serialize = "Western Frisian", serialize = "fy")] + FY, + #[allow(missing_docs)] + #[strum(serialize = "Fulah", serialize = "ff")] + FF, + #[allow(missing_docs)] + #[strum(serialize = "Gaelic", serialize = "gd")] + GD, + #[allow(missing_docs)] + #[strum(serialize = "Galician", serialize = "gl")] + GL, + #[allow(missing_docs)] + #[strum(serialize = "Ganda", serialize = "lg")] + LG, + #[allow(missing_docs)] + #[strum(serialize = "Georgian", serialize = "ka")] + KA, + #[allow(missing_docs)] + #[strum(serialize = "German", serialize = "de")] + DE, + #[allow(missing_docs)] + #[strum(serialize = "Greek", serialize = "el")] + EL, + #[allow(missing_docs)] + #[strum(serialize = "Kalaallisut", serialize = "kl")] + KL, + #[allow(missing_docs)] + #[strum(serialize = "Guarani", serialize = "gn")] + GN, + #[allow(missing_docs)] + #[strum(serialize = "Gujarati", serialize = "gu")] + GU, + #[allow(missing_docs)] + #[strum(serialize = "Haitian", serialize = "ht")] + HT, + #[allow(missing_docs)] + #[strum(serialize = "Hausa", serialize = "ha")] + HA, + #[allow(missing_docs)] + #[strum(serialize = "Hebrew", serialize = "he")] + HE, + #[allow(missing_docs)] + #[strum(serialize = "Herero", serialize = "hz")] + HZ, + #[allow(missing_docs)] + #[strum(serialize = "Hindi", serialize = "hi")] + HI, + #[allow(missing_docs)] + #[strum(serialize = "Hiri Motu", serialize = "ho")] + HO, + #[allow(missing_docs)] + #[strum(serialize = "Hungarian", serialize = "hu")] + HU, + #[allow(missing_docs)] + #[strum(serialize = "Icelandic", serialize = "is")] + IS, + #[allow(missing_docs)] + #[strum(serialize = "Ido", serialize = "io")] + IO, + #[allow(missing_docs)] + #[strum(serialize = "Igbo", serialize = "ig")] + IG, + #[allow(missing_docs)] + #[strum(serialize = "Indonesian", serialize = "id")] + ID, + #[allow(missing_docs)] + #[strum(serialize = "Interlingua", serialize = "ia")] + IA, + #[allow(missing_docs)] + #[strum(serialize = "Interlingue", serialize = "ie")] + IE, + #[allow(missing_docs)] + #[strum(serialize = "Inuktitut", serialize = "iu")] + IU, + #[allow(missing_docs)] + #[strum(serialize = "Inupiaq", serialize = "ik")] + IK, + #[allow(missing_docs)] + #[strum(serialize = "Irish", serialize = "ga")] + GA, + #[allow(missing_docs)] + #[strum(serialize = "Italian", serialize = "it")] + IT, + #[allow(missing_docs)] + #[strum(serialize = "Japanese", serialize = "ja")] + JA, + #[allow(missing_docs)] + #[strum(serialize = "Javanese", serialize = "jv")] + JV, + #[allow(missing_docs)] + #[strum(serialize = "Kannada", serialize = "kn")] + KN, + #[allow(missing_docs)] + #[strum(serialize = "Kanuri", serialize = "kr")] + KR, + 
#[allow(missing_docs)] + #[strum(serialize = "Kashmiri", serialize = "ks")] + KS, + #[allow(missing_docs)] + #[strum(serialize = "Kazakh", serialize = "kk")] + KK, + #[allow(missing_docs)] + #[strum(serialize = "Central Khmer", serialize = "km")] + KM, + #[allow(missing_docs)] + #[strum(serialize = "Kikuyu", serialize = "ki")] + KI, + #[allow(missing_docs)] + #[strum(serialize = "Kinyarwanda", serialize = "rw")] + RW, + #[allow(missing_docs)] + #[strum(serialize = "Kyrgyz", serialize = "ky")] + KY, + #[allow(missing_docs)] + #[strum(serialize = "Komi", serialize = "kv")] + KV, + #[allow(missing_docs)] + #[strum(serialize = "Kongo", serialize = "kg")] + KG, + #[allow(missing_docs)] + #[strum(serialize = "Korean", serialize = "ko")] + KO, + #[allow(missing_docs)] + #[strum(serialize = "Kuanyama", serialize = "kj")] + KJ, + #[allow(missing_docs)] + #[strum(serialize = "Kurdish", serialize = "ku")] + KU, + #[allow(missing_docs)] + #[strum(serialize = "Lao", serialize = "lo")] + LO, + #[allow(missing_docs)] + #[strum(serialize = "Latin", serialize = "la")] + LA, + #[allow(missing_docs)] + #[strum(serialize = "Latvian", serialize = "lv")] + LV, + #[allow(missing_docs)] + #[strum(serialize = "Limburgan", serialize = "li")] + LI, + #[allow(missing_docs)] + #[strum(serialize = "Lingala", serialize = "ln")] + LN, + #[allow(missing_docs)] + #[strum(serialize = "Lithuanian", serialize = "lt")] + LT, + #[allow(missing_docs)] + #[strum(serialize = "Luba-Katanga", serialize = "lu")] + LU, + #[allow(missing_docs)] + #[strum(serialize = "Luxembourgish", serialize = "lb")] + LB, + #[allow(missing_docs)] + #[strum(serialize = "Macedonian", serialize = "mk")] + MK, + #[allow(missing_docs)] + #[strum(serialize = "Malagasy", serialize = "mg")] + MG, + #[allow(missing_docs)] + #[strum(serialize = "Malay", serialize = "ms")] + MS, + #[allow(missing_docs)] + #[strum(serialize = "Malayalam", serialize = "ml")] + ML, + #[allow(missing_docs)] + #[strum(serialize = "Maltese", serialize = "mt")] + MT, + #[allow(missing_docs)] + #[strum(serialize = "Manx", serialize = "gv")] + GV, + #[allow(missing_docs)] + #[strum(serialize = "Maori", serialize = "mi")] + MI, + #[allow(missing_docs)] + #[strum(serialize = "Marathi", serialize = "mr")] + MR, + #[allow(missing_docs)] + #[strum(serialize = "Marshallese", serialize = "mh")] + MH, + #[allow(missing_docs)] + #[strum(serialize = "Mongolian", serialize = "mn")] + MN, + #[allow(missing_docs)] + #[strum(serialize = "Nauru", serialize = "na")] + NA, + #[allow(missing_docs)] + #[strum(serialize = "Navajo", serialize = "nv")] + NV, + #[allow(missing_docs)] + #[strum(serialize = "North Ndebele", serialize = "nd")] + ND, + #[allow(missing_docs)] + #[strum(serialize = "South Ndebele", serialize = "nr")] + NR, + #[allow(missing_docs)] + #[strum(serialize = "Nepali", serialize = "ng")] + NG, + #[allow(missing_docs)] + #[strum(serialize = "Nepali", serialize = "ne")] + NE, + #[allow(missing_docs)] + #[strum(serialize = "Norwegian", serialize = "no")] + NO, + #[allow(missing_docs)] + #[strum(serialize = "Norwegian BokmĂĄl", serialize = "nb")] + NB, + #[allow(missing_docs)] + #[strum(serialize = "Norwegian Nynorsk", serialize = "nn")] + NN, + #[allow(missing_docs)] + #[strum(serialize = "Occitan", serialize = "oc")] + OC, + #[allow(missing_docs)] + #[strum(serialize = "Ojibwa", serialize = "oj")] + OJ, + #[allow(missing_docs)] + #[strum(serialize = "Oriya", serialize = "or")] + OR, + #[allow(missing_docs)] + #[strum(serialize = "Oromo", serialize = "om")] + OM, + #[allow(missing_docs)] + 
#[strum(serialize = "Ossetian", serialize = "os")] + OS, + #[allow(missing_docs)] + #[strum(serialize = "Pali", serialize = "pi")] + PI, + #[allow(missing_docs)] + #[strum(serialize = "Pashto", serialize = "ps")] + PS, + #[allow(missing_docs)] + #[strum(serialize = "Persian", serialize = "fa")] + FA, + #[allow(missing_docs)] + #[strum(serialize = "Polish", serialize = "pl")] + PL, + #[allow(missing_docs)] + #[strum(serialize = "Portuguese", serialize = "pt")] + PT, + #[allow(missing_docs)] + #[strum(serialize = "Punjabi", serialize = "pa")] + PA, + #[allow(missing_docs)] + #[strum(serialize = "Quechua", serialize = "qu")] + QU, + #[allow(missing_docs)] + #[strum(serialize = "Romanian", serialize = "ro")] + RO, + #[allow(missing_docs)] + #[strum(serialize = "Romansh", serialize = "rm")] + RM, + #[allow(missing_docs)] + #[strum(serialize = "Rundi", serialize = "rn")] + RN, + #[allow(missing_docs)] + #[strum(serialize = "Russian", serialize = "ru")] + RU, + #[allow(missing_docs)] + #[strum(serialize = "North Sami", serialize = "se")] + SE, + #[allow(missing_docs)] + #[strum(serialize = "Samoan", serialize = "sm")] + SM, + #[allow(missing_docs)] + #[strum(serialize = "Sango", serialize = "sg")] + SG, + #[allow(missing_docs)] + #[strum(serialize = "Sanskrit", serialize = "sa")] + SA, + #[allow(missing_docs)] + #[strum(serialize = "Sardinian", serialize = "sc")] + SC, + #[allow(missing_docs)] + #[strum(serialize = "Serbian", serialize = "sr")] + SR, + #[allow(missing_docs)] + #[strum(serialize = "Shona", serialize = "sn")] + SN, + #[allow(missing_docs)] + #[strum(serialize = "Sindhi", serialize = "sd")] + SD, + #[allow(missing_docs)] + #[strum(serialize = "Sinhala", serialize = "si")] + SI, + #[allow(missing_docs)] + #[strum(serialize = "Slovak", serialize = "sk")] + SK, + #[allow(missing_docs)] + #[strum(serialize = "Slovenian", serialize = "sl")] + SL, + #[allow(missing_docs)] + #[strum(serialize = "Somali", serialize = "so")] + SO, + #[allow(missing_docs)] + #[strum(serialize = "Southern Sotho", serialize = "st")] + ST, + #[allow(missing_docs)] + #[strum(serialize = "Spanish", serialize = "es")] + ES, + #[allow(missing_docs)] + #[strum(serialize = "Sundanese", serialize = "su")] + SU, + #[allow(missing_docs)] + #[strum(serialize = "Swahili", serialize = "sw")] + SW, + #[allow(missing_docs)] + #[strum(serialize = "Swati", serialize = "ss")] + SS, + #[allow(missing_docs)] + #[strum(serialize = "Swedish", serialize = "sv")] + SV, + #[allow(missing_docs)] + #[strum(serialize = "Tagalog", serialize = "tl")] + TL, + #[allow(missing_docs)] + #[strum(serialize = "Tahitian", serialize = "ty")] + TY, + #[allow(missing_docs)] + #[strum(serialize = "Tajik", serialize = "tg")] + TG, + #[allow(missing_docs)] + #[strum(serialize = "Tamil", serialize = "ta")] + TA, + #[allow(missing_docs)] + #[strum(serialize = "Tatar", serialize = "tt")] + TT, + #[allow(missing_docs)] + #[strum(serialize = "Telugu", serialize = "te")] + TE, + #[allow(missing_docs)] + #[strum(serialize = "Thai", serialize = "th")] + TH, + #[allow(missing_docs)] + #[strum(serialize = "Tibetan", serialize = "bo")] + BO, + #[allow(missing_docs)] + #[strum(serialize = "Tigrinya", serialize = "ti")] + TI, + #[allow(missing_docs)] + #[strum(serialize = "Tonga", serialize = "to")] + TO, + #[allow(missing_docs)] + #[strum(serialize = "Tsonga", serialize = "ts")] + TS, + #[allow(missing_docs)] + #[strum(serialize = "Tswana", serialize = "tn")] + TN, + #[allow(missing_docs)] + #[strum(serialize = "Turkish", serialize = "tr")] + TR, + 
#[allow(missing_docs)] + #[strum(serialize = "Turkmen", serialize = "tk")] + TK, + #[allow(missing_docs)] + #[strum(serialize = "Twi", serialize = "tw")] + TW, + #[allow(missing_docs)] + #[strum(serialize = "Uighur", serialize = "ug")] + UG, + #[allow(missing_docs)] + #[strum(serialize = "Ukrainian", serialize = "uk")] + UK, + #[allow(missing_docs)] + #[strum(serialize = "Urdu", serialize = "ur")] + UR, + #[allow(missing_docs)] + #[strum(serialize = "Uzbek", serialize = "uz")] + UZ, + #[allow(missing_docs)] + #[strum(serialize = "Venda", serialize = "ve")] + VE, + #[allow(missing_docs)] + #[strum(serialize = "Vietnamese", serialize = "vi")] + VI, + #[allow(missing_docs)] + #[strum(serialize = "VolapĂĽk", serialize = "vo")] + VO, + #[allow(missing_docs)] + #[strum(serialize = "Walloon", serialize = "wa")] + WA, + #[allow(missing_docs)] + #[strum(serialize = "Welsh", serialize = "cy")] + CY, + #[allow(missing_docs)] + #[strum(serialize = "Wolof", serialize = "wo")] + WO, + #[allow(missing_docs)] + #[strum(serialize = "Xhosa", serialize = "xh")] + XH, + #[allow(missing_docs)] + #[strum(serialize = "Sichuan Yi", serialize = "ii")] + II, + #[allow(missing_docs)] + #[strum(serialize = "Yiddish", serialize = "yi")] + YI, + #[allow(missing_docs)] + #[strum(serialize = "Yoruba", serialize = "yo")] + YO, + #[allow(missing_docs)] + #[strum(serialize = "Zhuang", serialize = "za")] + ZA, + #[allow(missing_docs)] + #[strum(serialize = "Zulu", serialize = "zu")] + ZU, +} diff --git a/translatable_shared/src/misc/mod.rs b/translatable_shared/src/misc/mod.rs new file mode 100644 index 0000000..778f7c6 --- /dev/null +++ b/translatable_shared/src/misc/mod.rs @@ -0,0 +1,8 @@ +//! Uncategorized item module. +//! +//! This module contains sub-modules with miscellaneous structures, +//! or items that don't fit into an existing category — typically +//! because there aren’t enough related modules to justify their own group. + +pub mod language; +pub mod templating; diff --git a/translatable_shared/src/misc/templating.rs b/translatable_shared/src/misc/templating.rs new file mode 100644 index 0000000..08c6bee --- /dev/null +++ b/translatable_shared/src/misc/templating.rs @@ -0,0 +1,236 @@ +//! String template generation module. +//! +//! This module declares the [`FormatString`] +//! which is a structure to parse templates +//! and generate strings of them with replaced +//! parameters. + +use std::collections::HashMap; +use std::ops::Range; +use std::str::FromStr; + +use proc_macro2::TokenStream as TokenStream2; +use quote::{ToTokens, TokenStreamExt, quote}; +use syn::{Ident, parse_str}; +use thiserror::Error; + +/// Template parsing errors. +/// +/// This error is used within [`FormatString`] +/// to represent parsing errors such as unclosed +/// unescaped tags or invalid identifiers. +#[derive(Error, Debug)] +pub enum TemplateError { + /// Unclosed brace error. + /// + /// This error is returned when a brace + /// that was considered unescaped + /// was not closed after reaching the + /// last character of the string. + #[error("Found unclosed brace at index {0}")] + Unclosed(usize), + + /// Invalid ident error. + /// + /// This error is returned when a key + /// inside the braces couldn't be parsed + /// as an [`Ident`], invalid identifiers + /// are checked because of macro parsing. + #[error("Found template with key '{0}' which is an invalid identifier")] + InvalidIdent(String), +} + +/// Format string wrapper struct. 
+/// +/// This struct wraps a string and has +/// a counter of each template it has +/// with each respective position for +/// the sake of replacing these positions +/// with read data. +pub struct FormatString { + /// Original templated string. + /// + /// This field contains the original + /// string that aligns it's keyed templates + /// with `self.spans`. + /// + /// This should never be mutated for the sake + /// of keeping the alignment with `self.spans`. + original: String, + + /// Template spans. + /// + /// This vector contains the spans + /// of all the ranges containing a template + /// in the original string. + /// + /// This is stored in a vector because we + /// want to allow multiple templates with + /// the same key. + spans: Vec<(String, Range)>, +} + +impl FormatString { + /// Compile-time to runtime transformation function. + /// + /// This function takes data that may be generated + /// from a macro output and constructs an instance + /// of [`FormatString`] keeping its fields + /// private an immutable. + /// + /// If you use this to construct the instance manually + /// there is no promise that the string and spans + /// are aligned, thus the replacements are going + /// to work. + /// + /// **Parameters** + /// * `original` - What belongs to the `original` field. + /// * `spans` - What belongs to the `spans` field. + /// + /// **Returns** + /// An instance of self based on the provided parameters. + pub fn from_data(original: &str, spans: Vec<(String, Range)>) -> Self { + Self { original: original.to_string(), spans } + } + + /// Creates replaced original string copy. + /// + /// This method takes the original string, and replaces + /// it's templates with the values of the values provided + /// as a hashmap. + /// + /// **Parameters** + /// * `values` - The values to replace the templates with. + /// + /// **Returns** + /// A copy of the original string with it's templates replaced. + pub fn replace_with(&self, values: &HashMap) -> String { + let mut original = self + .original + .clone(); + + let mut spans = self + .spans + .clone(); + spans.sort_by_key(|(_key, range)| range.start); + + let mut offset = 0isize; + + for (key, range) in spans { + if let Some(value) = values.get(&key) { + let start = (range.start as isize + offset) as usize; + let end = (range.end as isize + offset) as usize; + + original.replace_range(start..end, value); + + offset += value.len() as isize - (range.end - range.start) as isize; + } + } + + original + } + + /// Original string getter. + /// + /// **Returns** + /// A shared slice to the original string. + pub fn original(&self) -> &str { + &self.original + } +} + +/// Parse method implementation. +/// +/// This implementation leads to the implementation +/// of the `parse` method for [`FormatString`] which +/// parses all the templates on the string and stores +/// them in a structure along the original string for +/// future replacement. +impl FromStr for FormatString { + type Err = TemplateError; + + fn from_str(s: &str) -> Result { + let original = s.to_string(); + let mut spans = Vec::new(); + + let char_to_byte = s + .char_indices() + .map(|(i, _)| i) + .collect::>(); + + let mut last_bracket_idx = None; + let mut current_tmpl_key = String::new(); + for (char_idx, c) in original + .chars() + .enumerate() + { + match (c, last_bracket_idx) { + // if last template index is the last character + // ignore current as is escaped. 
+ ('{', Some(prev)) if prev == char_idx.saturating_sub(1) => last_bracket_idx = None, + // if last template index is anything but the last character + // set it as last index. + ('{', _) => last_bracket_idx = Some(char_idx), + + // if last template index is not 0 and we find + // a closing bracket complete a range. + ('}', Some(open_idx)) => { + let key = current_tmpl_key.clone(); + + spans.push(( + parse_str::(&key) + .map_err(|_| TemplateError::InvalidIdent(key))? + .to_string(), + char_to_byte[open_idx] + ..char_to_byte + .get(char_idx + 1) + .copied() + .unwrap_or_else(|| s.len()), + )); + + last_bracket_idx = None; + current_tmpl_key.clear(); + }, + + (c, Some(_)) => current_tmpl_key.push(c), + + _ => {}, + } + } + + if let Some(lbi) = last_bracket_idx { + Err(TemplateError::Unclosed(lbi)) + } else { + Ok(FormatString { original, spans }) + } + } +} + +/// Compile-time to runtime conversion implementation. +/// +/// This implementation generates a call to the [`from_data`] +/// function in [`FormatString`]. +/// +/// [`from_data`]: FormatString::from_data +impl ToTokens for FormatString { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let original = &self.original; + + let span_map = self + .spans + .iter() + .map(|(key, range)| { + let start = range.start; + let end = range.end; + + quote! { (#key.to_string(), #start..#end) } + }); + + tokens.append_all(quote! { + translatable::shared::misc::templating::FormatString::from_data( + #original, + vec![#(#span_map),*] + ) + }); + } +} diff --git a/translatable_shared/src/translations/collection.rs b/translatable_shared/src/translations/collection.rs new file mode 100644 index 0000000..852be95 --- /dev/null +++ b/translatable_shared/src/translations/collection.rs @@ -0,0 +1,114 @@ +//! Translation file collection module. +//! +//! This module declares [`TranslationNodeCollection`] +//! a representation of each file found in the translations +//! folder defined in the configuration file. + +use std::collections::HashMap; + +use proc_macro2::TokenStream as TokenStream2; +use quote::{ToTokens, TokenStreamExt, quote}; + +use super::node::{TranslationNode, TranslationObject}; +use crate::macros::collections::map_transform_to_tokens; + +/// Translation file collection. +/// +/// This tuple struct wraps a hashmap implementing +/// a lookup trough all the files in ascending order. +/// +/// The internal hashmap contains the original file +/// paths along all the unmerged [`TranslationNode`] +/// found in each file. +pub struct TranslationNodeCollection(HashMap); + +impl TranslationNodeCollection { + /// Create a new [`TranslationNodeCollection`]. + /// + /// By providing a populated hashmap, create a new + /// [`TranslationNodeCollection`] structure. + /// + /// The file paths in the hashmap key aren't validated. This + /// is usually called from a `to-runtime` implementation, if + /// you want to obtain all the translation files use + /// + /// **Arguments** + /// * `collection` - An already populated collection for lookup. + /// + /// **Returns** + /// The provided collection wrapped in a [`TranslationNodeCollection`]. + pub fn new(collection: HashMap) -> Self { + Self(collection) + } + + /// Get a node from a file path. + /// + /// This method may be used to load a translation + /// independently, if you are looking for an independent + /// translation you may want to call find_path instead. + /// + /// **Arguments** + /// * `path` - The OS path where the file was originally found. 
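// ---------------------------------------------------------------------------
// Editor's illustrative sketch (not part of this patch): the span-replacement
// arithmetic used by `FormatString::replace_with` earlier in this patch.
// Spans are applied in ascending order while an offset tracks how much
// earlier replacements grew or shrank the string. Standalone demonstration
// with hand-written spans; the sample template is invented.
use std::collections::HashMap;
use std::ops::Range;

fn replace_spans(
    original: &str,
    mut spans: Vec<(String, Range<usize>)>,
    values: &HashMap<String, String>,
) -> String {
    let mut out = original.to_string();
    spans.sort_by_key(|(_, range)| range.start);

    let mut offset = 0isize;
    for (key, range) in spans {
        if let Some(value) = values.get(&key) {
            // Shift the recorded span by whatever earlier replacements changed.
            let start = (range.start as isize + offset) as usize;
            let end = (range.end as isize + offset) as usize;
            out.replace_range(start..end, value);
            offset += value.len() as isize - (range.end - range.start) as isize;
        }
    }

    out
}

fn main() {
    // "Hello {name}" with the `{name}` template spanning bytes 6..12.
    let spans = vec![("name".to_string(), 6..12)];
    let values = HashMap::from([("name".to_string(), "world".to_string())]);
    assert_eq!(replace_spans("Hello {name}", spans, &values), "Hello world");
}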
+ /// + /// **Returns** + /// A top level translation node, containing all the translations + /// in that specific file. + #[allow(unused)] + pub fn get_node(&self, path: &str) -> Option<&TranslationNode> { + self.0 + .get(path) + } + + /// Search a path trough all the nodes. + /// + /// This method is used to load a specific translation + /// file agnostic from a "translation path" which consists + /// of the necessary TOML object path to reach a specific + /// translation object. + /// + /// **Arguments** + /// * `path` - The sections of the TOML path in order to access + /// the desired translation object. + /// + /// **Returns** + /// A translation object containing a specific translation + /// in all it's available languages. + pub fn find_path(&self, path: &Vec) -> Option<&TranslationObject> { + self.0 + .values() + .find_map(|node| node.find_path(path)) + } +} + +/// Hashmap wrapper implementation. +/// +/// Abstraction to easily collect a [`HashMap`] and +/// wrap it in a [`TranslationNodeCollection`]. +impl FromIterator<(String, TranslationNode)> for TranslationNodeCollection { + fn from_iter>(iter: T) -> Self { + Self( + iter.into_iter() + .collect(), + ) + } +} + +/// Compile-time to runtime implementation. +/// +/// This implementation generates the call to [`new`] on +/// [`TranslationNodeCollection`] with the data from the current +/// instance to perform a compile-time to runtime conversion. +/// +/// [`new`]: TranslationNodeCollection::new +impl ToTokens for TranslationNodeCollection { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let map = + map_transform_to_tokens(&self.0, |key, value| quote! { (#key.to_string(), #value) }); + + tokens.append_all(quote! { + translatable::shared::translations::collection::TranslationNodeCollection::new( + #map + ) + }); + } +} diff --git a/translatable_shared/src/translations/mod.rs b/translatable_shared/src/translations/mod.rs new file mode 100644 index 0000000..a7e81a7 --- /dev/null +++ b/translatable_shared/src/translations/mod.rs @@ -0,0 +1,10 @@ +//! Translation structures module. +//! +//! This module's sub-modules declare +//! structures to manage deserialized +//! structures from the translation files. +//! These permit searching paths in a more +//! rust-friendly way. + +pub mod collection; +pub mod node; diff --git a/translatable_shared/src/translations/node.rs b/translatable_shared/src/translations/node.rs new file mode 100644 index 0000000..32ae9b6 --- /dev/null +++ b/translatable_shared/src/translations/node.rs @@ -0,0 +1,210 @@ +//! Translation node declaration module. +//! +//! This module declares [`TranslationNode`] which +//! is a nested enum that behaves like a n-ary tree +//! for which each branch contains paths that might +//! lead to translation objects or other paths. + +use std::collections::HashMap; + +use proc_macro2::TokenStream as TokenStream2; +use quote::{ToTokens, TokenStreamExt, quote}; +use strum::ParseError; +use thiserror::Error; +use toml_edit::{Item, Table, Value}; + +use crate::macros::collections::{map_to_tokens, map_transform_to_tokens}; +use crate::misc::language::Language; +use crate::misc::templating::{FormatString, TemplateError}; + +/// [`TranslationNode`] errors. +/// +/// This error is agnostic to the runtime, it is used +/// for errors while parsing a [`TranslationNode`] or +/// while trying seeking for it's content. +#[derive(Error, Debug)] +pub enum TranslationNodeError { + /// Invalid object type error. 
+ /// + /// This error signals that the nesting rules were + /// broken, thus the parsing cannot continue. + #[error("A nesting can only contain translation objects or other nestings")] + InvalidNesting, + + /// Template validation error. + /// + /// This means there was an error while validating + /// a translation templates, such as an invalid + /// ident for its keys or unclosed templates. + #[error("Template validation failed: {0:#}")] + TemplateValidation(#[from] TemplateError), + + /// Invalid value found inside a nesting. + /// + /// This error signals that an invalid value was found + /// inside a nesting, such as mixed values. + #[error( + "Mixed values are not allowed, a nesting can't contain strings and objects at the same \ + time" + )] + MixedValues, + + /// Invalid ISO-639-1 translation key. + /// + /// This error signals that an invalid key was found for a + /// translation inside a translation object. + /// + /// Translation keys must follow the ISO-639-1 standard. + #[error("Couldn't parse ISO 639-1 string for translation key")] + LanguageParsing(#[from] ParseError), + + /// Empty translation file. + /// + /// This error signals that a created translation file + /// is empty and cannot be parsed. + #[error("A translation file cannot be empty")] + EmptyTable, +} + +/// Nesting type alias. +/// +/// This is one of the valid objects that might be found +/// on a translation file, this object might contain a translation +/// or another nesting. +pub type TranslationNesting = HashMap; + +/// Object type alias. +/// +/// This is one of the valid objects that might be found +/// on a translation file, this object contains only translations +/// keyed with their respective languages. +pub type TranslationObject = HashMap; + +/// Translation node structure. +/// +/// This enum acts like an n-ary tree which +/// may contain [`TranslationNesting`] or +/// [`TranslationObject`] representing a tree +/// that follows the translation file rules. +pub enum TranslationNode { + /// Branch containing a [`TranslationNesting`]. + /// + /// Read the [`TranslationNesting`] documentation for + /// more information. + Nesting(TranslationNesting), + + /// Branch containing a [`TranslationObject`]. + /// + /// Read the [`TranslationObject`] documentation for + /// more information. + Translation(TranslationObject), +} + +impl TranslationNode { + /// Resolves a translation path through the nesting hierarchy. + /// + /// **Arguments** + /// * `path` - Slice of path segments to resolve. + /// + /// **Returns** + /// A reference to translations if path exists and points to leaf node. + pub fn find_path(&self, path: &Vec) -> Option<&TranslationObject> { + let path = path + .iter() + .map(|i| i.to_string()) + .collect::>(); + + match self { + Self::Nesting(nested) => { + let (first, rest) = path.split_first()?; + nested + .get(first)? + .find_path(&rest.to_vec()) + }, + Self::Translation(translation) => path + .is_empty() + .then_some(translation), + } + } +} + +/// Compile-time to runtime conversion implementation. +/// +/// This implementation converts a [`TranslationNode`] into +/// runtime trough tokens by nesting calls depending on the +/// type inferred in compile-time. +/// +/// This is usually used for dynamic paths. +impl ToTokens for TranslationNode { + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + TranslationNode::Nesting(nesting) => { + let map = map_transform_to_tokens( + nesting, + |key, value| quote! { (#key.to_string(), #value) }, + ); + + tokens.append_all(quote! 
{ + translatable::shared::translations::node::TranslationNode::Nesting( + #map + ) + }); + }, + + TranslationNode::Translation(translation) => { + let map = map_to_tokens(translation); + + tokens.append_all(quote! { + translatable::shared::translations::node::TranslationNode::Translation( + #map + ) + }); + }, + } + } +} + +/// TOML table parsing. +/// +/// This implementation parses a TOML table object +/// reference, usually taken from a `toml_edit::DocumentMut`, +/// into a [`TranslationNode`] for validation and +/// for looking up translations according to the nesting rules. +impl TryFrom<&Table> for TranslationNode { + type Error = TranslationNodeError; + + // The rule that the top level can only contain objects is never enforced here. + fn try_from(value: &Table) -> Result<Self, Self::Error> { + let mut result = None; + + for (key, value) in value { + match value { + Item::Value(Value::String(translation_value)) => { + match result.get_or_insert_with(|| Self::Translation(HashMap::new())) { + Self::Translation(translation) => { + translation.insert( + key.parse()?, + translation_value + .value() + .parse()?, + ); + }, + Self::Nesting(_) => return Err(TranslationNodeError::MixedValues), + } + }, + + Item::Table(nesting_value) => { + match result.get_or_insert_with(|| Self::Nesting(HashMap::new())) { + Self::Nesting(nesting) => { + nesting.insert(key.to_string(), Self::try_from(nesting_value)?); + }, + Self::Translation(_) => return Err(TranslationNodeError::MixedValues), + } + }, + _ => return Err(TranslationNodeError::InvalidNesting), + } + } + + result.ok_or(TranslationNodeError::EmptyTable) + } +} diff --git a/translations/test.toml b/translations/test.toml deleted file mode 100644 index e5613df..0000000 --- a/translations/test.toml +++ /dev/null @@ -1,3 +0,0 @@ - -[salutation] -test = { es = "Hola", en = "Hello" }
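// ---------------------------------------------------------------------------
// Editor's illustrative sketch (not part of this patch): how a translation
// table like the removed `translations/test.toml` above is seen by the
// `TryFrom<&Table>` implementation. Standalone walk over a `toml_edit`
// document, printing which entries would become nestings and which would
// become translation entries; assumes toml_edit 0.23, and the sample data is
// rewritten with standard tables purely for the sketch.
use toml_edit::{DocumentMut, Item, Table, Value};

fn describe(table: &Table, depth: usize) {
    let pad = "  ".repeat(depth);

    for (key, item) in table {
        match item {
            // A string value becomes one language entry of a translation object.
            Item::Value(Value::String(translation)) => {
                println!("{pad}{key} = \"{}\"", translation.value());
            },
            // A nested table becomes a `TranslationNode::Nesting` branch.
            Item::Table(nested) => {
                println!("{pad}[{key}]");
                describe(nested, depth + 1);
            },
            // Anything else would be rejected as `InvalidNesting` by the impl above.
            _ => println!("{pad}{key}: unsupported item"),
        }
    }
}

fn main() -> Result<(), toml_edit::TomlError> {
    let doc: DocumentMut = r#"
        [salutation.test]
        es = "Hola"
        en = "Hello"
    "#
    .parse()?;

    describe(doc.as_table(), 0);
    Ok(())
}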