From ea91f0e5970b1a31f8eaf7419833a756cf8fdf9c Mon Sep 17 00:00:00 2001 From: Josh Freda Date: Tue, 31 Jan 2023 03:31:28 -0600 Subject: [PATCH 001/192] Initial commit --- .gitignore | 19 + .yarnrc.yml | 1 + Makefile | 78 + README.md | 174 + cmd/hermes/main.go | 15 + configs/config.hcl | 169 + go.mod | 89 + go.sum | 476 + internal/api/analytics.go | 59 + internal/api/approvals.go | 371 + internal/api/document_types.go | 34 + internal/api/documents.go | 396 + internal/api/drafts.go | 838 + internal/api/helpers.go | 92 + internal/api/helpers_test.go | 98 + internal/api/me_subscriptions.go | 127 + internal/api/people.go | 109 + internal/api/products.go | 59 + internal/api/reviews.go | 725 + internal/auth/auth.go | 62 + internal/auth/google/google.go | 52 + internal/auth/oktaalb/doc.go | 3 + internal/auth/oktaalb/oktaalb.go | 107 + internal/cmd/base/base.go | 125 + internal/cmd/commands.go | 36 + internal/cmd/commands/indexer/indexer.go | 140 + internal/cmd/commands/server/server.go | 448 + internal/cmd/commands/version/version.go | 26 + internal/cmd/commands/version/version_test.go | 29 + internal/cmd/main.go | 48 + internal/config/config.go | 260 + internal/config/helpers.go | 21 + internal/db/db.go | 65 + internal/email/email.go | 114 + .../email/templates/review-requested.html | 14 + .../subscriber-document-published.html | 14 + internal/helpers/helpers.go | 12 + internal/indexer/indexer.go | 416 + internal/indexer/refresh_docs_headers.go | 236 + internal/indexer/refresh_drafts_headers.go | 208 + internal/indexer/refresh_headers.go | 190 + internal/pkg/doctypes/doc.go | 2 + internal/pkg/doctypes/doc_types.go | 57 + internal/pkg/featureflags/flags.go | 116 + internal/pub/assets/document.png | Bin 0 -> 29028 bytes internal/pub/assets/hermes-logo.png | Bin 0 -> 2657 bytes internal/pub/pub.go | 27 + internal/structs/product.go | 23 + internal/test/database.go | 60 + internal/version/version.go | 3 + pkg/algolia/client.go | 312 + pkg/algolia/doc.go | 2 + 
pkg/algolia/proxy.go | 58 + pkg/googleworkspace/admin_helpers.go | 56 + pkg/googleworkspace/doc.go | 2 + pkg/googleworkspace/docs_helpers.go | 113 + pkg/googleworkspace/drive_helpers.go | 391 + pkg/googleworkspace/gmail_helpers.go | 25 + pkg/googleworkspace/oauth2_helpers.go | 20 + pkg/googleworkspace/people_helpers.go | 51 + pkg/googleworkspace/service.go | 234 + pkg/hashicorpdocs/basedoc.go | 189 + pkg/hashicorpdocs/common.go | 123 + pkg/hashicorpdocs/doc.go | 3 + pkg/hashicorpdocs/frd.go | 346 + pkg/hashicorpdocs/frd_replace_header.go | 754 + pkg/hashicorpdocs/prd.go | 326 + pkg/hashicorpdocs/prd_replace_header.go | 741 + pkg/hashicorpdocs/rfc.go | 465 + pkg/hashicorpdocs/rfc_replace_header.go | 806 + pkg/links/data.go | 63 + pkg/links/redirect.go | 116 + pkg/models/document.go | 384 + pkg/models/document_custom_field.go | 63 + pkg/models/document_review.go | 160 + pkg/models/document_review_test.go | 263 + pkg/models/document_test.go | 759 + pkg/models/document_type.go | 136 + pkg/models/document_type_custom_field.go | 107 + pkg/models/document_type_custom_field_test.go | 161 + pkg/models/document_type_test.go | 143 + pkg/models/gorm.go | 16 + pkg/models/indexer_folder.go | 63 + pkg/models/indexer_folder_test.go | 98 + pkg/models/indexer_metadata.go | 45 + pkg/models/indexer_metadata_test.go | 64 + pkg/models/product.go | 78 + pkg/models/product_latest_document_number.go | 90 + .../product_latest_document_number_test.go | 206 + pkg/models/product_test.go | 325 + pkg/models/testing.go | 37 + pkg/models/user.go | 138 + pkg/models/user_test.go | 345 + web/.prettierrc.json | 1 + web/app/adapters/google/drive.js | 15 + web/app/adapters/google/drive/file.js | 8 + web/app/adapters/google/userinfo.js | 15 + web/app/adapters/google/userinfo/me.js | 8 + web/app/app.ts | 12 + web/app/authenticators/torii.js | 6 + web/app/components/.gitkeep | 0 web/app/components/action.hbs | 3 + .../components/application-loading/index.hbs | 3 + 
.../components/application-loading/index.js | 6 + web/app/components/custom-editable-field.hbs | 71 + web/app/components/custom-editable-field.js | 24 + .../custom-editable-fields/empty-state.hbs | 3 + .../components/dashboard/latest-updates.hbs | 55 + .../components/dashboard/latest-updates.ts | 98 + .../dashboard/new-features-banner.hbs | 27 + .../dashboard/new-features-banner.ts | 31 + web/app/components/doc/inline.hbs | 77 + web/app/components/doc/inline.js | 59 + web/app/components/doc/row.hbs | 87 + web/app/components/doc/row.js | 59 + web/app/components/doc/state-progress-bar.hbs | 5 + web/app/components/doc/state.hbs | 16 + web/app/components/doc/state.js | 59 + web/app/components/doc/tag.hbs | 12 + web/app/components/doc/tag.js | 107 + web/app/components/doc/tile.hbs | 101 + web/app/components/doc/tile.js | 59 + web/app/components/document/index.hbs | 25 + web/app/components/document/index.js | 48 + web/app/components/editable-field.hbs | 18 + web/app/components/editable-field.js | 62 + web/app/components/footer.hbs | 22 + web/app/components/footer.ts | 15 + web/app/components/header.hbs | 8 + web/app/components/header/facet-dropdown.hbs | 24 + web/app/components/header/facet-dropdown.ts | 47 + web/app/components/header/nav.hbs | 68 + web/app/components/header/nav.js | 36 + web/app/components/header/search.hbs | 90 + web/app/components/header/search.ts | 89 + web/app/components/header/toolbar.hbs | 60 + web/app/components/header/toolbar.js | 116 + web/app/components/hermes-logo.hbs | 10 + web/app/components/hermes-logo.ts | 8 + web/app/components/inputs/people-select.hbs | 18 + web/app/components/inputs/people-select.ts | 86 + web/app/components/inputs/tag-select.hbs | 15 + web/app/components/inputs/tag-select.js | 40 + web/app/components/modal-alert-error.hbs | 11 + web/app/components/modals/doc-created.hbs | 46 + web/app/components/modals/doc-created.ts | 36 + web/app/components/modals/index.hbs | 3 + web/app/components/modals/index.ts | 7 + 
web/app/components/multiselect/tag-chip.hbs | 2 + .../multiselect/user-email-image-chip.hbs | 4 + web/app/components/new/doc-form.hbs | 193 + web/app/components/new/doc-form.js | 209 + web/app/components/notification.hbs | 15 + web/app/components/notification.js | 12 + web/app/components/pagination/index.hbs | 46 + web/app/components/pagination/index.ts | 46 + web/app/components/pagination/link.hbs | 27 + web/app/components/pagination/link.ts | 16 + web/app/components/person.hbs | 27 + web/app/components/person.js | 7 + web/app/components/results/index.hbs | 57 + web/app/components/results/index.ts | 11 + web/app/components/row-results.hbs | 59 + web/app/components/row-results.ts | 12 + .../settings/subscription-list-item.hbs | 29 + .../settings/subscription-list-item.ts | 40 + .../components/settings/subscription-list.hbs | 16 + .../components/settings/subscription-list.ts | 31 + web/app/components/sidebar.hbs | 712 + web/app/components/sidebar.js | 419 + web/app/components/x/hds-tab.hbs | 17 + web/app/config/environment.d.ts | 26 + web/app/controllers/.gitkeep | 0 web/app/controllers/application.js | 8 + web/app/controllers/authenticate.ts | 39 + web/app/controllers/authenticated/all.ts | 11 + .../controllers/authenticated/dashboard.js | 15 + web/app/controllers/authenticated/document.js | 6 + web/app/controllers/authenticated/drafts.ts | 11 + web/app/controllers/authenticated/my.ts | 11 + web/app/controllers/authenticated/new/doc.js | 11 + web/app/controllers/authenticated/results.ts | 12 + web/app/helpers/.gitkeep | 0 web/app/helpers/add.js | 7 + web/app/helpers/get-first-letter.js | 10 + web/app/helpers/get-product-id.js | 5 + web/app/helpers/lowercase.js | 7 + web/app/helpers/parse-date.ts | 6 + web/app/helpers/uid.js | 17 + web/app/index.html | 24 + .../initializers/custom-inflector-rules.js | 13 + web/app/models/.gitkeep | 0 web/app/models/document.js | 3 + web/app/models/google/drive/file.js | 11 + web/app/models/google/userinfo/me.js | 8 + 
web/app/router.js | 23 + web/app/routes/.gitkeep | 0 web/app/routes/application.js | 41 + web/app/routes/authenticate.ts | 10 + web/app/routes/authenticated.ts | 34 + web/app/routes/authenticated/all.ts | 44 + web/app/routes/authenticated/dashboard.js | 138 + web/app/routes/authenticated/document.js | 150 + web/app/routes/authenticated/drafts.ts | 140 + web/app/routes/authenticated/index.js | 10 + web/app/routes/authenticated/my.ts | 43 + web/app/routes/authenticated/new/doc.js | 65 + web/app/routes/authenticated/new/index.js | 12 + web/app/routes/authenticated/results.ts | 41 + web/app/routes/authenticated/settings.ts | 30 + web/app/serializers/google/drive/file.js | 13 + web/app/serializers/google/userinfo/me.js | 21 + web/app/services/algolia.ts | 371 + web/app/services/authenticated-user.ts | 178 + web/app/services/config.ts | 29 + web/app/services/fetch.ts | 51 + web/app/services/flags.js | 8 + web/app/services/modal-alerts.ts | 29 + web/app/services/recently-viewed-docs.js | 147 + web/app/services/toolbar.js | 6 + web/app/styles/app.scss | 113 + web/app/styles/components/action.scss | 4 + web/app/styles/components/editable-field.scss | 61 + web/app/styles/components/footer.scss | 7 + web/app/styles/components/hds-badge.scss | 3 + web/app/styles/components/modal-dialog.scss | 19 + web/app/styles/components/multiselect.scss | 86 + web/app/styles/components/nav.scss | 57 + web/app/styles/components/notification.scss | 24 + web/app/styles/components/page.scss | 7 + web/app/styles/components/preview-card.scss | 4 + web/app/styles/components/row-results.scss | 51 + web/app/styles/components/sidebar.scss | 22 + web/app/styles/components/template-card.scss | 32 + web/app/styles/components/tile-list.scss | 3 + web/app/styles/components/toolbar.scss | 13 + web/app/styles/components/x-hds-tab.scss | 43 + web/app/styles/ember-power-select-theme.scss | 44 + web/app/styles/hashicorp/hermes-logo.scss | 11 + web/app/styles/hashicorp/product-badge.scss | 37 + 
web/app/styles/hds-overrides.scss | 20 + web/app/templates/application-loading.hbs | 3 + web/app/templates/application.hbs | 10 + web/app/templates/authenticate.hbs | 31 + web/app/templates/authenticated.hbs | 3 + web/app/templates/authenticated/all.hbs | 9 + web/app/templates/authenticated/dashboard.hbs | 81 + web/app/templates/authenticated/document.hbs | 3 + web/app/templates/authenticated/drafts.hbs | 10 + web/app/templates/authenticated/my.hbs | 9 + web/app/templates/authenticated/new.hbs | 6 + web/app/templates/authenticated/new/doc.hbs | 4 + web/app/templates/authenticated/new/index.hbs | 48 + web/app/templates/authenticated/results.hbs | 5 + web/app/templates/authenticated/settings.hbs | 13 + .../torii-providers/google-oauth2-bearer.js | 9 + web/app/types/document-routes.ts | 12 + web/app/types/document.d.ts | 20 + web/app/types/facets.d.ts | 23 + web/app/utils/facets.js | 29 + web/app/utils/get-product-id.ts | 18 + web/app/utils/parse-date.ts | 25 + web/app/utils/time-ago.js | 59 + web/config/deprecation-workflow.js | 12 + web/config/ember-cli-update.json | 17 + web/config/environment.js | 87 + web/config/optional-features.json | 6 + web/config/targets.js | 26 + web/ember-cli-build.js | 50 + web/mirage/config.ts | 35 + web/mirage/factories/person.ts | 6 + web/mirage/helpers.ts | 14 + web/mirage/models/person.ts | 5 + web/package.json | 135 + web/public/images/document.png | Bin 0 -> 29028 bytes web/public/robots.txt | 3 + web/tailwind.config.js | 177 + web/testem.js | 23 + web/tests/helpers/.gitkeep | 0 web/tests/helpers/flash-message.js | 3 + web/tests/index.html | 40 + web/tests/integration/.gitkeep | 0 .../integration/components/action-test.js | 48 + .../components/custom-editable-field-test.js | 89 + .../integration/components/doc/state-test.js | 87 + .../components/header/facet-test.js | 61 + .../components/header/toolbar-test.js | 59 + .../components/inputs/people-select-test.ts | 70 + .../components/modals/doc-created-test.ts | 61 + 
.../components/modals/index-test.ts | 23 + .../integration/components/person-test.js | 52 + .../settings/subscription-list-item-test.ts | 42 + .../settings/subscription-list-test.ts | 34 + .../integration/components/x/hds-tab-test.js | 79 + .../helpers/get-first-letter-test.js | 36 + .../helpers/get-product-id-test.js | 25 + .../integration/helpers/lowercase-test.js | 16 + .../integration/helpers/parse-date-test.ts | 28 + web/tests/test-helper.ts | 14 + web/tests/unit/.gitkeep | 0 .../unit/controllers/authenticate-test.js | 12 + web/tests/unit/routes/all-test.js | 11 + web/tests/unit/routes/authenticate-test.js | 11 + web/tests/unit/routes/authenticated-test.js | 11 + web/tests/unit/routes/document-test.js | 11 + web/tests/unit/routes/index-test.js | 11 + web/tests/unit/routes/results-test.js | 11 + web/tests/unit/services/modal-alerts-test.ts | 26 + web/tests/unit/utils/get-product-id-test.js | 17 + web/tests/unit/utils/parse-date-test.ts | 25 + web/tsconfig.json | 19 + web/types/document.d.ts | 4 + web/types/ember-cli-mirage/test-support.d.ts | 14 + .../ember-data/types/registries/model.d.ts | 6 + web/types/facets.d.ts | 19 + web/types/global.d.ts | 7 + web/types/hermes/index.d.ts | 11 + web/vendor/.gitkeep | 0 web/web.go | 123 + web/yarn.lock | 17785 ++++++++++++++++ 320 files changed, 42345 insertions(+) create mode 100644 .gitignore create mode 100644 .yarnrc.yml create mode 100644 Makefile create mode 100644 README.md create mode 100644 cmd/hermes/main.go create mode 100644 configs/config.hcl create mode 100644 go.mod create mode 100644 go.sum create mode 100644 internal/api/analytics.go create mode 100644 internal/api/approvals.go create mode 100644 internal/api/document_types.go create mode 100644 internal/api/documents.go create mode 100644 internal/api/drafts.go create mode 100644 internal/api/helpers.go create mode 100644 internal/api/helpers_test.go create mode 100644 internal/api/me_subscriptions.go create mode 100644 internal/api/people.go create mode 
100644 internal/api/products.go create mode 100644 internal/api/reviews.go create mode 100644 internal/auth/auth.go create mode 100644 internal/auth/google/google.go create mode 100644 internal/auth/oktaalb/doc.go create mode 100644 internal/auth/oktaalb/oktaalb.go create mode 100644 internal/cmd/base/base.go create mode 100644 internal/cmd/commands.go create mode 100644 internal/cmd/commands/indexer/indexer.go create mode 100644 internal/cmd/commands/server/server.go create mode 100644 internal/cmd/commands/version/version.go create mode 100644 internal/cmd/commands/version/version_test.go create mode 100644 internal/cmd/main.go create mode 100644 internal/config/config.go create mode 100644 internal/config/helpers.go create mode 100644 internal/db/db.go create mode 100644 internal/email/email.go create mode 100644 internal/email/templates/review-requested.html create mode 100644 internal/email/templates/subscriber-document-published.html create mode 100644 internal/helpers/helpers.go create mode 100644 internal/indexer/indexer.go create mode 100644 internal/indexer/refresh_docs_headers.go create mode 100644 internal/indexer/refresh_drafts_headers.go create mode 100644 internal/indexer/refresh_headers.go create mode 100644 internal/pkg/doctypes/doc.go create mode 100644 internal/pkg/doctypes/doc_types.go create mode 100644 internal/pkg/featureflags/flags.go create mode 100644 internal/pub/assets/document.png create mode 100644 internal/pub/assets/hermes-logo.png create mode 100644 internal/pub/pub.go create mode 100644 internal/structs/product.go create mode 100644 internal/test/database.go create mode 100644 internal/version/version.go create mode 100644 pkg/algolia/client.go create mode 100644 pkg/algolia/doc.go create mode 100644 pkg/algolia/proxy.go create mode 100644 pkg/googleworkspace/admin_helpers.go create mode 100644 pkg/googleworkspace/doc.go create mode 100644 pkg/googleworkspace/docs_helpers.go create mode 100644 pkg/googleworkspace/drive_helpers.go 
create mode 100644 pkg/googleworkspace/gmail_helpers.go create mode 100644 pkg/googleworkspace/oauth2_helpers.go create mode 100644 pkg/googleworkspace/people_helpers.go create mode 100644 pkg/googleworkspace/service.go create mode 100644 pkg/hashicorpdocs/basedoc.go create mode 100644 pkg/hashicorpdocs/common.go create mode 100644 pkg/hashicorpdocs/doc.go create mode 100644 pkg/hashicorpdocs/frd.go create mode 100644 pkg/hashicorpdocs/frd_replace_header.go create mode 100644 pkg/hashicorpdocs/prd.go create mode 100644 pkg/hashicorpdocs/prd_replace_header.go create mode 100644 pkg/hashicorpdocs/rfc.go create mode 100644 pkg/hashicorpdocs/rfc_replace_header.go create mode 100644 pkg/links/data.go create mode 100644 pkg/links/redirect.go create mode 100644 pkg/models/document.go create mode 100644 pkg/models/document_custom_field.go create mode 100644 pkg/models/document_review.go create mode 100644 pkg/models/document_review_test.go create mode 100644 pkg/models/document_test.go create mode 100644 pkg/models/document_type.go create mode 100644 pkg/models/document_type_custom_field.go create mode 100644 pkg/models/document_type_custom_field_test.go create mode 100644 pkg/models/document_type_test.go create mode 100644 pkg/models/gorm.go create mode 100644 pkg/models/indexer_folder.go create mode 100644 pkg/models/indexer_folder_test.go create mode 100644 pkg/models/indexer_metadata.go create mode 100644 pkg/models/indexer_metadata_test.go create mode 100644 pkg/models/product.go create mode 100644 pkg/models/product_latest_document_number.go create mode 100644 pkg/models/product_latest_document_number_test.go create mode 100644 pkg/models/product_test.go create mode 100644 pkg/models/testing.go create mode 100644 pkg/models/user.go create mode 100644 pkg/models/user_test.go create mode 100644 web/.prettierrc.json create mode 100644 web/app/adapters/google/drive.js create mode 100644 web/app/adapters/google/drive/file.js create mode 100644 
web/app/adapters/google/userinfo.js create mode 100644 web/app/adapters/google/userinfo/me.js create mode 100644 web/app/app.ts create mode 100644 web/app/authenticators/torii.js create mode 100644 web/app/components/.gitkeep create mode 100644 web/app/components/action.hbs create mode 100644 web/app/components/application-loading/index.hbs create mode 100644 web/app/components/application-loading/index.js create mode 100644 web/app/components/custom-editable-field.hbs create mode 100644 web/app/components/custom-editable-field.js create mode 100644 web/app/components/custom-editable-fields/empty-state.hbs create mode 100644 web/app/components/dashboard/latest-updates.hbs create mode 100644 web/app/components/dashboard/latest-updates.ts create mode 100644 web/app/components/dashboard/new-features-banner.hbs create mode 100644 web/app/components/dashboard/new-features-banner.ts create mode 100644 web/app/components/doc/inline.hbs create mode 100644 web/app/components/doc/inline.js create mode 100644 web/app/components/doc/row.hbs create mode 100644 web/app/components/doc/row.js create mode 100644 web/app/components/doc/state-progress-bar.hbs create mode 100644 web/app/components/doc/state.hbs create mode 100644 web/app/components/doc/state.js create mode 100644 web/app/components/doc/tag.hbs create mode 100644 web/app/components/doc/tag.js create mode 100644 web/app/components/doc/tile.hbs create mode 100644 web/app/components/doc/tile.js create mode 100644 web/app/components/document/index.hbs create mode 100644 web/app/components/document/index.js create mode 100644 web/app/components/editable-field.hbs create mode 100644 web/app/components/editable-field.js create mode 100644 web/app/components/footer.hbs create mode 100644 web/app/components/footer.ts create mode 100644 web/app/components/header.hbs create mode 100644 web/app/components/header/facet-dropdown.hbs create mode 100644 web/app/components/header/facet-dropdown.ts create mode 100644 
web/app/components/header/nav.hbs create mode 100644 web/app/components/header/nav.js create mode 100644 web/app/components/header/search.hbs create mode 100644 web/app/components/header/search.ts create mode 100644 web/app/components/header/toolbar.hbs create mode 100644 web/app/components/header/toolbar.js create mode 100644 web/app/components/hermes-logo.hbs create mode 100644 web/app/components/hermes-logo.ts create mode 100644 web/app/components/inputs/people-select.hbs create mode 100644 web/app/components/inputs/people-select.ts create mode 100644 web/app/components/inputs/tag-select.hbs create mode 100644 web/app/components/inputs/tag-select.js create mode 100644 web/app/components/modal-alert-error.hbs create mode 100644 web/app/components/modals/doc-created.hbs create mode 100644 web/app/components/modals/doc-created.ts create mode 100644 web/app/components/modals/index.hbs create mode 100644 web/app/components/modals/index.ts create mode 100644 web/app/components/multiselect/tag-chip.hbs create mode 100644 web/app/components/multiselect/user-email-image-chip.hbs create mode 100644 web/app/components/new/doc-form.hbs create mode 100644 web/app/components/new/doc-form.js create mode 100644 web/app/components/notification.hbs create mode 100644 web/app/components/notification.js create mode 100644 web/app/components/pagination/index.hbs create mode 100644 web/app/components/pagination/index.ts create mode 100644 web/app/components/pagination/link.hbs create mode 100644 web/app/components/pagination/link.ts create mode 100644 web/app/components/person.hbs create mode 100644 web/app/components/person.js create mode 100644 web/app/components/results/index.hbs create mode 100644 web/app/components/results/index.ts create mode 100644 web/app/components/row-results.hbs create mode 100644 web/app/components/row-results.ts create mode 100644 web/app/components/settings/subscription-list-item.hbs create mode 100644 
web/app/components/settings/subscription-list-item.ts create mode 100644 web/app/components/settings/subscription-list.hbs create mode 100644 web/app/components/settings/subscription-list.ts create mode 100644 web/app/components/sidebar.hbs create mode 100644 web/app/components/sidebar.js create mode 100644 web/app/components/x/hds-tab.hbs create mode 100644 web/app/config/environment.d.ts create mode 100644 web/app/controllers/.gitkeep create mode 100644 web/app/controllers/application.js create mode 100644 web/app/controllers/authenticate.ts create mode 100644 web/app/controllers/authenticated/all.ts create mode 100644 web/app/controllers/authenticated/dashboard.js create mode 100644 web/app/controllers/authenticated/document.js create mode 100644 web/app/controllers/authenticated/drafts.ts create mode 100644 web/app/controllers/authenticated/my.ts create mode 100644 web/app/controllers/authenticated/new/doc.js create mode 100644 web/app/controllers/authenticated/results.ts create mode 100644 web/app/helpers/.gitkeep create mode 100644 web/app/helpers/add.js create mode 100644 web/app/helpers/get-first-letter.js create mode 100644 web/app/helpers/get-product-id.js create mode 100644 web/app/helpers/lowercase.js create mode 100644 web/app/helpers/parse-date.ts create mode 100644 web/app/helpers/uid.js create mode 100644 web/app/index.html create mode 100644 web/app/initializers/custom-inflector-rules.js create mode 100644 web/app/models/.gitkeep create mode 100644 web/app/models/document.js create mode 100644 web/app/models/google/drive/file.js create mode 100644 web/app/models/google/userinfo/me.js create mode 100644 web/app/router.js create mode 100644 web/app/routes/.gitkeep create mode 100644 web/app/routes/application.js create mode 100644 web/app/routes/authenticate.ts create mode 100644 web/app/routes/authenticated.ts create mode 100644 web/app/routes/authenticated/all.ts create mode 100644 web/app/routes/authenticated/dashboard.js create mode 100644 
web/app/routes/authenticated/document.js create mode 100644 web/app/routes/authenticated/drafts.ts create mode 100644 web/app/routes/authenticated/index.js create mode 100644 web/app/routes/authenticated/my.ts create mode 100644 web/app/routes/authenticated/new/doc.js create mode 100644 web/app/routes/authenticated/new/index.js create mode 100644 web/app/routes/authenticated/results.ts create mode 100644 web/app/routes/authenticated/settings.ts create mode 100644 web/app/serializers/google/drive/file.js create mode 100644 web/app/serializers/google/userinfo/me.js create mode 100644 web/app/services/algolia.ts create mode 100644 web/app/services/authenticated-user.ts create mode 100644 web/app/services/config.ts create mode 100644 web/app/services/fetch.ts create mode 100644 web/app/services/flags.js create mode 100644 web/app/services/modal-alerts.ts create mode 100644 web/app/services/recently-viewed-docs.js create mode 100644 web/app/services/toolbar.js create mode 100644 web/app/styles/app.scss create mode 100644 web/app/styles/components/action.scss create mode 100644 web/app/styles/components/editable-field.scss create mode 100644 web/app/styles/components/footer.scss create mode 100644 web/app/styles/components/hds-badge.scss create mode 100644 web/app/styles/components/modal-dialog.scss create mode 100644 web/app/styles/components/multiselect.scss create mode 100644 web/app/styles/components/nav.scss create mode 100644 web/app/styles/components/notification.scss create mode 100644 web/app/styles/components/page.scss create mode 100644 web/app/styles/components/preview-card.scss create mode 100644 web/app/styles/components/row-results.scss create mode 100644 web/app/styles/components/sidebar.scss create mode 100644 web/app/styles/components/template-card.scss create mode 100644 web/app/styles/components/tile-list.scss create mode 100644 web/app/styles/components/toolbar.scss create mode 100644 web/app/styles/components/x-hds-tab.scss create mode 100644 
web/app/styles/ember-power-select-theme.scss create mode 100644 web/app/styles/hashicorp/hermes-logo.scss create mode 100644 web/app/styles/hashicorp/product-badge.scss create mode 100644 web/app/styles/hds-overrides.scss create mode 100644 web/app/templates/application-loading.hbs create mode 100644 web/app/templates/application.hbs create mode 100644 web/app/templates/authenticate.hbs create mode 100644 web/app/templates/authenticated.hbs create mode 100644 web/app/templates/authenticated/all.hbs create mode 100644 web/app/templates/authenticated/dashboard.hbs create mode 100644 web/app/templates/authenticated/document.hbs create mode 100644 web/app/templates/authenticated/drafts.hbs create mode 100644 web/app/templates/authenticated/my.hbs create mode 100644 web/app/templates/authenticated/new.hbs create mode 100644 web/app/templates/authenticated/new/doc.hbs create mode 100644 web/app/templates/authenticated/new/index.hbs create mode 100644 web/app/templates/authenticated/results.hbs create mode 100644 web/app/templates/authenticated/settings.hbs create mode 100644 web/app/torii-providers/google-oauth2-bearer.js create mode 100644 web/app/types/document-routes.ts create mode 100644 web/app/types/document.d.ts create mode 100644 web/app/types/facets.d.ts create mode 100644 web/app/utils/facets.js create mode 100644 web/app/utils/get-product-id.ts create mode 100644 web/app/utils/parse-date.ts create mode 100644 web/app/utils/time-ago.js create mode 100644 web/config/deprecation-workflow.js create mode 100644 web/config/ember-cli-update.json create mode 100644 web/config/environment.js create mode 100644 web/config/optional-features.json create mode 100644 web/config/targets.js create mode 100644 web/ember-cli-build.js create mode 100644 web/mirage/config.ts create mode 100644 web/mirage/factories/person.ts create mode 100644 web/mirage/helpers.ts create mode 100644 web/mirage/models/person.ts create mode 100644 web/package.json create mode 100644 
web/public/images/document.png create mode 100644 web/public/robots.txt create mode 100644 web/tailwind.config.js create mode 100644 web/testem.js create mode 100644 web/tests/helpers/.gitkeep create mode 100644 web/tests/helpers/flash-message.js create mode 100644 web/tests/index.html create mode 100644 web/tests/integration/.gitkeep create mode 100644 web/tests/integration/components/action-test.js create mode 100644 web/tests/integration/components/custom-editable-field-test.js create mode 100644 web/tests/integration/components/doc/state-test.js create mode 100644 web/tests/integration/components/header/facet-test.js create mode 100644 web/tests/integration/components/header/toolbar-test.js create mode 100644 web/tests/integration/components/inputs/people-select-test.ts create mode 100644 web/tests/integration/components/modals/doc-created-test.ts create mode 100644 web/tests/integration/components/modals/index-test.ts create mode 100644 web/tests/integration/components/person-test.js create mode 100644 web/tests/integration/components/settings/subscription-list-item-test.ts create mode 100644 web/tests/integration/components/settings/subscription-list-test.ts create mode 100644 web/tests/integration/components/x/hds-tab-test.js create mode 100644 web/tests/integration/helpers/get-first-letter-test.js create mode 100644 web/tests/integration/helpers/get-product-id-test.js create mode 100644 web/tests/integration/helpers/lowercase-test.js create mode 100644 web/tests/integration/helpers/parse-date-test.ts create mode 100644 web/tests/test-helper.ts create mode 100644 web/tests/unit/.gitkeep create mode 100644 web/tests/unit/controllers/authenticate-test.js create mode 100644 web/tests/unit/routes/all-test.js create mode 100644 web/tests/unit/routes/authenticate-test.js create mode 100644 web/tests/unit/routes/authenticated-test.js create mode 100644 web/tests/unit/routes/document-test.js create mode 100644 web/tests/unit/routes/index-test.js create mode 100644 
web/tests/unit/routes/results-test.js create mode 100644 web/tests/unit/services/modal-alerts-test.ts create mode 100644 web/tests/unit/utils/get-product-id-test.js create mode 100644 web/tests/unit/utils/parse-date-test.ts create mode 100644 web/tsconfig.json create mode 100644 web/types/document.d.ts create mode 100644 web/types/ember-cli-mirage/test-support.d.ts create mode 100644 web/types/ember-data/types/registries/model.d.ts create mode 100644 web/types/facets.d.ts create mode 100644 web/types/global.d.ts create mode 100644 web/types/hermes/index.d.ts create mode 100644 web/vendor/.gitkeep create mode 100644 web/web.go create mode 100644 web/yarn.lock diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..79fecca5d --- /dev/null +++ b/.gitignore @@ -0,0 +1,19 @@ +/config.hcl +/hermes + +# Google OAuth 2.0 +/credentials.json +/token.json + +# macOS & local +.DS_Store +.env + +# Web application +node_modules +/web/.pnp.* +/web/.yarn/* +/web/dist + +# Terraform related +.terraform diff --git a/.yarnrc.yml b/.yarnrc.yml new file mode 100644 index 000000000..7f3d03fd8 --- /dev/null +++ b/.yarnrc.yml @@ -0,0 +1 @@ +nodeLinker: "node-modules" diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..80a19165d --- /dev/null +++ b/Makefile @@ -0,0 +1,78 @@ +.PHONY: default +default: help + +.PHONY: build-ui +build-ui: + cd web; \ + yarn install; \ + rm -rf dist/; \ + yarn build; + +.PHONY: build +build: build-ui + rm -f ./hermes + CGO_ENABLED=0 go build -o ./hermes ./cmd/hermes + +.PHONY: bin +bin: + CGO_ENABLED=0 go build -o ./hermes ./cmd/hermes + +.PHONY: bin/linux +bin/linux: # bin creates hermes binary for linux + CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o ./hermes ./cmd/hermes + +.PHONY: dev +dev: ## One command to start a dev environment +dev: docker/postgres/start + $(MAKE) bin && ($(MAKE) run &) && $(MAKE) web/run && fg + +.PHONY: docker/postgres/clear +docker/postgres/clear: ## Stop and clear data for PostgreSQL in 
Docker + docker-compose down -v + +.PHONY: docker/postgres/start +docker/postgres/start: ## Start PostgreSQL in Docker + docker-compose up -d && sleep 1 + +.PHONY: docker/postgres/stop +docker/postgres/stop: ## Stop PostgreSQL in Docker + docker-compose down + +.PHONY: go/build +go/build: + CGO_ENABLED=0 go build -o ./hermes ./cmd/hermes + +.PHONY: go/test +go/test: + go test ./... + +.PHONY: go/test/with-docker-postgres +go/test/with-docker-postgres: docker/postgres/start + HERMES_TEST_POSTGRESQL_DSN="host=localhost user=postgres password=postgres port=5432" \ + go test -count=1 -v ./... + +.PHONY: help +help: ## Print this help + @echo "Usage: make " + @echo + @echo "Targets:" + @egrep '^(.+)\:\ ##\ (.+)' $(MAKEFILE_LIST) | column -t -c 2 -s ':#' + +.PHONY: run +run: + ./hermes server -config=config.hcl + +.PHONY: test +test: + go test ./... + +.PHONY: web/install-deps +web/install-deps: ## Install web application dependencies + cd web \ + && yarn install + +.PHONY: web/run +web/run: ## Run web application while proxying backend requests +web/run: web/install-deps + cd web \ + && yarn start:with-proxy diff --git a/README.md b/README.md new file mode 100644 index 000000000..858b72f86 --- /dev/null +++ b/README.md @@ -0,0 +1,174 @@ +# Hermes + +> Hermes is not an official HashiCorp project. +> The repository contains software which is under active development and is in the alpha stage. Please read the “[Project Status](#project-status)” section for more information. + +Hermes is an open source document management system created by HashiCorp to help scale the writing and document process. Read the release blog post [here](https://hashicorp.com/blog/introducing-hermes-an-open-source-document-management-system). + +Hermes was created and is currently maintained by HashiCorp Labs, a small team in the Office of the CTO. + +**Please note**: While this is not an official HashiCorp project, security is still very important to us! 
If you think that you've found a security issue, please contact us via email at hermes-feedback@hashicorp.com instead of filing a GitHub issue. + +# Usage + +## Setup + +### Google + +1. Sign up for a [Google Workspace](https://workspace.google.com/) account. + +1. [Create a Google Cloud project](https://developers.google.com/workspace/guides/create-project). + +1. Enable the following APIs for [Google Workspace APIs](https://developers.google.com/workspace/guides/enable-apis) + + - Google Docs API + - Google Drive API + - Gmail API + - People API + - Also, to enable searching for users, the Google Workspace domain admin needs to enable external directory sharing. See more details: https://support.google.com/a/answer/6343701 + - Select Directory Setting >Sharing setting section > Contact sharing > Enable contact sharing + +1. [Configure the OAuth consent screen](https://developers.google.com/workspace/guides/configure-oauth-consent) for the application in GCP project. + + - Enter a domain name in the “Authorized domains” section that Hermes may use. Example, mycompany.com + - Add scopes: + - `https://www.googleapis.com/auth/drive.readonly` + +1. [Create OAuth client ID credentials](https://developers.google.com/workspace/guides/create-credentials) for a “web application”. + + - Add the following domains in the “Authorized JavaScript origins” section. + + - `https://{HERMES_DOMAIN}` + - `http://localhost:8000` (Note: this should be localhost when running locally) + + - Add the following URLs in the “Authorized redirect URIs” section. + + - `https://{HERMES_DOMAIN}/torii/redirect.html` + - `http://localhost:8000/torii/redirect.html` (Note: this should be localhost when running locally) + + Please note the client ID as you may need it to be provided at application build time as the `HERMES_WEB_GOOGLE_OAUTH2_CLIENT_ID` environment variable. + +1. 
[Create OAuth client ID credentials](https://developers.google.com/workspace/guides/create-credentials) for a “desktop application” for Hermes backend. + + - Download the OAuth credentials JSON file and save it to the root of this project repository. + +### Google Drive + +We suggest using a [shared drive](https://support.google.com/a/users/answer/7212025?hl=en) for your organization. + +- "Shortcuts" folder: this folder contains an organized hierarchy of folders and shortcuts to published files. You may want to share this with users if you need to provide a non-Hermes experience of navigating through Google Drive to find documents. + + - Structure: `{doc_type}/{product}/{document}` + +- "Documents" folder: this folder contains all published documents in a flat structure. This folder should be shared with all of your users, but it is not ideal to view itself in Google Drive, given the flat structure. Instead, the "shortcuts folder" will provide a better viewing experience when navigating directly in Google Drive. + +- "Drafts" folder: this folder contains all draft documents. It should be kept private and only accessible to the Hermes service user. The Hermes user will automatically share any draft documents with document owners and collaborators. + +Example shared drive organization + +- Shared Drives + - Hermes + - Documents (this is the "shortcuts" folder) + - All Documents (this is the "documents" folder) + - Drafts (this is the "drafts" folder) + +### Algolia + +1. [Sign up](https://www.algolia.com/users/sign_up) for a free Algolia account. + +1. Take note of the Admin API Key and Search-Only API Key in the [API Keys](https://www.algolia.com/account/api-keys) section. The admin API and search-only API keys are required for the Hermes server and the indexer. 
+ +## Development and Usage + +### Requirements + +- Go 1.18 +- Node.js 14 + +### Configuration File + +Copy the example configuration file to the root of this repo and edit the file (it contains sensible defaults and comments to hopefully provide enough information to update necessary values). + +```sh +cp configs/config.hcl ./ +# Edit config.hcl... +``` + +### Build the Project + +```sh +# OAuth client ID of the “web application” +export HERMES_WEB_GOOGLE_OAUTH2_CLIENT_ID=”{OAUTH_CLIENT_ID_HERE}” +``` + +```sh +make build +``` + +### PostgreSQL + +Hermes can be configured to point to any PostgreSQL database, but for running locally, there is tooling to start one in Docker using Docker Compose. + +```sh +# Start PostgreSQL in Docker. +make docker/postgres/start +``` + +### Run the Server + +```sh +./hermes server -config=config.hcl +``` + +NOTE: when not using a Google service account, this will automatically open a browser to authenticate the server to read and create documents, send email, etc. + +## Running Hermes in Production + +1. [Create Service Account](https://developers.google.com/workspace/guides/create-credentials#service-account) + +- Create a new key (JSON type) for the service account and download it. +- Go to [Delegating domain-wide authority to the service account](https://developers.google.com/identity/protocols/oauth2/service-account#delegatingauthority) and follow the instructions to enter the OAuth scopes. +- Add the following OAuth scopes (comma-delimited list): + `https://www.googleapis.com/auth/directory.readonly,https://www.googleapis.com/auth/documents,https://www.googleapis.com/auth/drive,https://www.googleapis.com/auth/gmail.send` + +More to come here... + +## Architecture + +### Server + +The server process serves web content. 
Of note, there are API endpoints for an authenticated Algolia proxy (`/1/` to allow usage of Algolia's client library), and redirect links (`/l/`) which provide human-readable links (i.e., `/l/rfc/lab-123`) to documents. + +### Indexer + +The indexer is a process that is run alongside the server that continually polls for published document updates and reindexes their content in Algolia for search. Additionally, it will rewrite the document headers with Hermes metadata in case they are manually changed to incorrect values. While not strictly required, it is recommended to run the indexer so the search index and Google Docs stay up-to-date. + +### Frontend + +The Ember.js web frontend is built and embedded into the Hermes binary, and served via the server process. + +## Project Status + +This project is under active development and in the alpha stage. There may be breaking changes to the API, application configuration file, or other parts of the project. We recommend against installing builds from the `main` branch. We will make every attempt to document breaking changes and provide an upgrade path between releases of Hermes. + +## Feedback + +If you think that you've found a security issue, please contact us via email at hermes-feedback@hashicorp.com instead of filing a GitHub issue. + +Found a non-security-related bug or have a feature request? Please open a GitHub issue. + +Have other feedback? Please contact us via email at hermes-feedback@hashicorp.com. + +## Contributing + +In the short term, there are several large changes planned for the Hermes project. In order to make sure there aren’t any conflicts with the upcoming plans for the project, before submitting a PR please create a GitHub issue so we are able to validate the change you may want to propose. + +As the project becomes more stable over the next several releases, we think it will become much easier to contribute. 
+ +## Upcoming Plans + +Here are some higher-level initiatives that we are currently working on: + +- Support custom document types. +- Increase test coverage. +- Finish migration of using Algolia as a NoSQL database to PostgreSQL as the source of truth for all non-search data. diff --git a/cmd/hermes/main.go b/cmd/hermes/main.go new file mode 100644 index 000000000..d0fa072a4 --- /dev/null +++ b/cmd/hermes/main.go @@ -0,0 +1,15 @@ +package main + +import ( + "os" + "path/filepath" + + "github.com/hashicorp-forge/hermes/internal/cmd" +) + +func main() { + // Name of the executable + os.Args[0] = filepath.Base(os.Args[0]) + + os.Exit(cmd.Main(os.Args)) +} diff --git a/configs/config.hcl b/configs/config.hcl new file mode 100644 index 000000000..5cae2fd3b --- /dev/null +++ b/configs/config.hcl @@ -0,0 +1,169 @@ +// base_url is the base URL used for building links. This should be the public +// URL of the application. +base_url = "http://localhost:8000" + +// algolia configures Hermes to work with Algolia. +algolia { + application_id = "" + docs_index_name = "docs" + drafts_index_name = "drafts" + internal_index_name = "internal" + links_index_name = "links" + missing_fields_index_name = "missing_fields" + search_api_key = "" + write_api_key = "" +} + +// document_types configures document types. Currently this block should not be +// modified, but Hermes will support custom document types in the near future. +// *** DO NOT MODIFY document_types *** +document_types { + document_type "RFC" { + long_name = "Request for Comments" + description = "Create a Request for Comments document to present a proposal to colleagues for their review and feedback." 
+ template = "1Oz_7FhaWxdFUDEzKCC5Cy58t57C4znmC_Qr80BORy1U" + + more_info_link { + text = "More info on the RFC template" + url = "https://works.hashicorp.com/articles/rfc-template" + } + + custom_field { + name = "Current Version" + type = "string" + } + custom_field { + name = "PRD" + type = "string" + } + custom_field { + name = "Stakeholders" + type = "people" + } + custom_field { + name = "Target Version" + type = "string" + } + } + + document_type "PRD" { + long_name = "Product Requirements" + description = "Create a Product Requirements Document to summarize a problem statement and outline a phased approach to addressing the problem." + template = "1oS4q6IPDr3aMSTTk9UDdOnEcFwVWW9kT8ePCNqcg1P4" + + more_info_link { + text = "More info on the PRD template" + url = "https://works.hashicorp.com/articles/prd-template" + } + + custom_field { + name = "RFC" + type = "string" + } + custom_field { + name = "Stakeholders" + type = "people" + } + } +} + +// email configures Hermes to send email notifications. +email { + // enabled enables sending email notifications. + enabled = true + + // from_address is the email address to send email notifications from. + from_address = "hermes@yourorganization.com" +} + +// google_workspace configures Hermes to work with Google Workspace. +google_workspace { + // create_doc_shortcuts enables creating a shortcut in the shortcuts_folder + // when a document is published. + create_doc_shortcuts = true + + // docs_folder contains all published documents in a flat structure. + docs_folder = "my-docs-folder-id" + + // drafts_folder contains all draft documents. 
+ drafts_folder = "my-drafts-folder-id" + + // If create_doc_shortcuts is set to true, shortcuts_folder will contain an + // organized hierarchy of folders and shortcuts to published files that can be + // easily browsed directly in Google Drive: + // {shortcut_folder}/{doc_type}/{product}/{document} + shortcuts_folder = "my-shortcuts-folder-id" + + // auth is the configuration for interacting with Google Workspace using a + // service account. + // auth { + // client_email = "" + // private_key = "" + // subject = "" + // token_url = "https://oauth2.googleapis.com/token" + // } + + // oauth2 is the configuration used to authenticate users via Google. + oauth2 { + client_id = "" + hd = "hashicorp.com" + redirect_uri = "http://localhost:8000/torii/redirect.html" + } +} + +// indexer contains the configuration for the indexer. +indexer { + // max_parallel_docs is the maximum number of documents that will be + // simultaneously indexed. + max_parallel_docs = 5 + + // update_doc_headers enables the indexer to automatically update document + // headers for changed documents based on Hermes metadata. + update_doc_headers = true + + // update_draft_headers enables the indexer to automatically update document + // headers for draft documents based on Hermes metadata. + update_draft_headers = true +} + +// okta configures Hermes to authenticate users using an AWS Application Load +// Balancer and Okta. +okta { + // auth_server_url is the URL of the Okta authorization server. + auth_server_url = "" + + // ClientID is the Okta client ID. + client_id = "" + + // disabled disables Okta authorization. + disabled = true +} + +// postgres configures PostgreSQL as the app database. +postgres { + dbname = "db" + host = "localhost" + password = "postgres" + port = 5432 + user = "postgres" +} + +// products should be modified to reflect the products/areas in your +// organization. 
+products { + product "Engineering" { + abbreviation = "ENG" + } + product "Labs" { + abbreviation = "LAB" + } + product "MyProduct" { + abbreviation = "MY" + } +} + +// server contains the configuration for the server. +server { + // addr is the address to bind to for listening. + addr = "127.0.0.1:8000" +} diff --git a/go.mod b/go.mod new file mode 100644 index 000000000..401dcf990 --- /dev/null +++ b/go.mod @@ -0,0 +1,89 @@ +module github.com/hashicorp-forge/hermes + +go 1.18 + +require ( + github.com/algolia/algoliasearch-client-go/v3 v3.23.0 + github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de + github.com/cenkalti/backoff/v4 v4.1.3 + github.com/forPelevin/gomoji v1.1.3 + github.com/go-ozzo/ozzo-validation/v4 v4.3.0 + github.com/hashicorp/go-hclog v1.2.0 + github.com/hashicorp/go-multierror v1.1.1 + github.com/hashicorp/hcl/v2 v2.11.1 + github.com/mitchellh/cli v1.1.2 + github.com/okta/okta-jwt-verifier-golang v1.3.1 + github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 + github.com/stretchr/testify v1.8.1 + golang.org/x/oauth2 v0.3.0 + google.golang.org/api v0.103.0 + gorm.io/datatypes v1.1.0 + gorm.io/driver/postgres v1.4.5 + gorm.io/gorm v1.24.3 +) + +require ( + cloud.google.com/go/compute v1.13.0 // indirect + cloud.google.com/go/compute/metadata v0.2.1 // indirect + github.com/Masterminds/goutils v1.1.1 // indirect + github.com/Masterminds/semver v1.5.0 // indirect + github.com/Masterminds/sprig v2.22.0+incompatible // indirect + github.com/agext/levenshtein v1.2.3 // indirect + github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect + github.com/armon/go-radix v1.0.0 // indirect + github.com/bgentry/speakeasy v0.1.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d // indirect + github.com/fatih/color v1.13.0 // indirect + github.com/go-sql-driver/mysql v1.7.0 // indirect + github.com/goccy/go-json v0.9.4 // indirect + 
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/google/go-cmp v0.5.9 // indirect + github.com/google/uuid v1.3.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.2.0 // indirect + github.com/googleapis/gax-go/v2 v2.7.0 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/huandu/xstrings v1.3.2 // indirect + github.com/imdario/mergo v0.3.12 // indirect + github.com/jackc/chunkreader/v2 v2.0.1 // indirect + github.com/jackc/pgconn v1.13.0 // indirect + github.com/jackc/pgio v1.0.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgproto3/v2 v2.3.1 // indirect + github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect + github.com/jackc/pgtype v1.13.0 // indirect + github.com/jackc/pgx/v4 v4.17.2 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/lestrrat-go/backoff/v2 v2.0.8 // indirect + github.com/lestrrat-go/blackmagic v1.0.0 // indirect + github.com/lestrrat-go/httpcc v1.0.0 // indirect + github.com/lestrrat-go/iter v1.0.1 // indirect + github.com/lestrrat-go/jwx v1.2.18 // indirect + github.com/lestrrat-go/option v1.0.0 // indirect + github.com/mattn/go-colorable v0.1.12 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect + github.com/mitchellh/copystructure v1.2.0 // indirect + github.com/mitchellh/go-wordwrap v1.0.1 // indirect + github.com/mitchellh/reflectwalk v1.0.2 // indirect + github.com/patrickmn/go-cache v0.0.0-20180815053127-5633e0862627 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/posener/complete v1.2.3 // indirect + github.com/rivo/uniseg v0.2.0 // indirect + github.com/zclconf/go-cty v1.10.0 // indirect + go.opencensus.io v0.24.0 // indirect + golang.org/x/crypto v0.4.0 // indirect 
+ golang.org/x/net v0.3.0 // indirect + golang.org/x/sys v0.3.0 // indirect + golang.org/x/text v0.5.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/genproto v0.0.0-20221207170731-23e4bf6bdc37 // indirect + google.golang.org/grpc v1.51.0 // indirect + google.golang.org/protobuf v1.28.1 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + gorm.io/driver/mysql v1.4.5 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 000000000..1d5a8d3ae --- /dev/null +++ b/go.sum @@ -0,0 +1,476 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.105.0 h1:DNtEKRBAAzeS4KyIory52wWHuClNaXJ5x1F7xa4q+5Y= +cloud.google.com/go/compute v1.13.0 h1:AYrLkB8NPdDRslNp4Jxmzrhdr03fUAIDbiGFjLWowoU= +cloud.google.com/go/compute v1.13.0/go.mod h1:5aPTS0cUNMIc1CE546K+Th6weJUNQErARyZtRXDJ8GE= +cloud.google.com/go/compute/metadata v0.2.1 h1:efOwf5ymceDhK6PKMnnrTHP4pppY5L22mle96M1yP48= +cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= +cloud.google.com/go/longrunning v0.3.0 h1:NjljC+FYPV3uh5/OwWT6pVU+doBqMg2x/rZlE+CamDs= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= +github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= +github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Masterminds/sprig v2.22.0+incompatible 
h1:z4yfnGrZ7netVz+0EDJ0Wi+5VZCSYp4Z0m2dk6cEM60= +github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= +github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= +github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/algolia/algoliasearch-client-go/v3 v3.23.0 h1:h1bFdTZfBorRuH4EM1FcYxZzFH2e4JGgnlCe0enV5hU= +github.com/algolia/algoliasearch-client-go/v3 v3.23.0/go.mod h1:i7tLoP7TYDmHX3Q7vkIOL4syVse/k5VJ+k0i8WqFiJk= +github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= +github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= +github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= +github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= +github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA= +github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496 h1:zV3ejI06GQ59hwDQAvmK1qxOQGB3WuVTRoY0okPTAv0= +github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg= +github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= +github.com/bgentry/speakeasy v0.1.0/go.mod 
h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= +github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d h1:1iy2qD6JEhHKKhUOA9IWs7mjco7lnw2qx8FsRI2wirE= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d/go.mod h1:tmAIfUFEirG/Y8jhZ9M+h36obRZAk/1fcSpXwAVlfqE= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= 
+github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/forPelevin/gomoji v1.1.3 h1:7c3dYzVmYhpOL3bS4riXqSWJBX3BhSvH68yoNNf3FH0= +github.com/forPelevin/gomoji v1.1.3/go.mod h1:ypB7Kz3Fsp+LVR7KoT7mEFOioYBuTuAtaAT4RGl+ASY= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-ozzo/ozzo-validation/v4 v4.3.0 h1:byhDUpfEwjsVQb1vBunvIjh2BHQ9ead57VkAEY4V+Es= +github.com/go-ozzo/ozzo-validation/v4 v4.3.0/go.mod h1:2NKgrcHl3z6cJs+3Oo940FPRiTzuqKbvfrL2RxCj6Ew= +github.com/go-sql-driver/mysql v1.7.0 h1:ueSltNNllEqE3qcWBTD0iQd3IpL/6U+mJxLkazJ7YPc= +github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= +github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/goccy/go-json v0.9.4 h1:L8MLKG2mvVXiQu07qB6hmfqeSYQdOnqPot2GhsIwIaI= +github.com/goccy/go-json v0.9.4/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= +github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= +github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= 
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= 
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.2.0 h1:y8Yozv7SZtlU//QXbezB6QkpuE6jMD2/gfzk4AftXjs= +github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= +github.com/googleapis/gax-go/v2 v2.7.0 h1:IcsPKeInNvYi7eqSaDjiZqDDKu5rsmunY0Y1YupQSSQ= +github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-hclog v1.2.0 h1:La19f8d7WIlm4ogzNHB0JGqs5AUDAZ2UfCY4sJXcJdM= +github.com/hashicorp/go-hclog v1.2.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= 
+github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/hcl/v2 v2.11.1 h1:yTyWcXcm9XB0TEkyU/JCRU6rYy4K+mgLtzn2wlrJbcc= +github.com/hashicorp/hcl/v2 v2.11.1/go.mod h1:FwWsfWEjyV/CMj8s/gqAuiviY72rJ1/oayI9WftqcKg= +github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= +github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= +github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= +github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgconn v1.13.0 h1:3L1XMNV2Zvca/8BYhzcRFS70Lr0WlDg16Di6SFGAbys= 
+github.com/jackc/pgconn v1.13.0/go.mod h1:AnowpAqO4CMIIJNZl2VJp+KrkAZciAkhEl0W0JIobpI= +github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.3.1 h1:nwj7qwf0S+Q7ISFfBndqeLwSwxs+4DPsbRFjECT1Y4Y= +github.com/jackc/pgproto3/v2 v2.3.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod 
h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= +github.com/jackc/pgtype v1.12.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgtype v1.13.0 h1:XkIc7A+1BmZD19bB2NxrtjJweHxQ9agqvM+9URc68Cg= +github.com/jackc/pgtype v1.13.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= +github.com/jackc/pgx/v4 v4.17.2 h1:0Ut0rpeKwvIVbMQ1KbMBU4h6wxehBI535LK6Flheh8E= +github.com/jackc/pgx/v4 v4.17.2/go.mod h1:lcxIZN44yMIrWI78a5CpucdD14hX0SBDbNRvjDBItsw= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= 
+github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= +github.com/lestrrat-go/backoff/v2 v2.0.8 h1:oNb5E5isby2kiro9AgdHLv5N5tint1AnDVVf2E2un5A= +github.com/lestrrat-go/backoff/v2 v2.0.8/go.mod h1:rHP/q/r9aT27n24JQLa7JhSQZCKBBOiM/uP402WwN8Y= +github.com/lestrrat-go/blackmagic v1.0.0 h1:XzdxDbuQTz0RZZEmdU7cnQxUtFUzgCSPq8RCz4BxIi4= +github.com/lestrrat-go/blackmagic v1.0.0/go.mod h1:TNgH//0vYSs8VXDCfkZLgIrVTTXQELZffUV0tz3MtdQ= +github.com/lestrrat-go/codegen v1.0.0/go.mod h1:JhJw6OQAuPEfVKUCLItpaVLumDGWQznd1VaXrBk9TdM= +github.com/lestrrat-go/httpcc v1.0.0 h1:FszVC6cKfDvBKcJv646+lkh4GydQg2Z29scgUfkOpYc= +github.com/lestrrat-go/httpcc v1.0.0/go.mod 
h1:tGS/u00Vh5N6FHNkExqGGNId8e0Big+++0Gf8MBnAvE= +github.com/lestrrat-go/iter v1.0.1 h1:q8faalr2dY6o8bV45uwrxq12bRa1ezKrB6oM9FUgN4A= +github.com/lestrrat-go/iter v1.0.1/go.mod h1:zIdgO1mRKhn8l9vrZJZz9TUMMFbQbLeTsbqPDrJ/OJc= +github.com/lestrrat-go/jwx v1.2.18 h1:RV4hcTRUlPVYUnGqATKXEojoOsLexoU8Na4KheVzxQ8= +github.com/lestrrat-go/jwx v1.2.18/go.mod h1:bWTBO7IHHVMtNunM8so9MT8wD+euEY1PzGEyCnuI2qM= +github.com/lestrrat-go/option v0.0.0-20210103042652-6f1ecfceda35/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I= +github.com/lestrrat-go/option v1.0.0 h1:WqAWL8kh8VcSoD6xjSH34/1m8yxluXQbDeKNfvFeEO4= +github.com/lestrrat-go/option v1.0.0/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I= +github.com/lestrrat-go/pdebug/v3 v3.0.1/go.mod h1:za+m+Ve24yCxTEhR59N7UlnJomWwCiIqbJRmKeiADU4= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.2 h1:AqzbZs4ZoCBp+GtejcpCpcxM3zlSMx29dXbUSeVtJb8= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.5/go.mod 
h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI= +github.com/microsoft/go-mssqldb v0.17.0 h1:Fto83dMZPnYv1Zwx5vHHxpNraeEaUlQ/hhHLgZiaenE= +github.com/mitchellh/cli v1.1.2 h1:PvH+lL2B7IQ101xQL63Of8yFS2y+aDlsFcsqNc+u/Kw= +github.com/mitchellh/cli v1.1.2/go.mod h1:6iaV0fGdElS6dPBx0EApTxHrcWvmJphyh2n8YBLPPZ4= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= +github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= +github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/okta/okta-jwt-verifier-golang v1.3.1 h1:V+9W5KD3nG7xN0UYtnzXtkurGcs71bLwzPFuUGNMwdE= 
+github.com/okta/okta-jwt-verifier-golang v1.3.1/go.mod h1:cHffA777f7Yi4K+yDzUp89sGD5v8sk04Pc3CiT1OMR8= +github.com/patrickmn/go-cache v0.0.0-20180815053127-5633e0862627 h1:pSCLCl6joCFRnjpeojzOpEYs4q7Vditq8fySFG5ap3Y= +github.com/patrickmn/go-cache v0.0.0-20180815053127-5633e0862627/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= 
+github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg= +github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= +github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= +github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= +github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= +github.com/zclconf/go-cty v1.10.0 h1:mp9ZXQeIcN8kAwuqorjH+Q+njbJKjLrvB2yIh4q7U+0= +github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= +github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= 
+go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20201217014255-9d1352758620/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.4.0 h1:UVQgzMY87xqpKNgb+kDsll2Igd33HszWHFLmpaRMq/8= +golang.org/x/crypto v0.4.0/go.mod h1:3quD/ATkf6oY+rnes5c3ExXTbLc8mueNue5/DoinL80= +golang.org/x/exp 
v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 
+golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.3.0 h1:VWL6FNY2bEEmsGVKabSlHu5Irp34xmMRoqb/9lF9lxk= +golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.3.0 h1:6l90koy8/LaBLmLu8jpHeHexzMwEita0zFfYlggy2F8= +golang.org/x/oauth2 v0.3.0/go.mod h1:rQrIauxkUhJ6CuwEXwymO2/eh4xz2ZWF1nBkcxS+tGk= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod 
h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= +golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools 
v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200918232735-d647fc253266/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20210114065538-d78b04bdf963/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= +google.golang.org/api v0.103.0 h1:9yuVqlu2JCvcLg9p8S3fcFLZij8EPSyvODIY1rkMizQ= +google.golang.org/api v0.103.0/go.mod h1:hGtW6nK1AC+d9si/UBhw8Xli+QMOf6xyNAyJw4qU9w0= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod 
h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20221207170731-23e4bf6bdc37 h1:jmIfw8+gSvXcZSgaFAGyInDXeWzUhvYH57G/5GKMn70= +google.golang.org/genproto v0.0.0-20221207170731-23e4bf6bdc37/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.51.0 h1:E1eGv1FTqoLIdnBCZufiSHgKjlqG6fKFf6pPWtMTh8U= +google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf 
v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/datatypes v1.1.0 h1:EVp1Z28N4ACpYFK1nHboEIJGIFfjY7vLeieDk8jSHJA= +gorm.io/datatypes v1.1.0/go.mod h1:SH2K9R+2RMjuX1CkCONrPwoe9JzVv2hkQvEu4bXGojE= +gorm.io/driver/mysql v1.4.5 h1:u1lytId4+o9dDaNcPCFzNv7h6wvmc92UjNk3z8enSBU= +gorm.io/driver/mysql v1.4.5/go.mod h1:SxzItlnT1cb6e1e4ZRpgJN2VYtcqJgqnHxWr4wsP8oc= +gorm.io/driver/postgres v1.4.5 h1:mTeXTTtHAgnS9PgmhN2YeUbazYpLhUI1doLnw42XUZc= +gorm.io/driver/postgres v1.4.5/go.mod h1:GKNQYSJ14qvWkvPwXljMGehpKrhlDNsqYRr5HnYGncg= 
+gorm.io/driver/sqlite v1.4.3 h1:HBBcZSDnWi5BW3B3rwvVTc510KGkBkexlOg0QrmLUuU= +gorm.io/driver/sqlserver v1.4.1 h1:t4r4r6Jam5E6ejqP7N82qAJIJAht27EGT41HyPfXRw0= +gorm.io/gorm v1.23.8/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk= +gorm.io/gorm v1.24.1-0.20221019064659-5dd2bb482755/go.mod h1:DVrVomtaYTbqs7gB/x2uVvqnXzv0nqjB396B8cG4dBA= +gorm.io/gorm v1.24.3 h1:WL2ifUmzR/SLp85CSURAfybcHnGZ+yLSGSxgYXlFBHg= +gorm.io/gorm v1.24.3/go.mod h1:DVrVomtaYTbqs7gB/x2uVvqnXzv0nqjB396B8cG4dBA= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= diff --git a/internal/api/analytics.go b/internal/api/analytics.go new file mode 100644 index 000000000..2cdede84d --- /dev/null +++ b/internal/api/analytics.go @@ -0,0 +1,59 @@ +package api + +import ( + "encoding/json" + "net/http" + + "github.com/hashicorp/go-hclog" +) + +type AnalyticsRequest struct { + DocumentID string `json:"document_id"` + ProductName string `json:"product_name"` +} + +type AnalyticsResponse struct { + Recorded bool `json:"recorded"` +} + +// Analytics handles user events for analytics +func AnalyticsHandler(log hclog.Logger) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Only allow POST requests. 
+ if r.Method != http.MethodPost { + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + + decoder := json.NewDecoder(r.Body) + var req AnalyticsRequest + if err := decoder.Decode(&req); err != nil { + log.Error("error decoding analytics request", "error", err) + http.Error(w, "Error decoding analytics request", + http.StatusBadRequest) + return + } + + response := &AnalyticsResponse{ + Recorded: false, + } + + // Check if document id is set, product name is optional + if req.DocumentID != "" { + log.Info("document view event", "document_id", req.DocumentID, "product_name", req.ProductName) + response.Recorded = true + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + + enc := json.NewEncoder(w) + err := enc.Encode(response) + if err != nil { + log.Error("error encoding analytics response", "error", err) + http.Error(w, "Error encoding analytics response", + http.StatusInternalServerError) + return + } + }) +} diff --git a/internal/api/approvals.go b/internal/api/approvals.go new file mode 100644 index 000000000..bfe629926 --- /dev/null +++ b/internal/api/approvals.go @@ -0,0 +1,371 @@ +package api + +import ( + "fmt" + "net/http" + + "github.com/hashicorp-forge/hermes/internal/config" + "github.com/hashicorp-forge/hermes/pkg/algolia" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs" + "github.com/hashicorp/go-hclog" +) + +func ApprovalHandler( + cfg *config.Config, + l hclog.Logger, + ar *algolia.Client, + aw *algolia.Client, + s *gw.Service) http.Handler { + + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case "DELETE": + // Validate request. 
+ docID, err := parseResourceIDFromURL(r.URL.Path, "approvals") + if err != nil { + l.Error("error parsing document ID", + "error", err, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Document ID not found", http.StatusNotFound) + return + } + + // Get base document object from Algolia so we can determine the doc type. + baseDocObj := &hcd.BaseDoc{} + err = ar.Docs.GetObject(docID, &baseDocObj) + if err != nil { + l.Error("error requesting base document object from Algolia", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error requesting changes of document", + http.StatusInternalServerError) + return + } + + // Create new document object of the proper doc type. + docObj, err := hcd.NewEmptyDoc(baseDocObj.DocType) + if err != nil { + l.Error("error creating new empty doc", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error requesting changes of document", + http.StatusInternalServerError) + return + } + + // Get document object from Algolia. + err = ar.Docs.GetObject(docID, &docObj) + if err != nil { + l.Error("error getting document from Algolia", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error accessing document", + http.StatusInternalServerError) + return + } + + // Authorize request. 
+ userEmail := r.Context().Value("userEmail").(string) + if docObj.GetStatus() != "In-Review" { + http.Error(w, + "Can only request changes of documents in the \"In-Review\" status", + http.StatusBadRequest) + return + } + if !contains(docObj.GetApprovers(), userEmail) { + http.Error(w, "Not authorized as a document approver", + http.StatusUnauthorized) + return + } + if contains(docObj.GetChangesRequestedBy(), userEmail) { + http.Error(w, "Document already has changes requested by user", + http.StatusBadRequest) + return + } + + // Add email to slice of users who have requested changes of the document. + docObj.SetChangesRequestedBy( + append(docObj.GetChangesRequestedBy(), userEmail)) + + // If user had previously approved, delete email from slice of users who + // have approved the document. + var newApprovedBy []string + for _, a := range docObj.GetApprovedBy() { + if a != userEmail { + newApprovedBy = append(newApprovedBy, a) + } + } + docObj.SetApprovedBy(newApprovedBy) + + // Get latest Google Drive file revision. + latestRev, err := s.GetLatestRevision(docID) + if err != nil { + l.Error("error getting latest revision", + "error", err, + "method", r.Method, + "path", r.URL.Path, + "doc_id", docID) + http.Error(w, "Error requesting changes of document", + http.StatusInternalServerError) + return + } + + // Mark latest revision to be kept forever. + _, err = s.KeepRevisionForever(docID, latestRev.Id) + if err != nil { + l.Error("error marking revision to keep forever", + "error", err, + "method", r.Method, + "path", r.URL.Path, + "doc_id", docID, + "rev_id", latestRev.Id) + http.Error(w, "Error requesting changes", + http.StatusInternalServerError) + return + } + + // Record file revision in the Algolia document object. + revisionName := fmt.Sprintf("Changes requested by %s", userEmail) + docObj.SetFileRevision(latestRev.Id, revisionName) + + // Save modified doc object in Algolia. 
+ res, err := aw.Docs.SaveObject(docObj) + if err != nil { + l.Error("error saving requested changes doc object in Algolia", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error requesting changes of document", + http.StatusInternalServerError) + return + } + err = res.Wait() + if err != nil { + l.Error("error saving requested changes doc object in Algolia", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error requesting changes of document", + http.StatusInternalServerError) + return + } + + // Replace the doc header. + err = docObj.ReplaceHeader( + docID, cfg.BaseURL, true, s) + if err != nil { + l.Error("error replacing doc header", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error requesting changes of document", + http.StatusInternalServerError) + return + } + + // Write response. + w.WriteHeader(http.StatusOK) + + // Log success. + l.Info("changes requested successfully", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + + case "POST": + // Validate request. + docID, err := parseResourceIDFromURL(r.URL.Path, "approvals") + if err != nil { + l.Error("error parsing document ID from approvals path", + "error", err, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Document ID not found", http.StatusNotFound) + return + } + + // Get base document object from Algolia so we can determine the doc type. + baseDocObj := &hcd.BaseDoc{} + err = ar.Docs.GetObject(docID, &baseDocObj) + if err != nil { + l.Error("error requesting base document object from Algolia", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + + // Create new document object of the proper doc type. 
+ docObj, err := hcd.NewEmptyDoc(baseDocObj.DocType) + if err != nil { + l.Error("error creating new empty doc", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + + // Get document object from Algolia. + err = ar.Docs.GetObject(docID, &docObj) + if err != nil { + l.Error("error getting document from Algolia", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error accessing document", + http.StatusInternalServerError) + return + } + + // Authorize request. + userEmail := r.Context().Value("userEmail").(string) + if docObj.GetStatus() != "In-Review" && docObj.GetStatus() != "In Review" { + http.Error(w, + `{"error": "Only documents in the "In-Review" status can be approved"}`, + http.StatusBadRequest) + return + } + if !contains(docObj.GetApprovers(), userEmail) { + http.Error(w, + `{"error": "Not authorized as a document approver"}`, + http.StatusUnauthorized) + return + } + if contains(docObj.GetApprovedBy(), userEmail) { + http.Error(w, + `{"error": "Document already approved by user"}`, + http.StatusBadRequest) + return + } + + // Add email to slice of users who have approved the document. + docObj.SetApprovedBy(append(docObj.GetApprovedBy(), userEmail)) + + // If the user had previously requested changes, delete email from slice + // of users who have requested changes of the document. + var newChangesRequestedBy []string + for _, a := range docObj.GetChangesRequestedBy() { + if a != userEmail { + newChangesRequestedBy = append(newChangesRequestedBy, a) + } + } + docObj.SetChangesRequestedBy(newChangesRequestedBy) + + // Get latest Google Drive file revision. 
+ latestRev, err := s.GetLatestRevision(docID) + if err != nil { + l.Error("error getting latest revision", + "error", err, + "method", r.Method, + "path", r.URL.Path, + "doc_id", docID) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + + // Mark latest revision to be kept forever. + _, err = s.KeepRevisionForever(docID, latestRev.Id) + if err != nil { + l.Error("error marking revision to keep forever", + "error", err, + "method", r.Method, + "path", r.URL.Path, + "doc_id", docID, + "rev_id", latestRev.Id) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + + // Record file revision in the Algolia document object. + revisionName := fmt.Sprintf("Approved by %s", userEmail) + docObj.SetFileRevision(latestRev.Id, revisionName) + + // Save modified doc object in Algolia. + res, err := aw.Docs.SaveObject(docObj) + if err != nil { + l.Error("error saving approved doc object in Algolia", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error approving document", + http.StatusInternalServerError) + return + } + err = res.Wait() + if err != nil { + l.Error("error saving approved doc object in Algolia", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error approving document", + http.StatusInternalServerError) + return + } + + // Replace the doc header. + err = docObj.ReplaceHeader( + docID, cfg.BaseURL, true, s) + if err != nil { + l.Error("error replacing doc header", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error approving document", + http.StatusInternalServerError) + return + } + + // Write response. + w.WriteHeader(http.StatusOK) + + // Log success. 
+ l.Info("approval created", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + + default: + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + }) +} diff --git a/internal/api/document_types.go b/internal/api/document_types.go new file mode 100644 index 000000000..fa33c9732 --- /dev/null +++ b/internal/api/document_types.go @@ -0,0 +1,34 @@ +package api + +import ( + "encoding/json" + "net/http" + + "github.com/hashicorp-forge/hermes/internal/config" + "github.com/hashicorp/go-hclog" +) + +func DocumentTypesHandler(cfg config.Config, log hclog.Logger) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case "GET": + w.Header().Set("Content-Type", "application/json") + + enc := json.NewEncoder(w) + err := enc.Encode(cfg.DocumentTypes.DocumentType) + if err != nil { + log.Error("error encoding document types", + "error", err, + "method", r.Method, + "path", r.URL.Path) + http.Error(w, "{\"error\": \"Error getting document types\"}", + http.StatusInternalServerError) + return + } + + default: + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + }) +} diff --git a/internal/api/documents.go b/internal/api/documents.go new file mode 100644 index 000000000..af5479ede --- /dev/null +++ b/internal/api/documents.go @@ -0,0 +1,396 @@ +package api + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/http" + "time" + + "github.com/hashicorp-forge/hermes/internal/config" + "github.com/hashicorp-forge/hermes/pkg/algolia" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs" + "github.com/hashicorp-forge/hermes/pkg/models" + "github.com/hashicorp/go-hclog" + "gorm.io/gorm" +) + +// DocumentPatchRequest contains a subset of documents fields that are allowed +// to be updated with a PATCH request. 
+type DocumentPatchRequest struct { + Approvers []string `json:"approvers,omitempty"` + Contributors []string `json:"contributors,omitempty"` + Status string `json:"status,omitempty"` + Summary string `json:"summary,omitempty"` + // Tags []string `json:"tags,omitempty"` + Title string `json:"title,omitempty"` + + // TODO: These are all current custom editable fields for all supported doc + // types. We should instead make this dynamic. + CurrentVersion string `json:"currentVersion,omitempty"` + PRD string `json:"prd,omitempty"` + PRFAQ string `json:"prfaq,omitempty"` + RFC string `json:"rfc,omitempty"` + Stakeholders []string `json:"stakeholders,omitempty"` + TargetVersion string `json:"targetVersion,omitempty"` +} + +func DocumentHandler( + cfg *config.Config, + l hclog.Logger, + ar *algolia.Client, + aw *algolia.Client, + s *gw.Service, + db *gorm.DB) http.Handler { + + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Parse document ID from the URL path. + docID, err := parseURLPath(r.URL.Path, "/api/v1/documents") + if err != nil { + l.Error("error parsing document ID from the URL path", + "error", err, + "path", r.URL.Path, + "method", r.Method, + ) + http.Error(w, "Error accessing document", http.StatusInternalServerError) + return + } + + // Get base document object from Algolia so we can determine the doc type. + baseDocObj := &hcd.BaseDoc{} + err = ar.Docs.GetObject(docID, &baseDocObj) + if err != nil { + l.Error("error requesting base document object from Algolia", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error accessing document", + http.StatusInternalServerError) + return + } + + // Create new document object of the proper doc type. 
+ docObj, err := hcd.NewEmptyDoc(baseDocObj.DocType) + if err != nil { + l.Error("error creating new empty doc", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error accessing document", + http.StatusInternalServerError) + return + } + + // Get document object from Algolia. + err = ar.Docs.GetObject(docID, &docObj) + if err != nil { + l.Error("error retrieving document object from Algolia", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error accessing document", http.StatusInternalServerError) + return + } + + switch r.Method { + case "GET": + now := time.Now() + + // Get file from Google Drive so we can return the latest modified time. + file, err := s.GetFile(docID) + if err != nil { + l.Error("error getting document file from Google", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error requesting document", http.StatusInternalServerError) + return + } + + // Parse and set modified time. + modifiedTime, err := time.Parse(time.RFC3339Nano, file.ModifiedTime) + if err != nil { + l.Error("error parsing modified time", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error requesting document", http.StatusInternalServerError) + return + } + docObj.SetModifiedTime(modifiedTime.Unix()) + + // Set custom editable fields. + docObj.SetCustomEditableFields() + + // Write response. + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + + enc := json.NewEncoder(w) + err = enc.Encode(docObj) + if err != nil { + l.Error("error encoding document", + "error", err, + "doc_id", docID, + ) + http.Error(w, "Error requesting document", + http.StatusInternalServerError) + return + } + + // Update recently viewed documents if this is a document view event. 
The + // Add-To-Recently-Viewed header is set in the request from the frontend + // to differentiate between document views and requests to only retrieve + // document metadata. + if r.Header.Get("Add-To-Recently-Viewed") != "" { + // Get authenticated user's email address. + email := r.Context().Value("userEmail").(string) + + // Get user (if exists). + u := models.User{ + EmailAddress: email, + } + if err := u.Get(db); err != nil && !errors.Is( + err, gorm.ErrRecordNotFound) { + l.Error("error getting user in database", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + // TODO: return an error response when this is required. + // http.Error(w, "Error requesting document", + // http.StatusInternalServerError) + return + } + + // Prepend document to recently viewed documents. + rvd := append( + []models.Document{{GoogleFileID: docID}}, + u.RecentlyViewedDocs...) + + // Trim recently viewed documents to a length of 5. + if len(rvd) > 5 { + rvd = rvd[:5] + } + + // Update user. + u.RecentlyViewedDocs = rvd + if err := u.Upsert(db); err != nil { + l.Error("error upserting user", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + // TODO: return an error response when this is required. + // http.Error(w, "Error requesting document", + // http.StatusInternalServerError) + return + } + + // Update ViewedAt time for this document. + viewedDoc := models.RecentlyViewedDoc{ + UserID: int(u.ID), + DocumentID: int(u.RecentlyViewedDocs[0].ID), + ViewedAt: now, + } + if err := db.Updates(&viewedDoc).Error; err != nil { + l.Error("error updating recently viewed document in database", + "error", err, + "method", r.Method, + "path", r.URL.Path, + ) + // TODO: return an error response when this is required. 
+ // http.Error(w, "Error requesting document", + // http.StatusInternalServerError) + return + } + } + + l.Info("retrieved document", + "doc_id", docID, + ) + + case "PATCH": + // Authorize request (only the owner can PATCH the doc). + userEmail := r.Context().Value("userEmail").(string) + if docObj.GetOwners()[0] != userEmail { + http.Error(w, + `{"error": "Not a document owner"}`, + http.StatusUnauthorized) + return + } + + // Copy request body so we can use both for validation using the request + // struct, and then afterwards for patching the document JSON. + buf, err := ioutil.ReadAll(r.Body) + if err != nil { + l.Error("error reading request body", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID) + http.Error(w, "Error patching document", + http.StatusInternalServerError) + return + } + body := ioutil.NopCloser(bytes.NewBuffer(buf)) + newBody := ioutil.NopCloser(bytes.NewBuffer(buf)) + r.Body = newBody + + // Decode request. The request struct validates that the request only + // contains fields that are allowed to be patched. + var req DocumentPatchRequest + if err := decodeRequest(r, &req); err != nil { + l.Error("error decoding document patch request", "error", err) + http.Error(w, fmt.Sprintf("Bad request: %q", err), + http.StatusBadRequest) + return + } + + // Compare approvers in req and stored object in Algolia + // before we save the patched objected + var approversToEmail []string + if len(docObj.GetApprovers()) == 0 && len(req.Approvers) != 0 { + // If there are no approvers of the document + // email the approvers in the request + approversToEmail = req.Approvers + } else if len(req.Approvers) != 0 { + // Only compare when there are stored approvers + // and approvers in the request + approversToEmail = compareSlices(docObj.GetApprovers(), req.Approvers) + } + + // Patch document by decoding the (now validated) request body JSON to the + // document object. 
+ err = json.NewDecoder(body).Decode(docObj) + if err != nil { + l.Error("error decoding request body to document object", + "error", err, + "method", r.Method, + "path", r.URL.Path, + "doc_id", docID) + http.Error(w, "Error patching document", + http.StatusInternalServerError) + return + } + + // Save new modified doc object in Algolia. + res, err := aw.Docs.SaveObject(docObj) + if err != nil { + l.Error("error saving patched document in Algolia", + "error", err, + "method", r.Method, + "path", r.URL.Path, + "doc_id", docID) + http.Error(w, "Error patching document", + http.StatusInternalServerError) + return + } + err = res.Wait() + if err != nil { + l.Error("error saving patched document in Algolia", + "error", err, + "method", r.Method, + "path", r.URL.Path, + "doc_id", docID) + http.Error(w, "Error patching document", + http.StatusInternalServerError) + return + } + + // Send emails to new approvers. + if cfg.Email != nil && cfg.Email.Enabled { + if len(approversToEmail) > 0 { + // TODO: use a template for email content. + rawBody := ` + + +

Hi!

+

+Your review has been requested for a new document, [%s] %s. +

+

+Cheers,
+Hermes +

+ +` + + docURL, err := getDocumentURL(cfg.BaseURL, docID) + if err != nil { + l.Error("error getting document URL", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error patching review", + http.StatusInternalServerError) + return + } + body := fmt.Sprintf(rawBody, docURL, docObj.GetDocNumber(), docObj.GetTitle()) + + // TODO: use an asynchronous method for sending emails because we + // can't currently recover gracefully on a failure here. + for _, approverEmail := range approversToEmail { + _, err = s.SendEmail( + []string{approverEmail}, + cfg.Email.FromAddress, + fmt.Sprintf("Document review requested for %s", docObj.GetDocNumber()), + body, + ) + if err != nil { + l.Error("error sending email", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error patching review", + http.StatusInternalServerError) + return + } + } + l.Info("approver emails sent") + } + } + + // Replace the doc header. + err = docObj.ReplaceHeader(docID, cfg.BaseURL, true, s) + if err != nil { + l.Error("error replacing document header", + "error", err, "doc_id", docID) + http.Error(w, "Error patching document", + http.StatusInternalServerError) + return + } + + // Rename file with new title. 
+ s.RenameFile(docID, + fmt.Sprintf("[%s] %s", docObj.GetDocNumber(), docObj.GetTitle())) + + w.WriteHeader(http.StatusOK) + l.Info("patched document", "doc_id", docID) + return + + default: + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + }) +} diff --git a/internal/api/drafts.go b/internal/api/drafts.go new file mode 100644 index 000000000..2b35522bf --- /dev/null +++ b/internal/api/drafts.go @@ -0,0 +1,838 @@ +package api + +import ( + "bytes" + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + "strconv" + "strings" + "time" + + "github.com/algolia/algoliasearch-client-go/v3/algolia/opt" + "github.com/algolia/algoliasearch-client-go/v3/algolia/search" + "github.com/hashicorp/go-hclog" + "gorm.io/gorm" + + "github.com/hashicorp-forge/hermes/internal/config" + "github.com/hashicorp-forge/hermes/pkg/algolia" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs" + "github.com/hashicorp-forge/hermes/pkg/models" +) + +type DraftsRequest struct { + Approvers []string `json:"approvers,omitempty"` + Contributors []string `json:"contributors,omitempty"` + DocType string `json:"docType,omitempty"` + Owner string `json:"owner,omitempty"` + Product string `json:"product,omitempty"` + ProductAbbreviation string `json:"productAbbreviation,omitempty"` + Summary string `json:"summary,omitempty"` + Tags []string `json:"tags,omitempty"` + Title string `json:"title"` +} + +// DraftsPatchRequest contains a subset of drafts fields that are allowed to +// be updated with a PATCH request. +type DraftsPatchRequest struct { + Approvers []string `json:"approvers,omitempty"` + Contributors []string `json:"contributors,omitempty"` + Summary string `json:"summary,omitempty"` + // Tags []string `json:"tags,omitempty"` + Title string `json:"title,omitempty"` + + // TODO: These are all current custom editable fields for all supported doc + // types. We should instead make this dynamic. 
+ CurrentVersion string `json:"currentVersion,omitempty"` + PRD string `json:"prd,omitempty"` + PRFAQ string `json:"prfaq,omitempty"` + RFC string `json:"rfc,omitempty"` + Stakeholders []string `json:"stakeholders,omitempty"` + TargetVersion string `json:"targetVersion,omitempty"` +} + +type DraftsResponse struct { + ID string `json:"id"` +} + +func DraftsHandler( + cfg *config.Config, + l hclog.Logger, + ar *algolia.Client, + aw *algolia.Client, + s *gw.Service, + db *gorm.DB) http.Handler { + + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case "POST": + // Decode request. + var req DraftsRequest + if err := decodeRequest(r, &req); err != nil { + l.Error("error decoding drafts request", "error", err) + http.Error(w, fmt.Sprintf("Bad request: %q", err), + http.StatusBadRequest) + return + } + + // Validate request. + if req.Owner == "" { + http.Error(w, "Bad request: owner is required", http.StatusBadRequest) + return + } + + switch req.DocType { + case "FRD": + case "RFC": + case "PRD": + case "": + l.Error("Bad request: docType is required") + http.Error(w, "Bad request: docType is required", http.StatusBadRequest) + return + default: + l.Error("Bad request: docType is required", "doc_type", req.DocType) + http.Error(w, "Bad request: invalid docType", http.StatusBadRequest) + return + } + + if req.Owner == "" { + http.Error(w, "Bad request: owner is required", http.StatusBadRequest) + return + } + if req.Title == "" { + http.Error(w, "Bad request: title is required", http.StatusBadRequest) + return + } + + // Get doc type template. + template := getDocTypeTemplate(cfg.DocumentTypes.DocumentType, req.DocType) + if template == "" { + l.Error("Bad request: no template configured for doc type", "doc_type", req.DocType) + http.Error(w, + "Bad request: no template configured for doc type", + http.StatusBadRequest) + return + } + + // Build title. 
+ if req.ProductAbbreviation == "" { + req.ProductAbbreviation = "TODO" + } + title := fmt.Sprintf("[%s-???] %s", req.ProductAbbreviation, req.Title) + + // Copy template to new draft file. + f, err := s.CopyFile(template, title, cfg.GoogleWorkspace.DraftsFolder) + if err != nil { + l.Error("error creating draft", "error", err, "template", template, + "drafts_folder", cfg.GoogleWorkspace.DraftsFolder) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + + // Build created date. + ct, err := time.Parse(time.RFC3339Nano, f.CreatedTime) + if err != nil { + l.Error("error parsing draft created time", "error", err, "doc_id", f.Id) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + cd := ct.Format("Jan 2, 2006") + + // Get owner photo by searching Google Workspace directory. + op := []string{} + people, err := s.SearchPeople(req.Owner) + if err != nil { + l.Error( + "error searching directory for person", + "err", err, + "person", req.Owner, + ) + } + if len(people) > 0 { + if len(people[0].Photos) > 0 { + op = append(op, people[0].Photos[0].Url) + } + } + + // Create tag + // Note: The o_id tag may be empty for environments such as development. + // For environments like pre-prod and prod, it will be set as + // Okta authentication is enforced before this handler is called for + // those environments. Maybe, if id isn't set we use + // owner emails in the future? 
+ id := r.Header.Get("x-amzn-oidc-identity") + metaTags := []string{ + "o_id:" + id, + } + + baseDocObj := &hcd.BaseDoc{ + ObjectID: f.Id, + Title: req.Title, + AppCreated: true, + Contributors: req.Contributors, + Created: cd, + CreatedTime: ct.Unix(), + DocNumber: fmt.Sprintf("%s-???", req.ProductAbbreviation), + DocType: req.DocType, + MetaTags: metaTags, + Owners: []string{req.Owner}, + OwnerPhotos: op, + Product: req.Product, + Status: "WIP", + Summary: req.Summary, + Tags: req.Tags, + } + + res, err := aw.Drafts.SaveObject(baseDocObj) + if err != nil { + l.Error("error saving draft doc in Algolia", "error", err, "doc_id", f.Id) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + err = res.Wait() + if err != nil { + l.Error("error saving draft doc in Algolia", "error", err, "doc_id", f.Id) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + + // Create new document object of the proper doc type. + docObj, err := hcd.NewEmptyDoc(baseDocObj.DocType) + if err != nil { + l.Error("error creating new empty doc", + "error", err, + "doc_id", f.Id, + ) + http.Error(w, "Error accessing draft document", + http.StatusInternalServerError) + return + } + + // Get document object from Algolia. + err = ar.Drafts.GetObject(f.Id, &docObj) + if err != nil { + l.Error("error requesting document draft from Algolia", + "error", err, + "doc_id", f.Id, + ) + http.Error(w, "Error accessing draft document", + http.StatusInternalServerError) + return + } + + // Replace the doc header. + err = docObj.ReplaceHeader( + f.Id, cfg.BaseURL, true, s) + if err != nil { + l.Error("error replacing draft doc header", + "error", err, "doc_id", f.Id) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + + // Create document in the database. 
+ var approvers []*models.User + for _, c := range req.Approvers { + approvers = append(approvers, &models.User{ + EmailAddress: c, + }) + } + var contributors []*models.User + for _, c := range req.Contributors { + contributors = append(contributors, &models.User{ + EmailAddress: c, + }) + } + createdTime, err := time.Parse(time.RFC3339Nano, f.CreatedTime) + if err != nil { + l.Error("error parsing document created time", + "error", err, "doc_id", f.Id) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + // TODO: add custom fields. + d := models.Document{ + GoogleFileID: f.Id, + Approvers: approvers, + Contributors: contributors, + DocumentCreatedAt: createdTime, + DocumentModifiedAt: createdTime, + DocumentType: models.DocumentType{ + Name: req.DocType, + }, + Owner: &models.User{ + EmailAddress: req.Owner, + }, + Product: models.Product{ + Name: req.Product, + }, + Status: models.WIPDocumentStatus, + Summary: req.Summary, + Title: req.Title, + } + if err := d.Create(db); err != nil { + l.Error("error creating document in database", + "error", err, + "doc_id", f.Id, + ) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + + // Share file with the owner + if err := s.ShareFile(f.Id, req.Owner, "writer"); err != nil { + l.Error("error sharing file with the owner", + "error", err, "doc_id", f.Id) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + + // Share file with contributors. + // Google Drive API limitation + // is that you can only share files + // with one user at a time + for _, c := range req.Contributors { + if err := s.ShareFile(f.Id, c, "writer"); err != nil { + l.Error("error sharing file with the contributor", + "error", err, "doc_id", f.Id, "contributor", c) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + } + + // TODO: Delete draft file in the case of an error. 
+ + // Write response. + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + + resp := &DraftsResponse{ + ID: f.Id, + } + + enc := json.NewEncoder(w) + err = enc.Encode(resp) + if err != nil { + l.Error("error encoding drafts response", "error", err, "doc_id", f.Id) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + + l.Info("created draft", "doc_id", f.Id) + + case "GET": + // Get OIDC ID + id := r.Header.Get("x-amzn-oidc-identity") + + // Parse query + q := r.URL.Query() + facetFiltersStr := q.Get("facetFilters") + facetsStr := q.Get("facets") + hitsPerPageStr := q.Get("hitsPerPage") + maxValuesPerFacetStr := q.Get("maxValuesPerFacet") + pageStr := q.Get("page") + ownerEmail := q.Get("ownerEmail") + + if ownerEmail == "" { + l.Error("Bad request: owner is required") + http.Error(w, "Error retrieving document drafts", + http.StatusBadRequest) + return + } + + facetFilters := strings.Split(facetFiltersStr, ",") + facets := strings.Split(facetsStr, ",") + hitsPerPage, err := strconv.Atoi(hitsPerPageStr) + if err != nil { + l.Error("error converting to int", "error", err, "hits_per_page", hitsPerPageStr) + http.Error(w, "Error retrieving document drafts", + http.StatusInternalServerError) + return + } + maxValuesPerFacet, err := strconv.Atoi(maxValuesPerFacetStr) + if err != nil { + l.Error("error converting to int", "error", err, "max_values_per_facet", maxValuesPerFacetStr) + http.Error(w, "Error retrieving document drafts", + http.StatusInternalServerError) + return + } + page, err := strconv.Atoi(pageStr) + if err != nil { + l.Error("error converting to int", "error", err, "page", pageStr) + http.Error(w, "Error retrieving document drafts", + http.StatusInternalServerError) + return + } + + // Build params + params := []interface{}{ + opt.Facets(facets...), + // FacetFilters are supplied as follows: + // ['attribute1:value', 'attribute2:value'], 'owners:owner_email_value' + 
opt.FacetFilterAnd(facetFilters, "owners:"+ownerEmail), + // TagFilter allows to filter for a particular OIDC ID. + // This enables a user to only view their own documents + opt.TagFilter("o_id:" + id), + opt.HitsPerPage(hitsPerPage), + opt.MaxValuesPerFacet(maxValuesPerFacet), + opt.Page(page), + } + + // Retrieve all documents + var resp search.QueryRes + sortBy := q.Get("sortBy") + if sortBy == "dateAsc" { + resp, err = ar.DraftsCreatedTimeAsc.Search("", params...) + } else { + resp, err = ar.DraftsCreatedTimeDesc.Search("", params...) + } + if err != nil { + l.Error("error retrieving document drafts from Algolia", "error", err) + http.Error(w, "Error retrieving document drafts", + http.StatusInternalServerError) + return + } + + // Write response. + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + + enc := json.NewEncoder(w) + err = enc.Encode(resp) + if err != nil { + l.Error("error encoding document drafts", "error", err) + http.Error(w, "Error requesting document draft", + http.StatusInternalServerError) + return + } + + l.Info("retrieved document drafts", "o_id", id) + + default: + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + }) +} + +func DraftsDocumentHandler( + cfg *config.Config, + l hclog.Logger, + ar *algolia.Client, + aw *algolia.Client, + s *gw.Service) http.Handler { + + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Get document ID from URL path + docId, err := parseURLPath(r.URL.Path, "/api/v1/drafts") + if err != nil { + l.Error("error requesting document draft from algolia", + "error", err, + "path", r.URL.Path, + ) + http.Error(w, "Error requesting document draft", http.StatusInternalServerError) + return + } + + // Get base document object from Algolia so we can determine the doc type. 
+
+ baseDocObj := &hcd.BaseDoc{}
+ err = ar.Drafts.GetObject(docId, &baseDocObj)
+ if err != nil {
+ l.Error("error requesting base document object from Algolia",
+ "error", err,
+ "doc_id", docId,
+ )
+ http.Error(w, "Error accessing draft document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Create new document object of the proper doc type.
+ docObj, err := hcd.NewEmptyDoc(baseDocObj.DocType)
+ if err != nil {
+ l.Error("error creating new empty doc",
+ "error", err,
+ "doc_id", docId,
+ )
+ http.Error(w, "Error accessing draft document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Get document object from Algolia.
+ err = ar.Drafts.GetObject(docId, &docObj)
+ if err != nil {
+ l.Error("error requesting document draft from Algolia",
+ "error", err,
+ "doc_id", docId,
+ )
+ http.Error(w, "Error accessing draft document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Authorize request (only the owner can access a document draft).
+ // NOTE(review): the "userEmail" context value is assumed to be set by
+ // auth middleware; the type assertion panics if it is absent — TODO
+ // confirm the middleware guarantee.
+ userEmail := r.Context().Value("userEmail").(string)
+ // NOTE(review): assumes GetOwners() is non-empty; an ownerless draft
+ // would panic here. Also, 403 Forbidden is arguably more accurate than
+ // 401 for an authenticated non-owner.
+ if docObj.GetOwners()[0] != userEmail {
+ http.Error(w,
+ `{"error": "Not a document owner"}`,
+ http.StatusUnauthorized)
+ return
+ }
+
+ switch r.Method {
+ case "GET":
+ // Get file from Google Drive so we can return the latest modified time.
+ file, err := s.GetFile(docId)
+ if err != nil {
+ l.Error("error getting document file from Google",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docId,
+ )
+ http.Error(w, "Error requesting document draft", http.StatusInternalServerError)
+ return
+ }
+
+ // Parse and set modified time.
+ modifiedTime, err := time.Parse(time.RFC3339Nano, file.ModifiedTime)
+ if err != nil {
+ l.Error("error parsing modified time",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docId,
+ )
+ http.Error(w, "Error requesting document draft", http.StatusInternalServerError)
+ return
+ }
+ docObj.SetModifiedTime(modifiedTime.Unix())
+
+ // Set custom editable fields.
+
+ docObj.SetCustomEditableFields()
+
+ // Write response.
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+
+ enc := json.NewEncoder(w)
+ err = enc.Encode(docObj)
+ if err != nil {
+ l.Error("error encoding document draft", "error", err, "doc_id", docId)
+ http.Error(w, "Error requesting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ l.Info("retrieved document draft", "doc_id", docId)
+
+ case "DELETE":
+ // Delete document
+ // NOTE(review): the Drive file is deleted before the Algolia object; if
+ // the Algolia delete below fails, the two stores are left out of sync —
+ // consider a reconciliation path.
+ err = s.DeleteFile(docId)
+ if err != nil {
+ l.Error("error deleting document", "error", err, "doc_id", docId)
+ http.Error(w, "Error deleting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Delete object in Algolia
+ res, err := aw.Drafts.DeleteObject(docId)
+ if err != nil {
+ l.Error("error deleting document draft from algolia",
+ "error", err,
+ "doc_id", docId,
+ )
+ http.Error(w, "Error deleting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+ err = res.Wait()
+ if err != nil {
+ l.Error("error deleting document draft from algolia",
+ "error", err,
+ "doc_id", docId,
+ )
+ http.Error(w, "Error deleting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ resp := &DraftsResponse{
+ ID: docId,
+ }
+
+ // Write response.
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+
+ enc := json.NewEncoder(w)
+ err = enc.Encode(resp)
+ if err != nil {
+ l.Error("error encoding document id", "error", err, "doc_id", docId)
+ http.Error(w, "Error deleting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ case "PATCH":
+ // Copy request body so we can use both for validation using the request
+ // struct, and then afterwards for patching the document JSON.
+
+ // NOTE(review): ioutil.ReadAll/ioutil.NopCloser are deprecated since Go
+ // 1.16 in favor of io.ReadAll/io.NopCloser. Also note the body is read
+ // without a size limit; consider http.MaxBytesReader.
+ buf, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ l.Error("error reading request body",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docId)
+ http.Error(w, "Error patching document draft",
+ http.StatusInternalServerError)
+ return
+ }
+ // Two independent readers over the same bytes: one for the validating
+ // decode (via r.Body), one for patching the document object below.
+ body := ioutil.NopCloser(bytes.NewBuffer(buf))
+ newBody := ioutil.NopCloser(bytes.NewBuffer(buf))
+ r.Body = newBody
+
+ // Decode request. The request struct validates that the request only
+ // contains fields that are allowed to be patched.
+ var req DraftsPatchRequest
+ if err := decodeRequest(r, &req); err != nil {
+ l.Error("error decoding draft patch request", "error", err)
+ http.Error(w, fmt.Sprintf("Bad request: %q", err),
+ http.StatusBadRequest)
+ return
+ }
+
+ // Compare contributors in request and stored object in Algolia
+ // before we save the patched object.
+ // Find out contributors to share the document with
+ var contributorsToAddSharing []string
+ if len(docObj.GetContributors()) == 0 && len(req.Contributors) != 0 {
+ // If there are no contributors of the document
+ // add the contributors in the request
+ contributorsToAddSharing = req.Contributors
+ } else if len(req.Contributors) != 0 {
+ // Only compare when there are stored contributors
+ // and contributors in the request
+ contributorsToAddSharing = compareSlices(docObj.GetContributors(), req.Contributors)
+ }
+ // Find out contributors to remove from sharing the document
+ var contributorsToRemoveSharing []string
+ // TODO: figure out how we want to handle user removing all contributors
+ // from the sidebar select
+ if len(docObj.GetContributors()) != 0 && len(req.Contributors) != 0 {
+ // Compare contributors when there are stored contributors
+ // and there are contributors in the request
+ contributorsToRemoveSharing = compareSlices(req.Contributors, docObj.GetContributors())
+ }
+
+ // Patch document by decoding the (now validated) request body JSON to the
+ // document object.
+
+ err = json.NewDecoder(body).Decode(docObj)
+ if err != nil {
+ l.Error("error decoding request body to document object",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docId)
+ http.Error(w, "Error patching document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Share file with contributors.
+ // Google Drive API limitation
+ // is that you can only share files
+ // with one user at a time
+ for _, c := range contributorsToAddSharing {
+ if err := s.ShareFile(docId, c, "writer"); err != nil {
+ l.Error("error sharing file with the contributor",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docId,
+ "contributor", c)
+ http.Error(w, "Error patching document draft",
+ http.StatusInternalServerError)
+ return
+ }
+ }
+ if len(contributorsToAddSharing) > 0 {
+ l.Info("shared document with contributors",
+ "contributors_count", len(contributorsToAddSharing))
+ }
+
+ // Remove contributors from file.
+ // This unfortunately needs to be done one user at a time
+ for _, c := range contributorsToRemoveSharing {
+ // Only remove contributor if the email
+ // associated with the permission doesn't
+ // match owner email(s).
+ if !contains(docObj.GetOwners(), c) {
+ if err := removeSharing(s, docId, c); err != nil {
+ l.Error("error removing contributor from file",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docId,
+ "contributor", c)
+ http.Error(w, "Error patching document draft",
+ http.StatusInternalServerError)
+ return
+ }
+ }
+ }
+ if len(contributorsToRemoveSharing) > 0 {
+ l.Info("removed contributors from document",
+ "contributors_count", len(contributorsToRemoveSharing))
+ }
+
+ // Save new modified draft doc object in Algolia.
+ res, err := aw.Drafts.SaveObject(docObj) + if err != nil { + l.Error("error saving patched draft doc in Algolia", "error", err, + "doc_id", docId) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + err = res.Wait() + if err != nil { + l.Error("error saving patched draft doc in Algolia", "error", err, + "doc_id", docId) + http.Error(w, "Error creating document draft", + http.StatusInternalServerError) + return + } + + // Replace the doc header. + err = docObj.ReplaceHeader( + docId, cfg.BaseURL, true, s) + if err != nil { + l.Error("error replacing draft doc header", + "error", err, "doc_id", docId) + http.Error(w, "Error patching document draft", + http.StatusInternalServerError) + return + } + + // Rename file with new title. + s.RenameFile(docId, + fmt.Sprintf("[%s] %s", docObj.GetDocNumber(), docObj.GetTitle())) + + w.WriteHeader(http.StatusOK) + l.Info("patched draft document", "doc_id", docId) + + default: + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + }) +} + +// validateID validates the whether the ID matches +// the ID tag in the Algolia document object +func validateID(id string, tags []string) error { + // draft document should have tags set + // in order to verify the document + // was created by a particular id + if len(tags) == 0 { + return fmt.Errorf("tags cannot be empty") + } + for _, j := range tags { + if strings.Contains(j, "o_id:") { + // Prevent user requesting a document draft that + // wasn't created by them. + if j != "o_id:"+id { + return fmt.Errorf("oidc id didn't match the id tag on the document") + } + } else { + return fmt.Errorf("o_id tag wasn't set in the object") + } + } + + return nil +} + +// parseURLPath parses the URL path with format "{prefix}/{resource_id}" +// (e.g., "/api/v1/drafts/{document_id}") and returns a +// resource ID +// TODO: make more extensible using regexp package and move to helpers.go. 
func parseURLPath(path, prefix string) (string, error) {
	// Strip the API prefix (e.g. "/api/v1/drafts") before inspecting the
	// remaining path segments.
	trimmed := strings.TrimPrefix(path, prefix)

	// Collect the non-empty path segments; splitting on "/" yields empty
	// strings for leading/trailing/duplicate separators, which we discard.
	var segments []string
	for _, part := range strings.Split(trimmed, "/") {
		if part != "" {
			segments = append(segments, part)
		}
	}

	// Exactly one segment is valid: the resource (document) ID itself.
	// Anything deeper (e.g. "/{document_id}/extra") is rejected, as is an
	// empty remainder (no ID supplied at all).
	switch len(segments) {
	case 1:
		return segments[0], nil
	case 0:
		return "", fmt.Errorf("no document id set in url path")
	default:
		return "", fmt.Errorf("invalid url path")
	}
}

// getDocTypeTemplate returns the file ID of the template for a specified
// document type or an empty string if not found.
+func getDocTypeTemplate(
+ docTypes []*config.DocumentType,
+ docType string,
+) string {
+ template := ""
+
+ for _, t := range docTypes {
+ // NOTE(review): the comparison upper-cases the configured name but not
+ // docType, so callers are assumed to pass docType already upper-cased —
+ // TODO confirm at call sites.
+ if strings.ToUpper(t.Name) == docType {
+ template = t.Template
+ break
+ }
+ }
+
+ return template
+}
+
+// removeSharing lists permissions for a document and then
+// deletes the permission for the supplied user email.
+// Note: only the first permission matching the email is deleted.
+func removeSharing(s *gw.Service, docId, email string) error {
+ permissions, err := s.ListPermissions(docId)
+ if err != nil {
+ return err
+ }
+ for _, p := range permissions {
+ if p.EmailAddress == email {
+ return s.DeletePermission(docId, p.Id)
+ }
+ }
+ // No matching permission is not treated as an error.
+ return nil
+}
diff --git a/internal/api/helpers.go b/internal/api/helpers.go
new file mode 100644
index 000000000..1bfcd9b71
--- /dev/null
+++ b/internal/api/helpers.go
@@ -0,0 +1,92 @@
+package api
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "strings"
+)
+
+// contains returns true if a string is present in a slice of strings.
+func contains(values []string, s string) bool {
+ for _, v := range values {
+ if s == v {
+ return true
+ }
+ }
+ return false
+}
+
+// compareSlices compares the first slice with the second
+// and returns the elements that exist in the second slice
+// that don't exist in the first
+func compareSlices(a, b []string) []string {
+ // Create a map with the length of slice "a"
+ tempA := make(map[string]bool, len(a))
+ for _, j := range a {
+ tempA[j] = true
+ }
+
+ // Initialized non-nil so identical/empty inputs return an empty (not nil)
+ // slice.
+ diffElems := []string{}
+ for _, k := range b {
+ // If elements in slice "b" are
+ // not present in slice "a" then
+ // append to diffElems slice
+ if !tempA[k] {
+ diffElems = append(diffElems, k)
+ }
+ }
+
+ return diffElems
+}
+
+// decodeRequest decodes the JSON contents of a HTTP request body to a request
+// struct. An error is returned if the request contains fields that do not exist
+// in the request struct.
+func decodeRequest(r *http.Request, reqStruct interface{}) error {
+ dec := json.NewDecoder(r.Body)
+ // Reject request bodies containing fields not present in reqStruct.
+ dec.DisallowUnknownFields()
+ // Loop until EOF so every JSON value in the body is consumed; a stream of
+ // multiple values would each be decoded into reqStruct.
+ // NOTE(review): &reqStruct is a pointer to an interface; encoding/json
+ // unwraps it to the pointer callers pass in, but passing reqStruct
+ // directly would be more idiomatic.
+ for {
+ if err := dec.Decode(&reqStruct); err == io.EOF {
+ break
+ } else if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// parseResourceIDFromURL parses a URL path with the format
+// "/api/v1/{apiPath}/{resourceID}" and returns the resource ID.
+func parseResourceIDFromURL(url, apiPath string) (string, error) {
+ // Remove API path from URL.
+ url = strings.TrimPrefix(url, fmt.Sprintf("/api/v1/%s", apiPath))
+
+ // Remove empty entries and validate path.
+ urlPath := strings.Split(url, "/")
+ var resultPath []string
+ for _, v := range urlPath {
+ // Only append non-empty values, this removes any empty strings in the
+ // slice.
+ if v != "" {
+ resultPath = append(resultPath, v)
+ }
+ }
+ resultPathLen := len(resultPath)
+ // Only allow 1 value to be set in the resultPath slice. For example, if the
+ // urlPath is set to "/{document_id}" then the resultPath slice would be
+ // ["{document_id}"].
+ if resultPathLen > 1 {
+ return "", fmt.Errorf("invalid URL path")
+ }
+ // If there are no entries in the resultPath slice, then there was no resource
+ // ID set in the URL path. Return an empty string.
+ if resultPathLen == 0 {
+ return "", fmt.Errorf("no document ID set in url path")
+ }
+
+ // Return resource ID.
+ return resultPath[0], nil
+}
diff --git a/internal/api/helpers_test.go b/internal/api/helpers_test.go
new file mode 100644
index 000000000..a011b53b8
--- /dev/null
+++ b/internal/api/helpers_test.go
@@ -0,0 +1,98 @@
+package api
+
+import (
+ "reflect"
+ "testing"
+)
+
+func TestParseResourceIDFromURL(t *testing.T) {
+ cases := map[string]struct {
+ url string
+ apiPath string
+
+ want string
+ shouldErr bool
+ }{
+ "good": {
+ url: "/api/v1/drafts/myID",
+ apiPath: "drafts",
+
+ want: "myID",
+ },
+ "extra path after resource ID": {
+ url: "/api/v1/drafts/myID/something",
+ apiPath: "drafts",
+
+ shouldErr: true,
+ },
+ "no resource ID": {
+ url: "/api/v1/drafts",
+ apiPath: "drafts",
+
+ shouldErr: true,
+ },
+ }
+
+ for name, c := range cases {
+ t.Run(name, func(t *testing.T) {
+ if got, err := parseResourceIDFromURL(c.url, c.apiPath); err != nil {
+ if !c.shouldErr {
+ t.Error(err)
+ }
+ } else {
+ if got != c.want {
+ t.Errorf("got %q, want %q", got, c.want)
+ }
+ }
+ })
+ }
+}
+
+func TestCompareSlices(t *testing.T) {
+ cases := map[string]struct {
+ firstSlice []string
+ secondSlice []string
+
+ want []string
+ }{
+ "second slice has an element that first slice doesn't": {
+ firstSlice: []string{"a", "b", "c"},
+ secondSlice: []string{"a", "d"},
+
+ want: []string{"d"},
+ },
+ "empty slices": {
+ firstSlice: []string{},
+ secondSlice: []string{},
+
+ want: []string{},
+ },
+ "identical slices": {
+ firstSlice: []string{"a", "b", "c"},
+ secondSlice: []string{"a", "b", "c"},
+
+ want: []string{},
+ },
+ "first slice has elements and second slice is empty": {
+ firstSlice: []string{"a", "b", "c"},
+ secondSlice: []string{},
+
+ want: []string{},
+ },
+ "first slice is empty and second slice has elements": {
+ firstSlice: []string{},
+ secondSlice: []string{"a", "b", "c"},
+
+ want: []string{"a", "b", "c"},
+ },
+ }
+
+ for name, c := range cases {
+ t.Run(name, func(t *testing.T) {
+ got := compareSlices(c.firstSlice, c.secondSlice)
+ if 
!reflect.DeepEqual(got, c.want) {
+ t.Errorf("got %q, want %q", got, c.want)
+ }
+ })
+ }
+}
diff --git a/internal/api/me_subscriptions.go b/internal/api/me_subscriptions.go
new file mode 100644
index 000000000..1efb9d99d
--- /dev/null
+++ b/internal/api/me_subscriptions.go
@@ -0,0 +1,127 @@
+package api
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/config"
+ gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+ "github.com/hashicorp/go-hclog"
+ "gorm.io/gorm"
+)
+
+// MeSubscriptionsPostRequest is the request body for POST
+// /api/v1/me/subscriptions: the full list of product names the user is
+// subscribed to.
+type MeSubscriptionsPostRequest struct {
+ Subscriptions []string `json:"subscriptions"`
+}
+
+// MeSubscriptionsHandler returns (GET) or replaces (POST) the authenticated
+// user's product subscriptions.
+func MeSubscriptionsHandler(
+ cfg *config.Config,
+ l hclog.Logger,
+ s *gw.Service,
+ db *gorm.DB,
+) http.Handler {
+
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ // errResp logs an internal error and writes a JSON error body.
+ // NOTE(review): userErrMsg is interpolated into the JSON unescaped, and
+ // http.Error sets a text/plain Content-Type despite the JSON body —
+ // acceptable only while all messages are static literals.
+ errResp := func(httpCode int, userErrMsg, logErrMsg string, err error) {
+ l.Error(logErrMsg,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "error", err,
+ )
+ errJSON := fmt.Sprintf(`{"error": "%s"}`, userErrMsg)
+ http.Error(w, errJSON, httpCode)
+ }
+
+ // Authorize request.
+ // NOTE(review): the type assertion panics if the context value is
+ // missing — TODO confirm the auth middleware always sets it.
+ userEmail := r.Context().Value("userEmail").(string)
+ if userEmail == "" {
+ errResp(
+ http.StatusUnauthorized,
+ "No authorization information for request",
+ "no user email found in request context",
+ nil,
+ )
+ return
+ }
+
+ switch r.Method {
+ case "GET":
+ // Find or create user.
+ u := models.User{
+ EmailAddress: userEmail,
+ }
+ if err := u.FirstOrCreate(db); err != nil {
+ errResp(
+ http.StatusInternalServerError,
+ "Error authorizing the request",
+ "error finding or creating user",
+ err,
+ )
+ return
+ }
+
+ // Build response of product subscriptions.
+ // NOTE(review): products stays nil when there are no subscriptions,
+ // which encodes as JSON null rather than [] — confirm the frontend
+ // handles null.
+ var products []string
+ for _, p := range u.ProductSubscriptions {
+ products = append(products, p.Name)
+ }
+
+ // Write response.
+
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+ enc := json.NewEncoder(w)
+ if err := enc.Encode(products); err != nil {
+ errResp(
+ http.StatusInternalServerError,
+ "Error finding product subscriptions",
+ "error encoding products to JSON",
+ err,
+ )
+ return
+ }
+
+ case "POST":
+ // Decode request.
+ var req MeSubscriptionsPostRequest
+ if err := decodeRequest(r, &req); err != nil {
+ errResp(
+ http.StatusBadRequest,
+ "Bad request",
+ "error decoding request",
+ err,
+ )
+ return
+ }
+
+ // Build user product subscriptions.
+ var subs []models.Product
+ for _, p := range req.Subscriptions {
+ subs = append(subs, models.Product{Name: p})
+ }
+
+ // Upsert user.
+ u := models.User{
+ EmailAddress: userEmail,
+ ProductSubscriptions: subs,
+ }
+ if err := u.Upsert(db); err != nil {
+ errResp(
+ http.StatusInternalServerError,
+ "Error updating user subscriptions",
+ "error upserting user",
+ err,
+ )
+ return
+ }
+
+ // Write response.
+ w.WriteHeader(http.StatusOK)
+
+ default:
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+ })
+}
diff --git a/internal/api/people.go b/internal/api/people.go
new file mode 100644
index 000000000..b0c966957
--- /dev/null
+++ b/internal/api/people.go
@@ -0,0 +1,109 @@
+package api
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "github.com/hashicorp-forge/hermes/internal/config"
+ gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace"
+ "github.com/hashicorp/go-hclog"
+ "google.golang.org/api/people/v1"
+)
+
+// PeopleDataRequest contains the fields that are allowed to
+// make the POST request.
+type PeopleDataRequest struct {
+ Query string `json:"query,omitempty"`
+}
+
+// PeopleDataHandler returns people related data from the Google API
+// to the Hermes frontend.
+
+func PeopleDataHandler(
+ cfg *config.Config,
+ log hclog.Logger,
+ s *gw.Service) http.Handler {
+
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ req := &PeopleDataRequest{}
+ switch r.Method {
+ // Using POST method to avoid logging the query in browser history
+ // and server logs
+ case "POST":
+ // NOTE(review): req is already a pointer, so &req passes a pointer to
+ // a pointer; encoding/json handles this, but decodeRequest(r, req)
+ // would be more idiomatic.
+ if err := decodeRequest(r, &req); err != nil {
+ log.Error("error decoding people request", "error", err)
+ http.Error(w, fmt.Sprintf("Bad request: %q", err),
+ http.StatusBadRequest)
+ return
+ }
+
+ users, err := s.People.SearchDirectoryPeople().
+ Query(req.Query).
+ // Only query for photos and email addresses
+ // This may be expanded based on use case
+ // in the future
+ ReadMask("photos,emailAddresses").
+ Sources("DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE").
+ Do()
+ if err != nil {
+ log.Error("error searching people directory", "error", err)
+ http.Error(w, fmt.Sprintf("Error searching people directory: %q", err),
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Write response.
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+
+ enc := json.NewEncoder(w)
+ err = enc.Encode(users.People)
+ if err != nil {
+ log.Error("error encoding people response", "error", err)
+ http.Error(w, "Error searching people directory",
+ http.StatusInternalServerError)
+ return
+ }
+ case "GET":
+ query := r.URL.Query()
+ if len(query["emails"]) != 1 {
+ log.Error("attempted to get users without providing any email addresses")
+ http.Error(w, "Attempted to get users without providing a single value for the emails query parameter.", http.StatusBadRequest)
+ } else {
+ emails := strings.Split(query["emails"][0], ",")
+ var people []*people.Person
+
+ // One directory search per email address; lookup misses are logged
+ // and skipped rather than failing the whole request.
+ for _, email := range emails {
+ result, err := s.People.SearchDirectoryPeople().
+ Query(email).
+ ReadMask("photos,emailAddresses").
+ Sources("DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE").
+
+ Do()
+
+ if err == nil && len(result.People) > 0 {
+ people = append(people, result.People[0])
+ } else {
+ log.Warn("Email lookup miss", "error", err)
+ }
+ }
+
+ // Write response.
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+
+ enc := json.NewEncoder(w)
+ err := enc.Encode(people)
+ if err != nil {
+ log.Error("error encoding people response", "error", err)
+ http.Error(w, "Error getting people responses",
+ http.StatusInternalServerError)
+ return
+ }
+ }
+ default:
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+ })
+}
diff --git a/internal/api/products.go b/internal/api/products.go
new file mode 100644
index 000000000..3c6103ff9
--- /dev/null
+++ b/internal/api/products.go
@@ -0,0 +1,59 @@
+package api
+
+import (
+ "encoding/json"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/config"
+ "github.com/hashicorp-forge/hermes/internal/structs"
+ "github.com/hashicorp-forge/hermes/pkg/algolia"
+ "github.com/hashicorp/go-hclog"
+)
+
+// ProductsHandler returns the product mappings to the Hermes frontend.
+func ProductsHandler(cfg *config.Config, a *algolia.Client, log hclog.Logger) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ // Only allow GET requests.
+
+ if r.Method != http.MethodGet {
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+
+ // Get products and associated data from Algolia
+ products, err := getProductsData(a)
+ if err != nil {
+ log.Error("error getting products from algolia", "error", err)
+ http.Error(w, "Error getting product mappings",
+ http.StatusInternalServerError)
+ return
+ }
+
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+
+ enc := json.NewEncoder(w)
+ err = enc.Encode(products)
+ if err != nil {
+ log.Error("error encoding products response", "error", err)
+ http.Error(w, "Error getting products",
+ http.StatusInternalServerError)
+ return
+ }
+ })
+}
+
+// getProductsData gets the product or area name and their associated
+// data from the "products" object in Algolia's internal index.
+func getProductsData(a *algolia.Client) (map[string]structs.ProductData, error) {
+ p := structs.Products{
+ ObjectID: "products",
+ Data: make(map[string]structs.ProductData, 0),
+ }
+
+ err := a.Internal.GetObject("products", &p)
+ if err != nil {
+ return nil, err
+ }
+
+ return p.Data, nil
+}
diff --git a/internal/api/reviews.go b/internal/api/reviews.go
new file mode 100644
index 000000000..aa475d66b
--- /dev/null
+++ b/internal/api/reviews.go
@@ -0,0 +1,725 @@
+package api
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "path"
+ "strings"
+
+ "github.com/hashicorp-forge/hermes/internal/config"
+ "github.com/hashicorp-forge/hermes/internal/email"
+ "github.com/hashicorp-forge/hermes/pkg/algolia"
+ gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace"
+ hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs"
+ "github.com/hashicorp-forge/hermes/pkg/links"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+ "github.com/hashicorp/go-hclog"
+ "github.com/hashicorp/go-multierror"
+ "google.golang.org/api/drive/v3"
+ "gorm.io/gorm"
+)
+
+func ReviewHandler(
+ cfg *config.Config,
+ l hclog.Logger,
+ ar *algolia.Client,
+ aw *algolia.Client,
+ s *gw.Service,
+ db *gorm.DB,
+) http.Handler {
+ + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case "POST": + // Validate request. + docID, err := parseResourceIDFromURL(r.URL.Path, "reviews") + if err != nil { + l.Error("error parsing document ID from reviews path", + "error", err, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Document ID not found", http.StatusNotFound) + return + } + + // Get base document object from Algolia so we can determine the doc type. + baseDocObj := &hcd.BaseDoc{} + err = ar.Drafts.GetObject(docID, &baseDocObj) + if err != nil { + l.Error("error requesting base document object from Algolia", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + + // Create new document object of the proper doc type. + docObj, err := hcd.NewEmptyDoc(baseDocObj.DocType) + if err != nil { + l.Error("error creating new empty doc", + "error", err, + "path", r.URL.Path, + "method", r.Method, + "doc_id", docID, + ) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + + // Get document object from Algolia. + err = ar.Drafts.GetObject(docID, &docObj) + if err != nil { + l.Error("error getting document from Algolia", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + l.Info("retrieved document draft", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + + // Get latest product number. 
+ latestNum, err := models.GetLatestProductNumber( + db, docObj.GetDocType(), docObj.GetProduct()) + if err != nil { + l.Error("error getting product document number", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + + // Get product from database so we can get the product abbreviation. + product := models.Product{ + Name: docObj.GetProduct(), + } + if err := product.Get(db); err != nil { + l.Error("error getting product", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + + // Set the document number. + nextDocNum := latestNum + 1 + docObj.SetDocNumber(fmt.Sprintf("%s-%03d", + product.Abbreviation, + nextDocNum)) + + // Change document status to "In-Review". + docObj.SetStatus("In-Review") + + // Replace the doc header. + err = docObj.ReplaceHeader( + docID, cfg.BaseURL, true, s) + if err != nil { + l.Error("error replacing doc header", + "error", err, "doc_id", docID) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + + if err := revertReviewCreation( + docObj, product.Abbreviation, "", nil, cfg, aw, s, + ); err != nil { + l.Error("error reverting review creation", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + } + return + } + // Log replacing doc header + l.Info("doc header replaced", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + + // Get latest Google Drive file revision. 
+ latestRev, err := s.GetLatestRevision(docID) + if err != nil { + l.Error("error getting latest revision", + "error", err, + "method", r.Method, + "path", r.URL.Path, + "doc_id", docID) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + + if err := revertReviewCreation( + docObj, product.Abbreviation, "", nil, cfg, aw, s, + ); err != nil { + l.Error("error reverting review creation", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + } + return + } + + // Mark latest revision to be kept forever. + _, err = s.KeepRevisionForever(docID, latestRev.Id) + if err != nil { + l.Error("error marking revision to keep forever", + "error", err, + "method", r.Method, + "path", r.URL.Path, + "doc_id", docID, + "rev_id", latestRev.Id) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + + if err := revertReviewCreation( + docObj, product.Abbreviation, "", nil, cfg, aw, s, + ); err != nil { + l.Error("error reverting review creation", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + } + return + } + // Log replacing doc header + l.Info("doc revision set to be kept forever", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + + // Record file revision in the Algolia document object. + revisionName := "Requested review" + docObj.SetFileRevision(latestRev.Id, revisionName) + + // Move document object to docs index in Algolia. 
+ saveRes, err := aw.Docs.SaveObject(docObj) + if err != nil { + l.Error("error saving doc in Algolia", "error", err, "doc_id", docID) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + err = saveRes.Wait() + if err != nil { + l.Error("error saving doc in Algolia", "error", err, "doc_id", docID) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + l.Info("doc saved in Algolia", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + delRes, err := aw.Drafts.DeleteObject(docID) + if err != nil { + l.Error("error deleting draft in Algolia", + "error", err, "doc_id", docID) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + + if err := revertReviewCreation( + docObj, product.Abbreviation, latestRev.Id, nil, cfg, aw, s, + ); err != nil { + l.Error("error reverting review creation", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + } + return + } + err = delRes.Wait() + if err != nil { + l.Error("error deleting draft in Algolia", + "error", err, "doc_id", docID) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + + if err := revertReviewCreation( + docObj, product.Abbreviation, latestRev.Id, nil, cfg, aw, s, + ); err != nil { + l.Error("error reverting review creation", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + } + return + } + + // Move document to published docs location in Google Drive. 
+ if _, err := s.MoveFile( + docID, cfg.GoogleWorkspace.DocsFolder); err != nil { + l.Error("error moving file", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + + if err := revertReviewCreation( + docObj, product.Abbreviation, latestRev.Id, nil, cfg, aw, s, + ); err != nil { + l.Error("error reverting review creation", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + } + return + } + l.Info("doc moved to published document folder", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + + // Create shortcut in hierarchical folder structure. + shortcut, err := createShortcut(cfg, docObj, s) + if err != nil { + l.Error("error creating shortcut", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + + if err := revertReviewCreation( + docObj, product.Abbreviation, latestRev.Id, shortcut, cfg, aw, s, + ); err != nil { + l.Error("error reverting review creation", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + } + return + } + l.Info("doc shortcut created", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + + // Create go-link. 
+ if err := links.SaveDocumentRedirectDetails( + aw, docID, docObj.GetDocType(), docObj.GetDocNumber()); err != nil { + l.Error("error saving redirect details", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + + if err := revertReviewCreation( + docObj, product.Abbreviation, latestRev.Id, shortcut, cfg, aw, s, + ); err != nil { + l.Error("error reverting review creation", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + } + return + } + l.Info("doc redirect details saved", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + + // Update document in the database. + d := models.Document{ + GoogleFileID: docID, + } + if err := d.Get(db); err != nil { + l.Error("error getting document in database", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + + if err := revertReviewCreation( + docObj, product.Abbreviation, latestRev.Id, shortcut, cfg, aw, s, + ); err != nil { + l.Error("error reverting review creation", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + } + return + } + d.Status = models.InReviewDocumentStatus + d.DocumentNumber = nextDocNum + if err := d.Upsert(db); err != nil { + l.Error("error upserting document in database", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + + if err := revertReviewCreation( + docObj, product.Abbreviation, latestRev.Id, shortcut, cfg, aw, s, + ); err != nil { + l.Error("error reverting review creation", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path) + } + return + } + + // Send emails, if enabled. 
+ if cfg.Email != nil && cfg.Email.Enabled { + docURL, err := getDocumentURL(cfg.BaseURL, docID) + if err != nil { + l.Error("error getting document URL", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + + // Send emails to approvers. + if len(docObj.GetApprovers()) > 0 { + // TODO: use an asynchronous method for sending emails because we + // can't currently recover gracefully from a failure here. + for _, approverEmail := range docObj.GetApprovers() { + err := email.SendReviewRequestedEmail( + email.ReviewRequestedEmailData{ + BaseURL: cfg.BaseURL, + DocumentOwner: docObj.GetOwners()[0], + DocumentShortName: docObj.GetDocNumber(), + DocumentTitle: docObj.GetTitle(), + DocumentURL: docURL, + }, + []string{approverEmail}, + cfg.Email.FromAddress, + s, + ) + if err != nil { + l.Error("error sending approver email", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, "Error creating review", + http.StatusInternalServerError) + return + } + l.Info("doc approver email sent", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + } + } + + // Send emails to product subscribers. + p := models.Product{ + Name: docObj.GetProduct(), + } + if err := p.Get(db); err != nil { + l.Error("error getting product from database", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, `{"error": "Error sending subscriber email"}`, + http.StatusInternalServerError) + return + } + + if len(p.UserSubscribers) > 0 { + // TODO: use an asynchronous method for sending emails because we + // can't currently recover gracefully from a failure here. 
+ for _, subscriber := range p.UserSubscribers { + err := email.SendSubscriberDocumentPublishedEmail( + email.SubscriberDocumentPublishedEmailData{ + BaseURL: cfg.BaseURL, + DocumentOwner: docObj.GetOwners()[0], + DocumentShortName: docObj.GetDocNumber(), + DocumentTitle: docObj.GetTitle(), + DocumentURL: docURL, + Product: docObj.GetProduct(), + }, + []string{subscriber.EmailAddress}, + cfg.Email.FromAddress, + s, + ) + if err != nil { + l.Error("error sending subscriber email", + "error", err, + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, `{"error": "Error sending subscriber email"}`, + http.StatusInternalServerError) + return + } + l.Info("doc subscriber email sent", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + } + } + } + + // Write response. + w.WriteHeader(http.StatusOK) + + // Log success. + l.Info("review created", + "doc_id", docID, + "method", r.Method, + "path", r.URL.Path, + ) + + default: + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + }) +} + +// createShortcut creates a shortcut in the hierarchical folder structure +// ("Shortcuts Folder/RFC/MyProduct/") under docsFolder. +func createShortcut( + cfg *config.Config, + docObj hcd.Doc, + s *gw.Service) (shortcut *drive.File, retErr error) { + + // Get folder for doc type. + docTypeFolder, err := s.GetSubfolder( + cfg.GoogleWorkspace.ShortcutsFolder, docObj.GetDocType()) + if err != nil { + return nil, fmt.Errorf("error getting doc type subfolder: %w", err) + } + + // Doc type folder wasn't found, so create it. + if docTypeFolder == nil { + docTypeFolder, err = s.CreateFolder( + docObj.GetDocType(), cfg.GoogleWorkspace.ShortcutsFolder) + if err != nil { + return nil, fmt.Errorf("error creating doc type subfolder: %w", err) + } + } + + // Get folder for doc type + product. 
+ productFolder, err := s.GetSubfolder(docTypeFolder.Id, docObj.GetProduct()) + if err != nil { + return nil, fmt.Errorf("error getting product subfolder: %w", err) + } + + // Product folder wasn't found, so create it. + if productFolder == nil { + productFolder, err = s.CreateFolder( + docObj.GetProduct(), docTypeFolder.Id) + if err != nil { + return nil, fmt.Errorf("error creating product subfolder: %w", err) + } + } + + // Create shortcut. + if shortcut, err = s.CreateShortcut( + docObj.GetObjectID(), + productFolder.Id); err != nil { + + return nil, fmt.Errorf("error creating shortcut: %w", err) + } + + return +} + +// getDocumentURL returns a Hermes document URL. +func getDocumentURL(baseURL, docID string) (string, error) { + docURL, err := url.Parse(baseURL) + if err != nil { + return "", fmt.Errorf("error parsing base URL: %w", err) + } + + docURL.Path = path.Join(docURL.Path, "document", docID) + docURLString := docURL.String() + docURLString = strings.TrimRight(docURLString, "/") + + return docURLString, nil +} + +// revertReviewCreation attempts to revert the actions that occur when a review +// is created. This is to be used in the case of an error during the review- +// creation process. +// TODO: use some sort of undo stack of functions instead of checking if the +// arguments for this function are set. +func revertReviewCreation( + docObj hcd.Doc, + productAbbreviation string, + fileRevision string, + shortcut *drive.File, + cfg *config.Config, + a *algolia.Client, + s *gw.Service) error { + + // Use go-multierror so we can return all cleanup errors. + var result error + + // Delete go-link if it exists. + if err := links.DeleteDocumentRedirectDetails( + a, docObj.GetObjectID(), docObj.GetDocType(), docObj.GetDocNumber(), + ); err != nil { + result = multierror.Append( + result, fmt.Errorf("error deleting go-link: %w", err)) + } + + // Delete shortcut if it exists. 
+ if shortcut != nil { + if err := s.DeleteFile(shortcut.Id); err != nil { + result = multierror.Append( + result, fmt.Errorf("error deleting shortcut: %w", err)) + } + } + + // Move document back to drafts folder in Google Drive. + if _, err := s.MoveFile( + docObj.GetObjectID(), cfg.GoogleWorkspace.DraftsFolder); err != nil { + + result = multierror.Append( + result, fmt.Errorf("error moving doc back to drafts folder: %w", err)) + } + + // Change back document number to "ABC-???" and status to "WIP". + docObj.SetDocNumber(fmt.Sprintf("%s-???", productAbbreviation)) + docObj.SetStatus("WIP") + + // Replace the doc header. + if err := docObj.ReplaceHeader( + docObj.GetObjectID(), cfg.BaseURL, true, s); err != nil { + + result = multierror.Append( + result, fmt.Errorf("error replacing the doc header: %w", err)) + } + + // Delete file revision from Algolia document object. + if fileRevision != "" { + docObj.DeleteFileRevision(fileRevision) + } + + // Save doc back in the drafts index and delete it from the docs index. + saveRes, err := a.Drafts.SaveObject(docObj) + if err != nil { + result = multierror.Append( + result, fmt.Errorf("error saving draft in Algolia: %w", err)) + } + err = saveRes.Wait() + if err != nil { + result = multierror.Append( + result, fmt.Errorf("error saving draft in Algolia: %w", err)) + } + delRes, err := a.Docs.DeleteObject(docObj.GetObjectID()) + if err != nil { + result = multierror.Append( + result, fmt.Errorf("error deleting doc in Algolia: %w", err)) + } + err = delRes.Wait() + if err != nil { + result = multierror.Append( + result, fmt.Errorf("error deleting doc in Algolia: %w", err)) + } + + return result +} + +// setLatestProductDocumentNumberinDB sets the latest product document number in +// the database. +func setLatestProductDocumentNumberinDB( + doc hcd.Doc, + db *gorm.DB, + log hclog.Logger, +) error { + return db.Transaction(func(tx *gorm.DB) error { + // Get the latest product document number. 
+ p := models.ProductLatestDocumentNumber{ + DocumentType: models.DocumentType{ + Name: doc.GetDocType(), + }, + Product: models.Product{ + Name: doc.GetProduct(), + }, + } + if err := p.Get(tx); err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + // Latest product document number record doesn't exist so we need to + // create it (with latest document number of 1). + p = models.ProductLatestDocumentNumber{ + DocumentType: models.DocumentType{ + Name: doc.GetDocType(), + }, + Product: models.Product{ + Name: doc.GetProduct(), + }, + LatestDocumentNumber: 1, + } + if err := p.Upsert(tx); err != nil { + return fmt.Errorf( + "error upserting latest product document number: %w", err) + } + + return nil + } else { + return fmt.Errorf( + "error getting latest product document number: %w", err) + } + } + + p.LatestDocumentNumber = p.LatestDocumentNumber + 1 + if err := p.Upsert(tx); err != nil { + return fmt.Errorf( + "error upserting latest product document number: %w", err) + } + + return nil + }) +} diff --git a/internal/auth/auth.go b/internal/auth/auth.go new file mode 100644 index 000000000..5405dcafb --- /dev/null +++ b/internal/auth/auth.go @@ -0,0 +1,62 @@ +package auth + +import ( + "net/http" + + "github.com/hashicorp-forge/hermes/internal/auth/google" + "github.com/hashicorp-forge/hermes/internal/auth/oktaalb" + "github.com/hashicorp-forge/hermes/internal/config" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + "github.com/hashicorp/go-hclog" +) + +// AuthenticateRequest is middleware that authenticates an HTTP request. +func AuthenticateRequest( + cfg config.Config, gwSvc *gw.Service, log hclog.Logger, next http.Handler, +) http.Handler { + // If Okta isn't disabled, authenticate using Okta. + if cfg.Okta != nil && !cfg.Okta.Disabled { + // Create Okta authorizer. 
+	oa, err := oktaalb.New(*cfg.Okta, log)
+	if err != nil {
+		log.Error("error creating Okta authenticator")
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			http.Error(w,
+				`{"error": "Internal server error"}`, http.StatusInternalServerError)
+			return
+		})
+	}
+
+	// Return handler wrapped with Okta auth.
+	return oa.EnforceOktaAuth(
+		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			validateUserEmail(w, r, log)
+			next.ServeHTTP(w, r)
+		}))
+	}
+
+	// Authenticate using Google.
+	return google.AuthenticateRequest(gwSvc, log,
+		// Return handler wrapped with Google auth.
+		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			validateUserEmail(w, r, log)
+			next.ServeHTTP(w, r)
+		}))
+}
+
+// validateUserEmail validates that userEmail was set in the request's context.
+// It responds with an internal server error if not found because this should
+// be set by all authentication methods. userEmail is used for authorization in
+// API endpoint implementations.
+func validateUserEmail(
+	w http.ResponseWriter, r *http.Request, log hclog.Logger,
+) {
+	if r.Context().Value("userEmail") == nil {
+		log.Error("userEmail is not set in the request context",
+			"method", r.Method,
+			"path", r.URL.Path)
+		http.Error(w,
+			`{"error": "Internal server error"}`, http.StatusInternalServerError)
+		return
+	}
+}
diff --git a/internal/auth/google/google.go b/internal/auth/google/google.go
new file mode 100644
index 000000000..90653a20c
--- /dev/null
+++ b/internal/auth/google/google.go
@@ -0,0 +1,52 @@
+package google
+
+import (
+	"context"
+	"net/http"
+
+	gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace"
+	"github.com/hashicorp/go-hclog"
+)
+
+const (
+	tokenHeader = "Hermes-Google-Access-Token"
+)
+
+// AuthenticateRequest is middleware that authenticates an HTTP request using
+// Google.
+func AuthenticateRequest(
+	s *gw.Service, log hclog.Logger, next http.Handler,
+) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		// Get user email from Google access token.
+		tok := r.Header.Get(tokenHeader)
+
+		// Validate access token.
+		ti, err := s.ValidateAccessToken(tok)
+		if err != nil || !ti.VerifiedEmail {
+			log.Error("error validating Google access token",
+				"error", err,
+				"method", r.Method,
+				"path", r.URL.Path,
+			)
+			http.Error(w,
+				`{"error": "Unauthorized"}`, http.StatusUnauthorized)
+			return
+		}
+		if ti.Email == "" {
+			log.Error("no user email found in Google access token",
+				"method", r.Method,
+				"path", r.URL.Path,
+			)
+			http.Error(w,
+				`{"error": "Unauthorized"}`, http.StatusUnauthorized)
+			return
+		}
+
+		// Set userEmail in request context.
+		ctx := context.WithValue(r.Context(), "userEmail", ti.Email)
+		r = r.WithContext(ctx)
+
+		next.ServeHTTP(w, r)
+	})
+}
diff --git a/internal/auth/oktaalb/doc.go b/internal/auth/oktaalb/doc.go
new file mode 100644
index 000000000..5778bc840
--- /dev/null
+++ b/internal/auth/oktaalb/doc.go
@@ -0,0 +1,3 @@
+// Package oktaalb implements authorization using Okta and an Amazon Application
+// Load Balancer.
+package oktaalb
diff --git a/internal/auth/oktaalb/oktaalb.go b/internal/auth/oktaalb/oktaalb.go
new file mode 100644
index 000000000..162fa7177
--- /dev/null
+++ b/internal/auth/oktaalb/oktaalb.go
@@ -0,0 +1,107 @@
+package oktaalb
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+
+	"github.com/hashicorp/go-hclog"
+	verifier "github.com/okta/okta-jwt-verifier-golang"
+)
+
+const (
+	audience = "api://default"
+)
+
+// OktaAuthorizer implements authorization using Okta.
+type OktaAuthorizer struct {
+	// cfg is the configuration for the authorizer.
+	cfg Config
+
+	// log is the logger to use.
+	log hclog.Logger
+}
+
+// Config is the configuration for Okta authorization.
+type Config struct {
+	// AuthServerURL is the URL of the Okta authorization server.
+ AuthServerURL string `hcl:"auth_server_url,optional"` + + // ClientID is the Okta client ID. + ClientID string `hcl:"client_id,optional"` + + // Disabled disables Okta authorization. + Disabled bool `hcl:"disabled,optional"` +} + +// New returns a new Okta authorizer. +func New(cfg Config, l hclog.Logger) (*OktaAuthorizer, error) { + return &OktaAuthorizer{ + cfg: cfg, + log: l, + }, nil +} + +// EnforceOktaAuth is HTTP middleware that enforces Okta authorization. +func (oa *OktaAuthorizer) EnforceOktaAuth(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + id := r.Header.Get("x-amzn-oidc-identity") + if id == "" { + oa.log.Error("no identity header found", + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, `{"error": "Unauthorized"}`, http.StatusUnauthorized) + return + } + + jwt, err := oa.verifyOIDCToken(r) + if err != nil { + oa.log.Error("error validating OIDC token", + "error", err, + "method", r.Method, + "path", r.URL.Path, + ) + http.Error(w, `{"error": "Unauthorized"}`, http.StatusUnauthorized) + return + } else { + // Set user email from the OIDC claims. + ctx := context.WithValue(r.Context(), "userEmail", jwt.Claims["sub"]) + r = r.WithContext(ctx) + + next.ServeHTTP(w, r) + } + }) +} + +// verifyOIDCToken checks if the request is authorized. +func (oa *OktaAuthorizer) verifyOIDCToken(r *http.Request) (*verifier.Jwt, error) { + tok := r.Header.Get("x-amzn-oidc-accesstoken") + if tok == "" { + oa.log.Error("no access token header found") + return nil, fmt.Errorf("no access token header found") + } + + return oa.verifyAccessToken(tok) +} + +// verifyAccessToken verifies an Okta access token. 
+func (oa *OktaAuthorizer) verifyAccessToken(t string) (*verifier.Jwt, error) { + claims := map[string]string{} + claims["aud"] = audience + claims["cid"] = oa.cfg.ClientID + jv := verifier.JwtVerifier{ + Issuer: oa.cfg.AuthServerURL, + ClaimsToValidate: claims, + } + + resp, err := jv.New().VerifyAccessToken(t) + if err != nil { + return nil, err + } + if resp == nil { + return nil, fmt.Errorf("jwt verifier was nil") + } + + return resp, nil +} diff --git a/internal/cmd/base/base.go b/internal/cmd/base/base.go new file mode 100644 index 000000000..1b927a218 --- /dev/null +++ b/internal/cmd/base/base.go @@ -0,0 +1,125 @@ +package base + +import ( + "bytes" + "context" + "flag" + "fmt" + "os" + "os/signal" + "strings" + "syscall" + + "github.com/hashicorp/go-hclog" + "github.com/mitchellh/cli" +) + +// Command is a base collection of common logic and data embedded in all +// commands. +type Command struct { + // Context is the base context for the command. It is up to commands to + // utilize this context so that cancellation works in a timely manner. + Context context.Context + + // Flags is the flag set for the command. + Flags *flag.FlagSet + + // Log is the logger to use. + Log hclog.Logger + + // ShutdownCh is a channel that can be used for shutdown notifications + // for commands. + ShutdownCh chan struct{} + + // UI is used to write to the CLI. + UI cli.Ui +} + +// NewCommand returns a new instance of a base.Command type. +func NewCommand(log hclog.Logger, ui cli.Ui) *Command { + ctx, cancel := context.WithCancel(context.Background()) + ret := &Command{ + Context: ctx, + Log: log, + ShutdownCh: MakeShutdownCh(), + UI: ui, + } + + go func() { + <-ret.ShutdownCh + cancel() + }() + + return ret +} + +// MakeShutdownCh returns a channel that can be used for shutdown +// notifications for commands. This channel will send a message for every +// SIGINT or SIGTERM received. 
+func MakeShutdownCh() chan struct{} { + resultCh := make(chan struct{}) + + shutdownCh := make(chan os.Signal, 1) + signal.Notify(shutdownCh, os.Interrupt, syscall.SIGTERM) + go func() { + <-shutdownCh + close(resultCh) + }() + return resultCh +} + +// WaitForInterrupt waits for an interrupt signal and runs a provided shutdown +// function. While the graceful shutdown is in progress, another interrupt can +// trigger a forced-shutdown and will immediately exit. +func (c *Command) WaitForInterrupt(shutdownFunc func()) int { + shutdownTriggered := false + + for !shutdownTriggered { + <-c.ShutdownCh + c.Log.Info("shutdown triggered, interrupt again to force") + + // Add a force-shutdown goroutine to consume another interrupt. + abortForceShutdownCh := make(chan struct{}) + defer close(abortForceShutdownCh) + go func() { + shutdownCh := make(chan os.Signal, 1) + signal.Notify(shutdownCh, os.Interrupt, syscall.SIGTERM) + select { + case <-shutdownCh: + c.Log.Error("second interrupt received, forcing shutdown") + os.Exit(1) + case <-abortForceShutdownCh: + // No-op, we just use this to shut down the goroutine. + } + }() + + // Run provided shutdown function. + shutdownFunc() + c.Log.Info("shutdown complete") + + shutdownTriggered = true + } + + return 0 +} + +// FlagSet is a wrapper around a flag set. +type FlagSet struct { + *flag.FlagSet +} + +// NewFlagSet creates a new flag set. +func NewFlagSet(f *flag.FlagSet) *FlagSet { + return &FlagSet{f} +} + +// Help generates usage text for a flag set. 
+func (f *FlagSet) Help() string { + var out bytes.Buffer + + fmt.Fprintf(&out, "\n\nCommand Options:\n") + f.SetOutput(&out) + f.PrintDefaults() + + return strings.TrimSuffix(out.String(), "\n") +} diff --git a/internal/cmd/commands.go b/internal/cmd/commands.go new file mode 100644 index 000000000..38ffa3b3c --- /dev/null +++ b/internal/cmd/commands.go @@ -0,0 +1,36 @@ +package cmd + +import ( + "github.com/hashicorp/go-hclog" + "github.com/mitchellh/cli" + + "github.com/hashicorp-forge/hermes/internal/cmd/base" + "github.com/hashicorp-forge/hermes/internal/cmd/commands/indexer" + "github.com/hashicorp-forge/hermes/internal/cmd/commands/server" + "github.com/hashicorp-forge/hermes/internal/cmd/commands/version" +) + +// Commands is the mapping of all the available commands. +var Commands map[string]cli.CommandFactory + +func initCommands(log hclog.Logger, ui cli.Ui) { + b := base.NewCommand(log, ui) + + Commands = map[string]cli.CommandFactory{ + "indexer": func() (cli.Command, error) { + return &indexer.Command{ + Command: b, + }, nil + }, + "server": func() (cli.Command, error) { + return &server.Command{ + Command: b, + }, nil + }, + "version": func() (cli.Command, error) { + return &version.Command{ + Command: b, + }, nil + }, + } +} diff --git a/internal/cmd/commands/indexer/indexer.go b/internal/cmd/commands/indexer/indexer.go new file mode 100644 index 000000000..4ca9c28ca --- /dev/null +++ b/internal/cmd/commands/indexer/indexer.go @@ -0,0 +1,140 @@ +package indexer + +import ( + "flag" + "fmt" + "strings" + + validation "github.com/go-ozzo/ozzo-validation/v4" + "github.com/hashicorp-forge/hermes/internal/cmd/base" + "github.com/hashicorp-forge/hermes/internal/config" + "github.com/hashicorp-forge/hermes/internal/db" + "github.com/hashicorp-forge/hermes/internal/indexer" + "github.com/hashicorp-forge/hermes/pkg/algolia" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" +) + +type Command struct { + *base.Command + + flagConfig string + 
flagDryRun bool +} + +func (c *Command) Synopsis() string { + return "Run the indexer" +} + +func (c *Command) Help() string { + return `Usage: hermes indexer + +This command runs the indexer. + +The indexer is a background process that indexes documents in Algolia, refreshes +document headers, sends notifications, etc.` + c.Flags().Help() +} + +func (c *Command) Flags() *base.FlagSet { + f := base.NewFlagSet(flag.NewFlagSet("indexer", flag.ExitOnError)) + + f.StringVar( + &c.flagConfig, "config", "", "Path to config file", + ) + f.BoolVar( + &c.flagDryRun, "dry-run", false, + "Print document data instead of indexing", + ) + return f +} + +func (c *Command) Run(args []string) int { + log, ui := c.Log, c.UI + + // Parse flags. + f := c.Flags() + if err := f.Parse(args); err != nil { + ui.Error(fmt.Sprintf("error parsing flags: %v", err)) + return 1 + } + + // Validate flags. + if err := validation.ValidateStruct(c, + validation.Field( + &c.flagConfig, + validation.Required.Error("config argument is required")), + ); err != nil { + // Remove the field name from the error string. + errStr := strings.SplitAfter(err.Error(), ": ")[1] + ui.Error("error parsing flags: " + errStr) + return 1 + } + + // Parse configuration file. + cfg, err := config.NewConfig(c.flagConfig) + if err != nil { + ui.Error(fmt.Sprintf("error parsing configuration file: %v", err)) + return 1 + } + + // Initialize database connection. + db, err := db.NewDB(*cfg.Postgres) + if err != nil { + ui.Error(fmt.Sprintf("error initializing database: %v", err)) + return 1 + } + + // Initialize Algolia client. + var algo *algolia.Client + algo, err = algolia.New(cfg.Algolia) + if err != nil { + c.UI.Error(fmt.Sprintf("error initializing Algolia: %v", err)) + return 1 + } + + // Initialize Google Workspace service. + var goog *gw.Service + if cfg.GoogleWorkspace.Auth != nil { + // Use Google Workspace auth if it is defined in the config. 
+ goog = gw.NewFromConfig(cfg.GoogleWorkspace.Auth) + } else { + // Use OAuth if Google Workspace auth is not defined in the config. + goog = gw.New() + } + + idxOpts := []indexer.IndexerOption{ + indexer.WithAlgoliaClient(algo), + indexer.WithBaseURL(cfg.BaseURL), + indexer.WithDatabase(db), + indexer.WithDocumentsFolderID(cfg.GoogleWorkspace.DocsFolder), + indexer.WithDraftsFolderID(cfg.GoogleWorkspace.DraftsFolder), + indexer.WithGoogleWorkspaceService(goog), + indexer.WithLogger(log), + } + if cfg.Indexer.MaxParallelDocs != 0 { + idxOpts = append(idxOpts, + indexer.WithMaxParallelDocuments(cfg.Indexer.MaxParallelDocs)) + } + if cfg.Indexer.UpdateDocHeaders { + idxOpts = append(idxOpts, + indexer.WithUpdateDocumentHeaders(true)) + } + if cfg.Indexer.UpdateDraftHeaders { + idxOpts = append(idxOpts, + indexer.WithUpdateDraftHeaders(true)) + } + idx, err := indexer.NewIndexer(idxOpts...) + if err != nil { + ui.Error(fmt.Sprintf("error creating indexer: %v", err)) + return 1 + } + + go func() int { + if err := idx.Run(); err != nil { + ui.Error(err.Error()) + // TODO: get this return value from indexer.Run(). 
+ return 1 + } + return 0 + }() + return c.WaitForInterrupt(func() {}) +} diff --git a/internal/cmd/commands/server/server.go b/internal/cmd/commands/server/server.go new file mode 100644 index 000000000..e85613f63 --- /dev/null +++ b/internal/cmd/commands/server/server.go @@ -0,0 +1,448 @@ +package server + +import ( + "context" + "encoding/json" + "flag" + "fmt" + "net/http" + "os" + "strings" + "time" + + "github.com/hashicorp-forge/hermes/internal/api" + "github.com/hashicorp-forge/hermes/internal/auth" + "github.com/hashicorp-forge/hermes/internal/cmd/base" + "github.com/hashicorp-forge/hermes/internal/config" + "github.com/hashicorp-forge/hermes/internal/db" + "github.com/hashicorp-forge/hermes/internal/pkg/doctypes" + "github.com/hashicorp-forge/hermes/internal/pub" + "github.com/hashicorp-forge/hermes/internal/structs" + "github.com/hashicorp-forge/hermes/pkg/algolia" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs" + "github.com/hashicorp-forge/hermes/pkg/links" + "github.com/hashicorp-forge/hermes/pkg/models" + "github.com/hashicorp-forge/hermes/web" + "gorm.io/gorm" +) + +type Command struct { + *base.Command + + flagAddr string + flagBaseURL string + flagConfig string + flagOktaAuthServerURL string + flagOktaClientID string + flagOktaDisabled bool +} + +type endpoint struct { + pattern string + handler http.Handler +} + +func (c *Command) Synopsis() string { + return "Run the server" +} + +func (c *Command) Help() string { + return `Usage: hermes server + + This command runs the Hermes web server.` + c.Flags().Help() +} + +func (c *Command) Flags() *base.FlagSet { + f := base.NewFlagSet(flag.NewFlagSet("server", flag.ExitOnError)) + + f.StringVar( + &c.flagAddr, "addr", "127.0.0.1:8000", + "[HERMES_SERVER_ADDR] Address to bind to for listening.", + ) + f.StringVar( + &c.flagBaseURL, "base-url", "http://localhost:8000", + "[HERMES_BASE_URL] Base URL used for building links.", + ) + 
f.StringVar( + &c.flagConfig, "config", "", "Path to Hermes config file", + ) + f.StringVar( + &c.flagOktaAuthServerURL, "okta-auth-server-url", "", + "[HERMES_SERVER_OKTA_AUTH_SERVER_URL] URL to the Okta authorization server.", + ) + f.StringVar( + &c.flagOktaClientID, "okta-client-id", "", + "[HERMES_SERVER_OKTA_CLIENT_ID] Okta client ID.", + ) + f.BoolVar( + &c.flagOktaDisabled, "okta-disabled", false, + "[HERMES_SERVER_OKTA_DISABLED] Disable Okta authorization.", + ) + + return f +} + +func (c *Command) Run(args []string) int { + f := c.Flags() + if err := f.Parse(args); err != nil { + c.UI.Error(fmt.Sprintf("error parsing flags: %v", err)) + return 1 + } + + var ( + cfg *config.Config + err error + ) + if c.flagConfig != "" { + cfg, err = config.NewConfig(c.flagConfig) + if err != nil { + c.UI.Error(fmt.Sprintf("error parsing config file: %v: config=%q", + err, c.flagConfig)) + return 1 + } + } + + // Get configuration from environment variables if not set on the command + // line. + // TODO: make this section more DRY and add tests. 
+ if val, ok := os.LookupEnv("HERMES_SERVER_ADDR"); ok { + cfg.Server.Addr = val + } + if c.flagAddr != f.Lookup("addr").DefValue { + cfg.Server.Addr = c.flagAddr + } + if val, ok := os.LookupEnv("HERMES_BASE_URL"); ok { + cfg.BaseURL = val + } + if c.flagBaseURL != f.Lookup("base-url").DefValue { + cfg.BaseURL = c.flagBaseURL + } + if val, ok := os.LookupEnv("HERMES_SERVER_OKTA_AUTH_SERVER_URL"); ok { + cfg.Okta.AuthServerURL = val + } + if c.flagOktaAuthServerURL != f.Lookup("okta-auth-server-url").DefValue { + cfg.Okta.AuthServerURL = c.flagOktaAuthServerURL + } + if val, ok := os.LookupEnv("HERMES_SERVER_OKTA_CLIENT_ID"); ok { + cfg.Okta.ClientID = val + } + if c.flagOktaClientID != f.Lookup("okta-client-id").DefValue { + cfg.Okta.ClientID = c.flagOktaClientID + } + if val, ok := os.LookupEnv("HERMES_SERVER_OKTA_DISABLED"); ok { + if val == "" || val == "false" { + // Keep Okta enabled if the env var value is an empty string or "false". + } else { + cfg.Okta.Disabled = true + } + } + if c.flagOktaDisabled { + cfg.Okta.Disabled = true + } + + // Validate feature flags defined in configuration + if cfg.FeatureFlags != nil { + err := config.ValidateFeatureFlags(cfg.FeatureFlags.FeatureFlag) + if err != nil { + c.UI.Error(fmt.Sprintf("error initializing server: %v", err)) + return 1 + } + } + + // Validate other configuration. + if cfg.Email != nil && cfg.Email.Enabled { + if cfg.Email.FromAddress == "" { + c.UI.Error("email from_address must be set if email is enabled") + return 1 + } + } + + // Build configuration for Okta authentication. + if !cfg.Okta.Disabled { + // Check for required Okta configuration. + if cfg.Okta.AuthServerURL == "" { + c.UI.Error("error initializing server: Okta authorization server URL is required") + return 1 + } + if cfg.Okta.ClientID == "" { + c.UI.Error("error initializing server: Okta client ID is required") + return 1 + } + } + + // Initialize Google Workspace service. 
+ var goog *gw.Service + if cfg.GoogleWorkspace.Auth != nil { + // Use Google Workspace auth if it is defined in the config. + goog = gw.NewFromConfig(cfg.GoogleWorkspace.Auth) + } else { + // Use OAuth if Google Workspace auth is not defined in the config. + goog = gw.New() + } + + reqOpts := map[interface{}]string{ + cfg.Algolia.ApplicationID: "Algolia Application ID is required", + cfg.Algolia.SearchAPIKey: "Algolia Search API Key is required", + cfg.BaseURL: "Base URL is required", + cfg.GoogleWorkspace.DocsFolder: "Google Workspace Docs Folder is required", + cfg.GoogleWorkspace.DraftsFolder: "Google Workspace Drafts Folder is required", + cfg.GoogleWorkspace.ShortcutsFolder: "Google Workspace Shortcuts Folder is required", + } + for r, msg := range reqOpts { + if r == "" { + c.UI.Error(fmt.Sprintf("error initializing server: %s", msg)) + return 1 + } + } + + // Initialize Algolia search client. + algoSearch, err := algolia.NewSearchClient(cfg.Algolia) + if err != nil { + c.UI.Error(fmt.Sprintf("error initializing Algolia search client: %v", err)) + return 1 + } + + // Initialize Algolia write client. + algoWrite, err := algolia.New(cfg.Algolia) + if err != nil { + c.UI.Error(fmt.Sprintf("error initializing Algolia write client: %v", err)) + return 1 + } + + // Initialize database. + db, err := db.NewDB(*cfg.Postgres) + if err != nil { + c.UI.Error(fmt.Sprintf("error initializing database: %v", err)) + return 1 + } + + // Register document types. + // for _, d := range cfg.DocumentTypes.DocumentType { + // if err := models.RegisterDocumentType(*d, db); err != nil { + // c.UI.Error(fmt.Sprintf("error registering document type: %v", err)) + // return 1 + // } + // } + if err := registerDocumentTypes(*cfg, db); err != nil { + c.UI.Error(fmt.Sprintf("error registering document types: %v", err)) + return 1 + } + + // Register products. 
+ if err := registerProducts(cfg, algoWrite, db); err != nil { + c.UI.Error(fmt.Sprintf("error registering products: %v", err)) + return 1 + } + + // Register document types. + // TODO: remove this and use the database for all document type lookups. + docTypes := map[string]hcd.Doc{ + "frd": &hcd.FRD{}, + "prd": &hcd.PRD{}, + "rfc": &hcd.RFC{}, + } + for name, dt := range docTypes { + if err = doctypes.Register(name, dt); err != nil { + c.UI.Error(fmt.Sprintf("error registering %q doc type: %v", name, err)) + return 1 + } + } + + mux := http.NewServeMux() + + // Define handlers for authenticated endpoints. + // TODO: stop passing around all these arguments to handlers and use a struct + // with (functional) options. + authenticatedEndpoints := []endpoint{ + {"/1/indexes/", + algolia.AlgoliaProxyHandler(algoSearch, cfg.Algolia, c.Log)}, + {"/api/v1/approvals/", + api.ApprovalHandler(cfg, c.Log, algoSearch, algoWrite, goog)}, + {"/api/v1/document-types", api.DocumentTypesHandler(*cfg, c.Log)}, + {"/api/v1/documents/", + api.DocumentHandler(cfg, c.Log, algoSearch, algoWrite, goog, db)}, + {"/api/v1/drafts", + api.DraftsHandler(cfg, c.Log, algoSearch, algoWrite, goog, db)}, + {"/api/v1/drafts/", + api.DraftsDocumentHandler(cfg, c.Log, algoSearch, algoWrite, goog)}, + {"/api/v1/me/subscriptions", + api.MeSubscriptionsHandler(cfg, c.Log, goog, db)}, + {"/api/v1/people", api.PeopleDataHandler(cfg, c.Log, goog)}, + {"/api/v1/products", api.ProductsHandler(cfg, algoSearch, c.Log)}, + {"/api/v1/reviews/", + api.ReviewHandler(cfg, c.Log, algoSearch, algoWrite, goog, db)}, + {"/api/v1/web/analytics", api.AnalyticsHandler(c.Log)}, + } + + // Define handlers for unauthenticated endpoints. + unauthenticatedEndpoints := []endpoint{ + {"/health", healthHandler()}, + {"/pub/", http.StripPrefix("/pub/", pub.Handler())}, + } + + // Web endpoints are conditionally authenticated based on if Okta is enabled. 
+ webEndpoints := []endpoint{ + {"/", web.Handler()}, + {"/api/v1/web/config", web.ConfigHandler(cfg, algoSearch, c.Log)}, + {"/l/", links.RedirectHandler(algoSearch, cfg.Algolia, c.Log)}, + } + + // If Okta is enabled, add the web endpoints for the single page app as + // authenticated endpoints. + if cfg.Okta != nil && !cfg.Okta.Disabled { + authenticatedEndpoints = append(authenticatedEndpoints, webEndpoints...) + } else { + // If Okta is disabled, we need to add the web endpoints for the SPA as + // unauthenticated endpoints so the application will load. + unauthenticatedEndpoints = append(unauthenticatedEndpoints, webEndpoints...) + } + + // Register handlers. + for _, e := range authenticatedEndpoints { + mux.Handle( + e.pattern, + auth.AuthenticateRequest(*cfg, goog, c.Log, e.handler), + ) + } + for _, e := range unauthenticatedEndpoints { + mux.Handle(e.pattern, e.handler) + } + + server := &http.Server{ + Addr: cfg.Server.Addr, + Handler: mux, + } + go func() { + c.Log.Info(fmt.Sprintf("listening on %s...", cfg.Server.Addr)) + + if err := server.ListenAndServe(); err != http.ErrServerClosed { + c.Log.Error(fmt.Sprintf("error starting listener: %v", err)) + os.Exit(1) + } + }() + + return c.WaitForInterrupt(c.ShutdownServer(server)) +} + +// healthHandler responds with the health of the service. +func healthHandler() http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(200) + w.Write([]byte("OK")) + }) +} + +// ShutdownServer gracefully shuts down the HTTP server. +func (c *Command) ShutdownServer(s *http.Server) func() { + return func() { + c.Log.Debug("shutting down HTTP server...") + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + if err := s.Shutdown(ctx); err != nil { + c.Log.Error(fmt.Sprintf("error shutting down HTTP server: %v", err)) + } + } +} + +// registerDocumentTypes registers all products configured in the application +// config in the database. 
+func registerDocumentTypes(cfg config.Config, db *gorm.DB) error { + for _, d := range cfg.DocumentTypes.DocumentType { + // Marshal Checks to JSON. + checksJSON, err := json.Marshal(d.Checks) + if err != nil { + return fmt.Errorf("error marshaling checks to JSON: %w", err) + } + + // Convert custom fields to model's version. + var cfs []models.DocumentTypeCustomField + for _, c := range d.CustomFields { + cf := models.DocumentTypeCustomField{ + Name: c.Name, + ReadOnly: c.ReadOnly, + } + + // Convert custom field type. + t := strings.ToLower(c.Type) + switch t { + case "string": + cf.Type = models.DocumentTypeCustomFieldType( + models.StringDocumentTypeCustomFieldType) + case "person": + cf.Type = models.DocumentTypeCustomFieldType( + models.PersonDocumentTypeCustomFieldType) + case "people": + cf.Type = models.DocumentTypeCustomFieldType( + models.PeopleDocumentTypeCustomFieldType) + case "": + return fmt.Errorf("missing document type custom field") + default: + return fmt.Errorf("invalid document type custom field: %s", t) + } + + cfs = append(cfs, cf) + } + + dt := models.DocumentType{ + Name: d.Name, + LongName: d.LongName, + Description: d.Description, + Checks: checksJSON, + CustomFields: cfs, + } + + if d.MoreInfoLink != nil { + dt.MoreInfoLinkText = d.MoreInfoLink.Text + dt.MoreInfoLinkURL = d.MoreInfoLink.URL + } + + // Upsert document type. + if err := dt.Upsert(db); err != nil { + return fmt.Errorf("error upserting document type: %w", err) + } + } + + return nil +} + +// registerProducts registers all products configured in the application config +// in the database and Algolia. +// TODO: products are currently needed in Algolia for legacy reasons - remove +// this when possible. 
+func registerProducts( + cfg *config.Config, algo *algolia.Client, db *gorm.DB) error { + + productsObj := structs.Products{ + ObjectID: "products", + Data: make(map[string]structs.ProductData, 0), + } + + for _, p := range cfg.Products.Product { + // Upsert product in database. + pm := models.Product{ + Name: p.Name, + Abbreviation: p.Abbreviation, + } + if err := pm.Upsert(db); err != nil { + return fmt.Errorf("error upserting product: %w", err) + } + + // Add product to Algolia products object. + productsObj.Data[p.Name] = structs.ProductData{ + Abbreviation: p.Abbreviation, + } + } + + // Save Algolia products object. + res, err := algo.Internal.SaveObject(&productsObj) + if err != nil { + return fmt.Errorf("error saving Algolia products object: %w", err) + } + err = res.Wait() + if err != nil { + return fmt.Errorf("error saving Algolia products object: %w", err) + } + + return nil +} diff --git a/internal/cmd/commands/version/version.go b/internal/cmd/commands/version/version.go new file mode 100644 index 000000000..a8f5b1e78 --- /dev/null +++ b/internal/cmd/commands/version/version.go @@ -0,0 +1,26 @@ +package version + +import ( + "github.com/hashicorp-forge/hermes/internal/cmd/base" + "github.com/hashicorp-forge/hermes/internal/version" +) + +type Command struct { + *base.Command +} + +func (c *Command) Synopsis() string { + return "Print the version of the binary" +} + +func (c *Command) Help() string { + return `Usage: hermes version + + This command prints the version of the binary.` +} + +func (c *Command) Run(args []string) int { + c.UI.Output(version.Version) + + return 0 +} diff --git a/internal/cmd/commands/version/version_test.go b/internal/cmd/commands/version/version_test.go new file mode 100644 index 000000000..3a44da59f --- /dev/null +++ b/internal/cmd/commands/version/version_test.go @@ -0,0 +1,29 @@ +package version + +import ( + "regexp" + "testing" + + "github.com/hashicorp/go-hclog" + "github.com/mitchellh/cli" + + 
"github.com/hashicorp-forge/hermes/internal/cmd/base" +) + +func TestVersion(t *testing.T) { + log := hclog.NewNullLogger() + ui := cli.NewMockUi() + c := &Command{ + Command: base.NewCommand(log, ui), + } + + args := []string{} + if code := c.Run(args); code != 0 { + t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) + } + + output := ui.OutputWriter.String() + if matched, _ := regexp.MatchString(`^\d\.\d\.\d\n$`, output); !matched { + t.Fatalf("output is not a valid version: %s", output) + } +} diff --git a/internal/cmd/main.go b/internal/cmd/main.go new file mode 100644 index 000000000..fbe0adc1e --- /dev/null +++ b/internal/cmd/main.go @@ -0,0 +1,48 @@ +package cmd + +import ( + "bufio" + "os" + + "github.com/hashicorp-forge/hermes/internal/version" + "github.com/hashicorp/go-hclog" + "github.com/mitchellh/cli" +) + +// Main runs the CLI with the given arguments and returns the exit code. +func Main(args []string) int { + cliName := args[0] + + log := hclog.New(&hclog.LoggerOptions{ + Name: cliName, + }) + + if len(args) == 2 && + (args[1] == "-version" || + args[1] == "-v") { + args = []string{cliName, "version"} + } + + ui := &cli.BasicUi{ + Reader: bufio.NewReader(os.Stdin), + Writer: os.Stdout, + ErrorWriter: os.Stderr, + } + + initCommands(log, ui) + + c := &cli.CLI{ + Name: cliName, + Args: args[1:], + Version: version.Version, + Commands: Commands, + } + + // Run the CLI + exitCode, err := c.Run() + if err != nil { + panic(err) + } + + return exitCode +} diff --git a/internal/config/config.go b/internal/config/config.go new file mode 100644 index 000000000..0fa3d2dfe --- /dev/null +++ b/internal/config/config.go @@ -0,0 +1,260 @@ +package config + +import ( + "fmt" + + "github.com/hashicorp-forge/hermes/internal/auth/oktaalb" + "github.com/hashicorp-forge/hermes/pkg/algolia" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + "github.com/hashicorp/hcl/v2/hclsimple" +) + +// Config contains the Hermes configuration. 
+type Config struct { + // Algolia configures Hermes to work with Algolia. + Algolia *algolia.Config `hcl:"algolia,block"` + + // BaseURL is the base URL used for building links. + BaseURL string `hcl:"base_url,optional"` + + // DocumentTypes contain available document types. + DocumentTypes *DocumentTypes `hcl:"document_types,block"` + + // Email configures Hermes to send email notifications. + Email *Email `hcl:"email,block"` + + // FeatureFlags contain available feature flags. + FeatureFlags *FeatureFlags `hcl:"feature_flags,block"` + + // GoogleWorkspace configures Hermes to work with Google Workspace. + GoogleWorkspace *GoogleWorkspace `hcl:"google_workspace,block"` + + // Indexer contains the configuration for the Hermes indexer. + Indexer *Indexer `hcl:"indexer,block"` + + // Okta configures Hermes to work with Okta. + Okta *oktaalb.Config `hcl:"okta,block"` + + // Products contain available products. + Products *Products `hcl:"products,block"` + + // Postgres configures PostgreSQL as the app database. + Postgres *Postgres `hcl:"postgres,block"` + + // Server contains the configuration for the Hermes server. + Server *Server `hcl:"server,block"` + + // Shortener base URL for building short links. + ShortenerBaseURL string `hcl:"shortener_base_url,optional"` +} + +// DocumentTypes contain available document types. +type DocumentTypes struct { + // DocumentType defines a document type. + DocumentType []*DocumentType `hcl:"document_type,block"` +} + +// DocumentType is a document type (e.g., "RFC", "PRD"). +type DocumentType struct { + // Name is the name of the document type, which is generally an abbreviation. + // Example: "RFC" + Name string `hcl:"name,label" json:"name"` + + // LongName is the longer name for the document type. + // Example: "Request for Comments" + LongName string `hcl:"long_name,optional" json:"longName"` + + // Description is the description of the document type. 
+ // Example: "Create a Request for Comments document to present a proposal to + // colleagues for their review and feedback." + Description string `hcl:"description,optional" json:"description"` + + // Template is the Google file ID for the document template used for this + // document type. + Template string `hcl:"template"` + + // MoreInfoLink defines a link to more info for the document type. + // Example: "When should I create an RFC?" + MoreInfoLink *DocumentTypeLink `hcl:"more_info_link,block" json:"moreInfoLink"` + + // Checks are document type checks, which require acknowledging a check box + // in order to publish a document. + Checks []*DocumentTypeCheck `hcl:"check,block" json:"checks"` + + // CustomFields are custom fields specific to the document type. + CustomFields []*DocumentTypeCustomField `hcl:"custom_field,block" json:"customFields"` +} + +// DocumentTypeCheck is a document type check, which require acknowledging a +// check box in order to publish a document. +type DocumentTypeCheck struct { + // Label is the document type check label. + Label string `hcl:"label" json:"label"` + + // HelperText contains more details for the document type check. + HelperText string `hcl:"helper_text,optional" json:"helperText"` + + // Links contain document type check links. + Links []*DocumentTypeLink `hcl:"link,block" json:"links"` +} + +type DocumentTypeCustomField struct { + // Name is the name of the custom field. This is used as the custom field + // identifier. + Name string `hcl:"name" json:"name"` + + // ReadOnly is true if the custom field can only be read. + ReadOnly bool `hcl:"read_only,optional" json:"readOnly"` + + // Type is the type of custom field. Valid values are "people", "person", and + // "string". + Type string `hcl:"type" json:"type"` +} + +// DocumentTypeLink is a document type link. +type DocumentTypeLink struct { + // Text is the displayed text for a document type link. 
+ Text string `hcl:"text" json:"text"` + + // URL is the URL that the document type link links to. + URL string `hcl:"url" json:"url"` +} + +// Email configures Hermes to send email notifications. +type Email struct { + // Enabled enables sending email notifications. + Enabled bool `hcl:"enabled,optional"` + + // FromAddress is the email address to send emails from. + FromAddress string `hcl:"from_address,optional"` +} + +// FeatureFlags contain available feature flags. +type FeatureFlags struct { + // FeatureFlag defines a feature flag in Hermes. + FeatureFlag []*FeatureFlag `hcl:"flag,block"` +} + +type FeatureFlag struct { + // Name is the name of the feature flag + Name string `hcl:"name,label"` + // Enabled enables the feature flag. + // Since the default value of uninitialized bool is false, + // *bool is used to check whether Enabled is set or not. + Enabled *bool `hcl:"enabled,optional"` + // Percentage defines the percentage of users that will have + // the feature flag enabled. + Percentage int `hcl:"percentage,optional"` +} + +// Indexer contains the configuration for the Hermes indexer. +type Indexer struct { + // MaxParallelDocs is the maximum number of documents that will be + // simultaneously indexed. + MaxParallelDocs int `hcl:"max_parallel_docs,optional"` + + // UpdateDocHeaders enables the indexer to automatically update document + // headers for Hermes-managed documents with Hermes document metadata. + UpdateDocHeaders bool `hcl:"update_doc_headers,optional"` + + // UpdateDraftHeaders enables the indexer to automatically update document + // headers for draft documents with Hermes document metadata. + UpdateDraftHeaders bool `hcl:"update_draft_headers,optional"` +} + +// GoogleWorkspace is the configuration to work with Google Workspace. +type GoogleWorkspace struct { + // Auth contains the authentication configuration for Google Workspace. 
+ Auth *gw.Config `hcl:"auth,block"` + + // CreateDocShortcuts enables creating a shortcut in the appropriate (per doc + // type and product) Shared Drive folder when a document is published. + CreateDocShortcuts bool `hcl:"create_doc_shortcuts,optional"` + + // DocsFolder is the folder that contains all published documents. + DocsFolder string `hcl:"docs_folder"` + + // DraftsFolder is the folder that contains all document drafts. + DraftsFolder string `hcl:"drafts_folder"` + + // OAuth2 is the configuration to use OAuth 2.0 to access Google Workspace + // APIs. + OAuth2 *GoogleWorkspaceOAuth2 `hcl:"oauth2,block"` + + // ShortcutsFolder is the folder that contains document shortcuts organized + // into doc type and product subfolders. + ShortcutsFolder string `hcl:"shortcuts_folder"` +} + +// GoogleWorkspaceOAuth2 is the configuration to use OAuth 2.0 to access Google +// Workspace APIs. +type GoogleWorkspaceOAuth2 struct { + // ClientID is the client ID obtained from the Google API Console Credentials + // page. + ClientID string `hcl:"client_id,optional"` + + // HD is the allowed domain associated with the authenticating user. + HD string `hcl:"hd,optional"` + + // RedirectURI is an authorized redirect URI for the given client_id as + // specified in the Google API Console Credentials page. + RedirectURI string `hcl:"redirect_uri,optional"` +} + +// Postgres configures PostgreSQL as the app database. +type Postgres struct { + // Host is the database name. + DBName string `hcl:"dbname"` + + // Host is the name of host to connect to. + Host string `hcl:"host"` + + // Password is the password to be used. + Password string `hcl:"password"` + + // Port is the port number to connect to at the server host. + Port int `hcl:"port"` + + // Host is the PostgreSQL user name to connect as. + User string `hcl:"user"` +} + +// Products contain available products. +type Products struct { + // Product defines a product. 
+ Product []*Product `hcl:"product,block"` +} + +// Product is a product/area. +type Product struct { + // Name is the name of the product. + Name string `hcl:"name,label" json:"name"` + + // Abbreviation is the abbreviation (usually a few uppercase letters). + Abbreviation string `hcl:"abbreviation" json:"abbreviation"` +} + +// Server contains the configuration for the Hermes server. +type Server struct { + // Addr is the address to bind to for listening. + Addr string `hcl:"addr,optional"` +} + +// NewConfig parses an HCL configuration file and returns the Hermes config. +func NewConfig(filename string) (*Config, error) { + c := &Config{ + Algolia: &algolia.Config{}, + Email: &Email{}, + FeatureFlags: &FeatureFlags{}, + GoogleWorkspace: &GoogleWorkspace{}, + Indexer: &Indexer{}, + Okta: &oktaalb.Config{}, + Server: &Server{}, + } + err := hclsimple.DecodeFile(filename, nil, c) + if err != nil { + return nil, fmt.Errorf("failed to load configuration: %w", err) + } + + return c, nil +} diff --git a/internal/config/helpers.go b/internal/config/helpers.go new file mode 100644 index 000000000..ffbcd09d1 --- /dev/null +++ b/internal/config/helpers.go @@ -0,0 +1,21 @@ +package config + +import ( + "fmt" +) + +// ValidateFeatureFlags validates the feature flags defined in the config. 
+func ValidateFeatureFlags(flags []*FeatureFlag) error { + for _, f := range flags { + if f.Name == "" { + return fmt.Errorf("feature flag 'name' cannot be empty") + } + if f.Enabled != nil && f.Percentage > 0 { + return fmt.Errorf("invalid definition of feature flag %q: only one of 'enabled' or 'percentage' parameter can be set", f.Name) + } + if f.Enabled == nil && f.Percentage == 0 { + return fmt.Errorf("invalid definition of feature flag %q: at least one of 'enabled' or a non-zero value for 'percentage' parameter should be set", f.Name) + } + } + return nil +} diff --git a/internal/db/db.go b/internal/db/db.go new file mode 100644 index 000000000..2c90d3bf2 --- /dev/null +++ b/internal/db/db.go @@ -0,0 +1,65 @@ +package db + +import ( + "fmt" + + "github.com/hashicorp-forge/hermes/internal/config" + "github.com/hashicorp-forge/hermes/pkg/models" + "gorm.io/driver/postgres" + "gorm.io/gorm" +) + +// NewDB returns a new migrated database. +func NewDB(cfg config.Postgres) (*gorm.DB, error) { + // TODO: validate config. + dsn := fmt.Sprintf("host=%s user=%s password=%s dbname=%s port=%d", + cfg.Host, + cfg.User, + cfg.Password, + cfg.DBName, + cfg.Port, + ) + + db, err := gorm.Open(postgres.Open(dsn)) + if err != nil { + return nil, fmt.Errorf("error connecting to database: %w", err) + } + + // Enable citext extension. 
+ sqlDB, err := db.DB() + if err != nil { + return nil, fmt.Errorf("error getting sql.DB: %w", err) + } + _, err = sqlDB.Exec("CREATE EXTENSION IF NOT EXISTS citext;") + if err != nil { + return nil, fmt.Errorf("error enabling citext extension: %w", err) + } + + if err := db.SetupJoinTable( + models.Document{}, + "Approvers", + &models.DocumentReview{}, + ); err != nil { + return nil, fmt.Errorf( + "error setting up DocumentReviews join table: %w", err) + } + + if err := db.SetupJoinTable( + models.User{}, + "RecentlyViewedDocs", + &models.RecentlyViewedDoc{}, + ); err != nil { + return nil, fmt.Errorf( + "error setting up RecentlyViewedDocs join table: %w", err) + } + + // Automatically migrate models. + // TODO: move to manually migrating models with a separate command. + if err := db.AutoMigrate( + models.ModelsToAutoMigrate()..., + ); err != nil { + return nil, fmt.Errorf("error migrating database: %w", err) + } + + return db, nil +} diff --git a/internal/email/email.go b/internal/email/email.go new file mode 100644 index 000000000..c35904527 --- /dev/null +++ b/internal/email/email.go @@ -0,0 +1,114 @@ +package email + +import ( + "bytes" + "embed" + "fmt" + "text/template" + "time" + + validation "github.com/go-ozzo/ozzo-validation/v4" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" +) + +//go:embed templates/* +var tmplFS embed.FS + +type ReviewRequestedEmailData struct { + BaseURL string + CurrentYear int + DocumentOwner string + DocumentShortName string + DocumentTitle string + DocumentURL string +} + +type SubscriberDocumentPublishedEmailData struct { + BaseURL string + CurrentYear int + DocumentOwner string + DocumentShortName string + DocumentTitle string + DocumentURL string + Product string +} + +func SendReviewRequestedEmail( + d ReviewRequestedEmailData, + to []string, + from string, + s *gw.Service, +) error { + // Validate data. 
+ if err := validation.ValidateStruct(&d, + validation.Field(&d.BaseURL, validation.Required), + validation.Field(&d.DocumentOwner, validation.Required), + validation.Field(&d.DocumentTitle, validation.Required), + validation.Field(&d.DocumentURL, validation.Required), + ); err != nil { + return fmt.Errorf("error validating email data: %w", err) + } + + var body bytes.Buffer + tmpl, err := template.ParseFS(tmplFS, "templates/review-requested.html") + if err != nil { + return fmt.Errorf("error parsing template: %w", err) + } + + // Set current year. + d.CurrentYear = time.Now().Year() + + if err := tmpl.Execute(&body, d); err != nil { + return fmt.Errorf("error executing template: %w", err) + } + + _, err = s.SendEmail( + to, + from, + fmt.Sprintf("Document review requested for %s", d.DocumentShortName), + body.String(), + ) + return err +} + +func SendSubscriberDocumentPublishedEmail( + d SubscriberDocumentPublishedEmailData, + to []string, + from string, + s *gw.Service, +) error { + // Validate data. + if err := validation.ValidateStruct(&d, + validation.Field(&d.BaseURL, validation.Required), + validation.Field(&d.DocumentOwner, validation.Required), + validation.Field(&d.DocumentTitle, validation.Required), + validation.Field(&d.DocumentURL, validation.Required), + validation.Field(&d.Product, validation.Required), + ); err != nil { + return fmt.Errorf("error validating email data: %w", err) + } + + var body bytes.Buffer + tmpl, err := template.ParseFS( + tmplFS, "templates/subscriber-document-published.html") + if err != nil { + return fmt.Errorf("error parsing template: %w", err) + } + + // Set current year. 
+ d.CurrentYear = time.Now().Year() + + if err := tmpl.Execute(&body, d); err != nil { + return fmt.Errorf("error executing template: %w", err) + } + + _, err = s.SendEmail( + to, + from, + fmt.Sprintf("New document published for %s: %s", + d.Product, + d.DocumentShortName), + body.String(), + ) + return err +} diff --git a/internal/email/templates/review-requested.html b/internal/email/templates/review-requested.html new file mode 100644 index 000000000..356b0c7ba --- /dev/null +++ b/internal/email/templates/review-requested.html @@ -0,0 +1,14 @@ + + +

Hi!

+

+ Your review has been requested for a new document, + [{{.DocumentShortName}}] {{.DocumentTitle}}, by {{.DocumentOwner}}. +

+

+ Cheers,
+ Hermes +

+ + diff --git a/internal/email/templates/subscriber-document-published.html b/internal/email/templates/subscriber-document-published.html new file mode 100644 index 000000000..69b58ff6c --- /dev/null +++ b/internal/email/templates/subscriber-document-published.html @@ -0,0 +1,14 @@ + + +

Hi!

+

+ A new document has been published for {{.Product}}, + [{{.DocumentShortName}}] {{.DocumentTitle}}, by {{.DocumentOwner}}. +

+

+ Cheers,
+ Hermes +

+ + diff --git a/internal/helpers/helpers.go b/internal/helpers/helpers.go new file mode 100644 index 000000000..5314ac50f --- /dev/null +++ b/internal/helpers/helpers.go @@ -0,0 +1,12 @@ +package helpers + +// StringSliceContains returns true if a string is present in a slice of +// strings. +func StringSliceContains(values []string, s string) bool { + for _, v := range values { + if s == v { + return true + } + } + return false +} diff --git a/internal/indexer/indexer.go b/internal/indexer/indexer.go new file mode 100644 index 000000000..8155395b0 --- /dev/null +++ b/internal/indexer/indexer.go @@ -0,0 +1,416 @@ +package indexer + +import ( + "errors" + "fmt" + "io" + "os" + "time" + + validation "github.com/go-ozzo/ozzo-validation/v4" + "github.com/hashicorp-forge/hermes/pkg/algolia" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs" + "github.com/hashicorp-forge/hermes/pkg/links" + "github.com/hashicorp-forge/hermes/pkg/models" + "github.com/hashicorp/go-hclog" + "gorm.io/gorm" +) + +const ( + // loggerName is the name of the logger. + loggerName = "indexer" + + // maxContentSize is the maximum size of a document's content in bytes. If the + // content is larger than this, it will be trimmed to this length. + // Note: Algolia currently has a hard limit of 100000 bytes total per record. + maxContentSize = 85000 +) + +// Indexer contains the indexer configuration. +type Indexer struct { + // AlgoliaClient is the Algolia client. + AlgoliaClient *algolia.Client + + // BaseURL is the base URL for the application. + BaseURL string + + // Database is the database connection. + Database *gorm.DB + + // DocumentsFolderID is the Google Drive ID of the folder containing published + // documents to index. + DocumentsFolderID string + + // DraftsFolderID is the Google Drive ID of the folder containing draft + // documents to index. 
+ DraftsFolderID string + + // GoogleWorkspaceService is the Google Workspace service. + GoogleWorkspaceService *gw.Service + + // Logger is the logger to use. + Logger hclog.Logger + + // MaxParallelDocuments is the maximum number of documents that will be + // simultaneously indexed. + MaxParallelDocuments int + + // UpdateDocumentHeaders updates published document headers, if true. + UpdateDocumentHeaders bool + + // UpdateDraftHeaders updates draft document headers, if true. + UpdateDraftHeaders bool +} + +type IndexerOption func(*Indexer) + +// NewIndexer creates a new indexer. +func NewIndexer(opts ...IndexerOption) (*Indexer, error) { + // Initialize a new indexer with defaults. + idx := &Indexer{ + Logger: hclog.New(&hclog.LoggerOptions{ + Name: loggerName, + }), + } + + // Apply functional options. + for _, opt := range opts { + opt(idx) + } + + // Validate indexer configuration. + if err := idx.validate(); err != nil { + return nil, err + } + + return idx, nil +} + +// validate validates the indexer configuration. +func (idx *Indexer) validate() error { + return validation.ValidateStruct(idx, + validation.Field(&idx.AlgoliaClient, validation.Required), + validation.Field(&idx.BaseURL, validation.Required), + validation.Field(&idx.Database, validation.Required), + validation.Field(&idx.DocumentsFolderID, validation.Required), + validation.Field(&idx.DraftsFolderID, validation.Required), + validation.Field(&idx.GoogleWorkspaceService, validation.Required), + ) +} + +// WithAlgoliaClient sets the Algolia client. +func WithAlgoliaClient(a *algolia.Client) IndexerOption { + return func(i *Indexer) { + i.AlgoliaClient = a + } +} + +// WithBaseURL sets the base URL. +func WithBaseURL(b string) IndexerOption { + return func(i *Indexer) { + i.BaseURL = b + } +} + +// WithDatabase sets the database. +func WithDatabase(db *gorm.DB) IndexerOption { + return func(i *Indexer) { + i.Database = db + } +} + +// WithDocumentsFolderID sets the documents folder ID. 
+func WithDocumentsFolderID(d string) IndexerOption { + return func(i *Indexer) { + i.DocumentsFolderID = d + } +} + +// WithDraftsFolderID sets the drafts folder ID. +func WithDraftsFolderID(d string) IndexerOption { + return func(i *Indexer) { + i.DraftsFolderID = d + } +} + +// WithGoogleWorkspaceService sets the Google Workspace service. +func WithGoogleWorkspaceService(g *gw.Service) IndexerOption { + return func(i *Indexer) { + i.GoogleWorkspaceService = g + } +} + +// WithLogger sets the logger. +func WithLogger(l hclog.Logger) IndexerOption { + return func(i *Indexer) { + i.Logger = l.Named(loggerName) + } +} + +// WithMaxParallelDocuments sets the number of documents (per folder) to index +// in parallel. +func WithMaxParallelDocuments(m int) IndexerOption { + return func(i *Indexer) { + i.MaxParallelDocuments = m + } +} + +// WithUpdateDocumentHeaders sets the boolean to update draft document headers. +func WithUpdateDocumentHeaders(u bool) IndexerOption { + return func(i *Indexer) { + i.UpdateDocumentHeaders = u + } +} + +// WithUpdateDraftHeaders sets the boolean to update draft document headers. +func WithUpdateDraftHeaders(u bool) IndexerOption { + return func(i *Indexer) { + i.UpdateDraftHeaders = u + } +} + +// Run runs the indexer. +// TODO: improve error handling. +func (idx *Indexer) Run() error { + algo := idx.AlgoliaClient + db := idx.Database + gwSvc := idx.GoogleWorkspaceService + log := idx.Logger + + for { + runStartedAt := time.Now().UTC() + runStartedAtStr := runStartedAt.UTC().Format(time.RFC3339Nano) + + // Get indexer metadata. + md := models.IndexerMetadata{} + if err := md.Get(db); err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + // This is the first time that the indexer is being run, so set last + // full index timestamp to the Unix epoch. 
+ md.LastFullIndexAt = time.Unix(0, 0).UTC() + } else { + log.Error("error getting indexer metadata", + "error", err, + ) + os.Exit(1) + } + } + + // Get documents folder data from the database. + docsFolderData := models.IndexerFolder{ + GoogleDriveID: idx.DocumentsFolderID, + } + if err := docsFolderData.Get(db); err != nil && !errors.Is( + err, gorm.ErrRecordNotFound) { + log.Error("error getting documents folder indexer data", + "error", err, + ) + os.Exit(1) + } + + // If the last indexed timestamp doesn't exist, set it to the Unix epoch. + if docsFolderData.LastIndexedAt.IsZero() { + docsFolderData.LastIndexedAt = time.Unix(0, 0).UTC() + } + lastIndexedAtStr := docsFolderData.LastIndexedAt.UTC().Format( + time.RFC3339Nano) + + log.Info("indexing documents folder", + "folder_id", idx.DocumentsFolderID, + "last_indexed_at", lastIndexedAtStr, + ) + + // Get documents that have been updated in the folder since it was last + // indexed. + docFiles, err := gwSvc.GetUpdatedDocsBetween( + idx.DocumentsFolderID, lastIndexedAtStr, runStartedAtStr) + if err != nil { + log.Error("error getting updated document files", + "error", err, + "folder_id", idx.DocumentsFolderID, + "last_indexed_at", lastIndexedAtStr, + "run_started_at", runStartedAtStr, + ) + os.Exit(1) + } + if len(docFiles) == 0 { + log.Info("no new document updates since the last indexed time", + "folder_id", idx.DocumentsFolderID, + "last_indexed_at", lastIndexedAtStr, + ) + } + + for _, file := range docFiles { + logError := func(errMsg string, err error) { + log.Error(errMsg, + "error", err, + "google_file_id", file.Id, + "folder_id", idx.DocumentsFolderID, + "last_indexed_at", lastIndexedAtStr, + ) + } + + log.Info("indexing document", + "google_file_id", file.Id, + "folder_id", idx.DocumentsFolderID, + "last_indexed_at", lastIndexedAtStr, + ) + + // Get document from database. 
+			// NOTE(review): a file present in Drive but missing from the database
+			// aborts the entire indexer run — confirm this is intended.
+			dbDoc := models.Document{
+				GoogleFileID: file.Id,
+			}
+			if err := dbDoc.Get(db); err != nil {
+				logError("error getting document from the database", err)
+				os.Exit(1)
+			}
+
+			// Parse document modified time.
+			modifiedTime, err := time.Parse(time.RFC3339Nano, file.ModifiedTime)
+			if err != nil {
+				logError("error parsing document modified time", err)
+				os.Exit(1)
+			}
+
+			// Set new modified for document record and update in database.
+			dbDoc.DocumentModifiedAt = modifiedTime
+
+			// Update document in database.
+			if err := dbDoc.Upsert(db); err != nil {
+				logError("error upserting document", err)
+				os.Exit(1)
+			}
+
+			// Create new document object of the proper document type.
+			docObj, err := hcd.NewEmptyDoc(dbDoc.DocumentType.Name)
+			if err != nil {
+				logError("error creating new empty document", err)
+				os.Exit(1)
+			}
+
+			// Get document object from Algolia.
+			if err := algo.Docs.GetObject(file.Id, &docObj); err != nil {
+				logError("error retrieving document object from Algolia", err)
+				os.Exit(1)
+			}
+
+			// Get document content.
+			exp, err := gwSvc.Drive.Files.Export(file.Id, "text/plain").Download()
+			if err != nil {
+				logError("error exporting document", err)
+				os.Exit(1)
+			}
+			content, err := io.ReadAll(exp.Body)
+			// Close the response body promptly: deferring inside this loop would
+			// leak connections until Run returns (which it never does in the happy
+			// path).
+			exp.Body.Close()
+			if err != nil {
+				logError("error reading exported document", err)
+				os.Exit(1)
+			}
+			// Trim doc content if it is larger than the maximum size.
+			if len(content) > maxContentSize {
+				content = content[:maxContentSize]
+			}
+
+			// Update document object with content and latest modified time.
+			docObj.SetContent(string(content))
+			docObj.SetModifiedTime(modifiedTime.Unix())
+
+			// Save the document in Algolia.
+			if err := saveDocInAlgolia(docObj, idx.AlgoliaClient); err != nil {
+				return fmt.Errorf("error saving document in Algolia: %w", err)
+			}
+
+			// Update last indexed time for folder if document modified time is later.
+			if modifiedTime.After(docsFolderData.LastIndexedAt) {
+				docsFolderData.LastIndexedAt = modifiedTime
+			}
+
+			log.Info("indexed document",
+				"google_file_id", file.Id,
+				"folder_id", idx.DocumentsFolderID,
+			)
+		}
+
+		// Save last indexed time for the documents folder.
+		if err := docsFolderData.Upsert(db); err != nil {
+			log.Error("error upserting last indexed time for the folder",
+				"error", err,
+				"folder_id", idx.DocumentsFolderID,
+				"last_indexed_at", docsFolderData.LastIndexedAt,
+			)
+		}
+
+		// Update draft document headers, if configured.
+		if idx.UpdateDraftHeaders {
+			log.Info("refreshing draft document headers")
+			if err := refreshDocumentHeaders(
+				*idx,
+				idx.DraftsFolderID,
+				draftsFolderType,
+				md.LastFullIndexAt,
+				runStartedAt,
+			); err != nil {
+				log.Error("error refreshing draft document headers",
+					"error", err,
+				)
+				os.Exit(1)
+			}
+			log.Info("done refreshing draft document headers")
+		}
+
+		// Update published document headers, if configured.
+		if idx.UpdateDocumentHeaders {
+			log.Info("refreshing published document headers")
+			if err := refreshDocumentHeaders(
+				*idx,
+				idx.DocumentsFolderID,
+				documentsFolderType,
+				md.LastFullIndexAt,
+				runStartedAt,
+			); err != nil {
+				log.Error("error refreshing published document headers",
+					"error", err,
+				)
+				os.Exit(1)
+			}
+			log.Info("done refreshing published document headers")
+		}
+
+		// Update the last full index time.
+		// Note: hclog takes key/value pairs, not printf verbs, so the error is
+		// passed as an "error" key rather than via "%w".
+		md.LastFullIndexAt = runStartedAt.UTC()
+		if err := md.Upsert(db); err != nil {
+			log.Error("error upserting metadata with last full index time",
+				"error", err,
+			)
+			os.Exit(1)
+		}
+
+		log.Info("sleeping for a minute before the next indexing run...")
+		// TODO: make sleep time configurable.
+		time.Sleep(1 * time.Minute)
+	}
+}
+
+// saveDocInAlgolia saves a document struct and its redirect details in
+// Algolia.
+func saveDocInAlgolia(
+	doc hcd.Doc,
+	algo *algolia.Client,
+) error {
+	// Save document object.
+	res, err := algo.Docs.SaveObject(doc)
+	if err != nil {
+		return fmt.Errorf("error saving document: %w", err)
+	}
+	err = res.Wait()
+	if err != nil {
+		// Distinct message so a save failure and a wait failure are
+		// distinguishable in logs.
+		return fmt.Errorf("error waiting for document save to complete: %w", err)
+	}
+
+	// Save document redirect details.
+	if doc.GetDocNumber() != "" {
+		err = links.SaveDocumentRedirectDetails(
+			algo, doc.GetObjectID(), doc.GetDocType(), doc.GetDocNumber())
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/internal/indexer/refresh_docs_headers.go b/internal/indexer/refresh_docs_headers.go
new file mode 100644
index 000000000..aed34ff33
--- /dev/null
+++ b/internal/indexer/refresh_docs_headers.go
@@ -0,0 +1,236 @@
+package indexer
+
+/*
+import (
+	"fmt"
+	"os"
+	"strings"
+	"sync"
+	"time"
+
+	hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs"
+	"google.golang.org/api/drive/v3"
+)
+
+// Parse any recently updated published docs for product data and refresh
+// published docs with updated doc headers, if configured.
+func refreshPublishedDocsHeaders(products *products, cfg IndexerConfig) {
+	if !cfg.DryRun {
+		// Get last index record for published docs if it exists.
+		lastIndexed := &lastIndexed{
+			ObjectID:    "docs",
+			LastIndexed: make(map[string]string),
+		}
+		if err := cfg.Algo.Internal.GetObject("docs", &lastIndexed); err != nil {
+			cfg.Log.Error("error getting docs last indexed data", "error", err)
+		}
+		lastIndexedTimeString := lastIndexed.LastIndexed[cfg.AppConfig.GoogleWorkspace.DocsFolder]
+		if lastIndexedTimeString == "" {
+			// If timestamp doesn't exist, set it to the Unix epoch.
+			lastIndexedTimeString = time.Unix(0, 0).UTC().Format(time.RFC3339)
+		}
+
+		cfg.Log.Info("refreshing published doc headers",
+			"folder", cfg.AppConfig.GoogleWorkspace.DocsFolder,
+			"last_indexed_time", lastIndexedTimeString,
+		)
+
+		// untilTimeString is 30 minutes ago in RFC 3339 format. We use this
+		// because we don't want to update the doc headers for files that are
+		// actively being modified by users.
+ untilTimeString := time.Now().Add(time.Duration(-30) * time.Minute).UTC(). + Format(time.RFC3339) + + docs, err := cfg.Goog.GetUpdatedDocsBetween( + cfg.AppConfig.GoogleWorkspace.DocsFolder, + lastIndexedTimeString, untilTimeString) + if err != nil { + cfg.Log.Error( + fmt.Errorf( + "error getting updated published docs in folder: %w", err).Error()) + os.Exit(1) + } + + if len(docs) == 0 { + cfg.Log.Info("no new published doc updates since the last indexed time", + "folder", cfg.AppConfig.GoogleWorkspace.DocsFolder) + } else { + // Create channel and wait group for goroutines to refresh doc headers. + var wg sync.WaitGroup + var ch = make(chan *drive.File, len(docs)) + + // The number of worker goroutines is the lesser of the number of docs or + // maxParallelDocs. + var parallel int + if len(docs) < cfg.MaxParallelDocs { + parallel = len(docs) + } else { + parallel = cfg.MaxParallelDocs + } + wg.Add(parallel) + + // Refresh doc headers in parallel. + for i := 0; i < parallel; i++ { + go func() { + for { + file, ok := <-ch + if !ok { + wg.Done() + return + } + refreshDocHeader( + file, + lastIndexed, + products, + cfg, + ) + } + }() + } + for _, doc := range docs { + ch <- doc + } + close(ch) + + wg.Wait() + } + + // Save LastIndexed with the latest doc timestamp for the folder and + // products data. 
+ lastIndexed.Lock() + res, err := cfg.Algo.Internal.SaveObject(lastIndexed) + if err != nil { + cfg.Log.Error( + fmt.Errorf("error saving last index state: %w", err).Error()) + os.Exit(1) + } + err = res.Wait() + if err != nil { + cfg.Log.Error( + fmt.Errorf("error saving last index state: %w", err).Error()) + os.Exit(1) + } + lastIndexed.Unlock() + + products.Lock() + err = saveProductsData(cfg.Algo, products) + if err != nil { + cfg.Log.Error(fmt.Errorf("error saving products data: %w", err).Error()) + os.Exit(1) + } + products.Unlock() + + cfg.Log.Info("finished refreshing published doc headers", + "folder", cfg.AppConfig.GoogleWorkspace.DocsFolder, + "last_indexed_time", lastIndexed.LastIndexed[cfg.AppConfig.GoogleWorkspace.DocsFolder]) + } +} + +// refreshDocHeader refreshes the header for a published document. +func refreshDocHeader( + file *drive.File, + lastIndexed *lastIndexed, + products *products, + cfg IndexerConfig) { + + // Get base document object from Algolia so we can determine the doc type. + baseDocObj := &hcd.BaseDoc{} + err := cfg.Algo.Docs.GetObject(file.Id, &baseDocObj) + if err != nil { + cfg.Log.Error( + fmt.Errorf( + "error getting base document object from Algolia: %w", err).Error(), + "id", file.Id) + os.Exit(1) + } + + // Create new document object of the proper doc type. + docObj, err := hcd.NewEmptyDoc(baseDocObj.DocType) + if err != nil { + cfg.Log.Error( + fmt.Errorf( + "error creating new empty doc object: %w", err).Error(), + "id", file.Id) + os.Exit(1) + } + + // Get document object from Algolia. + err = cfg.Algo.Docs.GetObject(file.Id, &docObj) + if err != nil { + cfg.Log.Error( + fmt.Errorf( + "error getting document from Algolia: %w", err).Error(), + "id", file.Id) + os.Exit(1) + } + + if cfg.AppConfig.Indexer.UpdateDocHeaders { + // Replace doc header. + docObj.ReplaceHeader(file.Id, cfg.AppConfig.BaseURL, true, cfg.Goog) + + // Re-get file because we just changed it and need the new modified + // time. 
+ file, err = cfg.Goog.GetFile(file.Id) + if err != nil { + cfg.Log.Error( + fmt.Errorf( + "error getting document file from Google: %w", err).Error(), + "id", file.Id) + os.Exit(1) + } + } + + // Parse modified time. + modifiedTime, err := time.Parse(time.RFC3339, file.ModifiedTime) + if err != nil { + cfg.Log.Error( + fmt.Errorf("error parsing modified time: %w", err).Error(), + "id", file.Id) + os.Exit(1) + } + + // If this doc was updated later than the last indexed time, update + // the last indexed time of this folder with this doc's modified + // time. + lastIndexed.Lock() + lastIndexedTimeString := lastIndexed.LastIndexed[cfg.AppConfig.GoogleWorkspace.DocsFolder] + if lastIndexedTimeString == "" { + // If timestamp doesn't exist, set it to the Unix epoch. + lastIndexedTimeString = time.Unix(0, 0).UTC().Format(time.RFC3339) + } + lastIndexedTime, err := time.Parse(time.RFC3339, lastIndexedTimeString) + if err != nil { + cfg.Log.Error( + fmt.Errorf("error parsing last indexed time: %w", err).Error()) + os.Exit(1) + } + if modifiedTime.After(lastIndexedTime) { + lastIndexed.LastIndexed[cfg.AppConfig.GoogleWorkspace.DocsFolder] = file.ModifiedTime + } + lastIndexed.Unlock() + + // Update product data. 
+ products.Lock() + if err := updateProductData( + docObj, + strings.ToLower(docObj.GetDocType()), + "", + products, + cfg.Log); err != nil { + cfg.Log.Error( + fmt.Errorf( + "error updating product data: %w", err).Error(), + "id", file.Id, + ) + os.Exit(1) + } + products.Unlock() + + if cfg.AppConfig.Indexer.UpdateDocHeaders { + cfg.Log.Info("updated published doc header", + "id", file.Id, + "modified_time", file.ModifiedTime, + ) + } +} +*/ diff --git a/internal/indexer/refresh_drafts_headers.go b/internal/indexer/refresh_drafts_headers.go new file mode 100644 index 000000000..34cdc5a8f --- /dev/null +++ b/internal/indexer/refresh_drafts_headers.go @@ -0,0 +1,208 @@ +package indexer + +/* +import ( + "fmt" + "os" + "sync" + "time" + + "google.golang.org/api/drive/v3" + + hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs" +) + +// Parse any recently updated drafts docs for product data and refresh +// drafts docs with updated doc headers, if configured. +func refreshDraftsDocsHeaders(products *products, cfg IndexerConfig) { + if cfg.AppConfig.Indexer.UpdateDraftHeaders && !cfg.DryRun { + // Get last indexed record for drafts if it exists. + lastIndexed := &lastIndexed{ + ObjectID: "drafts", + LastIndexed: make(map[string]string), + } + _ = cfg.Algo.Internal.GetObject("drafts", &lastIndexed) + lastIndexedTimeString := lastIndexed.LastIndexed[cfg.AppConfig.GoogleWorkspace.DraftsFolder] + if lastIndexedTimeString == "" { + // If timestamp doesn't exist, set it to the Unix epoch. + lastIndexedTimeString = time.Unix(0, 0).UTC().Format(time.RFC3339) + } + + cfg.Log.Info("refreshing drafts doc headers", + "folder", cfg.AppConfig.GoogleWorkspace.DraftsFolder, + "last_indexed_time", lastIndexedTimeString, + ) + + // untilTimeString is 30 minutes ago in RFC 3339 format. We use this + // because we don't want to update the doc headers for files that are + // actively being modified by users. + untilTimeString := time.Now().Add(time.Duration(-30) * time.Minute).UTC(). 
+ Format(time.RFC3339) + + docs, err := cfg.Goog.GetUpdatedDocsBetween( + cfg.AppConfig.GoogleWorkspace.DraftsFolder, + lastIndexedTimeString, untilTimeString) + if err != nil { + cfg.Log.Error( + fmt.Errorf( + "error getting updated draft docs in folder: %w", err).Error()) + os.Exit(1) + } + + if len(docs) == 0 { + cfg.Log.Info("no new draft doc updates since the last indexed time", + "folder", cfg.AppConfig.GoogleWorkspace.DraftsFolder) + } + + // Create channel and wait group for goroutines to refresh draft headers. + var wg sync.WaitGroup + var ch = make(chan *drive.File, len(docs)) + + // The number of worker goroutines is the lesser of the number of docs or + // maxParallelDocs. + var parallel int + if len(docs) < cfg.MaxParallelDocs { + parallel = len(docs) + } else { + parallel = cfg.MaxParallelDocs + } + wg.Add(parallel) + + // Refresh draft headers in parallel. + for i := 0; i < parallel; i++ { + go func() { + for { + file, ok := <-ch + if !ok { + wg.Done() + return + } + refreshDraftHeader( + file, + lastIndexed, + cfg, + ) + } + }() + } + for _, doc := range docs { + ch <- doc + } + close(ch) + + wg.Wait() + + // Save LastIndex with the latest doc timestamp for the folder. + lastIndexed.Lock() + res, err := cfg.Algo.Internal.SaveObject(lastIndexed) + if err != nil { + cfg.Log.Error( + fmt.Errorf("error saving last index state: %w", err).Error()) + os.Exit(1) + } + err = res.Wait() + if err != nil { + cfg.Log.Error( + fmt.Errorf("error saving last index state: %w", err).Error()) + os.Exit(1) + } + lastIndexed.Unlock() + + cfg.Log.Info("finished refreshing drafts doc headers", + "folder", cfg.AppConfig.GoogleWorkspace.DraftsFolder, + "last_indexed_time", lastIndexed.LastIndexed[cfg.AppConfig.GoogleWorkspace.DraftsFolder], + ) + } +} + +// refreshDraftHeader refreshes the header for a draft document. 
+func refreshDraftHeader( + file *drive.File, + lastIndexed *lastIndexed, + cfg IndexerConfig) { + + // Get base document object from Algolia so we can determine the doc type. + baseDocObj := &hcd.BaseDoc{} + err := cfg.Algo.Drafts.GetObject(file.Id, &baseDocObj) + if err != nil { + cfg.Log.Error( + fmt.Errorf( + "error requesting base document object from Algolia: %w", err).Error(), + "id", file.Id, + ) + os.Exit(1) + } + + // Create new document object of the proper doc type. + docObj, err := hcd.NewEmptyDoc(baseDocObj.DocType) + if err != nil { + cfg.Log.Error( + fmt.Errorf( + "error creating new empty doc: %w", err).Error(), + "id", file.Id, + ) + os.Exit(1) + } + + // Get document object from Algolia. + err = cfg.Algo.Drafts.GetObject(file.Id, &docObj) + if err != nil { + cfg.Log.Error( + fmt.Errorf( + "error getting draft from Algolia: %w", err).Error(), + "id", file.Id, + ) + os.Exit(1) + } + + // Replace doc header. + docObj.ReplaceHeader(file.Id, cfg.AppConfig.BaseURL, true, cfg.Goog) + + // Re-get file because we just changed it and need the new modified + // time. + file, err = cfg.Goog.GetFile(file.Id) + if err != nil { + cfg.Log.Error( + fmt.Errorf( + "error getting draft file from Google: %w", err).Error(), + "id", file.Id, + ) + os.Exit(1) + } + + // Parse modified time. + modifiedTime, err := time.Parse(time.RFC3339, file.ModifiedTime) + if err != nil { + cfg.Log.Error( + fmt.Errorf("error parsing modified time: %w", err).Error(), + "id", file.Id, + ) + os.Exit(1) + } + + // If this doc was updated later than the last indexed time, update + // the last indexed time of this folder with this doc's modified + // time. + lastIndexed.Lock() + lastIndexedTimeString := lastIndexed.LastIndexed[cfg.AppConfig.GoogleWorkspace.DraftsFolder] + if lastIndexedTimeString == "" { + // If timestamp doesn't exist, set it to the Unix epoch. 
+		lastIndexedTimeString = time.Unix(0, 0).UTC().Format(time.RFC3339)
+	}
+	lastIndexedTime, err := time.Parse(time.RFC3339, lastIndexedTimeString)
+	if err != nil {
+		cfg.Log.Error(
+			fmt.Errorf("error parsing last indexed time: %w", err).Error())
+		os.Exit(1)
+	}
+	if modifiedTime.After(lastIndexedTime) {
+		lastIndexed.LastIndexed[cfg.AppConfig.GoogleWorkspace.DraftsFolder] = file.ModifiedTime
+	}
+	lastIndexed.Unlock()
+
+	cfg.Log.Info("refreshed draft header",
+		"id", file.Id,
+		"modified_time", file.ModifiedTime,
+	)
+}
+*/
diff --git a/internal/indexer/refresh_headers.go b/internal/indexer/refresh_headers.go
new file mode 100644
index 000000000..36264a490
--- /dev/null
+++ b/internal/indexer/refresh_headers.go
@@ -0,0 +1,190 @@
+package indexer
+
+import (
+	"fmt"
+	"os"
+	"sync"
+	"time"
+
+	"github.com/hashicorp-forge/hermes/pkg/algolia"
+	hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs"
+	"google.golang.org/api/drive/v3"
+)
+
+// folderType is a temporary hack until we only fetch document data from the
+// database. It is needed so we know which Algolia index to fetch document data
+// from.
+type folderType int
+
+const (
+	unspecifiedFolderType folderType = iota
+	draftsFolderType
+	documentsFolderType
+)
+
+// refreshDocumentHeaders updates the header of any documents in a specified
+// folder that have been modified since the last indexer run but inactive in the
+// last 30 minutes (to not disrupt users' editing).
+// NOTE(review): the LastFullIndexAt parameter is unconventionally capitalized
+// for a Go parameter — consider renaming to lastFullIndexAt.
+func refreshDocumentHeaders(
+	idx Indexer,
+	folderID string,
+	ft folderType,
+	LastFullIndexAt time.Time,
+	runStartedAt time.Time,
+) error {
+	log := idx.Logger
+
+	if ft == unspecifiedFolderType {
+		return fmt.Errorf("folder type cannot be unspecified")
+	}
+
+	// Last run we checked for any updated files up until 30 minutes before that
+	// so fromTime is 30 minutes before the last full index time.
+	fromTime := LastFullIndexAt.Add(time.Duration(-30) * time.Minute).UTC()
+
+	// If LastFullIndexAt was the Unix epoch, set fromTime to the epoch.
+	// (Subtracting 30 minutes from the epoch would produce a negative
+	// timestamp.)
+	if LastFullIndexAt.Equal(time.Unix(0, 0)) {
+		fromTime = time.Unix(0, 0).UTC()
+	}
+
+	// Create from time string to use with Google Workspace APIs.
+	fromTimeStr := fromTime.Format(time.RFC3339Nano)
+
+	// untilTimeStr is 30 minutes ago in RFC 3339(Nano) format. We use this
+	// because we don't want to update the doc headers for files that are
+	// actively being modified by users.
+	untilTimeStr := runStartedAt.Add(time.Duration(-30) * time.Minute).UTC().
+		Format(time.RFC3339Nano)
+
+	docs, err := idx.GoogleWorkspaceService.GetUpdatedDocsBetween(
+		folderID,
+		fromTimeStr,
+		untilTimeStr,
+	)
+	if err != nil {
+		return fmt.Errorf("error getting updated documents in folder: %w", err)
+	}
+
+	// Return if there are no updated documents.
+	if len(docs) == 0 {
+		log.Info("no new updated documents to refresh headers",
+			"folder_id", folderID,
+			"from_time", fromTimeStr,
+			"until_time", untilTimeStr,
+		)
+		return nil
+	}
+
+	// Create channel and wait group for goroutines to refresh document headers.
+	// The channel is buffered to len(docs) so the send loop below never blocks.
+	var wg sync.WaitGroup
+	var ch = make(chan *drive.File, len(docs))
+
+	// The number of worker goroutines is the lesser of the number of documents
+	// or MaxParallelDocuments.
+	var parallel int
+	if len(docs) < idx.MaxParallelDocuments {
+		parallel = len(docs)
+	} else {
+		parallel = idx.MaxParallelDocuments
+	}
+	wg.Add(parallel)
+
+	// Refresh document headers in parallel. Each worker drains the channel and
+	// exits (marking the wait group done) once the channel is closed.
+	for i := 0; i < parallel; i++ {
+		go func() {
+			for {
+				file, ok := <-ch
+				if !ok {
+					wg.Done()
+					return
+				}
+				refreshDocumentHeader(
+					idx,
+					file,
+					ft,
+				)
+			}
+		}()
+	}
+	for _, doc := range docs {
+		ch <- doc
+	}
+	close(ch)
+
+	wg.Wait()
+
+	return nil
+}
+
+// refreshDocumentHeader refreshes the header for a published document.
+// TODO: improve error handling.
+func refreshDocumentHeader(
+	idx Indexer,
+	file *drive.File,
+	ft folderType,
+) {
+	algo := idx.AlgoliaClient
+	log := idx.Logger
+
+	// Get base document object from Algolia so we can determine the document
+	// type.
+	var baseDocObj hcd.BaseDoc
+	if err := getAlgoliaDocObject(algo, file.Id, ft, &baseDocObj); err != nil {
+		log.Error("error getting base document object from Algolia",
+			"error", err,
+			"google_file_id", file.Id,
+		)
+		os.Exit(1)
+	}
+
+	// Create new document object of the proper document type.
+	docObj, err := hcd.NewEmptyDoc(baseDocObj.DocType)
+	if err != nil {
+		log.Error("error creating new empty document object",
+			"error", err,
+			"google_file_id", file.Id,
+		)
+		os.Exit(1)
+	}
+
+	// Get document object from Algolia.
+	if err := getAlgoliaDocObject(algo, file.Id, ft, &docObj); err != nil {
+		log.Error("error getting document object from Algolia",
+			"error", err,
+			"google_file_id", file.Id,
+		)
+		os.Exit(1)
+	}
+
+	// Replace document header.
+	if err := docObj.ReplaceHeader(
+		file.Id, idx.BaseURL, true, idx.GoogleWorkspaceService); err != nil {
+		log.Error("error replacing document header",
+			"error", err,
+			"google_file_id", file.Id,
+		)
+		os.Exit(1)
+	}
+
+	log.Info("refreshed document header",
+		"google_file_id", file.Id,
+	)
+}
+
+// getAlgoliaDocObject fetches the object with objectID from the Algolia index
+// that corresponds to the folder type, unmarshaling into target (which the
+// callers pass as a pointer).
+func getAlgoliaDocObject(
+	algo *algolia.Client,
+	objectID string,
+	ft folderType,
+	target interface{},
+) error {
+	switch ft {
+	case draftsFolderType:
+		// Pass target directly: it already holds the caller's pointer, so
+		// taking &target would add a needless level of indirection.
+		return algo.Drafts.GetObject(objectID, target)
+	case documentsFolderType:
+		return algo.Docs.GetObject(objectID, target)
+	default:
+		return fmt.Errorf("bad folder type: %v", ft)
+	}
+}
diff --git a/internal/pkg/doctypes/doc.go b/internal/pkg/doctypes/doc.go
new file mode 100644
index 000000000..6e22f4d61
--- /dev/null
+++ b/internal/pkg/doctypes/doc.go
@@ -0,0 +1,2 @@
+// Package doctypes manages document types.
+package doctypes
diff --git a/internal/pkg/doctypes/doc_types.go b/internal/pkg/doctypes/doc_types.go
new file mode 100644
index 000000000..a63ddd67f
--- /dev/null
+++ b/internal/pkg/doctypes/doc_types.go
@@ -0,0 +1,57 @@
+package doctypes
+
+import (
+	"fmt"
+	"strings"
+	"sync"
+
+	"github.com/hashicorp-forge/hermes/internal/helpers"
+	hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs"
+)
+
+var (
+	docTypes   = make(map[string]hcd.Doc)
+	docTypesMu sync.RWMutex
+)
+
+// Register makes a document type available by the provided (case-insensitive)
+// name. It returns an error if the doc type is nil, has an invalid custom
+// editable field, or is already registered.
+func Register(name string, docType hcd.Doc) error {
+	docTypesMu.Lock()
+	defer docTypesMu.Unlock()
+
+	// Check for nil before any use of docType: calling a method on a nil doc
+	// type would panic.
+	if docType == nil {
+		return fmt.Errorf("doc type is nil")
+	}
+
+	// Validate custom editable fields.
+	for _, f := range docType.GetCustomEditableFields() {
+		if !helpers.StringSliceContains(hcd.ValidCustomDocTypeFieldTypes, f.Type) {
+			return fmt.Errorf("invalid custom editable field type: %s", f.Type)
+		}
+	}
+
+	name = strings.ToLower(name)
+
+	if _, dup := docTypes[name]; dup {
+		return fmt.Errorf("doc type %q is already registered", name)
+	}
+
+	docTypes[name] = docType
+
+	return nil
+}
+
+// Get returns a document type for a provided (case-insensitive) name or an
+// error if it is not registered.
+func Get(name string) (hcd.Doc, error) {
+	docTypesMu.RLock()
+	defer docTypesMu.RUnlock()
+
+	name = strings.ToLower(name)
+
+	if d, ok := docTypes[name]; ok {
+		return d, nil
+	}
+	return nil, fmt.Errorf("doc type %q is not registered", name)
+}
diff --git a/internal/pkg/featureflags/flags.go b/internal/pkg/featureflags/flags.go
new file mode 100644
index 000000000..d3817bb02
--- /dev/null
+++ b/internal/pkg/featureflags/flags.go
@@ -0,0 +1,116 @@
+package featureflags
+
+import (
+	"hash/fnv"
+
+	"github.com/hashicorp-forge/hermes/internal/config"
+	"github.com/hashicorp-forge/hermes/pkg/algolia"
+	"github.com/hashicorp/go-hclog"
+)
+
+// FeatureFlagsObj is a record in Algolia
+// with "featureFlags" as object ID and
+// a map of each feature flag with a
+// set of user emails that should have
+// access to the flag
+type FeatureFlagsObj struct {
+	// ObjectID is "featureFlags"
+	ObjectID string `json:"objectID,omitempty"`
+	// FeatureFlagUserEmails is the map of each feature flag
+	// with a set of user emails that should have
+	// access to the flag. The map looks as follows:
+	// {
+	//   "createWorkflow": [
+	//     "username1@example.com",
+	//     "username2@example.com"
+	//   ]
+	// }
+	FeatureFlagUserEmails map[string][]string `json:"featureFlagUserEmails"`
+}
+
+// SetAndToggle sets and toggles feature flags.
+func SetAndToggle(
+	flags *config.FeatureFlags,
+	a *algolia.Client,
+	h string,
+	email string,
+	log hclog.Logger) map[string]bool {
+	featureFlags := make(map[string]bool)
+
+	if len(flags.FeatureFlag) > 0 {
+		for _, j := range flags.FeatureFlag {
+			// Check if "Enabled" is set to enable
+			// the feature flag
+			if j.Enabled != nil {
+				// An explicit "Enabled" value in the config always wins, so
+				// assign it directly.
+				featureFlags[j.Name] = *j.Enabled
+			} else if j.Percentage == 0 {
+				// When percentage is set to 0,
+				// the feature flag will remain disabled.
+				featureFlags[j.Name] = false
+			} else if j.Percentage != 0 {
+				// If the percentage is provided in the config
+				// the feature flag may be toggled.
+				featureFlags[j.Name] = toggleFlagPercentage(
+					h,
+					j.Percentage,
+				)
+			}
+
+			// Email based feature flag toggle
+			// only when feature flag is set to
+			// false. This allows for toggling
+			// feature flags for specific
+			// users using email address
+			if !featureFlags[j.Name] {
+				featureFlags[j.Name] = toggleFlagEmail(
+					a,
+					j.Name,
+					email,
+					log,
+				)
+			}
+		}
+	}
+
+	return featureFlags
+}
+
+// toggleFlagPercentage toggles a feature flag
+// using an id string and percentage value
+// using the built-in hash functions (the id string is bucketed into 0-99
+// with a 32-bit FNV-1 hash modulo 100).
+// NOTE(review): `<= p` enables the flag for p+1 of the 100 buckets (0..p);
+// confirm whether `< p` (exactly p percent) was intended.
+// This function is based on: https://hashi.co/3O2JwTK
+func toggleFlagPercentage(s string, p int) bool {
+	h := fnv.New32()
+	h.Write([]byte(s))
+	percent := h.Sum32() % 100
+	return int(percent) <= p
+}
+
+// toggleFlagEmail toggles a feature flag
+// using user email. It returns false (flag stays off) if the
+// "featureFlags" object cannot be fetched from Algolia.
+func toggleFlagEmail(a *algolia.Client, flag string, email string, log hclog.Logger) bool {
+	f := FeatureFlagsObj{}
+	err := a.Internal.GetObject("featureFlags", &f)
+	if err != nil {
+		log.Error("error getting featureFlags object from algolia", "error", err)
+		return false
+	}
+
+	// Enable feature flag if the user email
+	// is found in the list of user emails
+	// for the feature flag in Algolia
+	for _, k := range f.FeatureFlagUserEmails[flag] {
+		if email == k {
+			return true
+		}
+	}
+
+	return false
+}
diff --git a/internal/pub/assets/document.png b/internal/pub/assets/document.png
new file mode 100644
index 0000000000000000000000000000000000000000..5618a50231aa193fa83f6af82faa8a06507eb03e
GIT binary patch
literal 29028
zcmc$GX*`r|7_Vh8$udcj#H2}*BuSEFY$1dsBq2%?l8}7~A^VGvY#~WPk`S6CNs=Vl
zlWf_Oo%7c@=gaw>^ZEGjHgBexdG6=Fuj{{E6Re|soSl`Am5z>%U0qFC4}X56qodzU
zv30LM^Is}wY5))E!Oq)zrPdkyeN#M@y7rLi|J%a}PB%JlZZo6j2Dzh!@#zoGB{oY{ ztUsWcXV7l7ufC9tR$f`*P$izcm5rHO#pSZsqV-d<*KAq$PW34R|2_wqD7BvkSJ(Ne zWjFUb&S$xon%ET^Yh~&k8ofvBB?i3K5pim+5D9tIGORDE()`DLwooirFGq)aZ+!ju z)sKabbwvKt$ZWB>5g#=!#4abmNhJlE+kHPwB@w6{8hjFL8-tb0-i0U&m9}(VG%_HQ z97;RRva{7*D=;h%y;s*ib#_#dbc@z2H~%w6ri*uj;?@(do%ZZsSZ#C}*=#-QvR2@wYN_>;xhKV z_)3CiR&MSUFRzuKbusJLujdH5U};I==ct@|zsIhmtg_M@zY06l#VaT{;2=ZvE1!71 z{`&Rnc&2H-i-W_2!LEg&lisrX_tRHqXJxs{-Nyy2_-G1W|M4a`EiLUty!c`M@F!26 zY}}>YBO`zKu+jM6%y`q`p`p?iu3fxrUu_QhDmc*o*3{JKi(+-`zX?Rto_FmHxYGLM zrriBkuU?%qU|^#yO~qGs6kTe}<1jQf{yZ{M@_jLisa=LUw5Gy+_HEzfIPO#B@~Hip zGiL-nCa4j$(@WEhtFzv#v){jeUs!NU(@`K+M8s?C9HffI zer#;afBe|?>eVKxZDFeZ)n?L3Rz}ZF#O<$M-Neddc*3%%%~0%Z;qPOs)5q|41!5yk zveWZ&f0T6pGsv|v`a7Mny0jxgjzLQ;+T`rn3r5nh8augyYX#gN8;Tj^o{p%E(a21_ z6kpGjEI0Gz)aHGrmPTJkX58fNFC1iM@U@mo)W|Gsd)z!k*v9m6h zH>WIRlfzXrdL8as8Aa4?jHn%H=$o9Io0C}zJn&jPh^8yT9a1CYM^4ju=-b$@ zys7=GOO5vrb$d*=oH;+UxLj(Yo!#8<@#A|-Ndk3c`q=6z(b%!>$`mW39DTQ?xygNA zlcDk{C&{FV`K6SlpSzlyn@=8{(U(nDB@t>T8$HLaJC}JnI5@1XEIXGi=dUj0=e7)2 zF1J@&6}9#D_TomqTAHdS`pGR#(29zRIOQf3BzTu1R_7wtZx;9H-*Y~=#-iAGuRuhK z+|8*&mGcche?F+~^W7jWCiczt`Owgiz1MV-efi{DPVYITZyX`|;_>yCk`@JXqb1!d zA6;Hy!M*+#r6kK-`*Eau{^R@i@0XUAx>rWKR~A|;$3{j%Yx}xaS7tX5hK7d~Nk2!< zuP(IYo)L5#OjDx>#nd?$?Df!ZGMi@S<m(SFn?llZk!OZPkqP3!md(m^{%J&1O#{@V;1${^C~C#kYrEUHkcNLuGfqjtGfxbE2BXd%|B(?(eq? zyn)Mu$B!+<9Gl-5XJsYX_UqTLMqJx*ftx?)Ci|MKzt|MZugvvj>IIwMZ8dctUAva3 z9mju6&VBp#tzx$8gzlHuul)>iV>L2Y$Gv)G9t*Af*te2iR<_u7DJ73nKBX~;*1ON6 zcl~Ua`}lO@vDM;y&#Sm~*4DSJ+7!qWYnvVmTh_h&q|Mf=FYJ&GcR>G}ckk2+4CQVN zf4Pped9(2(nJ7deKJh-hs`ja8%Ld^#4&*8(tWa8bi!ZtDW=-H0zjoI=uIRODo zaaJ1$3E0T<{qgQ@ZW@_fRMO(#(Tzj`jYc!b-Sm==mv^r2q}S)}wV%~}t5f?J85yyM z;_HpX;?6Jq*o>mJui{U%My9ceNxAdr_mTX{@V>c~Z&RQd;5J_&1dpWQSAE! 
zS6vGXZw+mvk`lA76!R^M5C0MkG+$U;JhnV^Qt+r7w<<^9(%LQi^&uhtyx07vG$s7x zzgifc?5n+;$U`LT(!Tcs@A3V6qK?S#p`nD7l-0i_mC20*^B%7{?d*SFzw9OT*+VJ% zXJ5pzmG^t{v8z9R)K1X+_4Zy}SdcXgK7Rao$@Axb z+$s#kUU|$*5@qiNWDSLG-Y0OxnT;BW2VI`Co!lx{?d-HQHPIF}hpKR^99LECuye`L zM-BRU>DZ!CZj0=J13P#^QC?7i{{H>@%5#y2hewA0W3@kvxe#7mP9Wbze0_jouySEV z1$)sM>7*NFoijy8_&G{Vo-|qW29|ov7itP`-Li#Bvc!7{$MidvGO}>k+S=;pwixF< zx9@&sQ6O{Z&?U(PH#awaj^jD{eA~8N{c?R?CpF5?P&_`xYhh?tipNycae>kXVf$C} zze4xDnmfNbbKXR}M3**~Jb^;&VQy}2X4bQI|Bpq|KclnW^6acN>od0R03Lk1Z@1XY ze91XA>AJLhu%ykF==a8=z*=g-J)DAbIDKj5eFHZ z@%ib28MmYA(Wg$G;tTurt>D73#kNK#*=>I0wfcaXvWR+bTIQ^k-P|N~R8CG#LV~Vg z17UZ3JN4%OW67*;_)Wp5vmzA78^cD&3l-NaI+HpqN1X3 zMhMg|HV1hbf&v2_OYhZL8lOGO7;qUi81FthIw~iCf{iCcQ#$QR8VB5e1n=V6y!onh z``nzXe9FG~`XQ&6d3g=}j_hn~-$tF2wQCD)i;X>dd|4QL>E6&^mP+jE>e_7TT5T)c zCnFxEW^m?ASy>rYbYbD8rf~hh^z!oZ*|TS7J6+DF={#*8#a1sVaq;v#muDb&#M#!% z%ap=pCAlR>-&j1}#o1ZFd+Az!>(WH7P!=0>j_K!^XzO!^6Xlr6ws0U$R4L zeoReCKJ>>?&ae2JSZ(%hV45rVmN{>M;l28P)_~XV-repV&({~-wryK|eLXIc-J>H+ zWyd9DUxn1xk;Z$6al}4+(8Oi@E~p(V8-Inn{Yi<_DDNxUP)!YXdh^hq3lT?|n~eCv zn5m@r`b-@WTj{O!g_(Nlc?K7FpBELK65+-{wkb|D6#F&l>R4K?FS;e{ZFo3Gu(D8m zJ?_Ydi`P+pm4&brJ=Ok`t|k<&u<&qHgIv@mIf01!{zv+v4#v1q;_+w2<1?B>=2R5! 
zlL)=IO7TMpntI$jNyMs;*{vg|a=yHIbIU}09K9jF9yeDw25n|+Y)oI2X>(Ug3)8DD z{?%25zzSkKp+ZsbY>Kn=asaN3^MtvAt5glecu*Ve7S{JWQn(6?^Q4pZ#?<|8FX2>a zvXyRcZZ4{*&?4PZqgWU@dwP1>+G^&vUXenDQjUI5XPJKayO{);nBHQupRL(iia^Dy zWj9}zPUyXQv+OY*E(x$|118ti&(y2CSmaPw zW3fM0BZ`7w+7$EsX)LrA;Z7G36cl>zY0%t`(MNzjzWE_EK%rc7Nh}^Z$Iv&hti%;^-f`acK+D&!-*#oF zkn2!!qqV4eys~SXhf<&nf1Db{S&&gZ+F@8ngy1~2@I;ncHTIZGkehGl%0=Fb7cTS- zHf5kr3advSJ$h6+iJc~abIudG?6r!L-`doqo7)l>AHO5&HfjlxV32!|Clou|QgSy} z@C%bMbdUJ@O`$c|*(BnB_5Hx&%;D1<)nEmkc4<0;j-|r_<^BEr#|1t0^9;^cxXt`- zD>@z_khcalR5F?#G!LO*9Jut+T8i(*8GepSmoBjs66ZWtP!zY%_u{hwx8~)I;#PV9 z@azsjQ6*3vWcVl2Xy-m#OR)y{k%<{ickBDHON3*3CtZh}jB{JEnulzqbDM{RqSQoV z-4=$jj}319Tlqya_dGuXb7j!N=XAO$Ob{rdH7 zZEc>x6&ZdMUetN??in`;0uE51xlOULaLkr1TdtLwn22w-4QjDDo!_eTef2mUs*pzJ z>%PfHEod$QwK|)3#97&k>M49tj;4sTS`^$h=fz*VsOdRR|MJjai22>DAHGc5pn* zrBWJcG{9c>*|DpEM!x9}>+0&37uSwwurkAtm_-3^^o78{K;X7cyGuq`AGDlkN_%^| zZ=c)En?@8az&Lam?d%$hYZAP7R5N%3hn!?%>i*Nt#+_6k8=gHoGBkv);~}47)n=R* z*KiS7H@eOeRDqk7!IzxW;Oyd}6w((&45JJylawi3oi5#;K$u?ns1+gUk)o{6_7SL6 zd>iD%L9s0*>GvASrySyCdzwt^)zsGhAro$HNZ|rC2Z^R|UA=m>TGdqo+p>Xt8~2i* z<3y%jW%qf}*g20^`l3KvxCGi$T6)X4li zK5K7l3#M7+axeX%uP=R^RlaG(BZFK|`R<7c2QeN-A(4q{!1q1etcJ&bP2Y@Cdti_Y z#+v-%oZ(H3DgrjjROMT^CZ4!oOW z2;=MjrtQ}htjiJja^rtgzo>(Mn#%s$wQtutw%&CUwpn^&t*<7`VhXR95DLtFA`>(+ z86Fp!c|>VsPP*P}9Ee}*L~GT~f=U!kY3=xd4Zyklc0ddiCQ%s8*^fyoBWyiaPPr}}?9)VUR7nfLZ7 z>+TIF&ur35H_i(txW+yTJf>kr|M9mA3n>O z-+V?=boch3*WKJIPf2DbzxoRxt69De2f2g1iRCbl$j^ec0c{fagH38lm=NNv71kE0MP3z?;3iB6n2A(`;OOhQat-1&x4w9&6G zwl-~X5pV0cHmrok=!(DnUO{xZb`tDCs`mLPzG_kaUn9E#vBy-1EyeqOmVeb29|d=)JxrjrC8u>R1kE z#dcLM$7%|9U&-|+p32dGd1Bp>3e%K^i}t6h1s;0izaT~ElJD+$@6+M#pPrY}8h1R$ z&oPjt>mV_8u49lla9zObvnrpcK8@n(bk7!cxkQnNjvIF9gl$*_y z_lreT`r{?-fSEVty6~hl9UOwng_e8*gKzhEL~S1%?f38B3SFuCq5v06SBfB6EG}bB z-^cD@qy1bXU7JH|Krf1oJKNgQL^S@Dk&<@YaWHIOHVR5-^W|6#0R7Z^cbkW9>G?uz z0WPSqXzF*Q?mdBSQs0jrjP3sQYn-8&sgV)A@f%CYAqN?dpVzMyA-!D5uXtN1Mw5_E zYA}1cYS8f4Cm|dKn_mC6lZfmRScigjQ4@kQ~E?D2YvqS9oje&q>xI%Fouq}n 
zI$DR%EG#U1Px}%|9);_g3_pRYpNqToP%j7k(IXjKN+XkR%NC>a=kpD6eH9e)TXzt- zID)R1n#k}oCx#r+ivQkIWyoI(00DKbz8|1KN(;SNKINFSG*3irm}&+({K!mhZZ5!o zzaxPPZqLXmM`B}UVPOH>WKdB+Yled`u=OVq+6jI%DAC4w%xfp|!^I-E3ez+lFl->` z7MmK2f;L<9#>iJsnup@z;#7|xud$efVk?_$Ai}Me^Q@xc8~}gj=jYF*xmB=CRaHKE zIS(!;LSerUR^JaE0@Q6PpQ*PlxTUkR6G+y~%h5BHlR<)jOzpZl!bhSvOttG^>D4FCSC3AN!r;nZk}h)wnNo{Xb6MIG(V`rAxHoH znj?1WMBk*Vl-9`10swnKK>;vfaBwgX?ZJaL<|ca4Zu?Jw=wM$O=S@#ft7bHrNpOd% zBx_$JsPauCOk>QnD7>EJ%|O zk6&N7_v{N#=wPnicHgvh^~t0cUcyjJN)lW-_b8_6e5<-P7A_1jzjC*!=j4H>zCgtd z7cUg>Qc3@I)ud~=rR0`~+D)th-Qz5q2x}toIq}VdQeKrQrrN zO3n+E2G+GEZ0=8L098(ntS!Ah06`Vppx8KosST_?@{5w+dOtGi-Bdr=Lmc^baq&Sj zO>kE6_)uD}ve1UqVgG{aEldpbCgk|~y|Eh5&YAt`zi9(B8;bp1t3RB)lT-6@f#yxt z-0uQ^J0F>{llpYlzJlq`D@a;=Vc4Dh{Z`np>mNDD{C1G(Im;iW3Z=XsR8m?;yDLZ@VYSQsrL_8F1lX7v zeHDaburwS&j53S9!@Ehu-`$mh?@zkQ2?Wuo(GQxe%N_e!!}}n+pF4LBz836AOUdLT zE>DVzu3x@fXHnp@O_ZDUhO_hcQD<(I?Ck74u^~{(SOXMdohE;%=-4R>MOB*_8W|nF zIrTt|!l1%xlcKaYL5&ivM!`04cVCR%bB2xCzi|K<+PN$_YY6)RLi^F9XhvJunAw?W zzkUfjh-WolhB{YZ_ylB=E7-umU}}Ep?_cLq6K5HI(O4ThJB~kghDEPtzMj2UbeNaT zKy;IMd|7cZoTjM80c>Z{Sh0OEz5hE4@V%o0ofP6hp21eOKx|Slg@I{oBm0u}j3#S% zIgUTyC1-hDe(=>+ijPX8dy^sZ&r5p@iZMY zLQBcuKAF4u@B0h&MZp16L!l=5S3fN-wvymYjZ>#^L0U?UMd7wT>_?v1{8*wYBY;ed zuiqvh00M=Vv?yR>XK(oM;oebq9x92yR!jJl6rT!78H>Wt5mSe9#s2+{VwyyPCMbs= z+4l#JHhpyzBw>(2ngsa}&E)n#Bxu5Rx*-W}n5viK!_Gz}MZ(~~LU(m{H}*|_xH$dmS5}=R zL#S_)H9-sPjoJ{ZK&G2^0qzjtMxnNr^0Rw`LL(>elON8AU;3}vURZ-zDu=BNogstiC2DFw}429TYk{qD7B9NDpw4iRL=egWOax7 z$9)8p*2MeBi!{QIJO{=U7)&$AooVJXLLrCfG;%tBJ+u=H`(OlZ2~RG6Hsr*oq6?`? 
zF9!-edgxaBa{6B;96>vx)KDVoEZ^gJI?^Zp`uX!G%msL97tlRVp5*l4CK1*J(SUsT z!mQyZY*W|p1gwNu#7bLrIc3%b5!8s54iA#_4A9={2@lD`pwy zJ3oJhY$=)40Nq003+3iL{QDCWt{ppe0MI@vgwU!i1OXOEM<@z=yT@tyPxi5+$BuFC z91(yK%^bp>8g;+|vYhMjDprS{DtZD{{M<`GN$W%be*Rl;9vbJVMq3I+rCwfuvB6B0 zLJ`Ql;p+NAFNZ|H3v&f4L)S9Mg(Az8`DT0;sg_s`kOz^C4yHwIY)_s~PEOhsL*?ln zZ|rw$vvoEzqx%V0HNGAtMD$BHcPu4~N>Z zlwV&wnld}uJ}^Cxf8fi=QkBGV>Q%Mb;MmyPYy6_y$!jPG@`{IiH#W)e@S_?F`dvD* z$pU@@E&oC)w}mzLq{G`kGc#~fRE0J?eR_*# zPN0f}__=9tt3bYiX+67$52_>bKtwcId%*FqLx_zOAL$gdS#DOKCSOfH(XEKf__J_A zh{MI>+%;UpFV+VzxZWiZ=nEx*K`I%)q=uc}0lfsOp`1WSdcWgMS*LK!y4q_K_fV~)Db+h53j+$M=*lK@ ze29rsBe<3y&SXf9?3WCAv?tUaBed5DU$HTl9P~2-m zH**TeJ=76_X=Iweis}sO)pj)*dr|nhOK8)SKND%=nAeA~9YOXc8MQ5U=uR{z zd)-FCLMPai@g%5qgvtLE$W>-Z!^JYMy7T9`yHj(jM6n;+O#)lpbK`$JVf>{?pqd$| zb`>*?g>SVx%Dcf_Sc&vAYxieC^{VV4?Uc-@13!@4XdaS6GzQ)ufdLY0Mn>P~%oFhA z?Mk39Z@ev+FjY*H;D#E zdrnPssL&a&FDfG(1>(IzMy_QESo(Z zuKMvdPK8jErluy{6{*KJz?OihTH4!l^+iXn03V{p5$1QssE5`>9dC%_kz{Qg-RE35)7(4RXn^Vh5obhWQ@_fFr1NvI z+*LsR>K+e@-ka)zs~Ws{<;wayyHl0Vh;F(~{^=lN*5nFTTH(S$zA%R;PO_b_ET2Dr zEAasgrG3;OM_<7eJrseagoMM+zaix&(Rw4*D8P-JNB@v`bwF$1Wg6t7L7>s!=&uOq zW)ZY%1eo?Oym6@0ea}@!QAUMN;ynwZ`%))<$e4=9kKfwV7r`TFMYZi!x@91H83IZi z`S~Yq>vb0Yw}%hNl5EHpb#2|r`cPkVyiuQ!v$wMQld-ikR24?wDr8E=oMa;ekA<5Y zmn2NmkC#t#yy%N`$%+2vGC$R;7AZY4F_ra#`=ulmaTx>#dj49F zaCmbt!|?-DT!U=M7TVL5l@+9-prAj=&sX|wbLPzE4^b^Pn+bX*Xbgp|dOfnoDH(zQQG>a6MDz$MFwgH0pc_Iq}~;!0Xw z)4MHTv;xPxJT6~eHyDm!j*2dPfVrj!F9^`sr750&+mK0W&B(~WCGA5S{`QTxz7UEy zVqvSYa&oqk3HRN{4uhGQe-H`jLZax{%49@Q+wZmD7jt+~fbbiZo-6X#e`$*G6dC6Q zaco;)OnDiQrLt!*4N_Nq`2UeCje6!(XX$8T16`+W)EN=YW+@fjD(BN8+~~EUv9BeN zsU-8m2&}4S_HG_>0@C~T4G5jAV z^`uj23JYV;GbnrY3V0bBqHJ^G54Ah0enV4qPZU zt}xw~(ugnsl*IIh_??!f<~`XPtZ#_cyimh>X+iAFjDBQjoT!;au;FKF_IYh!>(kCY z5MK|M7%E^)-IwJ$juc=nI1$}ZR(G>A;ZDO10l=xdg?uJ?UM2LwLJ;$ z@s5{7*seZ>j3HK=g;U^OLIRj%kEHYNk zPELbPFWcIXel#cBQHYA7u^?l2H-JN%%p@zpI_!yOWQC2so@s;6X(iFv#D-r8b;Q^2 z4gr>i)h)(zsmLfmaXX@qLZMko zM+50&Tj}~<2jmNtg>p`j_5=LhG0o4uy!H>mc_kjemRwMwUL^!5ZeU~Hp1FxvA1VX1 
z!B4tyD@C}Kg_;mzfqpJR^h*^Po?V2^t(ZpVJQ)@iw&00?Gl= z!q}K__8{P>QQ!mevT+g2OWL;tHVq*%x!(Hl>t2cQKA72_p2_CCP`}nf$9e zL*rLh<_44OQ(wIU2({alvyu^p7)iO#A={IgdZXPgv0jiB zJ~P`!nWX^7O+-Js>%_TG^MU~K?M}P=HSwQa-ka#x(WAp$C`6!sZEel(UZ33BE!cDE zc-LM|9*&@qveb1niKk3{K4v>S+=icRpI~$gtP-@=8xKi;xbQB2JiALtIw?6Zku`{> zojo1s?znt|-ZP|i1RzCWh|oT=>!o}TyR_ZatJJ4lf2Kbk87C8eILVIKn%#hb(&f<0 zMytv$?`n5v93DOkjAdtSy1L%-`=Q@9VthG|p*Wt()=p^nWmEiqVA?#dv!&(W;lqbz zWbSGT0|0}iN&H3%fgm}6tZaCAcuPx5gy-)IXh$CbJdq@6A4N*F>cmPxgeRZ?YN2DP z2a=q~wUuMm&4B9~rK(if)DJDqI!OC^ftu$_BbsOr^fxzt3 zoDb=bFwrE8^LW0B;09f|phPBF78&)J$v6#0w%H;liZ@Scn40%6Y=O`+xaQ3V2PjrU z!0jAsXlMX(zz+(&(+0#$EG;dqt%+2UK*Vvo`eBXSq6v^Lc9oy%zg(C9zpBuaO|%TPafbMGAoHWS5V zgC%hK<*wORwS|vShI1$oXnlb(UGHLKKAv&Z6}{_(A?*9Cb(Rpx9*R}x(9 zZ(98Px--qCdMqGmwUN=TCMzgDKk)_!f zOmMJ3jK`|TD5*g>N-b5CM?D&X(p<3lJ;+s0ADuNc^y%MC8it@?E4_YLO+5a3S?5#b zq?D9PMqhg!6k2vYEJ1Hoi~eFQm93r3P|3k&fAi*++G|M-f#i2c-Bh!(|D_K!zoodn zz7T^2yn!I~(G+OtTLMv=;>FJu8j7LQx7f7UmB2>}qT#wxv3~u!2)7H+8N(mYq$O;x zUq=KIxhf#99Q|Pj8MIScFCag}B($Il7k0$dDfpU{w6hCdsqC&SDA>*wj3S-hWDN+P zL9yH{<){kX4h(M8nfCbzE;gplMEnfQ(*Op!%h}C!g{Z;6*YM0`!jB6@#l}K8eJ!C0 zwI{p}#+H?lK*TN*5vnhY8X11+1Wl2UkQxia;2lH@DLyo`wC@SX{8g^!KEN3M9eW*8 zG2F_I&4K1%cvW|aD=*RJ!EQyi6+eCIF#KgVZnk!pq1e~9ww&f6kUi9OHBr}dp7Mgk zJ=Tghj+82;rq028$+xCghyI1dmB0nQKYHpOn9o<~y}u`E!iaP-(YIajL(6Sf}S!9}Q+ zV0*`!(wKdjPh)$Ah>ZlV1RG>@Kq;XpI`3n)w~XtU{lP`48e zLYOSBkVxG0`kPC=h|(t?sBfaND7K1ePxJE|%_P1dRoDVk3JQm4ENQT+Z}O+(O$pxL zV`DtQ$}o~4PB}Q}a1Naw?ygboNNC2I&02@F-NmsO5$1rRP^ z8SrG&I-PIn85xW()De0|j_&PQk)?~$NwAO<$Y#87wGfefSo65aS}mHu1W-J;?De8R zHDg0LLcX`_`=QRD@?JnD5>jzm+IEq9K!!*jc2vMien>!wXw-S}{!w>q5pQpAsAKW< z5pq*R^=PlkN_5Vd8BCACmXVd^V`GM_tVS6g9GsZ<07+K(?Ug;LdkW@)++FwKRmG)u z>_N-gzXK`iri~ebVX6q8k01Mz#PB|(JMClldTe;%qGR8-*asZhb31jod*z0rx$e)u zhN~kF1wX8e%hH)`rYq{uI!UNLcSs^uX^?N~$*NiWuabpXH;YEqa~GCFyiS_lc>1nm zNH1fY5n|9Tu62eh`FVM}G00S-0_jnWf|-K1Zv%BiOiWB}`NZP@ema2$2fRDoa0puB zM`XI2pTRwe)fi|@X&srtc2#g@zjiA@^P;=^R+_{^eK@G7G&G5S(;Qs<5|I22azW7^ 
zPVq(50;lVT27J~}okiM!gPmPVTidOo%S-|?bbMUg1HIkheaBYk8&Ty#Ytm`GJ{qMu z)vOvqKD4VBFEYQEg_r>=0>r1l5Yt|!rcVrVk$HzmX7E;rHxa&z%spixs_qvg5THyQ zQsd`P>WoJ*e(?gvvh<-t z)T4-vdsPam=uSL-?E|Z(*todOHuZT~=U86e-U?Ii<)mgd>j#Alq2+CDMhTkIhY#PJ z=&1%B^i`Pn+it(cl|CdKfD+cTa7*z6(XT2wz#8O4CHm*!;2>}k*7)2xoQreD#)(Nu zM{W!wF9}tq*MV1~2?gs=mn-AFour$rkLq9{!p;zm!Pdfw9he^NEOWVeGr^qKk1QIC z=_)t@^heL+=qvKJU@~hfcm`X0Utb>*u)J)FbE1ZJ*!CC+gdZMWYq4KM(uaIojUs#P zhw`OMyQm}-)r}iBipS^X=bsdcDu<<|lXKGpCS!z>72|Ck{%zZsGPgbxktl<+ig0f$ z0wDW^0?nu9%j^3kdD-kMm!+-u0uNBsA5K7IN2zMOe-uG`Y6UdqUWXTR;@Cj2>kV?N ztQDI703?3;q=WRN;^%(gEsA=aMy9W?uNtM$WxNZ!P~?rAtn5n^RcUEqZq{r3hHyn*y# z$Jf+Yx1K7zfnVPX7`1z%X}!Gs{OASR*&77}XcE{x&)7xIB5a$=9D}mHN(iF)elrBI z3RWg_wVO%6QNxlh z=z<~XsrCW1UX(V2+%)kDq%IO&({wOt<^KCyfrE^#bQ1NxVcgX#R}jtflRUB3v$2QS zXsIGlBvPXV3Z^!8qlH=o}Bxbpu@%J;Z6{2QF?@QT4+ z@l5zNdtBFJ(R@Vb`px+24xN$tip&(4Ggku#J4NY0yjM~f1LxRon{`_~D|;i;#X4S* zp7dj3>uK@$-YrN1;6ns`5DWV=P4K6dv_ndaQKRT^BP$H;OlF;EEYi~5n2a+KKdvi< zZf+8%n6@^u6?0O)w7eXt;xh&Y$OwP2xq&dJXl&$lc~I{=LTFw{b@n>QYicl2%bf=+cG~w~_?r-1FhtZhcB}=GE z&oyI>tHW1+gziJlglCInOyfY`q>LZ(*)=xBikS|?OJGZquQhZGet245d?aM?4oFIqD&y_2i%^XF(y;j`z?ITuBfLKG_zOmHv|ill;h6;(3>Bt=p*7@Z zu#+DSo@jPL&>XU#6to5Cmx!QMn|;R$XoT4 zR9)ley)b67dr_6|nQsQs;@8LfXOjP}``=lBql;i}7`sC?1P@c z7h3bj?Wl?_(reeR7v<-3`dt$VVK5eqrt~&F6$|Z~Lm~-B4^jk1WREDU&3adC{NZ|B zZcpU%R(%ewyX*MWE;CIVWw@0!zc_tu_?&~1W6}NhIN3uILvmw^ym{ck^ ze$~}8D6-OVnVzElx&(E?hdY{rtcNe}ZeBJaB13`sOmC-XKI^;f!>d4#v*8Jz@$5gG zRlS-=DeA9E+Rs|)x0JNT$`L2L#-k3Y_0C+vv>rNw{2&>YN-sb(PhX(_pP-@YSj$*P_4^=Wy^LJ9Zo zfs=r3PpM!~u-JIU4I#e$T){u5=P`$X0i0dCkO%s7wPNnaT4VN=I!|;& zFwg@&Dw~X?iH^vU_T)|=*n1mVzkVHiIU)|rPhOs5*i?!S{1l+g;sP?64~0qlh;^>; zkw0s3z==diap4}KA{bmm3@g0PtI``t1u!+>$7blu47r#?fTD&t*yfidC5K|_c-aCm zV1+aaz!98$Rn_#SL42FRxz?on_s4$!zGaz-&WsV<4@EgRni+< zX(fhMpgcpdEcgx|QLQc~zH*y+q$~tY5Q9FjUHG?bfyaxqT}oq-*ob2(=KLB5Frj;M zakK<+%^o<-SlV4N>UX~bE+M*vdFE;}AErGzJ@h5J*cgAjL=X>*S26*xXl;})7J|fW zDs(AW3WkUF^3>z52Abm+(45wyd!RX2ijB{oKi~OD_}?5F#;?(5NrZ&EcL5eAm>Evh 
z%m_a}a!d8e249`5va}!4_w@^DxV5FFf5r_H*1^iyq(V`MA7L=Dr6pE7 z+k7;$c?fCG@IGKoy_|RbQw}m6c>9Q2Bzc8nqUtO$PMN9aYkr5;3%OZG1Y1}sEfR^@ z9Q{bfM|wHdiGf~j?(Wn>-`m{PVpuQhnDkx!HT$)WWIxvfWHTu5ys+XA`+61_t*eQy-pD5#qswc|rro4dM z8Ot)-G{-qnrEQA44qvEUWY*cO1Yzh0XGXCHH#3xWbVQ8r^vO_$Hxf2P)UHK~0oy4g*X>T?~>bjxgS+MhgkwAY1Gm410 z(NFLa8$?3j(IqxqygvYA3;Ih*yTby}5ArN>vk%-2w;tDhhMHx(myQJ8C?vgy7by=F zU9^}q`S^NtYimn-Df9=#0s-tuguBXEG~nNuy3a#WCa2X1Rb`CYkT9sCRNO}chA_$#9szM}yHv!Ir- z2vT7nA+^Fbm-Z8FVgi5QQK-IT1|YAt2PemF|)PDBI?1ATZO z0`{;5<35>K+3gMFVh!!{HJK&>MbM&v z&X+)qM0WmZ``c?v_#FRSj3;?{K;(lDk;;u6ejsAgvu!iu-z%_S6_zls3+YKTHvG;V zq#!bmM<8&EY$c?GciHsY=)bR-aqIl(q=Gbs{o9`C21}#uS5->}DPwXp;g*-K5Z3R*{9dqq>6-Qs3{_t{@bJ zPKvubGQ-2lbT`Z|o3h*>2hI+;e`5D((O5Jf*TPTFn08gI9dnzl z^j#|@|G#M&EFV&@@dvdOtbGYfleLqFM@O$i)iYJ$82X+}N5^z2Q%QCXL7XK&ua&(YY>P*J{I`Q<2^$)|d@K*q)^ z5V73c7WN!|urGT%tFo?lqnk=V!GxilKIX4?4xTJuDTic}-Ku-+oHk2D(Z2M3FEYtQ zDJ?;UNaYGX_Cf(5z#lT&B*KGl=wfD%J;%F_BC;>K$*l8%-VpNsP%pd}np5O&PAQAV z-j}^GjC&2Z>fwQAsmFce&!Fx*9Yoq=>ZCxF``(7d>^g}I+8|}tt)kBIvrRGdxVsl| zU;g-W%0o4!s4MQ>p}7Gh?Pn?`_z~K{B=i9@+KsYvJx&15{IwW2`TY6w-@nq~eQma1 z2d1f;G9I4EpmuFQ+#_u3A&db1`Qrqa%^=t3ozFdC9Tq^QCTp~3U+*Hg=7wUJKT8*> z>|RCEkJZntq#emij11lsczO-qr9+`drP3X zNC@I>)E&P3MAZnDP2s`=-WU-o=XHS6#r(_L315mK*V z5{Gg z!YlUr^BKv6j*gC7iUbiK47oy?A;Wg^t2Vn+V&Z)Sk6peOC{* zPH)PuyYDM`*PbASk~2Gc^Y1)Ad7+2y_H=^xjK#%!5A!vCJU05Gqh`Bn(?M=7MqNB) zQ`S}5zMrJR9`Hti`+xnVb>!|>7mMa$eG>f~#ySo^^jtWR+?d1EnbX~w-*k}|K_Lt7&3oP-#PLpPJoHRhF#VNdunK2- zpyH383jb?i)2PfsYhS6LaQbg(38t}i`d1ScqEIIIa_C(4OvW85_EUDlN4VIz3hGzUBi^JB%8c|Jq(MuKN7`y~fi}vLHUVl8L1d zM&%J!+!PTpHgm_r7B}7!UWm?Kunfb6`*Q0cb8K%)tXtyURu(sBY1uED3a@mc>}v^S zsoq(*$YbBWP3lJ}>*SW>mdLF~+vzEH_TSS~G%7|lXxo^xmuv?9-5`G;t)WA_bz4Ay z&BgT{?d>d6(t60^!aXg#$0kBmW?aP~!EUS?Pohd5+ye+n$wwUab07?N&JQURd*u@> zeE%YkJQ&79=~WB_q>BH_$RK!w@=g8dmD=6uRS_#v3$z3evQ5(I&N#IE^9_^^!K%z! 
zo9LpPZmNbx5Y&J+hpeq&G%{XGRzhs%byGF5x41;mjU2Tl%X>5Nm-8UdiBS)%GxB$9 zO?Qm;S~l!sTc=(P%tP}#I^;twm@KpF*X1W^%Pv%lX7HoJMx1&@bC^>%DrO4-C2Hrf zy049Zun>%7YAs4eiT2DjmR+W;)ZXPo*X86yMw!&c9>!VFulOLkzWT+eP7w5_2lUNq zoMEzdEgleeT|bJ&3mY;>8hV#M^flj4-$qZRRp$~7R9!obR!4V0=gEfCYLdV8FLZt( z*#1a_i3yCTjZbeseDI)Hp!U(#endrylxeFG2o*JLBL=Tuy}Ck_(nGy9F+#9$E5!xA zytnR5)*-uu9irF|ohb1{dw2KGymCQ#Bko_dm`$=s{S6PW2bN5Hd_1c}OnaJ9v_qDn zWGLc%nB6qL&y_qP5+#NPx?@XK3x1Uznt$NH0hiFa_5~KZGiUD`9{xCj$A5|n7p}~2z`H!)Zm>*55zz z%#M@gqu)J(LNt8%{Yi4+9*&g%GNqws1@^Y?@gx~kY5z_o!F;M9SJ%HEKi?qS#TdEe zR4`(OFy{rA0_Vf!!xX^K<|V4G!Wray+&ZH~^PrW+G)2W7+DdecSNAI3OIy1u{Fw8~ z7~lEEr%-yNR$3SC8?d3hq$APHz4f=W!7*{&vvFzvsSA`|DO|2=FS;jn<8X4W%8lc6 zhJq1ySACdlI|I7m)4Qh03QeKKc38B^U(<1~is|x6Xx;naRM4S$@eeL;$^op>O?dB_ z3-9wn=?f*5*gvE8iFY5t)JvIs{0%yug6dRrWGLgF851LH6^@x;hMaRSo>D0%f8aOo zTzUbfv1T}HMw+*D=MVV>3lsJINNtk5Wyp|+f~Li7MER8PIFk0fND0g^KX;;hrg2rn zz}bfBc~`Z2!M=|#5u#U{twV%dr%oYFR${_KSfK=6kMmV;5pp0B#V3EV-^D%) zJAcN79a`^Tqvyy;2iRnwqHEfKs*%(kPdHdEK!V$`d1fqVj`=x3)%Ms@LCz(Itg)(V zAYzVxTDo>ZVrx)EsVl)dmzO(p}Lhv6a< z)^`%YcqhYEqMDTsp?Vd}dc`6v$UB8^dPz2(h3p?2SC5<~?}8V0yywgAufSg5@nu)k zB8z-tmn1vV^gdvG?UW<_Np}4<6zal4qulziG*f!BHOo*(Ow|HkNw07P(N-=gwEsrH zBY_^Wn7yDtVsFssy+7kh^UojZ-9YGuC?6<0z|6*!&hy943;NMRp|?vT&n7>jSP~dv z$?j44M`r&YMiO2YE@8C=oi^Rw-A!gudU5CbZqND?z%iuII^$_TS(nTbH@Kfb3I?eBa&L)td|YmI3sSu24ay4ywLcd$X&+jC}WUsZKc=K z6rB@F%q}}$z4Mqo^DKshrrYxy_FGwH)P$@IRSz;sNH`m(?s{eVPBm5eyOBzg5kYGF za&*esc%a?|MQ#LjfQY9rvLD^={sZn-D45b6aP|&-d89wa`bJ9VAY8c-{d=M z#Np#$l*_U2JJ_`(nbOywBZFL;zjxjAZcVWc8vQ)$%B@?NS!d0f)!_fN-B;g0U*8{^ z1^atWE7$c#GOk$sZ$?=9YvTW`H-)4oryphFva?kn3_@zBd}@@a?oMNf&rLx-%bTemj)d9e(PWZ5-k{Ht-YmKY4qef$HV8mM`q z&`PJAuc?RigC7Z6&(YCm)Fj`%W~!#69OaYeblKwXjX(b3IrMJ30plOwN16Wwyov%l zm*zHB1Ppc}a(4O>V|f172>Q4icJRm_0KdwQGu z4WGQhyHlL|O9J?7ur+|aI!3iLr@?<8_(X6;M>log&{xVAg{>aCQ#Eb3TFUpHlJ_#p zu&{}hYhW~wmC&`PndaaY@1_3{1DcN@!^YbYljgKPTITnypYceu#|<}bw@mrJyTAY0 z5-Pa}NA|txo#y35FqG45`4Gu45NGmudB zUvz3B5SnI+G$5&%Al3qDY7)T^x%ikx50lDKVC0zn49h5lNz9)sQqQfwK+w{e9TCe2 
zCZjtTW%R*Y4>P^{4ZN-E)GHDe=iItQ@brpQ1|AIXB}3^zaM?mIv4_w(0olLPg37&6 zebYLlxalb~6{j|SKQe`PYlB7{!lvp(OACwBJ0SRE1wko}oPa)`&*D6^jZGr$^0pViVbUTI7#hILdAtB7C=3B(H+(m!p=w-cFy4KpAB6BoKdp7mj>WiK|9y+4ytuxQU~Djznv zQ9~ur8|}}sd97I&*&FvJqSaRKY|J~b;i9qjbJ zw|DPpvSsW}@4yL+g8ADJ)xeb(48)NkY@BB`p zXv<}$A(XP@VwJV<0}7|XUIMBC=Il1c0l6(n7RHta{>E&m@%GYW*;O%& z)bTw!74up*Hj=1v=!=e5}o?=fXtBpC!0Yi4rp%A01(N2a(a&pEo9 z-Z1*E2!~9C)t$V6{55+^AW`32K6#ghg>lW?#^gl{7mm3qhEzx}nljx0L48h2&!b(G z_XqrzRf~DS-qJ``LF56rXA^Ya^df$fprP$qwNxtvS}Zs@YKd~NP;`uHVmZ&xCr*RO z4oPN=b=2hGYSf?8yb2o!`~#*RL8+rw6)|)~a-*#8xvm^$7c4BW6Ca4PQ@GYyxF2`B z!lAo}wJC(jb-3%2R7Tl)h?@6Ez(Ul zfuan7Rl(EyB{iC9pxknlce3T%J;#q;suh3R^_0EkHTx7bpRRwaJLqdCMi8?O4NDZ7 zQE175?_Qr+K49>#XYPk1`uz&|O2db&_C3jVryR2Iz~He~=>!91Yd%4A7d$jSHyqn} z!>wiYf|KptUlqb&FOIGzF_LNNid5F=&==<+dS-+!(%H&d7>b*p!<(`5&5LVdVzND= zI|4M~U^}U!nB&2@dgcaxyvdJ;ZM(2AsB{|!i^lSCUnbEW9b5_;_gKrnr)fXfrbI9X z+K^;lef0gW1Nye+Y%F=!c%OnRXRXYn*bRCBy8Pw8D|- zRfu0?G8`<#+K;9rB_9+-jxCvUO&PX58OoX7&J*r{4{Mtm3EjN>7jQtNx7IOg&lp7 zXoA6no6k_!e#7cgSC_|yY6eJ*b&1~2qq~N@$=*0sGL)E#5>W$qboI9qBZsz16+ain z>cF*?Zi`XH{)toxJ!*Lj56tlV5{Ji&TYlKE0d63~9JRd@$CzYRv@&ZI7-1*1%JcDH ztkBxKPJRBoc~~l6i|9jaHR86KVRr4FQ%rZr@=Bedit_m>GKJB}#)~xH`evYwWCdGi zq^hY)1k<%d_mEZs2&=6Guoq-l(Ar7qnydx@BqJe0W`?0W>x{MORlJk^TWyu49&gXy zi0RlA?lMOx(c-39#D^ywFj&|ayhj<%8&ZkAq(>!x_joPYU)8Jm_jm7RpSoFSOKYZ| zgd`p`BOHFnGZ)>-)AN|>s^f)*yP3#H74v|E67mN^?Pz-0W5$IR4SUvE7IcGL($Kk= znpwh{vt27*Aq?RSS?%a*s&w3O>{Aw>FqW~zFI0ch<7H>NM^?R=TWAZOzz63)M(TL* zpy2+f=)egA3@1eAn4i;0Fex}ud=n{vL07R)D=F*Pxg#dc>Ko9n=tPsr0o-4JOU8%8?ba&y@fJBs`Sh)+;$)PPK%pRIi!%t8#Ysk;17BoZr8;^ zm@Y9onk&T@=z^h)N`*p!KxT)mLO(v)i0<4Ki|Ii&DWNB3}Is6glQ z^XJNCS1_g*YvRRC>(Yr@8Z_`r^#zp~!-Ah=n4L{>qcjiCR$OZOWvX7be^dh7e_8;FFY9W#Br^^%^X&#A9j6LNC~&gUz$sdreS zFl#dYA3~t%h*1e$A_`KcG#6WLmRgC7Mt61mI0@2dGfZaW57;|9ci=Ebki6m1#vegI z22`7@z(t&GjiN&3Fd~e_yU;=+M@}6--nci>z82G+zV`=CEaWOwxHZPc^n5E9@z1=$ zzBZWAtCfzQz4bJcs^7ZnL}6~CCPQv!>X9kJ)M&jOvLLasLtU_#&GbiZIWk73A%m|% 
zja4z^%SnZCWddf$MxBSQ&-wlf{*>_TrC_GoEO)>q&>t7j`%R2$hE2Zy+_{w<-BYE1 z=7pPL21EVj$HtI!_Tq?qf_LW6ztc&ibm;EZnqPTHMW@8yMbnSc5)DsePo6ljL{@N0 z)>Zi%K@-j;Jn&;J6nPljqgzfQZ|yc)e^ML=2(Z@&2g%yJqWWu!}T7NOvE?QnmRsV{j$;V7<_$w z(Ta(!^z@`1^>1C=K>EA0^I8%~qtmK(WUA}Bz2fcWYos%m6%4Q7*RAB&d;&QZjtSA* zeyzXj4^7ib?eVXzIfmSuAC>+^#hS_` z>|-D5M*kN9pLaX;{tQWKkn&{kE1kdh#eC%($D0t3s2Tl{X)|yOjB1!RIC=Kq<{R!~ zrN>(TZUAFQF(xZ`b1|$`>#uhJnq#i+KO|Ez%5z+c+y`W+pgrem+CJ2A3~TJtt0?~X zL2pifwXXCR@u+t;8;0MWZ!c@E|6gWQZ=AJc3&#;h)APFX#PPxW4Br8F@9%BzNcNt} z8hmi`;Ww3KxnbfO;`q$z@4sE;2D&6Ga9j*1z%xaMSzZah$-F8n3kW@>@~kK${-96- z)OXKr>Md?>i7P|fBB(q!_ku`tJ*~FW%d4ivZF9c==Jcxe_wN~LH|a=w5d4C2!)P97 z(*Q;>f1xaqwx~qKnd~_lad`F!xG&ZuR%F*Mf^T3r>Hkd9Q)}AbG}nC%y6mMUcV**6 z{!rOAV6mhMP$BBDoE%sH7v;mtrU%+NX)B3WYcBt`o4p$L{bm`-ySZF- zLv|D6XH*B{k=#vUb6R>UKkYUxjGcHmS2XHX;ON3UivS2+Gd}?mm5kQh%>~Dngpnb` zYPHprAR*QK`jsmq@940EcwbY^GA9RoepAUC{mzq{dZ;*27!fb?_+;uwd9XpA2Phdz zx1DYiTmLw=^c5YawfTq=VMjQ<3S1|Z9rk61EU2?wuMUPRqPN}H7=>-+beL3B8_1(} zPs*plbBFB|XdGk1n|0#Xc%mawNTUprnit=EjUMPtTFmM+qU<2E`}?@CMO9N)uxe&3 z)dh7zqKcHbm<*SwSE$I9#@Q$G7J8Qgzc z#_$L-#LAe1%cdjj;b6Q~?jXKIXBD*ph)9g;*kF!MMsdiZjN=Eo8w_ZD2+^I2C%oh^ zU!C^RqJO-=i5#-pJr~E87F^=~1|s9q%;lE*QQ)BwXrC-!==2;x9F7E&572%N{~GxekfXhp~W+olIKv0npMHakFICKa0_97r$m&oLkwsx#LD1pn#P9oo|9= zNh|)*8!Ve8SOG-qrJI29d}V{t02v>@*DOwuzIl%AN=NxoD6!=QNB`Nqpr*|ubV*S2 zwbE^5)L_BXQZA?^^M~CPuGLW+HkBW*`)ga5?m7XdIC+bs{-9L1(n!+v*on4-255rC ze~XBomf5!|`B!&0H}mO73Z{z@qN#dkvI8mqOP|e4FhSWO z-p^gPeVFiHHa*B;Woo6w+vyjo`?Msl?*Ac+^hqWn)N=4RzkK<1TIF_(Yt)Z^p(T{% z)^q(xWys~@9G)`hH2>buduTGcd$5$~X_!saI;(w|gdM_cj&pw-xtmzB~TViWyw4 zRiC>*aNdt8{P@W=Q#5jxM z;&0s|w3$M+#f^>s;(ES%0*CqktNG&~AuVC!gVQ%s>e{c&9yiqvzv79xVA6jKPE%Gk z{ADS$=2exDLaxjSY}TphBs|nQgU0^NcVlEGsO8&Q%AeRwL;vt$R81QfR0S}%G?Rms z(~M>P)6(f0+k}l)3@4xA%v-nX;7FjmCJDOSBq7du2Gx|`zy$?(0F@32jPQMvGAK$v z4x2mQ%92vzPCB8HxIVUR6KL4v)KLg?D!Y8okecWhOThGf;pCT6624S!+eP!?TRoR#yj7 zg_oESiG_tXSrM2gI7Q?nAGv?g(q8$6hsG*3ef1yP4_YUZ-OHCL))~kpY|o-OjJc!c zJO!!7--3kUCh2XY)92>0OEMB;kP2QlgR_prZ^l1;eu0k7-Cr+={Gx%~TNKXJ?xwPF 
ztc2`kuLh{-tVCZbcWBmOyX74cb43_eQ&hNG{AVi{u(7V>o5T$07ZA$JTF{0>ggqk& z%wH1(O?zKBY8UlPovFco)H7al5rN$B>ADk>Gt#UymYU7f{^N)$NpB2sIlCOSS*k{| zZ$g03kKaai8w<)e4c2#;4#R2+xj1f*|~ML=gp{0*E3CLIn^- z6od*OiYN#bKon6BDnO}WA7ycLBMgo26~DB$IM?)}AY zFQ_2CPp7ro-hI7(Shx*A*aaNQ3Cz&h%)5n|NhQF_*s?78CketECbJ?!0okOygjvZ- zC!Q(u@Bnyd_dsL?|Hpfam;x|JI~TsPCRPzo8Nc{Kt-M@7+wN57)GV zG#ZU#&s)h>DStGXOa_#L^>mOooKB}h5?&RFideu8bXjqF)9H^=6$btz_u-0H$8*o~ zx&3jUb_mZ?S5X;rYNyXn1Ihu?LGdGYkN*)xd}XYerjdMLi?Y#k9P8b4u3B0lm-)x9 zBnUf5!it#H5A+A~`RwfR@o_{ihB_ue*g*>95!H7#s*Wa;@sX*7I^mydA#h>)bF>yZX%P=+DRv;|Jz-AUdgIEcw3GMu7+V!%^hncs#yK>jO{x zr(rCE&mSq}4f95wks`D08p}aJ4u5Jnkh@s?yvyr$={_xvBW?ZQ!!CZuY&IKiYA<(X zP4r7SwZRHlyu?j-d7=|%c#H?k5*|S0ID33fZKMA`n-LE->sPdv2ikz2jv9^TF^>s8 zZbecu%crFx1`2v_G^h6Wn=Qv#+=i2R*dO0loZ>kg`M!4+Dp)w~kXDOM{Q9+pU}L7( z#b!AW(6FCdr5sOT=czIco42A z;VT05!PyEkbWwhu8ek|PD-5FA8FS7W3}V~%Qu~C6Ir@whTr1&H=`S|*9repNH|>n< za-gYB%4(&51`UGp>KaI*#9tfuFA zS5yG3Bw12ryaHco3(VhImki|%3Q^YJ$>p39jox@mRM`0bT4Emy1? z_8ZUe;Mcv4SL&5y83>%uf4*Z=@O@H0g4HWjh8y%9=i`~b`%6O8=jH*(fzQ2;|2}*V zvH%Z%@Z&w6@qS$nbZN@Lwy9%GB5o+t@%Vww>G4Or#&0uza|`}$^jKjZaRb37;dQtJ zZ4y#p`5g1i+o6)dsuT5DF!*d+bzFB6oBgL#x6gxh08@*CSw1powiaw)z)h#)T!kIz z3;RuGDs0H+8?R3Gw}eZ2tSsHgdt5hQUy=;-u!%rSwA~~*Bz*TWfW&e*ipl}c!7AHb zUBBS(Q_=SB#^d#15rS20ku;Bpl&+iA1YkmrS#wOBki+b@<_c_Ow%1_u~>ABL+-W^e-H(Y z9@%Qiu$kDWLU{k7)PAVHqwjg3f2Y*%cSf{Y8Y58}tV(`~POmZHgjR2t?Z8bvd=JaL|~q9FxzvUzMTixrUL9Mx*I&t}C? 
z>58Z$N8`j(P0ID4n8GTlwrnI(Rln;c^GgN!)n z5}VLAMc5gkd>~t11oy##v)QbL!uaT8Q(Z1Ap;;sAM#ue&E8?oQZ+w(A%j%+B zOYTimBD2}j;)qUmO}z`CaT+RQcz1=MJn%I3l|L~#C}rP6;X;Ls$N^)p z8@(QtIN@uoukt}8{QS)YzslnSl{RcK?0PQ_KT$QDm|On*IBCTg?zv?;t8&Y z-4x-mFrTRRAMRllq?CF2W>@Z>hZTrT#Lv7sMe~O+FT1ReUF7@H<=_uYIk5lz*)TW5 zg#K-)q}*Q1Y$>R5jTOVc=!%FH&@@94T?+3UCrbEy1a+XjoR{6eb)X%B11No^mb*eS zyF=LB;W+lkgtoA%^-``$%$cVZnkOk@YVBW{giE&L;6Jj-e&;w63WIR{d~Ur9whg5X zL^tZc(+&bfD`E)^Y zer;EBmNG5)fqY(5?+W;!SlaqM}a%Qrc`rvw{dS+tJOwtEqGR@9N=qO zmemT|vMkiJZoXd)Nc0i0FO8r2!LH=|92ZL}q6j2Z= zfGDCMQ~*&#L8t(th=Nc7L=gp{0*E3CLIn^-6od*OiYN#bKon6BD!~5%MqwBNg{IRn P00000NkvXXu0mjf0azBv literal 0 HcmV?d00001 diff --git a/internal/pub/pub.go b/internal/pub/pub.go new file mode 100644 index 000000000..8c21c7f47 --- /dev/null +++ b/internal/pub/pub.go @@ -0,0 +1,27 @@ +package pub + +import ( + "embed" + "io/fs" + "net/http" +) + +//go:embed assets/* +var assetsFS embed.FS + +func Handler() http.Handler { + return http.FileServer(httpFileSystem()) +} + +func httpFileSystem() http.FileSystem { + return http.FS(fileSystem()) +} + +func fileSystem() fs.FS { + f, err := fs.Sub(assetsFS, "assets") + if err != nil { + panic(err) + } + + return f +} diff --git a/internal/structs/product.go b/internal/structs/product.go new file mode 100644 index 000000000..703dc8c72 --- /dev/null +++ b/internal/structs/product.go @@ -0,0 +1,23 @@ +package structs + +// ProductDocTypeData contains data for each document type. +type ProductDocTypeData struct { + FolderID string `json:"folderID"` + LatestDocNumber int `json:"latestDocNumber"` +} + +// ProductData is the data associated with a product or area. +// This may include product abbreviation, etc. +type ProductData struct { + Abbreviation string `json:"abbreviation"` + // PerDocTypeData is a map of each document type (RFC, PRD, etc) + // to the associated data + PerDocTypeData map[string]ProductDocTypeData `json:"perDocTypeData"` +} + +// Products is the slice of product data. 
+type Products struct { + // ObjectID should be "products" + ObjectID string `json:"objectID,omitempty"` + Data map[string]ProductData `json:"data"` +} diff --git a/internal/test/database.go b/internal/test/database.go new file mode 100644 index 000000000..3c9c3a144 --- /dev/null +++ b/internal/test/database.go @@ -0,0 +1,60 @@ +package test + +import ( + "fmt" + "testing" + "time" + + "gorm.io/driver/postgres" + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +func CreateTestDatabase(t *testing.T, dsn string) ( + db *gorm.DB, dbName string, err error, +) { + dbName = fmt.Sprintf("hermes-test-%d", time.Now().UnixNano()) + t.Logf("%s: database name: %s", t.Name(), dbName) + + db, err = gorm.Open(postgres.Open(dsn)) + if err != nil { + err = fmt.Errorf("error connecting to database: %w", err) + return + } + + // Create test database. + if err = db.Exec( + fmt.Sprintf("CREATE DATABASE %q;", dbName), + ).Error; err != nil { + err = fmt.Errorf("error creating test database: %w", err) + return + } + + dsn = fmt.Sprintf("%s dbname=%s", dsn, dbName) + db, err = gorm.Open(postgres.Open(dsn), &gorm.Config{ + // TODO: make log mode configurable. + Logger: logger.Default.LogMode(logger.Silent), + }) + if err != nil { + err = fmt.Errorf("error connecting to test database: %w", err) + return + } + + return +} + +func DropTestDatabase(dsn, dbName string) error { + db, err := gorm.Open(postgres.Open(dsn)) + if err != nil { + return fmt.Errorf("error connecting to database: %w", err) + } + + // Drop test database. 
+ if err := db.Exec( + fmt.Sprintf("DROP DATABASE %q WITH (FORCE);", dbName), + ).Error; err != nil { + return fmt.Errorf("error dropping test database: %w", err) + } + + return nil +} diff --git a/internal/version/version.go b/internal/version/version.go new file mode 100644 index 000000000..3ee5d22f9 --- /dev/null +++ b/internal/version/version.go @@ -0,0 +1,3 @@ +package version + +const Version = "0.0.1" diff --git a/pkg/algolia/client.go b/pkg/algolia/client.go new file mode 100644 index 000000000..e39597e10 --- /dev/null +++ b/pkg/algolia/client.go @@ -0,0 +1,312 @@ +package algolia + +import ( + "fmt" + "time" + + "github.com/algolia/algoliasearch-client-go/v3/algolia/opt" + "github.com/algolia/algoliasearch-client-go/v3/algolia/search" + validation "github.com/go-ozzo/ozzo-validation/v4" +) + +// Client provides access to Hermes indexes in Algolia. +type Client struct { + *search.Client + + // Docs is an Algolia index for searching documents. + Docs *search.Index + + // DocsCreatedTimeAsc is an Algolia replica of the docs index that is sorted + // by ascending created time. + DocsCreatedTimeAsc *search.Index + + // DocsCreatedTimeDesc is an Algolia replica of the docs index that is sorted + // by descending created time. + DocsCreatedTimeDesc *search.Index + + // DocsModifiedTimeDesc is an Algolia replica of the docs index that is sorted + // by descending modified time. + DocsModifiedTimeDesc *search.Index + + // Drafts is an Algolia index for storing metadata for draft documents. + Drafts *search.Index + + // DraftsCreatedTimeAsc is an Algolia replica of the drafts index that is sorted + // by ascending created time. + DraftsCreatedTimeAsc *search.Index + + // DraftsCreatedTimeDesc is an Algolia replica of the drafts index that is sorted + // by descending created time. + DraftsCreatedTimeDesc *search.Index + + // DraftsModifiedTimeDesc is an Algolia replica of the drafts index that is sorted + // by descending modified time. 
// Config is the configuration for interacting with the Algolia API. The
// common index-name fields are required by validate(); SearchAPIKey is used
// by the search client and WriteAPIKey by the indexing client.
type Config struct {
	// ApplicationID is the Algolia Application ID.
	ApplicationID string `hcl:"application_id,optional"`

	// DocsIndexName names the Algolia index storing published document
	// metadata.
	DocsIndexName string `hcl:"docs_index_name,optional"`

	// DraftsIndexName names the Algolia index storing draft documents'
	// metadata.
	DraftsIndexName string `hcl:"drafts_index_name,optional"`

	// InternalIndexName names the Algolia index storing internal Hermes
	// metadata.
	InternalIndexName string `hcl:"internal_index_name,optional"`

	// LinksIndexName names the Algolia index storing links of documents.
	LinksIndexName string `hcl:"links_index_name,optional"`

	// MissingFieldsIndexName names the Algolia index recording fields that
	// are missing from indexed documents.
	MissingFieldsIndexName string `hcl:"missing_fields_index_name,optional"`

	// SearchAPIKey is the Algolia API key with search (read) permissions for
	// Hermes indices.
	SearchAPIKey string `hcl:"search_api_key,optional"`

	// WriteAPIKey is the Algolia API key with write permissions for Hermes
	// indices.
	WriteAPIKey string `hcl:"write_api_key,optional"`
}
+ a := search.NewClientWithConfig(search.Configuration{ + AppID: cfg.ApplicationID, + APIKey: cfg.WriteAPIKey, + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + }) + + c.Docs = a.InitIndex(cfg.DocsIndexName) + c.Drafts = a.InitIndex(cfg.DraftsIndexName) + c.Internal = a.InitIndex(cfg.InternalIndexName) + c.Links = a.InitIndex(cfg.LinksIndexName) + c.MissingFields = a.InitIndex(cfg.MissingFieldsIndexName) + + // Configure the docs index. + err := configureMainIndex(cfg.DocsIndexName, c.Docs, search.Settings{ + // Attributes + AttributesForFaceting: opt.AttributesForFaceting( + "appCreated", + "approvers", + "approvedBy", + "docType", + "owners", + "product", + "status", + "searchable(tags)", + ), + + // Highlighting/snippeting + AttributesToSnippet: opt.AttributesToSnippet( + "content:7", + ), + HighlightPostTag: opt.HighlightPostTag(""), + HighlightPreTag: opt.HighlightPreTag(``), + SnippetEllipsisText: opt.SnippetEllipsisText("..."), + + // Ranking + Replicas: opt.Replicas( + cfg.DocsIndexName+"_createdTime_asc", + cfg.DocsIndexName+"_createdTime_desc", + cfg.DocsIndexName+"_modifiedTime_desc", + ), + }) + if err != nil { + return nil, err + } + + // Configure the docs createdTime_asc, createdTime_desc, modifiedTime_desc replica. + c.DocsCreatedTimeAsc = a.InitIndex(cfg.DocsIndexName + "_createdTime_asc") + c.DocsCreatedTimeDesc = a.InitIndex(cfg.DocsIndexName + "_createdTime_desc") + c.DocsModifiedTimeDesc = a.InitIndex(cfg.DocsIndexName + "_modifiedTime_desc") + err = configureReplicaIndexes( + cfg.DocsIndexName, + c.DocsCreatedTimeAsc, + c.DocsCreatedTimeDesc, + c.DocsModifiedTimeDesc, + ) + if err != nil { + return nil, err + } + + // Configure the drafts index. 
+ err = configureMainIndex(cfg.DraftsIndexName, c.Drafts, search.Settings{ + // Attributes + AttributesForFaceting: opt.AttributesForFaceting( + "docType", + "owners", + "product", + "status", + "tags", + ), + + // Ranking + Replicas: opt.Replicas( + cfg.DraftsIndexName+"_createdTime_asc", + cfg.DraftsIndexName+"_createdTime_desc", + cfg.DraftsIndexName+"_modifiedTime_desc", + ), + }) + if err != nil { + return nil, err + } + + // Configure the drafts createdTime_asc, createdTime_desc, modifiedTime_desc replica. + c.DraftsCreatedTimeAsc = a.InitIndex(cfg.DraftsIndexName + "_createdTime_asc") + c.DraftsCreatedTimeDesc = a.InitIndex(cfg.DraftsIndexName + "_createdTime_desc") + c.DraftsModifiedTimeDesc = a.InitIndex(cfg.DraftsIndexName + "_modifiedTime_desc") + err = configureReplicaIndexes( + cfg.DraftsIndexName, + c.DraftsCreatedTimeAsc, + c.DraftsCreatedTimeDesc, + c.DraftsModifiedTimeDesc, + ) + if err != nil { + return nil, err + } + + return c, nil +} + +// configureMainIndex configures the main index with settings +func configureMainIndex(indexName string, mainIndex *search.Index, settings search.Settings) error { + res, err := mainIndex.SetSettings(settings) + if err != nil { + return fmt.Errorf("error setting settings for %s index: %w", + indexName, err) + } + err = res.Wait() + if err != nil { + return fmt.Errorf("error setting settings for %s index: %w", + indexName, err) + } + + return nil +} + +func configureReplicaIndexes( + indexName string, + createdTimeAscIndex *search.Index, + createdTimeDescIndex *search.Index, + modifiedTimeDescIndex *search.Index, +) error { + // Configure the createdTime_asc replica for index. 
+ _, err := createdTimeAscIndex.SetSettings(search.Settings{ + AttributesForFaceting: opt.AttributesForFaceting( + "docType", + "owners", + "product", + "status", + ), + + Ranking: opt.Ranking( + "asc(createdTime)", + ), + }) + if err != nil { + return fmt.Errorf( + "error setting settings for the %s createdTime_asc standard replica: %w", + indexName, err) + } + + // Configure the createdTime_desc replica for index. + _, err = createdTimeDescIndex.SetSettings(search.Settings{ + AttributesForFaceting: opt.AttributesForFaceting( + "docType", + "owners", + "product", + "status", + ), + + Ranking: opt.Ranking( + "desc(createdTime)", + ), + }) + if err != nil { + return fmt.Errorf( + "error setting settings for the %s createdTime_desc standard replica: %w", + indexName, err) + } + + // Configure the modifiedTime_desc replica for index. + _, err = modifiedTimeDescIndex.SetSettings(search.Settings{ + AttributesForFaceting: opt.AttributesForFaceting( + "status", + ), + + Ranking: opt.Ranking( + "desc(modifiedTime)", + ), + }) + if err != nil { + return fmt.Errorf( + "error setting settings for the %s modifiedTime_desc standard replica: %w", + indexName, err) + } + + return nil +} + +// NewSearchClient returns a new Algolia client for searching indices. +func NewSearchClient(cfg *Config) (*Client, error) { + if err := validate(cfg); err != nil { + return nil, fmt.Errorf("error initializing Algolia client: %q", err) + } + + c := &Client{} + + // TODO: make ReadTimeout configurable. 
+ a := search.NewClient(cfg.ApplicationID, cfg.SearchAPIKey) + + c.Docs = a.InitIndex(cfg.DocsIndexName) + c.DocsCreatedTimeAsc = a.InitIndex(cfg.DocsIndexName + "_createdTime_asc") + c.DocsCreatedTimeDesc = a.InitIndex(cfg.DocsIndexName + "_createdTime_desc") + c.DocsModifiedTimeDesc = a.InitIndex(cfg.DocsIndexName + "_modifiedTime_desc") + c.Drafts = a.InitIndex(cfg.DraftsIndexName) + c.DraftsCreatedTimeAsc = a.InitIndex(cfg.DraftsIndexName + "_createdTime_asc") + c.DraftsCreatedTimeDesc = a.InitIndex(cfg.DraftsIndexName + "_createdTime_desc") + c.DraftsModifiedTimeDesc = a.InitIndex(cfg.DraftsIndexName + "_modifiedTime_desc") + c.Internal = a.InitIndex(cfg.InternalIndexName) + c.Links = a.InitIndex(cfg.LinksIndexName) + + return c, nil +} + +// validate validates the Algolia configuration. +func validate(c *Config) error { + return validation.ValidateStruct(c, + validation.Field(&c.ApplicationID, validation.Required), + validation.Field(&c.DocsIndexName, validation.Required), + validation.Field(&c.DraftsIndexName, validation.Required), + validation.Field(&c.InternalIndexName, validation.Required), + validation.Field(&c.LinksIndexName, validation.Required), + validation.Field(&c.MissingFieldsIndexName, validation.Required), + validation.Field(&c.SearchAPIKey, validation.Required), + ) +} diff --git a/pkg/algolia/doc.go b/pkg/algolia/doc.go new file mode 100644 index 000000000..7d5a3fd19 --- /dev/null +++ b/pkg/algolia/doc.go @@ -0,0 +1,2 @@ +// Package algolia contains logic for working with Algolia. +package algolia diff --git a/pkg/algolia/proxy.go b/pkg/algolia/proxy.go new file mode 100644 index 000000000..c754ea4e8 --- /dev/null +++ b/pkg/algolia/proxy.go @@ -0,0 +1,58 @@ +package algolia + +import ( + "fmt" + "io/ioutil" + "net/http" + "time" + + "github.com/hashicorp/go-hclog" +) + +// AlgoliaProxyHandler proxies Algolia API requests from the Hermes frontend. 
+func AlgoliaProxyHandler( + c *Client, cfg *Config, log hclog.Logger) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Create HTTP request. + url := fmt.Sprintf("https://%s-dsn.algolia.net%s?%s", + c.Docs.GetAppID(), r.URL.Path, r.URL.RawQuery) + client := &http.Client{ + Timeout: time.Second * 10, + } + req, err := http.NewRequest(r.Method, url, r.Body) + if err != nil { + log.Error("error executing search request", "error", err) + http.Error(w, "Error executing search request", + http.StatusInternalServerError) + return + } + + // Add Algolia auth headers. + req.Header.Add("X-Algolia-API-Key", cfg.SearchAPIKey) + req.Header.Add("X-Algolia-Application-Id", c.Docs.GetAppID()) + + // Execute HTTP request. + resp, err := client.Do(req) + if err != nil { + log.Error("error executing search request", "error", err) + http.Error(w, "Error executing search request", + http.StatusInternalServerError) + return + } + defer resp.Body.Close() + + // Build and write HTTP response. + w.WriteHeader(resp.StatusCode) + for k, v := range resp.Header { + w.Header().Add(k, v[0]) + } + respBody, err := ioutil.ReadAll(resp.Body) + if err != nil { + log.Error("error executing search request", "error", err) + http.Error(w, "Error executing search request", + http.StatusInternalServerError) + return + } + w.Write(respBody) + }) +} diff --git a/pkg/googleworkspace/admin_helpers.go b/pkg/googleworkspace/admin_helpers.go new file mode 100644 index 000000000..fa2b289ec --- /dev/null +++ b/pkg/googleworkspace/admin_helpers.go @@ -0,0 +1,56 @@ +package googleworkspace + +/* +import ( + "fmt" + "strings" + + admin "google.golang.org/api/admin/directory/v1" +) + +// CreateGroup creates a Google Group. 
+func (s *Service) CreateGroup( + id, name, description, email string) (*admin.Group, error) { + + g := &admin.Group{ + Description: description, + Email: email, + Id: id, + Name: name, + } + + resp, err := s.Admin.Groups.Insert(g).Do() + if err != nil { + return nil, fmt.Errorf("error creating group: %w", err) + } + return resp, nil +} + +// CreateTagGroup creates a Google Group for a Hermes tag. +func (s *Service) CreateTagGroup(tag string) (*admin.Group, error) { + tag = strings.ToLower(tag) + tagID := strings.ToLower(fmt.Sprintf("hermes-josh-tag-%s", tag)) + + g, err := s.CreateGroup( + tagID, + tagID, + fmt.Sprintf("Hermes tag: \"%s\"", tag), + // TODO: make domain configurable + fmt.Sprintf("%s@hashicorp.com", tagID), + ) + if err != nil { + return nil, fmt.Errorf("error creating tag group: %w", err) + } + + return g, nil +} + +// GetGroup returns a Google Group. +func (s *Service) GetGroup(groupKey string) (*admin.Group, error) { + resp, err := s.Admin.Groups.Get(groupKey).Do() + if err != nil { + return nil, fmt.Errorf("error getting group: %w", err) + } + return resp, nil +} +*/ diff --git a/pkg/googleworkspace/doc.go b/pkg/googleworkspace/doc.go new file mode 100644 index 000000000..e7e789045 --- /dev/null +++ b/pkg/googleworkspace/doc.go @@ -0,0 +1,2 @@ +// Package googleworkspace contains logic for working with Google Workspace. +package googleworkspace diff --git a/pkg/googleworkspace/docs_helpers.go b/pkg/googleworkspace/docs_helpers.go new file mode 100644 index 000000000..76de5e621 --- /dev/null +++ b/pkg/googleworkspace/docs_helpers.go @@ -0,0 +1,113 @@ +package googleworkspace + +import ( + "fmt" + + "github.com/cenkalti/backoff/v4" + "google.golang.org/api/docs/v1" +) + +// GetDoc gets a Google Doc. 
+func (s *Service) GetDoc(id string) (*docs.Document, error) { + var ( + d *docs.Document + err error + ) + + op := func() error { + d, err = s.Docs.Documents.Get(id).Do() + if err != nil { + return err + } + + return nil + } + + boErr := backoff.Retry(op, backoff.NewExponentialBackOff()) + if boErr != nil { + return nil, boErr + } + + return d, nil +} + +// GetLinkURLs returns all link URLs in a Google Doc Body. +func GetLinkURLs(b *docs.Body) []string { + var urls []string + structelems := b.Content + + for _, elem := range structelems { + if para := elem.Paragraph; para != nil { + if elems := para.Elements; elems != nil { + for _, e := range elems { + if tr := e.TextRun; tr != nil { + if ts := tr.TextStyle; ts != nil { + if link := ts.Link; link != nil { + urls = append(urls, link.Url) + } + } + } + } + } + } + } + + return urls +} + +// GetTables returns all tables in a Google Doc Body. +func GetTables(b *docs.Body) []*docs.Table { + var tables []*docs.Table + elems := b.Content + + for _, e := range elems { + if e.Table != nil { + tables = append(tables, e.Table) + } + } + + return tables +} + +// ReplaceText replaces text in a Google Doc. Provide a map of replacement tags +// (denoted as "{{tag}}" in the doc) to replacement text as the second +// parameter. +func (s *Service) ReplaceText(id string, r map[string]string) error { + // Initialize request. + req := &docs.BatchUpdateDocumentRequest{ + Requests: []*docs.Request{}, + } + + // Build text replacement requests. + for k, v := range r { + req.Requests = append(req.Requests, &docs.Request{ + ReplaceAllText: &docs.ReplaceAllTextRequest{ + ContainsText: &docs.SubstringMatchCriteria{ + MatchCase: true, + Text: fmt.Sprintf("{{%s}}", k), + }, + ReplaceText: v, + }}) + } + + _, err := s.Docs.Documents.BatchUpdate(id, req). 
+ Do() + if err != nil { + return fmt.Errorf("error executing document batch update: %w", err) + } + return nil +} + +// VisitAllTableParagraphs visits all paragraphs in a Google Doc table, calling +// fn for each. +func VisitAllTableParagraphs(t *docs.Table, fn func(p *docs.Paragraph)) { + for _, row := range t.TableRows { + for _, cell := range row.TableCells { + for _, content := range cell.Content { + if para := content.Paragraph; para != nil { + fn(para) + } + } + } + } +} diff --git a/pkg/googleworkspace/drive_helpers.go b/pkg/googleworkspace/drive_helpers.go new file mode 100644 index 000000000..2007a0b33 --- /dev/null +++ b/pkg/googleworkspace/drive_helpers.go @@ -0,0 +1,391 @@ +package googleworkspace + +import ( + "fmt" + "strings" + + "google.golang.org/api/drive/v3" + "google.golang.org/api/googleapi" +) + +const ( + fileFields = "id, lastModifyingUser, modifiedTime, name, parents, thumbnailLink" +) + +// CopyFile copies a Google Drive file. +func (s *Service) CopyFile( + fileID, name, destFolder string) (*drive.File, error) { + + f := &drive.File{ + Name: name, + Parents: []string{destFolder}, + } + + resp, err := s.Drive.Files.Copy(fileID, f). + Fields("*"). + SupportsAllDrives(true). + Do() + if err != nil { + return nil, fmt.Errorf("error copying file: %w", err) + } + return resp, nil +} + +// CreateFolder creates a Google Drive folder. +func (s *Service) CreateFolder( + folderName, destFolder string) (*drive.File, error) { + + // Validate inputs. + if folderName == "" { + return nil, fmt.Errorf("folder name is required") + } + if destFolder == "" { + return nil, fmt.Errorf("destination folder is required") + } + + f := &drive.File{ + Name: folderName, + MimeType: "application/vnd.google-apps.folder", + Parents: []string{destFolder}, + } + + resp, err := s.Drive.Files.Create(f). + Fields("id,mimeType,name,parents"). + SupportsAllDrives(true). 
+ Do() + if err != nil { + return nil, err + } + + return resp, nil +} + +// CreateShortcut creates a Google Drive shortcut. +func (s *Service) CreateShortcut( + targetFileID, destFolder string) (*drive.File, error) { + + // Validate inputs. + if targetFileID == "" { + return nil, fmt.Errorf("target file ID is required") + } + if destFolder == "" { + return nil, fmt.Errorf("destination folder is required") + } + + target, err := s.GetFile(targetFileID) + if err != nil { + return nil, fmt.Errorf("error getting target file: %w", err) + } + + f := &drive.File{ + Name: target.Name, + MimeType: "application/vnd.google-apps.shortcut", + Parents: []string{destFolder}, + ShortcutDetails: &drive.FileShortcutDetails{ + TargetId: targetFileID, + }, + } + + resp, err := s.Drive.Files.Create(f). + Fields("id,mimeType,name,parents,shortcutDetails"). + SupportsAllDrives(true). + Do() + if err != nil { + return nil, err + } + + return resp, nil +} + +// GetDocs returns all docs in a Google Drive folder. +func (s *Service) GetDocs(folderID string) ([]*drive.File, error) { + return s.GetFiles(folderID, "application/vnd.google-apps.document") +} + +// GetFile returns a Google Drive file. +func (s *Service) GetFile(fileID string) (*drive.File, error) { + resp, err := s.Drive.Files.Get(fileID). + Fields(fileFields). + SupportsAllDrives(true). + Do() + if err != nil { + return nil, fmt.Errorf("error getting file: %w", err) + } + return resp, nil +} + +// GetFiles returns all files in a Google Drive folder. +func (s *Service) GetFiles(folderID, mimeType string) ([]*drive.File, error) { + query := fmt.Sprintf("'%s' in parents"+ + " and mimeType = '%s'"+ + " and trashed = false", + folderID, mimeType) + return s.ListFiles(folderID, query) +} + +// GetDocs returns all folders in a Google Drive folder. 
+func (s *Service) GetFolders(folderID string) ([]*drive.File, error) {
+	return s.GetFiles(folderID, "application/vnd.google-apps.folder")
+}
+
+// GetFoldersRecursive returns all folders and recursively all subfolders in a
+// Google Drive folder.
+func (s *Service) GetFoldersRecursive(folderID string) ([]*drive.File, error) {
+	folders, err := s.GetFolders(folderID)
+	if err != nil {
+		return nil, fmt.Errorf("error getting folders: %w", err)
+	}
+
+	// Depth-first expansion: subfolders are appended to the slice being
+	// ranged over only via the recursive call's return value, so the loop
+	// itself only visits the direct children of folderID.
+	for _, f := range folders {
+		subFolders, err := s.GetFoldersRecursive(f.Id)
+		if err != nil {
+			return nil, fmt.Errorf("error getting subfolders: %w", err)
+		}
+		folders = append(folders, subFolders...)
+	}
+
+	return folders, nil
+}
+
+// GetLatestRevision returns the latest revision for a Google Drive file.
+func (s *Service) GetLatestRevision(fileID string) (*drive.Revision, error) {
+	revs, err := s.ListRevisions(fileID)
+	if err != nil {
+		return nil, fmt.Errorf("error listing revisions: %w", err)
+	}
+
+	if len(revs) == 0 {
+		return nil, fmt.Errorf("no revisions found")
+	}
+
+	// ListRevisions returns revisions in ascending order, so the last
+	// element is the most recent.
+	return revs[len(revs)-1], nil
+}
+
+// GetSubfolder returns the subfolder file if the specified folder contains a
+// subfolder with the specified name, and nil if not found.
+func (s *Service) GetSubfolder(
+	folderID, subfolderName string) (*drive.File, error) {
+
+	subfolders, err := s.GetFolders(folderID)
+	if err != nil {
+		return nil, fmt.Errorf("error getting subfolders: %w", err)
+	}
+
+	for _, f := range subfolders {
+		if f.Name == subfolderName {
+			// First match wins if multiple subfolders share the name.
+			return f, nil
+		}
+	}
+	// Not found is not an error: callers distinguish via the nil file.
+	return nil, nil
+}
+
+// GetUpdatedDocs returns all docs in a Google Drive folder that have been
+// modified after a provided timestamp.
+func (s *Service) GetUpdatedDocs( + folderID, timestamp string) ([]*drive.File, error) { + return s.GetUpdatedFiles(folderID, "application/vnd.google-apps.document", + timestamp) +} + +// GetUpdatedDocsBetween returns all docs in a Google Drive folder that have +// been modified after a provided timestamp string (in RFC 3339 date-time) +// "afterTime" and before (or equal to) the timestamp string "beforeTime". +func (s *Service) GetUpdatedDocsBetween( + folderID, afterTime, beforeTime string) ([]*drive.File, error) { + query := fmt.Sprintf("'%s' in parents"+ + " and mimeType = 'application/vnd.google-apps.document'"+ + " and trashed = false"+ + " and modifiedTime > '%s'"+ + " and modifiedTime <= '%s'", + folderID, afterTime, beforeTime) + return s.ListFiles(folderID, query) +} + +// GetUpdatedFiles returns all files in a Google Drive folder that have been +// modified after a provided timestamp. +func (s *Service) GetUpdatedFiles( + folderID, mimeType, timestamp string) ([]*drive.File, error) { + query := fmt.Sprintf("'%s' in parents"+ + " and mimeType = '%s'"+ + " and trashed = false"+ + " and modifiedTime > '%s'", + folderID, mimeType, timestamp) + return s.ListFiles(folderID, query) +} + +// KeepRevisionForever keeps a Google Drive file revision forever. +func (s *Service) KeepRevisionForever( + fileID, revisionID string) (*drive.Revision, error) { + + resp, err := s.Drive.Revisions.Update(fileID, revisionID, &drive.Revision{ + KeepForever: true, + }). + Fields("keepForever"). + Do() + if err != nil { + return nil, err + } + return resp, nil +} + +// ListFiles lists files in a Google Drive folder using the provided query. +func (s *Service) ListFiles(folderID, query string) ([]*drive.File, error) { + var files []*drive.File + var nextPageToken string + + for { + call := s.Drive.Files.List(). + Fields(googleapi.Field(fmt.Sprintf("files(%s), nextPageToken", fileFields))). + IncludeItemsFromAllDrives(true). + PageSize(20). + Q(query). 
+ SupportsAllDrives(true) + if nextPageToken != "" { + call = call.PageToken(nextPageToken) + } + resp, err := call.Do() + if err != nil { + return nil, fmt.Errorf("error listing files: %w", err) + } + files = append(files, resp.Files...) + + nextPageToken = resp.NextPageToken + if nextPageToken == "" { + break + } + } + + return files, nil +} + +// ListRevisions lists revisions for a Google Drive file. +func (s *Service) ListRevisions(fileID string) ([]*drive.Revision, error) { + var revisions []*drive.Revision + var nextPageToken string + + for { + call := s.Drive.Revisions.List(fileID). + Fields("*"). + PageSize(20) + if nextPageToken != "" { + call = call.PageToken(nextPageToken) + } + resp, err := call.Do() + if err != nil { + return nil, err + } + revisions = append(revisions, resp.Revisions...) + + nextPageToken = resp.NextPageToken + if nextPageToken == "" { + break + } + } + + return revisions, nil +} + +// MoveFile moves a Google Drive file to a different folder. +func (s *Service) MoveFile(fileID, destFolder string) (*drive.File, error) { + if destFolder == "" { + return nil, fmt.Errorf("destination folder cannot be empty") + } + + f, err := s.GetFile(fileID) + if err != nil { + return nil, fmt.Errorf("error getting file: %w", err) + } + + resp, err := s.Drive.Files.Update(fileID, &drive.File{}). + AddParents(destFolder). + RemoveParents(strings.Join(f.Parents[:], ",")). + Fields("parents"). + SupportsAllDrives(true). + Do() + if err != nil { + return nil, fmt.Errorf("error updating file: %w", err) + } + return resp, nil +} + +// RenameFile renames a Google Drive file. +func (s *Service) RenameFile(fileID, newName string) error { + _, err := s.Drive.Files.Update(fileID, &drive.File{ + Name: newName, + }). + SupportsAllDrives(true). + Do() + if err != nil { + return fmt.Errorf("error updating file: %w", err) + } + return nil +} + +// ShareFile shares a Google Drive file with a user. 
+func (s *Service) ShareFile( + fileID, email, role string) error { + + _, err := s.Drive.Permissions.Create(fileID, + &drive.Permission{ + EmailAddress: email, + Role: role, + Type: "user", + }). + SupportsAllDrives(true). + Do() + if err != nil { + return fmt.Errorf("error updating file permissions: %w", err) + } + return nil +} + +// ListPermissions lists permissions for a Google Drive file. +func (s *Service) ListPermissions(fileID string) ([]*drive.Permission, error) { + var permissions []*drive.Permission + var nextPageToken string + + for { + call := s.Drive.Permissions.List(fileID). + Fields("*"). + PageSize(20). + SupportsAllDrives(true) + if nextPageToken != "" { + call = call.PageToken(nextPageToken) + } + resp, err := call.Do() + if err != nil { + return nil, err + } + permissions = append(permissions, resp.Permissions...) + + nextPageToken = resp.NextPageToken + if nextPageToken == "" { + break + } + } + + return permissions, nil +} + +// DeleteFile deletes a Google Drive file +func (s *Service) DeleteFile(fileID string) error { + err := s.Drive.Files.Delete(fileID). + SupportsAllDrives(true). + Do() + if err != nil { + return fmt.Errorf("error deleting file: %w", err) + } + return nil +} + +// DeletePermission deletes a permission for a Google Drive file. +func (s *Service) DeletePermission( + fileID, permissionID string) error { + err := s.Drive.Permissions.Delete(fileID, permissionID). + SupportsAllDrives(true). + Do() + if err != nil { + return fmt.Errorf("error deleting permission: %w", err) + } + return nil +} diff --git a/pkg/googleworkspace/gmail_helpers.go b/pkg/googleworkspace/gmail_helpers.go new file mode 100644 index 000000000..d5c3ef0bf --- /dev/null +++ b/pkg/googleworkspace/gmail_helpers.go @@ -0,0 +1,25 @@ +package googleworkspace + +import ( + "encoding/base64" + "fmt" + "strings" + + "google.golang.org/api/gmail/v1" +) + +// SendEmail sends an email. 
+func (s *Service) SendEmail(to []string, from, subject, body string) (*gmail.Message, error) { + email := fmt.Sprintf("To: %s\r\nFrom: %s\r\nContent-Type: text/html; charset=UTF-8\r\nSubject: %s\r\n\r\n%s\r\n", + strings.Join(to, ","), from, subject, body) + + msg := &gmail.Message{ + Raw: base64.URLEncoding.EncodeToString([]byte(email)), + } + + resp, err := s.Gmail.Users.Messages.Send("me", msg).Do() + if err != nil { + return nil, fmt.Errorf("error sending email: %w", err) + } + return resp, nil +} diff --git a/pkg/googleworkspace/oauth2_helpers.go b/pkg/googleworkspace/oauth2_helpers.go new file mode 100644 index 000000000..4a095b756 --- /dev/null +++ b/pkg/googleworkspace/oauth2_helpers.go @@ -0,0 +1,20 @@ +package googleworkspace + +import ( + "google.golang.org/api/oauth2/v2" +) + +// ValidateAccessToken validates a Google access token and returns the token +// info. +func (s *Service) ValidateAccessToken( + accessToken string) (*oauth2.Tokeninfo, error) { + + resp, err := s.OAuth2.Tokeninfo(). + AccessToken(accessToken). + Fields("*"). + Do() + if err != nil { + return nil, err + } + return resp, nil +} diff --git a/pkg/googleworkspace/people_helpers.go b/pkg/googleworkspace/people_helpers.go new file mode 100644 index 000000000..396f2bc5d --- /dev/null +++ b/pkg/googleworkspace/people_helpers.go @@ -0,0 +1,51 @@ +package googleworkspace + +import ( + "github.com/cenkalti/backoff/v4" + "google.golang.org/api/people/v1" +) + +// SearchPeople searches the Google People API. +func (s *Service) SearchPeople(query string) ([]*people.Person, error) { + + var ( + call *people.PeopleSearchDirectoryPeopleCall + err error + nextPageToken string + ret []*people.Person + resp *people.SearchDirectoryPeopleResponse + ) + + op := func() error { + resp, err = call.Do() + if err != nil { + return err + } + + return nil + } + + for { + call = s.People.SearchDirectoryPeople().Query(query). + ReadMask("photos"). 
+ Sources("DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE") + + if nextPageToken != "" { + call = call.PageToken(nextPageToken) + } + + boErr := backoff.Retry(op, backoff.NewExponentialBackOff()) + if boErr != nil { + return nil, boErr + } + + ret = append(ret, resp.People...) + + nextPageToken = resp.NextPageToken + if nextPageToken == "" { + break + } + } + + return ret, nil +} diff --git a/pkg/googleworkspace/service.go b/pkg/googleworkspace/service.go new file mode 100644 index 000000000..a2bd952db --- /dev/null +++ b/pkg/googleworkspace/service.go @@ -0,0 +1,234 @@ +package googleworkspace + +import ( + "context" + "encoding/json" + "fmt" + "io/ioutil" + "log" + "net/http" + "os" + + "golang.org/x/oauth2" + "golang.org/x/oauth2/google" + "golang.org/x/oauth2/jwt" + + "github.com/pkg/browser" + // admin "google.golang.org/api/admin/directory/v1" + "google.golang.org/api/docs/v1" + "google.golang.org/api/drive/v3" + "google.golang.org/api/gmail/v1" + oauth2api "google.golang.org/api/oauth2/v2" + "google.golang.org/api/option" + "google.golang.org/api/people/v1" +) + +// Service provides access to the Google Workspace API. +type Service struct { + // Admin *admin.Service + Docs *docs.Service + Drive *drive.Service + Gmail *gmail.Service + OAuth2 *oauth2api.Service + People *people.PeopleService +} + +// Config is the configuration for interacting with Google Workspace using a +// service account. +type Config struct { + ClientEmail string `hcl:"client_email,optional"` + PrivateKey string `hcl:"private_key,optional"` + Subject string `hcl:"subject,optional"` + TokenURL string `hcl:"token_url,optional"` +} + +// New returns a service with the required Google Workspace access for +// Hermes. 
+func NewFromConfig(cfg *Config) *Service {
+	conf := &jwt.Config{
+		Email:      cfg.ClientEmail,
+		PrivateKey: []byte(cfg.PrivateKey),
+		Scopes: []string{
+			// "https://www.googleapis.com/auth/admin.directory.group",
+			"https://www.googleapis.com/auth/directory.readonly",
+			"https://www.googleapis.com/auth/documents",
+			"https://www.googleapis.com/auth/drive",
+			"https://www.googleapis.com/auth/gmail.send",
+		},
+		Subject:  cfg.Subject,
+		TokenURL: cfg.TokenURL,
+	}
+	client := conf.Client(context.TODO())
+
+	// adminSrv, err := admin.NewService(context.TODO(), option.WithHTTPClient(client))
+	// if err != nil {
+	// 	log.Fatalf("Unable to retrieve Admin client: %v", err)
+	// }
+	docSrv, err := docs.NewService(context.TODO(), option.WithHTTPClient(client))
+	if err != nil {
+		log.Fatalf("Unable to retrieve Docs client: %v", err)
+	}
+	driveSrv, err := drive.NewService(context.TODO(), option.WithHTTPClient(client))
+	if err != nil {
+		log.Fatalf("Unable to retrieve Drive client: %v", err)
+	}
+	gmailSrv, err := gmail.NewService(context.TODO(), option.WithHTTPClient(client))
+	if err != nil {
+		log.Fatalf("Unable to retrieve Gmail client: %v", err)
+	}
+	oAuth2Srv, err := oauth2api.NewService(context.TODO(), option.WithHTTPClient(client))
+	if err != nil {
+		log.Fatalf("Unable to retrieve OAuth2 client: %v", err)
+	}
+	peopleSrv, err := people.NewService(context.TODO(), option.WithHTTPClient(client))
+	if err != nil {
+		log.Fatalf("Unable to retrieve Google People client: %v", err)
+	}
+	peoplePeopleSrv := people.NewPeopleService(peopleSrv)
+
+	return &Service{
+		// Admin: adminSrv,
+		Docs:   docSrv,
+		Drive:  driveSrv,
+		Gmail:  gmailSrv,
+		OAuth2: oAuth2Srv,
+		People: peoplePeopleSrv,
+	}
+}
+
+// NOTE: the code below this line was largely copied from the Google Docs Go
+// Quickstart (https://developers.google.com/docs/api/quickstart/go) and will
+// be replaced to use a service account.
+ +// New reads Google API credentials and returns a service with the required +// access for Hermes. +func New() *Service { + b, err := ioutil.ReadFile("credentials.json") + if err != nil { + log.Fatalf("Unable to read client secret file: %v", err) + } + + // If modifying these scopes, delete your previously saved token.json. + gc, err := google.ConfigFromJSON(b, + // "https://www.googleapis.com/auth/admin.directory.group", + "https://www.googleapis.com/auth/directory.readonly", + "https://www.googleapis.com/auth/documents", + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/gmail.send") + if err != nil { + log.Fatalf("Unable to parse client secret file to config: %v", err) + } + client := getClient(gc) + + // adminSrv, err := admin.NewService(context.TODO(), option.WithHTTPClient(client)) + // if err != nil { + // log.Fatalf("Unable to retrieve Admin client: %v", err) + // } + docSrv, err := docs.NewService(context.TODO(), option.WithHTTPClient(client)) + if err != nil { + log.Fatalf("Unable to retrieve Google Docs client: %v", err) + } + driveSrv, err := drive.NewService(context.TODO(), option.WithHTTPClient(client)) + if err != nil { + log.Fatalf("Unable to retrieve Google Drive client: %v", err) + } + gmailSrv, err := gmail.NewService(context.TODO(), option.WithHTTPClient(client)) + if err != nil { + log.Fatalf("Unable to retrieve Drive client: %v", err) + } + oAuth2Srv, err := oauth2api.NewService(context.TODO(), option.WithHTTPClient(client)) + if err != nil { + log.Fatalf("Unable to retrieve OAuth2 client: %v", err) + } + peopleSrv, err := people.NewService(context.TODO(), option.WithHTTPClient(client)) + if err != nil { + log.Fatalf("Unable to retrieve Google People client: %v", err) + } + peoplePeopleSrv := people.NewPeopleService(peopleSrv) + + return &Service{ + // Admin: adminSrv, + Docs: docSrv, + Drive: driveSrv, + Gmail: gmailSrv, + OAuth2: oAuth2Srv, + People: peoplePeopleSrv, + } +} + +// Retrieves a token, saves the 
token, then returns the generated client. +func getClient(config *oauth2.Config) *http.Client { + tokFile := "token.json" + tok, err := tokenFromFile(tokFile) + if err != nil || !tok.Valid() { + log.Printf("token doesn't exist or is expired, so getting a new one...") + tok = getTokenFromWeb(config) + saveToken(tokFile, tok) + } + return config.Client(context.Background(), tok) +} + +// Requests a token from the web, then returns the retrieved token. +func getTokenFromWeb(config *oauth2.Config) *oauth2.Token { + var authCode string + + m := http.NewServeMux() + // TODO: remove hardcoded port. + s := http.Server{Addr: ":9999", Handler: m} + config.RedirectURL = "http://localhost:9999/callback" + + m.HandleFunc("/callback", func(w http.ResponseWriter, r *http.Request) { + // Get authorization code from request. + authCode = r.FormValue("code") + + // Write response. + w.WriteHeader(http.StatusOK) + w.Write([]byte("The token has been recorded and this window can be closed.")) + + // Shutdown server in a goroutine so it doesn't shutdown before writing a + // response. + go func() { + if err := s.Shutdown(context.Background()); err != nil { + log.Fatal("error shutting down server: %w", err) + } + }() + }) + + authURL := config.AuthCodeURL("state-token") + fmt.Printf("Go to the following link in your browser and authorize the app:"+ + "\n%v\n", authURL) + browser.OpenURL(authURL) + + if err := s.ListenAndServe(); err != http.ErrServerClosed { + log.Fatal("error starting listener: %w", err) + } + + tok, err := config.Exchange(context.Background(), authCode) + if err != nil { + log.Fatalf("Unable to retrieve token from web: %v", err) + } + return tok +} + +// Saves a token to a file path. 
+func saveToken(path string, token *oauth2.Token) { + fmt.Printf("Saving credential file to: %s\n", path) + f, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600) + if err != nil { + log.Fatalf("Unable to cache OAuth token: %v", err) + } + defer f.Close() + json.NewEncoder(f).Encode(token) +} + +// Retrieves a token from a local file. +func tokenFromFile(file string) (*oauth2.Token, error) { + f, err := os.Open(file) + if err != nil { + return nil, err + } + defer f.Close() + tok := &oauth2.Token{} + err = json.NewDecoder(f).Decode(tok) + return tok, err +} diff --git a/pkg/hashicorpdocs/basedoc.go b/pkg/hashicorpdocs/basedoc.go new file mode 100644 index 000000000..4f0b3ba23 --- /dev/null +++ b/pkg/hashicorpdocs/basedoc.go @@ -0,0 +1,189 @@ +package hashicorpdocs + +// BaseDoc contains common document metadata fields used by Hermes. +type BaseDoc struct { + // ObjectID is the Google Drive file ID for the document. + ObjectID string `json:"objectID,omitempty"` + + // Title is the title of the document. It does not contain the document number + // (e.g., "TF-123"). + Title string `json:"title,omitempty"` + + // DocType is the type of document (e.g., "RFC", "PRD"). + DocType string `json:"docType,omitempty"` + + // DocNumber is a unique document identifier containing a product/area + // abbreviation and a unique number (e.g., "TF-123"). + DocNumber string `json:"docNumber,omitempty"` + + // AppCreated should be set to true if the document was created through this + // application, and false if created directly in Google Docs and indexed + // afterwards. + AppCreated bool `json:"appCreated,omitempty"` + + // ApprovedBy is a slice of email address strings for users that have approved + // the document. + ApprovedBy []string `json:"approvedBy,omitempty"` + + // Approvers is a slice of email address strings for users whose approvals + // are requested for the document. 
+ Approvers []string `json:"approvers,omitempty"` + + // ChangesRequestedBy is a slice of email address strings for users that have + // requested changes for the document. + ChangesRequestedBy []string `json:"changesRequestedBy,omitempty"` + + // Contributors is a slice of email address strings for users who have + // contributed to the document. + Contributors []string `json:"contributors,omitempty"` + + // Content is the plaintext content of the document. + Content string `json:"content,omitempty"` + + // Created is the UTC time of document creation, in a RFC 3339 string format. + Created string `json:"created,omitempty"` + + // CreatedTime is the time of document creation, in Unix time. + CreatedTime int64 `json:"createdTime,omitempty"` + + // CustomEditableFields are all document-type-specific fields that are + // editable. + CustomEditableFields map[string]CustomDocTypeField `json:"customEditableFields,omitempty"` + + // FileRevisions is a map of file revision IDs to custom names. + FileRevisions map[string]string `json:"fileRevisions,omitempty"` + + // TODO: LinkedDocs is not used yet. + LinkedDocs []string `json:"linkedDocs,omitempty"` + + // MetaTags contains metadata tags that can be used for filtering in Algolia. + MetaTags []string `json:"_tags,omitempty"` + + // Created is the time that the document was last modified, in Unix time. + ModifiedTime int64 `json:"modifiedTime,omitempty"` + + // Owners is a slice of email address strings for document owners. Hermes + // generally only uses the first element as the document owner, but this is a + // slice for historical reasons as some HashiCorp documents have had multiple + // owners in the past. + Owners []string `json:"owners,omitempty"` + + // OwnerPhotos is a slice of URL strings for the profile photos of the + // document owners (in the same order as the Owners field). + OwnerPhotos []string `json:"ownerPhotos,omitempty"` + + // Product is the product or area that the document relates to. 
+ Product string `json:"product,omitempty"` + + // Summary is a summary of the document. + Summary string `json:"summary,omitempty"` + + // Status is the status of the document (e.g., "WIP", "In-Review", "Approved", + // "Obsolete"). + Status string `json:"status,omitempty"` + + // Tags is a slice of tags to help users discover the document based on their + // interests. + Tags []string `json:"tags,omitempty"` + + // ThumbnailLink is a URL string for the document thumbnail image. + ThumbnailLink string `json:"thumbnailLink,omitempty"` +} + +func (d *BaseDoc) DeleteFileRevision(revisionID string) { + delete(d.FileRevisions, revisionID) +} + +func (d BaseDoc) GetApprovedBy() []string { + return d.ApprovedBy +} + +func (d BaseDoc) GetApprovers() []string { + return d.Approvers +} + +func (d BaseDoc) GetChangesRequestedBy() []string { + return d.ChangesRequestedBy +} + +func (d BaseDoc) GetContributors() []string { + return d.Contributors +} + +func (d BaseDoc) GetCreatedTime() int64 { + return d.CreatedTime +} + +func (d BaseDoc) GetDocNumber() string { + return d.DocNumber +} + +func (d BaseDoc) GetDocType() string { + return d.DocType +} + +func (d BaseDoc) GetMetaTags() []string { + return d.MetaTags +} + +func (d BaseDoc) GetObjectID() string { + return d.ObjectID +} + +func (d BaseDoc) GetOwners() []string { + return d.Owners +} + +func (d BaseDoc) GetModifiedTime() int64 { + return d.ModifiedTime +} + +func (d BaseDoc) GetProduct() string { + return d.Product +} + +func (d BaseDoc) GetStatus() string { + return d.Status +} + +func (d BaseDoc) GetSummary() string { + return d.Summary +} + +func (d BaseDoc) GetTitle() string { + return d.Title +} + +func (d *BaseDoc) SetApprovedBy(s []string) { + d.ApprovedBy = s +} + +func (d *BaseDoc) SetChangesRequestedBy(s []string) { + d.ChangesRequestedBy = s +} + +func (d *BaseDoc) SetContent(s string) { + d.Content = s +} + +func (d *BaseDoc) SetDocNumber(s string) { + d.DocNumber = s +} + +func (d *BaseDoc) 
SetFileRevision(revisionID, revisionName string) { + if d.FileRevisions == nil { + d.FileRevisions = map[string]string{ + revisionID: revisionName, + } + } else { + d.FileRevisions[revisionID] = revisionName + } +} + +func (d *BaseDoc) SetModifiedTime(i int64) { + d.ModifiedTime = i +} + +func (d *BaseDoc) SetStatus(s string) { + d.Status = s +} diff --git a/pkg/hashicorpdocs/common.go b/pkg/hashicorpdocs/common.go new file mode 100644 index 000000000..e90f15d8b --- /dev/null +++ b/pkg/hashicorpdocs/common.go @@ -0,0 +1,123 @@ +package hashicorpdocs + +import ( + "fmt" + "strings" + + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + "google.golang.org/api/drive/v3" +) + +const ( + // MaxDocSize is the maximum size of a doc's content in bytes. If the doc is + // larger than this, its content will be trimmed to this length. + // Algolia has a hard limit of 100000 bytes total per record. + MaxDocSize = 85000 +) + +type Doc interface { + DeleteFileRevision(string) + + // Getters for fields common to all document types. + GetApprovedBy() []string + GetApprovers() []string + GetChangesRequestedBy() []string + GetContributors() []string + GetCreatedTime() int64 + GetDocNumber() string + GetDocType() string + GetMetaTags() []string + GetModifiedTime() int64 + GetObjectID() string + GetOwners() []string + GetProduct() string + GetStatus() string + GetSummary() string + GetTitle() string + + MissingFields() []string + ReplaceHeader(fileID, baseURL string, isDraft bool, s *gw.Service) error + + // Setters for fields common to all document types. 
+ SetApprovedBy([]string) + SetChangesRequestedBy([]string) + SetContent(s string) + SetDocNumber(string) + SetFileRevision(string, string) + SetModifiedTime(int64) + SetStatus(string) + + GetCustomEditableFields() map[string]CustomDocTypeField + SetCustomEditableFields() +} + +var ValidCustomDocTypeFieldTypes = []string{ + "PEOPLE", + "STRING", +} + +type CustomDocTypeField struct { + // DisplayName is the display name of the custom document-type field. + DisplayName string `json:"displayName"` + + // Type is the type of the custom document-type field. It is used by the + // frontend to display the proper input component. + // Valid values: "PEOPLE", "STRING". + Type string `json:"type"` +} + +type MissingFields struct { + ObjectID string `json:"objectID,omitempty"` + MissingFields []string `json:"missingFields,omitempty"` +} + +// NewEmptyDoc returns an empty doc struct for the provided doc type. +func NewEmptyDoc(docType string) (Doc, error) { + switch docType { + case "FRD": + return &FRD{}, nil + case "RFC": + return &RFC{}, nil + case "PRD": + return &PRD{}, nil + default: + return nil, fmt.Errorf("invalid doc type") + } +} + +// ParseDoc parses and returns a known document type, associated product name, +// document number or returns an error if the type is unknown. +func ParseDoc( + docType string, + f *drive.File, + s *gw.Service, + allFolders []string) (Doc, error) { + + // TODO: Add a Parse() function to the Doc interface to make this more + // extensible and not have to address all doc types here. 
+ switch strings.ToLower(docType) { + case "frd": + r, err := NewFRD(f, s, allFolders) + if err != nil { + return nil, fmt.Errorf("error parsing FRD: %w", err) + } + return r, nil + + case "rfc": + r, err := NewRFC(f, s, allFolders) + if err != nil { + return nil, fmt.Errorf("error parsing RFC: %w", err) + } + return r, nil + + case "prd": + p, err := NewPRD(f, s, allFolders) + if err != nil { + return nil, fmt.Errorf("error parsing PRD: %w", err) + } + return p, nil + + default: + return nil, fmt.Errorf("unknown doc type: %s", docType) + } +} diff --git a/pkg/hashicorpdocs/doc.go b/pkg/hashicorpdocs/doc.go new file mode 100644 index 000000000..2841803b8 --- /dev/null +++ b/pkg/hashicorpdocs/doc.go @@ -0,0 +1,3 @@ +// Package hashicorpdocs contains helpers for working with HashiCorp's document +// templates. +package hashicorpdocs diff --git a/pkg/hashicorpdocs/frd.go b/pkg/hashicorpdocs/frd.go new file mode 100644 index 000000000..319895a00 --- /dev/null +++ b/pkg/hashicorpdocs/frd.go @@ -0,0 +1,346 @@ +package hashicorpdocs + +import ( + "fmt" + "io" + "reflect" + "regexp" + "strings" + "time" + + "github.com/araddon/dateparse" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + "google.golang.org/api/docs/v1" + "google.golang.org/api/drive/v3" +) + +// FRD contains metadata for documents based off of the HashiCorp FRD template. +type FRD struct { + BaseDoc `mapstructure:",squash"` + + // PRD is the associated PRD. + PRD string `json:"prd,omitempty"` + + // PRFAQ is the associated PRFAQ. + PRFAQ string `json:"prfaq,omitempty"` +} + +func (d FRD) GetCustomEditableFields() map[string]CustomDocTypeField { + return map[string]CustomDocTypeField{ + "prd": { + DisplayName: "PRD", + Type: "STRING", + }, + "prfaq": { + DisplayName: "PRDFAQ", + Type: "STRING", + }, + } +} + +func (d *FRD) SetCustomEditableFields() { + d.CustomEditableFields = d.GetCustomEditableFields() +} + +// MissingFields returns the missing fields of the doc struct. 
+func (d FRD) MissingFields() []string { + var missingFields []string + + rfcType := reflect.TypeOf(d) + for i := 0; i < rfcType.NumField(); i++ { + f := rfcType.Field(i) + val := reflect.ValueOf(d).FieldByName(f.Name) + if val.IsZero() { + missingFields = append(missingFields, f.Name) + } else if f.Type.Kind() == reflect.Slice && val.Len() == 0 { + // Parsing docs may generate an empty slice instead of nil zero value. + missingFields = append(missingFields, f.Name) + } + } + + return missingFields +} + +// NewFRD parses a Google Drive file based on the HashiCorp FRD template and +// returns the resulting FRD struct. +func NewFRD(f *drive.File, s *gw.Service, allFolders []string) (*FRD, error) { + r := &FRD{ + BaseDoc: BaseDoc{ + ObjectID: f.Id, + Title: f.Name, + DocType: "FRD", + ThumbnailLink: f.ThumbnailLink, + }, + } + + // Parse title and doc number. + r.parseFRDTitle(f.Name) + + // Convert modified time to Unix time so it can be sorted in Algolia. + mt, err := time.Parse(time.RFC3339, f.ModifiedTime) + if err != nil { + return nil, fmt.Errorf("error parsing modified time: %w: id=\"%s\"", + err, f.Id) + } + r.ModifiedTime = mt.Unix() + + doc, err := s.Docs.Documents.Get(f.Id).Do() + if err != nil { + return nil, fmt.Errorf("error getting doc: %w: id=\"%s\"", err, f.Id) + } + + // Assume the name of the parent folder is the FRD Product. + parent, err := s.Drive.Files.Get(f.Parents[0]). + SupportsAllDrives(true).Fields("name").Do() + if err != nil { + return nil, fmt.Errorf("error getting parent folder file: %w", err) + } + r.Product = parent.Name + + r.parseFRDSummary(doc.Body) + + // Parse FRD header for metadata. + r.parseFRDHeader(doc) + + // Parse all linked documents. + // TODO: use allFolders parameter and enable this. + // r.LinkedDocs = gw.GetLinkURLs(doc.Body) + + // Get owner photos by searching Google Workspace directory for owner strings. + for i, o := range r.Owners { + // Add empty string as a default. 
We will replace this with the actual photo + // URL if we can find it. + r.OwnerPhotos = append(r.OwnerPhotos, "") + + resp, err := s.People.SearchDirectoryPeople().Query(o). + ReadMask("photos"). + Sources("DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE"). + Do() + if err != nil { + return nil, fmt.Errorf( + "error searching directory for person: %w: owner=\"%s\"", + err, o) + } + + if len(resp.People) > 0 { + if len(resp.People[0].Photos) > 0 { + // Replace empty string default with actual value. + r.OwnerPhotos[i] = resp.People[0].Photos[0].Url + } + } + } + + // Get doc content. + resp, err := s.Drive.Files.Export(f.Id, "text/plain").Download() + if err != nil { + return nil, fmt.Errorf("error exporting doc: %w: id=\"%s\"", err, f.Id) + } + b, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("error reading exported doc: %w: id=\"%s\"", err, + f.Id) + } + + // Trim doc content if it is larger than the maximum size. + if len(b) > MaxDocSize { + b = b[:MaxDocSize] + } + + r.Content = string(b) + return r, nil +} + +// parseFRDHeader parses a HashiCorp FRD header for metadata. +func (r *FRD) parseFRDHeader(d *docs.Document) { + tables := gw.GetTables(d.Body) + + for _, t := range tables { + gw.VisitAllTableParagraphs(t, func(p *docs.Paragraph) { + if len(p.Elements) > 0 { + if p.Elements[0].TextRun != nil { + + // Get label of table cell. 
+					label := p.Elements[0].TextRun.Content
+
+					// Dispatch on the cell's leading label text to the matching
+					// field parser.
+					switch {
+					case strings.HasPrefix(label, "Contributor:") ||
+						strings.HasPrefix(label, "Contributors:"):
+						r.parseFRDContributors(p)
+
+					case strings.HasPrefix(label, "Created"):
+						// NOTE(review): parseFRDCreated returns an error that is
+						// discarded here — a malformed Created date is silently
+						// ignored.
+						r.parseFRDCreated(p)
+
+					case strings.HasPrefix(label, "Owner:") ||
+						strings.HasPrefix(label, "Owners:"):
+						r.parseFRDOwners(p)
+
+					case strings.HasPrefix(label, "PRD"):
+						r.parseFRDPRD(p)
+
+					case strings.HasPrefix(label, "PRFAQ"):
+						r.parseFRDPRFAQ(p)
+
+					case strings.HasPrefix(label, "Status:"):
+						r.parseFRDStatus(p)
+					}
+				}
+			}
+		})
+	}
+}
+
+// parseFRDContributors parses the FRD Contributors from a Google Docs paragraph.
+func (r *FRD) parseFRDContributors(p *docs.Paragraph) {
+	r.Contributors = parseParagraphWithEmails("Contributor", p)
+}
+
+// parseFRDCreated parses the FRD Created date from a Google Docs paragraph and
+// stores it both as RFC 3339 text (Created) and Unix seconds (CreatedTime).
+func (r *FRD) parseFRDCreated(p *docs.Paragraph) error {
+	// Build string containing Created date by concatenating every element
+	// after the leading "Created" label.
+	var s string
+	for i, e := range p.Elements {
+		if i > 0 {
+			s += e.TextRun.Content
+		}
+	}
+	s = strings.TrimSpace(s)
+
+	// Parse Created date string (dateparse accepts many common formats).
+	ts, err := dateparse.ParseAny(s)
+	if err != nil {
+		return fmt.Errorf("error parsing Created date: %w: date=\"%s\"", err, s)
+
+	}
+
+	res, err := ts.UTC().MarshalText()
+	if err != nil {
+		return fmt.Errorf("error marshaling time: %w", err)
+	}
+	r.Created = string(res)
+
+	// Also store created date as Unix time so it can be sorted in Algolia.
+	r.CreatedTime = ts.Unix()
+
+	return nil
+}
+
+// parseFRDOwners parses FRD Owners from a Google Docs paragraph.
+func (r *FRD) parseFRDOwners(p *docs.Paragraph) {
+	r.Owners = parseParagraphWithEmails("Owner", p)
+}
+
+// parseFRDPRD parses the FRD PRD from a Google Docs paragraph.
+func (r *FRD) parseFRDPRD(p *docs.Paragraph) {
+	r.PRD = parseParagraphWithText("PRD", p)
+
+	// Text from the FRD template may be left in the doc.
+ if r.PRFAQ == "Link to PRD when created" { + r.PRD = "" + } +} + +// parseFRDRFC parses the FRD PRFAQ from a Google Docs paragraph. +func (r *FRD) parseFRDPRFAQ(p *docs.Paragraph) { + r.PRFAQ = parseParagraphWithText("PRFAQ", p) + + // Text from the FRD template may be left in the doc. + if r.PRFAQ == "Link to PRFAQ when created" { + r.PRFAQ = "" + } +} + +// parseFRDStatus parses the FRD Status from a Google Docs paragraph. +func (r *FRD) parseFRDStatus(p *docs.Paragraph) { + label := p.Elements[0].TextRun.Content + var status string + + // Sometimes "Status: WIP" is collected together as one text element. + if label == "Status: WIP" && p.Elements[0].TextRun.TextStyle.Bold { + status = "WIP" + } else { + for i, e := range p.Elements { + if i > 0 && e.TextRun.TextStyle.Bold { + status = e.TextRun.Content + } + } + } + + status = strings.TrimSpace(status) + r.Status = status +} + +// parseFRDSummary parses the FRD Summary from a Google Docs Body. +func (r *FRD) parseFRDSummary(b *docs.Body) { + elems := b.Content + + for _, e := range elems { + if e.Paragraph != nil { + // Summary paragraph in the FRD template will have at least 2 elements. + if len(e.Paragraph.Elements) > 1 { + if e.Paragraph.Elements[0].TextRun != nil { + if e.Paragraph.Elements[0].TextRun.Content == "Summary:" { + // We found the summary paragraph and the rest of the elements + // should be the summary value. + var s string + for i, ee := range e.Paragraph.Elements { + if i > 0 { + s += ee.TextRun.Content + } + } + r.Summary = strings.TrimSpace(s) + return + } + } + } + } + } +} + +// parseFRDTitle parses FRD title and document number from a string (which +// should be the name of the Google Drive file). +func (r *FRD) parseFRDTitle(s string) { + // Handle `[FRD] CSL-123: Some FRD Title` case. + // Also handles different types of "[FRD]" identifiers like "[Meta FRD]", + // "[Mini-FRD]", etc. 
+ re := regexp.MustCompile(`\[.*FRD\] (?P[A-Z]+-[0-9xX#?]+): (?P.+)`) + matches := re.FindStringSubmatch(s) + if len(matches) > 1 { + r.DocNumber = matches[1] + r.Title = strings.TrimSpace(matches[2]) + return + } + + // Handle `[FRD] Some FRD Title` case. + // Also handles different types of "[FRD]" identifiers like "[Meta FRD]", + // "[Mini-FRD]", etc. + re = regexp.MustCompile(`\[.*FRD\] (?P<Title>.+)`) + matches = re.FindStringSubmatch(s) + if len(matches) > 1 { + r.Title = strings.TrimSpace(matches[1]) + return + } + + // Handle `[FRD]Some FRD Title` case. + // Also handles different types of "[FRD]" identifiers like "[Meta FRD]", + // "[Mini-FRD]", etc. + re = regexp.MustCompile(`\[.*FRD\](?P<Title>.+)`) + matches = re.FindStringSubmatch(s) + if len(matches) > 1 { + r.Title = strings.TrimSpace(matches[1]) + return + } + + // Handle `FRD - Some FRD Title` case. + // Also handles different types of "[FRD]" identifiers like "[Meta FRD]", + // "[Mini-FRD]", etc. + re = regexp.MustCompile(`.*FRD - (?P<Title>.+)`) + matches = re.FindStringSubmatch(s) + if len(matches) > 1 { + r.Title = strings.TrimSpace(matches[1]) + return + } + + // Couldn't parse title and doc ID, so setting FRD title to the whole + // string. + r.Title = s +} diff --git a/pkg/hashicorpdocs/frd_replace_header.go b/pkg/hashicorpdocs/frd_replace_header.go new file mode 100644 index 000000000..3ab32c90c --- /dev/null +++ b/pkg/hashicorpdocs/frd_replace_header.go @@ -0,0 +1,754 @@ +package hashicorpdocs + +import ( + "fmt" + "net/url" + "path" + "strings" + + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + "google.golang.org/api/docs/v1" +) + +// ReplaceHeader replaces the PRD document header, which is the first table +// in the document. 
+//
+// The resulting table looks like this:
+//
+// |-----------------------------------------------------------------------------------|
+// | Title: {{title}}                                                                  |
+// |-----------------------------------------------------------------------------------|
+// | Summary: {{summary}}                                                              |
+// |-----------------------------------------------------------------------------------|
+// |                                                                                   |
+// |-----------------------------------------------------------------------------------|
+// | Created: {{created}}                    | Status: {{status}}                      |
+// |-----------------------------------------------------------------------------------|
+// |                                                                                   |
+// |-----------------------------------------------------------------------------------|
+// | Product: {{product}}                    | Owner: {{owner}}                        |
+// |-----------------------------------------------------------------------------------|
+// | Contributors: {{contributors}}          | Approvers: {{approvers}}                |
+// |-----------------------------------------------------------------------------------|
+// | PRFAQ: {{prfaq}}                        | PRD: {{prd}}                            |
+// |-----------------------------------------------------------------------------------|
+// | Tags: {{tags}}                                                                    |
+// |-----------------------------------------------------------------------------------|
+// |                                                                                   |
+// |-----------------------------------------------------------------------------------|
+// | NOTE: This document is managed by Hermes...                                       |
+// |-----------------------------------------------------------------------------------|
+//
+
+func (doc *FRD) ReplaceHeader(fileID, baseURL string, isDraft bool, s *gw.Service) error {
+	const (
+		tableRows = 11 // Number of rows in the header table.
+	)
+
+	// Get doc.
+	d, err := s.Docs.Documents.Get(fileID).Do()
+	if err != nil {
+		return fmt.Errorf("error getting doc: %w", err)
+	}
+
+	// Find the start and end indexes of the first table (assume that it is the
+	// doc header).
+	var (
+		endIndex         int64
+		startIndex       int64
+		t                *docs.Table
+		headerTableFound bool
+	)
+	elems := d.Body.Content
+	for _, e := range elems {
+		if e.Table != nil {
+			t = e.Table
+			startIndex = e.StartIndex
+			endIndex = e.EndIndex
+			break
+		}
+	}
+	// startIndex should be 2, but we'll allow a little leeway in case someone
+	// accidentally added a newline or something.
+	if t != nil && startIndex < 5 {
+		headerTableFound = true
+	} else {
+		// Header table wasn't found, so we'll insert a new one at index 2.
+		startIndex = 2
+	}
+
+	// Delete existing header.
+	if headerTableFound {
+		req := &docs.BatchUpdateDocumentRequest{
+			Requests: []*docs.Request{
+				{
+					DeleteContentRange: &docs.DeleteContentRangeRequest{
+						Range: &docs.Range{
+							SegmentId:  "",
+							StartIndex: startIndex,
+							// NOTE(review): +1 presumably also removes the
+							// newline following the table — confirm against the
+							// Docs API index model.
+							EndIndex: endIndex + 1,
+						},
+					},
+				},
+			},
+		}
+		_, err = s.Docs.Documents.BatchUpdate(fileID, req).Do()
+		if err != nil {
+			return fmt.Errorf("error deleting existing header: %w", err)
+		}
+	}
+
+	// Insert new header table.
+	req := &docs.BatchUpdateDocumentRequest{
+		Requests: []*docs.Request{
+			{
+				InsertTable: &docs.InsertTableRequest{
+					Columns: 2,
+					Location: &docs.Location{
+						Index: startIndex - 1,
+					},
+					Rows: tableRows,
+				},
+			},
+		},
+	}
+	_, err = s.Docs.Documents.BatchUpdate(fileID, req).Do()
+	if err != nil {
+		return fmt.Errorf("error inserting header table: %w", err)
+	}
+
+	// Find new table index.
+	// NOTE(review): d was fetched before the batch updates above, so this
+	// re-scan reads stale content and effectively assumes the new table's
+	// StartIndex matches the old one's — confirm, or re-fetch the doc here.
+	elems = d.Body.Content
+	for _, e := range elems {
+		if e.Table != nil {
+			startIndex = e.StartIndex
+			break
+		}
+	}
+
+	// Apply formatting to the table.
+	req = &docs.BatchUpdateDocumentRequest{
+		Requests: []*docs.Request{
+			{
+				// Remove table borders (by setting width to 0 and setting color to
+				// white as a backup), and remove padding (by setting to 0).
+ UpdateTableCellStyle: &docs.UpdateTableCellStyleRequest{ + Fields: "borderBottom,borderLeft,borderRight,borderTop,paddingBottom,paddingLeft,paddingRight,paddingTop", + TableCellStyle: &docs.TableCellStyle{ + BorderBottom: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + BorderLeft: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + BorderRight: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + BorderTop: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + PaddingBottom: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + PaddingLeft: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + PaddingRight: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + PaddingTop: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: tableRows, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 0, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Update Title row minimum height. 
+ { + UpdateTableRowStyle: &docs.UpdateTableRowStyleRequest{ + Fields: "minRowHeight", + RowIndices: []int64{0}, + TableRowStyle: &docs.TableRowStyle{ + MinRowHeight: &docs.Dimension{ + Magnitude: 27, + Unit: "PT", + }, + }, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + + // Update Summary row minimum height. + { + UpdateTableRowStyle: &docs.UpdateTableRowStyleRequest{ + Fields: "minRowHeight", + RowIndices: []int64{1}, + TableRowStyle: &docs.TableRowStyle{ + MinRowHeight: &docs.Dimension{ + Magnitude: 11, + Unit: "PT", + }, + }, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + + // Merge cells for the Title row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 0, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for the Summary row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 1, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for blank row after the Summary row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 2, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for blank row after the Created/Status row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 4, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for the Tags row. 
+ { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 8, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for blank row after the Tags row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 9, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for the "Managed by Hermes" note row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 10, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + }, + } + _, err = s.Docs.Documents.BatchUpdate(fileID, req).Do() + if err != nil { + return fmt.Errorf("error applying formatting to header table: %w", err) + } + + // Populate table. + var ( + pos int // Use to track position in document. + reqs []*docs.Request + cellReqs []*docs.Request // Temp var used for createTextCellRequests() results. + cellLength int // Temp var used for createTextCellRequests() results. + ) + + // Title cell. 
+ pos = int(startIndex) + 3 + titleText := fmt.Sprintf("[%s] %s", doc.DocNumber, doc.Title) + reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "bold,fontSize,foregroundColor", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + Bold: true, + FontSize: &docs.Dimension{ + Magnitude: 20, + Unit: "PT", + }, + ForegroundColor: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 0.2627451, + Green: 0.2627451, + Red: 0.2627451, + }, + }, + }, + }, + }, + }, + { + InsertText: &docs.InsertTextRequest{ + Location: &docs.Location{ + Index: int64(pos), + }, + Text: titleText, + }, + }, + }..., + ) + pos += len(titleText) + 5 + + // Summary cell. + summaryText := fmt.Sprintf("Summary: %s", doc.Summary) + reqs = append(reqs, + []*docs.Request{ + { + InsertText: &docs.InsertTextRequest{ + Location: &docs.Location{ + Index: int64(pos), + }, + Text: summaryText, + }, + }, + + // Bold "Summary:". + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "bold", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 8), + }, + TextStyle: &docs.TextStyle{ + Bold: true, + }, + }, + }, + }..., + ) + pos += len(summaryText) + 5 + + // Blank row after summary row. + reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "fontSize", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + FontSize: &docs.Dimension{ + Magnitude: 8, + Unit: "PT", + }, + }, + }, + }, + }...) + pos += 5 + + // Created cell. + cellReqs, cellLength = createTextCellRequests( + "Created", doc.Created, int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 2 + + // Status cell. + cellReqs, cellLength = createTextCellRequests( + "Status", "WIP | In-Review | Approved | Obsolete", int64(pos)) + reqs = append(reqs, cellReqs...) 
+ var statusStartIndex, statusEndIndex int + switch strings.ToLower(doc.Status) { + case "in review": + fallthrough + case "in-review": + statusStartIndex = 14 + statusEndIndex = 23 + case "approved": + statusStartIndex = 26 + statusEndIndex = 34 + case "obsolete": + statusStartIndex = 37 + statusEndIndex = 45 + case "wip": + fallthrough + default: + // Default to "WIP" for all unknown statuses. + statusStartIndex = 8 + statusEndIndex = 11 + } + reqs = append(reqs, + // Bold the status. + &docs.Request{ + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "bold", + Range: &docs.Range{ + StartIndex: int64(pos + statusStartIndex), + EndIndex: int64(pos + statusEndIndex), + }, + TextStyle: &docs.TextStyle{ + Bold: true, + }, + }, + }) + pos += cellLength + 3 + + // Blank row after Created/Status row. + reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "fontSize", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + FontSize: &docs.Dimension{ + Magnitude: 8, + Unit: "PT", + }, + }, + }, + }, + }...) + pos += 5 + + // Product cell. + cellReqs, cellLength = createTextCellRequests( + "Product", doc.Product, int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 2 + + // Owner cell. + cellReqs, cellLength = createTextCellRequests( + "Owner", doc.Owners[0], int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 3 + + // Contributors cell. + cellReqs, cellLength = createTextCellRequests( + "Contributors", strings.Join(doc.Contributors[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 2 + + // Approvers cell. + // Build approvers slice with a check next to reviewers who have approved. 
+ var approvers []string + for _, approver := range doc.Approvers { + if contains(doc.ApprovedBy, approver) { + approvers = append(approvers, "✅ "+approver) + } else if contains(doc.ChangesRequestedBy, approver) { + approvers = append(approvers, "❌ "+approver) + } else { + approvers = append(approvers, approver) + } + } + cellReqs, cellLength = createTextCellRequests( + "Approvers", strings.Join(approvers[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 3 + + // PRFAQ cell. + // TODO: We only show a link with the text "PRFAQ" now if a PRFAQ is defined, + // but should show the document number instead. + prfaqCellVal := "" + if doc.PRFAQ != "" { + prfaqCellVal = "PRFAQ" + } + cellReqs, cellLength = createTextCellRequests( + "PRFAQ", prfaqCellVal, int64(pos)) + reqs = append(reqs, cellReqs...) + if doc.PRFAQ != "" { + reqs = append(reqs, + []*docs.Request{ + // Add link to PRFAQ. + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "link", + Range: &docs.Range{ + StartIndex: int64(pos + 7), + EndIndex: int64(pos + 10), + }, + TextStyle: &docs.TextStyle{ + Link: &docs.Link{ + Url: doc.PRFAQ, + }, + }, + }, + }, + }...) + } + pos += cellLength + 2 + + // PRD cell. + // TODO: We only show a link with the text "PRD" now if a PRD is defined, but + // should show the document number instead. + prdCellVal := "" + if doc.PRD != "" { + prdCellVal = "PRD" + } + cellReqs, cellLength = createTextCellRequests( + "PRD", prdCellVal, int64(pos)) + reqs = append(reqs, cellReqs...) + if doc.PRD != "" { + reqs = append(reqs, + []*docs.Request{ + // Add link to PRD. + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "link", + Range: &docs.Range{ + StartIndex: int64(pos + 5), + EndIndex: int64(pos + 8), + }, + TextStyle: &docs.TextStyle{ + Link: &docs.Link{ + Url: doc.PRD, + }, + }, + }, + }, + }...) + } + pos += cellLength + 3 + + // Tags cell. 
+ cellReqs, cellLength = createTextCellRequests( + "Tags", strings.Join(doc.Tags[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 5 + + // Blank row after Tags row. + reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "fontSize", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + FontSize: &docs.Dimension{ + Magnitude: 8, + Unit: "PT", + }, + }, + }, + }, + }...) + pos += 5 + + // "Managed by Hermes..." note cell. + docURL, err := url.Parse(baseURL) + if err != nil { + return fmt.Errorf("error parsing base URL: %w", err) + } + docURL.Path = path.Join(docURL.Path, "document", doc.ObjectID) + docURLString := docURL.String() + docURLString = strings.TrimRight(docURLString, "/") + if isDraft { + docURLString += "?draft=true" + } + cellReqs, cellLength = createTextCellRequests( + "NOTE", + "This document is managed by Hermes and this header will be periodically overwritten using document metadata.", + int64(pos)) + reqs = append(reqs, cellReqs...) + reqs = append(reqs, + []*docs.Request{ + // Add link to document in Hermes. + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "link", + Range: &docs.Range{ + StartIndex: int64(pos + 11), + EndIndex: int64(pos + 19), + }, + TextStyle: &docs.TextStyle{ + Link: &docs.Link{ + Url: docURLString, + }, + }, + }, + }, + + // Add link to Hermes. + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "link", + Range: &docs.Range{ + StartIndex: int64(pos + 34), + EndIndex: int64(pos + 40), + }, + TextStyle: &docs.TextStyle{ + Link: &docs.Link{ + Url: baseURL, + }, + }, + }, + }, + }...) + pos += cellLength + 5 + + // Do the batch update. + _, err = s.Docs.Documents.BatchUpdate(fileID, + &docs.BatchUpdateDocumentRequest{ + Requests: reqs}). + Do() + if err != nil { + return fmt.Errorf("error populating table: %w", err) + } + + // Rename file with new title. 
+ err = s.RenameFile(fileID, fmt.Sprintf("[%s] %s", doc.DocNumber, doc.Title)) + if err != nil { + return fmt.Errorf("error renaming file with new title: %w", err) + } + + return nil +} diff --git a/pkg/hashicorpdocs/prd.go b/pkg/hashicorpdocs/prd.go new file mode 100644 index 000000000..43e8530c4 --- /dev/null +++ b/pkg/hashicorpdocs/prd.go @@ -0,0 +1,326 @@ +package hashicorpdocs + +import ( + "fmt" + "io" + "reflect" + "regexp" + "strings" + "time" + + "github.com/araddon/dateparse" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + "google.golang.org/api/docs/v1" + "google.golang.org/api/drive/v3" +) + +// PRD contains metadata for documents based off of the HashiCorp PRD template. +type PRD struct { + BaseDoc `mapstructure:",squash"` + + // RFC is the associated RFC. + RFC string `json:"rfc,omitempty"` + + // Stakeholders is a slice of email address strings for document stakeholders. + Stakeholders []string `json:"stakeholders,omitempty"` +} + +func (d PRD) GetCustomEditableFields() map[string]CustomDocTypeField { + return map[string]CustomDocTypeField{ + "rfc": { + DisplayName: "RFC", + Type: "STRING", + }, + } +} + +func (d *PRD) SetCustomEditableFields() { + d.CustomEditableFields = d.GetCustomEditableFields() +} + +// MissingFields returns the missing fields of the doc struct. +func (d PRD) MissingFields() []string { + var missingFields []string + + rfcType := reflect.TypeOf(d) + for i := 0; i < rfcType.NumField(); i++ { + f := rfcType.Field(i) + val := reflect.ValueOf(d).FieldByName(f.Name) + if val.IsZero() { + missingFields = append(missingFields, f.Name) + } else if f.Type.Kind() == reflect.Slice && val.Len() == 0 { + // Parsing docs may generate an empty slice instead of nil zero value. + missingFields = append(missingFields, f.Name) + } + } + + return missingFields +} + +// NewPRD parses a Google Drive file based on the HashiCorp PRD template and +// returns the resulting PRD struct. 
+func NewPRD(f *drive.File, s *gw.Service, allFolders []string) (*PRD, error) { + r := &PRD{ + BaseDoc: BaseDoc{ + ObjectID: f.Id, + Title: f.Name, + DocType: "PRD", + ThumbnailLink: f.ThumbnailLink, + }, + } + + // Parse title and doc number. + r.parsePRDTitle(f.Name) + + // Convert modified time to Unix time so it can be sorted in Algolia. + mt, err := time.Parse(time.RFC3339, f.ModifiedTime) + if err != nil { + return nil, fmt.Errorf("error parsing modified time: %w: id=\"%s\"", + err, f.Id) + } + r.ModifiedTime = mt.Unix() + + doc, err := s.GetDoc(f.Id) + if err != nil { + return nil, fmt.Errorf("error getting doc: %w: id=\"%s\"", err, f.Id) + } + + // Assume the name of the parent folder is the PRD Product. + parent, err := s.Drive.Files.Get(f.Parents[0]). + SupportsAllDrives(true).Fields("name").Do() + if err != nil { + return nil, fmt.Errorf("error getting parent folder file: %w", err) + } + r.Product = parent.Name + + r.parsePRDSummary(doc.Body) + + // Parse PRD header for metadata. + r.parsePRDHeader(doc) + + // Parse all linked documents. + // TODO: use allFolders parameter and enable this. + // r.LinkedDocs = gw.GetLinkURLs(doc.Body) + + // Get owner photos by searching Google Workspace directory for owner strings. + for i, o := range r.Owners { + // Add empty string as a default. We will replace this with the actual photo + // URL if we can find it. + r.OwnerPhotos = append(r.OwnerPhotos, "") + + people, err := s.SearchPeople(o) + if err != nil { + return nil, fmt.Errorf( + "error searching directory for person: %w: owner=\"%s\"", + err, o) + } + + if len(people) > 0 { + if len(people[0].Photos) > 0 { + // Replace empty string default with actual value. + r.OwnerPhotos[i] = people[0].Photos[0].Url + } + } + } + + // Get doc content. 
+ resp, err := s.Drive.Files.Export(f.Id, "text/plain").Download() + if err != nil { + return nil, fmt.Errorf("error exporting doc: %w: id=\"%s\"", err, f.Id) + } + b, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("error reading exported doc: %w: id=\"%s\"", err, + f.Id) + } + + // Trim doc content if it is larger than the maximum size. + if len(b) > MaxDocSize { + b = b[:MaxDocSize] + } + + r.Content = string(b) + return r, nil +} + +// parsePRDHeader parses a HashiCorp PRD header for metadata. +func (r *PRD) parsePRDHeader(d *docs.Document) { + tables := gw.GetTables(d.Body) + + for _, t := range tables { + gw.VisitAllTableParagraphs(t, func(p *docs.Paragraph) { + if len(p.Elements) > 0 { + if p.Elements[0].TextRun != nil { + + // Get label of table cell. + label := p.Elements[0].TextRun.Content + + switch { + case strings.HasPrefix(label, "Contributor:") || + strings.HasPrefix(label, "Contributors:"): + r.parsePRDContributors(p) + + case strings.HasPrefix(label, "Created"): + r.parsePRDCreated(p) + + case strings.HasPrefix(label, "Owner:") || + strings.HasPrefix(label, "Owners:"): + r.parsePRDOwners(p) + + case strings.HasPrefix(label, "RFC"): + r.parsePRDRFC(p) + + case strings.HasPrefix(label, "Status:"): + r.parsePRDStatus(p) + } + } + } + }) + } +} + +// parsePRDContributors parses the PRD Contributors from a Google Docs paragraph. +func (r *PRD) parsePRDContributors(p *docs.Paragraph) { + r.Contributors = parseParagraphWithEmails("Contributor", p) +} + +// parsePRDCreated parses the PRD Created date from a Google Docs paragraph. +func (r *PRD) parsePRDCreated(p *docs.Paragraph) error { + // Build string containing Created date. + var s string + for i, e := range p.Elements { + if i > 0 { + s += e.TextRun.Content + } + } + s = strings.TrimSpace(s) + + // Parse Created date string. 
+ ts, err := dateparse.ParseAny(s) + if err != nil { + return fmt.Errorf("error parsing Created date: %w: date=\"%s\"", err, s) + + } + + res, err := ts.UTC().MarshalText() + if err != nil { + return fmt.Errorf("error marshaling time: %w", err) + } + r.Created = string(res) + + // Also store created date as Unix time so it can be sorted in Algolia. + r.CreatedTime = ts.Unix() + + return nil +} + +// parsePRDOwners parses PRD Owners from a Google Docs paragraph. +func (r *PRD) parsePRDOwners(p *docs.Paragraph) { + r.Owners = parseParagraphWithEmails("Owner", p) +} + +// parsePRDRFC parses the PRD RFC from a Google Docs paragraph. +func (r *PRD) parsePRDRFC(p *docs.Paragraph) { + r.RFC = parseParagraphWithText("RFC", p) + + // Text from the PRD template may be left in the doc. + if r.RFC == "Link to RFC when created" { + r.RFC = "" + } +} + +// parsePRDStatus parses the PRD Status from a Google Docs paragraph. +func (r *PRD) parsePRDStatus(p *docs.Paragraph) { + label := p.Elements[0].TextRun.Content + var status string + + // Sometimes "Status: WIP" is collected together as one text element. + if label == "Status: WIP" && p.Elements[0].TextRun.TextStyle.Bold { + status = "WIP" + } else { + for i, e := range p.Elements { + if i > 0 && e.TextRun.TextStyle.Bold { + status = e.TextRun.Content + } + } + } + + status = strings.TrimSpace(status) + r.Status = status +} + +// parsePRDSummary parses the PRD Summary from a Google Docs Body. +func (r *PRD) parsePRDSummary(b *docs.Body) { + elems := b.Content + + for _, e := range elems { + if e.Paragraph != nil { + // Summary paragraph in the PRD template will have at least 2 elements. + if len(e.Paragraph.Elements) > 1 { + if e.Paragraph.Elements[0].TextRun != nil { + if e.Paragraph.Elements[0].TextRun.Content == "Summary:" { + // We found the summary paragraph and the rest of the elements + // should be the summary value. 
+ var s string + for i, ee := range e.Paragraph.Elements { + if i > 0 { + s += ee.TextRun.Content + } + } + r.Summary = strings.TrimSpace(s) + return + } + } + } + } + } +} + +// parsePRDTitle parses PRD title and document number from a string (which +// should be the name of the Google Drive file). +func (r *PRD) parsePRDTitle(s string) { + // Handle `[PRD] CSL-123: Some PRD Title` case. + // Also handles different types of "[PRD]" identifiers like "[Meta PRD]", + // "[Mini-PRD]", etc. + re := regexp.MustCompile(`\[.*PRD\] (?P<DocID>[A-Z]+-[0-9xX#?]+): (?P<Title>.+)`) + matches := re.FindStringSubmatch(s) + if len(matches) > 1 { + r.DocNumber = matches[1] + r.Title = strings.TrimSpace(matches[2]) + return + } + + // Handle `[PRD] Some PRD Title` case. + // Also handles different types of "[PRD]" identifiers like "[Meta PRD]", + // "[Mini-PRD]", etc. + re = regexp.MustCompile(`\[.*PRD\] (?P<Title>.+)`) + matches = re.FindStringSubmatch(s) + if len(matches) > 1 { + r.Title = strings.TrimSpace(matches[1]) + return + } + + // Handle `[PRD]Some PRD Title` case. + // Also handles different types of "[PRD]" identifiers like "[Meta PRD]", + // "[Mini-PRD]", etc. + re = regexp.MustCompile(`\[.*PRD\](?P<Title>.+)`) + matches = re.FindStringSubmatch(s) + if len(matches) > 1 { + r.Title = strings.TrimSpace(matches[1]) + return + } + + // Handle `PRD - Some PRD Title` case. + // Also handles different types of "[PRD]" identifiers like "[Meta PRD]", + // "[Mini-PRD]", etc. + re = regexp.MustCompile(`.*PRD - (?P<Title>.+)`) + matches = re.FindStringSubmatch(s) + if len(matches) > 1 { + r.Title = strings.TrimSpace(matches[1]) + return + } + + // Couldn't parse title and doc ID, so setting PRD title to the whole + // string. 
+ r.Title = s +} diff --git a/pkg/hashicorpdocs/prd_replace_header.go b/pkg/hashicorpdocs/prd_replace_header.go new file mode 100644 index 000000000..31a5b1595 --- /dev/null +++ b/pkg/hashicorpdocs/prd_replace_header.go @@ -0,0 +1,741 @@ +package hashicorpdocs + +import ( + "fmt" + "net/url" + "path" + "strings" + + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + "google.golang.org/api/docs/v1" +) + +// ReplaceHeader replaces the PRD document header, which is the first table +// in the document. +// +// The resulting table looks like this: +// +// |-----------------------------------------------------------------------------------| +// | Title: {{title}} | +// |-----------------------------------------------------------------------------------| +// | Summary: {{summary}} | +// |-----------------------------------------------------------------------------------| +// | | +// |-----------------------------------------------------------------------------------| +// | Created: {{created}} | Status: {{status}} | +// |-----------------------------------------------------------------------------------| +// | | +// |-----------------------------------------------------------------------------------| +// | Product: {{product}} | Owner: {{owner}} | +// |-----------------------------------------------------------------------------------| +// | Contributors: {{contributors}} | Other stakeholders: {{stakeholders}} | +// |-----------------------------------------------------------------------------------| +// | RFC: {{rfc}} | Approvers: {{approvers}} | +// |-----------------------------------------------------------------------------------| +// | Tags: {{tags}} | +// |-----------------------------------------------------------------------------------| +// | | +// |-----------------------------------------------------------------------------------| +// | NOTE: This document is managed by Hermes... 
| +// |-----------------------------------------------------------------------------------| +// + +func (doc *PRD) ReplaceHeader(fileID, baseURL string, isDraft bool, s *gw.Service) error { + const ( + tableRows = 11 // Number of rows in the header table. + ) + + // Get doc. + d, err := s.GetDoc(fileID) + if err != nil { + return fmt.Errorf("error getting doc: %w", err) + } + + // Find the start and end indexes of the first table (assume that it is the + // doc header). + var ( + endIndex int64 + startIndex int64 + t *docs.Table + headerTableFound bool + ) + elems := d.Body.Content + for _, e := range elems { + if e.Table != nil { + t = e.Table + startIndex = e.StartIndex + endIndex = e.EndIndex + break + } + } + // startIndex should be 2, but we'll allow a little leeway in case someone + // accidentally added a newline or something. + if t != nil && startIndex < 5 { + headerTableFound = true + } else { + // Header table wasn't found, so we'll insert a new one at index 2. + startIndex = 2 + } + + // Delete existing header. + if headerTableFound { + req := &docs.BatchUpdateDocumentRequest{ + Requests: []*docs.Request{ + { + DeleteContentRange: &docs.DeleteContentRangeRequest{ + Range: &docs.Range{ + SegmentId: "", + StartIndex: startIndex, + EndIndex: endIndex + 1, + }, + }, + }, + }, + } + _, err = s.Docs.Documents.BatchUpdate(fileID, req).Do() + if err != nil { + return fmt.Errorf("error deleting existing header: %w", err) + } + } + + // Insert new header table. + req := &docs.BatchUpdateDocumentRequest{ + Requests: []*docs.Request{ + { + InsertTable: &docs.InsertTableRequest{ + Columns: 2, + Location: &docs.Location{ + Index: startIndex - 1, + }, + Rows: tableRows, + }, + }, + }, + } + _, err = s.Docs.Documents.BatchUpdate(fileID, req).Do() + if err != nil { + return fmt.Errorf("error inserting header table: %w", err) + } + + // Find new table index. 
+ elems = d.Body.Content + for _, e := range elems { + if e.Table != nil { + startIndex = e.StartIndex + break + } + } + + // Apply formatting to the table. + req = &docs.BatchUpdateDocumentRequest{ + Requests: []*docs.Request{ + { + // Remove table borders (by setting width to 0 and setting color to + // white as a backup), and remove padding (by setting to 0). + UpdateTableCellStyle: &docs.UpdateTableCellStyleRequest{ + Fields: "borderBottom,borderLeft,borderRight,borderTop,paddingBottom,paddingLeft,paddingRight,paddingTop", + TableCellStyle: &docs.TableCellStyle{ + BorderBottom: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + BorderLeft: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + BorderRight: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + BorderTop: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + PaddingBottom: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + PaddingLeft: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + PaddingRight: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + PaddingTop: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: tableRows, + TableCellLocation: 
&docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 0, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Update Title row minimum height. + { + UpdateTableRowStyle: &docs.UpdateTableRowStyleRequest{ + Fields: "minRowHeight", + RowIndices: []int64{0}, + TableRowStyle: &docs.TableRowStyle{ + MinRowHeight: &docs.Dimension{ + Magnitude: 27, + Unit: "PT", + }, + }, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + + // Update Summary row minimum height. + { + UpdateTableRowStyle: &docs.UpdateTableRowStyleRequest{ + Fields: "minRowHeight", + RowIndices: []int64{1}, + TableRowStyle: &docs.TableRowStyle{ + MinRowHeight: &docs.Dimension{ + Magnitude: 11, + Unit: "PT", + }, + }, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + + // Merge cells for the Title row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 0, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for the Summary row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 1, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for blank row after the Summary row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 2, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for blank row after the Created/Status row. 
+ { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 4, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for the Tags row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 8, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for blank row after the Tags row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 9, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for the "Managed by Hermes" note row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 10, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + }, + } + _, err = s.Docs.Documents.BatchUpdate(fileID, req).Do() + if err != nil { + return fmt.Errorf("error applying formatting to header table: %w", err) + } + + // Populate table. + var ( + pos int // Use to track position in document. + reqs []*docs.Request + cellReqs []*docs.Request // Temp var used for createTextCellRequests() results. + cellLength int // Temp var used for createTextCellRequests() results. + ) + + // Title cell. 
+ pos = int(startIndex) + 3 + titleText := fmt.Sprintf("[%s] %s", doc.DocNumber, doc.Title) + reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "bold,fontSize,foregroundColor", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + Bold: true, + FontSize: &docs.Dimension{ + Magnitude: 20, + Unit: "PT", + }, + ForegroundColor: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 0.2627451, + Green: 0.2627451, + Red: 0.2627451, + }, + }, + }, + }, + }, + }, + { + InsertText: &docs.InsertTextRequest{ + Location: &docs.Location{ + Index: int64(pos), + }, + Text: titleText, + }, + }, + }..., + ) + pos += len(titleText) + 5 + + // Summary cell. + summaryText := fmt.Sprintf("Summary: %s", doc.Summary) + reqs = append(reqs, + []*docs.Request{ + { + InsertText: &docs.InsertTextRequest{ + Location: &docs.Location{ + Index: int64(pos), + }, + Text: summaryText, + }, + }, + + // Bold "Summary:". + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "bold", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 8), + }, + TextStyle: &docs.TextStyle{ + Bold: true, + }, + }, + }, + }..., + ) + pos += len(summaryText) + 5 + + // Blank row after summary row. + reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "fontSize", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + FontSize: &docs.Dimension{ + Magnitude: 8, + Unit: "PT", + }, + }, + }, + }, + }...) + pos += 5 + + // Created cell. + cellReqs, cellLength = createTextCellRequests( + "Created", doc.Created, int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 2 + + // Status cell. + cellReqs, cellLength = createTextCellRequests( + "Status", "WIP | In-Review | Approved | Obsolete", int64(pos)) + reqs = append(reqs, cellReqs...) 
+ var statusStartIndex, statusEndIndex int + switch strings.ToLower(doc.Status) { + case "in review": + fallthrough + case "in-review": + statusStartIndex = 14 + statusEndIndex = 23 + case "approved": + statusStartIndex = 26 + statusEndIndex = 34 + case "obsolete": + statusStartIndex = 37 + statusEndIndex = 45 + case "wip": + fallthrough + default: + // Default to "WIP" for all unknown statuses. + statusStartIndex = 8 + statusEndIndex = 11 + } + reqs = append(reqs, + // Bold the status. + &docs.Request{ + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "bold", + Range: &docs.Range{ + StartIndex: int64(pos + statusStartIndex), + EndIndex: int64(pos + statusEndIndex), + }, + TextStyle: &docs.TextStyle{ + Bold: true, + }, + }, + }) + pos += cellLength + 3 + + // Blank row after Created/Status row. + reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "fontSize", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + FontSize: &docs.Dimension{ + Magnitude: 8, + Unit: "PT", + }, + }, + }, + }, + }...) + pos += 5 + + // Product cell. + cellReqs, cellLength = createTextCellRequests( + "Product", doc.Product, int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 2 + + // Owner cell. + cellReqs, cellLength = createTextCellRequests( + "Owner", doc.Owners[0], int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 3 + + // Current Version cell. + // cellReqs, cellLength = createTextCellRequests( + // "Current Version", doc.CurrentVersion, int64(pos)) + // reqs = append(reqs, cellReqs...) + // pos += cellLength + 2 + + // Contributors cell. + cellReqs, cellLength = createTextCellRequests( + "Contributors", strings.Join(doc.Contributors[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + // pos += cellLength + 3 + pos += cellLength + 2 + + // Target Version cell. 
+ // cellReqs, cellLength = createTextCellRequests( + // "Target Version", doc.TargetVersion, int64(pos)) + // reqs = append(reqs, cellReqs...) + // pos += cellLength + 2 + + // Other Stakeholders cell. + cellReqs, cellLength = createTextCellRequests( + "Other Stakeholders", strings.Join(doc.Stakeholders[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 3 + + // RFC cell. + // TODO: We only show a link with the text "RFC" now if an RFC is defined, but + // should show the document number instead. + rfcCellVal := "" + if doc.RFC != "" { + rfcCellVal = "RFC" + } + cellReqs, cellLength = createTextCellRequests( + "RFC", rfcCellVal, int64(pos)) + reqs = append(reqs, cellReqs...) + if doc.RFC != "" { + reqs = append(reqs, + []*docs.Request{ + // Add link to RFC. + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "link", + Range: &docs.Range{ + StartIndex: int64(pos + 5), + EndIndex: int64(pos + 8), + }, + TextStyle: &docs.TextStyle{ + Link: &docs.Link{ + Url: doc.RFC, + }, + }, + }, + }, + }...) + } + pos += cellLength + 2 + + // Approvers cell. + // Build approvers slice with a check next to reviewers who have approved. + var approvers []string + for _, approver := range doc.Approvers { + if contains(doc.ApprovedBy, approver) { + approvers = append(approvers, "✅ "+approver) + } else if contains(doc.ChangesRequestedBy, approver) { + approvers = append(approvers, "❌ "+approver) + } else { + approvers = append(approvers, approver) + } + } + cellReqs, cellLength = createTextCellRequests( + "Approvers", strings.Join(approvers[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 3 + + // Tags cell. + cellReqs, cellLength = createTextCellRequests( + "Tags", strings.Join(doc.Tags[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 5 + + // Blank row after Tags row. 
+ reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "fontSize", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + FontSize: &docs.Dimension{ + Magnitude: 8, + Unit: "PT", + }, + }, + }, + }, + }...) + pos += 5 + + // "Managed by Hermes..." note cell. + docURL, err := url.Parse(baseURL) + if err != nil { + return fmt.Errorf("error parsing base URL: %w", err) + } + docURL.Path = path.Join(docURL.Path, "document", doc.ObjectID) + docURLString := docURL.String() + docURLString = strings.TrimRight(docURLString, "/") + if isDraft { + docURLString += "?draft=true" + } + cellReqs, cellLength = createTextCellRequests( + "NOTE", + "This document is managed by Hermes and this header will be periodically overwritten using document metadata.", + int64(pos)) + reqs = append(reqs, cellReqs...) + reqs = append(reqs, + []*docs.Request{ + // Add link to document in Hermes. + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "link", + Range: &docs.Range{ + StartIndex: int64(pos + 11), + EndIndex: int64(pos + 19), + }, + TextStyle: &docs.TextStyle{ + Link: &docs.Link{ + Url: docURLString, + }, + }, + }, + }, + + // Add link to Hermes. + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "link", + Range: &docs.Range{ + StartIndex: int64(pos + 34), + EndIndex: int64(pos + 40), + }, + TextStyle: &docs.TextStyle{ + Link: &docs.Link{ + Url: baseURL, + }, + }, + }, + }, + }...) + pos += cellLength + 5 + + // Do the batch update. + _, err = s.Docs.Documents.BatchUpdate(fileID, + &docs.BatchUpdateDocumentRequest{ + Requests: reqs}). + Do() + if err != nil { + return fmt.Errorf("error populating table: %w", err) + } + + // Rename file with new title. 
+ err = s.RenameFile(fileID, fmt.Sprintf("[%s] %s", doc.DocNumber, doc.Title)) + if err != nil { + return fmt.Errorf("error renaming file with new title: %w", err) + } + + return nil +} diff --git a/pkg/hashicorpdocs/rfc.go b/pkg/hashicorpdocs/rfc.go new file mode 100644 index 000000000..5a42b2921 --- /dev/null +++ b/pkg/hashicorpdocs/rfc.go @@ -0,0 +1,465 @@ +package hashicorpdocs + +import ( + "fmt" + "io" + "reflect" + "regexp" + "strings" + "time" + + "github.com/araddon/dateparse" + "github.com/forPelevin/gomoji" + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + "google.golang.org/api/docs/v1" + "google.golang.org/api/drive/v3" +) + +// RFC contains metadata for documents based off of the HashiCorp RFC template. +type RFC struct { + BaseDoc `mapstructure:",squash"` + + // CurrentVersion is the current version of the product at the time of + // document authoring. + CurrentVersion string `json:"currentVersion,omitempty"` + + // PRD is the associated PRD. + PRD string `json:"prd,omitempty"` + + // Stakeholders is a slice of email address strings for document stakeholders. + Stakeholders []string `json:"stakeholders,omitempty"` + + // TargetVersion is the target version of the product for the changes being + // proposed in the document. + TargetVersion string `json:"targetVersion,omitempty"` +} + +func (d RFC) GetCustomEditableFields() map[string]CustomDocTypeField { + return map[string]CustomDocTypeField{ + "currentVersion": { + DisplayName: "Current Version", + Type: "STRING", + }, + "prd": { + DisplayName: "PRD", + Type: "STRING", + }, + "stakeholders": { + DisplayName: "Stakeholders", + Type: "PEOPLE", + }, + "targetVersion": { + DisplayName: "Target Version", + Type: "STRING", + }, + } +} + +func (d *RFC) SetCustomEditableFields() { + d.CustomEditableFields = d.GetCustomEditableFields() +} + +// MissingFields returns the missing fields of the doc struct. 
+func (d RFC) MissingFields() []string { + var missingFields []string + + rfcType := reflect.TypeOf(d) + for i := 0; i < rfcType.NumField(); i++ { + f := rfcType.Field(i) + val := reflect.ValueOf(d).FieldByName(f.Name) + if val.IsZero() { + missingFields = append(missingFields, f.Name) + } else if f.Type.Kind() == reflect.Slice && val.Len() == 0 { + // Parsing docs may generate an empty slice instead of nil zero value. + missingFields = append(missingFields, f.Name) + } + } + + return missingFields +} + +// NewRFC parses a Google Drive file based on the HashiCorp RFC template and +// returns the resulting RFC struct. +func NewRFC(f *drive.File, s *gw.Service, allFolders []string) (*RFC, error) { + r := &RFC{ + BaseDoc: BaseDoc{ + ObjectID: f.Id, + Title: f.Name, + DocType: "RFC", + ThumbnailLink: f.ThumbnailLink, + }, + } + + // Parse title and doc number. + r.parseRFCTitle(f.Name) + + // Convert modified time to Unix time so it can be sorted in Algolia. + mt, err := time.Parse(time.RFC3339, f.ModifiedTime) + if err != nil { + return nil, fmt.Errorf("error parsing modified time: %w: id=\"%s\"", + err, f.Id) + } + r.ModifiedTime = mt.Unix() + + doc, err := s.GetDoc(f.Id) + if err != nil { + return nil, fmt.Errorf("error getting doc: %w: id=\"%s\"", err, f.Id) + } + + // Assume the name of the parent folder is the RFC Product. + parent, err := s.Drive.Files.Get(f.Parents[0]). + SupportsAllDrives(true).Fields("name").Do() + if err != nil { + return nil, fmt.Errorf("error getting parent folder file: %w", err) + } + r.Product = parent.Name + + r.parseRFCSummary(doc.Body) + + // Parse RFC header for metadata. + r.parseRFCHeader(doc) + + // Parse all linked documents. + // TODO: use allFolders parameter and enable this. + // r.LinkedDocs = gw.GetLinkURLs(doc.Body) + + // Get owner photos by searching Google Workspace directory for owner strings. + for i, o := range r.Owners { + // Add empty string as a default. 
We will replace this with the actual photo + // URL if we can find it. + r.OwnerPhotos = append(r.OwnerPhotos, "") + + people, err := s.SearchPeople(o) + if err != nil { + return nil, fmt.Errorf( + "error searching directory for person: %w: owner=\"%s\"", + err, o) + } + + if len(people) > 0 { + if len(people[0].Photos) > 0 { + // Replace empty string default with actual value. + r.OwnerPhotos[i] = people[0].Photos[0].Url + } + } + } + + // Get doc content. + resp, err := s.Drive.Files.Export(f.Id, "text/plain").Download() + if err != nil { + return nil, fmt.Errorf("error exporting doc: %w: id=\"%s\"", err, f.Id) + } + b, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("error reading exported doc: %w: id=\"%s\"", err, + f.Id) + } + + // Trim doc content if it is larger than the maximum size. + if len(b) > MaxDocSize { + b = b[:MaxDocSize] + } + + r.Content = string(b) + return r, nil +} + +// parseRFCHeader parses a HashiCorp RFC header for metadata. +func (r *RFC) parseRFCHeader(d *docs.Document) { + tables := gw.GetTables(d.Body) + + for _, t := range tables { + gw.VisitAllTableParagraphs(t, func(p *docs.Paragraph) { + if len(p.Elements) > 0 { + if p.Elements[0].TextRun != nil { + + // Get label of table cell. 
+ label := p.Elements[0].TextRun.Content + + switch { + case strings.HasPrefix(label, "Approver:") || + strings.HasPrefix(label, "Approvers:"): + r.parseRFCApprovers(p) + + case strings.HasPrefix(label, "Contributor:") || + strings.HasPrefix(label, "Contributors:"): + r.parseRFCContributors(p) + + case strings.HasPrefix(label, "Created"): + r.parseRFCCreated(p) + + case strings.HasPrefix(label, "Current Version"): + r.parseRFCCurrentVersion(p) + + case strings.HasPrefix(label, "Owner:") || + strings.HasPrefix(label, "Owners:"): + r.parseRFCOwners(p) + + case strings.HasPrefix(label, "PRD"): + r.parseRFCPRD(p) + + case strings.HasPrefix(label, "Stakeholder:") || + strings.HasPrefix(label, "Stakeholders:"): + r.parseRFCStakeholders(p) + + case strings.HasPrefix(label, "Status:"): + r.parseRFCStatus(p) + } + } + } + }) + } +} + +// parseRFCApprovers parses RFC Approvers from a Google Doc paragraph. +func (r *RFC) parseRFCApprovers(p *docs.Paragraph) { + r.Approvers = parseParagraphWithEmails("Approver", p) +} + +// parseRFCContributors parses the RFC Contributors from a Google Docs paragraph. +func (r *RFC) parseRFCContributors(p *docs.Paragraph) { + r.Contributors = parseParagraphWithEmails("Contributor", p) +} + +// parseRFCCreated parses the RFC Created date from a Google Docs paragraph. +func (r *RFC) parseRFCCreated(p *docs.Paragraph) error { + // Build string containing Created date. + var s string + for i, e := range p.Elements { + if i > 0 { + s += e.TextRun.Content + } + } + s = strings.TrimSpace(s) + + // Parse Created date string. + ts, err := dateparse.ParseAny(s) + if err != nil { + return fmt.Errorf("error parsing Created date: %w: date=\"%s\"", err, s) + + } + + res, err := ts.UTC().MarshalText() + if err != nil { + return fmt.Errorf("error marshaling time: %w", err) + } + r.Created = string(res) + + // Also store created date as Unix time so it can be sorted in Algolia. 
	// Also store created date as Unix time so it can be sorted in Algolia.
	r.CreatedTime = ts.Unix()

	return nil
}

// parseRFCCurrentVersion parses the RFC Current Version from a Google Docs
// paragraph.
func (r *RFC) parseRFCCurrentVersion(p *docs.Paragraph) {
	r.CurrentVersion = parseParagraphWithText("Current Version", p)
}

// parseRFCOwners parses RFC Owners from a Google Docs paragraph.
func (r *RFC) parseRFCOwners(p *docs.Paragraph) {
	r.Owners = parseParagraphWithEmails("Owner", p)
}

// parseRFCPRD parses the RFC PRD from a Google Docs paragraph.
func (r *RFC) parseRFCPRD(p *docs.Paragraph) {
	r.PRD = parseParagraphWithText("PRD", p)

	// Text from the RFC template may be left in the doc.
	if r.PRD == "Link to PRD if applicable" {
		r.PRD = ""
	}
}

// parseRFCStakeholders parses RFC Stakeholders from a Google Docs paragraph.
func (r *RFC) parseRFCStakeholders(p *docs.Paragraph) {
	r.Stakeholders = parseParagraphWithEmails("Stakeholder", p)
}

// parseRFCStatus parses the RFC Status from a Google Docs paragraph. The
// status value is taken from the last bold text run after the "Status:"
// label. Callers (parseRFCHeader) only invoke this when p.Elements[0].TextRun
// is non-nil, so the unguarded dereference of the first element is safe.
func (r *RFC) parseRFCStatus(p *docs.Paragraph) {
	label := p.Elements[0].TextRun.Content
	var status string

	// Sometimes "Status: WIP" is collected together as one text element.
	if label == "Status: WIP" && p.Elements[0].TextRun.TextStyle.Bold {
		status = "WIP"
	} else {
		// Otherwise, scan the remaining elements; the last bold run wins.
		for i, e := range p.Elements {
			if i > 0 && e.TextRun != nil && e.TextRun.TextStyle != nil && e.TextRun.TextStyle.Bold {
				status = e.TextRun.Content
			}
		}
	}

	status = strings.TrimSpace(status)
	r.Status = status
}

// parseRFCSummary parses the RFC Summary from a Google Docs Body. It scans
// top-level paragraphs for one whose first text run is exactly "Summary:" and
// stores the concatenated remainder in r.Summary.
func (r *RFC) parseRFCSummary(b *docs.Body) {
	elems := b.Content

	for _, e := range elems {
		if e.Paragraph != nil {
			// Summary paragraph in the RFC template will have at least 2 elements.
			if len(e.Paragraph.Elements) > 1 {
				if e.Paragraph.Elements[0].TextRun != nil {
					if e.Paragraph.Elements[0].TextRun.Content == "Summary:" {
						// We found the summary paragraph and the rest of the elements
						// should be the summary value.
						var s string
						for i, ee := range e.Paragraph.Elements {
							if ee.TextRun != nil {
								if i > 0 {
									s += ee.TextRun.Content
								}
							}
						}
						r.Summary = strings.TrimSpace(s)
						return
					}
				}
			}
		}
	}
}

// parseRFCTitle parses RFC title and document number from a string (which
// should be the name of the Google Drive file). Each pattern is tried in
// order; the first match wins. If none match, the whole string becomes the
// title and no doc number is set.
func (r *RFC) parseRFCTitle(s string) {
	// Handle `[RFC-123] Some RFC Title` case. (Note: the regexp requires a
	// space — not a colon — after the closing bracket; the bracketed-with-colon
	// form falls through to the next pattern.)
	re := regexp.MustCompile(`\[(?P<DocID>[A-Z]+-[0-9xX#?]+)\] (?P<Title>.+)`)
	matches := re.FindStringSubmatch(s)
	if len(matches) > 1 {
		r.DocNumber = matches[1]
		r.Title = strings.TrimSpace(matches[2])
		return
	}

	// Handle `RFC-123: Some RFC Title` case.
	re = regexp.MustCompile(`(?P<DocID>[A-Z]+-[0-9xX#?]+): (?P<Title>.+)`)
	matches = re.FindStringSubmatch(s)
	if len(matches) > 1 {
		r.DocNumber = matches[1]
		r.Title = strings.TrimSpace(matches[2])
		return
	}

	// Handle `RFC-123 - Some RFC Title` case.
	re = regexp.MustCompile(`(?P<DocID>[A-Z]+-[0-9xX#?]+) - (?P<Title>.+)`)
	matches = re.FindStringSubmatch(s)
	if len(matches) > 1 {
		r.DocNumber = matches[1]
		r.Title = strings.TrimSpace(matches[2])
		return
	}

	// Handle `RFC-123 Some RFC Title` case.
	re = regexp.MustCompile(`(?P<DocID>[A-Z]+-[0-9xX#?]+) (?P<Title>.+)`)
	matches = re.FindStringSubmatch(s)
	if len(matches) > 1 {
		r.DocNumber = matches[1]
		r.Title = strings.TrimSpace(matches[2])
		return
	}

	// Couldn't parse title and doc ID, so setting RFC title to the whole
	// string.
	r.Title = s
}

// parseEmails parses email addresses from a string.
func parseEmails(s string) []string {
	// We need an @ to continue.
	if !strings.Contains(s, "@") {
		return []string{}
	}

	// Sanitize string.
+ s = gomoji.RemoveEmojis(s) + s = strings.TrimSpace(s) + s = strings.ReplaceAll(s, " ", "") + + // Handle "{a, b, c}@hashicorp.com" case. + if t, _ := regexp.MatchString("^{.+}@.+$", s); t { + split := strings.SplitN(s, "@", 2) + + // Get all email prefixes (before the @). + prefixes := strings.Split(strings.Trim(split[0], "{}"), ",") + + // Get the email domain (after the @). + domain := split[1] + + // Build results. + var res []string + for _, u := range prefixes { + res = append(res, u+"@"+domain) + } + + return res + } + + // Remove any trailing commas before splitting. + s = strings.TrimRight(s, ",") + + return removeEmptyStrings(strings.Split(s, ",")) +} + +// parseParagraphWithEmails parses a Google Doc paragraph containing email +// addresses. +func parseParagraphWithEmails(label string, p *docs.Paragraph) []string { + // Build string containing label and email addresses. + s := buildLabelAndValueString(label, p) + + // Parse email addresses. + return parseEmails(s) +} + +// parseParagraphWithText parses a Google Doc paragraph containing a text value. +func parseParagraphWithText(label string, p *docs.Paragraph) string { + // Build string containing label and text value. + s := buildLabelAndValueString(label, p) + + // If string is "N/A", return an empty string. + if strings.ToLower(s) == "n/a" { + return "" + } + + return s +} + +// buildLabelAndValueString builds a string containing a label and value from +// a Google Doc paragraph containing text in the form of `Label: Value`. +func buildLabelAndValueString(label string, p *docs.Paragraph) string { + var s string + if p.Elements != nil { + for _, e := range p.Elements { + if e.TextRun != nil { + s += e.TextRun.Content + } + } + } + + // Trim label. 
+ s = strings.TrimPrefix(s, fmt.Sprintf("%s: ", label)) + s = strings.TrimPrefix(s, fmt.Sprintf("%ss: ", label)) // Plural + s = strings.TrimPrefix(s, fmt.Sprintf("%s:", label)) // No space + s = strings.TrimPrefix(s, fmt.Sprintf("%ss:", label)) // Plural, no space + + s = strings.TrimSpace(s) + + return s +} + +// removeEmptyStrings removes any empty string elements from a slice of strings. +func removeEmptyStrings(in []string) []string { + var out []string + for _, s := range in { + if s != "" { + out = append(out, s) + } + } + return out +} diff --git a/pkg/hashicorpdocs/rfc_replace_header.go b/pkg/hashicorpdocs/rfc_replace_header.go new file mode 100644 index 000000000..451d67c85 --- /dev/null +++ b/pkg/hashicorpdocs/rfc_replace_header.go @@ -0,0 +1,806 @@ +package hashicorpdocs + +import ( + "fmt" + "net/url" + "path" + "strings" + "unicode/utf8" + + gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace" + "google.golang.org/api/docs/v1" +) + +// ReplaceHeader replaces the RFC document header, which is the first table +// in the document. 
+// +// The resulting table looks like this: +// +// |-----------------------------------------------------------------------------------| +// | Title: {{title}} | +// |-----------------------------------------------------------------------------------| +// | Summary: {{summary}} | +// |-----------------------------------------------------------------------------------| +// | | +// |-----------------------------------------------------------------------------------| +// | Created: {{created}} | Status: {{status}} | +// |-----------------------------------------------------------------------------------| +// | | +// |-----------------------------------------------------------------------------------| +// | Product: {{product}} | Owner: {{owner}} | +// |-----------------------------------------------------------------------------------| +// | Current Version: {{current-version}} | Contributors: {{contributors}} | +// |-----------------------------------------------------------------------------------| +// | Target Version: {{target-version}} | Other stakeholders: {{stakeholders}} | +// |-----------------------------------------------------------------------------------| +// | PRD: {{prd}} | Approvers: {{approvers}} | +// |-----------------------------------------------------------------------------------| +// | Tags: {{tags}} | +// |-----------------------------------------------------------------------------------| +// | | +// |-----------------------------------------------------------------------------------| +// | NOTE: This document is managed by Hermes... | +// |-----------------------------------------------------------------------------------| +// + +const ( + tableRows = 12 // Number of rows in the header table. +) + +func (doc *RFC) ReplaceHeader(fileID, baseURL string, isDraft bool, s *gw.Service) error { + // Get doc. 
+ d, err := s.GetDoc(fileID) + if err != nil { + return fmt.Errorf("error getting doc: %w", err) + } + + // Find the start and end indexes of the first table (assume that it is the + // doc header). + var ( + endIndex int64 + startIndex int64 + t *docs.Table + headerTableFound bool + ) + elems := d.Body.Content + for _, e := range elems { + if e.Table != nil { + t = e.Table + startIndex = e.StartIndex + endIndex = e.EndIndex + break + } + } + // startIndex should be 2, but we'll allow a little leeway in case someone + // accidentally added a newline or something. + if t != nil && startIndex < 5 { + headerTableFound = true + } else { + // Header table wasn't found, so we'll insert a new one at index 2. + startIndex = 2 + } + + // Delete existing header. + if headerTableFound { + req := &docs.BatchUpdateDocumentRequest{ + Requests: []*docs.Request{ + { + DeleteContentRange: &docs.DeleteContentRangeRequest{ + Range: &docs.Range{ + SegmentId: "", + StartIndex: startIndex, + EndIndex: endIndex + 1, + }, + }, + }, + }, + } + _, err = s.Docs.Documents.BatchUpdate(fileID, req).Do() + if err != nil { + return fmt.Errorf("error deleting existing header: %w", err) + } + } + + // Insert new header table. + req := &docs.BatchUpdateDocumentRequest{ + Requests: []*docs.Request{ + { + InsertTable: &docs.InsertTableRequest{ + Columns: 2, + Location: &docs.Location{ + Index: startIndex - 1, + }, + Rows: tableRows, + }, + }, + }, + } + _, err = s.Docs.Documents.BatchUpdate(fileID, req).Do() + if err != nil { + return fmt.Errorf("error inserting header table: %w", err) + } + + // Find new table index. + elems = d.Body.Content + for _, e := range elems { + if e.Table != nil { + startIndex = e.StartIndex + break + } + } + + // Apply formatting to the table. + req = &docs.BatchUpdateDocumentRequest{ + Requests: []*docs.Request{ + { + // Remove table borders (by setting width to 0 and setting color to + // white as a backup), and remove padding (by setting to 0). 
+ UpdateTableCellStyle: &docs.UpdateTableCellStyleRequest{ + Fields: "borderBottom,borderLeft,borderRight,borderTop,paddingBottom,paddingLeft,paddingRight,paddingTop", + TableCellStyle: &docs.TableCellStyle{ + BorderBottom: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + BorderLeft: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + BorderRight: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + BorderTop: &docs.TableCellBorder{ + Color: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 1.0, + Green: 1.0, + Red: 1.0, + }, + }, + }, + DashStyle: "SOLID", + Width: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + PaddingBottom: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + PaddingLeft: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + PaddingRight: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + PaddingTop: &docs.Dimension{ + Magnitude: 0, + Unit: "PT", + }, + }, + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: tableRows, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 0, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Update Title row minimum height. 
+ { + UpdateTableRowStyle: &docs.UpdateTableRowStyleRequest{ + Fields: "minRowHeight", + RowIndices: []int64{0}, + TableRowStyle: &docs.TableRowStyle{ + MinRowHeight: &docs.Dimension{ + Magnitude: 27, + Unit: "PT", + }, + }, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + + // Update Summary row minimum height. + { + UpdateTableRowStyle: &docs.UpdateTableRowStyleRequest{ + Fields: "minRowHeight", + RowIndices: []int64{1}, + TableRowStyle: &docs.TableRowStyle{ + MinRowHeight: &docs.Dimension{ + Magnitude: 11, + Unit: "PT", + }, + }, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + + // Merge cells for the Title row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 0, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for the Summary row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 1, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for blank row after the Summary row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 2, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for blank row after the Created/Status row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 4, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for the Tags row. 
+ { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 9, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for blank row after the Tags row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 10, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + + // Merge cells for the "Managed by Hermes" note row. + { + MergeTableCells: &docs.MergeTableCellsRequest{ + TableRange: &docs.TableRange{ + ColumnSpan: 2, + RowSpan: 1, + TableCellLocation: &docs.TableCellLocation{ + ColumnIndex: 0, + RowIndex: 11, + TableStartLocation: &docs.Location{ + Index: startIndex, + }, + }, + }, + }, + }, + }, + } + _, err = s.Docs.Documents.BatchUpdate(fileID, req).Do() + if err != nil { + return fmt.Errorf("error applying formatting to header table: %w", err) + } + + // Populate table. + var ( + pos int // Use to track position in document. + reqs []*docs.Request + cellReqs []*docs.Request // Temp var used for createTextCellRequests() results. + cellLength int // Temp var used for createTextCellRequests() results. + ) + + // Title cell. 
+ pos = int(startIndex) + 3 + titleText := fmt.Sprintf("[%s] %s", doc.DocNumber, doc.Title) + reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "bold,fontSize,foregroundColor", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + Bold: true, + FontSize: &docs.Dimension{ + Magnitude: 20, + Unit: "PT", + }, + ForegroundColor: &docs.OptionalColor{ + Color: &docs.Color{ + RgbColor: &docs.RgbColor{ + Blue: 0.2627451, + Green: 0.2627451, + Red: 0.2627451, + }, + }, + }, + }, + }, + }, + { + InsertText: &docs.InsertTextRequest{ + Location: &docs.Location{ + Index: int64(pos), + }, + Text: titleText, + }, + }, + }..., + ) + pos += len(titleText) + 5 + + // Summary cell. + summaryText := fmt.Sprintf("Summary: %s", doc.Summary) + reqs = append(reqs, + []*docs.Request{ + { + InsertText: &docs.InsertTextRequest{ + Location: &docs.Location{ + Index: int64(pos), + }, + Text: summaryText, + }, + }, + + // Bold "Summary:". + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "bold", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 8), + }, + TextStyle: &docs.TextStyle{ + Bold: true, + }, + }, + }, + }..., + ) + pos += len(summaryText) + 5 + + // Blank row after summary row. + reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "fontSize", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + FontSize: &docs.Dimension{ + Magnitude: 8, + Unit: "PT", + }, + }, + }, + }, + }...) + pos += 5 + + // Created cell. + cellReqs, cellLength = createTextCellRequests( + "Created", doc.Created, int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 2 + + // Status cell. + cellReqs, cellLength = createTextCellRequests( + "Status", "WIP | In-Review | Approved | Obsolete", int64(pos)) + reqs = append(reqs, cellReqs...) 
+ var statusStartIndex, statusEndIndex int + switch strings.ToLower(doc.Status) { + case "in review": + fallthrough + case "in-review": + statusStartIndex = 14 + statusEndIndex = 23 + case "approved": + statusStartIndex = 26 + statusEndIndex = 34 + case "obsolete": + statusStartIndex = 37 + statusEndIndex = 45 + case "wip": + fallthrough + default: + // Default to "WIP" for all unknown statuses. + statusStartIndex = 8 + statusEndIndex = 11 + } + reqs = append(reqs, + // Bold the status. + &docs.Request{ + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "bold", + Range: &docs.Range{ + StartIndex: int64(pos + statusStartIndex), + EndIndex: int64(pos + statusEndIndex), + }, + TextStyle: &docs.TextStyle{ + Bold: true, + }, + }, + }) + pos += cellLength + 3 + + // Blank row after Created/Status row. + reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "fontSize", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + FontSize: &docs.Dimension{ + Magnitude: 8, + Unit: "PT", + }, + }, + }, + }, + }...) + pos += 5 + + // Product cell. + cellReqs, cellLength = createTextCellRequests( + "Product", doc.Product, int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 2 + + // Owner cell. + cellReqs, cellLength = createTextCellRequests( + "Owner", doc.Owners[0], int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 3 + + // Current Version cell. + cellReqs, cellLength = createTextCellRequests( + "Current Version", doc.CurrentVersion, int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 2 + + // Contributors cell. + cellReqs, cellLength = createTextCellRequests( + "Contributors", strings.Join(doc.Contributors[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 3 + + // Target Version cell. 
+ cellReqs, cellLength = createTextCellRequests( + "Target Version", doc.TargetVersion, int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 2 + + // Other Stakeholders cell. + cellReqs, cellLength = createTextCellRequests( + "Other Stakeholders", strings.Join(doc.Stakeholders[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 3 + + // PRD cell. + // TODO: We only show a link with the text "PRD" now if a PRD is defined, but + // should show the document number instead. + prdCellVal := "" + if doc.PRD != "" { + prdCellVal = "PRD" + } + cellReqs, cellLength = createTextCellRequests( + "PRD", prdCellVal, int64(pos)) + reqs = append(reqs, cellReqs...) + if doc.PRD != "" { + reqs = append(reqs, + []*docs.Request{ + // Add link to PRD. + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "link", + Range: &docs.Range{ + StartIndex: int64(pos + 5), + EndIndex: int64(pos + 8), + }, + TextStyle: &docs.TextStyle{ + Link: &docs.Link{ + Url: doc.PRD, + }, + }, + }, + }, + }...) + } + pos += cellLength + 2 + + // Approvers cell. + // Build approvers slice with a check next to reviewers who have approved. + var approvers []string + for _, approver := range doc.Approvers { + if contains(doc.ApprovedBy, approver) { + approvers = append(approvers, "✅ "+approver) + } else if contains(doc.ChangesRequestedBy, approver) { + approvers = append(approvers, "❌ "+approver) + } else { + approvers = append(approvers, approver) + } + } + cellReqs, cellLength = createTextCellRequests( + "Approvers", strings.Join(approvers[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 3 + + // Tags cell. + cellReqs, cellLength = createTextCellRequests( + "Tags", strings.Join(doc.Tags[:], ", "), int64(pos)) + reqs = append(reqs, cellReqs...) + pos += cellLength + 5 + + // Blank row after Tags row. 
+ reqs = append(reqs, + []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "fontSize", + Range: &docs.Range{ + StartIndex: int64(pos), + EndIndex: int64(pos + 1), + }, + TextStyle: &docs.TextStyle{ + FontSize: &docs.Dimension{ + Magnitude: 8, + Unit: "PT", + }, + }, + }, + }, + }...) + pos += 5 + + // "Managed by Hermes..." note cell. + docURL, err := url.Parse(baseURL) + if err != nil { + return fmt.Errorf("error parsing base URL: %w", err) + } + docURL.Path = path.Join(docURL.Path, "document", doc.ObjectID) + docURLString := docURL.String() + docURLString = strings.TrimRight(docURLString, "/") + if isDraft { + docURLString += "?draft=true" + } + cellReqs, cellLength = createTextCellRequests( + "NOTE", + "This document is managed by Hermes and this header will be periodically overwritten using document metadata.", + int64(pos)) + reqs = append(reqs, cellReqs...) + reqs = append(reqs, + []*docs.Request{ + // Add link to document in Hermes. + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "link", + Range: &docs.Range{ + StartIndex: int64(pos + 11), + EndIndex: int64(pos + 19), + }, + TextStyle: &docs.TextStyle{ + Link: &docs.Link{ + Url: docURLString, + }, + }, + }, + }, + + // Add link to Hermes. + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "link", + Range: &docs.Range{ + StartIndex: int64(pos + 34), + EndIndex: int64(pos + 40), + }, + TextStyle: &docs.TextStyle{ + Link: &docs.Link{ + Url: baseURL, + }, + }, + }, + }, + }...) + pos += cellLength + 5 + + // Do the batch update. + _, err = s.Docs.Documents.BatchUpdate(fileID, + &docs.BatchUpdateDocumentRequest{ + Requests: reqs}). + Do() + if err != nil { + return fmt.Errorf("error populating table: %w", err) + } + + // Rename file with new title. 
+ err = s.RenameFile(fileID, fmt.Sprintf("[%s] %s", doc.DocNumber, doc.Title)) + if err != nil { + return fmt.Errorf("error renaming file with new title: %w", err) + } + + return nil +} + +// createTextCellRequests creates a slice of Google Docs requests for header +// table cells consisting of `cellName: cellVal`. +func createTextCellRequests( + cellName, cellVal string, + startIndex int64) (reqs []*docs.Request, cellLength int) { + + if cellVal == "" { + cellVal = "N/A" + } + cellText := fmt.Sprintf("%s: %s", cellName, cellVal) + cellLength = utf8.RuneCountInString(cellText) + + reqs = []*docs.Request{ + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "fontSize", + Range: &docs.Range{ + StartIndex: startIndex, + EndIndex: startIndex + 1, + }, + TextStyle: &docs.TextStyle{ + FontSize: &docs.Dimension{ + Magnitude: 8, + Unit: "PT", + }, + }, + }, + }, + { + InsertText: &docs.InsertTextRequest{ + Location: &docs.Location{ + Index: startIndex, + }, + Text: cellText, + }, + }, + { + UpdateTextStyle: &docs.UpdateTextStyleRequest{ + Fields: "bold", + Range: &docs.Range{ + StartIndex: startIndex, + EndIndex: startIndex + int64(len(cellName)), + }, + TextStyle: &docs.TextStyle{ + Bold: true, + }, + }, + }, + } + + return +} + +// contains returns true if a string is present in a slice of strings. +func contains(values []string, s string) bool { + for _, v := range values { + if s == v { + return true + } + } + return false +} diff --git a/pkg/links/data.go b/pkg/links/data.go new file mode 100644 index 000000000..99959fea4 --- /dev/null +++ b/pkg/links/data.go @@ -0,0 +1,63 @@ +package links + +import ( + "fmt" + "strings" + + "github.com/hashicorp-forge/hermes/pkg/algolia" +) + +// DeleteDocumentRedirectDetails deletes document redirect details from Algolia. 
+func DeleteDocumentRedirectDetails( + algo *algolia.Client, id string, docType string, docNumString string) error { + + if docNumString != "" && docType != "" { + objectID := getObjectID(docType, docNumString) + res, err := algo.Links.DeleteObject(objectID) + if err != nil { + return fmt.Errorf("error deleting redirect link details: %w", err) + } + err = res.Wait() + if err != nil { + return fmt.Errorf("error deleting redirect link details: %w", err) + } + } + + return nil +} + +// SaveDocumentRedirectDetails saves the short path of the document as the key +// and the document ID as the value in Algolia. +func SaveDocumentRedirectDetails( + algo *algolia.Client, id string, docType string, docNumString string) error { + + var ld LinkData + + // Save redirect details when document number {product-abbreviation}-{docnumber} is set + if docNumString != "" { + // Save object id as /doctype/{product_abbreviation-docnumber}. Eg. /rfc/lab-001 + ld.ObjectID = getObjectID(docType, docNumString) + // Save id of the document + ld.DocumentID = id + res, err := algo.Links.SaveObject(&ld) + if err != nil { + return fmt.Errorf("error saving redirect link details: %w", err) + } + err = res.Wait() + if err != nil { + return fmt.Errorf("error saving redirect link details: %w", err) + } + } + + return nil +} + +// getObjectID builds the ID for a document redirect details object in Algolia. +// Object ID's format is: /doctype/{product_abbreviation-docnumber} +// (e.g., "/rfc/lab-001"). 
+func getObjectID(docType, docNumString string) string { + return fmt.Sprintf( + "/%s/%s", + strings.ToLower(docType), + strings.ToLower(docNumString)) +} diff --git a/pkg/links/redirect.go b/pkg/links/redirect.go new file mode 100644 index 000000000..b26c359e5 --- /dev/null +++ b/pkg/links/redirect.go @@ -0,0 +1,116 @@ +package links + +import ( + "fmt" + "net/http" + "strings" + + "github.com/hashicorp-forge/hermes/pkg/algolia" + "github.com/hashicorp/go-hclog" +) + +// sharedRFCsFolderURL is the Google Drive URL to the shared RFCs folder +const sharedRFCsFolderURL = "https://drive.google.com/drive/folders/0AJA7q1x_uaLUUk9PVA" + +// sharedPRDsFolderURL is the Google Drive URL to the shared PRDs folder +const sharedPRDsFolderURL = "https://drive.google.com/drive/folders/0AJvQodV_kfUeUk9PVA" + +type LinkData struct { + // ObjectID is the short link path + ObjectID string `json:"objectID,omitempty"` + // DocumentID is the ID to the document + DocumentID string `json:"documentID,omitempty"` +} + +// RedirectHandler handles redirects from Hashilinks +func RedirectHandler(algo *algolia.Client, algoCfg *algolia.Config, log hclog.Logger) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Only allow GET requests. + if r.Method != http.MethodGet { + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + + // Check if the path matches exact paths such as "/l/rfc", "/l/rfc/", "/l/prd" + // or "/l/prd/". If so, redirect to the appropriate Google Drive shared + // folder URL. This is to support short link paths that existed in Hashilinks. + // Eg. https://go.hashi.co/rfc, https://go.hashi.co/prd. 
+ redirectURL := matchStaticPathRedirects(r.URL.Path) + if redirectURL != "" { + http.Redirect(w, r, redirectURL, http.StatusTemporaryRedirect) + return + } + + // Parse url path and validate + p, err := parseAndValidatePath(r.URL.Path) + if err != nil { + log.Error("invalid url path", "error", err) + http.Error(w, "Invalid url path", http.StatusBadRequest) + return + } + + // Get document associated with the short link path from Algolia + ld := LinkData{ + ObjectID: p, + } + + err = algo.Links.GetObject(p, &ld) + if err != nil { + log.Error("error getting redirect link from algolia", "error", err, "id", p) + http.Error(w, "Error getting redirect link", http.StatusInternalServerError) + return + } + + // Redirect request + if ld.DocumentID != "" { + redirectPath := fmt.Sprintf("/document/%s", ld.DocumentID) + log.Info("document id for short link found", + "short_path", p, + "document_id", ld.DocumentID, + "redirect_path", redirectPath, + ) + http.Redirect(w, r, redirectPath, http.StatusTemporaryRedirect) + } + }) +} + +// parseAndValidatePath parses the short URL that is requested on "/l" +// route that has the format /l/doctype/product-docnumber and validates +// that the path has only two fields and removes the "/l" prefix to help +// get a valid short URL key to perform a look up in Algolia +func parseAndValidatePath(p string) (string, error) { + // Remove redirect url path "/l" + p = strings.TrimPrefix(p, "/l") + + // Remove empty entries and validate path + urlPath := strings.Split(p, "/") + var resultPath []string + for _, v := range urlPath { + // Only append non-empty values, this remove + // any empty strings in the slice + if v != "" { + resultPath = append(resultPath, v) + } + } + // Check if there are only two fields in the resultPath slice + // Eg. 
The path /rfc/lab-001 will have ["rfc", "lab-001"] + // otherwise, the path is invalid + if len(resultPath) != 2 { + return "", fmt.Errorf("invalid url path") + } + + return fmt.Sprintf("/%s/%s", resultPath[0], resultPath[1]), nil +} + +// matchStaticPathRedirects matches the URL path provided +// with string paths and returns the appropriate +// redirect URL +func matchStaticPathRedirects(p string) string { + switch { + case p == "/l/rfc" || p == "/l/rfc/": + return sharedRFCsFolderURL + case p == "/l/prd" || p == "/l/prd/": + return sharedPRDsFolderURL + } + return "" +} diff --git a/pkg/models/document.go b/pkg/models/document.go new file mode 100644 index 000000000..4cedaf529 --- /dev/null +++ b/pkg/models/document.go @@ -0,0 +1,384 @@ +package models + +import ( + "errors" + "fmt" + "time" + + validation "github.com/go-ozzo/ozzo-validation/v4" + "gorm.io/gorm" + "gorm.io/gorm/clause" +) + +// Document is a model for a document. +type Document struct { + gorm.Model + + // GoogleFileID is the Google Drive file ID of the document. + GoogleFileID string `gorm:"index;not null;unique"` + + // Approvers is the list of users whose approval is requested for the + // document. + Approvers []*User `gorm:"many2many:document_reviews;"` + + // Contributors are users who have contributed to the document. + Contributors []*User `gorm:"many2many:document_contributors;"` + + // CustomFields contains custom fields. + CustomFields []*DocumentCustomField + + // DocumentCreatedAt is the time of document creation. + DocumentCreatedAt time.Time + + // DocumentModifiedAt is the time the document was last modified. + DocumentModifiedAt time.Time + + // DocumentNumber is a document identifier containing a product/area + // abbreviation and a number (e.g., "TF-123"). + DocumentNumber int `gorm:"index:latest_product_number"` + + // DocumentType is the document type. 
+ DocumentType DocumentType + DocumentTypeID uint + + // Imported is true if the document was not created through the application. + Imported bool + + // Owner is the owner of the document. + Owner *User `gorm:"default:null;not null"` + OwnerID *uint `gorm:"default:null"` + + // Product is the product or area that the document relates to. + Product Product + ProductID uint `gorm:"index:latest_product_number"` + + // Status is the status of the document. + Status DocumentStatus + + // Summary is a summary of the document. + Summary string + + // Title is the title of the document. It only contains the title, and not the + // product abbreviation, document number, or document type. + Title string +} + +// Documents is a slice of documents. +type Documents []Document + +// DocumentStatus is the status of the document (e.g., "WIP", "In-Review", +// "Approved", "Obsolete"). +type DocumentStatus int + +const ( + UnspecifiedDocumentStatus DocumentStatus = iota + WIPDocumentStatus + InReviewDocumentStatus + ApprovedDocumentStatus + ObsoleteDocumentStatus +) + +// BeforeSave is a hook used to find associations before saving. +func (d *Document) BeforeSave(tx *gorm.DB) error { + // Get approvers. + var approvers []*User + for _, a := range d.Approvers { + if err := a.Get(tx); err != nil { + return fmt.Errorf("error getting approver: %w", err) + } + approvers = append(approvers, a) + } + d.Approvers = approvers + + // Get contributors. + var contributors []*User + for _, c := range d.Contributors { + if err := c.FirstOrCreate(tx); err != nil { + return fmt.Errorf("error getting contributor: %w", err) + } + contributors = append(contributors, c) + } + d.Contributors = contributors + + // Get custom fields. 
+ var customFields []*DocumentCustomField + for _, c := range d.CustomFields { + if err := c.DocumentTypeCustomField.Get(tx); err != nil { + return fmt.Errorf("error getting document type custom field: %w", err) + } + c.DocumentTypeCustomFieldID = c.DocumentTypeCustomField.DocumentType.ID + customFields = append(customFields, c) + } + d.CustomFields = customFields + + // Get document type. + dt := d.DocumentType + if err := dt.Get(tx); err != nil { + return fmt.Errorf("error getting document type: %w", err) + } + d.DocumentType = dt + d.DocumentTypeID = dt.ID + + // Get owner. + if d.Owner != nil && d.Owner.EmailAddress != "" { + if err := d.Owner.Get(tx); err != nil { + return fmt.Errorf("error getting owner: %w", err) + } + d.OwnerID = &d.Owner.ID + } + + // Get product. + if d.Product.Name != "" { + if err := d.Product.Get(tx); err != nil { + return fmt.Errorf("error getting product: %w", err) + } + d.ProductID = d.Product.ID + } + + return nil +} + +// Create creates a document in database db. +func (d *Document) Create(db *gorm.DB) error { + if err := validation.ValidateStruct(d, + validation.Field( + &d.ID, + validation.When(d.GoogleFileID == "", + validation.Required.Error("either ID or GoogleFileID is required"), + ), + ), + validation.Field( + &d.GoogleFileID, + validation.When(d.ID == 0, + validation.Required.Error("either ID or GoogleFileID is required"), + ), + ), + ); err != nil { + return err + } + + if err := d.createAssocations(db); err != nil { + return fmt.Errorf("error creating associations: %w", err) + } + + return db.Transaction(func(tx *gorm.DB) error { + if err := tx. + Model(&d). + Where(Document{GoogleFileID: d.GoogleFileID}). + Omit(clause.Associations). // We get associations in the BeforeSave hook. + Create(&d). 
+ Error; err != nil { + return err + } + + if err := d.replaceAssocations(tx); err != nil { + return fmt.Errorf("error replacing associations: %w", err) + } + + return nil + }) +} + +// Find finds all documents from database db with the provided query, and +// assigns them to the receiver. +func (d *Documents) Find(db *gorm.DB, query string) error { + return db. + Where(query). + Preload(clause.Associations). + Find(&d).Error +} + +// FirstOrCreate finds the first document by Google file ID or creates a new +// record if it does not exist. +// func (d *Document) FirstOrCreate(db *gorm.DB) error { +// return db. +// Where(Document{GoogleFileID: d.GoogleFileID}). +// Preload(clause.Associations). +// FirstOrCreate(&d).Error +// } + +// Get gets a document from database db by Google file ID, and assigns it to the +// receiver. +func (d *Document) Get(db *gorm.DB) error { + if err := validation.ValidateStruct(d, + validation.Field( + &d.ID, + validation.When(d.GoogleFileID == "", + validation.Required.Error("either ID or GoogleFileID is required"), + ), + ), + validation.Field( + &d.GoogleFileID, + validation.When(d.ID == 0, + validation.Required.Error("either ID or GoogleFileID is required"), + ), + ), + ); err != nil { + return err + } + + return db. + Where(Document{GoogleFileID: d.GoogleFileID}). + Preload(clause.Associations). + First(&d).Error +} + +// GetLatestProductNumber gets the latest document number for a product. +func GetLatestProductNumber(db *gorm.DB, + documentTypeName, productName string) (int, error) { + // Validate required fields. + if err := validation.Validate(db, validation.Required); err != nil { + return 0, err + } + if err := validation.Validate(productName, validation.Required); err != nil { + return 0, err + } + + // Get document type. + dt := DocumentType{ + Name: documentTypeName, + } + if err := dt.Get(db); err != nil { + return 0, fmt.Errorf("error getting document type: %w", err) + } + + // Get product. 
+ p := Product{ + Name: productName, + } + if err := p.Get(db); err != nil { + return 0, fmt.Errorf("error getting product: %w", err) + } + + // Get document with largest document number. + var d Document + if err := db. + Where(Document{ + DocumentTypeID: dt.ID, + ProductID: p.ID, + }). + Order("document_number desc"). + First(&d). + Error; err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return 0, nil + } else { + return 0, err + } + } + + return d.DocumentNumber, nil +} + +// Upsert updates or inserts the receiver document into database db. +func (d *Document) Upsert(db *gorm.DB) error { + if err := validation.ValidateStruct(d, + validation.Field( + &d.ID, + validation.When(d.GoogleFileID == "", + validation.Required.Error("either ID or GoogleFileID is required"), + ), + ), + validation.Field( + &d.GoogleFileID, + validation.When(d.ID == 0, + validation.Required.Error("either ID or GoogleFileID is required"), + ), + ), + ); err != nil { + return err + } + + // Create required associations. + if err := d.createAssocations(db); err != nil { + return fmt.Errorf("error creating associations: %w", err) + } + + return db.Transaction(func(tx *gorm.DB) error { + if err := tx. + Model(&d). + Where(Document{GoogleFileID: d.GoogleFileID}). + Omit(clause.Associations). // We manage associations in the BeforeSave hook. + Assign(*d). + FirstOrCreate(&d). + Error; err != nil { + return err + } + + // Replace has-many associations because we may have removed instances. + if err := d.replaceAssocations(tx); err != nil { + return fmt.Errorf("error replacing associations: %w", err) + } + + if err := d.Get(tx); err != nil { + return fmt.Errorf("error getting the document after upsert") + } + + return nil + }) +} + +// createAssocations creates required assocations for a document. +func (d *Document) createAssocations(db *gorm.DB) error { + // Find or create approvers. 
+	var approvers []*User
+	for _, a := range d.Approvers {
+		if err := a.FirstOrCreate(db); err != nil {
+			return fmt.Errorf("error finding or creating approver: %w", err)
+		}
+		approvers = append(approvers, a)
+	}
+	d.Approvers = approvers
+
+	// Find or create contributors.
+	var contributors []*User
+	for _, c := range d.Contributors {
+		if err := c.FirstOrCreate(db); err != nil {
+			return fmt.Errorf("error finding or creating contributor: %w", err)
+		}
+		contributors = append(contributors, c)
+	}
+	d.Contributors = contributors
+
+	// Find or create owner.
+	if d.Owner != nil && d.Owner.EmailAddress != "" {
+		if err := d.Owner.FirstOrCreate(db); err != nil {
+			return fmt.Errorf("error finding or creating owner: %w", err)
+		}
+		d.OwnerID = &d.Owner.ID
+	}
+
+	return nil
+}
+
+// replaceAssocations replaces associations for a document.
+func (d *Document) replaceAssocations(db *gorm.DB) error {
+	// Replace approvers.
+	if err := db.
+		Session(&gorm.Session{SkipHooks: true}).
+		Model(&d).
+		Association("Approvers").
+		Replace(d.Approvers); err != nil {
+		return err
+	}
+
+	// Replace contributors.
+	if err := db.
+		Session(&gorm.Session{SkipHooks: true}).
+		Model(&d).
+		Association("Contributors").
+		Replace(d.Contributors); err != nil {
+		return err
+	}
+
+	// Replace custom fields.
+	if err := db.
+		Session(&gorm.Session{SkipHooks: true}).
+		Model(&d).
+		Association("CustomFields").
+		Replace(d.CustomFields); err != nil {
+		return err
+	}
+
+	return nil
+}
diff --git a/pkg/models/document_custom_field.go b/pkg/models/document_custom_field.go
new file mode 100644
index 000000000..5630d4fda
--- /dev/null
+++ b/pkg/models/document_custom_field.go
@@ -0,0 +1,63 @@
+package models
+
+import (
+	"fmt"
+	"time"
+
+	validation "github.com/go-ozzo/ozzo-validation/v4"
+	"gorm.io/gorm"
+	"gorm.io/gorm/clause"
+)
+
+type DocumentCustomField struct {
+	CreatedAt time.Time
+	UpdatedAt time.Time
+	DeletedAt gorm.DeletedAt `gorm:"index"`
+
+	DocumentID                uint `gorm:"primaryKey"`
+	DocumentTypeCustomFieldID uint `gorm:"primaryKey"`
+	DocumentTypeCustomField   DocumentTypeCustomField
+	// Value datatypes.JSON
+	Value string
+}
+
+// BeforeSave is a hook to find or create associations before saving.
+func (d *DocumentCustomField) BeforeSave(tx *gorm.DB) error {
+	// Validate required fields.
+	if err := validation.ValidateStruct(&d.DocumentTypeCustomField,
+		validation.Field(
+			&d.DocumentTypeCustomField.Name, validation.Required),
+	); err != nil {
+		return err
+	}
+	if err := validation.ValidateStruct(&d.DocumentTypeCustomField.DocumentType,
+		validation.Field(
+			&d.DocumentTypeCustomField.DocumentType.Name, validation.Required),
+	); err != nil {
+		return err
+	}
+
+	// Get document type custom field.
+	// dt := d.DocumentTypeCustomField.DocumentType
+	if err := d.DocumentTypeCustomField.Get(tx); err != nil {
+		return fmt.Errorf("error getting document type custom field: %w", err)
+	}
+	// Use the custom field's own ID for the composite key, not its document
+	// type's ID.
+	d.DocumentTypeCustomFieldID = d.DocumentTypeCustomField.ID
+
+	return nil
+}
+
+// Upsert finds the first document custom field for the receiver's document ID
+// and document type custom field ID, or creates the record if it does not
+// exist in database db. The result is saved back to the receiver.
+// TODO: this is a find-or-create, not a true upsert.
+func (d *DocumentCustomField) Upsert(db *gorm.DB) error {
+	return db.
+ Where(DocumentCustomField{ + DocumentID: d.DocumentID, + DocumentTypeCustomFieldID: d.DocumentTypeCustomFieldID, + }). + Preload(clause.Associations). + FirstOrCreate(&d).Error +} diff --git a/pkg/models/document_review.go b/pkg/models/document_review.go new file mode 100644 index 000000000..91f844c27 --- /dev/null +++ b/pkg/models/document_review.go @@ -0,0 +1,160 @@ +package models + +import ( + "fmt" + "time" + + validation "github.com/go-ozzo/ozzo-validation/v4" + "gorm.io/gorm" + "gorm.io/gorm/clause" +) + +type DocumentReview struct { + CreatedAt time.Time + UpdatedAt time.Time + DeletedAt gorm.DeletedAt `gorm:"index"` + + DocumentID uint `gorm:"primaryKey"` + Document Document + UserID uint `gorm:"primaryKey"` + User User + Status DocumentReviewStatus +} + +type DocumentReviewStatus int + +const ( + UnspecifiedDocumentReviewStatus DocumentReviewStatus = iota + ApprovedDocumentReviewStatus + ChangesRequestedDocumentReviewStatus +) + +// DocumentReviews is a slice of document reviews. +type DocumentReviews []DocumentReview + +// BeforeSave is a hook to find or create associations before saving. +func (d *DocumentReview) BeforeSave(tx *gorm.DB) error { + // Validate required fields. + if err := validation.ValidateStruct(&d.Document, + validation.Field( + &d.Document.GoogleFileID, validation.Required), + ); err != nil { + return err + } + if err := validation.ValidateStruct(&d.User, + validation.Field( + &d.User.EmailAddress, validation.Required), + ); err != nil { + return err + } + + // Get document. + if err := d.Document.Get(tx); err != nil { + return fmt.Errorf("error getting document: %w", err) + } + d.DocumentID = d.Document.ID + + // Get user. + if err := d.User.Get(tx); err != nil { + return fmt.Errorf("error getting user: %w", err) + } + d.UserID = d.User.ID + + return nil +} + +// Find finds all document reviews with the provided query, and assigns them to +// the receiver. 
+func (d *DocumentReviews) Find(db *gorm.DB, dr DocumentReview) error { + // Validate required fields. + if err := validation.ValidateStruct(&dr.Document, + validation.Field( + &dr.Document.GoogleFileID, + validation.When(dr.User.EmailAddress == "", + validation.Required.Error("at least a Document's GoogleFileID or User's EmailAddress is required"), + ), + ), + ); err != nil { + return err + } + if err := validation.ValidateStruct(&dr.User, + validation.Field( + &dr.User.EmailAddress, + validation.When(dr.Document.GoogleFileID == "", + validation.Required.Error("at least a Document's GoogleFileID or User's EmailAddress is required"), + ), + ), + ); err != nil { + return err + } + + // Get document. + if dr.Document.GoogleFileID != "" { + if err := dr.Document.Get(db); err != nil { + return fmt.Errorf("error getting document: %w", err) + } + dr.DocumentID = dr.Document.ID + } + + // Get user. + if dr.User.EmailAddress != "" { + if err := dr.User.Get(db); err != nil { + return fmt.Errorf("error getting user: %w", err) + } + dr.UserID = dr.User.ID + } + + return db. + Where(DocumentReview{ + DocumentID: dr.DocumentID, + UserID: dr.UserID, + }). + Preload(clause.Associations). + Find(&d). + Error +} + +// Get gets the document review from database db, and assigns it to the +// receiver. +func (d *DocumentReview) Get(db *gorm.DB) error { + // Validate required fields. + if err := validation.ValidateStruct(&d.Document, + validation.Field(&d.Document.GoogleFileID, validation.Required), + ); err != nil { + return err + } + if err := validation.ValidateStruct(&d.User, + validation.Field(&d.User.EmailAddress, validation.Required), + ); err != nil { + return err + } + + // Get document. + if err := d.Document.Get(db); err != nil { + return fmt.Errorf("error getting document: %w", err) + } + d.DocumentID = d.Document.ID + + // Get user. + if err := d.User.Get(db); err != nil { + return fmt.Errorf("error getting user: %w", err) + } + d.UserID = d.User.ID + + return db. 
+ Where(DocumentReview{ + DocumentID: d.DocumentID, + UserID: d.UserID, + }). + Preload(clause.Associations). + First(&d). + Error +} + +// Update updates the document review in database db. +func (d *DocumentReview) Update(db *gorm.DB) error { + return db. + Model(&d). + Updates(*d). + Error +} diff --git a/pkg/models/document_review_test.go b/pkg/models/document_review_test.go new file mode 100644 index 000000000..62d8b239a --- /dev/null +++ b/pkg/models/document_review_test.go @@ -0,0 +1,263 @@ +package models + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDocumentReviewModel(t *testing.T) { + dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN") + if dsn == "" { + t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set") + } + + t.Run("Get and Update", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create a document type", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + dt := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + } + err := dt.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a product", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Get the review before we create the document", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + dr := DocumentReview{ + Document: Document{ + GoogleFileID: "fileID1", + }, + User: User{ + EmailAddress: "a@approver.com", + }, + } + err := dr.Get(db) + require.Error(err) + }) + + var d Document + t.Run("Create a document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d = Document{ + GoogleFileID: "fileID1", + Approvers: []*User{ + { + EmailAddress: "a@approver.com", + }, + { + EmailAddress: "b@approver.com", + }, + }, + DocumentType: DocumentType{ + 
Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + } + err := d.Create(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + }) + + t.Run("Get the review", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + dr := DocumentReview{ + Document: Document{ + GoogleFileID: "fileID1", + }, + User: User{ + EmailAddress: "b@approver.com", + }, + } + err := dr.Get(db) + require.NoError(err) + assert.EqualValues(1, dr.DocumentID) + assert.Equal("fileID1", dr.Document.GoogleFileID) + assert.EqualValues(2, dr.UserID) + assert.Equal("b@approver.com", dr.User.EmailAddress) + assert.Equal(UnspecifiedDocumentReviewStatus, dr.Status) + }) + + t.Run("Update review status", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + dr := DocumentReview{ + Document: Document{ + GoogleFileID: "fileID1", + }, + User: User{ + EmailAddress: "b@approver.com", + }, + Status: ApprovedDocumentReviewStatus, + } + err := dr.Update(db) + require.NoError(err) + assert.EqualValues(1, dr.DocumentID) + assert.EqualValues(2, dr.UserID) + assert.Equal(ApprovedDocumentReviewStatus, dr.Status) + }) + + t.Run("Get the review to verify", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + dr := DocumentReview{ + Document: Document{ + GoogleFileID: "fileID1", + }, + User: User{ + EmailAddress: "b@approver.com", + }, + } + err := dr.Get(db) + require.NoError(err) + assert.EqualValues(1, dr.DocumentID) + assert.EqualValues(2, dr.UserID) + assert.Equal(ApprovedDocumentReviewStatus, dr.Status) + }) + }) + + t.Run("Find", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create a document type", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + dt := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + } + err := dt.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a product", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := 
Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + }) + + var d1, d2, d3 Document + t.Run("Create first document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d1 = Document{ + GoogleFileID: "fileID1", + Approvers: []*User{ + { + EmailAddress: "a@approver.com", + }, + { + EmailAddress: "b@approver.com", + }, + }, + DocumentType: DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + } + err := d1.Create(db) + require.NoError(err) + assert.EqualValues(1, d1.ID) + }) + + t.Run("Create second document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d2 = Document{ + GoogleFileID: "fileID2", + Approvers: []*User{ + { + EmailAddress: "a@approver.com", + }, + }, + DocumentType: DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + } + err := d2.Create(db) + require.NoError(err) + assert.EqualValues(2, d2.ID) + }) + + t.Run("Create third document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d3 = Document{ + GoogleFileID: "fileID3", + Approvers: []*User{ + { + EmailAddress: "b@approver.com", + }, + }, + DocumentType: DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + } + err := d3.Create(db) + require.NoError(err) + assert.EqualValues(3, d3.ID) + }) + + t.Run("Find reviews without any search fields", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + var revs DocumentReviews + err := revs.Find(db, DocumentReview{}) + require.Error(err) + }) + + t.Run("Find all reviews for a document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + var revs DocumentReviews + err := revs.Find(db, DocumentReview{ + Document: Document{ + GoogleFileID: "fileID1", + }, + }) + require.NoError(err) + require.Len(revs, 2) + assert.Equal("a@approver.com", revs[0].User.EmailAddress) + assert.Equal("b@approver.com", 
revs[1].User.EmailAddress) + }) + + t.Run("Find all reviews for a user", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + var revs DocumentReviews + err := revs.Find(db, DocumentReview{ + User: User{ + EmailAddress: "b@approver.com", + }, + }) + require.NoError(err) + require.Len(revs, 2) + assert.Equal("fileID1", revs[0].Document.GoogleFileID) + assert.Equal("fileID3", revs[1].Document.GoogleFileID) + assert.Equal("b@approver.com", revs[0].User.EmailAddress) + assert.Equal("b@approver.com", revs[1].User.EmailAddress) + }) + }) +} diff --git a/pkg/models/document_test.go b/pkg/models/document_test.go new file mode 100644 index 000000000..fa17c5ece --- /dev/null +++ b/pkg/models/document_test.go @@ -0,0 +1,759 @@ +package models + +import ( + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDocumentModel(t *testing.T) { + dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN") + if dsn == "" { + t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set") + } + + t.Run("Create and Get", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + // Try to create an empty document (should error). + d := Document{} + err := d.Create(db) + assert.Error(err) + assert.Empty(d.ID) + + // Create a document type. + dt := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + CustomFields: []DocumentTypeCustomField{ + { + Name: "CustomStringField", + Type: StringDocumentTypeCustomFieldType, + }, + { + Name: "CustomPersonField", + Type: PersonDocumentTypeCustomFieldType, + }, + { + Name: "CustomPeopleField", + Type: PeopleDocumentTypeCustomFieldType, + }, + }, + } + err = dt.FirstOrCreate(db) + require.NoError(err) + + // Create a product. + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err = p.FirstOrCreate(db) + require.NoError(err) + + // Create a first document with all fields. 
+ d = Document{ + GoogleFileID: "fileID1", + Approvers: []*User{ + { + EmailAddress: "a@approver.com", + }, + { + EmailAddress: "b@approver.com", + }, + }, + Contributors: []*User{ + { + EmailAddress: "a@contributor.com", + }, + { + EmailAddress: "b@contributor.com", + }, + }, + CustomFields: []*DocumentCustomField{ + { + DocumentTypeCustomField: DocumentTypeCustomField{ + Name: "CustomStringField", + DocumentType: DocumentType{ + Name: "DT1", + }, + }, + Value: "string value 1", + }, + }, + DocumentCreatedAt: time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC), + DocumentModifiedAt: time.Date(2003, 1, 1, 0, 0, 0, 0, time.UTC), + DocumentNumber: 1, + DocumentType: DocumentType{ + Name: "DT1", + }, + Imported: true, + Owner: &User{ + EmailAddress: "a@owner.com", + }, + Product: Product{ + Name: "Product1", + }, + Status: InReviewDocumentStatus, + Summary: "test summary", + Title: "test title", + } + err = d.Create(db) + + // Create test function because we're going to reuse this for testing Get() + // afterwards. + testDoc1 := func(d Document) { + require.NoError(err) + assert.NotEmpty(d.ID) + + // GoogleFileID. + assert.Equal("fileID1", d.GoogleFileID) + + // Approvers. + require.Len(d.Approvers, 2) + assert.NotEmpty(d.Approvers[0].ID) + assert.Equal("a@approver.com", d.Approvers[0].EmailAddress) + assert.NotEmpty(d.Approvers[1].ID) + assert.Equal("b@approver.com", d.Approvers[1].EmailAddress) + + // Contributors. + require.Equal(2, len(d.Contributors)) + assert.NotEmpty(d.Contributors[0].ID) + assert.Equal("a@contributor.com", d.Contributors[0].EmailAddress) + assert.NotEmpty(d.Contributors[1].ID) + assert.Equal("b@contributor.com", d.Contributors[1].EmailAddress) + + // CustomFields. + require.Len(d.CustomFields, 1) + assert.Equal("string value 1", d.CustomFields[0].Value) + + // DocumentCreatedAt. + assert.WithinDuration( + time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC), d.DocumentCreatedAt, 0) + + // DocumentModifiedAt. 
+ assert.WithinDuration( + time.Date(2003, 1, 1, 0, 0, 0, 0, time.UTC), d.DocumentModifiedAt, 0) + + // DocumentNumber. + assert.Equal(1, d.DocumentNumber) + + // DocumentType. + assert.NotEmpty(d.DocumentType.ID) + assert.Equal("DT1", d.DocumentType.Name) + assert.Equal("DocumentType1", d.DocumentType.LongName) + + // Imported. + assert.Equal(true, d.Imported) + + // Owner. + assert.NotEmpty(d.Owner.ID) + assert.Equal("a@owner.com", d.Owner.EmailAddress) + + // Product. + assert.NotEmpty(d.Product.ID) + assert.Equal("Product1", d.Product.Name) + assert.Equal("P1", d.Product.Abbreviation) + + // Status. + assert.Equal(InReviewDocumentStatus, d.Status) + + // Summary. + assert.Equal("test summary", d.Summary) + + // Title. + assert.Equal("test title", d.Title) + } + testDoc1(d) + + // Get the first document. + get := Document{ + GoogleFileID: "fileID1", + } + err = get.Get(db) + testDoc1(get) + + // Try creating a document with the same Google file ID (should error). + d = Document{ + GoogleFileID: "fileID1", + DocumentType: DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + } + err = d.Create(db) + require.Error(err) + assert.Empty(d.ID) + + // Create a second (minimal) document. + d = Document{ + GoogleFileID: "fileID2", + DocumentType: DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + } + err = d.Create(db) + require.NoError(err) + assert.NotEmpty(d.ID) + + // Get the second document. 
+ get = Document{ + GoogleFileID: "fileID2", + } + err = get.Get(db) + require.NoError(err) + assert.NotEmpty(get.ID) + assert.Equal("fileID2", get.GoogleFileID) + assert.NotEmpty(get.DocumentType.ID) + assert.Equal("DT1", get.DocumentType.Name) + assert.NotEmpty(get.Product.ID) + assert.Equal("Product1", get.Product.Name) + }) + + t.Run("create two documents by Upsert and verify with Get", + func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create a document type", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + dt := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + } + err := dt.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a product", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a document by upserting", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + DocumentType: DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.NotEmpty(d.DocumentType.ID) + assert.Equal("DT1", d.DocumentType.Name) + assert.Equal("DocumentType1", d.DocumentType.LongName) + assert.NotEmpty(d.Product.ID) + assert.Equal("Product1", d.Product.Name) + assert.Equal("P1", d.Product.Abbreviation) + }) + + t.Run("Create a second document by upserting", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID2", + DocumentType: DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(2, d.ID) + assert.Equal("fileID2", d.GoogleFileID) + assert.NotEmpty(d.DocumentType.ID) + 
assert.Equal("DT1", d.DocumentType.Name) + assert.Equal("DocumentType1", d.DocumentType.LongName) + assert.NotEmpty(d.Product.ID) + assert.Equal("Product1", d.Product.Name) + assert.Equal("P1", d.Product.Abbreviation) + }) + + t.Run("Verify first document with a Get", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + } + err := d.Get(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.NotEmpty(d.DocumentType.ID) + assert.Equal("DT1", d.DocumentType.Name) + assert.Equal("DocumentType1", d.DocumentType.LongName) + assert.NotEmpty(d.Product.ID) + assert.Equal("Product1", d.Product.Name) + assert.Equal("P1", d.Product.Abbreviation) + }) + + t.Run("Verify second document with a Get", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID2", + } + err := d.Get(db) + require.NoError(err) + assert.EqualValues(2, d.ID) + assert.Equal("fileID2", d.GoogleFileID) + assert.NotEmpty(d.DocumentType.ID) + assert.Equal("DT1", d.DocumentType.Name) + assert.Equal("DocumentType1", d.DocumentType.LongName) + assert.NotEmpty(d.Product.ID) + assert.Equal("Product1", d.Product.Name) + assert.Equal("P1", d.Product.Abbreviation) + }) + }) + + t.Run("Upsert contributors", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create a document type", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + dt := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + } + err := dt.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a product", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a document without contributors by upserting", + func(t *testing.T) { + assert, require := 
assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + DocumentType: DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + }, + Product: Product{ + Name: "Product1", + Abbreviation: "P1", + }, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.Empty(d.Contributors) + }) + + t.Run("Add two contributors by upserting", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + Contributors: []*User{ + { + EmailAddress: "a@contributor.com", + }, + { + EmailAddress: "b@contributor.com", + }, + }, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + require.Len(d.Contributors, 2) + assert.NotEmpty(d.Contributors[0].ID) + assert.Equal("a@contributor.com", d.Contributors[0].EmailAddress) + assert.NotEmpty(d.Contributors[1].ID) + assert.Equal("b@contributor.com", d.Contributors[1].EmailAddress) + }) + + t.Run("Verify with Get", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + } + err := d.Get(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + require.Len(d.Contributors, 2) + assert.NotEmpty(d.Contributors[0].ID) + assert.Equal("a@contributor.com", d.Contributors[0].EmailAddress) + assert.NotEmpty(d.Contributors[1].ID) + assert.Equal("b@contributor.com", d.Contributors[1].EmailAddress) + }) + + t.Run("Update to only the second contributor", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + Contributors: []*User{ + { + EmailAddress: "b@contributor.com", + }, + }, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + require.Equal(1, len(d.Contributors)) + assert.NotEmpty(d.Contributors[0].ID) + 
assert.Equal("b@contributor.com", d.Contributors[0].EmailAddress) + }) + }) + + // TODO: should we allow this? + /* + t.Run("Upsert Owner", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create a document type", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + dt := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + } + err := dt.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a product", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a document by Upsert", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + DocumentType: DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + }, + Owner: &User{ + EmailAddress: "a@a.com", + }, + Product: Product{ + Name: "Product1", + Abbreviation: "P1", + }, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.Equal("a@a.com", d.Owner.EmailAddress) + }) + + t.Run("Get the document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + } + err := d.Get(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.Equal("a@a.com", d.Owner.EmailAddress) + }) + + t.Run("Update the Owner field by Upsert", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + Owner: &User{ + EmailAddress: "b@b.com", + }, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.Equal("b@b.com", d.Owner.EmailAddress) + }) + + t.Run("Get the document after upserting", func(t *testing.T) { + assert, require := 
assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + } + err := d.Get(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.Equal("b@b.com", d.Owner.EmailAddress) + }) + + t.Run("Update the Owner field back to first value by Upsert", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + Owner: &User{ + EmailAddress: "a@a.com", + }, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.Equal("a@a.com", d.Owner.EmailAddress) + }) + + t.Run("Get the document after upserting", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + } + err := d.Get(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.Equal("a@a.com", d.Owner.EmailAddress) + }) + }) + */ + + t.Run("Upsert Summary", func(t *testing.T) { + // assert, require := assert.New(t), require.New(t) + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create a document type", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + dt := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + } + err := dt.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a product", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a document by Upsert", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + Approvers: []*User{ + { + EmailAddress: "a@approver.com", + }, + { + EmailAddress: "b@approver.com", + }, + }, + GoogleFileID: "fileID1", + DocumentType: DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + }, + Product: Product{ + Name: "Product1", + 
Abbreviation: "P1", + }, + Summary: "summary1", + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.Equal("summary1", d.Summary) + }) + + t.Run("Get the document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + } + err := d.Get(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.Equal("summary1", d.Summary) + }) + + t.Run("Update the Summary field by Upsert", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := Document{ + GoogleFileID: "fileID1", + Summary: "summary2", + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("fileID1", d.GoogleFileID) + assert.Equal("summary2", d.Summary) + }) + }) +} + +func TestGetLatestProductNumber(t *testing.T) { + dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN") + if dsn == "" { + t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set") + } + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create a document type", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + + dt := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + } + err := dt.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a product", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Get latest product number without any documents", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + num, err := GetLatestProductNumber(db, "DT1", "Product1") + require.NoError(err) + assert.Equal(0, num) + }) + + t.Run("Create a document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + d := Document{ + GoogleFileID: "fileID1", + DocumentType: 
DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + DocumentNumber: 4, + } + err := d.Create(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + }) + + t.Run("Get latest product number", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + num, err := GetLatestProductNumber(db, "DT1", "Product1") + require.NoError(err) + assert.Equal(4, num) + }) + + t.Run("Create another document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + d := Document{ + GoogleFileID: "fileID2", + DocumentType: DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + DocumentNumber: 42, + } + err := d.Create(db) + require.NoError(err) + assert.EqualValues(2, d.ID) + }) + + t.Run("Get latest product number", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + num, err := GetLatestProductNumber(db, "DT1", "Product1") + require.NoError(err) + assert.Equal(42, num) + }) + + t.Run("Create a second document type", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + + dt := DocumentType{ + Name: "DT2", + LongName: "DocumentType2", + } + err := dt.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a document of the same product and second document type", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + d := Document{ + GoogleFileID: "fileID3", + DocumentType: DocumentType{ + Name: "DT2", + }, + Product: Product{ + Name: "Product1", + }, + DocumentNumber: 2, + } + err := d.Create(db) + require.NoError(err) + assert.EqualValues(3, d.ID) + }) + + t.Run("Get latest product number", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + num, err := GetLatestProductNumber(db, "DT2", "Product1") + require.NoError(err) + assert.Equal(2, num) + }) +} diff --git a/pkg/models/document_type.go b/pkg/models/document_type.go new file mode 100644 index 000000000..618f2d127 --- /dev/null +++ 
b/pkg/models/document_type.go @@ -0,0 +1,136 @@ +package models + +import ( + "fmt" + + validation "github.com/go-ozzo/ozzo-validation/v4" + "gorm.io/datatypes" + "gorm.io/gorm" + "gorm.io/gorm/clause" +) + +// DocumentType is a model for a type of document (e.g., "RFC", "PRD"). +type DocumentType struct { + gorm.Model + + // Name is the name of the document type, generally an abbreviation. + // Example: "RFC" + Name string `gorm:"index;not null;unique"` + + // LongName is the longer name for the document type. + // Example: "Request for Comments" + LongName string `gorm:"default:null;not null"` + + // Description is the description of the document type. + // Example: "Create a Request for Comments document to present a proposal to + // colleagues for their review and feedback." + Description string + + // MoreInfoLinkText is the text for a "more info" link. + // Example: "When should I create an RFC?" + MoreInfoLinkText string + + // MoreInfoLinkURL is the URL for a "more info" link. + MoreInfoLinkURL string + + // CustomFields contain custom fields that are specific to a particular + // document type. + CustomFields []DocumentTypeCustomField + + // Checks are document type checks, which require acknowledging a check box in + // order to publish a document. + Checks datatypes.JSON +} + +// DocumentTypes is a slice of document types. +type DocumentTypes []DocumentType + +// FirstOrCreate finds the first document type by name or creates a new record +// if it does not exist. +func (d *DocumentType) FirstOrCreate(db *gorm.DB) error { + return db. + Where(DocumentType{Name: d.Name}). + FirstOrCreate(&d). + Error +} + +// Get gets a document type from database db by document type name, and assigns +// it to the receiver. +func (d *DocumentType) Get(db *gorm.DB) error { + return db. + Where(DocumentType{Name: d.Name}). + Preload(clause.Associations). + First(&d). + Error +} + +// GetAll gets all document types from database db, and assigns them to the +// receiver. 
+func (d *DocumentTypes) GetAll(db *gorm.DB) error { + return db.Find(&d). + Error +} + +// Upsert updates or inserts the receiver into database db. +func (d *DocumentType) Upsert(db *gorm.DB) error { + if err := validation.ValidateStruct(d, + validation.Field(&d.Name, validation.Required), + ); err != nil { + return err + } + + return db.Transaction(func(tx *gorm.DB) error { + if err := tx. + Where(DocumentType{ + Name: d.Name, + }). + Omit(clause.Associations). + Assign(*d). + FirstOrCreate(&d). + Error; err != nil { + return err + } + + if err := d.upsertAssocations(tx); err != nil { + return fmt.Errorf("error upserting associations: %w", err) + } + + if err := d.Get(tx); err != nil { + return fmt.Errorf("error getting the record after upsert: %w", err) + } + + return nil + }) +} + +// upsertAssocations creates required assocations. +func (d *DocumentType) upsertAssocations(db *gorm.DB) error { + // Custom fields. + var customFields []DocumentTypeCustomField + for _, c := range d.CustomFields { + // Make sure document type name is provided. + c.DocumentType.Name = d.Name + if err := c.Upsert(db); err != nil { + return fmt.Errorf("error upserting document type custom field: %w", err) + } + customFields = append(customFields, c) + } + d.CustomFields = customFields + + return nil +} + +// getAssocations gets assocations. +func (d *DocumentType) getAssocations(db *gorm.DB) error { + // Custom fields. 
+ var customFields []DocumentTypeCustomField + for _, c := range d.CustomFields { + if err := c.Get(db); err != nil { + return fmt.Errorf("error getting document type custom field: %w", err) + } + customFields = append(customFields, c) + } + d.CustomFields = customFields + + return nil +} diff --git a/pkg/models/document_type_custom_field.go b/pkg/models/document_type_custom_field.go new file mode 100644 index 000000000..8a7dbc776 --- /dev/null +++ b/pkg/models/document_type_custom_field.go @@ -0,0 +1,107 @@ +package models + +import ( + "fmt" + + validation "github.com/go-ozzo/ozzo-validation/v4" + "gorm.io/gorm" + "gorm.io/gorm/clause" +) + +type DocumentTypeCustomField struct { + gorm.Model + + Name string + DocumentTypeID uint + DocumentType DocumentType + ReadOnly bool + Type DocumentTypeCustomFieldType +} + +type DocumentTypeCustomFieldType int + +const ( + UnspecifiedDocumentTypeCustomFieldType DocumentTypeCustomFieldType = iota + StringDocumentTypeCustomFieldType + PersonDocumentTypeCustomFieldType + PeopleDocumentTypeCustomFieldType +) + +// Get gets a document type custom field from database db by name and document +// type name, and assigns it to the receiver. +func (d *DocumentTypeCustomField) Get(db *gorm.DB) error { + if err := validation.ValidateStruct(d, + validation.Field(&d.Name, validation.Required), + ); err != nil { + return err + } + if err := validation.ValidateStruct(&d.DocumentType, + validation.Field(&d.DocumentType.Name, validation.Required), + ); err != nil { + return err + } + + if err := d.getAssocations(db); err != nil { + return fmt.Errorf("error getting associations: %w", err) + } + + return db. + Where(DocumentTypeCustomField{ + Name: d.Name, + DocumentTypeID: d.DocumentTypeID, + }). + Omit(clause.Associations). + First(&d). + Error +} + +// getAssocations gets assocations. +func (d *DocumentTypeCustomField) getAssocations(db *gorm.DB) error { + // Get document type. 
+ if err := d.DocumentType.Get(db); err != nil { + return fmt.Errorf("error getting document type: %w", err) + } + d.DocumentTypeID = d.DocumentType.ID + + return nil +} + +// Upsert updates or inserts the receiver into database db. +func (d *DocumentTypeCustomField) Upsert(db *gorm.DB) error { + if err := validation.ValidateStruct(d, + validation.Field(&d.Name, validation.Required), + validation.Field(&d.Type, validation.Required), + ); err != nil { + return err + } + if err := validation.ValidateStruct(&d.DocumentType, + validation.Field( + &d.DocumentType.Name, validation.Required), + ); err != nil { + return err + } + + if err := d.getAssocations(db); err != nil { + return fmt.Errorf("error getting associations: %w", err) + } + + return db.Transaction(func(tx *gorm.DB) error { + if err := tx. + Where(DocumentTypeCustomField{ + Name: d.Name, + DocumentTypeID: d.DocumentTypeID, + }). + Omit(clause.Associations). + Assign(*d). + FirstOrCreate(&d). + Error; err != nil { + return err + } + + if err := d.Get(tx); err != nil { + return fmt.Errorf("error getting the record after upsert: %w", err) + } + + return nil + }) +} diff --git a/pkg/models/document_type_custom_field_test.go b/pkg/models/document_type_custom_field_test.go new file mode 100644 index 000000000..501f36c76 --- /dev/null +++ b/pkg/models/document_type_custom_field_test.go @@ -0,0 +1,161 @@ +package models + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDocumentTypeCustomFieldModel(t *testing.T) { + dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN") + if dsn == "" { + t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set") + } + + t.Run("Upsert and Get", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Get a document type custom field before any exist", + func(t *testing.T) { + require := require.New(t) + + d := DocumentTypeCustomField{ + Name: "CustomField1", + 
DocumentType: DocumentType{ + Name: "DT1", + }, + } + err := d.Get(db) + require.Error(err) + }) + + t.Run("Upsert an empty document type custom field", + func(t *testing.T) { + require := require.New(t) + + p := DocumentTypeCustomField{} + err := p.Upsert(db) + require.Error(err) + }) + + t.Run("Create a document type", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + } + err := d.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("DT1", d.Name) + assert.Equal("DocumentType1", d.LongName) + }) + + t.Run("Create a first document type custom field using Upsert", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + d := DocumentTypeCustomField{ + Name: "CustomField1", + DocumentType: DocumentType{ + Name: "DT1", + }, + Type: StringDocumentTypeCustomFieldType, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("CustomField1", d.Name) + require.NotNil(d.DocumentType) + assert.EqualValues(1, d.DocumentType.ID) + assert.Equal("DT1", d.DocumentType.Name) + }) + + t.Run("Get an empty document type custom field", + func(t *testing.T) { + require := require.New(t) + + p := DocumentTypeCustomField{} + err := p.Get(db) + require.Error(err) + }) + + t.Run("Upsert the first document type custom field with no changes", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + d := DocumentTypeCustomField{ + Name: "CustomField1", + DocumentType: DocumentType{ + Name: "DT1", + }, + Type: StringDocumentTypeCustomFieldType, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("CustomField1", d.Name) + require.NotNil(d.DocumentType) + assert.EqualValues(1, d.DocumentType.ID) + assert.Equal("DT1", d.DocumentType.Name) + }) + + t.Run("Create a second document type custom field (same document type) "+ + "using Upsert", + func(t 
*testing.T) { + assert, require := assert.New(t), require.New(t) + + d := DocumentTypeCustomField{ + Name: "CustomField2", + DocumentType: DocumentType{ + Name: "DT1", + }, + Type: StringDocumentTypeCustomFieldType, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(2, d.ID) + assert.Equal("CustomField2", d.Name) + require.NotNil(d.DocumentType) + assert.EqualValues(1, d.DocumentType.ID) + assert.Equal("DT1", d.DocumentType.Name) + }) + + t.Run("Create a second document type", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := DocumentType{ + Name: "DT2", + LongName: "DocumentType2", + } + err := d.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(2, d.ID) + assert.Equal("DT2", d.Name) + assert.Equal("DocumentType2", d.LongName) + }) + + t.Run("Create a third document type custom field (same name) "+ + "using Upsert", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + d := DocumentTypeCustomField{ + Name: "CustomField1", + DocumentType: DocumentType{ + Name: "DT2", + }, + Type: StringDocumentTypeCustomFieldType, + } + err := d.Upsert(db) + require.NoError(err) + assert.EqualValues(3, d.ID) + assert.Equal("CustomField1", d.Name) + require.NotNil(d.DocumentType) + assert.EqualValues(2, d.DocumentType.ID) + assert.Equal("DT2", d.DocumentType.Name) + }) + }) +} diff --git a/pkg/models/document_type_test.go b/pkg/models/document_type_test.go new file mode 100644 index 000000000..a38b57d32 --- /dev/null +++ b/pkg/models/document_type_test.go @@ -0,0 +1,143 @@ +package models + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/gorm" +) + +func TestDocumentType(t *testing.T) { + dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN") + if dsn == "" { + t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set") + } + + t.Run("FirstOrCreate and Get", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) 
+ db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + // Get document type, which won't exist yet (should error). + d := DocumentType{ + Name: "DT1", + } + err := d.Get(db) + require.Error(err) + require.ErrorIs(gorm.ErrRecordNotFound, err) + + // Create a document type. + d = DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + } + err = d.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("DT1", d.Name) + assert.Equal("DocumentType1", d.LongName) + + // Get the document type. + d = DocumentType{ + Name: "DT1", + } + err = d.Get(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("DT1", d.Name) + assert.Equal("DocumentType1", d.LongName) + + // Create another document type. + d = DocumentType{ + Name: "DT2", + LongName: "DocumentType2", + } + err = d.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(2, d.ID) + assert.Equal("DT2", d.Name) + assert.Equal("DocumentType2", d.LongName) + + // Get the document type. + d = DocumentType{ + Name: "DT2", + } + err = d.Get(db) + require.NoError(err) + assert.EqualValues(2, d.ID) + assert.Equal("DT2", d.Name) + assert.Equal("DocumentType2", d.LongName) + + // Get all document types. 
+ ds := DocumentTypes{} + err = ds.GetAll(db) + require.NoError(err) + require.Len(ds, 2) + assert.EqualValues(1, ds[0].ID) + assert.Equal("DT1", ds[0].Name) + assert.Equal("DocumentType1", ds[0].LongName) + assert.EqualValues(2, ds[1].ID) + assert.Equal("DT2", ds[1].Name) + assert.Equal("DocumentType2", ds[1].LongName) + }) + + t.Run("FirstOrCreate with custom fields", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create document type", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + CustomFields: []DocumentTypeCustomField{ + { + Name: "CustomStringField", + Type: StringDocumentTypeCustomFieldType, + }, + { + Name: "CustomPersonField", + Type: PersonDocumentTypeCustomFieldType, + }, + { + Name: "CustomPeopleField", + Type: PeopleDocumentTypeCustomFieldType, + }, + }, + } + err := d.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("DT1", d.Name) + assert.Equal("DocumentType1", d.LongName) + require.Len(d.CustomFields, 3) + assert.Equal("CustomStringField", d.CustomFields[0].Name) + assert.Equal(StringDocumentTypeCustomFieldType, d.CustomFields[0].Type) + assert.Equal("CustomPersonField", d.CustomFields[1].Name) + assert.Equal(PersonDocumentTypeCustomFieldType, d.CustomFields[1].Type) + assert.Equal("CustomPeopleField", d.CustomFields[2].Name) + assert.Equal(PeopleDocumentTypeCustomFieldType, d.CustomFields[2].Type) + }) + + t.Run("Get document type", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + d := DocumentType{ + Name: "DT1", + } + err := d.Get(db) + require.NoError(err) + assert.EqualValues(1, d.ID) + assert.Equal("DT1", d.Name) + assert.Equal("DocumentType1", d.LongName) + require.Len(d.CustomFields, 3) + assert.Equal("CustomStringField", d.CustomFields[0].Name) + assert.Equal(StringDocumentTypeCustomFieldType, d.CustomFields[0].Type) + 
assert.Equal("CustomPersonField", d.CustomFields[1].Name) + assert.Equal(PersonDocumentTypeCustomFieldType, d.CustomFields[1].Type) + assert.Equal("CustomPeopleField", d.CustomFields[2].Name) + assert.Equal(PeopleDocumentTypeCustomFieldType, d.CustomFields[2].Type) + }) + }) +} diff --git a/pkg/models/gorm.go b/pkg/models/gorm.go new file mode 100644 index 000000000..529e2314e --- /dev/null +++ b/pkg/models/gorm.go @@ -0,0 +1,16 @@ +package models + +func ModelsToAutoMigrate() []interface{} { + return []interface{}{ + &DocumentType{}, + &Document{}, + &DocumentCustomField{}, + &DocumentReview{}, + &DocumentTypeCustomField{}, + &IndexerFolder{}, + &IndexerMetadata{}, + &Product{}, + &ProductLatestDocumentNumber{}, + &User{}, + } +} diff --git a/pkg/models/indexer_folder.go b/pkg/models/indexer_folder.go new file mode 100644 index 000000000..849427f61 --- /dev/null +++ b/pkg/models/indexer_folder.go @@ -0,0 +1,63 @@ +package models + +import ( + "fmt" + "log" + "time" + + validation "github.com/go-ozzo/ozzo-validation/v4" + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +// IndexerFolder is a model for a indexer folder. +type IndexerFolder struct { + gorm.Model + + // GoogleDriveID is the Google Drive ID of the folder. + GoogleDriveID string `gorm:"default:null;not null;uniqueIndex"` + + // LastIndexedAt is the time that the folder was last indexed. + LastIndexedAt time.Time +} + +// Get gets the indexer folder and assigns it to the receiver. +func (f *IndexerFolder) Get(db *gorm.DB) error { + if err := validation.ValidateStruct(f, + validation.Field(&f.GoogleDriveID, validation.Required), + ); err != nil { + return err + } + + // Don't log "record not found" errors (will still return the error). + tx := db.Session(&gorm.Session{Logger: logger.New( + log.Default(), + logger.Config{IgnoreRecordNotFoundError: true}, + )}) + return tx. + Where(IndexerFolder{GoogleDriveID: f.GoogleDriveID}). + First(&f). 
+ Error +} + +// Upsert updates or inserts the receiver indexer folder into database db. +func (l *IndexerFolder) Upsert(db *gorm.DB) error { + if err := validation.ValidateStruct(l, + validation.Field(&l.GoogleDriveID, validation.Required), + ); err != nil { + return err + } + + tx := db. + Where(IndexerFolder{GoogleDriveID: l.GoogleDriveID}). + Assign(*l). + FirstOrCreate(&l) + if err := tx.Error; err != nil { + return err + } + if tx.RowsAffected != 1 { + return fmt.Errorf("expected 1 row affected, got %d", tx.RowsAffected) + } + + return nil +} diff --git a/pkg/models/indexer_folder_test.go b/pkg/models/indexer_folder_test.go new file mode 100644 index 000000000..33b3036c5 --- /dev/null +++ b/pkg/models/indexer_folder_test.go @@ -0,0 +1,98 @@ +package models + +import ( + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/gorm" +) + +func TestIndexerFolder(t *testing.T) { + dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN") + if dsn == "" { + t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set") + } + + t.Run("Get and Upsert", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + // Get folder, which won't exist yet (should error). + l := IndexerFolder{ + GoogleDriveID: "ID1", + } + err := l.Get(db) + require.Error(err) + require.ErrorIs(err, gorm.ErrRecordNotFound) + + // Insert folder using Upsert. + time1 := time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + l = IndexerFolder{ + GoogleDriveID: "ID1", + LastIndexedAt: time1, + } + err = l.Upsert(db) + require.NoError(err) + assert.EqualValues(1, l.ID) + assert.Equal("ID1", l.GoogleDriveID) + assert.Equal(time1, l.LastIndexedAt) + + // Get folder. 
+ l = IndexerFolder{ + GoogleDriveID: "ID1", + } + err = l.Get(db) + require.NoError(err) + assert.EqualValues(1, l.ID) + assert.Equal("ID1", l.GoogleDriveID) + assert.Equal(time1, l.LastIndexedAt.UTC()) + + // Insert another folder using Upsert. + time2 := time.Date(2002, 1, 1, 0, 0, 0, 0, time.UTC) + l = IndexerFolder{ + GoogleDriveID: "ID2", + LastIndexedAt: time2, + } + err = l.Upsert(db) + require.NoError(err) + assert.EqualValues(2, l.ID) + assert.Equal("ID2", l.GoogleDriveID) + assert.Equal(time2, l.LastIndexedAt.UTC()) + + // Get folder. + l = IndexerFolder{ + GoogleDriveID: "ID2", + } + err = l.Get(db) + require.NoError(err) + assert.EqualValues(2, l.ID) + assert.Equal("ID2", l.GoogleDriveID) + assert.Equal(time2, l.LastIndexedAt.UTC()) + + // Update the second folder using Upsert. + time3 := time.Date(2003, 1, 1, 0, 0, 0, 0, time.UTC) + l = IndexerFolder{ + GoogleDriveID: "ID2", + LastIndexedAt: time3, + } + err = l.Upsert(db) + require.NoError(err) + assert.EqualValues(2, l.ID) + assert.Equal("ID2", l.GoogleDriveID) + assert.Equal(time3, l.LastIndexedAt.UTC()) + + // Get folder. + l = IndexerFolder{ + GoogleDriveID: "ID2", + } + err = l.Get(db) + require.NoError(err) + assert.EqualValues(2, l.ID) + assert.Equal("ID2", l.GoogleDriveID) + assert.Equal(time3, l.LastIndexedAt.UTC()) + }) +} diff --git a/pkg/models/indexer_metadata.go b/pkg/models/indexer_metadata.go new file mode 100644 index 000000000..ad2e7f0a1 --- /dev/null +++ b/pkg/models/indexer_metadata.go @@ -0,0 +1,45 @@ +package models + +import ( + "fmt" + "log" + "time" + + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +// Indexer is a model for indexer metadata. +type IndexerMetadata struct { + gorm.Model + + // LastFullIndexAt is the time that the indexer last completed a full index. + LastFullIndexAt time.Time +} + +// Get gets the indexer metadata and assigns it to the receiver. 
+func (m *IndexerMetadata) Get(db *gorm.DB) error { + // Don't log "record not found" errors (will still return the error). + tx := db.Session(&gorm.Session{Logger: logger.New( + log.Default(), + logger.Config{IgnoreRecordNotFoundError: true}, + )}) + return tx. + First(&m, 1). // There should only ever be one row in this table. + Error +} + +// Upsert updates or inserts the indexer metadata. +func (m *IndexerMetadata) Upsert(db *gorm.DB) error { + tx := db. + Assign(*m). + FirstOrCreate(&m, 1) + if err := tx.Error; err != nil { + return err + } + if tx.RowsAffected != 1 { + return fmt.Errorf("expected 1 row affected, got %d", tx.RowsAffected) + } + + return nil +} diff --git a/pkg/models/indexer_metadata_test.go b/pkg/models/indexer_metadata_test.go new file mode 100644 index 000000000..5901c2693 --- /dev/null +++ b/pkg/models/indexer_metadata_test.go @@ -0,0 +1,64 @@ +package models + +import ( + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/gorm" +) + +func TestIndexerMetadata(t *testing.T) { + dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN") + if dsn == "" { + t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set") + } + + t.Run("Get and Upsert", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + // Get metadata, which won't exist yet (should error). + l := IndexerMetadata{} + err := l.Get(db) + require.Error(err) + require.ErrorIs(gorm.ErrRecordNotFound, err) + + // Insert metadata using upsert. + time1 := time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + l = IndexerMetadata{ + LastFullIndexAt: time1, + } + err = l.Upsert(db) + require.NoError(err) + assert.EqualValues(1, l.ID) + assert.Equal(time1, l.LastFullIndexAt) + + // Get metadata. 
+ l = IndexerMetadata{} + err = l.Get(db) + require.NoError(err) + assert.EqualValues(1, l.ID) + assert.Equal(time1, l.LastFullIndexAt.UTC()) + + // Update metadata using upsert. + time2 := time.Date(2002, 1, 1, 0, 0, 0, 0, time.UTC) + l = IndexerMetadata{ + LastFullIndexAt: time2, + } + err = l.Upsert(db) + require.NoError(err) + assert.EqualValues(1, l.ID) + assert.Equal(time2, l.LastFullIndexAt.UTC()) + + // Get metadata. + l = IndexerMetadata{} + err = l.Get(db) + require.NoError(err) + assert.EqualValues(1, l.ID) + assert.Equal(time2, l.LastFullIndexAt.UTC()) + }) +} diff --git a/pkg/models/product.go b/pkg/models/product.go new file mode 100644 index 000000000..97e308fa5 --- /dev/null +++ b/pkg/models/product.go @@ -0,0 +1,78 @@ +package models + +import ( + validation "github.com/go-ozzo/ozzo-validation/v4" + "gorm.io/gorm" + "gorm.io/gorm/clause" +) + +// Product is a model for product data. +type Product struct { + gorm.Model + + // Name is the name of the product. + Name string `gorm:"default:null;index;not null;type:citext;unique"` + + // Abbreviation is a short group of capitalized letters to represent the + // product. + Abbreviation string `gorm:"default:null;not null;type:citext;unique"` + + // UserSubscribers are the users that subscribed to this product. + UserSubscribers []User `gorm:"many2many:user_product_subscriptions;"` +} + +// FirstOrCreate finds the first product by name or creates a record if it does +// not exist in database db. +func (p *Product) FirstOrCreate(db *gorm.DB) error { + if err := validation.ValidateStruct(p, + validation.Field( + &p.ID, + validation.When(p.Name == "", + validation.Required.Error("either ID or Name is required")), + ), + validation.Field( + &p.Name, + validation.When(p.ID == 0, + validation.Required.Error("either ID or Name is required"))), + ); err != nil { + return err + } + + return db. + Where(Product{Name: p.Name}). + FirstOrCreate(&p). 
+ Error +} + +// Get gets a product from database db by name, and assigns it back to the +// receiver. +func (p *Product) Get(db *gorm.DB) error { + if err := validation.ValidateStruct(p, + validation.Field( + &p.ID, + validation.When(p.Name == "", + validation.Required.Error("either ID or Name is required")), + ), + validation.Field( + &p.Name, + validation.When(p.ID == 0, + validation.Required.Error("either ID or Name is required"))), + ); err != nil { + return err + } + + return db. + Where(Product{Name: p.Name}). + Preload(clause.Associations). + First(&p). + Error +} + +// Upsert updates or inserts a product into database db. +func (p *Product) Upsert(db *gorm.DB) error { + return db. + Where(Product{Name: p.Name}). + Assign(*p). + FirstOrCreate(&p). + Error +} diff --git a/pkg/models/product_latest_document_number.go b/pkg/models/product_latest_document_number.go new file mode 100644 index 000000000..9c8aa5be1 --- /dev/null +++ b/pkg/models/product_latest_document_number.go @@ -0,0 +1,90 @@ +package models + +import ( + "fmt" + "time" + + "gorm.io/gorm" + "gorm.io/gorm/clause" +) + +// ProductLatestDocumentNumber is a model for latest product document numbers. +type ProductLatestDocumentNumber struct { + CreatedAt time.Time + UpdatedAt time.Time + DeletedAt gorm.DeletedAt `gorm:"index"` + + DocumentType DocumentType + DocumentTypeID uint `gorm:"primaryKey"` + Product Product + ProductID uint `gorm:"primaryKey"` + + // LatestDocumentNumber is a the latest document number per product and + // document type. + LatestDocumentNumber int `gorm:"default:null;not null"` +} + +// BeforeSave is a hook to find or create associations before saving. +func (p *ProductLatestDocumentNumber) BeforeSave(tx *gorm.DB) error { + if err := p.getAssociations(tx); err != nil { + return fmt.Errorf("error getting associations: %w", err) + } + + return nil +} + +// Get gets the latest product number and assigns it to the receiver. 
+func (p *ProductLatestDocumentNumber) Get(db *gorm.DB) error { + if err := p.getAssociations(db); err != nil { + return fmt.Errorf("error getting associations: %w", err) + } + + return db. + Where(ProductLatestDocumentNumber{ + DocumentType: DocumentType{ + Name: p.DocumentType.Name, + }, + Product: Product{ + Name: p.Product.Name, + }, + }). + Preload(clause.Associations). + First(&p).Error +} + +// Upsert updates or inserts the receiver document into database db. +func (p *ProductLatestDocumentNumber) Upsert(db *gorm.DB) error { + return db. + Where(ProductLatestDocumentNumber{ + DocumentType: DocumentType{ + Name: p.DocumentType.Name, + }, + Product: Product{ + Name: p.Product.Name, + }, + }). + Assign(*p). + FirstOrCreate(&p). + Error +} + +// getAssociations gets required associations, creating them where appropriate. +func (p *ProductLatestDocumentNumber) getAssociations(tx *gorm.DB) error { + // Find or create document type. + dt := p.DocumentType + if err := dt.FirstOrCreate(tx); err != nil { + return fmt.Errorf("error getting document type: %w", err) + } + p.DocumentType = dt + p.DocumentTypeID = dt.ID + + // Find or create product. 
+ product := p.Product + if err := product.FirstOrCreate(tx); err != nil { + return fmt.Errorf("error getting product: %w", err) + } + p.Product = product + p.ProductID = product.ID + + return nil +} diff --git a/pkg/models/product_latest_document_number_test.go b/pkg/models/product_latest_document_number_test.go new file mode 100644 index 000000000..855a2efac --- /dev/null +++ b/pkg/models/product_latest_document_number_test.go @@ -0,0 +1,206 @@ +package models + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestProductLatestDocumentNumber(t *testing.T) { + dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN") + if dsn == "" { + t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set") + } + + t.Run("Get and Upsert", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run( + "Get latest product document number which won't exist yet (should error)", + func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := ProductLatestDocumentNumber{} + err := p.Get(db) + require.Error(err) + }) + + var product Product + t.Run("Create a product", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + product = Product{ + Name: "product1", + Abbreviation: "TEST", + } + err := product.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, product.ID) + assert.Equal("product1", product.Name) + assert.Equal("TEST", product.Abbreviation) + }) + + t.Run( + "Try to upsert a new latest product document number with only a product", + func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := ProductLatestDocumentNumber{ + Product: product, + } + err := p.Upsert(db) + require.Error(err) + }) + + // Try to upsert a new latest product document number with only a product + // and latest document number (should error). 
+ t.Run( + "Try to upsert a new latest product document number with only a product"+ + "and latest document number", + func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := ProductLatestDocumentNumber{ + Product: product, + LatestDocumentNumber: 5, + } + err := p.Upsert(db) + require.Error(err) + }) + + var docType DocumentType + t.Run("Create a document type", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + docType = DocumentType{ + Name: "RFC", + LongName: "Request For Comments", + } + err := docType.FirstOrCreate(db) + require.NoError(err) + assert.NotEmpty(docType.ID) + assert.Equal("RFC", docType.Name) + assert.Equal("Request For Comments", docType.LongName) + }) + + t.Run("Try to upsert a new latest product document number without a latest"+ + " document number", + func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := ProductLatestDocumentNumber{ + DocumentType: docType, + Product: product, + } + err := p.Upsert(db) + require.Error(err) + }) + + t.Run("Insert by upserting a new latest product document number", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := ProductLatestDocumentNumber{ + DocumentType: DocumentType{ + Name: "RFC", + }, + LatestDocumentNumber: 5, + Product: Product{ + Name: "product1", + }, + } + err := p.Upsert(db) + require.NoError(err) + assert.NotEmpty(p.DocumentTypeID) + assert.Equal("RFC", p.DocumentType.Name) + assert.Equal("Request For Comments", p.DocumentType.LongName) + assert.EqualValues(1, p.ProductID) + assert.Equal("product1", p.Product.Name) + assert.Equal(5, p.LatestDocumentNumber) + }) + + t.Run("Get the latest product document number", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := ProductLatestDocumentNumber{ + DocumentType: DocumentType{ + Name: "RFC", + }, + Product: Product{ + Name: "product1", + }, + } + err := p.Get(db) + require.NoError(err) + assert.NotEmpty(p.DocumentTypeID) + 
assert.Equal("RFC", p.DocumentType.Name) + assert.Equal("Request For Comments", p.DocumentType.LongName) + assert.EqualValues(1, p.ProductID) + assert.Equal("product1", p.Product.Name) + assert.Equal(5, p.LatestDocumentNumber) + }) + + t.Run("Update by upserting a latest product document number", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := ProductLatestDocumentNumber{ + DocumentType: DocumentType{ + Name: "RFC", + }, + LatestDocumentNumber: 10, + Product: Product{ + Name: "product1", + }, + } + err := p.Upsert(db) + require.NoError(err) + assert.NotEmpty(p.DocumentTypeID) + assert.Equal("RFC", p.DocumentType.Name) + assert.Equal("Request For Comments", p.DocumentType.LongName) + assert.EqualValues(1, p.ProductID) + assert.Equal("product1", p.Product.Name) + assert.Equal(10, p.LatestDocumentNumber) + }) + + t.Run("Get the latest product document number", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := ProductLatestDocumentNumber{ + DocumentType: DocumentType{ + Name: "RFC", + }, + Product: Product{ + Name: "product1", + }, + } + err := p.Get(db) + require.NoError(err) + assert.NotEmpty(p.DocumentTypeID) + assert.Equal("RFC", p.DocumentType.Name) + assert.Equal("Request For Comments", p.DocumentType.LongName) + assert.EqualValues(1, p.ProductID) + assert.Equal("product1", p.Product.Name) + assert.Equal(10, p.LatestDocumentNumber) + }) + + t.Run( + "Insert by upserting a new latest product document number with a "+ + "document type and product that both don't exist yet", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := ProductLatestDocumentNumber{ + DocumentType: DocumentType{ + Name: "NEW", + LongName: "New Document Type", + }, + LatestDocumentNumber: 1, + Product: Product{ + Name: "New Product", + Abbreviation: "NP", + }, + } + err := p.Upsert(db) + require.NoError(err) + assert.NotEmpty(p.DocumentTypeID) + assert.Equal("NEW", p.DocumentType.Name) + 
assert.EqualValues(2, p.ProductID) + assert.Equal("New Product", p.Product.Name) + assert.Equal("NP", p.Product.Abbreviation) + assert.Equal(1, p.LatestDocumentNumber) + }) + }) +} diff --git a/pkg/models/product_test.go b/pkg/models/product_test.go new file mode 100644 index 000000000..1b721c4e0 --- /dev/null +++ b/pkg/models/product_test.go @@ -0,0 +1,325 @@ +package models + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestProductModel(t *testing.T) { + dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN") + if dsn == "" { + t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set") + } + + t.Run("FirstOrCreate and Get", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Get a product before any exist", + func(t *testing.T) { + require := require.New(t) + + p := Product{ + Name: "Product1", + } + err := p.Get(db) + require.Error(err) + }) + + t.Run("Create a first product", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, p.ID) + assert.Equal("Product1", p.Name) + assert.Equal("P1", p.Abbreviation) + }) + + t.Run("Get a product without any name", + func(t *testing.T) { + require := require.New(t) + + p := Product{} + err := p.Get(db) + require.Error(err) + }) + + t.Run("Create a second product", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product2", + Abbreviation: "P2", + } + err := p.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(2, p.ID) + assert.Equal("Product2", p.Name) + assert.Equal("P2", p.Abbreviation) + }) + + t.Run("Get the first product by name using FirstOrCreate", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product1", + } + err := 
p.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, p.ID) + assert.Equal("Product1", p.Name) + assert.Equal("P1", p.Abbreviation) + }) + + t.Run("Get the first product by name using Get", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product1", + } + err := p.Get(db) + require.NoError(err) + assert.EqualValues(1, p.ID) + assert.Equal("Product1", p.Name) + assert.Equal("P1", p.Abbreviation) + }) + + t.Run("Get the first product by lowercase name using FirstOrCreate", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, p.ID) + assert.Equal("Product1", p.Name) + assert.Equal("P1", p.Abbreviation) + }) + + t.Run("Get the first product by lowercase name using Get", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product1", + } + err := p.Get(db) + require.NoError(err) + assert.EqualValues(1, p.ID) + assert.Equal("Product1", p.Name) + assert.Equal("P1", p.Abbreviation) + }) + + t.Run("Get the second product by name using FirstOrCreate", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product2", + } + err := p.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(2, p.ID) + assert.Equal("Product2", p.Name) + assert.Equal("P2", p.Abbreviation) + }) + + t.Run("Get the second product by name using Get", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product2", + } + err := p.Get(db) + require.NoError(err) + assert.EqualValues(2, p.ID) + assert.Equal("Product2", p.Name) + assert.Equal("P2", p.Abbreviation) + }) + + t.Run("Get the first product by name and abbreviation using FirstOrCreate", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product1", + 
Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, p.ID) + assert.Equal("Product1", p.Name) + assert.Equal("P1", p.Abbreviation) + }) + + t.Run("Get the first product by name and abbreviation using Get", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.Get(db) + require.NoError(err) + assert.EqualValues(1, p.ID) + assert.Equal("Product1", p.Name) + assert.Equal("P1", p.Abbreviation) + }) + + t.Run( + "Get the second product by name and wrong abbreviation using FirstOrCreate", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product2", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(2, p.ID) + assert.Equal("Product2", p.Name) + assert.Equal("P2", p.Abbreviation) + }) + + t.Run( + "Get the second product by name and wrong abbreviation using Get", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + + p := Product{ + Name: "Product2", + Abbreviation: "P1", + } + err := p.Get(db) + require.NoError(err) + assert.EqualValues(2, p.ID) + assert.Equal("Product2", p.Name) + assert.Equal("P2", p.Abbreviation) + }) + }) + + t.Run("FirstOrCreate bad", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create a product with an empty string for name", + func(t *testing.T) { + require := require.New(t) + + p := Product{ + Name: "", + Abbreviation: "EMPTYSTRING", + } + err := p.FirstOrCreate(db) + require.Error(err) + }) + + t.Run("Create a product with an empty string for abbreviation", + func(t *testing.T) { + require := require.New(t) + + p := Product{ + Name: "Product", + Abbreviation: "", + } + err := p.FirstOrCreate(db) + require.Error(err) + }) + }) + + t.Run("Upsert", + func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + 
t.Run("Create a product using Upsert", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.Upsert(db) + require.NoError(err) + assert.EqualValues(1, p.ID) + assert.Equal("Product1", p.Name) + assert.Equal("P1", p.Abbreviation) + }) + + t.Run("Create a second product using Upsert", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product2", + Abbreviation: "P2", + } + err := p.Upsert(db) + require.NoError(err) + assert.EqualValues(2, p.ID) + assert.Equal("Product2", p.Name) + assert.Equal("P2", p.Abbreviation) + }) + + t.Run("Verify first product using Get", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product1", + } + err := p.Get(db) + require.NoError(err) + assert.EqualValues(1, p.ID) + assert.Equal("Product1", p.Name) + assert.Equal("P1", p.Abbreviation) + }) + + t.Run("Verify second product using Get", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product2", + } + err := p.Get(db) + require.NoError(err) + assert.EqualValues(2, p.ID) + assert.Equal("Product2", p.Name) + assert.Equal("P2", p.Abbreviation) + }) + + t.Run("Update the second product using Upsert", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product2", + Abbreviation: "P2U", + } + err := p.Upsert(db) + require.NoError(err) + assert.EqualValues(2, p.ID) + assert.Equal("Product2", p.Name) + assert.Equal("P2U", p.Abbreviation) + }) + + t.Run("Verify second product after update using Get", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product2", + } + err := p.Get(db) + require.NoError(err) + assert.EqualValues(2, p.ID) + assert.Equal("Product2", p.Name) + assert.Equal("P2U", p.Abbreviation) + }) + }) +} diff --git a/pkg/models/testing.go b/pkg/models/testing.go 
new file mode 100644 index 000000000..bdd4757e7 --- /dev/null +++ b/pkg/models/testing.go @@ -0,0 +1,37 @@ +package models + +import ( + "log" + "testing" + + "github.com/hashicorp-forge/hermes/internal/test" + "github.com/stretchr/testify/require" + "gorm.io/gorm" +) + +func setupTest(t *testing.T, dsn string) ( + db *gorm.DB, tearDownFunc func(t *testing.T), +) { + // Create test database. + db, dbName, err := test.CreateTestDatabase(t, dsn) + require.NoError(t, err) + + // Enable citext extension. + sqlDB, err := db.DB() + require.NoError(t, err) + _, err = sqlDB.Exec("CREATE EXTENSION IF NOT EXISTS citext;") + require.NoError(t, err) + + // Migrate test database. + err = db.AutoMigrate( + ModelsToAutoMigrate()..., + ) + require.NoError(t, err) + + return db, func(t *testing.T) { + // TODO: add back and make configurable. + // err := test.DropTestDatabase(dsn, dbName) + // require.NoError(t, err) + log.Printf("would have dropped test database %q here", dbName) + } +} diff --git a/pkg/models/user.go b/pkg/models/user.go new file mode 100644 index 000000000..d5ab6f9f0 --- /dev/null +++ b/pkg/models/user.go @@ -0,0 +1,138 @@ +package models + +import ( + "fmt" + "time" + + validation "github.com/go-ozzo/ozzo-validation/v4" + "gorm.io/gorm" + "gorm.io/gorm/clause" +) + +// User is a model for an application user. +type User struct { + gorm.Model + + // EmailAddress is the email address of the user. + EmailAddress string `gorm:"default:null;index;not null;type:citext;unique"` + + // ProductSubscriptions are the products that have been subscribed to by the + // user. + ProductSubscriptions []Product `gorm:"many2many:user_product_subscriptions;"` + + // RecentlyViewedDocs are the documents recently viewed by the user. 
+ RecentlyViewedDocs []Document `gorm:"many2many:recently_viewed_docs;"` +} + +type RecentlyViewedDoc struct { + UserID int `gorm:"primaryKey"` + DocumentID int `gorm:"primaryKey"` + ViewedAt time.Time +} + +// BeforeSave is a hook to find or create associations before saving. +func (u *User) BeforeSave(tx *gorm.DB) error { + if err := u.getAssociations(tx); err != nil { + return fmt.Errorf("error getting associations: %w", err) + } + + return nil +} + +// FirstOrCreate finds the first user by email address or creates a user record +// if it does not exist in database db. The result is saved back to the +// receiver. +func (u *User) FirstOrCreate(db *gorm.DB) error { + if err := validation.ValidateStruct(u, + validation.Field( + &u.EmailAddress, validation.Required), + ); err != nil { + return err + } + + return db.Transaction(func(tx *gorm.DB) error { + if err := tx. + Where(User{EmailAddress: u.EmailAddress}). + Omit(clause.Associations). + Clauses(clause.OnConflict{DoNothing: true}). + FirstOrCreate(&u). + Error; err != nil { + return err + } + + if err := u.Get(tx); err != nil { + return fmt.Errorf( + "error getting the record after find or create: %w", err) + } + + return nil + }) +} + +// Get gets a user from database db by email address, and assigns it to the +// receiver. +func (u *User) Get(db *gorm.DB) error { + return db. + Where(User{EmailAddress: u.EmailAddress}). + Preload(clause.Associations). + First(&u).Error +} + +// Upsert updates or inserts the receiver user into database db. +func (u *User) Upsert(db *gorm.DB) error { + return db.Transaction(func(tx *gorm.DB) error { + if err := tx. + Where(User{EmailAddress: u.EmailAddress}). + Omit(clause.Associations). + Assign(*u). + FirstOrCreate(&u). + Error; err != nil { + return err + } + + // Replace associations. + if err := tx. + Model(&u). + Association("ProductSubscriptions"). + Replace(u.ProductSubscriptions); err != nil { + return err + } + if err := tx. + Model(&u). 
+ Association("RecentlyViewedDocs"). + Replace(u.RecentlyViewedDocs); err != nil { + return err + } + + if err := u.Get(tx); err != nil { + return fmt.Errorf("error getting the user after upsert: %w", err) + } + + return nil + }) +} + +// getAssociations gets required associations, creating them where appropriate. +func (u *User) getAssociations(tx *gorm.DB) error { + // Get product subscriptions. + var ps []Product + for _, p := range u.ProductSubscriptions { + if err := p.Get(tx); err != nil { + return fmt.Errorf("error getting product: %w", err) + } + ps = append(ps, p) + } + u.ProductSubscriptions = ps + + // Get recently viewed documents. + var rvd []Document + for _, d := range u.RecentlyViewedDocs { + if err := d.Get(tx); err != nil { + return fmt.Errorf("error getting document: %w", err) + } + rvd = append(rvd, d) + } + u.RecentlyViewedDocs = rvd + + return nil +} diff --git a/pkg/models/user_test.go b/pkg/models/user_test.go new file mode 100644 index 000000000..13c57848d --- /dev/null +++ b/pkg/models/user_test.go @@ -0,0 +1,345 @@ +package models + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestUserModel(t *testing.T) { + dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN") + if dsn == "" { + t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set") + } + + t.Run("FirstOrCreate", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + // Create a first user. + u := User{ + EmailAddress: "a@a.com", + } + err := u.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, u.ID) + assert.Equal("a@a.com", u.EmailAddress) + + // Get the user using FirstOrCreate. + get := User{ + EmailAddress: "a@a.com", + } + err = get.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, get.ID) + assert.Equal("a@a.com", get.EmailAddress) + + // Create a second user. 
+ u2 := User{ + EmailAddress: "b@b.com", + } + err = u2.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(2, u2.ID) + assert.Equal("b@b.com", u2.EmailAddress) + + // Get the second user using FirstOrCreate. + get2 := User{ + EmailAddress: "b@b.com", + } + err = get2.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(2, get2.ID) + assert.Equal("b@b.com", get2.EmailAddress) + }) + + t.Run("Upsert", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create user", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + } + err := u.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, u.ID) + assert.Empty(u.RecentlyViewedDocs) + assert.Equal("a@a.com", u.EmailAddress) + }) + + t.Run("Create a document type", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + dt := DocumentType{ + Name: "DT1", + LongName: "DocumentType1", + } + err := dt.FirstOrCreate(db) + require.NoError(err) + }) + + t.Run("Create a product", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + }) + + var doc1 Document + t.Run("Create document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + doc1 = Document{ + GoogleFileID: "fileID1", + DocumentType: DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + } + err := doc1.Create(db) + require.NoError(err) + assert.EqualValues(1, doc1.ID) + }) + + t.Run( + "Update user to add the document as a recently viewed document", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + RecentlyViewedDocs: []Document{doc1}, + } + err := u.Upsert(db) + require.NoError(err) + require.Equal(1, len(u.RecentlyViewedDocs)) + assert.EqualValues(1, u.RecentlyViewedDocs[0].ID) + 
assert.Equal("fileID1", u.RecentlyViewedDocs[0].GoogleFileID) + }) + + t.Run("Get the user and verify it was updated", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + } + err := u.Get(db) + require.NoError(err) + require.Equal(1, len(u.RecentlyViewedDocs)) + assert.EqualValues(1, u.RecentlyViewedDocs[0].ID) + assert.Equal("fileID1", u.RecentlyViewedDocs[0].GoogleFileID) + }) + + var doc2 Document + t.Run("Create another document", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + doc2 = Document{ + GoogleFileID: "fileID2", + DocumentType: DocumentType{ + Name: "DT1", + }, + Product: Product{ + Name: "Product1", + }, + } + err := doc2.Create(db) + require.NoError(err) + assert.EqualValues(2, doc2.ID) + }) + + t.Run("Update user to add both documents as recently viewed documents", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + RecentlyViewedDocs: []Document{ + { + GoogleFileID: "fileID1", + }, + { + GoogleFileID: "fileID2", + }, + }, + } + err := u.Upsert(db) + require.NoError(err) + require.Equal(2, len(u.RecentlyViewedDocs)) + assert.EqualValues(1, u.RecentlyViewedDocs[0].ID) + assert.Equal("fileID1", u.RecentlyViewedDocs[0].GoogleFileID) + assert.EqualValues(2, u.RecentlyViewedDocs[1].ID) + assert.Equal("fileID2", u.RecentlyViewedDocs[1].GoogleFileID) + }) + + t.Run("Get the user and verify it was updated", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + } + err := u.Get(db) + require.NoError(err) + require.Equal(2, len(u.RecentlyViewedDocs)) + assert.EqualValues(1, u.RecentlyViewedDocs[0].ID) + assert.Equal("fileID1", u.RecentlyViewedDocs[0].GoogleFileID) + assert.EqualValues(2, u.RecentlyViewedDocs[1].ID) + assert.Equal("fileID2", u.RecentlyViewedDocs[1].GoogleFileID) + }) + + t.Run( + "Update user to only have the second document in 
recently viewed documents", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + RecentlyViewedDocs: []Document{ + { + GoogleFileID: "fileID2", + }, + }, + } + err := u.Upsert(db) + require.NoError(err) + require.Equal(1, len(u.RecentlyViewedDocs)) + assert.EqualValues(2, u.RecentlyViewedDocs[0].ID) + assert.Equal("fileID2", u.RecentlyViewedDocs[0].GoogleFileID) + }) + + t.Run("Get the user and verify it was updated", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + } + err := u.Get(db) + require.NoError(err) + require.Equal(1, len(u.RecentlyViewedDocs)) + assert.EqualValues(2, u.RecentlyViewedDocs[0].ID) + assert.Equal("fileID2", u.RecentlyViewedDocs[0].GoogleFileID) + }) + }) + + t.Run("Product subscriptions", func(t *testing.T) { + db, tearDownTest := setupTest(t, dsn) + defer tearDownTest(t) + + t.Run("Create user", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + } + err := u.FirstOrCreate(db) + require.NoError(err) + assert.EqualValues(1, u.ID) + assert.Empty(u.RecentlyViewedDocs) + assert.Equal("a@a.com", u.EmailAddress) + }) + + t.Run("Create a product", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product1", + Abbreviation: "P1", + } + err := p.FirstOrCreate(db) + require.NoError(err) + require.EqualValues(1, p.ID) + }) + + t.Run("Create a second product", func(t *testing.T) { + _, require := assert.New(t), require.New(t) + p := Product{ + Name: "Product2", + Abbreviation: "P2", + } + err := p.FirstOrCreate(db) + require.NoError(err) + require.EqualValues(2, p.ID) + }) + + t.Run("Get the user without any product subscriptions", func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + } + err := u.Get(db) + require.NoError(err) + assert.Len(u.ProductSubscriptions, 
0) + }) + + t.Run("Update user to subscribe to the second product", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + ProductSubscriptions: []Product{ + { + Name: "Product2", + }, + }, + } + err := u.Upsert(db) + require.NoError(err) + require.Equal(1, len(u.ProductSubscriptions)) + assert.EqualValues(2, u.ProductSubscriptions[0].ID) + assert.Equal("Product2", u.ProductSubscriptions[0].Name) + }) + + t.Run("Verify with a Get", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + } + err := u.Get(db) + require.NoError(err) + require.Equal(1, len(u.ProductSubscriptions)) + assert.EqualValues(2, u.ProductSubscriptions[0].ID) + assert.Equal("Product2", u.ProductSubscriptions[0].Name) + }) + + t.Run("Update user to also subscribe to the first product", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + ProductSubscriptions: []Product{ + { + Name: "Product1", + }, + { + Name: "Product2", + }, + }, + } + err := u.Upsert(db) + require.NoError(err) + require.Equal(2, len(u.ProductSubscriptions)) + assert.EqualValues(1, u.ProductSubscriptions[0].ID) + assert.Equal("Product1", u.ProductSubscriptions[0].Name) + assert.EqualValues(2, u.ProductSubscriptions[1].ID) + assert.Equal("Product2", u.ProductSubscriptions[1].Name) + }) + + t.Run("Update user to only subscribe to the first product", + func(t *testing.T) { + assert, require := assert.New(t), require.New(t) + u := User{ + EmailAddress: "a@a.com", + ProductSubscriptions: []Product{ + { + Name: "Product1", + }, + }, + } + err := u.Upsert(db) + require.NoError(err) + require.Equal(1, len(u.ProductSubscriptions)) + assert.EqualValues(1, u.ProductSubscriptions[0].ID) + assert.Equal("Product1", u.ProductSubscriptions[0].Name) + }) + }) +} diff --git a/web/.prettierrc.json b/web/.prettierrc.json new file mode 100644 index 
000000000..0967ef424 --- /dev/null +++ b/web/.prettierrc.json @@ -0,0 +1 @@ +{} diff --git a/web/app/adapters/google/drive.js b/web/app/adapters/google/drive.js new file mode 100644 index 000000000..c0d6c3cfa --- /dev/null +++ b/web/app/adapters/google/drive.js @@ -0,0 +1,15 @@ +import RESTAdapter from "@ember-data/adapter/rest"; +import { inject as service } from "@ember/service"; + +export default class GoogleDriveAdapter extends RESTAdapter { + @service session; + + host = "https://www.googleapis.com/drive"; + namespace = "v3"; + + get headers() { + return { + Authorization: "Bearer " + this.session.data.authenticated.access_token, + }; + } +} diff --git a/web/app/adapters/google/drive/file.js b/web/app/adapters/google/drive/file.js new file mode 100644 index 000000000..8f152d2c0 --- /dev/null +++ b/web/app/adapters/google/drive/file.js @@ -0,0 +1,8 @@ +import GoogleDriveAdapter from "../drive"; + +export default class GoogleDriveFileAdapter extends GoogleDriveAdapter { + urlForQuery(query, modelName) { + let baseUrl = this.buildURL(); + return `${baseUrl}/files`; + } +} diff --git a/web/app/adapters/google/userinfo.js b/web/app/adapters/google/userinfo.js new file mode 100644 index 000000000..0e15e0eae --- /dev/null +++ b/web/app/adapters/google/userinfo.js @@ -0,0 +1,15 @@ +import RESTAdapter from "@ember-data/adapter/rest"; +import { inject as service } from "@ember/service"; + +export default class GoogleUserinfoAdapter extends RESTAdapter { + @service session; + + host = "https://www.googleapis.com/userinfo"; + namespace = "v2"; + + get headers() { + return { + Authorization: "Bearer " + this.session.data.authenticated.access_token, + }; + } +} diff --git a/web/app/adapters/google/userinfo/me.js b/web/app/adapters/google/userinfo/me.js new file mode 100644 index 000000000..5fd4cd892 --- /dev/null +++ b/web/app/adapters/google/userinfo/me.js @@ -0,0 +1,8 @@ +import GoogleUserinfoAdapter from "../userinfo"; + +export default class GoogleUserinfoMeAdapter 
extends GoogleUserinfoAdapter { + urlForQueryRecord(query, modelName) { + let baseUrl = this.buildURL(); + return `${baseUrl}/me`; + } +} diff --git a/web/app/app.ts b/web/app/app.ts new file mode 100644 index 000000000..c5a97aa85 --- /dev/null +++ b/web/app/app.ts @@ -0,0 +1,12 @@ +import Application from '@ember/application'; +import Resolver from 'ember-resolver'; +import loadInitializers from 'ember-load-initializers'; +import config from 'hermes/config/environment'; + +export default class App extends Application { + modulePrefix = config.modulePrefix; + podModulePrefix = config.podModulePrefix; + Resolver = Resolver; +} + +loadInitializers(App, config.modulePrefix); diff --git a/web/app/authenticators/torii.js b/web/app/authenticators/torii.js new file mode 100644 index 000000000..f686aefbb --- /dev/null +++ b/web/app/authenticators/torii.js @@ -0,0 +1,6 @@ +import { inject as service } from "@ember/service"; +import Torii from "ember-simple-auth/authenticators/torii"; + +export default class ToriiAuthenticator extends Torii { + @service torii; +} diff --git a/web/app/components/.gitkeep b/web/app/components/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/web/app/components/action.hbs b/web/app/components/action.hbs new file mode 100644 index 000000000..ffc2afea1 --- /dev/null +++ b/web/app/components/action.hbs @@ -0,0 +1,3 @@ +<button type="button" class="action" ...attributes> + {{yield}} +</button> diff --git a/web/app/components/application-loading/index.hbs b/web/app/components/application-loading/index.hbs new file mode 100644 index 000000000..c62128a7b --- /dev/null +++ b/web/app/components/application-loading/index.hbs @@ -0,0 +1,3 @@ +<div class="text-center hds-typography-display-400"> + <FlightIcon @name="loading" @size="24" /> +</div> diff --git a/web/app/components/application-loading/index.js b/web/app/components/application-loading/index.js new file mode 100644 index 000000000..c0cd95955 --- /dev/null +++ 
b/web/app/components/application-loading/index.js @@ -0,0 +1,6 @@ +import Component from "@glimmer/component"; + +/** + * Renders the initial loading screen before the app is loaded. + */ +export default class ApplicationLoadingComponent extends Component {} diff --git a/web/app/components/custom-editable-field.hbs b/web/app/components/custom-editable-field.hbs new file mode 100644 index 000000000..04f966795 --- /dev/null +++ b/web/app/components/custom-editable-field.hbs @@ -0,0 +1,71 @@ +{{#if this.typeIsString}} + <EditableField + data-test-custom-string-field + @value={{get @document @field}} + @onChange={{@onChange}} + @loading={{@loading}} + @disabled={{@disabled}} + > + <:default> + {{#let (get @document @field) as |documentField|}} + <p + class="hds-typography-body-200 hds-foreground-primary truncate + {{unless documentField 'italic'}}" + title={{or documentField "None"}} + > + {{#if documentField}} + {{documentField}} + {{else}} + <CustomEditableFields::EmptyState /> + {{/if}} + </p> + {{/let}} + </:default> + <:editing as |F|> + <Hds::Form::Textarea::Field + @value={{F.value}} + name={{field}} + {{on "blur" F.update}} + data-test-custom-string-field-input + /> + </:editing> + </EditableField> + +{{else if this.typeIsPeople}} + <EditableField + data-test-custom-people-field + @value={{get @attributes "value"}} + @onChange={{@onChange}} + @loading={{@loading}} + @disabled={{@disabled}} + > + <:default> + {{#if this.people.length}} + <ol class="list-none"> + {{#each this.people as |person|}} + <li class="whitespace-nowrap"> + {{#if person.imgURL}} + <img + src="{{person.imgURL}}" + class="align-middle h-4 w-4 rounded-full" + /> + {{/if}} + {{person.email}} + </li> + {{/each}} + </ol> + {{else}} + <CustomEditableFields::EmptyState /> + {{/if}} + </:default> + <:editing as |F|> + <Inputs::PeopleSelect + class="multiselect--narrow" + @selected={{this.people}} + @onChange={{this.updateEmails}} + {{click-outside (fn F.update this.emails)}} + 
data-test-custom-people-field-input + /> + </:editing> + </EditableField> +{{/if}} diff --git a/web/app/components/custom-editable-field.js b/web/app/components/custom-editable-field.js new file mode 100644 index 000000000..3d07ce808 --- /dev/null +++ b/web/app/components/custom-editable-field.js @@ -0,0 +1,24 @@ +import Component from "@glimmer/component"; +import { tracked } from "@glimmer/tracking"; +import { action } from "@ember/object"; + +export default class customEditableFields extends Component { + get typeIsString() { + return this.args.attributes.type === "STRING"; + } + + get typeIsPeople() { + return this.args.attributes.type === "PEOPLE"; + } + + @tracked emails = this.args.attributes.value || []; + + @action updateEmails(people) { + this.emails = people.map((person) => person.email); + + } + + get people() { + return this.emails.map((email) => ({ email, imgURL: null })); + } +} diff --git a/web/app/components/custom-editable-fields/empty-state.hbs b/web/app/components/custom-editable-fields/empty-state.hbs new file mode 100644 index 000000000..bd8156ea2 --- /dev/null +++ b/web/app/components/custom-editable-fields/empty-state.hbs @@ -0,0 +1,3 @@ +<p class="text-color-foreground-primary opacity-50"> + --- +</p> diff --git a/web/app/components/dashboard/latest-updates.hbs b/web/app/components/dashboard/latest-updates.hbs new file mode 100644 index 000000000..8612d7760 --- /dev/null +++ b/web/app/components/dashboard/latest-updates.hbs @@ -0,0 +1,55 @@ +<div class="flex items-center space-x-2 mb-6"> + <FlightIcon @name="collections" @size="24" /> + <h2 + class="hds-typography-display-300 hds-font-weight-semibold hds-foreground-strong" + >Latest updates</h2> +</div> + +<div class="flex" {{did-insert (perform this.didInsert)}}> + <X::HdsTab + @label="New docs" + @icon="star" + @isSelected={{eq this.currentTab "new"}} + @action={{fn this.setCurrentTab "new"}} + /> + <X::HdsTab + @label="In review" + @icon="circle-half" + @isSelected={{eq this.currentTab 
"in-review"}} + @action={{fn this.setCurrentTab "in-review"}} + /> + <X::HdsTab + @label="Approved" + @icon="check-circle" + @isSelected={{eq this.currentTab "approved"}} + @action={{fn this.setCurrentTab "approved"}} + /> +</div> +{{#if this.didInsert.isRunning}} + {{! Approximate height of the tile list }} + <div class="h-[350px]"> + <FlightIcon @name="loading" class="mt-8" /> + </div> +{{else}} + {{#if this.docsToShow}} + <div class="tile-list mt-8"> + {{#each this.docsToShow as |doc|}} + <Doc::Tile + @avatar={{get doc.ownerPhotos 0}} + @docID={{doc.objectID}} + @docNumber={{doc.docNumber}} + @modifiedAgo={{doc.modifiedAgo}} + @owner={{get doc.owners 0}} + @productArea={{doc.product}} + @status={{lowercase doc.status}} + @thumbnail={{doc.googleMetadata.thumbnailLink}} + @title={{doc.title}} + /> + {{/each}} + </div> + {{else}} + <div class="text-display-200 mt-8"> + {{this.emptyStateMessage}} + </div> + {{/if}} +{{/if}} diff --git a/web/app/components/dashboard/latest-updates.ts b/web/app/components/dashboard/latest-updates.ts new file mode 100644 index 000000000..de423e0a6 --- /dev/null +++ b/web/app/components/dashboard/latest-updates.ts @@ -0,0 +1,98 @@ +import { action } from "@ember/object"; +import { inject as service } from "@ember/service"; +import Component from "@glimmer/component"; +import { tracked } from "@glimmer/tracking"; +import { task } from "ember-concurrency"; +import AlgoliaService from "hermes/services/algolia"; +import ConfigService from "hermes/services/config"; +import { HermesDocument } from "hermes/types/document"; +import { SearchResponse } from "instantsearch.js"; + +// @ts-ignore - not yet typed +import timeAgo from "hermes/utils/time-ago"; + +interface DashboardLatestUpdatesComponentSignature { + Args: {}; +} + +export default class DashboardLatestUpdatesComponent extends Component<DashboardLatestUpdatesComponentSignature> { + @service("config") declare configSvc: ConfigService; + + @service declare algolia: AlgoliaService; + + 
@tracked currentTab = "new"; + @tracked docsToShow: HermesDocument[] | null = null; + + /** + * The message to show when there are no docs for a given tab. + */ + get emptyStateMessage() { + switch (this.currentTab) { + case "new": + return "No documents have been created yet."; + case "in-review": + return "No docs are in review."; + case "approved": + return "No docs have been approved."; + } + } + /** + * Calls the initial fetchDocs task. + * Used in the template to show a loader on initial load. + */ + didInsert = task(async () => { + await this.fetchDocs.perform(); + }); + + /** + * Set the current tab (if necessary) and fetch its docs. + */ + @action setCurrentTab(tab: string) { + if (tab !== this.currentTab) { + this.currentTab = tab; + this.fetchDocs.perform(); + } + } + + /** + * Sends an Algolia query to fetch the docs for the current tab. + * Called onLoad and when tabs are changed. + */ + fetchDocs = task(async () => { + let { currentTab } = this; + + // Translate the current tab to an Algolia facetFilter. + switch (currentTab) { + case "new": + currentTab = ""; + break; + case "in-review": + currentTab = "status:In-Review"; + break; + case "approved": + currentTab = "status:approved"; + break; + } + + let newDocsToShow = await this.algolia.searchIndex + .perform( + this.configSvc.config.algolia_docs_index_name + "_modifiedTime_desc", + "", + { + facetFilters: [currentTab], + hitsPerPage: 4, + } + ) + .then((result: SearchResponse<unknown>) => { + // Add modifiedAgo for each doc. + for (const hit of result.hits as HermesDocument[]) { + const modifiedAgo = new Date(hit.modifiedTime * 1000); + hit.modifiedAgo = `Modified ${timeAgo(modifiedAgo)}`; + } + return result.hits; + }); + + // Update the docsToShow array with the new docs. 
+ this.docsToShow = newDocsToShow as HermesDocument[]; + }); +} diff --git a/web/app/components/dashboard/new-features-banner.hbs b/web/app/components/dashboard/new-features-banner.hbs new file mode 100644 index 000000000..5d7a05cbb --- /dev/null +++ b/web/app/components/dashboard/new-features-banner.hbs @@ -0,0 +1,27 @@ +{{#if this.isShown}} + <Hds::Alert + @type="inline" + @color="highlight" + @icon="gift" + @onDismiss={{this.dismiss}} + class="mt-6" + as |A| + > + <A.Title>New features available</A.Title> + <A.Description> + <ul> + <li>You can now create RFC, PRD, and FRD drafts!</li> + <li>Send a RFC, PRD, and FRD document for review. Hermes will + automatically email the approvers!</li> + <li>Approvers can now approve documents through Hermes.</li> + <li>Archive documents by marking them "obsolete". </li> + </ul> + </A.Description> + <A.Link::Standalone + @icon="arrow-right" + @iconPosition="leading" + @text="Create a document draft" + @route="authenticated.new" + /> + </Hds::Alert> +{{/if}} diff --git a/web/app/components/dashboard/new-features-banner.ts b/web/app/components/dashboard/new-features-banner.ts new file mode 100644 index 000000000..2c1503ece --- /dev/null +++ b/web/app/components/dashboard/new-features-banner.ts @@ -0,0 +1,31 @@ +import Component from "@glimmer/component"; +import { tracked } from "@glimmer/tracking"; +import window from "ember-window-mock"; +import { action } from "@ember/object"; + +let LOCAL_STORAGE_ITEM_NAME = "newFeaturesBanner"; + +interface DashboardNewFeaturesBannerSignature { + Args: {}; +} + +export default class DashboardNewFeaturesBanner extends Component<DashboardNewFeaturesBannerSignature> { + @tracked protected isDismissed = false; + + isShown(): boolean { + const storageItem = window.localStorage.getItem(LOCAL_STORAGE_ITEM_NAME); + + if (storageItem === null) { + window.localStorage.setItem(LOCAL_STORAGE_ITEM_NAME, "true"); + return true; + } else if (storageItem === "true" && !this.isDismissed) { + return true; + 
} else return false; + } + + @action + dismiss() { + window.localStorage.setItem(LOCAL_STORAGE_ITEM_NAME, "false"); + this.isDismissed = true; + } +} diff --git a/web/app/components/doc/inline.hbs b/web/app/components/doc/inline.hbs new file mode 100644 index 000000000..6f36074fe --- /dev/null +++ b/web/app/components/doc/inline.hbs @@ -0,0 +1,77 @@ +<!-- + Used to render a document in a compact inline format eg. for use in dropdown menus + + <Doc::Inline /> requires 2 properties, with 2 optional properties: + @avatar (string): link to avatar image + @docID (string): document ID + @docNumber (string): document number + @productArea eg. 'Waypoint' or 'Design Systems' etc + @status eg. 'approved', 'review', 'obsolete' + @isResult eg. 'true' (false by default) - optional + @isOwner eg. 'true' (false by default) - optional + @snippet: HTML string - optional +--> + +<LinkTo + @route="authenticated.document" + @model="{{@docID}}" + class="flex items-center space-x-3 no-underline py-2 px-3 hover:bg-[color:var(--token-color-palette-neutral-100)]" +> + <div + class="relative flex flex-shrink-0 items-center w-[40px] h-[56px] hds-surface-mid + {{if + (eq @status 'obsolete') + 'hds-surface-faint opacity-75' + 'hds-surface-primary' + }} + rounded overflow-hidden" + > + <img src="/images/document.png" class="w-full mix-blend-multiply" /> + {{#if (eq @status "approved")}} + <FlightIcon + @name="check-circle" + @size="24" + class="absolute -right-2 top-1 fill-[var(--token-color-palette-green-200)] w-8 h-8 -rotate-12 mix-blend-multiply opacity-75" + /> + {{/if}} + {{#if (eq @status "obsolete")}} + <div + class="absolute top-0 left-0 right-1 bottom-0 flex items-center hds-surface-strong hds-surface-mid rounded overflow-hidden" + > + <FlightIcon + @name="archive" + @size="24" + class="absolute -left-2 top-1 fill-[var(--token-color-palette-neutral-200)] w-8 h-8 -rotate-12 mix-blend-multiply opacity-75" + /> + </div> + {{/if}} + <div class="{{if (eq @status 'obsolete') 'opacity-50'}}"> 
+ <Doc::Tag @name={{@productArea}} @isSmall="{true}" /> + </div> + </div> + + <div class="flex flex-col space-y-1 overflow-hidden"> + <h4 + class="hds-typography-body-200 hds-font-weight-semibold hds-foreground-strong" + >{{@title}}</h4> + {{#if (not (is-empty @docNumber))}} + <small + class="hds-typography-body-100 hds-foreground-faint" + >{{@docNumber}}</small> + {{/if}} + + <Person + @ignoreUnknown={{true}} + @imgURL={{@avatar}} + @email={{@owner}} + /> + + {{#if @isResult}} + {{#if @snippet}} + <p + class="hds-typography-body-100 hds-foreground-faint truncate" + >{{{@snippet}}}</p> + {{/if}} + {{/if}} + </div> +</LinkTo> diff --git a/web/app/components/doc/inline.js b/web/app/components/doc/inline.js new file mode 100644 index 000000000..da262e154 --- /dev/null +++ b/web/app/components/doc/inline.js @@ -0,0 +1,59 @@ +import Component from '@glimmer/component'; + +export default class DocInline extends Component { + get productAreaName() { + let productAreaName; + + if (this.args.productArea == 'Boundary') { + productAreaName = "Boundary"; + } else if (this.args.productArea == 'Consul') { + productAreaName = 'Consul'; + } else if (this.args.productArea == 'Cloud Platform') { + productAreaName = 'HCP'; + } else if (this.args.productArea == 'Nomad') { + productAreaName = 'Nomad'; + } else if (this.args.productArea == 'Packer') { + productAreaName = 'Packer'; + } else if (this.args.productArea == 'Terraform') { + productAreaName = 'Terraform'; + } else if (this.args.productArea == 'Vagrant') { + productAreaName = 'Vagrant'; + } else if (this.args.productArea == 'Vault') { + productAreaName = 'Vault'; + } else if (this.args.productArea == 'Waypoint') { + productAreaName = 'Waypoint'; + } else { + productAreaName = this.args.productArea; + } + + return productAreaName; + } + + get productAreaIcon() { + let productAreaIcon; + + if (this.args.productArea == 'Boundary') { + productAreaIcon = "boundary"; + } else if (this.args.productArea == 'Consul') { + productAreaIcon 
= 'consul'; + } else if (this.args.productArea == 'Cloud Platform') { + productAreaIcon = 'hcp'; + } else if (this.args.productArea == 'Nomad') { + productAreaIcon = 'nomad'; + } else if (this.args.productArea == 'Packer') { + productAreaIcon = 'packer'; + } else if (this.args.productArea == 'Terraform') { + productAreaIcon = 'terraform'; + } else if (this.args.productArea == 'Vagrant') { + productAreaIcon = 'vagrant'; + } else if (this.args.productArea == 'Vault') { + productAreaIcon = 'vault'; + } else if (this.args.productArea == 'Waypoint') { + productAreaIcon = 'waypoint'; + } else { + productAreaIcon = 'folder' + } + + return productAreaIcon; + } +} diff --git a/web/app/components/doc/row.hbs b/web/app/components/doc/row.hbs new file mode 100644 index 000000000..64f62bf93 --- /dev/null +++ b/web/app/components/doc/row.hbs @@ -0,0 +1,87 @@ +{{! + Used to render a document in a table row format + + <Doc::Row /> properties: + @avatar (string): link to avatar image + @createdDate (string): created date + @docNumber (string): document number + @docID (string): document ID + @docType (string): document type + @isDraft (bool): document is a draft + @productArea eg. 'Waypoint' or 'Design Systems' etc + @status eg. 'approved', 'review', 'obsolete' + @isResult eg. 'true' (false by default) - optional + @isOwner eg. 
'true' (false by default) - optional +}} + +<Hds::Table::Tr class="row-results__table__row"> + <Hds::Table::Td class="name"> + <LinkTo + @route="authenticated.document" + @model="{{@docID}}" + @query={{hash draft=@isDraft}} + class="flex space-x-4 no-underline" + > + <div + class="relative flex flex-shrink-0 items-center w-[40px] h-[56px] hds-surface-mid + {{if + (eq @status 'obsolete') + 'hds-surface-faint opacity-75' + 'hds-surface-primary' + }} + rounded overflow-hidden" + > + <img src="/images/document.png" class="w-full mix-blend-multiply" /> + {{#if (eq @status "approved")}} + <FlightIcon + @name="check-circle" + @size="24" + class="absolute -right-2 top-1 fill-[var(--token-color-palette-green-200)] w-8 h-8 -rotate-12 mix-blend-multiply opacity-75" + /> + {{/if}} + {{#if (eq @status "obsolete")}} + <div + class="absolute top-0 left-0 right-1 bottom-0 flex items-center hds-surface-strong hds-surface-mid rounded overflow-hidden" + > + <FlightIcon + @name="archive" + @size="24" + class="absolute -left-2 top-1 fill-[var(--token-color-palette-neutral-200)] w-8 h-8 -rotate-12 mix-blend-multiply opacity-75" + /> + </div> + {{/if}} + <div class="{{if (eq @status 'obsolete') 'opacity-50'}}"> + <Doc::Tag @name={{@productArea}} @isSmall="{true}" /> + </div> + </div> + <div> + <h4 + class="hds-typography-body-200 hds-font-weight-semibold hds-foreground-strong" + >{{@title}}</h4> + {{#if (not (is-empty @docNumber))}} + <small + class="hds-typography-body-100 hds-foreground-faint" + >{{@docNumber}}</small> + {{/if}} + </div> + </LinkTo> + </Hds::Table::Td> + <Hds::Table::Td class="type"> + {{@docType}} + </Hds::Table::Td> + <Hds::Table::Td class="status"> + <Doc::State + @state={{@status}} + @hideProgress={{true}} + class="whitespace-nowrap" + /></Hds::Table::Td> + <Hds::Table::Td class="product"><Hds::Badge + @text={{this.productAreaName}} + @icon={{this.productAreaIcon}} + title={{this.productAreaName}} + /></Hds::Table::Td> + <Hds::Table::Td class="owner"> + <Person 
@ignoreUnknown={{true}} @imgURL={{@avatar}} @email={{@owner}} /> + </Hds::Table::Td> + <Hds::Table::Td class="created">{{@createdDate}}</Hds::Table::Td> +</Hds::Table::Tr> diff --git a/web/app/components/doc/row.js b/web/app/components/doc/row.js new file mode 100644 index 000000000..87f558bdc --- /dev/null +++ b/web/app/components/doc/row.js @@ -0,0 +1,59 @@ +import Component from '@glimmer/component'; + +export default class DocRow extends Component { + get productAreaName() { + let productAreaName; + + if (this.args.productArea == 'Boundary') { + productAreaName = "Boundary"; + } else if (this.args.productArea == 'Consul') { + productAreaName = 'Consul'; + } else if (this.args.productArea == 'Cloud Platform') { + productAreaName = 'HCP'; + } else if (this.args.productArea == 'Nomad') { + productAreaName = 'Nomad'; + } else if (this.args.productArea == 'Packer') { + productAreaName = 'Packer'; + } else if (this.args.productArea == 'Terraform') { + productAreaName = 'Terraform'; + } else if (this.args.productArea == 'Vagrant') { + productAreaName = 'Vagrant'; + } else if (this.args.productArea == 'Vault') { + productAreaName = 'Vault'; + } else if (this.args.productArea == 'Waypoint') { + productAreaName = 'Waypoint'; + } else { + productAreaName = this.args.productArea; + } + + return productAreaName; + } + + get productAreaIcon() { + let productAreaIcon; + + if (this.args.productArea == 'Boundary') { + productAreaIcon = "boundary"; + } else if (this.args.productArea == 'Consul') { + productAreaIcon = 'consul'; + } else if (this.args.productArea == 'Cloud Platform') { + productAreaIcon = 'hcp'; + } else if (this.args.productArea == 'Nomad') { + productAreaIcon = 'nomad'; + } else if (this.args.productArea == 'Packer') { + productAreaIcon = 'packer'; + } else if (this.args.productArea == 'Terraform') { + productAreaIcon = 'terraform'; + } else if (this.args.productArea == 'Vagrant') { + productAreaIcon = 'vagrant'; + } else if (this.args.productArea == 'Vault') { 
+ productAreaIcon = 'vault'; + } else if (this.args.productArea == 'Waypoint') { + productAreaIcon = 'waypoint'; + } else { + productAreaIcon = 'folder' + } + + return productAreaIcon; + } +} diff --git a/web/app/components/doc/state-progress-bar.hbs b/web/app/components/doc/state-progress-bar.hbs new file mode 100644 index 000000000..30ff845b7 --- /dev/null +++ b/web/app/components/doc/state-progress-bar.hbs @@ -0,0 +1,5 @@ +<li + class="h-0.5 rounded-full bg-[color:var(--token-color-palette-neutral-200)]" + ...attributes +> +</li> diff --git a/web/app/components/doc/state.hbs b/web/app/components/doc/state.hbs new file mode 100644 index 000000000..a5c07010a --- /dev/null +++ b/web/app/components/doc/state.hbs @@ -0,0 +1,16 @@ +{{#unless @hideProgress}} + <ol + class="state--{{this.dasherizedName}} + grid grid-cols-3 w-full items-center list-none gap-0.5" + > + <Doc::StateProgressBar class={{this.state.barOneClass}} /> + <Doc::StateProgressBar class={{this.state.barTwoClass}} /> + <Doc::StateProgressBar class={{this.state.barThreeClass}} /> + </ol> +{{/unless}} + +<Hds::Badge + @text={{this.state.label}} + @color={{this.state.color}} + ...attributes +/> diff --git a/web/app/components/doc/state.js b/web/app/components/doc/state.js new file mode 100644 index 000000000..06fd37dba --- /dev/null +++ b/web/app/components/doc/state.js @@ -0,0 +1,59 @@ +import { dasherize } from "@ember/string"; +import Component from "@glimmer/component"; + +/** + * Args: + * @state?: string; the doc's status, e.g., 'in review' + * @hideProgress?: boolean; whether to hide the progress bar + */ + +export default class State extends Component { + get dasherizedName() { + let name = ""; + if (typeof this.args.state === "string") { + name = dasherize(this.args.state); + } + switch (name) { + case "in-review": + case "approved": + case "obsolete": + return name; + default: + return "wip"; + } + } + + get state() { + switch (this.dasherizedName) { + case "in-review": + return { + label: "In 
review", + color: "highlight", + barOneClass: + "bg-[color:var(--token-color-palette-purple-200)] opacity-75", + barTwoClass: "bg-[color:var(--token-color-palette-purple-200)] h-1", + }; + + case "approved": + return { + label: "Approved", + color: "success", + barOneClass: + "bg-[color:var(--token-color-palette-green-200)] opacity-75", + barTwoClass: + "bg-[color:var(--token-color-palette-green-200)] opacity-75", + barThreeClass: "bg-[color:var(--token-color-palette-green-200)] h-1", + }; + + case "obsolete": + return { label: "Obsolete", color: "neutral" }; + + default: + return { + label: "WIP", + color: "neutral", + barOneClass: "bg-[color:var(--token-color-palette-blue-200)] h-1", + }; + } + } +} diff --git a/web/app/components/doc/tag.hbs b/web/app/components/doc/tag.hbs new file mode 100644 index 000000000..f2ddeebe7 --- /dev/null +++ b/web/app/components/doc/tag.hbs @@ -0,0 +1,12 @@ +<!-- + Used to render the product tag that appears on document thumbnails + + <Doc::Tag /> requires 1 property: + @name eg. 
'Waypoint', 'Consul' or 'Cloud Platform' etc +--> + +{{#if this.icon}} + <div class="flex items-center justify-center absolute left-0 {{if @isSmall 'w-[28px] h-[24px] bottom-1' 'w-[36px] h-[28px] bottom-2'}} {{this.foregroundColor}} {{this.gradientStartColor}} {{this.gradientStopColor}} bg-gradient-to-br rounded-r-md"> + <FlightIcon @name={{this.icon}} /> + </div> +{{/if}} diff --git a/web/app/components/doc/tag.js b/web/app/components/doc/tag.js new file mode 100644 index 000000000..22e3ac040 --- /dev/null +++ b/web/app/components/doc/tag.js @@ -0,0 +1,107 @@ +import Component from '@glimmer/component'; + +export default class DocTag extends Component { + get icon() { + let icon; + + if (this.args.name == 'Boundary') { + icon = 'boundary'; + } else if (this.args.name == 'Consul') { + icon = 'consul'; + } else if (this.args.name == 'Cloud Platform') { + icon = 'hcp'; + } else if (this.args.name == 'Nomad') { + icon = 'nomad'; + } else if (this.args.name == 'Packer') { + icon = 'packer'; + } else if (this.args.name == 'Terraform') { + icon = 'terraform'; + } else if (this.args.name == 'Vagrant') { + icon = 'vagrant'; + } else if (this.args.name == 'Vault') { + icon = 'vault'; + } else if (this.args.name == 'Waypoint') { + icon = 'waypoint'; + } + + return icon; + } + + get foregroundColor() { + let foregroundColor; + + if (this.args.name == 'Boundary') { + foregroundColor = 'text-white'; + } else if (this.args.name == 'Consul') { + foregroundColor = 'text-white'; + } else if (this.args.name == 'Cloud Platform') { + foregroundColor = 'text-white'; + } else if (this.args.name == 'Nomad') { + foregroundColor = 'text-[color:var(--token-color-nomad-foreground)]'; + } else if (this.args.name == 'Packer') { + foregroundColor = 'text-[color:var(--token-color-packer-foreground)]'; + } else if (this.args.name == 'Terraform') { + foregroundColor = 'text-white'; + } else if (this.args.name == 'Vagrant') { + foregroundColor = 'text-[color:var(--token-color-vagrant-foreground)]'; 
+ } else if (this.args.name == 'Vault') { + foregroundColor = 'text-[color:var(--token-color-vault-foreground)]'; + } else if (this.args.name == 'Waypoint') { + foregroundColor = 'text-[color:var(--token-color-waypoint-foreground)]'; + } + + return foregroundColor; + } + + get gradientStartColor() { + let gradientStartColor; + + if (this.args.name == 'Boundary') { + gradientStartColor = 'from-[color:var(--token-color-boundary-gradient-primary-start)]'; + } else if (this.args.name == 'Consul') { + gradientStartColor = 'from-[color:var(--token-color-consul-gradient-primary-start)]'; + } else if (this.args.name == 'Cloud Platform') { + gradientStartColor = 'from-[#666]'; + } else if (this.args.name == 'Nomad') { + gradientStartColor = 'from-[color:var(--token-color-nomad-gradient-primary-start)]'; + } else if (this.args.name == 'Packer') { + gradientStartColor = 'from-[color:var(--token-color-packer-gradient-primary-start)]'; + } else if (this.args.name == 'Terraform') { + gradientStartColor = 'from-[color:var(--token-color-terraform-gradient-primary-start)]'; + } else if (this.args.name == 'Vagrant') { + gradientStartColor = 'from-[color:var(--token-color-vagrant-gradient-primary-start)]'; + } else if (this.args.name == 'Vault') { + gradientStartColor = 'from-[color:var(--token-color-vault-gradient-primary-start)]'; + } else if (this.args.name == 'Waypoint') { + gradientStartColor = 'from-[color:var(--token-color-waypoint-gradient-primary-start)]'; + } + + return gradientStartColor; + } + + get gradientStopColor() { + let gradientStopColor; + + if (this.args.name == 'Boundary') { + gradientStopColor = 'to-[color:var(--token-color-boundary-gradient-primary-stop)]'; + } else if (this.args.name == 'Consul') { + gradientStopColor = 'to-[color:var(--token-color-consul-gradient-primary-stop)]'; + } else if (this.args.name == 'Cloud Platform') { + gradientStopColor = 'to-[#333]'; + } else if (this.args.name == 'Nomad') { + gradientStopColor = 
'to-[color:var(--token-color-nomad-gradient-primary-stop)]'; + } else if (this.args.name == 'Packer') { + gradientStopColor = 'to-[color:var(--token-color-packer-gradient-primary-stop)]'; + } else if (this.args.name == 'Terraform') { + gradientStopColor = 'to-[color:var(--token-color-terraform-gradient-primary-stop)]'; + } else if (this.args.name == 'Vagrant') { + gradientStopColor = 'to-[color:var(--token-color-vagrant-gradient-primary-stop)]'; + } else if (this.args.name == 'Vault') { + gradientStopColor = 'to-[color:var(--token-color-vault-gradient-primary-stop)]'; + } else if (this.args.name == 'Waypoint') { + gradientStopColor = 'to-[color:var(--token-color-waypoint-gradient-primary-stop)]'; + } + + return gradientStopColor; + } +} diff --git a/web/app/components/doc/tile.hbs b/web/app/components/doc/tile.hbs new file mode 100644 index 000000000..e83f10df8 --- /dev/null +++ b/web/app/components/doc/tile.hbs @@ -0,0 +1,101 @@ +<!-- + Used to render a document tile in a card format + + <Doc::Tile /> properties: + @avatar (string): link to avatar image + @docID (string): document ID + @docNumber (string): document number + @isOwner eg. 'true' (false by default) - optional + @isResult eg. 'true' (false by default) - optional + @modifiedAgo (string): e.g. 'Modified 3 days ago' + @owner (string): owner of document + @productArea eg. 'Waypoint' or 'Design Systems' etc + @snippet: HTML string - optional + @status eg. 
'approved', 'review', 'obsolete' + @thumbnail (string): link to thumbnail image + @title (string): title of document +--> + +<LinkTo + @route="authenticated.document" + @model="{{@docID}}" + class="flex flex-col items-start space-y-2 no-underline p-4 -m-4 rounded-md hover:bg-[color:var(--token-color-palette-neutral-50)] active:bg-[color:var(--token-color-palette-neutral-100)] overflow-hidden" +> + <div class="flex flex-col items-start w-[108px] space-y-3"> + <div + class="relative flex items-center w-[108px] h-[148px] hds-surface-mid + {{if + (eq @status 'obsolete') + 'hds-surface-faint opacity-75' + 'hds-surface-primary' + }} + rounded overflow-hidden" + > + {{#if (is-empty @thumbnail)}} + <img src="/images/document.png" class="w-full mix-blend-multiply" /> + {{else}} + <img + src="{{@thumbnail}}" + class="w-full mix-blend-multiply" + referrerpolicy="no-referrer" + /> + {{/if}} + {{#if (eq @status "approved")}} + <FlightIcon + @name="check-circle" + @size="24" + class="absolute -right-4 top-2 fill-[var(--token-color-palette-green-200)] w-20 h-20 -rotate-12 mix-blend-multiply opacity-75" + /> + {{/if}} + {{#if (eq @status "obsolete")}} + <div + class="absolute top-0 left-0 right-5 bottom-0 flex items-center hds-surface-strong hds-surface-mid rounded overflow-hidden" + > + <FlightIcon + @name="archive" + @size="24" + class="absolute -left-4 top-2 fill-[var(--token-color-palette-neutral-200)] w-20 h-20 -rotate-12 mix-blend-multiply opacity-75" + /> + </div> + {{/if}} + <div class="{{if (eq @status 'obsolete') 'opacity-50'}}"> + <Doc::Tag @name={{@productArea}} /> + </div> + </div> + <Doc::State @state="{{@status}}" /> + </div> + + <div class="flex flex-col items-start space-y-1"> + <h4 + class="hds-typography-display-200 hds-font-weight-semibold hds-foreground-strong" + >{{@title}}</h4> + {{#if (not (is-empty @docNumber))}} + <small + class="hds-typography-body-100 hds-font-weight-medium hds-foreground-faint" + >{{@docNumber}}</small> + {{/if}} + </div> + + <div 
class="flex flex-col items-start space-y-1 pb-1 max-w-full"> + <Person + @ignoreUnknown={{true}} + @imgURL={{@avatar}} + @email={{@owner}} + /> + {{#if (not (is-empty @modifiedAgo))}} + <p + class="hds-typography-body-100 hds-foreground-faint" + >{{@modifiedAgo}}</p> + {{/if}} + </div> + + <Hds::Badge @text={{this.productAreaName}} @icon={{this.productAreaIcon}} /> + + {{#if @isResult}} + {{#if @snippet}} + <p + class="hds-typography-body-100 hds-foreground-faint pt-2" + >{{{@snippet}}}</p> + {{/if}} + {{/if}} +</LinkTo> diff --git a/web/app/components/doc/tile.js b/web/app/components/doc/tile.js new file mode 100644 index 000000000..3f3780157 --- /dev/null +++ b/web/app/components/doc/tile.js @@ -0,0 +1,59 @@ +import Component from '@glimmer/component'; + +export default class DocTile extends Component { + get productAreaName() { + let productAreaName; + + if (this.args.productArea == 'Boundary') { + productAreaName = "Boundary"; + } else if (this.args.productArea == 'Consul') { + productAreaName = 'Consul'; + } else if (this.args.productArea == 'Cloud Platform') { + productAreaName = 'HCP'; + } else if (this.args.productArea == 'Nomad') { + productAreaName = 'Nomad'; + } else if (this.args.productArea == 'Packer') { + productAreaName = 'Packer'; + } else if (this.args.productArea == 'Terraform') { + productAreaName = 'Terraform'; + } else if (this.args.productArea == 'Vagrant') { + productAreaName = 'Vagrant'; + } else if (this.args.productArea == 'Vault') { + productAreaName = 'Vault'; + } else if (this.args.productArea == 'Waypoint') { + productAreaName = 'Waypoint'; + } else { + productAreaName = this.args.productArea; + } + + return productAreaName; + } + + get productAreaIcon() { + let productAreaIcon; + + if (this.args.productArea == 'Boundary') { + productAreaIcon = "boundary"; + } else if (this.args.productArea == 'Consul') { + productAreaIcon = 'consul'; + } else if (this.args.productArea == 'Cloud Platform') { + productAreaIcon = 'hcp'; + } else if 
(this.args.productArea == 'Nomad') { + productAreaIcon = 'nomad'; + } else if (this.args.productArea == 'Packer') { + productAreaIcon = 'packer'; + } else if (this.args.productArea == 'Terraform') { + productAreaIcon = 'terraform'; + } else if (this.args.productArea == 'Vagrant') { + productAreaIcon = 'vagrant'; + } else if (this.args.productArea == 'Vault') { + productAreaIcon = 'vault'; + } else if (this.args.productArea == 'Waypoint') { + productAreaIcon = 'waypoint'; + } else { + productAreaIcon = 'folder' + } + + return productAreaIcon; + } +} diff --git a/web/app/components/document/index.hbs b/web/app/components/document/index.hbs new file mode 100644 index 000000000..1084209f5 --- /dev/null +++ b/web/app/components/document/index.hbs @@ -0,0 +1,25 @@ +<div class="flex flex-1 p-4 space-x-4 max-h-screen"> + <Sidebar + @profile={{this.authenticatedUser.info}} + @document={{@document}} + @deleteDraft={{this.deleteDraft}} + @docType={{@docType}} + /> + + <Hds::Card::Container + @level="high" + @hasBorder="true" + @overflow="hidden" + class="flex items-center justify-center flex-1" + > + <iframe + title="Google Doc" + height="100%" + width="100%" + class="border-0" + src="https://docs.google.com/document/d/{{@document.objectID}}/edit?embedded=true" + > + </iframe> + </Hds::Card::Container> + +</div> diff --git a/web/app/components/document/index.js b/web/app/components/document/index.js new file mode 100644 index 000000000..a067575df --- /dev/null +++ b/web/app/components/document/index.js @@ -0,0 +1,48 @@ +import Component from "@glimmer/component"; +import { inject as service } from "@ember/service"; +import { action } from "@ember/object"; +import { task } from "ember-concurrency"; + +export default class DocumentIndexComponent extends Component { + @service authenticatedUser; + @service("fetch") fetchSvc; + @service router; + @service flashMessages; + + @task *deleteDraft(docID) { + // Returns a promise that always results after the provided number of 
milliseconds + const wait = (ms) => new Promise((res) => setTimeout(res, ms)); + + try { + const docResp = yield this.fetchSvc + .fetch("/api/v1/drafts/" + docID, { + method: "DELETE", + headers: { "Content-Type": "application/json" }, + }) + .then((resp) => resp.json()) + .catch((err) => { + console.log(`Error deleting document draft: ${err}`); + throw err; + }); + + // Wait for document to be deleted. + yield wait(2000); + + // Add a notification for the user + this.flashMessages.add({ + message: "Document draft deleted", + title: "Done!", + type: "success", + timeout: 6000, + extendedTimeout: 1000, + }); + + // Transition to my drafts view + this.router.transitionTo("authenticated.drafts"); + } catch (err) { + // TODO: Handle error by using a toast and showing the create form again with + // everything still populated + throw err; + } + } +} diff --git a/web/app/components/editable-field.hbs b/web/app/components/editable-field.hbs new file mode 100644 index 000000000..3d7c871de --- /dev/null +++ b/web/app/components/editable-field.hbs @@ -0,0 +1,18 @@ +<div class="editable-field" ...attributes {{did-insert this.captureElement}}> + {{#if (and this.editing (not @loading))}} + <div {{on "keydown" this.preventNewlines}} {{on "keyup" this.cancel}}> + {{yield (hash value=@value update=this.update) to="editing"}} + </div> + {{else}} + {{#if @loading}} + <FlightIcon @name="loading" class="loading-indicator" /> + {{/if}} + <button + class="field-toggle {{if @loading 'loading'}}" + disabled={{or @disabled @loading}} + {{on "click" this.edit}} + > + {{yield}} + </button> + {{/if}} +</div> diff --git a/web/app/components/editable-field.js b/web/app/components/editable-field.js new file mode 100644 index 000000000..2de826c16 --- /dev/null +++ b/web/app/components/editable-field.js @@ -0,0 +1,62 @@ +import Component from "@glimmer/component"; +import { tracked } from "@glimmer/tracking"; +import { action } from "@ember/object"; +import { scheduleOnce } from "@ember/runloop"; 
+ +const FOCUSABLE = + 'button, [href], input, select, textarea, [tabindex]:not([tabindex="-1"])'; + +export default class EditableField extends Component { + @tracked editing = false; + @tracked element = null; + @tracked cachedValue = null; + + @action + captureElement(el) { + this.element = el; + } + + @action + edit() { + this.cachedValue = this.args.value; + this.editing = true; + + // Kinda gross, but this gives focus to the first focusable element in the + // :editing block, which will typically be an input. + scheduleOnce("afterRender", this, () => { + if (this.element && !this.element.contains(document.activeElement)) { + const firstInput = this.element.querySelector(FOCUSABLE); + if (firstInput) firstInput.focus(); + } + }); + } + + @action + cancel(ev) { + if (ev.key === "Escape") { + scheduleOnce("actions", this, () => { + this.editing = false; + }); + ev.preventDefault(); + } + } + + @action + preventNewlines(ev) { + if (ev.key === "Enter") { + ev.preventDefault(); + } + } + + @action + update(ev) { + scheduleOnce("actions", this, () => { + this.editing = false; + }); + + const newValue = ev instanceof Event ? 
ev.target.value : ev; + if (newValue !== this.cachedValue) { + this.args.onChange?.(newValue); + } + } +} diff --git a/web/app/components/footer.hbs b/web/app/components/footer.hbs new file mode 100644 index 000000000..4352deffd --- /dev/null +++ b/web/app/components/footer.hbs @@ -0,0 +1,22 @@ +{{#unless (eq this.currentRouteName "authenticated.document")}} + <div class="footer"> + <div class="x-container"> + <div + class="text-body-200 text-color-foreground-faint flex items-center justify-center" + > + © + {{this.currentYear}} + HashiCorp + <div class="mx-2">•</div> + <a + href="https://github.com/hashicorp-forge/hermes" + target="_blank" + rel="noopener nofollow" + class="flex" + > + View Hermes on GitHub + </a> + </div> + </div> + </div> +{{/unless}} diff --git a/web/app/components/footer.ts b/web/app/components/footer.ts new file mode 100644 index 000000000..a860dd481 --- /dev/null +++ b/web/app/components/footer.ts @@ -0,0 +1,15 @@ +import Component from "@glimmer/component"; +import { inject as service } from "@ember/service"; +import RouterService from "@ember/routing/router-service"; + +export default class FooterComponent extends Component { + @service declare router: RouterService; + + protected get currentRouteName(): string { + return this.router.currentRouteName; + } + + protected get currentYear(): number { + return new Date().getFullYear(); + } +} diff --git a/web/app/components/header.hbs b/web/app/components/header.hbs new file mode 100644 index 000000000..8197fecf8 --- /dev/null +++ b/web/app/components/header.hbs @@ -0,0 +1,8 @@ +<header class="bg-color-page-faint border-b border-b-color-border-faint mb-7"> + <Header::Nav /> +</header> + +<Header::Toolbar + @facets={{@facets}} + @sortControlIsHidden={{@sortControlIsHidden}} +/> diff --git a/web/app/components/header/facet-dropdown.hbs b/web/app/components/header/facet-dropdown.hbs new file mode 100644 index 000000000..890ff2534 --- /dev/null +++ b/web/app/components/header/facet-dropdown.hbs @@ 
-0,0 +1,24 @@ +<Hds::Dropdown + class="facets-dropdown" + @listPosition="left" + ...attributes + as |dd| +> + <dd.ToggleButton + data-test-facets-dropdown-toggle-button + @text={{@label}} + @color="secondary" + disabled={{@disabled}} + /> + {{#if @facets}} + {{#each-in this.firstTenFacets as |value attrs|}} + <dd.Interactive + class="facet-interactive-item" + {{on "click" (fn this.onClick value dd.close)}} + @text="{{value}} ({{attrs.count}})" + @icon={{if attrs.selected "check-square-fill" "square"}} + class={{if attrs.selected "checked"}} + /> + {{/each-in}} + {{/if}} +</Hds::Dropdown> diff --git a/web/app/components/header/facet-dropdown.ts b/web/app/components/header/facet-dropdown.ts new file mode 100644 index 000000000..1eeb36ffc --- /dev/null +++ b/web/app/components/header/facet-dropdown.ts @@ -0,0 +1,47 @@ +import Component from "@glimmer/component"; +import { action } from "@ember/object"; +import { FacetDropdownObjects } from "facets"; + +interface FacetDropdownComponentSignature { + Args: { + onClick: (facetName: FacetName, value: string) => void; + label: string; + facets: FacetDropdownObjects; + disabled: boolean; + }; +} + +export enum FacetName { + DocType = "docType", + Owners = "owners", + Status = "status", + Product = "product", +} + +export default class FacetDropdownComponent extends Component<FacetDropdownComponentSignature> { + get facetName(): FacetName | undefined { + switch (this.args.label) { + case "Type": + return FacetName.DocType; + case "Status": + return FacetName.Status; + case "Product/Area": + return FacetName.Product; + case "Owner": + return FacetName.Owners; + } + } + + get firstTenFacets(): FacetDropdownObjects { + let firstTenEntries = Object.entries(this.args.facets).slice(0, 10); + let firstTenFacetsObjects = Object.fromEntries(firstTenEntries); + return firstTenFacetsObjects; + } + + @action onClick(value: string, close: () => void) { + if (this.facetName) { + this.args.onClick(this.facetName, value); + } + close(); + } 
+} diff --git a/web/app/components/header/nav.hbs b/web/app/components/header/nav.hbs new file mode 100644 index 000000000..663d3ea0b --- /dev/null +++ b/web/app/components/header/nav.hbs @@ -0,0 +1,68 @@ +<div class="x-container"> + + <nav class="header-nav"> + <LinkTo @route="authenticated.dashboard" class="header-nav-logo"> + <HermesLogo /> + </LinkTo> + + <div class="primary-links"> + <LinkTo + @route="authenticated.all" + @current-when="authenticated.all" + @query={{this.defaultBrowseScreenQueryParams}} + > + All Docs + </LinkTo> + <LinkTo + @route="authenticated.my" + @current-when="authenticated.my" + @query={{this.defaultBrowseScreenQueryParams}} + > + My Docs + </LinkTo> + <LinkTo + @route="authenticated.drafts" + @current-when="authenticated.drafts" + @query={{this.defaultBrowseScreenQueryParams}} + > + My Drafts + </LinkTo> + </div> + + <Header::Search class="search-bar" /> + + <div class="user-buttons"> + <Hds::Button + @route="authenticated.new" + @isIconOnly={{true}} + @text="Create draft" + @icon="file-plus" + class="create-draft-button" + /> + <div class="relative"> + {{! 
Workaround until `referrerPolicy` is supported in dd.ToggleIcon }} + <img + src={{this.profile.picture}} + class="user-avatar" + role="presentation" + referrerpolicy="no-referrer" + /> + <Hds::Dropdown as |dd|> + <dd.ToggleIcon @text="User menu" @icon="user" /> + <dd.Title @text={{this.profile.name}} class="text-body-200" /> + <dd.Description @text={{this.profile.email}} class="text-body-200" /> + <dd.Separator class="mt-2" /> + <dd.Interactive + @route="authenticated.settings" + @text="Email notifications" + /> + + <dd.Interactive + {{on "click" this.invalidateSession}} + @text="Sign Out" + /> + </Hds::Dropdown> + </div> + </div> + </nav> +</div> diff --git a/web/app/components/header/nav.js b/web/app/components/header/nav.js new file mode 100644 index 000000000..293046e0f --- /dev/null +++ b/web/app/components/header/nav.js @@ -0,0 +1,36 @@ +import Component from "@glimmer/component"; +import { inject as service } from "@ember/service"; +import { action } from "@ember/object"; + +export default class Nav extends Component { + @service("config") configSvc; + @service session; + @service flags; + @service router; + + @service authenticatedUser; + + @action + invalidateSession() { + this.session.invalidate(); + } + + get profile() { + return this.authenticatedUser.info; + } + + get defaultBrowseScreenQueryParams() { + return { + docType: [], + owners: [], + page: 1, + product: [], + status: [], + sortBy: "dateDesc", + }; + } + + get currentRouteName() { + return this.router.currentRouteName; + } +} diff --git a/web/app/components/header/search.hbs b/web/app/components/header/search.hbs new file mode 100644 index 000000000..d9a6789d0 --- /dev/null +++ b/web/app/components/header/search.hbs @@ -0,0 +1,90 @@ +{{on-document "keydown" this.onKeydown}} +<div ...attributes> + <form class="w-full" {{on "submit" this.goToResults}}> + <BasicDropdown @renderInPlace={{true}} @horizontalPosition="right" as |dd|> + <Hds::Form::TextInput::Base + @type="search" + 
@value={{this.query}} + name="query" + size="25" + placeholder="Find a document..." + aria-label="Find a document..." + data-ebd-id="{{dd.uniqueId}}-trigger" + {{on "focusin" dd.actions.open}} + {{on "mousedown" (fn this.maybeCloseDropdown dd)}} + {{on "input" (perform this.search dd)}} + {{did-insert this.registerInput}} + /> + {{#unless this.query}} + <span + class="absolute right-[13px] top-1/2 -translate-y-1/2 text-color-foreground-faint" + > + ⌘K + </span> + {{/unless}} + {{#if this.query}} + <dd.Content class="hds-dropdown-list search-dropdown"> + {{#if this.bestMatches}} + <div + class="flex flex-col border-0 border-b hds-border-primary pb-1" + > + <LinkTo + @route="authenticated.results" + @query={{hash q=this.query}} + class="hds-dropdown-list-item--interactive" + > + <FlightIcon @name="search" class="mr-1.5" /> + <span>View all results for "{{this.query}}"</span> + </LinkTo> + </div> + <div + class="flex flex-col border-0 border-b hds-border-primary py-1" + > + {{#let (get (get this.bestMatches 0) "product") as |product|}} + <LinkTo + @route="authenticated.all" + @query={{hash product=(array product)}} + class="hds-dropdown-list-item--interactive" + > + <FlightIcon @name="folder" class="mr-1.5" /> + <span class="flex items-center">View all + <Hds::Badge + @text={{product}} + @icon={{get-product-id product}} + class="mx-2" + /> + documents + </span> + </LinkTo> + {{/let}} + </div> + <div id="bestMatches" class="flex flex-col pt-1"> + <span + class="hds-typography-body-100 hds-font-weight-medium hds-foreground-faint px-3 py-1" + >Best matches</span> + {{#each this.bestMatches as |match|}} + <Doc::Inline + @avatar="{{get match.ownerPhotos 0}}" + @docID="{{match.objectID}}" + @docNumber="{{match.docNumber}}" + @isResult="{true}" + @owner="{{get match.owners 0}}" + @productArea="{{match.product}}" + @snippet="{{match._snippetResult.content.value}}" + @status="{{match.status}}" + @title="{{match.title}}" + /> + {{/each}} + </div> + {{else}} + <div class="flex 
flex-col border-0 hds-border-primary pb-1"> + <div class="py-1 px-3"> + <span class="flex items-center">No results found for "{{this.query}}"</span> + </div> + </div> + {{/if}} + </dd.Content> + {{/if}} + </BasicDropdown> + </form> +</div> diff --git a/web/app/components/header/search.ts b/web/app/components/header/search.ts new file mode 100644 index 000000000..6f35ab5a0 --- /dev/null +++ b/web/app/components/header/search.ts @@ -0,0 +1,89 @@ +import { restartableTask } from "ember-concurrency"; +import Component from "@glimmer/component"; +import { inject as service } from "@ember/service"; +import { tracked } from "@glimmer/tracking"; +import { action } from "@ember/object"; +import AlgoliaService from "hermes/services/algolia"; +import RouterService from "@ember/routing/router-service"; +import { HermesDocument } from "hermes/types/document"; +import { assert } from "@ember/debug"; + +interface BasicDropdownAPI { + uniqueId: string; + isOpen: boolean; + disabled: boolean; + actions: { + close: () => void; + open: () => void; + toggle: () => void; + reposition: () => void; + }; +} + +export default class Search extends Component { + @service declare algolia: AlgoliaService; + @service declare router: RouterService; + + @tracked protected searchInput: HTMLInputElement | null = null; + @tracked protected bestMatches: HermesDocument[] = []; + @tracked protected query: string = ""; + + @action protected registerInput(element: HTMLInputElement): void { + this.searchInput = element; + } + + @action protected onKeydown(e: KeyboardEvent): void { + if (e.metaKey && e.key === "k") { + e.preventDefault(); + assert("searchInput is expected", this.searchInput); + this.searchInput.focus(); + } + } + + /** + * Checks whether the dropdown is open and closes it if it is. + * Uses mousedown instead of click to get ahead of the focusin event. + * This allows users to click the search input to dismiss the dropdown. 
+ */ + @action protected maybeCloseDropdown(dd: BasicDropdownAPI): void { + if (dd.isOpen) { + dd.actions.close(); + } + } + + @action protected goToResults(ev: Event): void { + ev.preventDefault(); + this.router.transitionTo("authenticated.results", { + queryParams: { q: this.query }, + }); + } + + protected search = restartableTask( + async (dd: BasicDropdownAPI, inputEvent: InputEvent): Promise<void> => { + let input = inputEvent.target; + + assert( + "inputEvent.target must be an HTMLInputElement", + input instanceof HTMLInputElement + ); + + this.query = input.value; + + if (this.query) { + const params = { + hitsPerPage: 5, + }; + const response = await this.algolia.search.perform(this.query, params); + + if (response) { + this.bestMatches = response.hits as HermesDocument[]; + } + } + + // Reopen the dropdown if it was closed on mousedown + if (!dd.isOpen) { + dd.actions.open(); + } + } + ); +} diff --git a/web/app/components/header/toolbar.hbs b/web/app/components/header/toolbar.hbs new file mode 100644 index 000000000..2b7896959 --- /dev/null +++ b/web/app/components/header/toolbar.hbs @@ -0,0 +1,60 @@ +{{#if @facets}} + <div class="toolbar mb-7"> + <div class="x-container"> + <div class="flex justify-between w-full"> + <div class="flex items-center"> + <div + class="mr-4 tracking-wide font-medium text-body-100 text-color-foreground-faint uppercase" + > + Filter by: + </div> + <div class="facets flex items-center space-x-1.5"> + <Header::FacetDropdown + @label="Type" + @facets={{@facets.docType}} + @onClick={{this.handleClick}} + @disabled={{not @facets.docType}} + /> + <Header::FacetDropdown + @label="Status" + @facets={{this.statuses}} + @onClick={{this.handleClick}} + @disabled={{not this.statuses}} + /> + <Header::FacetDropdown + @label="Product/Area" + @facets={{@facets.product}} + @onClick={{this.handleClick}} + @disabled={{not @facets.product}} + /> + <Header::FacetDropdown + @label="Owner" + @facets={{@facets.owners}} + 
@onClick={{this.handleClick}} + @disabled={{this.ownerFacetIsDisabled}} + /> + </div> + </div> + + {{#if (and @facets (not @sortControlIsHidden))}} + <Hds::Dropdown class="sort-by-dropdown" as |dd|> + <dd.ToggleButton + @text="Sort: {{this.getSortByLabel}}" + @color="secondary" + /> + <dd.Interactive + {{on "click" (fn this.updateSortBy "dateDesc" dd.close)}} + @text="Newest" + @icon="sort-desc" + /> + <dd.Interactive + {{on "click" (fn this.updateSortBy "dateAsc" dd.close)}} + @text="Oldest" + @icon="sort-asc" + /> + </Hds::Dropdown> + {{/if}} + </div> + </div> + </div> +{{/if}} diff --git a/web/app/components/header/toolbar.js b/web/app/components/header/toolbar.js new file mode 100644 index 000000000..27a177d0a --- /dev/null +++ b/web/app/components/header/toolbar.js @@ -0,0 +1,116 @@ +import Component from "@glimmer/component"; +import { action } from "@ember/object"; +import { getOwner } from "@ember/application"; +import { inject as service } from "@ember/service"; + +export default class Toolbar extends Component { + @service router; + @service toolbar; + + get currentRouteName() { + return this.router.currentRouteName; + } + + get getSortByLabel() { + switch (this.toolbar.sortBy) { + case "dateDesc": + return "Newest"; + case "dateAsc": + return "Oldest"; + } + } + + // Disable `owner` dropdown on My and Draft screens + get ownerFacetIsDisabled() { + switch (this.currentRouteName) { + case "authenticated.my": + case "authenticated.drafts": + return true; + default: + return false; + } + } + + // True in the case of no drafts or docs + get sortControlIsDisabled() { + return Object.keys(this.args.facets).length === 0; + } + + // TODO: Remove when status facet values are cleaned up + get statuses() { + let statuses = {}; + for (let status in this.args.facets.status) { + if ( + status === "Approved" || + status === "In-Review" || + status === "In Review" || + status === "Obsolete" || + status === "WIP" + ) { + statuses[status] = this.args.facets.status[status]; 
+ } + } + + if (Object.keys(statuses).length === 0) { + // This will disable the status dropdown + return null; + } else { + return statuses; + } + } + + @action + handleClick(name, value) { + // Build filters (selected facet values). + let filters = { + docType: [], + owners: [], + status: [], + product: [], + }; + for (const facet in this.args.facets) { + let selectedFacetVals = []; + for (const facetVal in this.args.facets[facet]) { + if (this.args.facets[facet][facetVal]["selected"]) { + selectedFacetVals.push(facetVal); + } + } + filters[facet] = selectedFacetVals; + } + + // Update filters based on what facet value was clicked and if it was + // previously selected or not. + if (this.args.facets[name][value]["selected"]) { + // Facet value was already selected so we need to remove it. + const index = filters[name].indexOf(value); + if (index > -1) { + filters[name].splice(index, 1); + } + } else { + // Facet value wasn't selected before so now we need to add it. + filters[name].push(value); + } + + this.router.transitionTo({ + queryParams: { + docType: filters["docType"], + owners: filters["owners"], + page: 1, + product: filters["product"], + status: filters["status"], + }, + }); + } + + @action + updateSortBy(value, closeDropdown) { + this.toolbar.sortBy = value; + + this.router.transitionTo({ + queryParams: { + sortBy: value, + }, + }); + closeDropdown(); + } +} diff --git a/web/app/components/hermes-logo.hbs b/web/app/components/hermes-logo.hbs new file mode 100644 index 000000000..468e9cc35 --- /dev/null +++ b/web/app/components/hermes-logo.hbs @@ -0,0 +1,10 @@ +<div + class="hermes-logo" + ...attributes +> + <FlightIcon @name="hashicorp" @size="24" /> + <div class="hermes-logo-divider"></div> + <div class="hermes-logo-text"> + Hermes + </div> +</div> diff --git a/web/app/components/hermes-logo.ts b/web/app/components/hermes-logo.ts new file mode 100644 index 000000000..e27f169b1 --- /dev/null +++ b/web/app/components/hermes-logo.ts @@ -0,0 +1,8 @@ 
+import Component from "@glimmer/component"; + +interface HermesLogoComponentSignature { + Element: HTMLDivElement; + Args: {}; +} + +export default class HermesLogoComponent extends Component<HermesLogoComponentSignature> {} diff --git a/web/app/components/inputs/people-select.hbs b/web/app/components/inputs/people-select.hbs new file mode 100644 index 000000000..62d0a18e3 --- /dev/null +++ b/web/app/components/inputs/people-select.hbs @@ -0,0 +1,18 @@ +<PowerSelectMultiple + class="hds-form-field__control multiselect w-full" + @searchEnabled={{true}} + @search={{perform this.searchDirectory}} + @searchField="email" + @options={{this.people}} + @selected={{@selected}} + @renderInPlace={{@renderInPlace}} + @placeholder="Search for your peers..." + @onChange={{@onChange}} + @onInput={{this.onInput}} + @onClose={{this.onClose}} + @selectedItemComponent={{component "multiselect/user-email-image-chip"}} + ...attributes + as |value| +> + {{value.email}} +</PowerSelectMultiple> diff --git a/web/app/components/inputs/people-select.ts b/web/app/components/inputs/people-select.ts new file mode 100644 index 000000000..7c79962ce --- /dev/null +++ b/web/app/components/inputs/people-select.ts @@ -0,0 +1,86 @@ +import Component from "@glimmer/component"; +import { tracked } from "@glimmer/tracking"; +import { inject as service } from "@ember/service"; +import { task } from "ember-concurrency"; +import { action } from "@ember/object"; + +export interface Person { + emailAddresses: { value: string }[]; + photos: { url: string }[]; +} + +interface PeopleSelectComponentSignature { + Args: { + selected: Person[]; + onBlur?: () => void; + onChange: (people: Person[]) => void; + }; +} + +export default class PeopleSelectComponent extends Component<PeopleSelectComponentSignature> { + // @ts-ignore + // FetchService not yet in the registry + @service("fetch") declare fetchSvc: any; + + /** + * The list of people to display in the dropdown. 
+ * Instantiated empty and populated by the `searchDirectory` task. + */ + @tracked protected people = []; + + /** + * An action occurring on every keystroke. + * Handles cases where the user clears the input, + * since `onChange` is not called in that case. + * See: https://ember-power-select.com/docs/custom-search-action + */ + @action onInput(inputValue: string) { + if (inputValue === "") { + this.people = []; + } + } + + /** + * The action taken when focus leaves the component. + * Clears the people list and calls `this.args.onBlur` if it exists. + */ + @action onClose() { + this.people = []; + if (this.args.onBlur) { + this.args.onBlur(); + } + } + + /** + * A task that queries the server for people matching the given query. + * Used as the `search` action for the `ember-power-select` component. + * Sets `this.people` to the results of the query. + */ + protected searchDirectory = task(async (query) => { + try { + const res = await this.fetchSvc.fetch("/api/v1/people", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + query: query, + }), + }); + + const peopleJson = await res.json(); + + if (peopleJson) { + this.people = peopleJson.map((p: Person) => { + return { + email: p.emailAddresses[0]?.value, + imgURL: p.photos?.[0]?.url, + }; + }); + } else { + this.people = []; + } + } catch (err) { + console.log(`Error querying people: ${err}`); + throw err; + } + }); +} diff --git a/web/app/components/inputs/tag-select.hbs b/web/app/components/inputs/tag-select.hbs new file mode 100644 index 000000000..6ab5a0440 --- /dev/null +++ b/web/app/components/inputs/tag-select.hbs @@ -0,0 +1,15 @@ +<PowerSelectMultipleWithCreate + class="hds-form-field__control multiselect" + @searchEnabled={{true}} + @search={{perform this.searchTags}} + @selected={{@selected}} + @placeholder="Select some tags..." 
+ @onChange={{optional @onChange}} + @onCreate={{this.createTag}} + @buildSuggestion={{this.createTagSuggestion}} + @selectedItemComponent={{component "multiselect/tag-chip"}} + ...attributes + as |value| +> + {{value}} +</PowerSelectMultipleWithCreate> diff --git a/web/app/components/inputs/tag-select.js b/web/app/components/inputs/tag-select.js new file mode 100644 index 000000000..43b008d41 --- /dev/null +++ b/web/app/components/inputs/tag-select.js @@ -0,0 +1,40 @@ +import Component from "@glimmer/component"; +import { tracked } from "@glimmer/tracking"; +import { action } from "@ember/object"; +import { inject as service } from "@ember/service"; +import { task } from "ember-concurrency"; + +export default class TagSelect extends Component { + @service algolia; + @service("config") configSvc; + + @tracked tagOpts = []; + + @action + createTag(tag) { + // Format tag as lowercase and remove spaces. + tag = tag.toLowerCase().replace(/\s+/g, ""); + + this.tagOpts.push(tag); + this.args.onChange?.([...this.args.selected, tag]); + } + + @action + createTagSuggestion(tag) { + return `Create new "${tag}" tag...`; + } + + @task({ restartable: true }) *searchTags(query) { + if (query) { + const resp = yield this.algolia.searchForFacetValues.perform( + this.configSvc.config.algolia_docs_index_name, + "tags", + query, + { + maxFacetHits: 7, + } + ); + return resp["facetHits"].map(({ value }) => value); + } + } +} diff --git a/web/app/components/modal-alert-error.hbs b/web/app/components/modal-alert-error.hbs new file mode 100644 index 000000000..aa383d9bb --- /dev/null +++ b/web/app/components/modal-alert-error.hbs @@ -0,0 +1,11 @@ +<Hds::Alert + @type="inline" + @color="critical" + @icon={{false}} + @onDismiss={{@onDismiss}} + class="mt-3" + as |A| +> + <A.Title>{{@title}}</A.Title> + <A.Description>{{@description}}</A.Description> +</Hds::Alert> diff --git a/web/app/components/modals/doc-created.hbs b/web/app/components/modals/doc-created.hbs new file mode 100644 index 
000000000..575b0405d --- /dev/null +++ b/web/app/components/modals/doc-created.hbs @@ -0,0 +1,46 @@ +{{#if this.modalIsShown}} + <Hds::Modal data-test-doc-created-modal as |M|> + <M.Header> + <div class="flex items-center"> + <FlightIcon + @name="check-circle-fill" + class="text-color-palette-green-200 mr-2" + /> + Doc Created! + </div> + </M.Header> + <M.Body> + <p class="text-body-300"> + Youʼll be notified about @mentions, threads involving you, and tasks + assigned to you. To subscribe to all comments, + <Hds::Link::Inline + @icon="external-link" + @iconPosition="trailing" + @hrefIsExternal={{true}} + @href="https://support.google.com/docs/answer/91588" + >manage your notifications</Hds::Link::Inline> + on Google. + </p> + <p class="text-color-foreground-faint mt-3 mb-1">We expect to make this + easier in the future.</p> + </M.Body> + <M.Footer as |F|> + <div class="flex space-x-6 items-center"> + <Hds::Button + data-test-doc-created-modal-button + @text="Continue to doc" + {{on "click" this.close}} + /> + <Hds::Form::Checkbox::Field + data-test-doc-created-modal-checkbox + {{on "click" this.toggleChecked}} + as |F| + > + <F.Label> + Donʼt show this again + </F.Label> + </Hds::Form::Checkbox::Field> + </div> + </M.Footer> + </Hds::Modal> +{{/if}} diff --git a/web/app/components/modals/doc-created.ts b/web/app/components/modals/doc-created.ts new file mode 100644 index 000000000..9026350c8 --- /dev/null +++ b/web/app/components/modals/doc-created.ts @@ -0,0 +1,36 @@ +import { action } from "@ember/object"; +import Component from "@glimmer/component"; +import window from "ember-window-mock"; + +interface ModalsDocCreatedComponentSignature { + Args: { + close: () => void; + }; +} + +export default class ModalsDocCreatedComponent extends Component<ModalsDocCreatedComponentSignature> { + get localStorageSetting(): string | null { + return window.localStorage.getItem("docCreatedModalIsHidden"); + } + + checkboxClicked = false; + + get modalIsShown(): boolean { + if 
(this.localStorageSetting) { + return false; + } else { + return true; + } + } + + @action close() { + if (this.checkboxClicked) { + window.localStorage.setItem("docCreatedModalIsHidden", "true"); + } + this.args.close(); + } + + @action toggleChecked() { + this.checkboxClicked = !this.checkboxClicked; + } +} diff --git a/web/app/components/modals/index.hbs b/web/app/components/modals/index.hbs new file mode 100644 index 000000000..2e57aa0a6 --- /dev/null +++ b/web/app/components/modals/index.hbs @@ -0,0 +1,3 @@ +{{#if (eq this.modalAlerts.activeModal "docCreated")}} + <Modals::DocCreated @close={{this.modalAlerts.close}} /> +{{/if}} diff --git a/web/app/components/modals/index.ts b/web/app/components/modals/index.ts new file mode 100644 index 000000000..e173d6ecc --- /dev/null +++ b/web/app/components/modals/index.ts @@ -0,0 +1,7 @@ +import Component from "@glimmer/component"; +import { inject as service } from "@ember/service"; +import ModalAlertsService from "hermes/services/modal-alerts"; + +export default class ModalsComponent extends Component { + @service declare modalAlerts: ModalAlertsService; +} diff --git a/web/app/components/multiselect/tag-chip.hbs b/web/app/components/multiselect/tag-chip.hbs new file mode 100644 index 000000000..66dbde71b --- /dev/null +++ b/web/app/components/multiselect/tag-chip.hbs @@ -0,0 +1,2 @@ +<FlightIcon @name="tag" @color="var(--token-color-palette-neutral-500)" /> +{{@option}} diff --git a/web/app/components/multiselect/user-email-image-chip.hbs b/web/app/components/multiselect/user-email-image-chip.hbs new file mode 100644 index 000000000..06d8c06c9 --- /dev/null +++ b/web/app/components/multiselect/user-email-image-chip.hbs @@ -0,0 +1,4 @@ +<Person + @imgURL={{@option.imgURL}} + @email={{@option.email}} +/> diff --git a/web/app/components/new/doc-form.hbs b/web/app/components/new/doc-form.hbs new file mode 100644 index 000000000..9750e805c --- /dev/null +++ b/web/app/components/new/doc-form.hbs @@ -0,0 +1,193 @@ +{{#if 
this.docIsBeingCreated}} + <div class="text-center hds-typography-display-400 mt-3"> + <FlightIcon @name="loading" @size="24" /> + <div class="mt-8 text-display-200 font-semibold"> + Creating + {{@docType}} + draft... + </div> + <div class="text-body-200 text-color-foreground-faint">This usually takes 10-20 seconds.</div> + </div> +{{else}} + <form + class="grid gap-10 grid-cols-[1fr_250px] grid-rows-1" + {{on "submit" this.submit}} + > + <div> + <div class="space-y-4"> + <h1 + class="hds-typography-display-500 hds-font-weight-bold hds-foreground-strong" + >Create your {{@docType}}</h1> + <p>Complete the following metadata to create your + {{@docType}} + and begin editing your draft.</p> + </div> + <div class="pt-10 space-y-6"> + <Hds::Form::TextInput::Field + @type="text" + @isRequired={{true}} + @value={{this.title}} + name="title" + placeholder="Enter a document title" + {{on "input" this.updateForm}} + as |F| + > + <F.Label>Title</F.Label> + <F.HelperText>Your title should succinctly outline the idea you're + proposing.</F.HelperText> + </Hds::Form::TextInput::Field> + + <Hds::Form::Textarea::Field + @value={{this.summary}} + rows="3" + name="summary" + {{on "input" this.updateForm}} + as |F| + > + <F.Label>Summary</F.Label> + <F.HelperText>One or two sentences outlining your doc.</F.HelperText> + </Hds::Form::Textarea::Field> + + <Hds::Form::Select::Field + @type="text" + @isRequired={{true}} + @value={{this.productArea}} + name="productArea" + {{on "input" this.updateForm}} + as |F| + > + <F.Label>Product/Area</F.Label> + <F.HelperText>Specify the full name of the product or area this + {{@docType}} + belongs to.</F.HelperText> + {{#if @productAbbrevMappings}} + <F.Options> + <option value=""></option> + {{#each-in @productAbbrevMappings as |name|}} + <option value="{{name}}">{{name}}</option> + {{/each-in}} + </F.Options> + {{/if}} + </Hds::Form::Select::Field> + + <Hds::Form::TextInput::Field + @type="text" + @isRequired={{true}} + 
@value={{this.productAbbreviation}} + placeholder="Select product/area to populate abbreviation" + @isInvalid={{this.formErrors.productAbbreviation}} + name="productAbbreviation" + {{on "input" this.updateForm}} + disabled + as |F| + > + <F.Label>Product/Area abbreviation</F.Label> + {{#if this.formErrors.productAbbreviation}} + <F.Error as |E|> + <E.Message>{{this.formErrors.productAbbreviation}}</E.Message> + </F.Error> + {{/if}} + <F.HelperText>Product/Area abbreviation is automatically populated on + selecting the "Product/Area" option. + </F.HelperText> + </Hds::Form::TextInput::Field> + + {{! Note: We are still refining the subscribe/follow feature set. + As part of that effort we will be looking into how the concept + of "tags" would be useful. For now, we are choosing to + comment out defining tags as part of the document draft + creation workflow. + }} + {{!-- <Hds::Form::Field @layout="vertical" @isOptional={{true}} as |F|> + {{yield + (hash + Error=F.Error + HelperText=F.HelperText + Label=F.Label + isRequired=F.isRequired + isOptional=F.isOptional + ) + }} + <F.Control> + <Inputs::TagSelect + @selected={{this.tags}} + @onChange={{this.updateTags}} + /> + </F.Control> + <F.Label><FlightIcon @name="tag" /> + Add tags + </F.Label> + {{#if this.formErrors.tags}} + <F.Error as |E|> + <E.Message>{{this.formErrors.tags}}</E.Message> + </F.Error> + {{/if}} + <F.HelperText> + Use tags to help people discover this document based on their + cross-functional interests. For instance, "raft", "design" or + "a11y". There is a maximum of 5 tags. 
+ </F.HelperText> + </Hds::Form::Field> --}} + + <Hds::Form::Field @layout="vertical" @isOptional={{true}} as |F|> + {{yield + (hash + Error=F.Error + HelperText=F.HelperText + Label=F.Label + isRequired=F.isRequired + isOptional=F.isOptional + ) + }} + <F.Control> + <PowerSelectMultiple + class="hds-form-field__control multiselect" + @searchEnabled={{true}} + @search={{perform this.searchDirectory}} + @searchField="email" + @options={{this.people}} + @selected={{this.contributors}} + @placeholder="Search for your peers..." + @onChange={{this.updateContributors}} + @selectedItemComponent={{component "multiselect/user-email-image-chip"}} + as |value| + > + {{value.email}} + </PowerSelectMultiple> + </F.Control> + <F.Label><FlightIcon @name="users" /> + Add contributors + </F.Label> + {{#if this.formErrors.contributors}} + <F.Error as |E|> + <E.Message>{{this.formErrors.contributors}}</E.Message> + </F.Error> + {{/if}} + <F.HelperText> + If you're collaborating with others on this + {{@docType}}, add them here. The document will automatically be + shared with the collaborators specified here. You can also add + contributors later. 
+ </F.HelperText> + </Hds::Form::Field> + </div> + </div> + <div> + <div class="preview-card"> + <h3><FlightIcon @name="eye" /> Preview</h3> + <Doc::Tile + @productArea={{this.productArea}} + @status="draft" + @title={{this.title}} + @owner={{this.authenticatedUser.info.email}} + /> + <Hds::Button + @text="Create {{@docType}} in Google Drive" + type="submit" + disabled={{not this.isValid}} + class="w-full" + /> + </div> + </div> + </form> +{{/if}} diff --git a/web/app/components/new/doc-form.js b/web/app/components/new/doc-form.js new file mode 100644 index 000000000..a98c00e07 --- /dev/null +++ b/web/app/components/new/doc-form.js @@ -0,0 +1,209 @@ +import Component from "@glimmer/component"; +import { task } from "ember-concurrency"; +import { inject as service } from "@ember/service"; +import { tracked } from "@glimmer/tracking"; +import { action } from "@ember/object"; +import Ember from 'ember'; + +const FORM_ERRORS = { + title: null, + summary: null, + productAbbreviation: null, + tags: null, + contributors: null, +}; + +const AWAIT_DOC_DELAY = Ember.testing ? 0 : 2000; +const AWAIT_DOC_CREATED_MODAL_DELAY = Ember.testing ? 
0 : 1500; + +export default class NewDocForm extends Component { + @service("fetch") fetchSvc; + @service authenticatedUser; + @service flashMessages; + @service modalAlerts; + @service router; + + @tracked title = ""; + @tracked summary = ""; + @tracked productArea = ""; + @tracked tags = []; + @tracked contributors = []; + + get productAbbreviation() { + return this.args.productAbbrevMappings.get(this.productArea); + } + + get form() { + return { + title: this.title, + summary: this.summary, + productArea: this.productArea, + productAbbreviation: this.productAbbreviation, + tags: this.tags, + contributors: this.contributors, + }; + } + + @tracked docType = null; + @tracked isValid = false; + @tracked docIsBeingCreated = false; + @tracked people = []; + @tracked formErrors = { ...FORM_ERRORS }; + + // Don't do that annoying thing where fields become red before the user even gets a chance to submit the form. + @tracked eagerValidation = false; + + get hasErrors() { + const defined = (a) => a != null; + return Object.values(this.formErrors).filter(defined).length > 0; + } + + @action updateForm(ev) { + const formObject = Object.fromEntries( + new FormData(ev.target.form).entries() + ); + + this.title = formObject.title; + this.summary = formObject.summary; + this.productArea = formObject.productArea; + + // Check for required fields. + this.isValid = this.title && this.productArea; + + // Validate other fields. 
+ if (this.eagerValidation) { + this.validate(); + } + } + + @action updateTags(tags) { + this.tags = tags; + + if (this.eagerValidation) { + this.validate(); + } + } + + @action + updateContributors(contributors) { + this.contributors = contributors; + } + + @action submit(ev) { + ev.preventDefault(); + + // Now that a submission has been attempted, we can be aggressive about validation + this.eagerValidation = true; + this.validate(); + if (this.isValid && !this.hasErrors) { + this.createDoc.perform(this.form); + } + } + + validate() { + const errors = { ...FORM_ERRORS }; + if (/\d/.test(this.productAbbreviation)) { + errors.productAbbreviation = + "Product abbreviation can't include a number"; + } + + if (this.tags.length > 5) { + errors.tags = "A maximum of 5 tags are allowed."; + } + + this.formErrors = errors; + } + + // getEmails extracts the emails + // from the select options object + getEmails(values) { + const emails = []; + values.forEach(function (v) { + if (v) { + emails.push(v.email); + } + }); + + return emails; + } + + @task *createDoc() { + this.docIsBeingCreated = true; + + // Returns a promise that always results after the provided number of milliseconds + const wait = (ms) => new Promise((res) => setTimeout(res, ms)); + + try { + const doc = yield this.fetchSvc + .fetch("/api/v1/drafts", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + contributors: this.getEmails(this.contributors), + docType: this.args.docType, + owner: this.authenticatedUser.info.email, + product: this.productArea, + productAbbreviation: this.productAbbreviation, + summary: this.summary, + title: this.title, + tags: this.tags, + }), + }) + .then((resp) => resp.json()) + .catch((err) => { + this.docIsBeingCreated = false; + console.log(`Error creating document draft: ${err}`); + throw err; + }); + + // Wait for document to be available. + yield wait(AWAIT_DOC_DELAY); + + // Set modal on a delay so it appears after transition. 
+ this.modalAlerts.setActive.perform("docCreated", AWAIT_DOC_CREATED_MODAL_DELAY); + + this.router.transitionTo("authenticated.document", doc.id, { + queryParams: { draft: true }, + }); + + } catch (err) { + this.docIsBeingCreated = false; + // TODO: Handle error by using a toast and showing the create form again with + // everything still populated + throw err; + } + } + + @task + *searchDirectory(query) { + const peopleResp = yield this.fetchSvc + .fetch("/api/v1/people", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + query: query, + }), + }) + .then((resp) => resp.json()) + .catch((err) => { + console.log(`Error querying people: ${err}`); + throw err; + }); + + const peopleData = []; + peopleResp.forEach(function (p) { + // Only set image URL for + // users that have it set + if (p.photos) { + peopleData.push({ + email: p.emailAddresses[0].value, + imgURL: p.photos[0].url, + }); + } else { + peopleData.push({ email: p.emailAddresses[0].value }); + } + }); + + this.people = peopleData; + } +} diff --git a/web/app/components/notification.hbs b/web/app/components/notification.hbs new file mode 100644 index 000000000..b40f151e4 --- /dev/null +++ b/web/app/components/notification.hbs @@ -0,0 +1,15 @@ +<div class="notifications-container"> + {{#each this.flashMessages.queue as |flash|}} + <FlashMessage @flash={{flash}} class="notification" as |component flash|> + <Hds::Toast + @color={{flash.type}} + @icon={{flash.icon}} + @onDismiss={{this.dismiss}} + as |T| + > + <T.Title>{{flash.title}}</T.Title> + <T.Description>{{flash.message}}</T.Description> + </Hds::Toast> + </FlashMessage> + {{/each}} +</div> diff --git a/web/app/components/notification.js b/web/app/components/notification.js new file mode 100644 index 000000000..518a0f769 --- /dev/null +++ b/web/app/components/notification.js @@ -0,0 +1,12 @@ +import Component from '@glimmer/component'; +import { inject as service } from '@ember/service'; +import { action } 
from "@ember/object"; + +export default class Notification extends Component { + @service flashMessages; + + @action + dismiss() { + // Left empty as flash messages disappear on click + } +} diff --git a/web/app/components/pagination/index.hbs b/web/app/components/pagination/index.hbs new file mode 100644 index 000000000..27affdcba --- /dev/null +++ b/web/app/components/pagination/index.hbs @@ -0,0 +1,46 @@ +<div class="flex justify-center"> + {{#if (eq @nbPages 1)}} + {{! There is only one page of results }} + <Pagination::Link @icon="chevron-left" @disabled={{true}} /> + <Pagination::Link @page="1" @disabled={{true}} /> + <Pagination::Link @icon="chevron-right" @disabled={{true}} /> + {{else}} + {{! There is more than one page of results }} + {{#each this.pages as |page index|}} + {{#if (eq page @currentPage)}} + {{! This is the current page }} + {{#if (eq this.pages.length (add index 1))}} + {{! This is the current page and also the last page }} + <Pagination::Link @page={{page}} @disabled={{true}} /> + <Pagination::Link @icon="chevron-right" @disabled={{true}} /> + {{else if (eq index 0)}} + {{! This is the current page and also the first page }} + <Pagination::Link @icon="chevron-left" @disabled={{true}} /> + <Pagination::Link @page={{page}} @disabled={{true}} /> + {{else}} + {{! This is the current page, but not the last page}} + <Pagination::Link @page={{page}} @disabled={{true}} /> + {{/if}} + {{else}} + {{! This is not the current page }} + {{#if (eq this.pages.length (add index 1))}} + {{! This is the last page, and not current }} + <Pagination::Link @page={{page}} /> + <Pagination::Link + @icon="chevron-right" + @page={{add @currentPage 1}} + /> + {{else if (eq index 0)}} + {{! 
This is the first page, and not current }} + <Pagination::Link + @icon="chevron-left" + @page={{add @currentPage -1}} + /> + <Pagination::Link @page={{page}} /> + {{else}} + <Pagination::Link @page={{page}} /> + {{/if}} + {{/if}} + {{/each}} + {{/if}} +</div> diff --git a/web/app/components/pagination/index.ts b/web/app/components/pagination/index.ts new file mode 100644 index 000000000..a2a1d20ec --- /dev/null +++ b/web/app/components/pagination/index.ts @@ -0,0 +1,46 @@ +import Component from "@glimmer/component"; +import { inject as service } from "@ember/service"; +import RouterService from "@ember/routing/router-service"; + +interface PaginationComponentSignature { + Args: { + nbPages: number; + currentPage: number; + }; +} + +export default class PaginationComponent extends Component<PaginationComponentSignature> { + @service declare router: RouterService; + + protected get currentRouteName(): string { + return this.router.currentRouteName; + } + + get pages(): number[] { + let pages = []; + // If there are less than 10 pages of results, show all pages. + // Or if the current page is 6 or less and there are more than 10 pages of + // results, show the first 10 pages. + if (this.args.nbPages < 10 || this.args.currentPage <= 6) { + for (let i = 1; i <= this.args.nbPages && i <= 10; i++) { + pages.push(i); + } + } else if (this.args.nbPages - this.args.currentPage <= 4) { + // We're at the end of the results so show the last 10 pages. + for (let i = this.args.nbPages - 9; i <= this.args.nbPages; i++) { + pages.push(i); + } + } else { + // We're in the middle of the results, so show pages (current-5) to + // (current+4). 
+ for ( + let i = this.args.currentPage - 5; + i <= this.args.currentPage + 4; + i++ + ) { + pages.push(i); + } + } + return pages; + } +} diff --git a/web/app/components/pagination/link.hbs b/web/app/components/pagination/link.hbs new file mode 100644 index 000000000..adb8afecf --- /dev/null +++ b/web/app/components/pagination/link.hbs @@ -0,0 +1,27 @@ +{{#if @disabled}} + <span + class="w-9 h-12 grid place-items-center no-underline text-color-foreground-primary relative + {{if @icon 'opacity-50' 'text-color-foreground-action'}}" + > + {{#if @icon}} + <FlightIcon @name={{@icon}} /> + {{else}} + {{@page}} + <div + class="absolute h-0.5 w-6 bg-color-palette-blue-200 bottom-0 left-1/2 -translate-x-1/2" + ></div> + {{/if}} + </span> +{{else}} + <LinkTo + @route={{this.currentRouteName}} + @query={{hash page=@page}} + class="w-9 h-12 hover:text-color-foreground-strong grid place-items-center no-underline text-color-foreground-primary" + > + {{#if @icon}} + <FlightIcon @name={{@icon}} /> + {{else}} + {{@page}} + {{/if}} + </LinkTo> +{{/if}} diff --git a/web/app/components/pagination/link.ts b/web/app/components/pagination/link.ts new file mode 100644 index 000000000..7d7bff840 --- /dev/null +++ b/web/app/components/pagination/link.ts @@ -0,0 +1,16 @@ +import RouterService from "@ember/routing/router-service"; +import { inject as service } from "@ember/service"; +import Component from "@glimmer/component"; + +interface PaginationLinkComponentSignature { + Element: HTMLAnchorElement; + Args: {}; +} + +export default class PaginationLinkComponent extends Component<PaginationLinkComponentSignature> { + @service declare router: RouterService; + + protected get currentRouteName(): string { + return this.router.currentRouteName; + } +} diff --git a/web/app/components/person.hbs b/web/app/components/person.hbs new file mode 100644 index 000000000..cf34a901f --- /dev/null +++ b/web/app/components/person.hbs @@ -0,0 +1,27 @@ +{{#unless this.isHidden}} + <div class="flex space-x-2 
w-full" ...attributes> + <div + class="w-5 h-5 shrink-0 overflow-hidden rounded-full flex justify-center items-center bg-[color:var(--token-color-palette-alpha-300)]" + > + {{#if @imgURL}} + <img src={{@imgURL}} referrerpolicy="no-referrer" class="w-full" /> + {{else}} + <div class="hds-foreground-high-contrast hds-typography-body-100 flex"> + {{#if @email}} + <span class="capitalize"> + {{get-first-letter @email}} + </span> + {{else}} + <FlightIcon @name="user" class="scale-90" /> + {{/if}} + </div> + {{/if}} + </div> + <div + class="person-email hds-typography-body-200 truncate hds-foreground-primary" + title={{@email}} + > + {{or @email "Unknown"}} + </div> + </div> +{{/unless}} diff --git a/web/app/components/person.js b/web/app/components/person.js new file mode 100644 index 000000000..fe80b5d7f --- /dev/null +++ b/web/app/components/person.js @@ -0,0 +1,7 @@ +import Component from "@glimmer/component"; + +export default class Person extends Component { + get isHidden() { + return this.args.ignoreUnknown && !this.args.email; + } +} diff --git a/web/app/components/results/index.hbs b/web/app/components/results/index.hbs new file mode 100644 index 000000000..c2bf928b4 --- /dev/null +++ b/web/app/components/results/index.hbs @@ -0,0 +1,57 @@ +<section class="flex flex-col items-center flex-1 min-h-full"> + <div class="x-container"> + + {{#let (get (get @results.hits 0) "product") as |product|}} + {{#if product}} + <div class="flex flex-col items-start w-full pb-10"> + <Hds::Card::Container + @level="mid" + @hasBorder="true" + @overflow="hidden" + class="flex flex-col items-start space-y-3 pt-4 px-4 pb-3" + > + <Hds::Badge + @text={{product}} + @icon={{or (get-product-id product) "folder"}} + /> + <Hds::Link::Standalone + @text="View all {{product}} documents" + @icon="arrow-right-circle" + @iconPosition="trailing" + @route="authenticated.all" + @query={{hash product=(array product)}} + /> + </Hds::Card::Container> + </div> + {{/if}} + {{/let}} + + <h1 + 
class="hds-typography-display-300 hds-font-weight-semibold hds-foreground-strong" + >{{@results.nbHits}} documents matching “{{@query}}”</h1> + + <div class="flex flex-col space-y-12 w-full py-10"> + <div class="tile-list"> + {{#each @results.hits as |doc index|}} + <Doc::Tile + @avatar="{{get doc.ownerPhotos 0}}" + @docID="{{doc.objectID}}" + @isResult="{true}" + @owner="{{get doc.owners 0}}" + @productArea="{{doc.product}}" + @snippet="{{doc._snippetResult.content.value}}" + @status="{{lowercase doc.status}}" + @title="{{doc.title}}" + /> + {{/each}} + </div> + + </div> + + <Pagination + @currentPage={{(add @results.page 1)}} + @nbPages={{@results.nbPages}} + /> + </div> + +</section> diff --git a/web/app/components/results/index.ts b/web/app/components/results/index.ts new file mode 100644 index 000000000..fd7150863 --- /dev/null +++ b/web/app/components/results/index.ts @@ -0,0 +1,11 @@ +import Component from "@glimmer/component"; +import { SearchResponse } from "@algolia/client-search"; + +interface ResultsIndexComponentSignature { + Args: { + results?: SearchResponse; + query: string; + }; +} + +export default class ResultsIndexComponent extends Component<ResultsIndexComponentSignature> {} diff --git a/web/app/components/row-results.hbs b/web/app/components/row-results.hbs new file mode 100644 index 000000000..48923461e --- /dev/null +++ b/web/app/components/row-results.hbs @@ -0,0 +1,59 @@ +{{! + Displays the results of an Algolia search in a rows format. + + <RowResults /> properties: + @docs: Algolia result's "hits" element from a docs search response. + @isDraft: Document is a draft. + @nbPages: Algolia result's "nbPages" element from a docs search response. + @page: Algolia result's "page" element from a docs search response. 
+}} + +<section> + <div class="x-container"> + <div class="row-results"> + {{#if @docs}} + <Hds::Table @isStriped={{false}} class="row-results__table"> + <:head as |H|> + <H.Tr> + <H.Th class="name">Name</H.Th> + <H.Th class="type">Type</H.Th> + <H.Th class="status">Status</H.Th> + <H.Th class="product">Product/Area</H.Th> + <H.Th class="owner">Owner</H.Th> + <H.Th class="created">Created</H.Th> + </H.Tr> + </:head> + <:body> + {{#each @docs as |doc index|}} + <Doc::Row + @avatar="{{get doc.ownerPhotos 0}}" + @createdDate="{{parse-date doc.created}}" + @docID="{{doc.objectID}}" + @docNumber="{{doc.docNumber}}" + @docType="{{doc.docType}}" + @owner="{{get doc.owners 0}}" + @productArea="{{doc.product}}" + @status="{{lowercase doc.status}}" + @title="{{doc.title}}" + @isDraft={{@isDraft}} + /> + {{/each}} + </:body> + </Hds::Table> + <Pagination @nbPages={{@nbPages}} @currentPage={{@currentPage}} /> + {{else}} + {{#if @isDraft}} + <Hds::Alert @type="inline" as |A|> + <A.Title>No drafts found</A.Title> + <A.Button + @text="Create a document draft" + @color="primary" + @icon="file-plus" + @route="authenticated.new" + /> + </Hds::Alert> + {{/if}} + {{/if}} + </div> + </div> +</section> diff --git a/web/app/components/row-results.ts b/web/app/components/row-results.ts new file mode 100644 index 000000000..03f1a3ea4 --- /dev/null +++ b/web/app/components/row-results.ts @@ -0,0 +1,12 @@ +import Component from "@glimmer/component"; + +interface RowResultsComponentSignature { + Args: { + // TODO: Add HermesDocument[] when we have a type for it. 
+ docs: unknown[]; + isDraft?: boolean; + nbPages: number; + currentPage: number; + }; +} +export default class RowResultsComponent extends Component<RowResultsComponentSignature> {} diff --git a/web/app/components/settings/subscription-list-item.hbs b/web/app/components/settings/subscription-list-item.hbs new file mode 100644 index 000000000..ffeee5901 --- /dev/null +++ b/web/app/components/settings/subscription-list-item.hbs @@ -0,0 +1,29 @@ +<li class="group" data-test-subscription-list-item> + <button + class="text-body-300 text-color-foreground-primary flex items-center space-x-6 hover:cursor-pointer bg-transparent hover:bg-color-surface-action border-0 w-full h-full justify-between py-3 px-3.5 transition-colors" + type="button" + {{on "click" this.toggleChecked}} + > + <div class="flex space-x-3 items-center"> + {{#let (get-product-id @productArea) as |productID|}} + <Hds::IconTile + @size="small" + @logo={{productID}} + @icon={{unless productID "folder"}} + /> + {{/let}} + <div data-test-subscription-list-item-name> + {{@productArea}} + </div> + </div> + <div class="shrink-0 flex items-center justify-end relative"> + <Hds::Form::Toggle::Base + tabindex="-1" + name="toggle-subscription" + aria-label={{if this.isChecked "Unsubscribe" "Subscribe"}} + @value="enable" + checked={{this.isChecked}} + /> + </div> + </button> +</li> diff --git a/web/app/components/settings/subscription-list-item.ts b/web/app/components/settings/subscription-list-item.ts new file mode 100644 index 000000000..ab58097d0 --- /dev/null +++ b/web/app/components/settings/subscription-list-item.ts @@ -0,0 +1,40 @@ +import Component from "@glimmer/component"; +import { action } from "@ember/object"; +import { inject as service } from "@ember/service"; +import AuthenticatedUserService from "hermes/services/authenticated-user"; +import { assert } from "@ember/debug"; + +interface SettingsSubscriptionListItemComponentSignature { + Args: { + productArea: string; + }; +} + +export default class 
SettingsSubscriptionListItemComponent extends Component<SettingsSubscriptionListItemComponentSignature> { + @service declare authenticatedUser: AuthenticatedUserService; + + /** + * Determines whether the user is subscribed to the product area. + */ + protected get isChecked(): boolean { + assert( + "isChecked expects a subscriptions list", + this.authenticatedUser.subscriptions + ); + return this.authenticatedUser.subscriptions.some( + (subscription) => subscription.productArea === this.args.productArea + ); + } + + /** + * Toggles the subscription for the product area. + * Currently only supports "instant" subscriptions. + */ + @action protected toggleChecked(): void { + if (this.isChecked) { + this.authenticatedUser.removeSubscription.perform(this.args.productArea); + } else { + this.authenticatedUser.addSubscription.perform(this.args.productArea); + } + } +} diff --git a/web/app/components/settings/subscription-list.hbs b/web/app/components/settings/subscription-list.hbs new file mode 100644 index 000000000..4fb820d2c --- /dev/null +++ b/web/app/components/settings/subscription-list.hbs @@ -0,0 +1,16 @@ +<Hds::Form::TextInput::Field + data-test-subscription-list-filter-input + @type="search" + placeholder="Filter..." 
+ @width="100%"
+ class="mt-6"
+ {{on "input" (perform this.onInput)}}
+/>
+<ol
+ data-test-subscription-list
+ class="list-none mt-5 w-full divide-y divide-color-border-primary"
+>
+ {{#each this.shownItems as |listItem|}}
+ <Settings::SubscriptionListItem @productArea={{listItem}} />
+ {{/each}}
+</ol>
diff --git a/web/app/components/settings/subscription-list.ts b/web/app/components/settings/subscription-list.ts
new file mode 100644
index 000000000..0e3d90b6e
--- /dev/null
+++ b/web/app/components/settings/subscription-list.ts
@@ -0,0 +1,31 @@
+import Component from "@glimmer/component";
+import { restartableTask, task } from "ember-concurrency";
+import { tracked } from "@glimmer/tracking";
+
+interface SettingsSubscriptionListComponentSignature {
+ Args: {
+ allProductAreas: string[];
+ };
+}
+
+export default class SettingsSubscriptionListComponent extends Component<SettingsSubscriptionListComponentSignature> {
+ /**
+ * The list of product areas to show. Updated by the `onInput` task.
+ */
+ @tracked protected shownItems: string[] = this.args.allProductAreas;
+
+ /**
+ * Searches for matches and updates the `shownItems`.
+ * Restarts on every keystroke and resets when the input is empty.
+ */ + protected onInput = restartableTask(async (event) => { + let input = event.target.value; + if (input.length > 0) { + this.shownItems = this.args.allProductAreas.filter((item) => + item.toLowerCase().includes(input.toLowerCase()) + ); + } else { + this.shownItems = this.args.allProductAreas; + } + }); +} diff --git a/web/app/components/sidebar.hbs b/web/app/components/sidebar.hbs new file mode 100644 index 000000000..469a41e21 --- /dev/null +++ b/web/app/components/sidebar.hbs @@ -0,0 +1,712 @@ +<aside class="sidebar {{if this.isCollapsed 'w-8' 'w-64'}}"> + {{#if this.isCollapsed}} + <nav class="relative flex flex-col space-y-2"> + <ul class="flex flex-col items-center space-y-3 list-none"> + <X::HdsTab + @label="Expand" + @icon="sidebar-show" + @iconOnly="true" + @action={{this.expandSidebar}} + /> + <div class="flex flex-col space-y-1"> + <a + href="https://docs.google.com/document/d/{{this.docID}}" + target="_blank" + class="x-hds-tab--link" + > + <FlightIcon @name="external-link" /> + </a> + </div> + </ul> + </nav> + {{else}} + <header class="relative flex flex-col space-y-2"> + <ul class="flex w-full items-center justify-between list-none"> + <X::HdsTab + @label="Back to Dashboard" + @icon="arrow-left" + @link="authenticated.dashboard" + /> + <X::HdsTab + @label="Collapse" + @icon="sidebar-hide" + @iconOnly="true" + @action={{this.collapseSidebar}} + /> + </ul> + + <ul class="flex w-full items-center justify-end space-x-1 pr-1 list-none"> + + {{#if this.shareButtonIsShown}} + <Hds::Dropdown @listPosition="left" as |dd|> + <dd.ToggleIcon @icon="share" @text="share" @hasChevron={{false}} /> + <dd.CopyItem + @text="{{this.shortLinkBaseURL}}/{{lowercase + @document.docType + }}/{{lowercase @document.docNumber}}" + @copyItemTitle="Copy URL" + /> + </Hds::Dropdown> + {{/if}} + + <a + href="https://docs.google.com/document/d/{{this.docID}}" + target="_blank" + class="flex items-center justify-center w-[32px] h-[32px] rounded-full text-white 
bg-[color:var(--token-color-foreground-action)] hover:bg-[color:var(--token-color-foreground-action-hover)] active:bg-[color:var(--token-color-foreground-action-active)]" + > + <FlightIcon @name="external-link" class="relative ml-0.5 mt-0.5" /> + </a> + </ul> + + {{#let (get-product-id @document.product) as |productIcon|}} + {{#if productIcon}} + <div class="product-badge {{productIcon}}"> + <FlightIcon @name={{productIcon}} /> + </div> + {{/if}} + {{/let}} + </header> + + <div class="body {{if this.sidebarBodyIsShorter 'is-shorter'}}"> + <div class="flex flex-col items-start space-y-2 px-3"> + {{#if (is-empty @document.docNumber)}} + <small class="hds-typography-body-100 hds-foreground-faint">{{#unless + (is-empty @document.docType) + }}{{@document.docType}}{{/unless}} + </small> + {{else}} + <small class="hds-typography-body-100 hds-foreground-faint">{{#unless + (is-empty @document.docType) + }}{{@document.docType}}{{/unless}} + • + {{@document.docNumber}}</small> + {{/if}} + {{#if this.editingIsDisabled}} + <h1 + class="hds-typography-display-300 hds-font-weight-semibold hds-foreground-strong" + >{{this.title}}</h1> + {{else}} + <EditableField + @value={{this.title}} + @onChange={{perform this.save "title"}} + @loading={{this.save.isRunning}} + @disabled={{this.editingIsDisabled}} + > + <:default> + {{#unless (is-empty this.title)}} + <h1 + class="hds-typography-display-300 hds-font-weight-semibold hds-foreground-strong" + >{{this.title}}</h1> + {{else}} + <h1 + class="hds-typography-display-300 hds-font-weight-semibold hds-foreground-faint" + >Enter a title here.</h1> + {{/unless}} + </:default> + <:editing as |F|> + <Hds::Form::Textarea::Field + @value={{F.value}} + name="title" + {{on "blur" F.update}} + as |F| + /> + </:editing> + </EditableField> + {{/if}} + </div> + + <hr class="border-0 border-b hds-border-faint mx-3" /> + + <div class="flex flex-col items-start space-y-2 px-3"> + <small + class="hds-typography-body-100 hds-foreground-faint" + 
>Summary</small> + {{#if this.editingIsDisabled}} + <p + class="hds-typography-body-200 hds-font-weight-medium hds-foreground-primary" + >{{this.summary}}</p> + {{else}} + <EditableField + @value={{this.summary}} + @onChange={{perform this.save "summary"}} + @loading={{this.save.isRunning}} + @disabled={{this.editingIsDisabled}} + > + <:default> + {{#unless (is-empty this.summary)}} + <p + class="hds-typography-body-200 hds-font-weight-medium hds-foreground-primary" + > + {{this.summary}}</p> + {{else}} + <p + class="hds-typography-body-200 hds-font-weight-medium hds-foreground-faint" + > + Enter a summary here.</p> + {{/unless}} + </:default> + <:editing as |F|> + <Hds::Form::Textarea::Field + @value={{F.value}} + name="summary" + {{on "blur" F.update}} + as |F| + /> + </:editing> + </EditableField> + {{/if}} + </div> + + <div class="flex flex-col items-start space-y-2 px-3"> + <small + class="hds-typography-body-100 hds-foreground-faint" + >Status</small> + <Doc::State + @state={{@document.status}} + @hideProgress={{true}} + class="whitespace-nowrap" + /> + </div> + + <div class="flex flex-col items-start space-y-2 px-3"> + <small + class="hds-typography-body-100 hds-foreground-faint" + >Product/Area</small> + <Hds::Badge + @text={{@document.product}} + @icon={{get-product-id @document.product}} + /> + </div> + + <div class="flex flex-col items-start space-y-2 px-3"> + <small + class="hds-typography-body-100 hds-foreground-faint" + >Owner</small> + <Person + @imgURL={{get @document.ownerPhotos 0}} + @email={{get @document.owners 0}} + /> + </div> + + <div class="flex flex-col items-start space-y-2 px-3"> + <small + class="hds-typography-body-100 hds-foreground-faint" + >Contributors</small> + {{#if this.isOwner}} + <EditableField + @value={{this.contributors}} + @onChange={{perform this.save "contributors"}} + @loading={{this.save.isRunning}} + @disabled={{this.editingIsDisabled}} + > + <:default> + {{#if this.contributors.length}} + <ol class="person-list"> + 
{{#each this.contributors as |contributor|}} + <li> + <Person + @imgURL={{contributor.imgURL}} + @email={{contributor.email}} + /> + </li> + {{/each}} + </ol> + {{else}} + <em>No contributors</em> + {{/if}} + </:default> + <:editing as |F|> + <Inputs::PeopleSelect + class="multiselect--narrow" + @selected={{this.contributors}} + @onChange={{this.updateContributors}} + {{click-outside (fn F.update this.contributors)}} + /> + </:editing> + </EditableField> + {{else}} + {{#if this.contributors.length}} + <ol class="person-list"> + {{#each this.contributors as |contributor|}} + <li> + <Person + @imgURL={{contributor.imgURL}} + @email={{contributor.email}} + /> + </li> + {{/each}} + </ol> + {{else}} + <em>No contributors</em> + {{/if}} + {{/if}} + </div> + + {{#unless this.isDraft}} + <div class="flex flex-col items-start space-y-2 px-3"> + <small + class="hds-typography-body-100 hds-foreground-faint" + >Approvers</small> + {{#if this.isOwner}} + <EditableField + @value={{this.approvers}} + @onChange={{perform this.save "approvers"}} + @loading={{this.save.isRunning}} + @disabled={{this.editingIsDisabled}} + > + <:default> + {{#if this.approvers.length}} + <ol class="person-list"> + {{#each this.approvers as |approver|}} + <li> + <Person + @imgURL={{approver.imgURL}} + @email={{approver.email}} + /> + </li> + {{/each}} + </ol> + {{else}} + <em>No approvers</em> + {{/if}} + </:default> + <:editing as |F|> + <Inputs::PeopleSelect + class="multiselect--narrow" + @selected={{this.approvers}} + @onChange={{this.updateApprovers}} + {{click-outside (fn F.update this.approvers)}} + /> + </:editing> + </EditableField> + {{else}} + {{#if this.approvers.length}} + <ol class="person-list"> + {{#each this.approvers as |approver|}} + <li> + <Person + @imgURL={{approver.imgURL}} + @email={{approver.email}} + /> + </li> + {{/each}} + </ol> + {{else}} + <em>No approvers</em> + {{/if}} + {{/if}} + </div> + {{/unless}} + + <div class="flex flex-col items-start space-y-2 px-3"> + <small 
class="hds-typography-body-100 hds-foreground-faint"> + Created + </small> + <p>{{or @document.createdDate "Unknown"}}</p> + </div> + + <div class="flex flex-col items-start space-y-2 px-3"> + <small class="hds-typography-body-100 hds-foreground-faint"> + Last modified + </small> + <p>{{@document.lastModified}}</p> + </div> + + {{#each-in this.customEditableFields as |field attributes|}} + {{#if + (or + (and (not this.editingIsDisabled) (not this.docIsApproved)) + attributes.value + ) + }} + <div class="flex flex-col items-start space-y-2 px-3"> + <small class="hds-typography-body-100 hds-foreground-faint"> + {{attributes.displayName}} + </small> + {{#if this.isOwner}} + <CustomEditableField + @document={{@document}} + @field={{field}} + @attributes={{attributes}} + @onChange={{perform this.save field}} + @updateFieldValue={{this.updateCustomFieldValue}} + @loading={{this.save.isRunning}} + @disabled={{this.editingIsDisabled}} + /> + {{/if}} + </div> + {{/if}} + {{/each-in}} + </div> + + {{!-- <div class="flex flex-col items-start space-y-2 px-3"> + <small class="hds-typography-body-100 hds-foreground-faint">Tags</small> + {{#if this.isOwner}} + <EditableField + @value={{this.tags}} + @onChange={{perform this.save "tags"}} + @loading={{this.save.isRunning}} + > + <:default> + {{#if this.tags.length}} + <ol> + {{#each this.tags as |tag|}} + <li> + <FlightIcon + @name="tag" + @color="var(--token-color-palette-neutral-500)" + /> + {{tag}} + </li> + {{/each}} + </ol> + {{else}} + <em>No tags</em> + {{/if}} + </:default> + <:editing as |F|> + <Inputs::TagSelect + class="multiselect--narrow" + @selected={{this.tags}} + @onChange={{this.updateTags}} + {{click-outside (fn F.update this.tags)}} + /> + </:editing> + </EditableField> + {{else}} + {{#if this.tags.length}} + <ol> + {{#each this.tags as |tag|}} + <li> + <FlightIcon + @name="tag" + @color="var(--token-color-palette-neutral-500)" + /> + {{tag}} + </li> + {{/each}} + </ol> + {{else}} + <em>No tags</em> + {{/if}} + 
{{/if}} + </div> --}} + + {{#if this.userHasEditPrivileges}} + <div class="sidebar-footer"> + {{#if (not @document.appCreated)}} + <div class="px-3 -mb-1"> + <div class="w-full pt-3.5 border-t border-color-border-primary"> + <div class="text-body-200 text-color-foreground-faint"> + <h5 + class="text-body-200 font-semibold text-color-foreground-primary flex items-center mb-1" + > + Read-only headers + <FlightIcon + @name="lock" + class="shrink-0 text-color-foreground-faint -mt-px ml-1.5" + /> + </h5> + <p class="mb-1.5"> + Weʼre unable to edit the metadata of files created offsite. + </p> + </div> + <div class="text-[12px] opacity-60 italic">Create docs in-app for + best results</div> + </div> + </div> + {{else}} + {{#if this.isDraft}} + {{#if this.isOwner}} + <div class="flex items-start px-3 gap-2"> + <Hds::Button + @text="Request Review" + @size="medium" + @color="primary" + class="w-full" + {{on + "click" + (fn (set this "requestReviewModalIsActive" true)) + }} + /> + <Hds::Button + @text="Delete" + @size="medium" + @color="critical" + @icon="trash" + @isIconOnly={{true}} + class="basis-0" + {{on "click" (fn (set this "deleteModalIsActive" true))}} + /> + </div> + {{/if}} + {{else}} + {{#if this.isOwner}} + <div class="flex items-start px-3 gap-2"> + + <Hds::Button + @text={{this.moveToStatusButtonText}} + @size="medium" + @color={{this.moveToStatusButtonColor}} + class="w-full" + @icon={{if this.changeDocumentStatus.isRunning "loading"}} + disabled={{this.changeDocumentStatus.isRunning}} + {{on + "click" + (perform + this.changeDocumentStatus + this.moveToStatusButtonTargetStatus + ) + }} + /> + <Hds::Button + @text="Archive" + @size="medium" + @color="critical" + @icon="archive" + @isIconOnly={{true}} + class="basis-0" + disabled={{(eq @document.status "Obsolete")}} + {{on "click" (fn (set this "archiveModalIsActive" true))}} + /> + </div> + + {{else}} + {{#if this.isApprover}} + <div class="flex flex-col items-start px-3 gap-2"> + <Hds::Button + 
@text={{this.approveButtonText}} + @size="medium" + @color="primary" + class="w-full" + @icon={{if this.approve.isRunning "loading"}} + disabled={{(or + this.approve.isRunning + this.requestChanges.isRunning + this.hasApproved + )}} + {{on "click" (perform this.approve @profile.email)}} + /> + {{#if (eq @document.docType "FRD")}} + <Hds::Button + @text={{this.requestChangesButtonText}} + @size="medium" + @color="secondary" + class="w-full" + @icon={{if this.requestChanges.isRunning "loading"}} + disabled={{(or + this.approve.isRunning + this.requestChanges.isRunning + this.hasRequestedChanges + )}} + {{on + "click" + (perform this.requestChanges @profile.email) + }} + /> + {{/if}} + </div> + {{/if}} + {{/if}} + {{/if}} + {{/if}} + + </div> + {{/if}} + {{/if}} + + {{! Place modals at the end of the DOM so they don't inherit styles }} + + {{#in-element this.modalContainer insertBefore=null}} + + {{#if this.archiveModalIsActive}} + <Hds::Modal @color="critical" @onClose={{this.closeArchiveModal}} as |M|> + <M.Header> + Archive document? + </M.Header> + <M.Body> + Are you sure you want to move the document + <strong>{{this.title}}</strong> + to + <strong>Obsolete</strong> + status? + {{#if this.modalErrorIsShown}} + <ModalAlertError + @onDismiss={{this.resetModalErrors}} + @title={{this.errorTitle}} + @description={{this.errorDescription}} + /> + {{/if}} + </M.Body> + <M.Footer as |F|> + <Hds::ButtonSet> + <Hds::Button + @text={{if + this.changeDocumentStatus.isRunning + "Archiving..." 
+ "Yes, archive" + }} + @color="critical" + @icon={{if + this.changeDocumentStatus.isRunning + "loading" + "archive" + }} + disabled={{this.changeDocumentStatus.isRunning}} + {{on "click" (perform this.changeDocumentStatus "Obsolete")}} + /> + <Hds::Button + @text="Cancel" + @color="secondary" + disabled={{this.changeDocumentStatus.isRunning}} + {{on "click" F.close}} + /> + </Hds::ButtonSet> + </M.Footer> + </Hds::Modal> + {{/if}} + + {{#if this.deleteModalIsActive}} + <Hds::Modal @color="critical" @onClose={{this.closeDeleteModal}} as |M|> + <M.Header> + Delete document draft? + </M.Header> + <M.Body> + Are you sure you want to delete the draft + <strong>{{this.title}}</strong>? + + {{#if this.modalErrorIsShown}} + <ModalAlertError + @onDismiss={{this.resetModalErrors}} + @title={{this.errorTitle}} + @description={{this.errorDescription}} + /> + {{/if}} + </M.Body> + <M.Footer as |F|> + <Hds::ButtonSet> + <Hds::Button + @text={{if + this.deleteDraft.isRunning + "Deleting..." + "Yes, delete" + }} + @color="critical" + @icon={{if this.deleteDraft.isRunning "loading" "trash"}} + disabled={{this.deleteDraft.isRunning}} + {{on "click" (perform this.deleteDraft)}} + /> + <Hds::Button + @text="Cancel" + @color="secondary" + disabled={{this.deleteDraft.isRunning}} + {{on "click" F.close}} + /> + </Hds::ButtonSet> + </M.Footer> + </Hds::Modal> + {{/if}} + + {{#if this.requestReviewModalIsActive}} + <Hds::Modal @onClose={{this.closeRequestReviewModal}} as |M|> + <M.Header> + Request review? 
+ </M.Header> + <M.Body> + <p class="mb-4 pr-16 text-body-300 text-color-foreground-primary">This + will publish your document company-wide, and anyone you request + below will receive a notification to review.</p> + + <Hds::Form::Field @layout="vertical" as |F|> + <F.Control> + <Inputs::PeopleSelect + @renderInPlace={{true}} + @selected={{this.approvers}} + @onChange={{this.updateApprovers}} + class="mb-0" + /> + </F.Control> + <F.Label>Request reviews</F.Label> + </Hds::Form::Field> + + {{#if @docType.checks.length}} + {{! For now, we only support one check }} + {{#each (take 1 @docType.checks) as |check|}} + <div class="mt-3.5"> + <Hds::Form::Checkbox::Field + {{on "change" this.onDocTypeCheckboxChange}} + @checked={{this.docTypeCheckboxValue}} + required + as |F| + > + <F.Label> + {{check.label}} + </F.Label> + {{#if check.helperText}} + <F.HelperText> + {{check.helperText}} + </F.HelperText> + {{/if}} + </Hds::Form::Checkbox::Field> + </div> + {{#if check.links.length}} + <ul class="list-none pl-6 mt-1.5"> + {{#each check.links as |link|}} + <li> + <Hds::Link::Inline + @icon="external-link" + @iconPosition="trailing" + @hrefIsExternal={{true}} + @href={{link.url}} + class="no-underline text-body-100" + > + {{link.text}} + </Hds::Link::Inline> + </li> + {{/each}} + </ul> + {{/if}} + {{/each}} + + {{/if}} + + {{#if this.modalErrorIsShown}} + <ModalAlertError + @onDismiss={{this.resetModalErrors}} + @title={{this.errorTitle}} + @description={{this.errorDescription}} + /> + {{/if}} + </M.Body> + <M.Footer as |F|> + <Hds::ButtonSet> + <Hds::Button + {{! 
TODO: In the future may be use a flash message to show the
+                  "Takes about a minute to complete" message instead of the text in the button?}}
+                @text={{if
+                  this.requestReview.isRunning
+                  "Takes about a minute to complete"
+                  "Request Review"
+                }}
+                @color="primary"
+                @icon={{if this.requestReview.isRunning "loading"}}
+                disabled={{or
+                  this.requestReview.isRunning
+                  (and @docType.checks.length (not this.docTypeCheckboxValue))
+                }}
+                {{on "click" (perform this.requestReview)}}
+              />
+              <Hds::Button
+                @text="Cancel"
+                @color="secondary"
+                {{! removed duplicate disabled attr: @deleteDraftIsRunning is not a passed arg }}
+                disabled={{this.requestReview.isRunning}}
+                {{on "click" F.close}}
+              />
+            </Hds::ButtonSet>
+          </M.Footer>
+        </Hds::Modal>
+      {{/if}}
+
+    {{/in-element}}
+</aside>
diff --git a/web/app/components/sidebar.js b/web/app/components/sidebar.js
new file mode 100644
index 000000000..2212d678b
--- /dev/null
+++ b/web/app/components/sidebar.js
@@ -0,0 +1,419 @@
+import Component from "@glimmer/component";
+import { tracked } from "@glimmer/tracking";
+import { action } from "@ember/object";
+import { getOwner } from "@ember/application";
+import { inject as service } from "@ember/service";
+import { task } from "ember-concurrency";
+import { dasherize } from "@ember/string";
+
+export default class Sidebar extends Component {
+  @service("config") configSvc;
+  @service("fetch") fetchSvc;
+  @service router;
+  @service session;
+  @service flashMessages;
+
+  @tracked isCollapsed = false;
+  @tracked archiveModalIsActive = false;
+  @tracked deleteModalIsActive = false;
+  @tracked requestReviewModalIsActive = false;
+  @tracked docTypeCheckboxValue = false;
+  @tracked emailFields = ["approvers", "contributors"];
+
+  @tracked modalErrorIsShown = false;
+  @tracked errorTitle = null;
+  @tracked errorDescription = null;
+
+  get modalContainer() {
+    return document.querySelector(".ember-application");
+  }
+
+  get isDraft() {
+    return this.args.document?.isDraft;
+  }
+
+  get docID() {
+    return this.args.document?.objectID;
+ } + + // TODO: This state tracking could be improved with a document model + // (not necessarily, an ember data model, but some sort of tracking-aware + // class to stuff this in instead of passing a POJO around). + @tracked title = this.args.document.title || ""; + @tracked summary = this.args.document.summary || ""; + @tracked tags = this.args.document.tags || []; + + @tracked contributors = this.args.document.contributors || []; + @tracked approvers = this.args.document.approvers || []; + + get customEditableFields() { + let customEditableFields = this.args.document.customEditableFields || {}; + for (const field in customEditableFields) { + customEditableFields[field]["value"] = this.args.document[field]; + } + return customEditableFields; + } + + @action + collapseSidebar() { + this.isCollapsed = true; + } + + @action + expandSidebar() { + this.isCollapsed = false; + } + + // sidebarBodyIsShorter returns true in the case(s) where there are two + // vertically stacked buttons, making the body shorter. + get sidebarBodyIsShorter() { + // If you are an approver for a published FRD: + if ( + !this.isDraft && + !this.isOwner && + this.isApprover && + this.args.document.docType === "FRD" + ) { + return true; + } + + return false; + } + + get approveButtonText() { + if (!this.hasApproved) { + return "Approve"; + } else { + return "Already approved"; + } + } + + get shareButtonIsShown() { + return ( + !this.isDraft && + this.args.document.docNumber && + this.args.document.docType + ); + } + + get requestChangesButtonText() { + // FRDs are a special case that can be approved or not approved. 
+ if (this.args.document.docType === "FRD") { + if (!this.hasRequestedChanges) { + return "Not approved"; + } else { + return "Already not approved"; + } + } + + if (!this.hasRequestedChanges) { + return "Request changes"; + } else { + return "Already requested changes"; + } + } + + @action + onDocTypeCheckboxChange(event) { + this.docTypeCheckboxValue = event.target.checked; + } + + get moveToStatusButtonColor() { + switch (this.args.document.status) { + case "In-Review": + return "primary"; + default: + return "secondary"; + } + } + + // moveToStatusButtonTargetStatus returns the target status that the button + // will move a document to. + get moveToStatusButtonTargetStatus() { + switch (this.args.document.status) { + case "In-Review": + return "Approved"; + default: + return "In-Review"; + } + } + + get moveToStatusButtonText() { + if (this.changeDocumentStatus.isRunning) { + return "Working..."; + } + + return `Move to ${this.moveToStatusButtonTargetStatus}`; + } + + // isApprover returns true if the logged in user is a document approver. + get isApprover() { + return this.args.document.approvers?.some( + (e) => e.email === this.args.profile.email + ); + } + + get isContributor() { + return this.args.document.contributors?.some( + (e) => e.email === this.args.profile.email + ); + } + + // hasApproved returns true if the logged in user has approved the document. + get hasApproved() { + return this.args.document.approvedBy?.includes(this.args.profile.email); + } + + // hasRequestedChanges returns true if the logged in user has requested + // changes of the document. + get hasRequestedChanges() { + return this.args.document.changesRequestedBy?.includes( + this.args.profile.email + ); + } + + get docIsApproved() { + return this.args.document.status.toLowerCase() === "approved"; + } + + get docIsInReview() { + return dasherize(this.args.document.status) === "in-review"; + } + + // isOwner returns true if the logged in user is the document owner. 
+ get isOwner() { + return this.args.document.owners?.[0] === this.args.profile.email; + } + + get userHasEditPrivileges() { + return this.isOwner || this.isContributor || this.isApprover; + } + + get editingIsDisabled() { + if (!this.args.document.appCreated) { + // true is the doc wasn't appCreated or if the doc is Approved + return true; + } else if (this.isDraft || this.docIsInReview || this.docIsApproved) { + // true is the doc is a draft/in review/approved and the user is not an owner, contributor, or approver + return !this.userHasEditPrivileges; + } else { + // doc is obsolete or some unknown status.. + return true; + } + } + + get shortLinkBaseURL() { + return this.configSvc.config.short_link_base_url; + } + + @action refreshRoute() { + // We force refresh due to a bug with `refreshModel: true` + // See: https://github.com/emberjs/ember.js/issues/19260 + getOwner(this).lookup(`route:${this.router.currentRouteName}`).refresh(); + } + + @task + *save(field, val) { + if (field && val) { + const oldVal = this[field]; + this[field] = val; + + try { + const serializedValue = this.emailFields.includes(field) + ? val.map((p) => p.email) + : val; + yield this.patchDocument.perform({ [field]: serializedValue }); + } catch (err) { + // revert field value on failure + this[field] = oldVal; + } + } + } + + @task + *patchDocument(fields) { + const endpoint = this.isDraft ? "drafts" : "documents"; + + try { + yield this.fetchSvc.fetch(`/api/v1/${endpoint}/${this.docID}`, { + method: "PATCH", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(fields), + }); + } catch (err) { + this.showModalError("Error updating document", err); + throw err; + } + + this.refreshRoute(); + } + + @task + *requestReview() { + // Update approvers. + try { + yield this.patchDocument.perform({ + approvers: this.approvers.compact().mapBy("email"), + }); + } catch (err) { + this.showModalError("Error updating approvers", err); + throw err; + } + + // Create review. 
+ try { + yield this.fetchSvc.fetch(`/api/v1/reviews/${this.docID}`, { + method: "POST", + }); + // Add a notification for the user + this.flashMessages.add({ + message: "Document review requested", + title: "Done!", + type: "success", + timeout: 6000, + extendedTimeout: 1000, + }); + + this.router.transitionTo("authenticated.document", this.docID, { + queryParams: { draft: false }, + }); + + this.requestReviewModalIsActive = false; + } catch (err) { + this.showModalError("Error creating review", err); + } + + this.refreshRoute(); + } + + @task + *deleteDraft() { + try { + yield this.args.deleteDraft.perform(this.docID); + } catch (err) { + this.showModalError("Error deleting draft", err); + } + } + + @action + updateApprovers(approvers) { + this.approvers = approvers; + } + + @action + updateContributors(contributors) { + this.contributors = contributors; + } + + @action + updateCustomFieldValue(field, value) { + this.customEditableFields[field].value = value; + } + + @action + updateTags(tags) { + this.tags = tags; + } + + @action closeDeleteModal() { + this.deleteModalIsActive = false; + this.resetModalErrors(); + } + + @action closeRequestReviewModal() { + this.requestReviewModalIsActive = false; + this.resetModalErrors(); + } + + @action closeArchiveModal() { + this.archiveModalIsActive = false; + this.resetModalErrors(); + } + + @action resetModalErrors() { + this.modalErrorIsShown = false; + this.errorTitle = null; + this.errorDescription = null; + } + + @task + *approve(approver) { + try { + yield this.fetchSvc.fetch(`/api/v1/approvals/${this.docID}`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + }); + + // Add a notification for the user + this.flashMessages.add({ + message: "Document approved", + title: "Done!", + type: "success", + timeout: 6000, + extendedTimeout: 1000, + }); + } catch (err) { + this.showModalError("Error approving document", err); + } + + this.refreshRoute(); + } + + @task + *requestChanges(approver) { + try 
{ + yield this.fetchSvc.fetch(`/api/v1/approvals/${this.docID}`, { + method: "DELETE", + headers: { "Content-Type": "application/json" }, + }); + // Add a notification for the user + let msg = "Requested changes for document"; + // FRDs are a special case that can be approved or not approved. + if (this.args.document.docType === "FRD") { + msg = "Document marked as not approved"; + } + this.flashMessages.add({ + message: msg, + title: "Done!", + type: "success", + timeout: 6000, + extendedTimeout: 1000, + }); + } catch (err) { + this.showModalError("Error requesting changes of document", err); + } + + this.refreshRoute(); + } + + @task + *changeDocumentStatus(status) { + try { + yield this.patchDocument.perform({ + status: status, + }); + + // Add a notification for the user + this.flashMessages.add({ + message: `Document status changed to "${status}"`, + title: "Done!", + type: "success", + timeout: 6000, + extendedTimeout: 1000, + }); + + this.archiveModalIsActive = false; + } catch (err) { + this.showModalError(`Error marking document status as ${status}`, err); + throw err; + } + + this.refreshRoute(); + } + + showModalError(errMsg, error) { + this.modalErrorIsShown = true; + this.errorTitle = errMsg; + this.errorDescription = error; + } +} diff --git a/web/app/components/x/hds-tab.hbs b/web/app/components/x/hds-tab.hbs new file mode 100644 index 000000000..8ae13f894 --- /dev/null +++ b/web/app/components/x/hds-tab.hbs @@ -0,0 +1,17 @@ +<div class="x-hds-tab {{if @isSelected 'x-hds-tab--selected'}}"> + {{#if @action}} + <button {{on "click" @action}} class="x-hds-tab--button"> + <FlightIcon @name={{@icon}} /> + {{#unless @iconOnly}} + <span>{{@label}}</span> + {{/unless}} + </button> + {{else}} + <LinkTo @route={{@link}} @query={{hds-link-to-query @query}} class="x-hds-tab--link"> + <FlightIcon @name={{@icon}} /> + {{#unless @iconOnly}} + <span>{{@label}}</span> + {{/unless}} + </LinkTo> + {{/if}} +</div> diff --git a/web/app/config/environment.d.ts 
b/web/app/config/environment.d.ts new file mode 100644 index 000000000..1e946d3f4 --- /dev/null +++ b/web/app/config/environment.d.ts @@ -0,0 +1,26 @@ +/** + * Type declarations for + * import config from 'my-app/config/environment' + */ +declare const config: { + environment: string; + modulePrefix: string; + podModulePrefix: string; + locationType: 'history' | 'hash' | 'none' | 'auto'; + rootURL: string; + APP: Record<string, unknown>; + algolia: { + appID: string; + apiKey: string; + docsIndexName: string; + draftsIndexName: string; + internalIndexName: string; + }; + featureFlags: Record<string, boolean>; + google: { + docFolders: string; + }; + shortLinkBaseURL: string; +}; + +export default config; diff --git a/web/app/controllers/.gitkeep b/web/app/controllers/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/web/app/controllers/application.js b/web/app/controllers/application.js new file mode 100644 index 000000000..041077aac --- /dev/null +++ b/web/app/controllers/application.js @@ -0,0 +1,8 @@ +import Controller from "@ember/controller"; +import { inject as service } from "@ember/service"; +import config from "../config/environment"; + +export default class ApplicationController extends Controller { + @service session; + config = config.torii.providers["google-oauth2-bearer"]; +} diff --git a/web/app/controllers/authenticate.ts b/web/app/controllers/authenticate.ts new file mode 100644 index 000000000..f5e56bc28 --- /dev/null +++ b/web/app/controllers/authenticate.ts @@ -0,0 +1,39 @@ +import Controller from "@ember/controller"; +import { inject as service } from "@ember/service"; +import { action } from "@ember/object"; +import RouterService from "@ember/routing/router-service"; +import Transition from "@ember/routing/transition"; + +export default class AuthenticateController extends Controller { + @service declare router: RouterService; + @service declare session: any; + + previousTransition: Transition | null = null; + + protected 
get currentYear(): number { + return new Date().getFullYear(); + } + + @action protected authenticate(): void { + this.session.authenticate("authenticator:torii", "google-oauth2-bearer"); + + // Capture the previousTransition locally if it exists + let _previousTransition = this.previousTransition; + + if (_previousTransition) { + // Clear the previousTransition class property + this.previousTransition = null; + + // Retry the initial transition + _previousTransition.retry(); + } else { + this.router.transitionTo("authenticated.dashboard"); + } + } +} + +declare module "@ember/controller" { + interface Registry { + authenticate: AuthenticateController; + } +} diff --git a/web/app/controllers/authenticated/all.ts b/web/app/controllers/authenticated/all.ts new file mode 100644 index 000000000..ed81e94d5 --- /dev/null +++ b/web/app/controllers/authenticated/all.ts @@ -0,0 +1,11 @@ +import Controller from "@ember/controller"; + +export default class AuthenticatedAllController extends Controller { + queryParams = ["docType", "owners", "page", "product", "sortBy", "status"]; + docType = []; + owners = []; + page = 1; + product = []; + sortBy = "dateDesc"; + status = []; +} diff --git a/web/app/controllers/authenticated/dashboard.js b/web/app/controllers/authenticated/dashboard.js new file mode 100644 index 000000000..6457e8f54 --- /dev/null +++ b/web/app/controllers/authenticated/dashboard.js @@ -0,0 +1,15 @@ +import Controller from "@ember/controller"; +import { alias } from "@ember/object/computed"; +import { inject as service } from "@ember/service"; + +export default class AuthenticatedDashboardController extends Controller { + @alias("model.docsWaitingForReview") docsWaitingForReview; + @alias("model.recentlyViewedDocs") recentlyViewedDocs; + + @service router; + @service authenticatedUser; + @service("config") configSvc; + + queryParams = ["latestUpdates"]; + latestUpdates = "newDocs"; +} diff --git a/web/app/controllers/authenticated/document.js 
b/web/app/controllers/authenticated/document.js new file mode 100644 index 000000000..432cec5b3 --- /dev/null +++ b/web/app/controllers/authenticated/document.js @@ -0,0 +1,6 @@ +import Controller from "@ember/controller"; + +export default class AuthenticatedDocumentController extends Controller { + queryParams = ["draft"]; + draft = false; +} diff --git a/web/app/controllers/authenticated/drafts.ts b/web/app/controllers/authenticated/drafts.ts new file mode 100644 index 000000000..4f5825bd5 --- /dev/null +++ b/web/app/controllers/authenticated/drafts.ts @@ -0,0 +1,11 @@ +import Controller from "@ember/controller"; + +export default class AuthenticatedDraftsController extends Controller { + queryParams = ["docType", "owners", "page", "product", "sortBy", "status"]; + docType = []; + page = 1; + owners = []; + product = []; + sortBy = "dateDesc"; + status = []; +} diff --git a/web/app/controllers/authenticated/my.ts b/web/app/controllers/authenticated/my.ts new file mode 100644 index 000000000..c96de9ea0 --- /dev/null +++ b/web/app/controllers/authenticated/my.ts @@ -0,0 +1,11 @@ +import Controller from "@ember/controller"; + +export default class AuthenticatedMyController extends Controller { + queryParams = ["docType", "owners", "page", "product", "sortBy", "status"]; + docType = []; + page = 1; + owners = []; + product = []; + sortBy = "dateDesc"; + status = []; +} diff --git a/web/app/controllers/authenticated/new/doc.js b/web/app/controllers/authenticated/new/doc.js new file mode 100644 index 000000000..180a7e5b4 --- /dev/null +++ b/web/app/controllers/authenticated/new/doc.js @@ -0,0 +1,11 @@ +import Controller from "@ember/controller"; +import { inject as service } from "@ember/service"; +import { action } from "@ember/object"; + +export default class AuthenticatedNewDocController extends Controller { + @service router; + + queryParams = ["docType"]; + + +} diff --git a/web/app/controllers/authenticated/results.ts 
b/web/app/controllers/authenticated/results.ts
new file mode 100644
index 000000000..94bc28455
--- /dev/null
+++ b/web/app/controllers/authenticated/results.ts
@@ -0,0 +1,12 @@
+import Controller from "@ember/controller";
+
+export default class AuthenticatedResultsController extends Controller {
+  queryParams = ["docType", "owners", "page", "product", "q", "status"];
+
+  docType = [];
+  page = 1;
+  q = null;
+  owners = [];
+  product = [];
+  status = [];
+}
diff --git a/web/app/helpers/.gitkeep b/web/app/helpers/.gitkeep
new file mode 100644
index 000000000..e69de29bb
diff --git a/web/app/helpers/add.js b/web/app/helpers/add.js
new file mode 100644
index 000000000..3fbfb5878
--- /dev/null
+++ b/web/app/helpers/add.js
@@ -0,0 +1,7 @@
+import { helper } from "@ember/component/helper";
+
+export default helper(function add([first, second]) {
+  const firstInt = parseInt(first);
+  const secondInt = parseInt(second);
+  return firstInt + secondInt;
+});
diff --git a/web/app/helpers/get-first-letter.js b/web/app/helpers/get-first-letter.js
new file mode 100644
index 000000000..c81168d1c
--- /dev/null
+++ b/web/app/helpers/get-first-letter.js
@@ -0,0 +1,10 @@
+import { helper } from "@ember/component/helper";
+
+function getFirstLetter([string]) {
+  if (typeof string === "string") {
+    return string.match(/[a-zA-Z]/)?.[0] ?? null
+  }
+  return null;
+}
+
+export default helper(getFirstLetter);
diff --git a/web/app/helpers/get-product-id.js b/web/app/helpers/get-product-id.js
new file mode 100644
index 000000000..5e2fa0cdb
--- /dev/null
+++ b/web/app/helpers/get-product-id.js
@@ -0,0 +1,5 @@
+import { helper } from "@ember/component/helper";
+import getProductId from "hermes/utils/get-product-id";
+export default helper(([productName]) => {
+  return getProductId(productName);
+});
diff --git a/web/app/helpers/lowercase.js b/web/app/helpers/lowercase.js
new file mode 100644
index 000000000..5b29b8ada
--- /dev/null
+++ b/web/app/helpers/lowercase.js
@@ -0,0 +1,7 @@
+import { helper } from
"@ember/component/helper"; + +function lowercase(string) { + return string.toString().toLowerCase(); +} + +export default helper(lowercase); diff --git a/web/app/helpers/parse-date.ts b/web/app/helpers/parse-date.ts new file mode 100644 index 000000000..de5c0f404 --- /dev/null +++ b/web/app/helpers/parse-date.ts @@ -0,0 +1,6 @@ +import { helper } from "@ember/component/helper"; +import parseDate from "hermes/utils/parse-date"; + +export default helper(([time]: [string | number | Date | undefined]) => { + return parseDate(time); +}); diff --git a/web/app/helpers/uid.js b/web/app/helpers/uid.js new file mode 100644 index 000000000..6fe102c4b --- /dev/null +++ b/web/app/helpers/uid.js @@ -0,0 +1,17 @@ +import { helper } from "@ember/component/helper"; +import { guidFor } from "@ember/object/internals"; + +/* + * Returns a unique id that contains the provided label and guid from the salt + * + * @salt: An object to generate a salt value from (using guidFor) + * @label: A human-readable label + * + * @example + * {{uid this "title"}} + * + * "title-ember123912" + */ +export default helper(([salt, label]) => { + return `${label}-${guidFor(salt)}`; +}); diff --git a/web/app/index.html b/web/app/index.html new file mode 100644 index 000000000..0e59d639e --- /dev/null +++ b/web/app/index.html @@ -0,0 +1,24 @@ +<!DOCTYPE html> +<html lang="en"> + <head> + <meta charset="utf-8"> + <meta http-equiv="X-UA-Compatible" content="IE=edge"> + <title>Hermes + + + + {{content-for "head"}} + + + + {{content-for "head-footer"}} + + + {{content-for "body"}} + + + + + {{content-for "body-footer"}} + + diff --git a/web/app/initializers/custom-inflector-rules.js b/web/app/initializers/custom-inflector-rules.js new file mode 100644 index 000000000..9d3faa7db --- /dev/null +++ b/web/app/initializers/custom-inflector-rules.js @@ -0,0 +1,13 @@ +import Inflector from "ember-inflector"; + +export function initialize() { + const inflector = Inflector.inflector; + + // Don't pluralize the document 
route when using a dynamic segment. + inflector.uncountable("document"); +} + +export default { + name: "custom-inflector-rules", + initialize, +}; diff --git a/web/app/models/.gitkeep b/web/app/models/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/web/app/models/document.js b/web/app/models/document.js new file mode 100644 index 000000000..8318b1e9f --- /dev/null +++ b/web/app/models/document.js @@ -0,0 +1,3 @@ +import Model, { attr } from "@ember-data/model"; + +export default class DocumentModel extends Model {} diff --git a/web/app/models/google/drive/file.js b/web/app/models/google/drive/file.js new file mode 100644 index 000000000..d5773f216 --- /dev/null +++ b/web/app/models/google/drive/file.js @@ -0,0 +1,11 @@ +import Model, { attr } from "@ember-data/model"; + +export default class GoogleDriveFileModel extends Model { + @attr("string") createdTime; + @attr lastModifyingUser; + @attr("string") modifiedTime; + @attr("boolean") ownedByMe; + @attr owners; + @attr("string") name; + @attr("string") thumbnailLink; +} diff --git a/web/app/models/google/userinfo/me.js b/web/app/models/google/userinfo/me.js new file mode 100644 index 000000000..d11711626 --- /dev/null +++ b/web/app/models/google/userinfo/me.js @@ -0,0 +1,8 @@ +import Model, { attr } from "@ember-data/model"; + +export default class GoogleUserinfoMeModel extends Model { + @attr("string") email; + @attr("string") given_name; + @attr("string") name; + @attr("string") picture; +} diff --git a/web/app/router.js b/web/app/router.js new file mode 100644 index 000000000..e1dfa25e0 --- /dev/null +++ b/web/app/router.js @@ -0,0 +1,23 @@ +import EmberRouter from "@ember/routing/router"; +import config from "hermes/config/environment"; + +export default class Router extends EmberRouter { + location = config.locationType; + rootURL = config.rootURL; +} + +Router.map(function () { + this.route("authenticated", { path: "/" }, function () { + this.route("all"); + this.route("dashboard"); + 
this.route("document", { path: "/document/:document_id" }); + this.route("drafts"); + this.route("my"); + this.route("results"); + this.route("settings"); + this.route("new", function () { + this.route("doc"); + }); + }); + this.route("authenticate"); +}); diff --git a/web/app/routes/.gitkeep b/web/app/routes/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/web/app/routes/application.js b/web/app/routes/application.js new file mode 100644 index 000000000..da21c133f --- /dev/null +++ b/web/app/routes/application.js @@ -0,0 +1,41 @@ +import Route from "@ember/routing/route"; +import { UnauthorizedError } from "@ember-data/adapter/error"; +import { action } from "@ember/object"; +import config from "hermes/config/environment"; +import { inject as service } from "@ember/service"; + +export default class ApplicationRoute extends Route { + @service config; + @service("fetch") fetchSvc; + @service flags; + @service session; + + @action + error(error) { + if (error instanceof UnauthorizedError) { + this.session.invalidate(); + return; + } + } + + async beforeModel() { + this.session.setup(); + + // Flags read from the environment and set properties on the service this + // could be done in an initializer, but this seems more natural these days + this.flags.initialize(); + + // Get web config from backend if this is a production build. 
+ if (config.environment === "production") { + return this.fetchSvc + .fetch("/api/v1/web/config") + .then((response) => response.json()) + .then((json) => { + this.config.setConfig(json); + }) + .catch((err) => { + console.log("Error fetching and setting web config: " + err); + }); + } + } +} diff --git a/web/app/routes/authenticate.ts b/web/app/routes/authenticate.ts new file mode 100644 index 000000000..2e7531056 --- /dev/null +++ b/web/app/routes/authenticate.ts @@ -0,0 +1,10 @@ +import Route from "@ember/routing/route"; +import { inject as service } from "@ember/service"; + +export default class AuthenticateRoute extends Route { + @service declare session: any; + + beforeModel() { + this.session.prohibitAuthentication("/"); + } +} diff --git a/web/app/routes/authenticated.ts b/web/app/routes/authenticated.ts new file mode 100644 index 000000000..59e11a5e4 --- /dev/null +++ b/web/app/routes/authenticated.ts @@ -0,0 +1,34 @@ +import Route from "@ember/routing/route"; +import { inject as service } from "@ember/service"; +import ConfigService from "hermes/services/config"; +import AuthenticateController from "hermes/controllers/authenticate"; +import AuthenticatedUserService from "hermes/services/authenticated-user"; +import Transition from "@ember/routing/transition"; + +export default class AuthenticatedRoute extends Route { + @service declare session: any; + @service declare authenticatedUser: AuthenticatedUserService; + @service("config") declare configSvc: ConfigService; + + async afterModel(): Promise { + // Load user info + await this.authenticatedUser.loadInfo.perform(); + } + + async beforeModel(transition: Transition): Promise { + // Check if the request requires authentication and if so, preserve the URL + let requireAuthentication = this.session.requireAuthentication( + transition, + "authenticate" + ); + + if (!requireAuthentication && transition.to.name != "authenticated.index") { + let authenticateController = this.controllerFor( + "authenticate" + 
) as AuthenticateController; + + // Set previous transition to preserve URL + authenticateController.previousTransition = transition; + } + } +} diff --git a/web/app/routes/authenticated/all.ts b/web/app/routes/authenticated/all.ts new file mode 100644 index 000000000..2322e4e38 --- /dev/null +++ b/web/app/routes/authenticated/all.ts @@ -0,0 +1,44 @@ +import Route from "@ember/routing/route"; +import RSVP from "rsvp"; +import { inject as service } from "@ember/service"; +import ConfigService from "hermes/services/config"; +import AlgoliaService from "hermes/services/algolia"; +import { DocumentsRouteParams } from "hermes/types/document-routes"; + +export default class AllRoute extends Route { + @service declare algolia: AlgoliaService; + @service("config") declare configSvc: ConfigService; + + queryParams = { + docType: { + refreshModel: true, + }, + owners: { + refreshModel: true, + }, + page: { + refreshModel: true, + }, + product: { + refreshModel: true, + }, + sortBy: { + refreshModel: true, + }, + status: { + refreshModel: true, + }, + }; + + async model(params: DocumentsRouteParams) { + const searchIndex = + params.sortBy === "dateAsc" + ? 
this.configSvc.config.algolia_docs_index_name + "_createdTime_asc" + : this.configSvc.config.algolia_docs_index_name + "_createdTime_desc"; + + return RSVP.hash({ + facets: this.algolia.getFacets.perform(searchIndex, params), + results: this.algolia.getDocResults.perform(searchIndex, params), + }); + } +} diff --git a/web/app/routes/authenticated/dashboard.js b/web/app/routes/authenticated/dashboard.js new file mode 100644 index 000000000..087f38eff --- /dev/null +++ b/web/app/routes/authenticated/dashboard.js @@ -0,0 +1,138 @@ +import Route from "@ember/routing/route"; +import RSVP from "rsvp"; +import { inject as service } from "@ember/service"; +import timeAgo from "hermes/utils/time-ago"; + +export default class DashboardRoute extends Route { + @service algolia; + @service("config") configSvc; + @service("fetch") fetchSvc; + @service("recently-viewed-docs") recentDocs; + @service session; + @service authenticatedUser; + + queryParams = { + latestUpdates: { + refreshModel: true, + replace: true, + }, + }; + + resetController(controller, isExiting, transition) { + if (isExiting) { + controller.set("latestUpdates", "newDocs"); + } + } + + async model(params) { + // Create facet filter for recently updated docs depending on the selected + // "Latest updates" tab. + let facetFilter = ""; + if (params.latestUpdates == "approved") { + facetFilter = "status:approved"; + } else if (params.latestUpdates == "inReview") { + facetFilter = "status:In-Review"; + } + + const userInfo = this.authenticatedUser.info; + + const docsWaitingForReview = this.algolia.searchIndex + .perform(this.configSvc.config.algolia_docs_index_name, "", { + filters: + `approvers:'${userInfo.email}'` + + ` AND NOT approvedBy:'${userInfo.email}'` + + " AND appCreated:true" + + " AND status:In-Review", + hitsPerPage: 4, + }) + .then((result) => { + // Add modifiedAgo for each doc. 
+ for (const hit of result.hits) { + this.fetchSvc + .fetch("/api/v1/documents/" + hit.objectID) + .then((resp) => resp.json()) + .then((doc) => { + if (doc.modifiedTime) { + const modifiedDate = new Date(doc.modifiedTime * 1000); + hit.modifiedAgo = `Modified ${timeAgo(modifiedDate)}`; + } + }) + .catch((err) => { + console.log( + `Error getting document waiting for review (${hit.objectID}):`, + err + ); + }); + } + return result.hits; + }); + + // Get recently viewed docs from app data. + const recentlyViewedDocIDs = await this.recentDocs.get.perform(); + + // For each recently viewed doc (max 4 docs), fetch doc metadata from the + // app backend. + const recentlyViewedPromises = recentlyViewedDocIDs + .slice(0, 4) + .map((docID) => + this.fetchSvc + .fetch("/api/v1/documents/" + docID) + .then((resp) => resp.json()) + .catch((err) => { + console.log( + `Error getting recently updated document (${hit.objectID}):`, + err + ); + }) + ); + + // Create promise for all Algolia recently viewed docs promises. + const recentlyViewedDocsPromise = Promise.allSettled( + recentlyViewedPromises + ); + + // Create array of docs that we also indexed in Algolia. + // We can't display documents without this data. + const recentlyViewedDocs = recentlyViewedDocsPromise.then((promises) => { + let recentlyViewedDocs = []; + + promises.forEach((promise, index) => { + if (promise.status == "fulfilled") { + let doc = promise.value; + doc.modifiedAgo = `Modified ${timeAgo( + new Date(doc.modifiedTime * 1000) + )}`; + + recentlyViewedDocs.push(promise.value); + } + }); + + return recentlyViewedDocs; + }); + + return RSVP.hash({ + docsWaitingForReview: docsWaitingForReview, + recentlyViewedDocs: recentlyViewedDocs, + }); + } + + /** + * Builds a parent query string for searching for Google files. The folders + * parameter is an array of all folder ID strings to search. 
+ */ + buildParentsQuery(folders) { + let parentsQuery = ""; + if (folders.length > 0) { + parentsQuery += " and ("; + folders.forEach((folder, index) => { + if (index == 0) { + parentsQuery += `'${folder}' in parents`; + } else { + parentsQuery += ` or '${folder}' in parents`; + } + }); + parentsQuery += ")"; + } + return parentsQuery; + } +} diff --git a/web/app/routes/authenticated/document.js b/web/app/routes/authenticated/document.js new file mode 100644 index 000000000..1ba208f98 --- /dev/null +++ b/web/app/routes/authenticated/document.js @@ -0,0 +1,150 @@ +import Route from "@ember/routing/route"; +import { inject as service } from "@ember/service"; +import timeAgo from "hermes/utils/time-ago"; +import RSVP from "rsvp"; +import parseDate from "hermes/utils/parse-date"; + +const serializePeople = (people) => + people.map((p) => ({ + email: p.emailAddresses[0].value, + imgURL: p.photos?.[0]?.url, + })); + +export default class DocumentRoute extends Route { + @service algolia; + @service("config") configSvc; + @service("fetch") fetchSvc; + @service("recently-viewed-docs") recentDocs; + @service session; + @service flashMessages; + @service router; + + // Ideally we'd refresh the model when the draft query param changes, but + // because of a suspected bug in Ember, we can't do that. + // https://github.com/emberjs/ember.js/issues/19260 + // queryParams = { + // draft: { + // refreshModel: true, + // }, + // }; + + async model(params) { + let doc = {}; + + // Get doc data from the app backend. 
+ if (params.draft) { + try { + doc = await this.fetchSvc + .fetch("/api/v1/drafts/" + params.document_id) + .then((r) => r.json()); + doc.isDraft = params.draft; + } catch (err) { + const errorMessage = `Failed to get document draft: ${err}`; + + this.flashMessages.add({ + message: errorMessage, + title: "Error", + type: "critical", + sticky: true, + extendedTimeout: 1000, + }); + + // Transition to dashboard + this.router.transitionTo("authenticated.dashboard"); + throw new Error(errorMessage); + } + } else { + try { + doc = await this.fetchSvc + .fetch("/api/v1/documents/" + params.document_id, { + method: "GET", + headers: { + // We set this header to differentiate between document views and + // requests to only retrieve document metadata. + "Add-To-Recently-Viewed": "true", + }, + }) + .then((r) => r.json()); + doc.isDraft = params.draft; + } catch (err) { + const errorMessage = `Failed to get document: ${err}`; + + this.flashMessages.add({ + message: errorMessage, + title: "Error", + type: "critical", + sticky: true, + extendedTimeout: 1000, + }); + + // Transition to dashboard + this.router.transitionTo("authenticated.dashboard"); + throw new Error(errorMessage); + } + } + if (!!doc.createdTime) { + doc.createdDate = + parseDate(doc.createdTime * 1000) + + ` (${timeAgo(new Date(doc.createdTime * 1000))})`; + } + + // Build strings for created and last-modified. + doc.lastModified = `${timeAgo(new Date(doc.modifiedTime * 1000))}`; + + // Record analytics. + try { + await this.fetchSvc.fetch("/api/v1/web/analytics", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + document_id: params.document_id, + product_name: doc.product, + }), + }); + } catch (err) { + console.log("Error recording analytics: " + err); + } + + // If not a draft, record the doc as recently viewed. 
+ if (!params.draft) { + this.recentDocs.addDoc.perform(params.document_id); + } + + // Load the document as well as the logged in user info + + // Preload avatars for all approvers in the Algolia index. + if (doc.contributors?.length) { + const contributors = await this.fetchSvc + .fetch(`/api/v1/people?emails=${doc.contributors.join(",")}`) + .then((r) => r.json()); + + if (contributors) { + doc.contributors = serializePeople(contributors); + } else { + doc.contributors = []; + } + } + if (doc.approvers?.length) { + const approvers = await this.fetchSvc + .fetch(`/api/v1/people?emails=${doc.approvers.join(",")}`) + .then((r) => r.json()); + + if (approvers) { + doc.approvers = serializePeople(approvers); + } else { + doc.approvers = []; + } + } + + let docTypes = await this.fetchSvc + .fetch("/api/v1/document-types") + .then((r) => r.json()); + + let docType = docTypes.find((docType) => docType.name === doc.docType); + + return RSVP.hash({ + doc, + docType, + }); + } +} diff --git a/web/app/routes/authenticated/drafts.ts b/web/app/routes/authenticated/drafts.ts new file mode 100644 index 000000000..8cd987511 --- /dev/null +++ b/web/app/routes/authenticated/drafts.ts @@ -0,0 +1,140 @@ +import Route from "@ember/routing/route"; +import RSVP from "rsvp"; +import { inject as service } from "@ember/service"; +import AlgoliaService, { + AlgoliaFacetsObject, + AlgoliaSearchParams, + FACET_NAMES, + HITS_PER_PAGE, + MAX_VALUES_PER_FACET, +} from "hermes/services/algolia"; +import { DocumentsRouteParams } from "hermes/types/document-routes"; +import { FacetRecords } from "hermes/types/facets"; +import AuthenticatedUserService from "hermes/services/authenticated-user"; +import { task } from "ember-concurrency"; +import FetchService from "hermes/services/fetch"; +import { assert } from "@ember/debug"; + +interface DraftResponseJSON { + facets: AlgoliaFacetsObject; + Hits: Array; // Documents, not yet typed + params: string; + page: number; +} + +export default class 
DraftsRoute extends Route { + @service declare algolia: AlgoliaService; + @service declare authenticatedUser: AuthenticatedUserService; + @service("fetch") declare fetchSvc: FetchService; + + queryParams = { + docType: { + refreshModel: true, + }, + owners: { + refreshModel: true, + }, + page: { + refreshModel: true, + }, + product: { + refreshModel: true, + }, + sortBy: { + refreshModel: true, + }, + status: { + refreshModel: true, + }, + }; + + /** + * Generates a URLSearchParams object for the drafts endpoint. + */ + private createDraftURLSearchParams( + params: AlgoliaSearchParams, + ownerFacetOnly: boolean + ): URLSearchParams { + /** + * In the case of facets, we want to filter by just the owner facet. + * In the case of documents, we want to filter by all facets. + */ + let facetFilters = ownerFacetOnly + ? [`owners:${this.authenticatedUser.info.email}`] + : this.algolia.buildFacetFilters(params); + + return new URLSearchParams( + Object.entries({ + facets: FACET_NAMES, + hitsPerPage: HITS_PER_PAGE, + maxValuesPerFacet: MAX_VALUES_PER_FACET, + facetFilters: facetFilters, + page: params.page ? params.page - 1 : 0, + sortBy: params["sortBy"], + ownerEmail: this.authenticatedUser.info.email, + }) + .map(([key, val]) => `${key}=${val}`) + .join("&") + ); + } + + /** + * Fetches draft doc information based on searchParams and the current user. + */ + private getDraftResults = task( + async ( + params: AlgoliaSearchParams, + ownerFacetOnly = false + ): Promise => { + try { + let response = await this.fetchSvc + .fetch( + "/api/v1/drafts?" + + this.createDraftURLSearchParams(params, ownerFacetOnly) + ) + .then((response) => response.json()); + return response; + } catch (e: unknown) { + console.error(e); + } + } + ); + /** + * Gets facets for the drafts page. Scoped to the current user. + * FIXME: These facets compound as "OR" rather than "AND". 
+ * FIXME: Facet counts should update when a facet is selected + */ + private getDraftFacets = task( + async (params: AlgoliaSearchParams): Promise => { + try { + let algoliaFacets = await this.getDraftResults.perform(params, true); + assert("getDraftFacets expects algoliaFacets to exist", algoliaFacets); + + /** + * Map the facets to a new object with additional nested properties + */ + let facets: FacetRecords = this.algolia.mapStatefulFacetKeys( + algoliaFacets.facets + ); + + Object.entries(facets).forEach(([name, facet]) => { + /** + * e.g., name === "product" + * e.g., facet === { "Vault": { count: 1, selected: false }} + */ + this.algolia.markSelected(facet, params[name]); + }); + return facets; + } catch (e) { + console.error(e); + } + } + ); + + async model(params: DocumentsRouteParams) { + return RSVP.hash({ + facets: this.getDraftFacets.perform(params), + results: this.getDraftResults.perform(params), + }); + } +} diff --git a/web/app/routes/authenticated/index.js b/web/app/routes/authenticated/index.js new file mode 100644 index 000000000..220b39f77 --- /dev/null +++ b/web/app/routes/authenticated/index.js @@ -0,0 +1,10 @@ +import Route from "@ember/routing/route"; +import { inject as service } from "@ember/service"; + +export default class AuthenticatedIndexRoute extends Route { + @service router; + + redirect() { + this.router.replaceWith("authenticated.dashboard"); + } +} diff --git a/web/app/routes/authenticated/my.ts b/web/app/routes/authenticated/my.ts new file mode 100644 index 000000000..fca8cd716 --- /dev/null +++ b/web/app/routes/authenticated/my.ts @@ -0,0 +1,43 @@ +import Route from "@ember/routing/route"; +import RSVP from "rsvp"; +import { inject as service } from "@ember/service"; +import ConfigService from "hermes/services/config"; +import { DocumentsRouteParams } from "hermes/types/document-routes"; +import AlgoliaService from "hermes/services/algolia"; + +export default class AuthenticatedMyRoute extends Route { + @service declare 
algolia: AlgoliaService; + @service("config") declare configSvc: ConfigService; + + queryParams = { + docType: { + refreshModel: true, + }, + owners: { + refreshModel: true, + }, + page: { + refreshModel: true, + }, + product: { + refreshModel: true, + }, + sortBy: { + refreshModel: true, + }, + status: { + refreshModel: true, + }, + }; + + async model(params: DocumentsRouteParams) { + const searchIndex = + params.sortBy === "dateAsc" + ? this.configSvc.config.algolia_docs_index_name + "_createdTime_asc" + : this.configSvc.config.algolia_docs_index_name + "_createdTime_desc"; + return RSVP.hash({ + facets: this.algolia.getFacets.perform(searchIndex, params, true), + results: this.algolia.getDocResults.perform(searchIndex, params, true), + }); + } +} diff --git a/web/app/routes/authenticated/new/doc.js b/web/app/routes/authenticated/new/doc.js new file mode 100644 index 000000000..ad2204138 --- /dev/null +++ b/web/app/routes/authenticated/new/doc.js @@ -0,0 +1,65 @@ +import Route from "@ember/routing/route"; +import RSVP from "rsvp"; +import { inject as service } from "@ember/service"; + +export default class AuthenticatedNewDocRoute extends Route { + @service("fetch") fetchSvc; + @service flashMessages; + @service router; + + queryParams = { + docType: { + refreshModel: true, + }, + }; + + async model(params) { + // Validate docType. 
+ switch (params.docType) { + case "FRD": + case "PRD": + case "RFC": + break; + default: + this.flashMessages.add({ + message: `Invalid document type: ${params.docType}`, + title: "Invalid document type", + type: "critical", + timeout: 7000, + extendedTimeout: 1000, + }); + this.router.transitionTo("authenticated.new"); + } + + return RSVP.hash({ + docType: params?.docType, + productAbbrevMappings: this.getProductAbbrevMappings(), + }); + } + + async getProductAbbrevMappings() { + const products = await this.fetchSvc + .fetch("/api/v1/products") + .then((resp) => resp.json()) + .catch((err) => { + console.log(`Error requesting products: ${err}`); + }); + + // Sort product names alphabetically + const sortedProducts = Object.keys(products) + .sort() + .reduce((accum, key) => { + accum[key] = products[key]; + return accum; + }, {}); + + // Convert to map of product or area name + // and abbreviation to make look ups easier + const productAbbrevMappings = new Map(); + Object.keys(sortedProducts).forEach((key) => { + productAbbrevMappings.set(key, products[key].abbreviation); + }); + + return productAbbrevMappings; + } +} diff --git a/web/app/routes/authenticated/new/index.js b/web/app/routes/authenticated/new/index.js new file mode 100644 index 000000000..0893fb0e6 --- /dev/null +++ b/web/app/routes/authenticated/new/index.js @@ -0,0 +1,12 @@ +import Route from "@ember/routing/route"; +import { inject as service } from "@ember/service"; + +export default class AuthenticatedNewIndexRoute extends Route { + @service("fetch") fetchSvc; + + async model() { + return await this.fetchSvc + .fetch("/api/v1/document-types") + .then((r) => r.json()); + } +} diff --git a/web/app/routes/authenticated/results.ts b/web/app/routes/authenticated/results.ts new file mode 100644 index 000000000..d9f711e16 --- /dev/null +++ b/web/app/routes/authenticated/results.ts @@ -0,0 +1,41 @@ +import Route from "@ember/routing/route"; +import RSVP from "rsvp"; +import { inject as service } from 
"@ember/service"; +import AlgoliaService from "hermes/services/algolia"; +import ConfigService from "hermes/services/config"; +import { ResultsRouteParams } from "hermes/types/document-routes"; + +export default class ResultsRoute extends Route { + @service declare algolia: AlgoliaService; + @service("config") declare configSvc: ConfigService; + + queryParams = { + docType: { + refreshModel: true, + }, + owners: { + refreshModel: true, + }, + page: { + refreshModel: true, + }, + product: { + refreshModel: true, + }, + status: { + refreshModel: true, + }, + }; + + async model(params: ResultsRouteParams) { + const searchIndex = + params.sortBy === "dateAsc" + ? this.configSvc.config.algolia_docs_index_name + "_createdTime_asc" + : this.configSvc.config.algolia_docs_index_name + "_createdTime_desc"; + + return RSVP.hash({ + facets: this.algolia.getFacets.perform(searchIndex, params), + results: this.algolia.getDocResults.perform(searchIndex, params), + }); + } +} diff --git a/web/app/routes/authenticated/settings.ts b/web/app/routes/authenticated/settings.ts new file mode 100644 index 000000000..f75e111e1 --- /dev/null +++ b/web/app/routes/authenticated/settings.ts @@ -0,0 +1,30 @@ +import Route from "@ember/routing/route"; +import { inject as service } from "@ember/service"; +import ConfigService from "hermes/services/config"; +import AlgoliaService from "hermes/services/algolia"; +import FetchService from "hermes/services/fetch"; +import AuthenticatedUserService from "hermes/services/authenticated-user"; + +export default class SettingsRoute extends Route { + @service("config") declare configSvc: ConfigService; + @service("fetch") declare fetchSvc: FetchService; + @service declare algolia: AlgoliaService; + @service declare authenticatedUser: AuthenticatedUserService; + + async model(): Promise { + const allProducts = await this.fetchSvc + .fetch("/api/v1/products") + .then((resp) => { + return resp.json()}) + .catch((err) => { + console.log(`Error requesting 
products: ${err}`); + }); + + /** + * Make sure the user's subscriptions are loaded before rendering the page. + */ + await this.authenticatedUser.fetchSubscriptions.perform(); + + return Object.keys(allProducts).sort(); + } +} diff --git a/web/app/serializers/google/drive/file.js b/web/app/serializers/google/drive/file.js new file mode 100644 index 000000000..1aabec493 --- /dev/null +++ b/web/app/serializers/google/drive/file.js @@ -0,0 +1,13 @@ +import JSONSerializer from "@ember-data/serializer/json"; + +export default class GoogleDriveFileSerializer extends JSONSerializer { + normalizeQueryResponse(store, primaryModelClass, payload, id, requestType) { + return super.normalizeQueryResponse( + store, + primaryModelClass, + payload.files, + id, + requestType + ); + } +} diff --git a/web/app/serializers/google/userinfo/me.js b/web/app/serializers/google/userinfo/me.js new file mode 100644 index 000000000..997987671 --- /dev/null +++ b/web/app/serializers/google/userinfo/me.js @@ -0,0 +1,21 @@ +import RESTSerializer from "@ember-data/serializer/rest"; + +export default class GoogleDriveFileSerializer extends RESTSerializer { + normalizeQueryRecordResponse( + store, + primaryModelClass, + payload, + id, + requestType + ) { + return super.normalizeQueryResponse( + store, + primaryModelClass, + { + "google.userinfo.me": payload, + }, + id, + requestType + ); + } +} diff --git a/web/app/services/algolia.ts b/web/app/services/algolia.ts new file mode 100644 index 000000000..c043c8354 --- /dev/null +++ b/web/app/services/algolia.ts @@ -0,0 +1,371 @@ +import Service from "@ember/service"; +import algoliaSearch, { SearchClient, SearchIndex } from "algoliasearch"; +import { SearchForFacetValuesResponse } from "@algolia/client-search"; +import config from "hermes/config/environment"; +import { inject as service } from "@ember/service"; +import { restartableTask, task } from "ember-concurrency"; +import AuthenticatedUserService from "hermes/services/authenticated-user"; 
+import { RequestOptions } from "@algolia/transporter"; +import { + SearchOptions, + SearchResponse, + ObjectWithObjectID, +} from "@algolia/client-search"; +import { assert } from "@ember/debug"; +import ConfigService from "./config"; +import { FacetOption, FacetRecord, FacetRecords } from "hermes/types/facets"; +import FetchService from "./fetch"; + +export const HITS_PER_PAGE = 12; +export const MAX_VALUES_PER_FACET = 100; +export const FACET_NAMES = ["docType", "owners", "product", "status"]; + +export type AlgoliaSearchParams = RequestOptions & SearchOptions; +export type AlgoliaFacetsObject = NonNullable; + +export default class AlgoliaService extends Service { + @service("config") declare configSvc: ConfigService; + @service("fetch") declare fetchSvc: FetchService; + @service declare authenticatedUser: AuthenticatedUserService; + // TODO: use actual type. + @service session: any; + + /** + * A shorthand getter for the authenticatedUser's email. + */ + private get userEmail(): string { + return this.authenticatedUser.info.email; + } + + /** + * Returns an Algolia SearchClient based on the environment. + */ + private createClient(): SearchClient { + /** + * If not running as production, use environment variables and directly + * interact with Algolia's API. + */ + if (config.environment != "production") { + console.log( + "Running as non-production environment: Algolia client configured to directly interact with Algolia's API." + ); + return algoliaSearch(config.algolia.appID, config.algolia.apiKey); + } + /** + * If running as production, use environment variables and route Algolia + * requests through the Hermes API. + */ + if ( + window.location.hostname === "127.0.0.1" || + window.location.hostname === "localhost" + ) { + console.log( + "Running locally as production environment: Algolia client configured to proxy requests through the Hermes API." 
+ ); + return algoliaSearch("", "", { + headers: { + "Hermes-Google-Access-Token": + this.session.data.authenticated.access_token, + }, + hosts: [ + { + protocol: "http", + url: window.location.hostname + ":" + window.location.port, + }, + ], + }); + } + /** + * If running remotely as production, use HTTPS and route Algolia requests + * through the Hermes API. + */ + return algoliaSearch("", "", { + headers: { + "Hermes-Google-Access-Token": + this.session.data.authenticated.access_token, + }, + hosts: [ + { + protocol: "https", + url: window.location.hostname + ":" + window.location.port, + }, + ], + }); + } + + /** + * An Algolia SearchClient. + * Used to initialize an environment-scoped SearchIndex. + */ + private client: SearchClient = this.createClient(); + + /** + * An Algolia SearchIndex scoped to the environment. + */ + private get index(): SearchIndex { + return this.client.initIndex(this.configSvc.config.algolia_docs_index_name); + } + + /** + * Iterates over the keys of a facet object and transforms the `count` value + * into a `FacetOption` object with `count` and `selected` properties. + */ + mapStatefulFacetKeys = (facetObject: AlgoliaFacetsObject): FacetRecords => { + /** + * e.g., facetObject === { + * owners: { + * "meg@hashicorp.com": 10, + * }, + * status: { + * Obsolete: 4, + * Approved: 6, + * }, and so on ... 
+ * } + */ + let entries = Object.entries(facetObject).reduce( + (newObj, [key, val]) => { + /** + * e.g., `key` === "owners" + * e.g., `val` === { "meg@hashicorp.com": 10 } + */ + let newVal: FacetRecord = {}; + let mapper = (count: number) => ({ + count, + selected: false, + }); + for (let prop in val) { + /** + * e.g., prop === "meg@hashicorp.com" + * e.g., val[prop] === 10 + */ + let valProp = val[prop]; + if (valProp) { + /** + * Use the mapper to transform the count into a `FacetOption` object: + * { "meg@hashicorp.com": { count: 10, selected: false }} + */ + newVal[prop] = mapper(valProp); + } + } + newObj[key] = newVal; + return newObj; + }, + {} + ); + /** + * e.g., entries === { + * owners: { + * "meg@hashicorp.com": { count: 10, selected: false }, + * }, + * status: { + * Obsolete: { count: 4, selected: false }, + * Approved: { count: 6, selected: false }, + * }, and so on ... + * } + */ + return entries; + }; + + /** + * Iterates over the filter selection and marks corresponding facets "selected" + */ + markSelected = (facet: FacetRecord, selection?: string[]): void => { + /** + * e.g., facet === { + * Obsolete: { count: 4, selected: false }, + * Approved: { count: 6, selected: false }, + * } + */ + if (selection) { + /** + * e.g., selection === ["Approved"] + */ + for (let param of selection) { + (facet[param] as FacetOption).selected = true; + } + /** + * e.g., facet["Approved"] === { count: 6, selected: true } + */ + } + }; + + /** + * Searches an index by query and search params. + * Returns an Algolia SearchResponse. + */ + searchIndex = task( + async ( + indexName: string, + query: string, + params: AlgoliaSearchParams + ): Promise> => { + let index: SearchIndex = this.client.initIndex(indexName); + return await index.search(query, params); + } + ); + + /** + * Returns an array of facet filters based on the current parameters, + * and whether the owner is looking at their own docs. 
+ */ + buildFacetFilters(params: AlgoliaSearchParams, userIsOwner = false) { + let facets = FACET_NAMES; + let facetFilters = []; + + for (let facet of facets) { + let facetValues = []; + + for (let val of params[facet]) { + facetValues.push(`${facet}:${val}`); + } + + if (facetValues.length > 0) { + facetFilters.push(facetValues); + } + } + + if (userIsOwner) { + facetFilters.push(`owners:${this.userEmail}`); + } + return facetFilters; + } + + /** + * Returns an object of a given index and objectID. + * Used in the footer to show the date of the last full index. + */ + getSearchIndexObject = task( + async ( + indexName: string, + objectID: string + ): Promise => { + /** + * e.g., indexName = "hermes-staging" + * e.g., objectID = "LastFullIndex" + */ + try { + let index: SearchIndex = this.client.initIndex(indexName); + return index.getObject(objectID).then( + (result) => + /** + * e.g., result = { + * lastFullIndexTime: "1995-01-06T20:58:17.59404Z", + * objectID: "LastFullIndex", + * }; + */ + result + ); + } catch (e: unknown) { + console.error(e); + } + } + ); + /** + * Returns a search response for a given query and params. + * Restarts with every search input keystroke. + */ + search = restartableTask( + async ( + query: string, + params + ): Promise | undefined> => { + try { + return await this.index + .search(query, params) + .then((response) => response); + } catch (e: unknown) { + console.error(e); + } + } + ); + + /** + * Returns FacetRecords for a given index and params. + * If the user is the owner, the facets will be filtered by the owner's email. + */ + getFacets = task( + async ( + searchIndex: string, + params: AlgoliaSearchParams, + userIsOwner = false + ): Promise => { + try { + let facetFilters = userIsOwner ? 
[`owners:${this.userEmail}`] : []; + let algoliaFacets = await this.searchIndex.perform(searchIndex, "", { + facetFilters: facetFilters, + facets: FACET_NAMES, + hitsPerPage: HITS_PER_PAGE, + maxValuesPerFacet: MAX_VALUES_PER_FACET, + page: params.page ? params.page - 1 : 0, + }); + + assert("getFacets expects facets to exist", algoliaFacets.facets); + + /** + * Map the facets to a new object with additional nested properties + */ + let facets: FacetRecords = this.mapStatefulFacetKeys( + algoliaFacets.facets + ); + + // Mark facets as selected based on query parameters + Object.entries(facets).forEach(([name, facet]) => { + /** + * e.g., name === "owner" + * e.g., facet === { "meg@hashicorp.com": { count: 1, selected: false }} + */ + this.markSelected(facet, params[name]); + }); + + return facets; + } catch (e: unknown) { + console.error(e); + } + } + ); + + /** + * Returns a SearchResponse for a given index and params. + * If the user is the owner, i.e., when on the `/my` route, + * facets will be scoped to the owner's email. + */ + getDocResults = task( + async ( + searchIndex: string, + params: AlgoliaSearchParams, + userIsOwner = false + ): Promise => { + try { + return await this.searchIndex.perform(searchIndex, "", { + facetFilters: this.buildFacetFilters(params, userIsOwner), + facets: FACET_NAMES, + hitsPerPage: HITS_PER_PAGE, + maxValuesPerFacet: MAX_VALUES_PER_FACET, + page: params.page ? 
params.page - 1 : 0, + }); + } catch (e: unknown) { + console.error(e); + } + } + ); + + /** + * CURRENTLY UNUSED + * Mocked for use in `TagSelect` component + */ + searchForFacetValues = restartableTask( + async ( + indexName: string, + facetName: string, + query: string, + params: RequestOptions + ): Promise => { + try { + let index = this.client.initIndex(indexName); + return await index.searchForFacetValues(facetName, query, params); + } catch (e: unknown) { + console.error(e); + } + } + ); +} diff --git a/web/app/services/authenticated-user.ts b/web/app/services/authenticated-user.ts new file mode 100644 index 000000000..ea540d35d --- /dev/null +++ b/web/app/services/authenticated-user.ts @@ -0,0 +1,178 @@ +import Service from "@ember/service"; +import { tracked } from "@glimmer/tracking"; +import { inject as service } from "@ember/service"; +import Store from "@ember-data/store"; +import { assert } from "@ember/debug"; +import { task } from "ember-concurrency"; +import FetchService from "hermes/services/fetch"; + +export interface AuthenticatedUser { + email: string; + given_name: string; + picture: string; + subscriptions: Subscription[]; +} + +export interface Subscription { + productArea: string; + subscriptionType: SubscriptionType; +} + +enum SubscriptionType { + Digest = "digest", + Instant = "instant", +} + +export default class AuthenticatedUserService extends Service { + @service("fetch") declare fetchSvc: FetchService; + @service declare store: Store; + @service declare session: any; + + @tracked subscriptions: Subscription[] | null = null; + @tracked private _info: AuthenticatedUser | null = null; + + get info(): AuthenticatedUser { + assert("Authenticated must exist", this._info); + return this._info; + } + + /** + * Returns the user's subscriptions as a JSON string. + * E.g., '{"subscriptions":["Customer Success", "Terraform"]}' + * Used in POST requests to the subscriptions endpoint. 
+ */ + private get subscriptionsPostBody(): string { + assert("subscriptions must be defined", this.subscriptions); + let subscriptions = this.subscriptions.map( + (subscription: Subscription) => subscription.productArea + ); + return JSON.stringify({ subscriptions }); + } + + /** + * The headers to use in POST requests to the subscriptions endpoint. + */ + private get subscriptionsPostHeaders() { + return { + "Content-Type": "application/json", + "Hermes-Google-Access-Token": + this.session.data.authenticated.access_token, + }; + } + + /** + * Loads the user's info from the Google API. + * Called by the `authenticated` route on load. + * Ensures `authenticatedUser.info` is always defined. + */ + loadInfo = task(async () => { + this._info = await this.store.queryRecord("google.userinfo.me", {}); + }); + + /** + * Loads the user's subscriptions from the API. + * If the user has no subscriptions, returns an empty array. + */ + fetchSubscriptions = task(async () => { + try { + let response = await this.fetchSvc.fetch("/api/v1/me/subscriptions", { + method: "GET", + headers: { + "Hermes-Google-Access-Token": + this.session.data.authenticated.access_token, + }, + }); + let subscriptions: string[] = await response.json(); + + let newSubscriptions: Subscription[] = []; + + if (subscriptions) { + // map + newSubscriptions = subscriptions.map((subscription: string) => { + return { + productArea: subscription, + subscriptionType: SubscriptionType.Instant, + }; + }); + } + this.subscriptions = newSubscriptions; + } catch (e: unknown) { + console.error("Error loading subscriptions: ", e); + throw e; + } + }); + + /** + * Adds a subscription and saves the subscription index. + * Subscriptions default to the "instant" subscription type. 
+ */ + addSubscription = task( + async ( + productArea: string, + subscriptionType = SubscriptionType.Instant + ) => { + assert( + "removeSubscription expects a valid subscriptions array", + this.subscriptions + ); + + let cached = this.subscriptions; + + this.subscriptions.addObject({ + productArea, + subscriptionType, + }); + + try { + await this.fetchSvc.fetch(`/api/v1/me/subscriptions`, { + method: "POST", + headers: this.subscriptionsPostHeaders, + body: this.subscriptionsPostBody, + }); + } catch (e: unknown) { + console.error("Error updating subscriptions: ", e); + this.subscriptions = cached; + throw e; + } + } + ); + + /** + * Removes a subscription and saves the subscription index. + */ + removeSubscription = task( + async ( + productArea: string, + subscriptionType = SubscriptionType.Instant + ) => { + assert( + "removeSubscription expects a subscriptions array", + this.subscriptions + ); + + let cached = this.subscriptions; + let subscriptionToRemove = this.subscriptions.find( + (subscription) => subscription.productArea === productArea + ); + + assert( + "removeSubscription expects a valid productArea", + subscriptionToRemove + ); + + this.subscriptions.removeObject(subscriptionToRemove); + + try { + await this.fetchSvc.fetch("/api/v1/me/subscriptions", { + method: "POST", + headers: this.subscriptionsPostHeaders, + body: this.subscriptionsPostBody, + }); + } catch (e: unknown) { + console.error("Error updating subscriptions: ", e); + this.subscriptions = cached; + throw e; + } + } + ); +} diff --git a/web/app/services/config.ts b/web/app/services/config.ts new file mode 100644 index 000000000..f7705336c --- /dev/null +++ b/web/app/services/config.ts @@ -0,0 +1,29 @@ +// @ts-nocheck +// TODO: Type this file. 
+ +import Service from "@ember/service"; +import config from "hermes/config/environment"; + +export default class ConfigService extends Service { + config = { + algolia_docs_index_name: config.algolia.docsIndexName, + algolia_drafts_index_name: config.algolia.draftsIndexName, + algolia_internal_index_name: config.algolia.internalIndexName, + feature_flags: config.featureFlags, + google_doc_folders: config.google.docFolders ?? "", + short_link_base_url: config.shortLinkBaseURL, + // google_oauth2_client_id: + // config.torii.providers["google-oauth2-bearer"].apiKey ?? "", + // google_oauth2_hd: config.torii.providers["google-oauth2-bearer"].hd ?? "", + }; + + setConfig(param) { + this.set("config", param); + } +} + +declare module "@ember/service" { + interface Registry { + config: ConfigService; + } +} diff --git a/web/app/services/fetch.ts b/web/app/services/fetch.ts new file mode 100644 index 000000000..b413c501b --- /dev/null +++ b/web/app/services/fetch.ts @@ -0,0 +1,51 @@ +// @ts-nocheck +// TODO: Type this file. +import Service from "@ember/service"; +import fetch from "fetch"; +import { inject as service } from "@ember/service"; + +export default class FetchService extends Service { + @service session; + + async fetch(url, options = {}) { + // Add the Google access token in a header (for auth) if the URL starts with + // a frontslash, which will only target the application backend. + if (Array.from(url)[0] == "/") { + options.headers = { + ...options.headers, + "Hermes-Google-Access-Token": + this.session.data.authenticated.access_token, + }; + } + try { + + const resp = await fetch(url, options); + + if (!resp.ok) { + throw new Error(`Bad response: ${resp.statusText}`); + } + + return resp; + } catch (err) { + // Assume this case is a CORS error because of a redirect is to an OIDC + // identity provider, so invalidate the session. 
+ if ( + err instanceof TypeError && + (err.message === "Network request failed" || + err.message === "Failed to fetch") + ) { + // Swallow error and handle gracefully. + this.session.invalidate(); + } else { + // Re-throw the error to be handled at the call site. + throw err; + } + } + } +} + +declare module "@ember/service" { + interface Registry { + fetch: FetchService; + } +} diff --git a/web/app/services/flags.js b/web/app/services/flags.js new file mode 100644 index 000000000..bdef6acb8 --- /dev/null +++ b/web/app/services/flags.js @@ -0,0 +1,8 @@ +import Service from "@ember/service"; +import config from "hermes/config/environment"; + +export default class FlagsService extends Service { + initialize() { + this.setProperties(config.featureFlags); + } +} diff --git a/web/app/services/modal-alerts.ts b/web/app/services/modal-alerts.ts new file mode 100644 index 000000000..eb392ad77 --- /dev/null +++ b/web/app/services/modal-alerts.ts @@ -0,0 +1,29 @@ +import Service, { inject as service } from "@ember/service"; +import { tracked } from "@glimmer/tracking"; +import { action } from "@ember/object"; +import RouterService from "@ember/routing/router-service"; +import { task, timeout } from "ember-concurrency"; + +export type ModalType = "docCreated"; + +export default class ModalAlertsService extends Service { + @service declare router: RouterService; + + init() { + super.init(); + this.router.on("routeWillChange", () => { + this.close(); + }); + } + + @tracked activeModal: ModalType | null = null; + + @action close(): void { + this.activeModal = null; + } + + setActive = task(async (modalType: ModalType, delay: number = 0) => { + await timeout(delay); + this.activeModal = modalType; + }); +} diff --git a/web/app/services/recently-viewed-docs.js b/web/app/services/recently-viewed-docs.js new file mode 100644 index 000000000..e46f21163 --- /dev/null +++ b/web/app/services/recently-viewed-docs.js @@ -0,0 +1,147 @@ +import Service from "@ember/service"; +import { 
inject as service } from "@ember/service"; +import { task } from "ember-concurrency"; + +export default class RecentlyViewedDocsService extends Service { + @service("fetch") fetchSvc; + @service session; + + recentlyViewedDocsFilename = "recently_viewed_docs.json"; + + // get returns an array of recently viewed docs. + @task({ restartable: true }) *get() { + const recentlyViewedDocsFileID = + yield this._getRecentlyViewedDocsFileID.perform(); + + if (recentlyViewedDocsFileID) { + // Download file contents. + const recentlyViewedDocs = yield this.fetchSvc + .fetch( + `https://www.googleapis.com/drive/v3/files/${recentlyViewedDocsFileID}?` + + new URLSearchParams({ + alt: "media", + fields: "files(id, name)", + }), + { + headers: { + Authorization: + "Bearer " + this.session.data.authenticated.access_token, + "Content-Type": "application/json", + }, + } + ) + .then((resp) => resp.json()) + .catch((err) => { + console.log(`Error getting recently viewed docs file: ${err}`); + throw err; + }); + + return recentlyViewedDocs; + } + + // Return empty array if file didn't exist. + return []; + } + + // _getRecentlyViewedDocsFileID returns the Google Drive file ID of the + // recently viewed docs file. + @task({ restartable: true }) *_getRecentlyViewedDocsFileID() { + // List app data files. + const appDataFiles = yield this.fetchSvc + .fetch( + "https://www.googleapis.com/drive/v3/files?" + + new URLSearchParams({ + fields: "files(id, name)", + spaces: "appDataFolder", + }), + { + headers: { + Authorization: + "Bearer " + this.session.data.authenticated.access_token, + "Content-Type": "application/json", + }, + } + ) + .then((resp) => resp.json()) + .catch((err) => { + console.log(`Error listing app data files: ${err}`); + throw err; + }); + + const file = appDataFiles?.files?.find( + (o) => o.name === this.recentlyViewedDocsFilename + ); + return file?.id; + } + + // Add adds a recently viewed doc. 
+ @task({ restartable: true }) *addDoc(docID) { + if (!docID) { + throw new Error("docID is required"); + } + + // Get recently viewed docs array. + let recentlyViewedDocs = yield this.get.perform(); + + // Filter docID from array. + recentlyViewedDocs = recentlyViewedDocs.filter((e) => e !== docID); + + // Add docID to beginning of array. + recentlyViewedDocs.unshift(docID); + + const recentlyViewedDocsFileID = + yield this._getRecentlyViewedDocsFileID.perform(); + + const body = + "--PART_BOUNDARY\nContent-Type: application/json; charset=UTF-8\n\n" + + JSON.stringify({ + name: this.recentlyViewedDocsFilename, + parents: ["appDataFolder"], + }) + + "\n--PART_BOUNDARY\nContent-Type: application/json\n\n" + + JSON.stringify(recentlyViewedDocs) + + "\n--PART_BOUNDARY--"; + + if (recentlyViewedDocsFileID) { + // The file exists, so update it. + yield this.fetchSvc + .fetch( + `https://www.googleapis.com/upload/drive/v3/files/${recentlyViewedDocsFileID}`, + { + method: "PATCH", + headers: { + Authorization: + "Bearer " + this.session.data.authenticated.access_token, + "Content-Type": "application/json", + }, + body: JSON.stringify(recentlyViewedDocs), + } + ) + .then((resp) => resp.json()) + .catch((err) => { + console.log(`Error saving recently viewed docs: ${err}`); + throw err; + }); + } else { + // The file doesn't exist, so create it. 
+ yield this.fetchSvc + .fetch( + "https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart", + { + method: "POST", + headers: { + Authorization: + "Bearer " + this.session.data.authenticated.access_token, + "Content-Type": "multipart/related; boundary=PART_BOUNDARY", + }, + body: body, + } + ) + .then((resp) => resp.json()) + .catch((err) => { + console.log(`Error saving recently viewed docs: ${err}`); + throw err; + }); + } + } +} diff --git a/web/app/services/toolbar.js b/web/app/services/toolbar.js new file mode 100644 index 000000000..bee503bde --- /dev/null +++ b/web/app/services/toolbar.js @@ -0,0 +1,6 @@ +import Service from "@ember/service"; +import { tracked } from "@glimmer/tracking"; + +export default class ToolbarService extends Service { + @tracked sortBy = "dateDesc"; +} diff --git a/web/app/styles/app.scss b/web/app/styles/app.scss new file mode 100644 index 000000000..b081d267b --- /dev/null +++ b/web/app/styles/app.scss @@ -0,0 +1,113 @@ +@use "components/action"; +@use "components/toolbar"; +@use "components/footer"; +@use "components/nav"; +@use "components/x-hds-tab"; +@use "components/editable-field"; +@use "components/modal-dialog"; +@use "components/multiselect"; +@use "components/page"; +@use "components/row-results"; +@use "components/template-card"; +@use "components/tile-list"; +@use "components/preview-card"; +@use "components/notification"; +@use "components/sidebar"; +@use "components/hds-badge"; +@use "hashicorp/product-badge"; +@use "hashicorp/hermes-logo"; + +@use "./ember-power-select-theme"; + +@use "@hashicorp/design-system-components"; +@use "./hds-overrides"; + +@use "tailwindcss/base"; +@use "tailwindcss/components"; +@use "tailwindcss/utilities"; + +*, +*::before, +*::after { + box-sizing: border-box; + margin: 0; + padding: 0; + border-width: 0; + border-style: solid; + border-color: transparent; +} + +dialog { + margin: auto; // centers the dialog +} + +body, +button { + font-family: 
var(--token-typography-font-stack-display); + font-size: var(--token-typography-body-200-font-size); +} + +body { + line-height: var(--token-typography-body-200-line-height); + color: var(--token-color-foreground-primary); +} + +.x-container { + @apply w-full max-w-screen-lg mx-auto px-8; +} + +h1 { + @apply text-display-500 font-bold text-color-foreground-strong mb-1.5; + + + p { + @apply text-body-300; + } +} + +// Prevent the Flight Icons shim from taking up space +.flight-sprite-container { + position: fixed; + top: 0; + right: 0; +} + +// HACK! This should be imported dynamically from @hashicorp/ember-flight-icons +// but it's not and this is the fastest way to ship milestone 2 on time. +.flight-icon { + display: block; +} + +.flight-icon-display-inline { + display: inline-block; +} + +@keyframes hds-flight-icon-animation-rotation { + to { + transform: rotate(360deg); + } +} + +.flight-icon--animation-loading { + animation: hds-flight-icon-animation-rotation 9s linear infinite; +} + +.flight-icon--animation-running { + animation: hds-flight-icon-animation-rotation 9s linear infinite; +} + +@media (prefers-reduced-motion: no-preference) { + .flight-icon--animation-loading { + animation-duration: 0.7s; + } + + .flight-icon--animation-running { + animation-duration: 1s; + } +} + +.flight-sprite-container { + position: absolute; + width: 0; + height: 0; + visibility: hidden; +} diff --git a/web/app/styles/components/action.scss b/web/app/styles/components/action.scss new file mode 100644 index 000000000..ba77aa165 --- /dev/null +++ b/web/app/styles/components/action.scss @@ -0,0 +1,4 @@ +.action { + @apply appearance-none font-sans p-0 border-0 bg-transparent text-left cursor-pointer; + font-weight: inherit; +} diff --git a/web/app/styles/components/editable-field.scss b/web/app/styles/components/editable-field.scss new file mode 100644 index 000000000..732f4db1c --- /dev/null +++ b/web/app/styles/components/editable-field.scss @@ -0,0 +1,61 @@ +.editable-field { 
+ @apply w-full relative; + + & > .field-toggle { + all: unset; + width: 100%; + cursor: pointer; + position: relative; + + &[disabled] { + cursor: default; + } + + &.loading { + opacity: 0.5; + } + + &::before { + pointer-events: none; + content: ""; + padding: 3px 5px; + position: absolute; + top: -5px; + right: -5px; + bottom: -5px; + left: -5px; + } + + &::after { + pointer-events: none; + content: "edit"; + position: absolute; + right: -5px; + top: -5px; + padding: 3px 5px; + background: var(--token-form-control-base-border-color-default); + border-top-right-radius: var(--token-form-control-border-radius); + border-bottom-left-radius: var(--token-form-control-border-radius); + color: var(--token-color-foreground-high-contrast); + visibility: hidden; + } + + &:not([disabled]):hover, + &:not([disabled]):focus { + &::before { + border: 1px solid var(--token-form-control-base-border-color-default); + border-radius: var(--token-form-control-border-radius); + } + + &::after { + visibility: visible; + } + } + } + + .loading-indicator { + position: absolute; + top: 0; + right: 0; + } +} diff --git a/web/app/styles/components/footer.scss b/web/app/styles/components/footer.scss new file mode 100644 index 000000000..53feefef7 --- /dev/null +++ b/web/app/styles/components/footer.scss @@ -0,0 +1,7 @@ +.footer { + @apply text-center mt-12 mb-16; + + a { + @apply text-inherit no-underline border-b border-b-color-border-primary hover:border-b-color-foreground-action hover:text-color-foreground-action; + } +} diff --git a/web/app/styles/components/hds-badge.scss b/web/app/styles/components/hds-badge.scss new file mode 100644 index 000000000..9efd7a54f --- /dev/null +++ b/web/app/styles/components/hds-badge.scss @@ -0,0 +1,3 @@ +.hds-badge__text { + @apply truncate; +} diff --git a/web/app/styles/components/modal-dialog.scss b/web/app/styles/components/modal-dialog.scss new file mode 100644 index 000000000..1df0945b3 --- /dev/null +++ 
b/web/app/styles/components/modal-dialog.scss @@ -0,0 +1,19 @@ +.ember-application { + .hds-modal { + @apply overflow-visible; + + .ember-basic-dropdown { + @apply w-full mb-2 mt-1; + } + + &__body { + // Allows PeopleSelect dropdown to be completely visible + @apply overflow-y-visible; + } + + .hds-form-field--layout-vertical .hds-form-field__control:not(:last-child) { + // Positions dropdown correctly + @apply mb-0; + } + } +} diff --git a/web/app/styles/components/multiselect.scss b/web/app/styles/components/multiselect.scss new file mode 100644 index 000000000..2c0db4450 --- /dev/null +++ b/web/app/styles/components/multiselect.scss @@ -0,0 +1,86 @@ +// This is built on ember-power-select +.multiselect { + min-width: 300px; + background-image: var(--token-form-select-background-image-data-url); + background-position: right + var(--token-form-select-background-image-position-right-x) top + var(--token-form-select-background-image-position-top-y); + background-size: var(--token-form-select-background-image-size); + background-repeat: no-repeat; + box-shadow: var(--token-elevation-low-box-shadow); + + &--narrow { + min-width: 0; + } + + &.ember-power-select-trigger { + line-height: 1; + } + + &:hover, + &.mock-hover { + border-color: var(--token-form-control-base-border-color-hover); + } + + &:focus, + &.mock-focus { + border-color: var(--token-color-focus-action-internal); + outline: 3px solid var(--token-color-focus-action-external); + outline-offset: 0px; + } + + .ember-power-select-status-icon { + display: none; + } + + .ember-power-select-multiple-options { + display: flex; + flex-wrap: wrap; + } + + .ember-power-select-trigger-multiple-input { + min-width: 80px; + + &::-webkit-search-cancel-button { + display: none; + } + } + + .ember-power-select-multiple-option { + position: relative; + border: none; + padding: 3px 7px; + cursor: pointer; + flex-basis: 0; + white-space: nowrap; + + // avoid overlapping the dropdown affordance + width: calc(100% - 28px); + 
+ &:hover { + background: var(--token-color-palette-neutral-200); + } + + .flight-icon { + vertical-align: middle; + } + + // This is an unremovable element used to remove selected options. + // We don't want the little x at all, but we need to continue to use this + // button, so we're hacking it a bit. + .ember-power-select-multiple-remove-btn { + position: absolute; + left: 0; + top: 0; + right: 0; + bottom: 0; + color: transparent; + border-radius: var(--token-form-control-border-radius); + + &:focus, + &:active { + outline: 2px solid var(--token-color-focus-action-internal); + } + } + } +} diff --git a/web/app/styles/components/nav.scss b/web/app/styles/components/nav.scss new file mode 100644 index 000000000..5a98ad6cc --- /dev/null +++ b/web/app/styles/components/nav.scss @@ -0,0 +1,57 @@ +.header-nav { + @apply flex flex-wrap justify-between md:flex-nowrap items-center w-full relative; + + .header-nav-logo { + @apply mr-6 mt-3.5 md:mt-0 shrink-0 flex items-center md:mr-0 order-1 md:h-16 hover:text-color-foreground-strong; + } + + .primary-links { + @apply -ml-2.5 h-16 flex shrink-0 md:mx-6 order-3 md:order-2; + + a { + @apply px-2.5 md:px-3.5 h-full flex items-center text-color-foreground-faint; + + &:hover { + @apply text-color-foreground-strong; + } + + &.active { + @apply text-color-palette-blue-200 relative; + + /* Active indicator */ + &::before { + content: ""; + @apply absolute w-full h-1 left-0 -bottom-px bg-color-palette-blue-200; + } + } + } + } + + .search-bar { + @apply order-2 md:order-3 pt-3.5 md:py-0 w-[calc(100%-9rem)] md:w-full flex; + } + + .user-buttons { + @apply flex items-center justify-end space-x-1.5 sm:ml-6 md:ml-8 order-4; + } + + .user-avatar { + @apply w-[30px] h-[30px] rounded-[3px] absolute z-10 top-1/2 -translate-y-1/2 left-[3px] pointer-events-none; + } + + .search-dropdown { + @apply w-full max-w-none min-w-[320px]; + } + + a { + @apply no-underline; + + &.hds-button:hover { + @apply no-underline; + } + } + + 
.create-draft-button { + @apply rounded-full w-9 p-0; + } +} diff --git a/web/app/styles/components/notification.scss b/web/app/styles/components/notification.scss new file mode 100644 index 000000000..a8e0c27f7 --- /dev/null +++ b/web/app/styles/components/notification.scss @@ -0,0 +1,24 @@ +.notifications-container { + @apply fixed; + z-index: 20; + bottom: 8px; + right: 8px; +} + +.notification { + // Animation example taken from: https://github.com/adopted-ember-addons/ember-cli-flash#animated-example + opacity: 0; + + transition: all 700ms cubic-bezier(0.68, -0.55, 0.265, 1.55); + margin: 16px; + + &.active { + opacity: 1; + @apply left-2; + + &.exiting { + opacity: 0; + @apply left-0; + } + } +} diff --git a/web/app/styles/components/page.scss b/web/app/styles/components/page.scss new file mode 100644 index 000000000..45472ef57 --- /dev/null +++ b/web/app/styles/components/page.scss @@ -0,0 +1,7 @@ +.page { + @apply flex flex-col items-center flex-1 min-h-full px-8 py-10; +} + +.page--fixed-width { + @apply flex flex-col space-y-4 w-full max-w-screen-lg pb-12 container; +} diff --git a/web/app/styles/components/preview-card.scss b/web/app/styles/components/preview-card.scss new file mode 100644 index 000000000..c7e5bea07 --- /dev/null +++ b/web/app/styles/components/preview-card.scss @@ -0,0 +1,4 @@ +.preview-card { + @apply p-4 sticky top-4 space-y-4; + background: var(--token-color-surface-faint); +} diff --git a/web/app/styles/components/row-results.scss b/web/app/styles/components/row-results.scss new file mode 100644 index 000000000..e584ebd82 --- /dev/null +++ b/web/app/styles/components/row-results.scss @@ -0,0 +1,51 @@ +.row-results { + @apply w-full; + + .hds-table { + @apply table-auto lg:table-fixed mb-8; + @apply border-0; + + &__th, + &__td { + &.name { + @apply w-auto; + @apply pl-0; + } + + &.type { + @apply w-24; + } + + &.status { + @apply w-32; + } + + &.product { + @apply w-40; + } + + &.owner { + @apply w-48; + } + + &.created { + @apply 
w-28 text-right; + @apply pr-0; + } + } + + .hds-table__thead .hds-table__tr { + @apply bg-transparent; + } + + .hds-table__thead .hds-table__tr .hds-table__th { + &.name { + @apply pl-0; + } + &.created { + @apply text-right; + @apply pr-0; + } + } + } +} diff --git a/web/app/styles/components/sidebar.scss b/web/app/styles/components/sidebar.scss new file mode 100644 index 000000000..14941b8ec --- /dev/null +++ b/web/app/styles/components/sidebar.scss @@ -0,0 +1,22 @@ +.sidebar { + @apply flex flex-col max-h-full relative z-10; + + header, nav { + @apply sticky top-0 mb-4; + } + + .body { + @apply space-y-8 pb-6; + overflow-y: auto; + height: 100%; + } + + .person-list { + @apply w-full list-none space-y-2; + } + + .sidebar-footer { + @apply w-full shrink-0 pt-8 pb-6; + background-color: var(--token-color-page-faint); + } +} diff --git a/web/app/styles/components/template-card.scss b/web/app/styles/components/template-card.scss new file mode 100644 index 000000000..6d0440472 --- /dev/null +++ b/web/app/styles/components/template-card.scss @@ -0,0 +1,32 @@ +.template-card { + @apply p-5 h-full flex flex-col justify-between duration-300 cursor-pointer; + + transition-property: background, box-shadow; + background: linear-gradient( + to bottom, + var(--token-color-surface-interactive), + var(--token-color-surface-interactive) + ); + + &:hover { + background: linear-gradient( + to bottom, + var(--token-color-surface-interactive), + var(--token-color-surface-action) + ); + } + + &.disabled { + @apply cursor-default transition-none; + + background: var(--token-color-surface-faint); + + &:hover { + background: var(--token-color-surface-faint); + } + } + + &--with-link { + @apply pb-12; + } +} diff --git a/web/app/styles/components/tile-list.scss b/web/app/styles/components/tile-list.scss new file mode 100644 index 000000000..018990538 --- /dev/null +++ b/web/app/styles/components/tile-list.scss @@ -0,0 +1,3 @@ +.tile-list { + @apply grid gap-x-8 gap-y-12 grid-cols-4; +} 
diff --git a/web/app/styles/components/toolbar.scss b/web/app/styles/components/toolbar.scss new file mode 100644 index 000000000..6b8d4aa1d --- /dev/null +++ b/web/app/styles/components/toolbar.scss @@ -0,0 +1,13 @@ +.toolbar { + .hds-dropdown-list-item { + &__interactive-text { + font-weight: normal; + } + + .checked { + svg { + fill: var(--token-color-foreground-action); + } + } + } +} diff --git a/web/app/styles/components/x-hds-tab.scss b/web/app/styles/components/x-hds-tab.scss new file mode 100644 index 000000000..945c10372 --- /dev/null +++ b/web/app/styles/components/x-hds-tab.scss @@ -0,0 +1,43 @@ +$border-radius: 5px; + +.x-hds-tab { + &--link, + &--button { + display: flex; + align-items: center; + padding: 8px 12px; + color: var(--token-color-foreground-primary); + border-radius: $border-radius; + background: transparent; + border: none; + font-family: inherit; + text-decoration: none; + cursor: pointer; + + &:hover { + background: var(--token-color-palette-neutral-100); + } + + &:active { + color: var(--token-color-foreground-strong); + background: var(--token-color-palette-neutral-200); + } + + &:focus { + outline: none; + box-shadow: var(--token-focus-ring-action-box-shadow); + } + + * ~ * { + margin-left: 6px; + } + } + + &--selected { + a, + button { + color: var(--token-color-foreground-strong); + background: var(--token-color-palette-neutral-100); + } + } +} diff --git a/web/app/styles/ember-power-select-theme.scss b/web/app/styles/ember-power-select-theme.scss new file mode 100644 index 000000000..b7a0c39e7 --- /dev/null +++ b/web/app/styles/ember-power-select-theme.scss @@ -0,0 +1,44 @@ +@import "ember-basic-dropdown"; + +// ember-power-select +$ember-power-select-background-color: var( + --token-form-control-base-surface-color-default +); +$ember-power-select-disabled-background-color: var( + --token-form-control-disabled-surface-color +); +$ember-power-select-multiple-selection-background-color: var( + --token-color-palette-neutral-100 +); 
+$ember-power-select-highlighted-background: var( + --token-color-foreground-action +); +$ember-power-select-border-color: var( + --token-form-control-base-border-color-default +); +$ember-power-select-default-border: var(--token-form-control-border-width) solid + $ember-power-select-border-color; +$ember-power-select-default-border-radius: var( + --token-form-control-border-radius +); +$ember-power-select-focus-box-shadow: var(--token-elevation-low-box-shadow); +$ember-power-select-dropdown-box-shadow: var(--token-elevation-low-box-shadow); +$ember-power-select-option-padding: 7px; // Can't use --token-form-control-padding here. +$ember-power-select-focus-outline: 3px solid + var(--token-color-focus-action-external); +$ember-power-select-trigger-ltr-padding: var(--token-form-control-padding); +$ember-power-select-trigger-rtl-padding: var(--token-form-control-padding); +$ember-power-select-multiple-option-padding: 0 7px; +$ember-power-select-multiple-option-line-height: 1.3; + +.ember-basic-dropdown-content { + .hds-dropdown-list-item--interactive { + @apply no-underline text-color-foreground-primary flex items-center px-3; + + &:hover { + @apply bg-color-surface-interactive-hover; + } + } +} + +@import "ember-power-select"; diff --git a/web/app/styles/hashicorp/hermes-logo.scss b/web/app/styles/hashicorp/hermes-logo.scss new file mode 100644 index 000000000..e99043ea5 --- /dev/null +++ b/web/app/styles/hashicorp/hermes-logo.scss @@ -0,0 +1,11 @@ +.hermes-logo { + @apply shrink-0 flex items-center space-x-2.5 text-color-foreground-primary; +} + +.hermes-logo-text { + @apply text-display-300 font-semibold; +} + +.hermes-logo-divider { + @apply h-6 relative w-px bg-color-border-strong content-none; +} diff --git a/web/app/styles/hashicorp/product-badge.scss b/web/app/styles/hashicorp/product-badge.scss new file mode 100644 index 000000000..61a6aad42 --- /dev/null +++ b/web/app/styles/hashicorp/product-badge.scss @@ -0,0 +1,37 @@ +.product-badge { + // `.hcp` uses the 
default styles + + @apply absolute bottom-0.5 -left-4 grid rounded-r place-items-center bg-gradient-to-br w-9 h-7 from-color-palette-neutral-500 to-color-palette-neutral-600 text-color-foreground-high-contrast; + + &.nomad { + @apply from-color-nomad-gradient-primary-start to-color-nomad-gradient-primary-stop text-color-nomad-foreground; + } + + &.packer { + @apply from-color-packer-gradient-primary-start to-color-packer-gradient-primary-stop text-color-packer-foreground; + } + + &.vault { + @apply from-color-vault-gradient-primary-start to-color-vault-gradient-primary-stop text-color-vault-foreground; + } + + &.vagrant { + @apply from-color-vagrant-gradient-primary-start to-color-vagrant-gradient-primary-stop text-color-vagrant-foreground; + } + + &.consul { + @apply from-color-consul-gradient-primary-start to-color-consul-gradient-primary-stop text-color-foreground-high-contrast; + } + + &.terraform { + @apply from-color-terraform-gradient-primary-start to-color-terraform-gradient-primary-stop text-color-foreground-high-contrast; + } + + &.boundary { + @apply from-color-boundary-gradient-primary-start to-color-boundary-gradient-primary-stop text-color-foreground-high-contrast; + } + + &.waypoint { + @apply from-color-waypoint-gradient-primary-start to-color-waypoint-gradient-primary-stop text-color-waypoint-foreground; + } +} diff --git a/web/app/styles/hds-overrides.scss b/web/app/styles/hds-overrides.scss new file mode 100644 index 000000000..48e150e7e --- /dev/null +++ b/web/app/styles/hds-overrides.scss @@ -0,0 +1,20 @@ +// Design System Overrides (this should be limited to workarounds) +.hds-typography-body-200.hds-form-select { + padding: var(--token-form-control-padding); + padding-right: calc( + var(--token-form-control-padding) + 24px + ); // extra space for the icon +} + +.hds-dropdown-toggle-button .hds-button__icon, +.hds-dropdown-toggle-icon__chevron { + transition: none; +} + +.hds-link-standalone.small-external-link { + @apply font-regular; + + 
.hds-link-standalone__text { + @apply text-body-100; + } +} diff --git a/web/app/templates/application-loading.hbs b/web/app/templates/application-loading.hbs new file mode 100644 index 000000000..3a04ee401 --- /dev/null +++ b/web/app/templates/application-loading.hbs @@ -0,0 +1,3 @@ +
+ +
diff --git a/web/app/templates/application.hbs b/web/app/templates/application.hbs new file mode 100644 index 000000000..3359dddfe --- /dev/null +++ b/web/app/templates/application.hbs @@ -0,0 +1,10 @@ +{{page-title "Hermes"}} +
+ + + +
+ {{outlet}} +
+ + diff --git a/web/app/templates/authenticate.hbs b/web/app/templates/authenticate.hbs new file mode 100644 index 000000000..7c51c8c2c --- /dev/null +++ b/web/app/templates/authenticate.hbs @@ -0,0 +1,31 @@ +{{page-title "Authenticate"}} +{{set-body-class "bg-color-page-faint"}} + +
+ + +

+ Welcome to Hermes. +

+

+ Log in to browse, search, and manage documents +

+ + +
+
+
+ +{{outlet}} diff --git a/web/app/templates/authenticated.hbs b/web/app/templates/authenticated.hbs new file mode 100644 index 000000000..13685ad37 --- /dev/null +++ b/web/app/templates/authenticated.hbs @@ -0,0 +1,3 @@ +{{outlet}} + +