diff --git a/docs/dev-setup.md b/docs/dev-setup.md index fd65ebe3ab..eaa7c0f6b2 100644 --- a/docs/dev-setup.md +++ b/docs/dev-setup.md @@ -7,7 +7,7 @@ ODH requires the following to run: - [NodeJS and NPM](https://nodejs.org/) - Node recommended version -> `18.18.2` - NPM recommended version -> `9.8.1` -- [OpenShift CLI](https://docs.openshift.com/container-platform/latest/cli_reference/openshift_cli/getting-started-cli.html) +- [OpenShift CLI](https://docs.redhat.com/en/documentation/openshift_container_platform/4.16/html/cli_tools/openshift-cli-oc) - [kustomize](https://github.com/kubernetes-sigs/kustomize) (if you need to do deployment) ### Additional tooling diff --git a/frontend/src/__mocks__/mockComponents.ts b/frontend/src/__mocks__/mockComponents.ts index 6190bd9593..3371467581 100644 --- a/frontend/src/__mocks__/mockComponents.ts +++ b/frontend/src/__mocks__/mockComponents.ts @@ -19,7 +19,7 @@ export const mockComponents = (): OdhApplication[] => [ docsLink: 'https://jupyter.org', getStartedLink: 'https://jupyterlab.readthedocs.io/en/stable/getting_started/overview.html', getStartedMarkDown: - '# Jupyter\nLaunch Jupyter and start a notebook server to start working with your notebooks.\n## Prerequisites\n- You have logged in to Red Hat OpenShift AI.\n- You know the names and values you want to use for any environment variables in your notebook server environment, for example, `AWS_SECRET_ACCESS_KEY`.\n- If you want to work with a very large data set, work with your administrator to proactively increase the storage capacity of your notebook server.\n## Procedure\n1. Locate the **Jupyter** card on the **Enabled applications** page.\n2. Click **Launch application**.\n\n\n i. If prompted, select your identity provider.\n\n ii. Enter your credentials and click **Log in** (or equivalent for your identity provider).\n\n If you see **Error 403: Forbidden**, you are not in the default user group or the default administrator group for OpenShift AI. Contact your administrator so that they can add you to the correct group using [Adding users for OpenShift AI](https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html/managing_users_and_user_resources/adding-users-for-openshift-data-science_useradd).\n\n3. Start a notebook server.\n\n\n This is not required if you have previously launched Jupyter.\n\n i. Select the **Notebook image** to use for your server.\n\n ii. If the notebook image contains multiple versions, select the version of the notebook image from the **Versions** section.\n\n iii. Select the **Container size** for your server.\n\n iv. Optional: Select the **Number of GPUs** (graphics processing units) for your server.\n\n v. Optional: Select and specify values for any new **Environment variables**.\n\n For example, if you plan to integrate with Red Hat OpenShift Streams for Apache Kafka, create environment variables to store your Kafka bootstrap server and the service account username and password here.\n\n he interface stores these variables so that you only need to enter them once. Example variable names for common environment variables are automatically provided for frequently integrated environments and frameworks, such as Amazon Web Services (AWS).\n\n vi. Click **Start server**.\n\n The **Starting server** progress indicator appears. If you encounter a problem during this process, an error message appears with more information. Click **Expand event log** to view additional information on the server creation process. 
Depending on the deployment size and resources you requested, starting the server can take up to several minutes. After the server starts, the JupyterLab interface opens.\n\n## Verification\nThe JupyterLab interface opens in the same tab.', + '# Jupyter\nLaunch Jupyter and start a notebook server to start working with your notebooks.\n## Prerequisites\n- You have logged in to Red Hat OpenShift AI.\n- You know the names and values you want to use for any environment variables in your notebook server environment, for example, `AWS_SECRET_ACCESS_KEY`.\n- If you want to work with a very large data set, work with your administrator to proactively increase the storage capacity of your notebook server.\n## Procedure\n1. Locate the **Jupyter** card on the **Enabled applications** page.\n2. Click **Launch application**.\n\n\n i. If prompted, select your identity provider.\n\n ii. Enter your credentials and click **Log in** (or equivalent for your identity provider).\n\n If you see **Error 403: Forbidden**, you are not in the default user group or the default administrator group for OpenShift AI. Contact your administrator so that they can add you to the correct group using [Adding users for OpenShift AI](https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/managing_openshift_ai/managing-users-and-groups#adding-users-to-user-groups_managing-rhoai).\n\n3. Start a notebook server.\n\n\n This is not required if you have previously launched Jupyter.\n\n i. Select the **Notebook image** to use for your server.\n\n ii. If the notebook image contains multiple versions, select the version of the notebook image from the **Versions** section.\n\n iii. Select the **Container size** for your server.\n\n iv. Optional: Select the **Number of GPUs** (graphics processing units) for your server.\n\n v. Optional: Select and specify values for any new **Environment variables**.\n\n For example, if you plan to integrate with Red Hat OpenShift Streams for Apache Kafka, create environment variables to store your Kafka bootstrap server and the service account username and password here.\n\n The interface stores these variables so that you only need to enter them once. Example variable names for common environment variables are automatically provided for frequently integrated environments and frameworks, such as Amazon Web Services (AWS).\n\n vi. Click **Start server**.\n\n The **Starting server** progress indicator appears. If you encounter a problem during this process, an error message appears with more information. Click **Expand event log** to view additional information on the server creation process. Depending on the deployment size and resources you requested, starting the server can take up to several minutes.
After the server starts, the JupyterLab interface opens.\n\n## Verification\nThe JupyterLab interface opens in the same tab.', img: '\\nGroup.svg\\nCreated using Figma 0.90\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n', internalRoute: 'notebookController', kfdefApplications: ['odh-notebook-controller', 'notebook-images'], @@ -44,7 +44,7 @@ export const mockComponents = (): OdhApplication[] => [ displayName: 'RHOAI', docsLink: '', getStartedLink: - 'https://access.redhat.com/documentation/en-us/red_hat_openshift_ai_cloud_service/1/html/openshift_ai_tutorial_-_fraud_detection_example/index', + 'https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/openshift_ai_tutorial_-_fraud_detection_example/index', getStartedMarkDown: '', img: '', hidden: true, diff --git a/frontend/src/__mocks__/mockDocs.ts b/frontend/src/__mocks__/mockDocs.ts index b9d404a8c4..62f81d0b3b 100644 --- a/frontend/src/__mocks__/mockDocs.ts +++ b/frontend/src/__mocks__/mockDocs.ts @@ -17,7 +17,7 @@ export const mockDocs = (): OdhDocument[] => [ displayName: 'How to install Python packages on your notebook server', durationMinutes: 15, type: 'how-to', - url: 'https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html-single/working_on_data_science_projects/index#installing-python-packages-on-your-notebook-server_nb-server/index', + url: 'https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/working_with_connected_applications/using_the_jupyter_application#installing-python-packages-on-your-notebook-server_connected-apps', }, }, { @@ -32,11 +32,11 @@ export const mockDocs = (): OdhDocument[] => [ }, spec: { appName: 'jupyter', - description: 'Update the settings or the notebook image on your notebook server.', - displayName: 'How to update notebook server settings', + description: 'Manage the settings or the notebook image on your notebook server.', + displayName: 'How to manage notebook server settings', durationMinutes: 15, type: 'how-to', - url: 'https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html-single/how_to_update_notebook_server_settings/index', + url: 'https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/managing_resources/managing-notebook-servers_notebook-mgmt', }, }, { @@ -51,11 +51,11 @@ export const mockDocs = (): OdhDocument[] => [ }, spec: { appName: 'jupyter', - description: 'Connect to data in S3 Storage using environment variables.', - displayName: 'How to use data from Amazon S3 buckets', + description: 'Connect to data in S3 Storage.', + displayName: 'How to use data from an S3-compatible object store', durationMinutes: 15, type: 'how-to', - url: 'https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html-single/how_to_use_data_from_amazon_s3_buckets/index', + url: 'https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/working_with_data_in_an_s3-compatible_object_store/index', }, }, { @@ -74,7 +74,7 @@ export const mockDocs = (): OdhDocument[] => [ displayName: 'How to view installed packages on your notebook server', durationMinutes: 15, type: 'how-to', - url: 
'https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html-single/how_to_view_installed_packages_on_your_notebook_server/index', + url: 'https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/working_with_connected_applications/using_the_jupyter_application#viewing-python-packages-installed-on-your-notebook-server_connected-apps', }, }, ]; diff --git a/frontend/src/concepts/pipelines/content/InvalidArgoDeploymentAlert.tsx b/frontend/src/concepts/pipelines/content/InvalidArgoDeploymentAlert.tsx index 359b6da50a..675d184d43 100644 --- a/frontend/src/concepts/pipelines/content/InvalidArgoDeploymentAlert.tsx +++ b/frontend/src/concepts/pipelines/content/InvalidArgoDeploymentAlert.tsx @@ -5,10 +5,10 @@ import { useIsAreaAvailable, SupportedArea } from '~/concepts/areas'; import { ODH_PRODUCT_NAME } from '~/utilities/const'; const INVALID_ARGO_DEPLOYMENT_SELF_DOCUMENTATION_URL = - 'https://access.redhat.com/documentation/en-us/red_hat_openshift_ai_self-managed/2.9/html/release_notes/new-features-and-enhancements_relnotes'; + 'https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2-latest/html/working_with_data_science_pipelines/enabling-data-science-pipelines-2_ds-pipelines'; const INVALID_ARGO_DEPLOYMENT_CLOUD_DOCUMENTATION_URL = - 'https://access.redhat.com/documentation/en-us/red_hat_openshift_ai_cloud_service/1/html/release_notes/new-features-and-enhancements_relnotes'; + 'https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/working_with_data_science_pipelines/enabling-data-science-pipelines-2_ds-pipelines'; export const InvalidArgoDeploymentAlert: React.FC = () => { const [invalidArgoDeploymentAlertDismissed, setInvalidArgoDeploymentAlertDismissed] = diff --git a/frontend/src/pages/clusterSettings/TelemetrySettings.tsx b/frontend/src/pages/clusterSettings/TelemetrySettings.tsx index 9f864c3bdf..3651d47670 100644 --- a/frontend/src/pages/clusterSettings/TelemetrySettings.tsx +++ b/frontend/src/pages/clusterSettings/TelemetrySettings.tsx @@ -27,7 +27,7 @@ const TelemetrySettings: React.FC = ({ For more information see the{' '} documentation diff --git a/frontend/src/pages/notebookController/screens/admin/NotebookAdminControl.tsx b/frontend/src/pages/notebookController/screens/admin/NotebookAdminControl.tsx index 4411bac456..cffb040bae 100644 --- a/frontend/src/pages/notebookController/screens/admin/NotebookAdminControl.tsx +++ b/frontend/src/pages/notebookController/screens/admin/NotebookAdminControl.tsx @@ -73,7 +73,7 @@ const NotebookAdminControl: React.FC = () => { Create, delete, and manage permissions for {ODH_PRODUCT_NAME} users in OpenShift.{' '} diff --git a/manifests/common/apps/jupyter/jupyter-app.yaml b/manifests/common/apps/jupyter/jupyter-app.yaml index e08999eff0..1dc0e844a8 100644 --- a/manifests/common/apps/jupyter/jupyter-app.yaml +++ b/manifests/common/apps/jupyter/jupyter-app.yaml @@ -201,7 +201,7 @@ spec: ii. Enter your credentials and click **Log in** (or equivalent for your identity provider). - If you see **Error 403: Forbidden**, you are not in the default user group or the default administrator group for OpenShift AI. Contact your administrator so that they can add you to the correct group using [Adding users for Red Hat OpenShift AI](https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html/managing_users_and_user_resources/adding-users-for-openshift-data-science_useradd). 
+ If you see **Error 403: Forbidden**, you are not in the default user group or the default administrator group for OpenShift AI. Contact your administrator so that they can add you to the correct group using [Adding users for Red Hat OpenShift AI](https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/managing_openshift_ai/managing-users-and-groups#adding-users-to-user-groups_managing-rhoai). 3. Start a notebook server. diff --git a/manifests/common/apps/jupyter/jupyter-docs.yaml b/manifests/common/apps/jupyter/jupyter-docs.yaml index 0882278b75..f3a0bb8d10 100644 --- a/manifests/common/apps/jupyter/jupyter-docs.yaml +++ b/manifests/common/apps/jupyter/jupyter-docs.yaml @@ -10,7 +10,7 @@ spec: type: how-to description: |- Install additional python packages into your notebook server. - url: https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html-single/working_on_data_science_projects/index#installing-python-packages-on-your-notebook-server_nb-server/index + url: https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/working_with_connected_applications/using_the_jupyter_application#installing-python-packages-on-your-notebook-server_connected-apps durationMinutes: 15 --- apiVersion: dashboard.opendatahub.io/v1 @@ -25,7 +25,7 @@ spec: appName: jupyter description: |- Update the settings or the notebook image on your notebook server. - url: https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html-single/working_on_data_science_projects/index#updating-notebook-server-settings-by-restarting-your-server_nb-server/index + url: https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/managing_resources/managing-notebook-servers_notebook-mgmt durationMinutes: 15 --- apiVersion: dashboard.opendatahub.io/v1 @@ -35,12 +35,12 @@ metadata: annotations: opendatahub.io/categories: 'Data management,Notebook environments' spec: - displayName: How to use data from Amazon S3 buckets + displayName: How to use data from an S3-compatible object store appName: jupyter type: how-to description: |- - Connect to data in S3 Storage using environment variables. - url: https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html-single/integrating_data_from_amazon_s3/index + Connect to data in S3 Storage. + url: https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/working_with_data_in_an_s3-compatible_object_store/index durationMinutes: 15 --- apiVersion: dashboard.opendatahub.io/v1 @@ -55,5 +55,5 @@ spec: type: how-to description: |- See which packages are installed into your running notebook server. 
- url: https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html-single/working_on_data_science_projects/index#viewing-python-packages-installed-on-your-notebook-server_nb-server/index + url: https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/working_with_connected_applications/using_the_jupyter_application#viewing-python-packages-installed-on-your-notebook-server_connected-apps durationMinutes: 15 diff --git a/manifests/rhoai/addon/apps/nvidia/nvidia-app.yaml b/manifests/rhoai/addon/apps/nvidia/nvidia-app.yaml index 4801b5aa82..61bb7a670a 100644 --- a/manifests/rhoai/addon/apps/nvidia/nvidia-app.yaml +++ b/manifests/rhoai/addon/apps/nvidia/nvidia-app.yaml @@ -19,14 +19,14 @@ spec: csvName: gpu-operator-certified docsLink: https://docs.nvidia.com/datacenter/cloud-native/gpu-operator/openshift/contents.html quickStart: gpu-quickstart - getStartedLink: https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html/managing_users_and_user_resources/enabling-gpu-support-in-openshift-data-science_user-mgmt + getStartedLink: https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/managing_openshift_ai/enabling_accelerators#enabling-nvidia-gpus_managing-rhoai enable: - title: Enable NVIDIA GPU Add-on + title: Enable NVIDIA GPUs actionLabel: Enable description: Clicking enable adds the NVIDIA GPU Add-on card to the Enabled page linkPreface: |- Before enabling, be sure you have installed the NVIDIA GPU Add-on: - link: https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html/managing_users_and_user_resources/enabling-gpu-support-in-openshift-data-science_user-mgmt + link: https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/managing_openshift_ai/enabling_accelerators#enabling-nvidia-gpus_managing-rhoai validationConfigMap: nvidia-validation-result getStartedMarkDown: >- # NVIDIA GPU Add-on @@ -36,4 +36,4 @@ spec: The NVIDIA GPU Operator uses the operator framework within Kubernetes to automate the management of all NVIDIA software components needed to provision GPU. These components include the NVIDIA drivers (to enable CUDA), Kubernetes device plugin for GPUs, the NVIDIA Container Toolkit, automatic node labelling using GFD, DCGM based monitoring and others. Documentation : - [Enabling GPU Support in OpenShift AI](https://access.redhat.com/documentation/en-us/red_hat_openshift_data_science/1/html/managing_users_and_user_resources/enabling-gpu-support-in-openshift-data-science_user-mgmt) + [Enabling GPU Support in OpenShift AI](https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/managing_openshift_ai/enabling_accelerators#enabling-nvidia-gpus_managing-rhoai) diff --git a/manifests/rhoai/addon/apps/rhoam/deploy-model-rhoam-quickstart.yaml b/manifests/rhoai/addon/apps/rhoam/deploy-model-rhoam-quickstart.yaml index d99796d5a9..3ccfd0633c 100644 --- a/manifests/rhoai/addon/apps/rhoam/deploy-model-rhoam-quickstart.yaml +++ b/manifests/rhoai/addon/apps/rhoam/deploy-model-rhoam-quickstart.yaml @@ -19,7 +19,7 @@ spec: tasks: - title: Annotate your Python Model API for Service Discovery description: >- - Service Discovery is a 3scale feature that helps you import services from an OpenShift cluster. 
The Red Hat 3scale API Management instance provided by Red Hat OpenShift API Management has the [Service Discovery](https://access.redhat.com/documentation/en-us/red_hat_3scale_api_management/2.10/html/admin_portal_guide/service_discovery_from_openshift_to_3scale) feature enabled and pre-configured. + Service Discovery is a 3scale feature that helps you import services from an OpenShift cluster. The Red Hat 3scale API Management instance provided by Red Hat OpenShift API Management has the [Service Discovery](https://docs.redhat.com/en/documentation/red_hat_3scale_api_management/2.14/html/admin_portal_guide/service_discovery_from_openshift_to_3scale_api_management) feature enabled and pre-configured. When Service Discovery is configured, 3scale scans for discoverable API services that are running in the same OpenShift cluster and automatically imports the associated API definitions into 3scale. Additionally, 3scale can update the API integration and its specification, based on OpenAPI Specification (OAS), to resynchronize them with the cluster. diff --git a/manifests/rhoai/addon/apps/rhoam/rhoam-app.yaml b/manifests/rhoai/addon/apps/rhoam/rhoam-app.yaml index 0b5b97ccb1..f9f1100ded 100644 --- a/manifests/rhoai/addon/apps/rhoam/rhoam-app.yaml +++ b/manifests/rhoai/addon/apps/rhoam/rhoam-app.yaml @@ -23,7 +23,7 @@ spec: category: Red Hat managed support: redhat - docsLink: https://access.redhat.com/documentation/en-us/red_hat_openshift_api_management + docsLink: https://docs.redhat.com/en/documentation/red_hat_openshift_api_management quickStart: deploy-model-rhoam getStartedLink: https://console.redhat.com/openshift/details/#addOns getStartedMarkDown: >- @@ -37,4 +37,4 @@ spec: For more information visit: [https://www.redhat.com/en/technologies/cloud-computing/openshift/openshift-api-management](https://www.redhat.com/en/technologies/cloud-computing/openshift/openshift-api-management) - [https://access.redhat.com/documentation/en-us/red_hat_openshift_api_management](https://access.redhat.com/documentation/en-us/red_hat_openshift_api_management) + [https://docs.redhat.com/en/documentation/red_hat_openshift_api_management](https://docs.redhat.com/en/documentation/red_hat_openshift_api_management) diff --git a/manifests/rhoai/onprem/apps/starburst-enterprise/starburstenterprise-app.yaml b/manifests/rhoai/onprem/apps/starburst-enterprise/starburstenterprise-app.yaml index 7aae61c2bb..f839069c8a 100644 --- a/manifests/rhoai/onprem/apps/starburst-enterprise/starburstenterprise-app.yaml +++ b/manifests/rhoai/onprem/apps/starburst-enterprise/starburstenterprise-app.yaml @@ -30,7 +30,7 @@ spec: - Previously installed and configured Kubernetes, including access to **kubectl**. - An editor suitable for editing YAML files. - Your SEP (Starburst Enterprise) license file. 
- - The latest OpenShift Container Platform (OCP) client for your platform as described in the [OpenShift documentation](https://docs.openshift.com/container-platform/latest/cli_reference/openshift_cli/getting-started-cli.html) and the **oc** executable copied into your path, usually **/usr/local/bin.** + - The latest OpenShift Container Platform (OCP) client for your platform as described in the [OpenShift documentation](https://docs.redhat.com/en/documentation/openshift_container_platform/4.16/html/cli_tools/openshift-cli-oc) and the **oc** executable copied into your path, usually **/usr/local/bin.** ## Installation diff --git a/manifests/rhoai/shared/apps/rhoai/rhoai-app.yaml b/manifests/rhoai/shared/apps/rhoai/rhoai-app.yaml index c5610c74b4..8b3f1fd1e8 100644 --- a/manifests/rhoai/shared/apps/rhoai/rhoai-app.yaml +++ b/manifests/rhoai/shared/apps/rhoai/rhoai-app.yaml @@ -7,7 +7,7 @@ metadata: spec: img: >- - getStartedLink: 'https://access.redhat.com/documentation/en-us/red_hat_openshift_ai_cloud_service/1/html/openshift_ai_tutorial_-_fraud_detection_example/index' + getStartedLink: 'https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/openshift_ai_tutorial_-_fraud_detection_example/index' displayName: Red Hat OpenShift AI support: red hat provider: Red Hat diff --git a/manifests/rhoai/shared/apps/rhoai/rhoai-docs.yaml b/manifests/rhoai/shared/apps/rhoai/rhoai-docs.yaml index 9cdcc40ad6..4ada36f109 100644 --- a/manifests/rhoai/shared/apps/rhoai/rhoai-docs.yaml +++ b/manifests/rhoai/shared/apps/rhoai/rhoai-docs.yaml @@ -10,7 +10,7 @@ spec: type: tutorial description: |- Use OpenShift AI to train an example model in a Jupyter notebook, deploy the model, integrate the model into a fraud detection application, and refine the model by using automated pipelines. - url: https://access.redhat.com/documentation/en-us/red_hat_openshift_ai_cloud_service/1/html-single/openshift_ai_tutorial_-_fraud_detection_example/ + url: https://docs.redhat.com/en/documentation/red_hat_openshift_ai_cloud_service/1/html/openshift_ai_tutorial_-_fraud_detection_example/index durationMinutes: 60 --- apiVersion: dashboard.opendatahub.io/v1 @@ -26,7 +26,7 @@ spec: description: |- OpenShift AI provides an environment to develop, train, serve, test, and monitor AI/ML models on-premises or in the cloud. - url: 'https://access.redhat.com/documentation/en-us/red_hat_openshift_ai/2024' + url: 'https://docs.redhat.com/en/documentation/red_hat_openshift_ai/2024' img: >-