diff --git a/notebooks/en/automatic_embedding_tei_inference_endpoints.ipynb b/notebooks/en/automatic_embedding_tei_inference_endpoints.ipynb index 23a287b5..f70f1903 100644 --- a/notebooks/en/automatic_embedding_tei_inference_endpoints.ipynb +++ b/notebooks/en/automatic_embedding_tei_inference_endpoints.ipynb @@ -113,9 +113,10 @@ "id": "1e680f3d-4900-46cc-8b49-bb6ba3e27e2b", "metadata": {}, "source": [ - "Hugging Face offers a number of GPUs that you can choose from a number of GPUs that you can choose in Inference Endpoints. Check the [original documentation](https://huggingface.co/docs/inference-endpoints/en/pricing#gpu-instances) for GPU and alternative accelerators for information.\n", + "Inference Endpoints offers a number of GPUs that you can choose from. Check the [documentation](https://huggingface.co/docs/inference-endpoints/en/pricing#gpu-instances) for information on GPUs and alternative accelerators.\n", "\n", - "\\*Note that for some architectures you might get a note to email us to get access.\n", + "> [!TIP]\n", + "> You may need to email us for access to some architectures.\n", "\n", "| Provider | Instance Type | Instance Size | Hourly rate | GPUs | Memory | Architecture |\n", "|:--------:|:-------------:|:-------------:|:-----------:|:----:|:------:|:---------------:|\n",