diff --git a/Directory.Packages.props b/Directory.Packages.props
index 94fcf5f456..1b34a01a14 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -69,8 +69,9 @@
-
-
+
+
+
@@ -87,7 +88,7 @@
-
+
diff --git a/aspire/AppHost.cs b/aspire/AppHost.cs
index b5dd60527a..6287569def 100644
--- a/aspire/AppHost.cs
+++ b/aspire/AppHost.cs
@@ -2,125 +2,173 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
-
-using ConsoleAppFramework;
using Elastic.Documentation;
+using Nullean.Argh;
using static Elastic.Documentation.Aspire.ResourceNames;
-GlobalCli.Process(ref args, out _, out var globalArguments);
+// Extract global doc-builder flags before argh routing so they can be forwarded
+// to docs-builder sub-process invocations (--log-level, --config-source, etc.).
+AspireHost.GlobalArguments = AspireHost.ExtractGlobalArgs(ref args);
-await ConsoleApp.RunAsync(args, BuildAspireHost);
-return;
+var app = new ArghApp();
+app.MapRoot(AspireHost.Run);
+return await app.RunAsync(args);
-// ReSharper disable once RedundantLambdaParameterType
-// ReSharper disable once VariableHidesOuterVariable
-async Task BuildAspireHost(bool startElasticsearch, bool assumeCloned, bool assumeBuild, bool skipPrivateRepositories, Cancel ctx)
-{
- var builder = DistributedApplication.CreateBuilder(args);
-
- var llmUrl = builder.AddParameter("LlmGatewayUrl", secret: true);
- var llmServiceAccountPath = builder.AddParameter("LlmGatewayServiceAccountPath", secret: true);
-
- var elasticsearchUrl = builder.AddParameter("DocumentationElasticUrl", secret: true);
- var elasticsearchApiKey = builder.AddParameter("DocumentationElasticApiKey", secret: true);
-
- var cloneAll = builder.AddProject(AssemblerClone);
- string[] cloneArgs = assumeCloned ? ["--assume-cloned"] : [];
- cloneAll = cloneAll.WithArgs(["assembler", "clone", .. globalArguments, .. cloneArgs]);
-
- var buildAll = builder.AddProject(AssemblerBuild);
- string[] buildArgs = assumeBuild ? ["--assume-build"] : [];
- buildAll = buildAll
- .WithArgs(["assembler", "build", .. globalArguments, .. buildArgs])
- .WaitForCompletion(cloneAll)
- .WithParentRelationship(cloneAll);
-
- var elasticsearchLocal = builder.AddElasticsearch(ElasticsearchLocal)
- .WithEnvironment("LICENSE", "trial");
- if (!startElasticsearch)
- elasticsearchLocal = elasticsearchLocal.WithExplicitStart();
-
- var elasticsearchRemote = builder.AddExternalService(ElasticsearchRemote, elasticsearchUrl);
-
- var api = builder.AddProject(Api)
- .WithArgs(globalArguments)
- .WithEnvironment("ENVIRONMENT", "dev")
- .WithEnvironment("LLM_GATEWAY_FUNCTION_URL", llmUrl)
- .WithEnvironment("LLM_GATEWAY_SERVICE_ACCOUNT_KEY_PATH", llmServiceAccountPath);
-
- // ReSharper disable once RedundantAssignment
- api = startElasticsearch
- ? api
- .WithReference(elasticsearchLocal)
- .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchLocal.GetEndpoint("http"))
- .WithEnvironment(context => context.EnvironmentVariables["DOCUMENTATION_ELASTIC_PASSWORD"] = elasticsearchLocal.Resource.PasswordParameter)
- .WithParentRelationship(elasticsearchLocal)
- .WaitFor(elasticsearchLocal)
- .WithExplicitStart()
- : api.WithReference(elasticsearchRemote)
- .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchUrl)
- .WithEnvironment("DOCUMENTATION_ELASTIC_APIKEY", elasticsearchApiKey)
- .WithExplicitStart();
+// ── Aspire host command ───────────────────────────────────────────────────────────────────────────
- var mcp = builder.AddProject(RemoteMcp)
- .WithArgs(globalArguments)
- .WithEnvironment("ENVIRONMENT", "dev");
-
- // ReSharper disable once RedundantAssignment
- mcp = startElasticsearch
- ? mcp
- .WithReference(elasticsearchLocal)
- .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchLocal.GetEndpoint("http"))
- .WithEnvironment(context => context.EnvironmentVariables["DOCUMENTATION_ELASTIC_PASSWORD"] = elasticsearchLocal.Resource.PasswordParameter)
- .WithParentRelationship(elasticsearchLocal)
- .WaitFor(elasticsearchLocal)
- .WithExplicitStart()
- : mcp.WithReference(elasticsearchRemote)
- .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchUrl)
- .WithEnvironment("DOCUMENTATION_ELASTIC_APIKEY", elasticsearchApiKey)
+internal static class AspireHost
+{
+ internal static string[] GlobalArguments = [];
+
+ /// <summary>
+ /// Starts the Elastic documentation Aspire AppHost.
+ /// </summary>
+ /// <param name="startElasticsearch">Start a local Elasticsearch container</param>
+ /// <param name="assumeCloned">Skip cloning; assume repositories are already present on disk</param>
+ /// <param name="assumeBuild">Skip building; assume build output already exists</param>
+ /// <param name="skipPrivateRepositories">Skip cloning private repositories</param>
+ [NoOptionsInjection]
+ internal static async Task Run(
+ bool startElasticsearch = false,
+ bool assumeCloned = false,
+ bool assumeBuild = false,
+ bool skipPrivateRepositories = false,
+ CancellationToken ct = default)
+ {
+ var builder = DistributedApplication.CreateBuilder();
+
+ var llmUrl = builder.AddParameter("LlmGatewayUrl", secret: true);
+ var llmServiceAccountPath = builder.AddParameter("LlmGatewayServiceAccountPath", secret: true);
+
+ var elasticsearchUrl = builder.AddParameter("DocumentationElasticUrl", secret: true);
+ var elasticsearchApiKey = builder.AddParameter("DocumentationElasticApiKey", secret: true);
+
+ var cloneAll = builder.AddProject(AssemblerClone);
+ string[] cloneArgs = assumeCloned ? ["--assume-cloned"] : [];
+ cloneAll = cloneAll.WithArgs(["assembler", "clone", .. GlobalArguments, .. cloneArgs]);
+
+ var buildAll = builder.AddProject(AssemblerBuild);
+ string[] buildArgs = assumeBuild ? ["--assume-build"] : [];
+ buildAll = buildAll
+ .WithArgs(["assembler", "build", .. GlobalArguments, .. buildArgs])
+ .WaitForCompletion(cloneAll)
+ .WithParentRelationship(cloneAll);
+
+ var elasticsearchLocal = builder.AddElasticsearch(ElasticsearchLocal)
+ .WithEnvironment("LICENSE", "trial");
+ if (!startElasticsearch)
+ elasticsearchLocal = elasticsearchLocal.WithExplicitStart();
+
+ var elasticsearchRemote = builder.AddExternalService(ElasticsearchRemote, elasticsearchUrl);
+
+ var api = builder.AddProject(Api)
+ .WithArgs(GlobalArguments)
+ .WithEnvironment("ENVIRONMENT", "dev")
+ .WithEnvironment("LLM_GATEWAY_FUNCTION_URL", llmUrl)
+ .WithEnvironment("LLM_GATEWAY_SERVICE_ACCOUNT_KEY_PATH", llmServiceAccountPath);
+
+ // ReSharper disable once RedundantAssignment
+ api = startElasticsearch
+ ? api
+ .WithReference(elasticsearchLocal)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchLocal.GetEndpoint("http"))
+ .WithEnvironment(context => context.EnvironmentVariables["DOCUMENTATION_ELASTIC_PASSWORD"] = elasticsearchLocal.Resource.PasswordParameter)
+ .WithParentRelationship(elasticsearchLocal)
+ .WaitFor(elasticsearchLocal)
+ .WithExplicitStart()
+ : api.WithReference(elasticsearchRemote)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchUrl)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_APIKEY", elasticsearchApiKey)
+ .WithExplicitStart();
+
+ var mcp = builder.AddProject(RemoteMcp)
+ .WithArgs(GlobalArguments)
+ .WithEnvironment("ENVIRONMENT", "dev");
+
+ // ReSharper disable once RedundantAssignment
+ mcp = startElasticsearch
+ ? mcp
+ .WithReference(elasticsearchLocal)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchLocal.GetEndpoint("http"))
+ .WithEnvironment(context => context.EnvironmentVariables["DOCUMENTATION_ELASTIC_PASSWORD"] = elasticsearchLocal.Resource.PasswordParameter)
+ .WithParentRelationship(elasticsearchLocal)
+ .WaitFor(elasticsearchLocal)
+ .WithExplicitStart()
+ : mcp.WithReference(elasticsearchRemote)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchUrl)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_APIKEY", elasticsearchApiKey)
+ .WithExplicitStart();
+
+ var indexElasticsearch = builder.AddProject(ElasticsearchIngest)
+ .WithArgs(["assembler", "index", .. GlobalArguments])
+ .WaitForCompletion(cloneAll)
.WithExplicitStart();
- var indexElasticsearch = builder.AddProject(ElasticsearchIngest)
- .WithArgs(["assembler", "index", .. globalArguments])
- .WaitForCompletion(cloneAll)
- .WithExplicitStart();
-
- // ReSharper disable once RedundantAssignment
- indexElasticsearch = startElasticsearch
- ? indexElasticsearch
- .WaitFor(elasticsearchLocal)
- .WithReference(elasticsearchLocal)
- .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchLocal.GetEndpoint("http"))
- .WithEnvironment(context => context.EnvironmentVariables["DOCUMENTATION_ELASTIC_PASSWORD"] = elasticsearchLocal.Resource.PasswordParameter)
- .WithParentRelationship(elasticsearchLocal)
- : indexElasticsearch
- .WithReference(elasticsearchRemote)
- .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchUrl)
- .WithEnvironment("DOCUMENTATION_ELASTIC_APIKEY", elasticsearchApiKey)
- .WithParentRelationship(elasticsearchRemote);
-
- var serveStatic = builder.AddProject(AssemblerServe)
- .WithEnvironment("LLM_GATEWAY_FUNCTION_URL", llmUrl)
- .WithEnvironment("LLM_GATEWAY_SERVICE_ACCOUNT_KEY_PATH", llmServiceAccountPath)
- .WithHttpEndpoint(port: 4000, isProxied: false)
- .WithArgs(["assembler", "serve", .. globalArguments])
- .WithHttpHealthCheck("/", 200)
- .WaitForCompletion(buildAll)
- .WithParentRelationship(cloneAll);
-
- serveStatic = startElasticsearch
- ? serveStatic
- .WithReference(elasticsearchLocal)
- .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchLocal.GetEndpoint("http"))
- .WithEnvironment(context => context.EnvironmentVariables["DOCUMENTATION_ELASTIC_PASSWORD"] = elasticsearchLocal.Resource.PasswordParameter)
- : serveStatic
- .WithReference(elasticsearchRemote)
- .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchUrl)
- .WithEnvironment("DOCUMENTATION_ELASTIC_APIKEY", elasticsearchApiKey);
-
-
- // ReSharper disable once RedundantAssignment
- serveStatic = startElasticsearch ? serveStatic.WaitFor(elasticsearchLocal) : serveStatic.WaitFor(buildAll);
-
- await builder.Build().RunAsync(ctx);
+ // ReSharper disable once RedundantAssignment
+ indexElasticsearch = startElasticsearch
+ ? indexElasticsearch
+ .WaitFor(elasticsearchLocal)
+ .WithReference(elasticsearchLocal)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchLocal.GetEndpoint("http"))
+ .WithEnvironment(context => context.EnvironmentVariables["DOCUMENTATION_ELASTIC_PASSWORD"] = elasticsearchLocal.Resource.PasswordParameter)
+ .WithParentRelationship(elasticsearchLocal)
+ : indexElasticsearch
+ .WithReference(elasticsearchRemote)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchUrl)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_APIKEY", elasticsearchApiKey)
+ .WithParentRelationship(elasticsearchRemote);
+
+ var serveStatic = builder.AddProject(AssemblerServe)
+ .WithEnvironment("LLM_GATEWAY_FUNCTION_URL", llmUrl)
+ .WithEnvironment("LLM_GATEWAY_SERVICE_ACCOUNT_KEY_PATH", llmServiceAccountPath)
+ .WithHttpEndpoint(port: 4000, isProxied: false)
+ .WithArgs(["assembler", "serve", .. GlobalArguments])
+ .WithHttpHealthCheck("/", 200)
+ .WaitForCompletion(buildAll)
+ .WithParentRelationship(cloneAll);
+
+ serveStatic = startElasticsearch
+ ? serveStatic
+ .WithReference(elasticsearchLocal)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchLocal.GetEndpoint("http"))
+ .WithEnvironment(context => context.EnvironmentVariables["DOCUMENTATION_ELASTIC_PASSWORD"] = elasticsearchLocal.Resource.PasswordParameter)
+ : serveStatic
+ .WithReference(elasticsearchRemote)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_URL", elasticsearchUrl)
+ .WithEnvironment("DOCUMENTATION_ELASTIC_APIKEY", elasticsearchApiKey);
+
+ // ReSharper disable once RedundantAssignment
+ serveStatic = startElasticsearch ? serveStatic.WaitFor(elasticsearchLocal) : serveStatic.WaitFor(buildAll);
+
+ await builder.Build().RunAsync(ct);
+ }
+
+ /// <summary>
+ /// Extracts global doc-builder flags (--log-level, --config-source, --skip-private-repositories)
+ /// from <paramref name="args"/> in-place, returning them for forwarding to docs-builder sub-processes.
+ /// </summary>
+ internal static string[] ExtractGlobalArgs(ref string[] args)
+ {
+ var global = new List<string>();
+ var remaining = new List<string>();
+ for (var i = 0; i < args.Length; i++)
+ {
+ if (args[i] == "--log-level" && i + 1 < args.Length)
+ {
+ global.Add("--log-level");
+ global.Add(args[++i]);
+ }
+ else if (args[i] is "--config-source" or "--configuration-source" or "-c" && i + 1 < args.Length)
+ {
+ global.Add("--config-source");
+ global.Add(args[++i]);
+ }
+ else if (args[i] == "--skip-private-repositories")
+ global.Add("--skip-private-repositories");
+ else
+ remaining.Add(args[i]);
+ }
+ args = [.. remaining];
+ return [.. global];
+ }
}
diff --git a/aspire/aspire.csproj b/aspire/aspire.csproj
index 93a127ff2b..5c1073d0e9 100644
--- a/aspire/aspire.csproj
+++ b/aspire/aspire.csproj
@@ -10,14 +10,11 @@
72f50f33-6fb9-4d08-bff3-39568fe370b3
false
Elastic.Documentation.Aspire
- IDE0350
+ IDE0350;IDE0060
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
+
diff --git a/src/Elastic.Documentation.Configuration/DocumentationEndpoints.cs b/src/Elastic.Documentation.Configuration/DocumentationEndpoints.cs
index dec6e05b98..b93770d15c 100644
--- a/src/Elastic.Documentation.Configuration/DocumentationEndpoints.cs
+++ b/src/Elastic.Documentation.Configuration/DocumentationEndpoints.cs
@@ -58,7 +58,7 @@ public class ElasticsearchEndpoint
public bool DisableSslVerification { get; set; }
public X509Certificate? Certificate { get; set; }
public bool CertificateIsNotRoot { get; set; }
- public int? BootstrapTimeout { get; set; }
+ public TimeSpan? BootstrapTimeout { get; set; }
public bool ForceReindex { get; set; }
///
diff --git a/src/Elastic.Documentation.Configuration/Elastic.Documentation.Configuration.csproj b/src/Elastic.Documentation.Configuration/Elastic.Documentation.Configuration.csproj
index 0f96c00f34..8796dc5fd7 100644
--- a/src/Elastic.Documentation.Configuration/Elastic.Documentation.Configuration.csproj
+++ b/src/Elastic.Documentation.Configuration/Elastic.Documentation.Configuration.csproj
@@ -7,6 +7,8 @@
$(NoWarn);CS0618
true
+ <GenerateDocumentationFile>true</GenerateDocumentationFile>
+ <NoWarn>$(NoWarn);CS1591;CS1573;CS1572;CS1571;CS1570;CS1574</NoWarn>
@@ -16,6 +18,7 @@
+
diff --git a/src/Elastic.Documentation.Configuration/ElasticsearchEndpointConfigurator.cs b/src/Elastic.Documentation.Configuration/ElasticsearchEndpointConfigurator.cs
index d48a6e6a11..f964a4c2b1 100644
--- a/src/Elastic.Documentation.Configuration/ElasticsearchEndpointConfigurator.cs
+++ b/src/Elastic.Documentation.Configuration/ElasticsearchEndpointConfigurator.cs
@@ -2,59 +2,104 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
+using System.ComponentModel.DataAnnotations;
using System.IO.Abstractions;
using System.Security.Cryptography.X509Certificates;
using Elastic.Documentation.Diagnostics;
+using Nullean.Argh;
namespace Elastic.Documentation.Configuration;
///
-/// Options record for configuring an Elasticsearch endpoint from CLI arguments.
-/// Shared by all index commands (isolated, assembler, codex).
+/// Elasticsearch connection and indexing options shared by all index commands.
+/// Bind from CLI flags via argh [AsParameters].
///
public record ElasticsearchIndexOptions
{
- // endpoint options
- public string? Endpoint { get; init; }
+ // --- endpoint ---
+
+ /// -es,--endpoint, Elasticsearch endpoint URL. Falls back to env DOCUMENTATION_ELASTIC_URL.
+ [Url]
+ public Uri? Endpoint { get; init; }
+
+ /// API key for authentication. Falls back to env DOCUMENTATION_ELASTIC_APIKEY.
public string? ApiKey { get; init; }
+
+ /// Username for basic authentication. Falls back to env DOCUMENTATION_ELASTIC_USERNAME.
public string? Username { get; init; }
+
+ /// Password for basic authentication. Falls back to env DOCUMENTATION_ELASTIC_PASSWORD.
public string? Password { get; init; }
- // inference options
- public bool? NoAiEnrichment { get; init; }
+ // --- inference ---
+
+ /// Enable AI enrichment of documents using LLM-generated metadata (enabled by default).
+ public bool? AiEnrichment { get; init; }
+
+ /// Number of search threads for the inference endpoint.
+ [Range(1, 128)]
public int? SearchNumThreads { get; init; }
+
+ /// Number of index threads for the inference endpoint.
+ [Range(1, 128)]
public int? IndexNumThreads { get; init; }
- public bool? NoEis { get; init; }
- public int? BootstrapTimeout { get; init; }
- // index options
+ /// Use the Elastic Inference Service to bootstrap the inference endpoint (enabled by default).
+ public bool? Eis { get; init; }
+
+ /// How long to wait for the inference endpoint to become ready (e.g. 4m, 90s).
+ [TimeSpanRange("1s", "60m")]
+ public TimeSpan? BootstrapTimeout { get; init; }
+
+ // --- index behavior ---
+
+ /// Force a full reindex, discarding any incremental state.
public bool? ForceReindex { get; init; }
- // channel buffer options
+ /// Number of documents per bulk request.
+ [Range(1, 10_000)]
public int? BufferSize { get; init; }
+
+ /// Number of retry attempts for failed bulk items.
+ [Range(0, 20)]
public int? MaxRetries { get; init; }
- // connection options
+ /// Log every Elasticsearch request and response body; append ?pretty to all requests.
public bool? DebugMode { get; init; }
- public string? ProxyAddress { get; init; }
- public string? ProxyPassword { get; init; }
+
+ // --- proxy ---
+
+ /// Route requests through this proxy URL.
+ [Url]
+ public Uri? ProxyAddress { get; init; }
+
+ /// Proxy server username.
public string? ProxyUsername { get; init; }
- // certificate options
+ /// Proxy server password.
+ public string? ProxyPassword { get; init; }
+
+ // --- certificate ---
+
+ /// Disable SSL certificate validation. Use only in controlled environments.
public bool? DisableSslVerification { get; init; }
+
+ /// SHA-256 fingerprint of a self-signed server certificate.
public string? CertificateFingerprint { get; init; }
- public string? CertificatePath { get; init; }
+
+ /// Path to a PEM or DER certificate file for SSL validation.
+ [Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "pem,der,crt,cer")]
+ public FileInfo? CertificatePath { get; init; }
+
+ /// Set when the certificate is an intermediate CA rather than the root.
public bool? CertificateNotRoot { get; init; }
}
///
-/// Applies CLI options to an <see cref="ElasticsearchEndpoint"/>. Shared by all index commands.
+/// Applies <see cref="ElasticsearchIndexOptions"/> to an <see cref="ElasticsearchEndpoint"/>. Shared by all index commands.
///
public static class ElasticsearchEndpointConfigurator
{
- /// <summary>
- /// Applies the given options to the Elasticsearch endpoint configuration.
- /// </summary>
public static async Task ApplyAsync(
ElasticsearchEndpoint cfg,
ElasticsearchIndexOptions options,
@@ -62,13 +107,8 @@ public static async Task ApplyAsync(
IFileSystem fileSystem,
Cancel ctx)
{
- if (!string.IsNullOrEmpty(options.Endpoint))
- {
- if (!Uri.TryCreate(options.Endpoint, UriKind.Absolute, out var uri))
- collector.EmitGlobalError($"'{options.Endpoint}' is not a valid URI");
- else
- cfg.Uri = uri;
- }
+ if (options.Endpoint is not null)
+ cfg.Uri = options.Endpoint;
if (!string.IsNullOrEmpty(options.ApiKey))
cfg.ApiKey = options.ApiKey;
@@ -81,8 +121,8 @@ public static async Task ApplyAsync(
cfg.SearchNumThreads = options.SearchNumThreads.Value;
if (options.IndexNumThreads.HasValue)
cfg.IndexNumThreads = options.IndexNumThreads.Value;
- if (options.NoEis.HasValue)
- cfg.NoElasticInferenceService = options.NoEis.Value;
+ if (options.Eis.HasValue)
+ cfg.NoElasticInferenceService = !options.Eis.Value;
if (options.BufferSize.HasValue)
cfg.BufferSize = options.BufferSize.Value;
if (options.MaxRetries.HasValue)
@@ -91,30 +131,32 @@ public static async Task ApplyAsync(
cfg.DebugMode = options.DebugMode.Value;
if (!string.IsNullOrEmpty(options.CertificateFingerprint))
cfg.CertificateFingerprint = options.CertificateFingerprint;
- if (!string.IsNullOrEmpty(options.ProxyAddress))
- cfg.ProxyAddress = options.ProxyAddress;
+ if (options.ProxyAddress is not null)
+ cfg.ProxyAddress = options.ProxyAddress.ToString();
if (!string.IsNullOrEmpty(options.ProxyPassword))
cfg.ProxyPassword = options.ProxyPassword;
if (!string.IsNullOrEmpty(options.ProxyUsername))
cfg.ProxyUsername = options.ProxyUsername;
if (options.DisableSslVerification.HasValue)
cfg.DisableSslVerification = options.DisableSslVerification.Value;
- if (!string.IsNullOrEmpty(options.CertificatePath))
+ if (options.CertificatePath is not null)
{
- if (!fileSystem.File.Exists(options.CertificatePath))
- collector.EmitGlobalError($"'{options.CertificatePath}' does not exist");
- var bytes = await fileSystem.File.ReadAllBytesAsync(options.CertificatePath, ctx);
- var loader = X509CertificateLoader.LoadCertificate(bytes);
- cfg.Certificate = loader;
+ if (!fileSystem.File.Exists(options.CertificatePath.FullName))
+ {
+ collector.EmitGlobalError($"'{options.CertificatePath.FullName}' does not exist");
+ }
+ else
+ {
+ var bytes = await fileSystem.File.ReadAllBytesAsync(options.CertificatePath.FullName, ctx);
+ cfg.Certificate = X509CertificateLoader.LoadCertificate(bytes);
+ }
}
-
if (options.CertificateNotRoot.HasValue)
cfg.CertificateIsNotRoot = options.CertificateNotRoot.Value;
if (options.BootstrapTimeout.HasValue)
cfg.BootstrapTimeout = options.BootstrapTimeout.Value;
-
- if (options.NoAiEnrichment == true)
- cfg.EnableAiEnrichment = false;
+ if (options.AiEnrichment.HasValue)
+ cfg.EnableAiEnrichment = options.AiEnrichment.Value;
if (options.ForceReindex.HasValue)
cfg.ForceReindex = options.ForceReindex.Value;
}
diff --git a/src/Elastic.Documentation.ServiceDefaults/AppDefaultsExtensions.cs b/src/Elastic.Documentation.ServiceDefaults/AppDefaultsExtensions.cs
index 4b3a497eb0..2bac3679a9 100644
--- a/src/Elastic.Documentation.ServiceDefaults/AppDefaultsExtensions.cs
+++ b/src/Elastic.Documentation.ServiceDefaults/AppDefaultsExtensions.cs
@@ -2,6 +2,7 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
+using Elastic.Documentation;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.LegacyUrlMappings;
using Elastic.Documentation.Configuration.Products;
@@ -18,20 +19,19 @@ namespace Elastic.Documentation.ServiceDefaults;
public static class AppDefaultsExtensions
{
- public static TBuilder AddDocumentationServiceDefaults<TBuilder>(this TBuilder builder) where TBuilder : IHostApplicationBuilder
- {
- var args = Array.Empty<string>();
- return builder.AddDocumentationServiceDefaults(ref args);
- }
- public static TBuilder AddDocumentationServiceDefaults(this TBuilder builder, ref string[] args, Action? configure = null)
+ public static TBuilder AddDocumentationServiceDefaults<TBuilder>(this TBuilder builder)
+ where TBuilder : IHostApplicationBuilder => builder.AddDocumentationServiceDefaults(new GlobalCliOptions(), null);
+
+ public static TBuilder AddDocumentationServiceDefaults(this TBuilder builder, Action? configure)
+ where TBuilder : IHostApplicationBuilder => builder.AddDocumentationServiceDefaults(new GlobalCliOptions(), configure);
+
+ public static TBuilder AddDocumentationServiceDefaults(this TBuilder builder, GlobalCliOptions cliOptions, Action? configure = null)
where TBuilder : IHostApplicationBuilder
{
- GlobalCli.Process(ref args, out var globalArgs);
-
var services = builder.Services;
- _ = builder.Services.AddElasticDocumentationLogging(globalArgs.LogLevel, noConsole: globalArgs.IsMcp);
+ _ = services.AddElasticDocumentationLogging(cliOptions.LogLevel);
_ = services
- .AddConfigurationFileProvider(globalArgs.SkipPrivateRepositories, globalArgs.ConfigurationSource, (s, p) =>
+ .AddConfigurationFileProvider(cliOptions.SkipPrivateRepositories, cliOptions.ConfigSource, (s, p) =>
{
var versionConfiguration = p.CreateVersionConfiguration();
var products = p.CreateProducts(versionConfiguration);
@@ -42,8 +42,7 @@ public static TBuilder AddDocumentationServiceDefaults(this TBuilder b
_ = s.AddSingleton(search);
configure?.Invoke(s, p);
});
- _ = builder.Services.AddElasticDocumentationLogging(globalArgs.LogLevel, noConsole: globalArgs.IsMcp);
- _ = services.AddSingleton(globalArgs);
+ _ = services.AddSingleton(cliOptions);
var endpoints = ElasticsearchEndpointFactory.Create(builder.Configuration);
_ = services.AddSingleton(endpoints);
@@ -51,19 +50,15 @@ public static TBuilder AddDocumentationServiceDefaults(this TBuilder b
return builder.AddServiceDefaults();
}
- public static TServiceCollection AddElasticDocumentationLogging<TServiceCollection>(this TServiceCollection services, LogLevel logLevel, bool noConsole = false)
+ public static TServiceCollection AddElasticDocumentationLogging<TServiceCollection>(this TServiceCollection services, LogLevel logLevel)
where TServiceCollection : IServiceCollection
{
_ = services.AddLogging(x =>
{
_ = x.ClearProviders().SetMinimumLevel(logLevel);
- if (!noConsole)
- {
- services.TryAddEnumerable(ServiceDescriptor.Singleton());
- _ = x.AddConsole(c => c.FormatterName = "condensed");
- }
+ services.TryAddEnumerable(ServiceDescriptor.Singleton());
+ _ = x.AddConsole(c => c.FormatterName = "condensed");
});
return services;
}
-
}
diff --git a/src/Elastic.Documentation/Elastic.Documentation.csproj b/src/Elastic.Documentation/Elastic.Documentation.csproj
index 89463de94f..9720963606 100644
--- a/src/Elastic.Documentation/Elastic.Documentation.csproj
+++ b/src/Elastic.Documentation/Elastic.Documentation.csproj
@@ -6,6 +6,8 @@
enable
Elastic.Documentation
true
+ <GenerateDocumentationFile>true</GenerateDocumentationFile>
+ <NoWarn>$(NoWarn);CS1591;CS1573;CS1572;CS1571;CS1570;CS1574;CS0419</NoWarn>
diff --git a/src/Elastic.Documentation/GlobalCliOptions.cs b/src/Elastic.Documentation/GlobalCliOptions.cs
new file mode 100644
index 0000000000..cc0af96f8b
--- /dev/null
+++ b/src/Elastic.Documentation/GlobalCliOptions.cs
@@ -0,0 +1,22 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using Microsoft.Extensions.Logging;
+
+namespace Elastic.Documentation;
+
+/// <summary>
+/// Global CLI options available to every command via argh's first-parameter injection.
+/// </summary>
+public class GlobalCliOptions
+{
+ /// -l,--log-level, Minimum log level. Default: information
+ public LogLevel LogLevel { get; set; } = LogLevel.Information;
+
+ /// -c,--config-source, Override the configuration source: local, remote
+ public ConfigurationSource? ConfigSource { get; set; }
+
+ /// Skip cloning private repositories
+ public bool SkipPrivateRepositories { get; set; }
+}
diff --git a/src/Elastic.Documentation/GlobalCommandLine.cs b/src/Elastic.Documentation/GlobalCommandLine.cs
deleted file mode 100644
index b0ed15ff97..0000000000
--- a/src/Elastic.Documentation/GlobalCommandLine.cs
+++ /dev/null
@@ -1,82 +0,0 @@
-// Licensed to Elasticsearch B.V under one or more agreements.
-// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
-// See the LICENSE file in the project root for more information
-
-using Microsoft.Extensions.Logging;
-
-namespace Elastic.Documentation;
-
-public record GlobalCliArgs
-{
- public LogLevel LogLevel { get; init; } = LogLevel.Information;
- public ConfigurationSource? ConfigurationSource { get; init; }
- public bool SkipPrivateRepositories { get; init; }
- public bool IsHelpOrVersion { get; init; }
- public bool IsMcp { get; init; }
-}
-public static class GlobalCli
-{
- public static void Process(ref string[] args, out GlobalCliArgs cli) => Process(ref args, out cli, out _);
- public static void Process(ref string[] args, out GlobalCliArgs cli, out string[] globalArguments)
- {
- cli = new GlobalCliArgs();
- globalArguments = [];
- var globalArgs = new List<string>();
- var filteredArguments = new List<string>();
- for (var i = 0; i < args.Length; i++)
- {
- if (args[i] == "--log-level")
- {
- if (args.Length > i + 1)
- {
- cli = cli with { LogLevel = GetLogLevel(args[i + 1]) };
- globalArgs.Add("--log-level");
- globalArgs.Add(args[i + 1]);
- }
- i++;
- }
- else if (args[i] is "--config-source" or "--configuration-source" or "-c")
- {
- if (args.Length > i + 1 && ConfigurationSourceExtensions.TryParse(args[i + 1], out var cs, true, true))
- {
- cli = cli with { ConfigurationSource = cs };
- globalArgs.Add("--config-source");
- globalArgs.Add(args[i + 1]);
- }
- i++;
- }
- else if (args[i] == "--skip-private-repositories")
- {
- cli = cli with { SkipPrivateRepositories = true };
- globalArgs.Add("--skip-private-repositories");
- }
- else if (args[i] is "--help" or "--version")
- {
- cli = cli with { IsHelpOrVersion = true };
- globalArgs.Add(args[i]);
- filteredArguments.Add(args[i]);
- }
- else
- filteredArguments.Add(args[i]);
- }
-
- args = [.. filteredArguments];
- globalArguments = [.. globalArgs];
-
- if (filteredArguments.Count > 0 && filteredArguments[0] == "mcp")
- cli = cli with { IsMcp = true };
- }
-
- private static LogLevel GetLogLevel(string? logLevel) => logLevel switch
- {
- "trace" => LogLevel.Trace,
- "debug" => LogLevel.Debug,
- "information" => LogLevel.Information,
- "info" => LogLevel.Information,
- "warning" => LogLevel.Warning,
- "error" => LogLevel.Error,
- "critical" => LogLevel.Critical,
- _ => LogLevel.Information
- };
-
-}
diff --git a/src/api/Elastic.Documentation.Api.App/Program.cs b/src/api/Elastic.Documentation.Api.App/Program.cs
index 0fc2eff02e..980fe483e0 100644
--- a/src/api/Elastic.Documentation.Api.App/Program.cs
+++ b/src/api/Elastic.Documentation.Api.App/Program.cs
@@ -14,7 +14,7 @@
try
{
var builder = WebApplication.CreateSlimBuilder(args);
- _ = builder.AddDocumentationServiceDefaults(ref args, (s, p) =>
+ _ = builder.AddDocumentationServiceDefaults((s, p) =>
{
_ = s.AddSingleton(AssemblyConfiguration.Create(p));
});
diff --git a/src/api/Elastic.Documentation.Mcp.Remote/Program.cs b/src/api/Elastic.Documentation.Mcp.Remote/Program.cs
index fdbd018b76..6e8100c5cb 100644
--- a/src/api/Elastic.Documentation.Mcp.Remote/Program.cs
+++ b/src/api/Elastic.Documentation.Mcp.Remote/Program.cs
@@ -22,7 +22,7 @@
try
{
var builder = WebApplication.CreateSlimBuilder(args);
- _ = builder.AddDocumentationServiceDefaults(ref args);
+ _ = builder.AddDocumentationServiceDefaults();
_ = builder.AddDefaultHealthChecks();
_ = builder.AddDocsApiOpenTelemetry();
diff --git a/src/services/Elastic.Documentation.Assembler/Building/AssemblerBuildOptions.cs b/src/services/Elastic.Documentation.Assembler/Building/AssemblerBuildOptions.cs
new file mode 100644
index 0000000000..1740954f5c
--- /dev/null
+++ b/src/services/Elastic.Documentation.Assembler/Building/AssemblerBuildOptions.cs
@@ -0,0 +1,35 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using Elastic.Documentation;
+using Nullean.Argh;
+
+namespace Elastic.Documentation.Assembler.Building;
+
+/// Options for an assembler build, bound from CLI flags via argh [AsParameters].
+public record AssemblerBuildOptions
+{
+ /// Treat warnings as errors.
+ public bool? Strict { get; init; }
+
+ /// Named deployment target, e.g. dev, staging, production. Determines which configuration branch and index names are used.
+ public string? Environment { get; init; }
+
+ /// Write only metadata files; skip HTML generation. Ignored when --exporters is also set.
+ public bool? MetadataOnly { get; init; }
+
+ /// Print documentation hints emitted during the build.
+ public bool? ShowHints { get; init; }
+
+ /// <summary>
+ /// Comma-separated list of exporters to run.
+ /// Values: Html, Elasticsearch, Configuration, LinkMetadata, DocumentationState, LLMText, Redirects.
+ /// Default: Html, Configuration, LinkMetadata, DocumentationState, Redirects.
+ /// </summary>
+ [CollectionSyntax(Separator = ",")]
+ public IReadOnlySet? Exporters { get; init; }
+
+ /// Skip the build step when .artifacts/docs/index.html already exists. Intended for test scenarios only.
+ public bool? AssumeBuild { get; init; }
+}
diff --git a/src/services/Elastic.Documentation.Assembler/Building/AssemblerBuildService.cs b/src/services/Elastic.Documentation.Assembler/Building/AssemblerBuildService.cs
index 1c4639db9c..bc9615e18d 100644
--- a/src/services/Elastic.Documentation.Assembler/Building/AssemblerBuildService.cs
+++ b/src/services/Elastic.Documentation.Assembler/Building/AssemblerBuildService.cs
@@ -32,16 +32,19 @@ IEnvironmentVariables environmentVariables
public async Task BuildAll(
IDiagnosticsCollector collector,
- bool? strict, string? environment,
- bool? metadataOnly,
- bool? showHints,
- IReadOnlySet? exporters,
- bool? assumeBuild,
+ AssemblerBuildOptions options,
ScopedFileSystem readFs,
ScopedFileSystem writeFs,
Cancel ctx
)
{
+ var strict = options.Strict;
+ var environment = options.Environment;
+ var metadataOnly = options.MetadataOnly;
+ var showHints = options.ShowHints;
+ var exporters = options.Exporters;
+ var assumeBuild = options.AssumeBuild;
+
collector.NoHints = !showHints.GetValueOrDefault(false);
strict ??= false;
exporters ??= metadataOnly.GetValueOrDefault(false) ? ExportOptions.MetadataOnly : ExportOptions.Default;
diff --git a/src/services/Elastic.Documentation.Assembler/Building/AssemblerSitemapService.cs b/src/services/Elastic.Documentation.Assembler/Building/AssemblerSitemapService.cs
index 705abdd623..db4faf9d43 100644
--- a/src/services/Elastic.Documentation.Assembler/Building/AssemblerSitemapService.cs
+++ b/src/services/Elastic.Documentation.Assembler/Building/AssemblerSitemapService.cs
@@ -27,19 +27,8 @@ ICoreService githubActionsService
public async Task GenerateSitemapAsync(
IDiagnosticsCollector collector,
ScopedFileSystem fileSystem,
- string? endpoint = null,
+ ElasticsearchIndexOptions es,
string? environment = null,
- string? apiKey = null,
- string? username = null,
- string? password = null,
- bool? debugMode = null,
- string? proxyAddress = null,
- string? proxyPassword = null,
- string? proxyUsername = null,
- bool? disableSslVerification = null,
- string? certificateFingerprint = null,
- string? certificatePath = null,
- bool? certificateNotRoot = null,
Cancel ctx = default
)
{
@@ -54,22 +43,7 @@ public async Task GenerateSitemapAsync(
);
var cfg = configurationContext.Endpoints.Elasticsearch;
- var options = new ElasticsearchIndexOptions
- {
- Endpoint = endpoint,
- ApiKey = apiKey,
- Username = username,
- Password = password,
- DebugMode = debugMode,
- ProxyAddress = proxyAddress,
- ProxyPassword = proxyPassword,
- ProxyUsername = proxyUsername,
- DisableSslVerification = disableSslVerification,
- CertificateFingerprint = certificateFingerprint,
- CertificatePath = certificatePath,
- CertificateNotRoot = certificateNotRoot
- };
- await ElasticsearchEndpointConfigurator.ApplyAsync(cfg, options, collector, fileSystem, ctx);
+ await ElasticsearchEndpointConfigurator.ApplyAsync(cfg, es, collector, fileSystem, ctx);
if (collector.Errors > 0)
return false;
diff --git a/src/services/Elastic.Documentation.Assembler/Elastic.Documentation.Assembler.csproj b/src/services/Elastic.Documentation.Assembler/Elastic.Documentation.Assembler.csproj
index 7755841ad1..cdfa8983db 100644
--- a/src/services/Elastic.Documentation.Assembler/Elastic.Documentation.Assembler.csproj
+++ b/src/services/Elastic.Documentation.Assembler/Elastic.Documentation.Assembler.csproj
@@ -5,6 +5,8 @@
enable
enable
true
+ true
+ $(NoWarn);CS1591;CS1573;CS1572;CS1571;CS1570;CS1574
@@ -16,6 +18,7 @@
+
diff --git a/src/services/Elastic.Documentation.Assembler/Indexing/AssemblerIndexService.cs b/src/services/Elastic.Documentation.Assembler/Indexing/AssemblerIndexService.cs
index 7a3953704d..02af5d5e7d 100644
--- a/src/services/Elastic.Documentation.Assembler/Indexing/AssemblerIndexService.cs
+++ b/src/services/Elastic.Documentation.Assembler/Indexing/AssemblerIndexService.cs
@@ -24,93 +24,27 @@ IEnvironmentVariables environmentVariables
{
private readonly IConfigurationContext _configurationContext = configurationContext;
- ///
- /// Index documentation to Elasticsearch, calls `docs-builder assembler build --exporters elasticsearch`. Exposes more options
- ///
- ///
- ///
- /// Elasticsearch endpoint, alternatively set env DOCUMENTATION_ELASTIC_URL
- /// The --environment used to clone ends up being part of the index name
- /// Elasticsearch API key, alternatively set env DOCUMENTATION_ELASTIC_APIKEY
- /// Elasticsearch username (basic auth), alternatively set env DOCUMENTATION_ELASTIC_USERNAME
- /// Elasticsearch password (basic auth), alternatively set env DOCUMENTATION_ELASTIC_PASSWORD
- /// Disable AI enrichment of documents using LLM-generated metadata (enabled by default)
- /// The number of search threads the inference endpoint should use. Defaults: 8
- /// The number of index threads the inference endpoint should use. Defaults: 8
- /// Do not use the Elastic Inference Service, bootstrap inference endpoint
- /// Timeout in minutes for the inference endpoint creation. Defaults: 4
- /// Force reindex strategy to semantic index
- /// The number of documents to send to ES as part of the bulk. Defaults: 100
- /// The number of times failed bulk items should be retried. Defaults: 3
- /// Buffer ES request/responses for better error messages and pass ?pretty to all requests
- /// Route requests through a proxy server
- /// Proxy server password
- /// Proxy server username
- /// Disable SSL certificate validation (EXPERT OPTION)
- /// Pass a self-signed certificate fingerprint to validate the SSL connection
- /// Pass a self-signed certificate to validate the SSL connection
- /// If the certificate is not root but only part of the validation chain pass this
- ///
- ///
- public async Task Index(IDiagnosticsCollector collector,
+ /// Index assembled documentation to Elasticsearch.
+ public async Task Index(
+ IDiagnosticsCollector collector,
ScopedFileSystem readFs,
ScopedFileSystem writeFs,
- string? endpoint = null,
+ ElasticsearchIndexOptions es,
string? environment = null,
- string? apiKey = null,
- string? username = null,
- string? password = null,
- // inference options
- bool? noAiEnrichment = null,
- int? searchNumThreads = null,
- int? indexNumThreads = null,
- bool? noEis = null,
- int? bootstrapTimeout = null,
- // index options
- bool? forceReindex = null,
- // channel buffer options
- int? bufferSize = null,
- int? maxRetries = null,
- // connection options
- bool? debugMode = null,
- string? proxyAddress = null,
- string? proxyPassword = null,
- string? proxyUsername = null,
- bool? disableSslVerification = null,
- string? certificateFingerprint = null,
- string? certificatePath = null,
- bool? certificateNotRoot = null,
Cancel ctx = default
)
{
var cfg = _configurationContext.Endpoints.Elasticsearch;
- var options = new ElasticsearchIndexOptions
- {
- Endpoint = endpoint,
- ApiKey = apiKey,
- Username = username,
- Password = password,
- NoAiEnrichment = noAiEnrichment,
- SearchNumThreads = searchNumThreads,
- IndexNumThreads = indexNumThreads,
- NoEis = noEis,
- BootstrapTimeout = bootstrapTimeout,
- ForceReindex = forceReindex,
- BufferSize = bufferSize,
- MaxRetries = maxRetries,
- DebugMode = debugMode,
- ProxyAddress = proxyAddress,
- ProxyPassword = proxyPassword,
- ProxyUsername = proxyUsername,
- DisableSslVerification = disableSslVerification,
- CertificateFingerprint = certificateFingerprint,
- CertificatePath = certificatePath,
- CertificateNotRoot = certificateNotRoot
- };
- await ElasticsearchEndpointConfigurator.ApplyAsync(cfg, options, collector, readFs, ctx);
-
- var exporters = new HashSet { Elasticsearch };
+ await ElasticsearchEndpointConfigurator.ApplyAsync(cfg, es, collector, readFs, ctx);
- return await BuildAll(collector, strict: false, environment, metadataOnly: true, showHints: false, exporters, assumeBuild: false, readFs, writeFs, ctx);
+ return await BuildAll(collector, new AssemblerBuildOptions
+ {
+ Strict = false,
+ Environment = environment,
+ MetadataOnly = true,
+ ShowHints = false,
+ Exporters = new HashSet<Exporter> { Elasticsearch },
+ AssumeBuild = false
+ }, readFs, writeFs, ctx);
}
}
diff --git a/src/services/Elastic.Documentation.Assembler/Sourcing/AssemblerCloneOptions.cs b/src/services/Elastic.Documentation.Assembler/Sourcing/AssemblerCloneOptions.cs
new file mode 100644
index 0000000000..e5447765da
--- /dev/null
+++ b/src/services/Elastic.Documentation.Assembler/Sourcing/AssemblerCloneOptions.cs
@@ -0,0 +1,14 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+namespace Elastic.Documentation.Assembler.Sourcing;
+
+/// Options for cloning assembler repositories, bound from CLI flags via argh [AsParameters].
+public record AssemblerCloneOptions
+{
+ public bool? Strict { get; init; }
+ public string? Environment { get; init; }
+ public bool? FetchLatest { get; init; }
+ public bool? AssumeCloned { get; init; }
+}
diff --git a/src/services/Elastic.Documentation.Assembler/Sourcing/AssemblerCloneService.cs b/src/services/Elastic.Documentation.Assembler/Sourcing/AssemblerCloneService.cs
index f819ebe18a..f3522d25c1 100644
--- a/src/services/Elastic.Documentation.Assembler/Sourcing/AssemblerCloneService.cs
+++ b/src/services/Elastic.Documentation.Assembler/Sourcing/AssemblerCloneService.cs
@@ -19,18 +19,18 @@ public class AssemblerCloneService(
ICoreService githubActionsService
) : IService
{
- public async Task CloneAll(IDiagnosticsCollector collector, bool? strict, string? environment, bool? fetchLatest, bool? assumeCloned, Cancel ctx)
+ public async Task CloneAll(IDiagnosticsCollector collector, AssemblerCloneOptions options, Cancel ctx)
{
- strict ??= false;
+ var strict = options.Strict ?? false;
var githubEnvironmentInput = githubActionsService.GetInput("environment");
- environment ??= !string.IsNullOrEmpty(githubEnvironmentInput) ? githubEnvironmentInput : "dev";
+ var environment = options.Environment ?? (!string.IsNullOrEmpty(githubEnvironmentInput) ? githubEnvironmentInput : "dev");
var fs = FileSystemFactory.RealRead;
var assembleContext = new AssembleContext(assemblyConfiguration, configurationContext, environment, collector, fs, fs, null, null);
var cloner = new AssemblerRepositorySourcer(logFactory, assembleContext);
- _ = await cloner.CloneAll(fetchLatest ?? false, assumeCloned ?? false, ctx);
+ _ = await cloner.CloneAll(options.FetchLatest ?? false, options.AssumeCloned ?? false, ctx);
- return strict.Value ? collector.Errors + collector.Warnings == 0 : collector.Errors == 0;
+ return strict ? collector.Errors + collector.Warnings == 0 : collector.Errors == 0;
}
}
diff --git a/src/services/Elastic.Documentation.Isolated/Elastic.Documentation.Isolated.csproj b/src/services/Elastic.Documentation.Isolated/Elastic.Documentation.Isolated.csproj
index 3b50392249..24607021b7 100644
--- a/src/services/Elastic.Documentation.Isolated/Elastic.Documentation.Isolated.csproj
+++ b/src/services/Elastic.Documentation.Isolated/Elastic.Documentation.Isolated.csproj
@@ -5,9 +5,12 @@
enable
enable
true
+ true
+ $(NoWarn);CS1591;CS1573;CS1572;CS1571;CS1570;CS1574
+
diff --git a/src/services/Elastic.Documentation.Isolated/IsolatedBuildOptions.cs b/src/services/Elastic.Documentation.Isolated/IsolatedBuildOptions.cs
new file mode 100644
index 0000000000..8d39f5d60a
--- /dev/null
+++ b/src/services/Elastic.Documentation.Isolated/IsolatedBuildOptions.cs
@@ -0,0 +1,52 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.ComponentModel.DataAnnotations;
+using Nullean.Argh;
+
+namespace Elastic.Documentation.Isolated;
+
+/// Options for an isolated documentation set build, bound from CLI flags via argh [AsParameters].
+public record IsolatedBuildOptions
+{
+ /// -p, Root directory of the documentation source. Defaults to cwd/docs.
+ [Existing, ExpandUserProfile, RejectSymbolicLinks]
+ public DirectoryInfo? Path { get; init; }
+
+ /// -o, Destination for generated HTML. Defaults to .artifacts/html.
+ [ExpandUserProfile, RejectSymbolicLinks]
+ public DirectoryInfo? Output { get; init; }
+
+ /// URL path prefix prepended to every generated link.
+ public string? PathPrefix { get; init; }
+
+ /// Delete and rebuild the output folder even if nothing changed.
+ public bool? Force { get; init; }
+
+ /// Treat warnings as errors.
+ public bool? Strict { get; init; }
+
+ /// Emit meta robots tags that allow search engine indexing.
+ public bool? AllowIndexing { get; init; }
+
+ /// Write only metadata files; skip HTML generation. Ignored when --exporters is also set.
+ public bool? MetadataOnly { get; init; }
+
+ ///
+ /// Comma-separated list of exporters to run.
+ /// Default: html, configuration, linkmetadata, documentationState, redirects.
+ ///
+ [CollectionSyntax(Separator = ",")]
+ public IReadOnlySet<Exporter>? Exporters { get; init; }
+
+ /// Base URL written into <link rel=canonical> tags.
+ [Url]
+ public Uri? CanonicalBaseUrl { get; init; }
+
+ /// Skip OpenAPI spec generation for faster builds.
+ public bool SkipApi { get; init; }
+
+ /// Skip fetching cross-doc-set link indexes.
+ public bool SkipCrossLinks { get; init; }
+}
diff --git a/src/services/Elastic.Documentation.Isolated/IsolatedBuildService.cs b/src/services/Elastic.Documentation.Isolated/IsolatedBuildService.cs
index 180e315b37..bd75d5b656 100644
--- a/src/services/Elastic.Documentation.Isolated/IsolatedBuildService.cs
+++ b/src/services/Elastic.Documentation.Isolated/IsolatedBuildService.cs
@@ -45,21 +45,23 @@ public bool IsStrict(bool? strict)
public async Task Build(
IDiagnosticsCollector collector,
ScopedFileSystem fileSystem,
- string? path = null,
- string? output = null,
- string? pathPrefix = null,
- bool? force = null,
- bool? strict = null,
- bool? allowIndexing = null,
- bool? metadataOnly = null,
- IReadOnlySet? exporters = null,
- string? canonicalBaseUrl = null,
+ IsolatedBuildOptions options,
ScopedFileSystem? writeFileSystem = null,
- bool skipOpenApi = false,
- bool skipCrossLinks = false,
Cancel ctx = default
)
{
+ var path = options.Path?.FullName;
+ var output = options.Output?.FullName;
+ var pathPrefix = options.PathPrefix;
+ var force = options.Force;
+ var strict = options.Strict;
+ var allowIndexing = options.AllowIndexing;
+ var metadataOnly = options.MetadataOnly;
+ var exporters = options.Exporters;
+ var canonicalBaseUri = options.CanonicalBaseUrl;
+ var skipOpenApi = options.SkipApi;
+ var skipCrossLinks = options.SkipCrossLinks;
+
strict = IsStrict(strict);
if (bool.TryParse(githubActionsService.GetInput("metadata-only"), out var metaValue) && metaValue)
@@ -72,7 +74,7 @@ public async Task Build(
var runningOnCi = _env.IsRunningOnCI;
BuildContext context;
- Uri? canonicalBaseUri;
+ canonicalBaseUri ??= new Uri("https://docs-v3-preview.elastic.dev");
if (runningOnCi)
{
@@ -80,11 +82,6 @@ public async Task Build(
force = true;
}
- if (canonicalBaseUrl is null)
- canonicalBaseUri = new Uri("https://docs-v3-preview.elastic.dev");
- else if (!Uri.TryCreate(canonicalBaseUrl, UriKind.Absolute, out canonicalBaseUri))
- throw new ArgumentException($"The canonical base url '{canonicalBaseUrl}' is not a valid absolute uri");
-
try
{
context = new BuildContext(collector, fileSystem, writeFileSystem ?? fileSystem, configurationContext, exporters, path, output)
diff --git a/src/services/Elastic.Documentation.Isolated/IsolatedIndexService.cs b/src/services/Elastic.Documentation.Isolated/IsolatedIndexService.cs
index b20980e233..8f615612d2 100644
--- a/src/services/Elastic.Documentation.Isolated/IsolatedIndexService.cs
+++ b/src/services/Elastic.Documentation.Isolated/IsolatedIndexService.cs
@@ -21,96 +21,26 @@ IEnvironmentVariables environmentVariables
{
private readonly IConfigurationContext _configurationContext = configurationContext;
- ///
- /// Index documentation to Elasticsearch, calls `docs-builder assembler build --exporters elasticsearch`. Exposes more options
- ///
- ///
- ///
- /// path to the documentation folder, defaults to pwd.
- /// Elasticsearch endpoint, alternatively set env DOCUMENTATION_ELASTIC_URL
- /// Elasticsearch API key, alternatively set env DOCUMENTATION_ELASTIC_APIKEY
- /// Elasticsearch username (basic auth), alternatively set env DOCUMENTATION_ELASTIC_USERNAME
- /// Elasticsearch password (basic auth), alternatively set env DOCUMENTATION_ELASTIC_PASSWORD
- /// Disable AI enrichment of documents using LLM-generated metadata (enabled by default)
- /// The number of search threads the inference endpoint should use. Defaults: 8
- /// The number of index threads the inference endpoint should use. Defaults: 8
- /// Do not use the Elastic Inference Service, bootstrap inference endpoint
- /// Timeout in minutes for the inference endpoint creation. Defaults: 4
- /// Force reindex strategy to semantic index
- /// The number of documents to send to ES as part of the bulk. Defaults: 100
- /// The number of times failed bulk items should be retried. Defaults: 3
- /// Buffer ES request/responses for better error messages and pass ?pretty to all requests
- /// Route requests through a proxy server
- /// Proxy server password
- /// Proxy server username
- /// Disable SSL certificate validation (EXPERT OPTION)
- /// Pass a self-signed certificate fingerprint to validate the SSL connection
- /// Pass a self-signed certificate to validate the SSL connection
- /// If the certificate is not root but only part of the validation chain pass this
- ///
- ///
- public async Task Index(IDiagnosticsCollector collector,
+ /// Index a single documentation set to Elasticsearch.
+ public async Task Index(
+ IDiagnosticsCollector collector,
ScopedFileSystem fileSystem,
+ ElasticsearchIndexOptions es,
string? path = null,
- string? endpoint = null,
- string? apiKey = null,
- string? username = null,
- string? password = null,
- // inference options
- bool? noAiEnrichment = null,
- int? searchNumThreads = null,
- int? indexNumThreads = null,
- bool? noEis = null,
- int? bootstrapTimeout = null,
- // index options
- bool? forceReindex = null,
- // channel buffer options
- int? bufferSize = null,
- int? maxRetries = null,
- // connection options
- bool? debugMode = null,
- string? proxyAddress = null,
- string? proxyPassword = null,
- string? proxyUsername = null,
- bool? disableSslVerification = null,
- string? certificateFingerprint = null,
- string? certificatePath = null,
- bool? certificateNotRoot = null,
Cancel ctx = default
)
{
var cfg = _configurationContext.Endpoints.Elasticsearch;
- var options = new ElasticsearchIndexOptions
- {
- Endpoint = endpoint,
- ApiKey = apiKey,
- Username = username,
- Password = password,
- NoAiEnrichment = noAiEnrichment,
- SearchNumThreads = searchNumThreads,
- IndexNumThreads = indexNumThreads,
- NoEis = noEis,
- BootstrapTimeout = bootstrapTimeout,
- ForceReindex = forceReindex,
- BufferSize = bufferSize,
- MaxRetries = maxRetries,
- DebugMode = debugMode,
- ProxyAddress = proxyAddress,
- ProxyPassword = proxyPassword,
- ProxyUsername = proxyUsername,
- DisableSslVerification = disableSslVerification,
- CertificateFingerprint = certificateFingerprint,
- CertificatePath = certificatePath,
- CertificateNotRoot = certificateNotRoot
- };
- await ElasticsearchEndpointConfigurator.ApplyAsync(cfg, options, collector, fileSystem, ctx);
-
- var exporters = new HashSet { Elasticsearch };
+ await ElasticsearchEndpointConfigurator.ApplyAsync(cfg, es, collector, fileSystem, ctx);
- return await Build(collector, fileSystem,
- metadataOnly: true, strict: false, path: path, output: null, pathPrefix: null,
- force: true, allowIndexing: null, exporters: exporters, canonicalBaseUrl: null,
- skipOpenApi: true,
- ctx: ctx);
+ return await Build(collector, fileSystem, new IsolatedBuildOptions
+ {
+ Path = path != null ? new DirectoryInfo(path) : null,
+ MetadataOnly = true,
+ Strict = false,
+ Force = true,
+ SkipApi = true,
+ Exporters = new HashSet<Exporter> { Elasticsearch }
+ }, ctx: ctx);
}
}
diff --git a/src/tooling/docs-builder/Arguments/ExportOption.cs b/src/tooling/docs-builder/Arguments/ExportOption.cs
deleted file mode 100644
index 980378613c..0000000000
--- a/src/tooling/docs-builder/Arguments/ExportOption.cs
+++ /dev/null
@@ -1,51 +0,0 @@
-// Licensed to Elasticsearch B.V under one or more agreements.
-// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
-// See the LICENSE file in the project root for more information
-
-using ConsoleAppFramework;
-using Elastic.Documentation;
-using static Elastic.Documentation.Exporter;
-
-namespace Documentation.Builder.Arguments;
-
-[AttributeUsage(AttributeTargets.Parameter)]
-public class ExporterParserAttribute : Attribute, IArgumentParser>
-{
- public static bool TryParse(ReadOnlySpan s, out IReadOnlySet result)
- {
- result = ExportOptions.Default;
- var set = new HashSet();
- var options = s.Split(',');
- foreach (var option in options)
- {
- var export = s[option].Trim().ToString().ToLowerInvariant() switch
- {
- "llm" => LLMText,
- "llmtext" => LLMText,
- "es" => Elasticsearch,
- "elasticsearch" => Elasticsearch,
- "html" => Html,
- "config" => Configuration,
- "links" => LinkMetadata,
- "state" => DocumentationState,
- "redirects" => Redirects,
- "redirect" => Redirects,
- "none" => null,
- "default" => AddDefaultReturnNull(set, ExportOptions.Default),
- "metadata" => AddDefaultReturnNull(set, ExportOptions.MetadataOnly),
- _ => throw new Exception($"Unknown exporter {s[option].Trim().ToString().ToLowerInvariant()}")
- };
- if (export.HasValue)
- _ = set.Add(export.Value);
- }
- result = set;
- return true;
- }
-
- private static Exporter? AddDefaultReturnNull(HashSet set, HashSet defaultSet)
- {
- foreach (var option in defaultSet)
- _ = set.Add(option);
- return null;
- }
-}
diff --git a/src/tooling/docs-builder/Arguments/ProductInfoParser.cs b/src/tooling/docs-builder/Arguments/ProductInfoParser.cs
index 3c41f7d6d3..b0e486202f 100644
--- a/src/tooling/docs-builder/Arguments/ProductInfoParser.cs
+++ b/src/tooling/docs-builder/Arguments/ProductInfoParser.cs
@@ -2,40 +2,51 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
-using ConsoleAppFramework;
+using System.Collections;
using Elastic.Changelog;
+using Nullean.Argh.Parsing;
namespace Documentation.Builder.Arguments;
-[AttributeUsage(AttributeTargets.Parameter)]
-public class ProductInfoParserAttribute : Attribute, IArgumentParser>
+///
+/// Wrapper for a parsed list of entries.
+/// Use with [ArgumentParser(typeof(ProductInfoParser))] on command parameters.
+///
+///
+/// Input: comma-separated entries, each space-separated as product target lifecycle.
+/// Example: elasticsearch 9.2.0 ga, cloud-serverless 2025-08-05 ga
+///
+public sealed class ProductArgumentList(List items) : IReadOnlyList
{
- public static bool TryParse(ReadOnlySpan s, out List result)
- {
- result = [];
+ private readonly List _items = items;
+
+ public int Count => _items.Count;
+ public ProductArgument this[int index] => _items[index];
+ public IEnumerator GetEnumerator() => _items.GetEnumerator();
+ IEnumerator IEnumerable.GetEnumerator() => _items.GetEnumerator();
- // Split by comma to get individual product entries
- var productEntries = s.ToString().Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
+ public static implicit operator List(ProductArgumentList v) => v._items;
+}
- foreach (var entry in productEntries)
+/// Parses a comma-separated product list into a .
+public class ProductInfoParser : IArgumentParser
+{
+ public bool TryParse(string raw, out ProductArgumentList result)
+ {
+ var parsed = new List();
+ foreach (var entry in raw.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
{
- // Split by whitespace to get product, target, lifecycle
var parts = entry.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
-
if (parts.Length == 0)
continue;
-
- var productInfo = new ProductArgument
+ parsed.Add(new ProductArgument
{
Product = parts[0],
Target = parts.Length > 1 ? parts[1] : null,
Lifecycle = parts.Length > 2 ? parts[2] : null
- };
-
- result.Add(productInfo);
+ });
}
-
- return result.Count > 0;
+ result = new ProductArgumentList(parsed);
+ return parsed.Count > 0;
}
}
-
diff --git a/src/tooling/docs-builder/Commands/Assembler/AssemblerCommands.cs b/src/tooling/docs-builder/Commands/Assembler/AssemblerCommands.cs
index 5c3cf9f25b..6023aeccf0 100644
--- a/src/tooling/docs-builder/Commands/Assembler/AssemblerCommands.cs
+++ b/src/tooling/docs-builder/Commands/Assembler/AssemblerCommands.cs
@@ -4,8 +4,6 @@
using System.IO.Abstractions;
using Actions.Core.Services;
-using ConsoleAppFramework;
-using Documentation.Builder.Arguments;
using Documentation.Builder.Http;
using Elastic.Documentation;
using Elastic.Documentation.Assembler.Building;
@@ -15,10 +13,11 @@
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Assembler;
-internal sealed class AssembleCommands(
+internal sealed class AssembleOneShotCommand(
ILoggerFactory logFactory,
IDiagnosticsCollector collector,
AssemblyConfiguration assemblyConfiguration,
@@ -27,62 +26,71 @@ internal sealed class AssembleCommands(
IEnvironmentVariables environmentVariables
)
{
- /// Do a full assembler clone and assembler build in one swoop
- /// Treat warnings as errors and fail the build on warnings
- /// The environment to build
- /// If true, fetch the latest commit of the branch instead of the link registry entry ref
- /// If true, assume the repository folder already exists on disk assume it's cloned already, primarily used for testing
- /// If true, assume the build output already exists and skip building if index.html exists, primarily used for testing
- /// Only emit documentation metadata to output, ignored if 'exporters' is also set
- /// Show hints from all documentation sets during assembler build
- /// Set available exporters:
- /// html, es, config, links, state, llm, redirect, metadata, none.
- /// Defaults to (html, config, links, state, redirect) or 'default'.
- ///
- /// Serve the documentation on port 4000 after succesful build
- ///
- [Command("")]
- public async Task CloneAndBuild(
- bool? strict = null,
- string? environment = null,
+ /// Clone all repositories and build the unified documentation site in one step.
+ ///
+ ///
+ /// The assembler clones multiple documentation repositories and builds them into a single unified site
+ /// composed by a shared navigation.yml. This command combines assembler config init,
+ /// assembler clone, and assembler build into a single invocation.
+ ///
+ ///
+ /// Fetch the HEAD of each branch instead of the pinned link-registry ref.
+ /// Skip cloning; assume repositories are already on disk. Useful for iterating on the build.
+ /// Serve the site on port 4000 after a successful build.
+ [CommandName("assemble")]
+ public async Task Assemble(
+ GlobalCliOptions _,
+ [AsParameters] AssemblerBuildOptions buildOptions,
bool? fetchLatest = null,
bool? assumeCloned = null,
- bool? assumeBuild = null,
- bool? metadataOnly = null,
- bool? showHints = null,
- [ExporterParser] IReadOnlySet? exporters = null,
bool serve = false,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
await using var serviceInvoker = new ServiceInvoker(collector);
+ var cloneOptions = new AssemblerCloneOptions
+ {
+ Strict = buildOptions.Strict,
+ Environment = buildOptions.Environment,
+ FetchLatest = fetchLatest,
+ AssumeCloned = assumeCloned
+ };
var cloneService = new AssemblerCloneService(logFactory, assemblyConfiguration, configurationContext, githubActionsService);
- serviceInvoker.AddCommand(cloneService, (strict, environment, fetchLatest, assumeCloned, ctx), strict ?? false,
- static async (s, collector, state, ctx) => await s.CloneAll(collector, state.strict, state.environment, state.fetchLatest, state.assumeCloned, ctx)
+ serviceInvoker.AddCommand(cloneService, cloneOptions, buildOptions.Strict ?? false,
+ static async (s, col, opts, ctx) => await s.CloneAll(col, opts, ctx)
);
- var buildService = new AssemblerBuildService(logFactory, assemblyConfiguration, configurationContext, githubActionsService, environmentVariables);
var readFs = FileSystemFactory.RealRead;
var writeFs = FileSystemFactory.RealWrite;
- serviceInvoker.AddCommand(buildService, (strict, environment, metadataOnly, showHints, exporters, assumeBuild, readFs, writeFs), strict ?? false,
- static async (s, collector, state, ctx) =>
- await s.BuildAll(collector, state.strict, state.environment, state.metadataOnly, state.showHints, state.exporters, state.assumeBuild, state.readFs, state.writeFs, ctx)
+ var buildService = new AssemblerBuildService(logFactory, assemblyConfiguration, configurationContext, githubActionsService, environmentVariables);
+ serviceInvoker.AddCommand(buildService, (buildOptions, readFs, writeFs), buildOptions.Strict ?? false,
+ static async (s, col, state, ctx) => await s.BuildAll(col, state.buildOptions, state.readFs, state.writeFs, ctx)
);
- var result = await serviceInvoker.InvokeAsync(ctx);
+ var result = await serviceInvoker.InvokeAsync(ct);
if (serve && result == 0)
{
var host = new StaticWebHost(4000, null);
- await host.RunAsync(ctx);
- await host.StopAsync(ctx);
+ await host.RunAsync(ct);
+ await host.StopAsync(ct);
}
return result;
-
}
}
+/// Build a unified documentation site by composing multiple documentation sets under a shared navigation.
+///
+///
+/// The assembler clones multiple documentation repositories and builds them into a single unified site.
+/// A central navigation.yml defines the global structure, merging content from every repository
+/// into one consistent navigation tree.
+///
+///
+/// Typical workflow:
+///
+///
internal sealed class AssemblerCommands(
ILoggerFactory logFactory,
IDiagnosticsCollector collector,
@@ -92,79 +100,68 @@ internal sealed class AssemblerCommands(
IEnvironmentVariables environmentVariables
)
{
- /// Clones all repositories
- /// Treat warnings as errors and fail the build on warnings
- /// The environment to build
- /// If true, fetch the latest commit of the branch instead of the link registry entry ref
- /// If true, assume the repository folder already exists on disk assume it's cloned already, primarily used for testing
- ///
- [Command("clone")]
- public async Task CloneAll(
+ /// Clone all repositories listed in the assembler configuration.
+ ///
+ /// Run assembler config init first to fetch the repository list. Clones into a local
+ /// working directory; subsequent assembler build reads from there.
+ ///
+ /// Treat warnings as errors.
+ /// Named deployment target. Determines which repositories and branches are cloned.
+ /// Fetch the HEAD of each branch instead of the pinned link-registry ref.
+ /// Skip cloning; assume repositories are already on disk.
+ [NoOptionsInjection]
+ public async Task Clone(
bool? strict = null,
string? environment = null,
bool? fetchLatest = null,
bool? assumeCloned = null,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
await using var serviceInvoker = new ServiceInvoker(collector);
-
+ var options = new AssemblerCloneOptions
+ {
+ Strict = strict, Environment = environment,
+ FetchLatest = fetchLatest, AssumeCloned = assumeCloned
+ };
var service = new AssemblerCloneService(logFactory, assemblyConfiguration, configurationContext, githubActionsService);
- serviceInvoker.AddCommand(service, (strict, environment, fetchLatest, assumeCloned, ctx), strict ?? false,
- static async (s, collector, state, ctx) => await s.CloneAll(collector, state.strict, state.environment, state.fetchLatest, state.assumeCloned, ctx)
+ serviceInvoker.AddCommand(service, options, strict ?? false,
+ static async (s, col, opts, ctx) => await s.CloneAll(col, opts, ctx)
);
-
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
-
- /// Builds all repositories
- /// Treat warnings as errors and fail the build on warnings
- /// The environment to build
- /// If true, assume the build output already exists and skip building if index.html exists, primarily used for testing
- /// Only emit documentation metadata to output, ignored if 'exporters' is also set
- /// Show hints from all documentation sets during assembler build
- /// Set available exporters:
- /// html, es, config, links, state, llm, redirect, metadata, none.
- /// Defaults to (html, config, links, state, redirect) or 'default'.
- ///
- ///
- [Command("build")]
- public async Task BuildAll(
- bool? strict = null,
- string? environment = null,
- bool? assumeBuild = null,
- bool? metadataOnly = null,
- bool? showHints = null,
- [ExporterParser] IReadOnlySet? exporters = null,
- Cancel ctx = default
+	/// <summary>Build the unified site from all previously cloned repositories.</summary>
+	/// <remarks>
+	/// Run after <c>assembler clone</c>. Reads every cloned repository, applies the shared <c>navigation.yml</c>,
+	/// and writes the unified site to <c>.artifacts/docs/</c>.
+	/// </remarks>
+ [NoOptionsInjection]
+ public async Task Build(
+ [AsParameters] AssemblerBuildOptions options,
+ CancellationToken ct = default
)
{
await using var serviceInvoker = new ServiceInvoker(collector);
-
var readFs = FileSystemFactory.RealRead;
var writeFs = FileSystemFactory.RealWrite;
var service = new AssemblerBuildService(logFactory, assemblyConfiguration, configurationContext, githubActionsService, environmentVariables);
- serviceInvoker.AddCommand(service, (strict, environment, assumeBuild, metadataOnly, showHints, exporters, readFs, writeFs), strict ?? false,
- static async (s, collector, state, ctx) =>
- await s.BuildAll(collector, state.strict, state.environment, state.metadataOnly, state.showHints, state.exporters, state.assumeBuild, state.readFs, state.writeFs, ctx)
+ serviceInvoker.AddCommand(service, (options, readFs, writeFs), options.Strict ?? false,
+ static async (s, col, state, ctx) => await s.BuildAll(col, state.options, state.readFs, state.writeFs, ctx)
);
-
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
- /// Serve the output of an assembler build
- /// Port to serve the documentation.
- ///
- [Command("serve")]
- public async Task ServeAssemblerBuild(int port = 4000, string? path = null, Cancel ctx = default)
+	/// <summary>Serve the output of a completed assembler build at http://localhost:4000.</summary>
+	/// <remarks>Run after <c>assembler build</c>. Does not watch for file changes.</remarks>
+	/// <param name="port">Port to listen on. Default: 4000.</param>
+	/// <param name="path">Path to the built site. Defaults to <c>.artifacts/docs/</c>.</param>
+ [NoOptionsInjection]
+ public async Task Serve(int port = 4000, [Existing, ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? path = null, CancellationToken ct = default)
{
- var host = new StaticWebHost(port, path);
- await host.RunAsync(ctx);
- await host.StopAsync(ctx);
- // since this command does not use ServiceInvoker, we stop the collector manually.
- // this should be an exception to the regular command pattern.
- await collector.StopAsync(ctx);
+ var host = new StaticWebHost(port, path?.FullName);
+ await host.RunAsync(ct);
+ await host.StopAsync(ct);
+ await collector.StopAsync(ct);
}
-
}
diff --git a/src/tooling/docs-builder/Commands/Assembler/AssemblerIndexCommand.cs b/src/tooling/docs-builder/Commands/Assembler/AssemblerIndexCommand.cs
index 7a8a5a9316..26412a93cc 100644
--- a/src/tooling/docs-builder/Commands/Assembler/AssemblerIndexCommand.cs
+++ b/src/tooling/docs-builder/Commands/Assembler/AssemblerIndexCommand.cs
@@ -4,13 +4,14 @@
using System.IO.Abstractions;
using Actions.Core.Services;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Assembler.Indexing;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Assembler;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Assembler;
@@ -23,101 +24,30 @@ internal sealed class AssemblerIndexCommand(
IEnvironmentVariables environmentVariables
)
{
- ///
- /// Index documentation to Elasticsearch, calls `docs-builder assembler build --exporters elasticsearch`. Exposes more options
- ///
- /// -es, Elasticsearch endpoint, alternatively set env DOCUMENTATION_ELASTIC_URL
- /// The --environment used to clone ends up being part of the index name
- /// Elasticsearch API key, alternatively set env DOCUMENTATION_ELASTIC_APIKEY
- /// Elasticsearch username (basic auth), alternatively set env DOCUMENTATION_ELASTIC_USERNAME
- /// Elasticsearch password (basic auth), alternatively set env DOCUMENTATION_ELASTIC_PASSWORD
- /// Disable AI enrichment of documents using LLM-generated metadata (enabled by default)
- /// The number of search threads the inference endpoint should use. Defaults: 8
- /// The number of index threads the inference endpoint should use. Defaults: 8
- /// Do not use the Elastic Inference Service, bootstrap inference endpoint
- /// Force reindex strategy to semantic index
- /// Timeout in minutes for the inference endpoint creation. Defaults: 4
- /// The number of documents to send to ES as part of the bulk. Defaults: 100
- /// The number of times failed bulk items should be retried. Defaults: 3
- /// Buffer ES request/responses for better error messages and pass ?pretty to all requests
- /// Route requests through a proxy server
- /// Proxy server password
- /// Proxy server username
- /// Disable SSL certificate validation (EXPERT OPTION)
- /// Pass a self-signed certificate fingerprint to validate the SSL connection
- /// Pass a self-signed certificate to validate the SSL connection
- /// If the certificate is not root but only part of the validation chain pass this
- ///
- ///
- [Command("")]
+	/// <summary>Index the assembled documentation into Elasticsearch.</summary>
+	/// <remarks>
+	/// <para>
+	/// Runs an assembler build with only the Elasticsearch exporter enabled, then streams documents
+	/// to the cluster. The index name is derived from the environment name.
+	/// </para>
+	/// <para>Run after <c>assembler build</c> or use instead of it when indexing is the only goal.</para>
+	/// </remarks>
+	/// <param name="environment">Named deployment target; becomes part of the Elasticsearch index name.</param>
+ [CommandName("index")]
public async Task Index(
- string? endpoint = null,
+ GlobalCliOptions _,
+ [AsParameters] ElasticsearchIndexOptions es,
string? environment = null,
- string? apiKey = null,
- string? username = null,
- string? password = null,
-
- // inference options
- bool? noAiEnrichment = null,
- int? searchNumThreads = null,
- int? indexNumThreads = null,
- bool? noEis = null,
- int? bootstrapTimeout = null,
-
- // index options
- bool? forceReindex = null,
-
- // channel buffer options
- int? bufferSize = null,
- int? maxRetries = null,
-
- // connection options
- bool? debugMode = null,
-
- // proxy options
- string? proxyAddress = null,
- string? proxyPassword = null,
- string? proxyUsername = null,
-
- // certificate options
- bool? disableSslVerification = null,
- string? certificateFingerprint = null,
- string? certificatePath = null,
- bool? certificateNotRoot = null,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var readFs = FileSystemFactory.RealRead;
var writeFs = FileSystemFactory.RealWrite;
var service = new AssemblerIndexService(logFactory, configuration, configurationContext, githubActionsService, environmentVariables);
- var state = (readFs, writeFs,
- // endpoint options
- endpoint, environment, apiKey, username, password,
- // inference options
- noAiEnrichment, indexNumThreads, searchNumThreads, noEis, bootstrapTimeout,
- // channel and connection options
- forceReindex, bufferSize, maxRetries, debugMode,
- // proxy options
- proxyAddress, proxyPassword, proxyUsername,
- // certificate options
- disableSslVerification, certificateFingerprint, certificatePath, certificateNotRoot
- );
- serviceInvoker.AddCommand(service, state,
- static async (s, collector, state, ctx) => await s.Index(collector, state.readFs, state.writeFs,
- // endpoint options
- state.endpoint, state.environment, state.apiKey, state.username, state.password,
- // inference options
- state.noAiEnrichment, state.searchNumThreads, state.indexNumThreads, state.noEis, state.bootstrapTimeout,
- // channel and connection options
- state.forceReindex, state.bufferSize, state.maxRetries, state.debugMode,
- // proxy options
- state.proxyAddress, state.proxyPassword, state.proxyUsername,
- // certificate options
- state.disableSslVerification, state.certificateFingerprint, state.certificatePath, state.certificateNotRoot
- , ctx)
+ serviceInvoker.AddCommand(service,
+ async (s, col, ctx) => await s.Index(col, readFs, writeFs, es, environment, ctx)
);
-
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
}
diff --git a/src/tooling/docs-builder/Commands/Assembler/AssemblerSitemapCommand.cs b/src/tooling/docs-builder/Commands/Assembler/AssemblerSitemapCommand.cs
index 9d066babe9..d0e2bbc825 100644
--- a/src/tooling/docs-builder/Commands/Assembler/AssemblerSitemapCommand.cs
+++ b/src/tooling/docs-builder/Commands/Assembler/AssemblerSitemapCommand.cs
@@ -4,13 +4,14 @@
using System.IO.Abstractions;
using Actions.Core.Services;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Assembler.Building;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Assembler;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Assembler;
@@ -22,58 +23,29 @@ internal sealed class AssemblerSitemapCommand(
ICoreService githubActionsService
)
{
- ///
- /// Generate sitemap.xml from the Elasticsearch index with correct content_last_updated dates
- ///
- /// -es, Elasticsearch endpoint, alternatively set env DOCUMENTATION_ELASTIC_URL
- /// The --environment used to resolve the ES index name
- /// Elasticsearch API key, alternatively set env DOCUMENTATION_ELASTIC_APIKEY
- /// Elasticsearch username (basic auth), alternatively set env DOCUMENTATION_ELASTIC_USERNAME
- /// Elasticsearch password (basic auth), alternatively set env DOCUMENTATION_ELASTIC_PASSWORD
- /// Buffer ES request/responses for better error messages and pass ?pretty to all requests
- /// Route requests through a proxy server
- /// Proxy server password
- /// Proxy server username
- /// Disable SSL certificate validation (EXPERT OPTION)
- /// Pass a self-signed certificate fingerprint to validate the SSL connection
- /// Pass a self-signed certificate to validate the SSL connection
- /// If the certificate is not root but only part of the validation chain pass this
- ///
- ///
- [Command("")]
+	/// <summary>Generate sitemap.xml using accurate content_last_updated dates from Elasticsearch.</summary>
+	/// <remarks>
+	/// <para>
+	/// The sitemap generated by <c>assembler build</c> uses the current date as a placeholder.
+	/// Run this command after <c>assembler index</c> to overwrite it with precise last-modified dates
+	/// sourced from the search index.
+	/// </para>
+	/// </remarks>
+	/// <param name="environment">Named deployment target; used to resolve the correct Elasticsearch index.</param>
+ [CommandName("sitemap")]
public async Task Sitemap(
- string? endpoint = null,
+ GlobalCliOptions _,
+ [AsParameters] ElasticsearchIndexOptions es,
string? environment = null,
- string? apiKey = null,
- string? username = null,
- string? password = null,
- bool? debugMode = null,
- string? proxyAddress = null,
- string? proxyPassword = null,
- string? proxyUsername = null,
- bool? disableSslVerification = null,
- string? certificateFingerprint = null,
- string? certificatePath = null,
- bool? certificateNotRoot = null,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var fs = FileSystemFactory.RealWrite;
var service = new AssemblerSitemapService(logFactory, configuration, configurationContext, githubActionsService);
- var state = (fs,
- endpoint, environment, apiKey, username, password,
- debugMode, proxyAddress, proxyPassword, proxyUsername,
- disableSslVerification, certificateFingerprint, certificatePath, certificateNotRoot
+ serviceInvoker.AddCommand(service,
+ async (s, col, ctx) => await s.GenerateSitemapAsync(col, fs, es, environment, ctx)
);
- serviceInvoker.AddCommand(service, state,
- static async (s, col, state, ct) => await s.GenerateSitemapAsync(col, state.fs,
- state.endpoint, state.environment, state.apiKey, state.username, state.password,
- state.debugMode, state.proxyAddress, state.proxyPassword, state.proxyUsername,
- state.disableSslVerification, state.certificateFingerprint, state.certificatePath, state.certificateNotRoot,
- ct)
- );
-
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
}
diff --git a/src/tooling/docs-builder/Commands/Assembler/BloomFilterCommands.cs b/src/tooling/docs-builder/Commands/Assembler/BloomFilterCommands.cs
index 289468a847..5170889fc0 100644
--- a/src/tooling/docs-builder/Commands/Assembler/BloomFilterCommands.cs
+++ b/src/tooling/docs-builder/Commands/Assembler/BloomFilterCommands.cs
@@ -2,25 +2,31 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.LegacyDocs;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Assembler;
+/// Build and query the bloom filter used for legacy-URL redirect coverage.
internal sealed class BloomFilterCommands(ILoggerFactory logFactory, IDiagnosticsCollector collector)
{
- /// Generate the bloom filter binary file
- /// The local dir of local elastic/built-docs repository
- ///
- [Command("create")]
- public async Task CreateBloomBin(string builtDocsDir, Cancel ctx = default)
+	/// <summary>Build a bloom filter binary from a local legacy-docs repository.</summary>
+	/// <remarks>
+	/// The bloom filter is a compact data structure that records which legacy URLs existed before migration.
+	/// It is used to verify redirect coverage: if a legacy URL is absent from the filter, any redirect
+	/// pointing to it cannot be validated. Run once after cloning the legacy-docs repository.
+	/// </remarks>
+	/// <param name="builtDocsDir">Path to the local legacy-docs repository checkout.</param>
+ [NoOptionsInjection]
+ public async Task Create([Existing, ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo builtDocsDir, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
- var pagesProvider = new LocalPagesProvider(builtDocsDir);
+ var pagesProvider = new LocalPagesProvider(builtDocsDir.FullName);
var legacyPageService = new LegacyPageService(logFactory);
serviceInvoker.AddCommand(legacyPageService, pagesProvider, static (s, _, pagesProvider, _) =>
@@ -28,14 +34,13 @@ public async Task CreateBloomBin(string builtDocsDir, Cancel ctx = default)
var result = s.GenerateBloomFilterBinary(pagesProvider);
return Task.FromResult(result);
});
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
- /// Lookup whether exists in the bloomfilter
- /// The local dir of local elastic/built-docs repository
- ///
- [Command("lookup")]
- public async Task PageExists(string path, Cancel ctx = default)
+	/// <summary>Test whether a URL path is recorded in the bloom filter.</summary>
+	/// <param name="path">URL path to look up (e.g. <c>/guide/en/elasticsearch/reference/current/index.html</c>).</param>
+ [NoOptionsInjection]
+ public async Task Lookup(string path, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
@@ -45,6 +50,6 @@ public async Task PageExists(string path, Cancel ctx = default)
var result = s.PathExists(path, logResult: true);
return Task.FromResult(result);
});
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
}
diff --git a/src/tooling/docs-builder/Commands/Assembler/ConfigurationCommands.cs b/src/tooling/docs-builder/Commands/Assembler/ConfigurationCommands.cs
index 0cea37f2cb..e617afd6d5 100644
--- a/src/tooling/docs-builder/Commands/Assembler/ConfigurationCommands.cs
+++ b/src/tooling/docs-builder/Commands/Assembler/ConfigurationCommands.cs
@@ -3,28 +3,34 @@
// See the LICENSE file in the project root for more information
using System.IO.Abstractions;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Assembler.Configuration;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Assembler;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Assembler;
-internal sealed class ConfigurationCommands(
+/// Fetch and manage the central assembler configuration repository.
+internal sealed class ConfigurationCommand(
ILoggerFactory logFactory,
IDiagnosticsCollector collector,
AssemblyConfiguration assemblyConfiguration
)
{
- /// Clone the configuration folder
- /// The git reference of the config, defaults to 'main'
- /// Save the remote configuration locally in the pwd so later commands can pick it up as local
- ///
- [Command("init")]
- public async Task CloneConfigurationFolder(string? gitRef = null, bool local = false, Cancel ctx = default)
+	/// <summary>Fetch the assembler configuration into local application data.</summary>
+	/// <remarks>
+	/// All assembler and codex commands read their repository list from a central configuration repository.
+	/// Run this once before the first <c>assembler clone</c> or <c>assemble</c> invocation, and whenever
+	/// the configuration has changed upstream.
+	/// </remarks>
+	/// <param name="gitRef">Git ref to fetch. Defaults to <c>main</c>.</param>
+	/// <param name="local">Write the configuration into cwd so subsequent commands treat it as a local override.</param>
+ [NoOptionsInjection]
+ public async Task Init(string? gitRef = null, bool local = false, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
@@ -32,6 +38,6 @@ public async Task CloneConfigurationFolder(string? gitRef = null, bool loca
var service = new ConfigurationCloneService(logFactory, assemblyConfiguration, fs);
serviceInvoker.AddCommand(service, (gitRef, local), static async (s, collector, state, ctx) =>
await s.InitConfigurationToApplicationData(collector, state.gitRef, state.local, ctx));
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
}
diff --git a/src/tooling/docs-builder/Commands/Assembler/ContentSourceCommands.cs b/src/tooling/docs-builder/Commands/Assembler/ContentSourceCommands.cs
index 21080a6fac..9cfe6be0e3 100644
--- a/src/tooling/docs-builder/Commands/Assembler/ContentSourceCommands.cs
+++ b/src/tooling/docs-builder/Commands/Assembler/ContentSourceCommands.cs
@@ -4,16 +4,18 @@
using System.IO.Abstractions;
using Actions.Core.Services;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Assembler.ContentSources;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Assembler;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Assembler;
+/// Inspect and validate repository entries in the link registry.
internal sealed class ContentSourceCommands(
ILoggerFactory logFactory,
IDiagnosticsCollector collector,
@@ -22,8 +24,9 @@ internal sealed class ContentSourceCommands(
ICoreService githubActionsService
)
{
- [Command("validate")]
- public async Task Validate(Cancel ctx = default)
+ /// Verify that every repository in the assembler configuration has an active published entry in the link registry.
+ [NoOptionsInjection]
+ public async Task Validate(CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
@@ -31,15 +34,15 @@ public async Task Validate(Cancel ctx = default)
var service = new RepositoryPublishValidationService(logFactory, configuration, configurationContext, fs);
serviceInvoker.AddCommand(service, static async (s, collector, ctx) => await s.ValidatePublishStatus(collector, ctx));
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
- ///
- ///
- ///
- ///
- [Command("match")]
- public async Task Match([Argument] string? repository = null, [Argument] string? branchOrTag = null, Cancel ctx = default)
+	/// <summary>Check whether a repository at a specific branch or tag should be included in the next build.</summary>
+	/// <remarks>Always exits 0; the match result is published via GitHub Actions outputs to drive conditional CI steps.</remarks>
+	/// <param name="repository">Repository slug to match (e.g. <c>elastic/elasticsearch</c>).</param>
+	/// <param name="branchOrTag">Branch name or version tag to test against.</param>
+ [NoOptionsInjection]
+ public async Task Match([Argument] string? repository = null, [Argument] string? branchOrTag = null, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
@@ -48,13 +51,12 @@ public async Task Match([Argument] string? repository = null, [Argument] st
serviceInvoker.AddCommand(service, (repository, branchOrTag),
static async (s, collector, state, ctx) =>
{
+ // ShouldBuild emits GitHub Actions outputs to drive conditional CI steps;
+ // exit code is always 0 — the bool result is communicated via those outputs, not the process exit.
_ = await s.ShouldBuild(collector, state.repository, state.branchOrTag, ctx);
- // ShouldBuild throws an exception on bad args and will return false if it has no matches
- // We return true to the service invoker to continue
return true;
});
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
-
}
diff --git a/src/tooling/docs-builder/Commands/Assembler/DeployCommands.cs b/src/tooling/docs-builder/Commands/Assembler/DeployCommands.cs
index ac307d213f..7e59432288 100644
--- a/src/tooling/docs-builder/Commands/Assembler/DeployCommands.cs
+++ b/src/tooling/docs-builder/Commands/Assembler/DeployCommands.cs
@@ -2,17 +2,20 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
+using System.ComponentModel.DataAnnotations;
using Actions.Core.Services;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Assembler.Deploying;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Assembler;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Assembler;
+/// Deploy built documentation to S3 and update CloudFront redirect rules.
internal sealed class DeployCommands(
AssemblyConfiguration assemblyConfiguration,
IDiagnosticsCollector collector,
@@ -21,56 +24,58 @@ internal sealed class DeployCommands(
ICoreService githubActionsService
)
{
- /// Creates a sync plan
- /// The environment to build
- /// The S3 bucket name to deploy to
- /// The file to write the plan to
- /// The percentage of deletions allowed in the plan as float
- ///
- [Command("plan")]
- public async Task Plan(string environment, string s3BucketName, string @out = "", float? deleteThreshold = null, Cancel ctx = default)
+	/// <summary>Compute a diff of what would change when deploying to S3 and write it to a plan file.</summary>
+	/// <remarks>
+	/// Two-step deployment: <c>plan</c> computes the diff and writes a plan file; <c>apply</c> executes it.
+	/// Review the plan before applying to avoid accidental mass deletions.
+	/// </remarks>
+	/// <param name="environment">Named deployment target.</param>
+	/// <param name="s3BucketName">S3 bucket to deploy to.</param>
+	/// <param name="out">Path to write the plan file. Defaults to stdout.</param>
+	/// <param name="deleteThreshold">Abort if the plan would delete more than this percentage of objects (0–100).</param>
+ [NoOptionsInjection]
+ public async Task Plan(string environment, string s3BucketName, [ExpandUserProfile, RejectSymbolicLinks] FileInfo? @out = null, float? deleteThreshold = null, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var service = new IncrementalDeployService(logFactory, assemblyConfiguration, configurationContext, githubActionsService, FileSystemFactory.RealRead, FileSystemFactory.RealWrite);
serviceInvoker.AddCommand(service, (environment, s3BucketName, @out, deleteThreshold),
- static async (s, collector, state, ctx) => await s.Plan(collector, state.environment, state.s3BucketName, state.@out, state.deleteThreshold, ctx)
+ static async (s, collector, state, ctx) => await s.Plan(collector, state.environment, state.s3BucketName, state.@out?.FullName ?? "", state.deleteThreshold, ctx)
);
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
- /// Applies a sync plan
- /// The environment to build
- /// The S3 bucket name to deploy to
- /// The file path to the plan file to apply
- ///
- [Command("apply")]
- public async Task Apply(string environment, string s3BucketName, string planFile, Cancel ctx = default)
+	/// <summary>Upload the changes described in a plan file to S3.</summary>
+	/// <remarks>Run after <c>assembler deploy plan</c>. Applies the pre-computed diff to the S3 bucket.</remarks>
+	/// <param name="environment">Named deployment target.</param>
+	/// <param name="s3BucketName">S3 bucket to deploy to.</param>
+	/// <param name="planFile">Path to the plan file produced by <c>assembler deploy plan</c>.</param>
+ [NoOptionsInjection]
+ public async Task Apply(string environment, string s3BucketName, [Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "json")] FileInfo planFile, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var service = new IncrementalDeployService(logFactory, assemblyConfiguration, configurationContext, githubActionsService, FileSystemFactory.RealRead, FileSystemFactory.RealWrite);
serviceInvoker.AddCommand(service, (environment, s3BucketName, planFile),
- static async (s, collector, state, ctx) => await s.Apply(collector, state.environment, state.s3BucketName, state.planFile, ctx)
+ static async (s, collector, state, ctx) => await s.Apply(collector, state.environment, state.s3BucketName, state.planFile.FullName, ctx)
);
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
- /// Refreshes the redirects mapping in Cloudfront's KeyValueStore
- /// The environment to build
- /// Path to the redirects mapping pre-generated by docs-builder assemble
- ///
- [Command("update-redirects")]
- public async Task UpdateRedirects(string environment, string? redirectsFile = null, Cancel ctx = default)
+	/// <summary>Push the redirects mapping to CloudFront's KeyValueStore.</summary>
+	/// <remarks>Run after <c>assembler build</c> produces a <c>redirects.json</c>.</remarks>
+	/// <param name="environment">Named deployment target.</param>
+	/// <param name="redirectsFile">Path to <c>redirects.json</c>. Defaults to <c>.artifacts/docs/redirects.json</c>.</param>
+ [NoOptionsInjection]
+ public async Task UpdateRedirects(string environment, [Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "json")] FileInfo? redirectsFile = null, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var fs = FileSystemFactory.RealRead;
var service = new DeployUpdateRedirectsService(logFactory, fs);
serviceInvoker.AddCommand(service, (environment, redirectsFile),
- static async (s, collector, state, ctx) => await s.UpdateRedirects(collector, state.environment, state.redirectsFile, ctx: ctx)
+ static async (s, collector, state, ctx) => await s.UpdateRedirects(collector, state.environment, state.redirectsFile?.FullName, ctx: ctx)
);
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
-
}
diff --git a/src/tooling/docs-builder/Commands/Assembler/NavigationCommands.cs b/src/tooling/docs-builder/Commands/Assembler/NavigationCommands.cs
index d6ca3075f9..7f1f03820e 100644
--- a/src/tooling/docs-builder/Commands/Assembler/NavigationCommands.cs
+++ b/src/tooling/docs-builder/Commands/Assembler/NavigationCommands.cs
@@ -2,17 +2,20 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
+using System.ComponentModel.DataAnnotations;
using System.IO.Abstractions;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Assembler.Navigation;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Assembler;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Assembler;
+/// Validate the global navigation structure and cross-doc-set link references.
internal sealed class NavigationCommands(
ILoggerFactory logFactory,
IDiagnosticsCollector collector,
@@ -20,27 +23,24 @@ internal sealed class NavigationCommands(
IConfigurationContext configurationContext
)
{
- /// Validates navigation.yml does not contain colliding path prefixes and all urls are unique
- ///
- [Command("validate")]
- public async Task Validate(Cancel ctx = default)
+ /// Check navigation.yml for duplicate path prefixes and non-unique URLs.
+ [NoOptionsInjection]
+ public async Task Validate(CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var service = new GlobalNavigationService(logFactory, configuration, configurationContext, FileSystemFactory.RealRead);
serviceInvoker.AddCommand(service, static async (s, collector, ctx) => await s.Validate(collector, ctx));
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
- /// Validate all published links in links.json do not collide with navigation path_prefixes and all urls are unique.
- /// Path to `links.json` defaults to '.artifacts/docs/html/links.json'
- ///
- [Command("validate-link-reference")]
- public async Task ValidateLocalLinkReference([Argument] string? file = null, Cancel ctx = default)
+	/// <summary>Check that no link in a local <c>links.json</c> conflicts with a path prefix defined in <c>navigation.yml</c>.</summary>
+	/// <param name="file">Path to <c>links.json</c>. Defaults to <c>.artifacts/docs/html/links.json</c>.</param>
+ [NoOptionsInjection]
+ public async Task ValidateLinkReference([Argument, Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "json")] FileInfo? file = null, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var service = new GlobalNavigationService(logFactory, configuration, configurationContext, FileSystemFactory.RealRead);
- serviceInvoker.AddCommand(service, file, static async (s, collector, file, ctx) => await s.ValidateLocalLinkReference(collector, file, ctx));
- return await serviceInvoker.InvokeAsync(ctx);
+ serviceInvoker.AddCommand(service, file, static async (s, collector, file, ctx) => await s.ValidateLocalLinkReference(collector, file?.FullName, ctx));
+ return await serviceInvoker.InvokeAsync(ct);
}
-
}
diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs
index e81b88a2f3..74b367d006 100644
--- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs
+++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs
@@ -2,12 +2,12 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
+using System.ComponentModel.DataAnnotations;
using System.IO.Abstractions;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using Actions.Core.Services;
-using ConsoleAppFramework;
using Documentation.Builder.Arguments;
using Elastic.Changelog;
using Elastic.Changelog.Bundling;
@@ -23,10 +23,12 @@
using Elastic.Documentation.ReleaseNotes;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands;
-internal sealed partial class ChangelogCommand(
+/// Create, bundle, and publish changelog entries.
+internal sealed partial class ChangelogCommands(
ILoggerFactory logFactory,
IDiagnosticsCollector collector,
IConfigurationContext configurationContext,
@@ -41,37 +43,28 @@ IEnvironmentVariables environmentVariables
private static partial Regex BundleOutputDirectoryRegex();
private readonly IFileSystem _fileSystem = FileSystemFactory.RealRead;
- private readonly ILogger _logger = logFactory.CreateLogger();
- ///
- /// Changelog commands. Use 'changelog add' to create a new changelog or 'changelog bundle' to create a consolidated list of changelogs.
- ///
- [Command("")]
- public Task Default()
- {
- collector.EmitError(string.Empty, "Please specify a subcommand. Available subcommands:\n - 'changelog add': Create a new changelog from command-line input\n - 'changelog bundle': Create a consolidated list of changelog files\n - 'changelog init': Initialize changelog configuration and folder structure\n - 'changelog render': Render a bundled changelog to markdown or asciidoc files\n - 'changelog upload': Upload changelog or bundle artifacts to S3 or Elasticsearch\n - 'changelog gh-release': Create changelogs from a GitHub release\n - 'changelog evaluate-pr': (CI) Evaluate a PR for changelog generation eligibility\n - 'changelog prepare-artifact': (CI) Package changelog artifact for cross-workflow transfer\n - 'changelog evaluate-artifact': (CI) Evaluate downloaded artifact in commit workflow\n\nRun 'changelog --help' for usage information.");
- return Task.FromResult(1);
- }
-
- ///
- /// Initialize changelog configuration and folder structure. Creates changelog.yml from the example template in the docs folder (discovered via docset.yml when present, or at {path}/docs which is created if needed), and creates changelog and releases subdirectories if they do not exist.
- /// When changelog.yml already exists and --changelog-dir or --bundles-dir is specified, updates the bundle.directory and/or bundle.output_directory fields accordingly.
- /// When creating a new changelog.yml, seeds bundle.owner, bundle.repo, and bundle.link_allow_repos from git remote origin (github.com only) and/or --owner / --repo.
- ///
- /// Optional: Repository root path. Defaults to the output of pwd (current directory). Docs folder is {path}/docs, created if it does not exist.
- /// Optional: Path to changelog directory. Defaults to {docsFolder}/changelog.
- /// Optional: Path to bundles output directory. Defaults to {docsFolder}/releases.
- /// Optional: GitHub owner for bundle defaults and link_allow_repos seeding. Overrides the owner inferred from git remote origin.
- /// Optional: GitHub repository name for bundle defaults and link_allow_repos seeding. Overrides the repo inferred from git remote origin.
- [Command("init")]
+ private readonly ILogger _logger = logFactory.CreateLogger();
+ /// Create changelog.yml and the changelog/releases directory structure.
+ ///
+ /// Discovers the docs folder via docset.yml; falls back to creating PATH/docs.
+ /// When changelog.yml already exists, updates only the paths specified via --changelog-dir or --bundles-dir.
+ /// Seeds bundle.owner, bundle.repo, and bundle.link_allow_repos from the git remote origin when available.
+ ///
+ /// Repository root. Defaults to cwd.
+ /// Changelog entry directory. Defaults to docs/changelog.
+ /// Bundle output directory. Defaults to docs/releases.
+ /// GitHub owner for seeding bundle defaults. Overrides the value inferred from git remote origin.
+ /// GitHub repository name for seeding bundle defaults. Overrides the value inferred from git remote origin.
+ [NoOptionsInjection]
public Task Init(
- string? path = null,
- string? changelogDir = null,
- string? bundlesDir = null,
+ [ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? path = null,
+ [ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? changelogDir = null,
+ [ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? bundlesDir = null,
string? owner = null,
string? repo = null
)
{
- var rootPath = NormalizePath(path ?? ".");
+ var rootPath = path?.FullName ?? Path.GetFullPath(".");
var rootDir = _fileSystem.DirectoryInfo.New(rootPath);
IDirectoryInfo docsFolder;
@@ -92,8 +85,8 @@ public Task Init(
}
var configPath = _fileSystem.Path.Join(docsFolder.FullName, "changelog.yml");
- var changelogPath = NormalizePath(changelogDir ?? "changelog");
- var bundlesPath = NormalizePath(bundlesDir ?? "releases");
+ var changelogPath = changelogDir?.FullName ?? _fileSystem.Path.Join(docsFolder.FullName, "changelog");
+ var bundlesPath = bundlesDir?.FullName ?? _fileSystem.Path.Join(docsFolder.FullName, "releases");
var useNonDefaultChangelogDir = changelogDir != null;
var useNonDefaultBundlesDir = bundlesDir != null;
@@ -103,7 +96,7 @@ public Task Init(
if (!_fileSystem.File.Exists(configPath))
{
byte[]? templateBytes = null;
- using (var stream = typeof(ChangelogCommand).Assembly.GetManifestResourceStream("Documentation.Builder.changelog.example.yml"))
+ using (var stream = typeof(ChangelogCommands).Assembly.GetManifestResourceStream("Documentation.Builder.changelog.example.yml"))
{
if (stream == null)
{
@@ -214,9 +207,7 @@ public Task Init(
return Task.FromResult(0);
}
- ///
- /// Add a new changelog from command-line input
- ///
+ /// Create a new changelog entry YAML file.
/// Optional: Products affected in format "product target lifecycle, ..." (e.g., "elasticsearch 9.2.0 ga, cloud-serverless 2025-08-05"). If not specified, will be inferred from repository or config defaults.
/// Optional: What users must do to mitigate
/// Optional: Area(s) affected (comma-separated or specify multiple times)
@@ -242,13 +233,13 @@ public Task Init(
/// Optional: Use issue numbers for filenames instead of timestamp-slug. With both --prs (which creates one changelog per specified PR) and --issues (which creates one changelog per specified issue), each changelog filename will be derived from its issues. Requires --prs or --issues. Mutually exclusive with --use-pr-number.
/// Optional: GitHub release tag to fetch PRs from (e.g., "v9.2.0" or "latest"). When specified, creates one changelog per PR in the release notes. Requires --repo (or bundle.repo in changelog.yml). Mutually exclusive with --prs, --issues, and --report. Does not create a bundle; use 'changelog gh-release' for that.
/// Cancellation token
- [Command("add")]
- public async Task Create(
- [ProductInfoParser] List? products = null,
+ [NoOptionsInjection]
+ public async Task Add(
+ [ArgumentParser(typeof(ProductInfoParser))] ProductArgumentList? products = null,
string? action = null,
string[]? areas = null,
bool concise = false,
- string? config = null,
+ [Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo? config = null,
string? description = null,
bool noExtractReleaseNotes = false,
bool noExtractIssues = false,
@@ -268,9 +259,10 @@ public async Task Create(
string? type = null,
bool usePrNumber = false,
bool useIssueNumber = false,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
+ var ctx = ct;
await using var serviceInvoker = new ServiceInvoker(collector);
var hasReport = !string.IsNullOrWhiteSpace(report);
@@ -330,7 +322,7 @@ public async Task Create(
// Precedence: CLI option > bundle section in changelog.yml > built-in default.
// This applies to --prs, --issues, --release-version, and --report alike.
var bundleConfig = await new ChangelogConfigurationLoader(logFactory, configurationContext, _fileSystem)
- .LoadChangelogConfiguration(collector, config, ctx);
+ .LoadChangelogConfiguration(collector, config?.FullName, ctx);
var resolvedRepo = !string.IsNullOrWhiteSpace(repo) ? repo : bundleConfig?.Bundle?.Repo;
var resolvedOwner = owner ?? bundleConfig?.Bundle?.Owner ?? "elastic";
var resolvedOutput = !string.IsNullOrWhiteSpace(output) ? output : bundleConfig?.Bundle?.Directory;
@@ -359,7 +351,7 @@ public async Task Create(
{
Repository = repoArg,
Version = releaseVersion,
- Config = config,
+ Config = config?.FullName,
Output = resolvedOutput,
StripTitlePrefix = stripTitlePrefixResolved,
CreateBundle = false
@@ -474,7 +466,7 @@ async static (s, collector, state, ctx) => await s.CreateChangelogsFromRelease(c
}
// Use provided products or empty list (service will infer from repo/config if empty)
- var resolvedProducts = products ?? [];
+ var resolvedProducts = (IReadOnlyList?)products ?? [];
if (usePrNumber && useIssueNumber)
{
@@ -522,7 +514,7 @@ async static (s, collector, state, ctx) => await s.CreateChangelogsFromRelease(c
FeatureId = featureId,
Highlight = highlight,
Output = resolvedOutput,
- Config = config,
+ Config = config?.FullName,
UsePrNumber = usePrNumber,
UseIssueNumber = useIssueNumber,
StripTitlePrefix = stripTitlePrefixResolved,
@@ -538,17 +530,20 @@ async static (s, collector, state, ctx) => await s.CreateChangelog(collector, st
return await serviceInvoker.InvokeAsync(ctx);
}
- ///
- /// Bundle changelog files. Can use either profile-based bundling (for example, "bundle elasticsearch-release 9.2.0") or command-line options (for example, "bundle --all") Only one command-line filter option can be specified: `--all`, `--input-products`, `--prs`, `--issues`, `--release-version`, or `--report`.
- ///
+ /// Aggregate changelog entries matching a filter into a single bundle YAML.
+ ///
+ /// Accepts either a named profile from changelog.yml (e.g. bundle my-release 9.2.0) or
+ /// an explicit filter flag. Exactly one filter must be specified: --all, --input-products,
+ /// --prs, --issues, --release-version, or --report.
+ ///
/// Optional: Profile name from bundle.profiles in config (for example, "elasticsearch-release"). When specified, the second argument is the version or promotion report URL.
/// Optional: Version number or promotion report URL/path when using a profile (for example, "9.2.0" or "https://buildkite.../promotion-report.html")
/// Optional: Promotion report or URL list file when also providing a version. When provided, the second argument must be a version string and this is the PR/issue filter source (for example, "bundle serverless-release 2026-02 ./report.html").
/// Include all changelogs in the directory.
/// Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml'
/// Optional: Directory containing changelog YAML files. Uses config bundle.directory or defaults to current directory
- /// Optional: Bundle description text with placeholder support. Supports {version}, {lifecycle}, {owner}, and {repo} placeholders. Overrides bundle.description from config. In option-based mode, placeholders require --output-products to be explicitly specified.
- /// Optional: Filter by feature IDs (comma-separated) or a path to a newline-delimited file containing feature IDs. Can be specified multiple times. Entries with matching feature-id values will be commented out when the bundle is rendered (by CLI render or {changelog} directive).
+ /// Optional: Bundle description text with placeholder support. Supports VERSION, LIFECYCLE, OWNER, and REPO placeholders. Overrides bundle.description from config. In option-based mode, placeholders require --output-products to be explicitly specified.
+ /// Optional: Filter by feature IDs (comma-separated) or a path to a newline-delimited file containing feature IDs. Can be specified multiple times. Entries with matching feature-id values will be commented out when the bundle is rendered (by CLI render or changelog directive).
/// Optional: Skip auto-population of release date in the bundle. Mutually exclusive with --release-date. Not available in profile mode.
/// Optional: Explicit release date for the bundle in YYYY-MM-DD format. Overrides auto-population behavior. Mutually exclusive with --no-release-date. Not available in profile mode.
/// Filter by products in format "product target lifecycle, ..." (for example, "cloud-serverless 2025-12-02 ga, cloud-serverless 2025-12-06 beta"). When specified, all three parts (product, target, lifecycle) are required but can be wildcards (*). Examples: "elasticsearch * *" matches all elasticsearch changelogs, "cloud-serverless 2025-12-02 *" matches cloud-serverless 2025-12-02 with any lifecycle, "* 9.3.* *" matches any product with target starting with "9.3.", "* * *" matches all changelogs (equivalent to --all).
@@ -561,24 +556,23 @@ async static (s, collector, state, ctx) => await s.CreateChangelog(collector, st
/// A URL or file path to a promotion report. Extracts PR URLs and uses them as the filter.
/// GitHub release tag to use as a filter source (for example, "v9.2.0" or "latest"). When specified, fetches the release, parses PR references from the release notes, and uses those PRs as the filter — equivalent to passing the PR list via --prs. When --output-products is not specified, it is inferred from the release tag and repository name.
/// Optional: Copy the contents of each changelog file into the entries array. Uses config bundle.resolve or defaults to false.
- /// Optional: Explicitly turn off resolve (overrides config).
/// Emit GitHub Actions step outputs (needs_network, needs_github_token, output_path) describing network requirements and the resolved output path, then exit without generating the bundle. Intended for CI actions.
///
- [Command("bundle")]
+ [NoOptionsInjection]
public async Task Bundle(
[Argument] string? profile = null,
[Argument] string? profileArg = null,
[Argument] string? profileReport = null,
bool all = false,
- string? config = null,
- string? directory = null,
+ [Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo? config = null,
+ [ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? directory = null,
string? description = null,
string[]? hideFeatures = null,
bool noReleaseDate = false,
string? releaseDate = null,
- [ProductInfoParser] List? inputProducts = null,
+ [ArgumentParser(typeof(ProductInfoParser))] ProductArgumentList? inputProducts = null,
string? output = null,
- [ProductInfoParser] List? outputProducts = null,
+ [ArgumentParser(typeof(ProductInfoParser))] ProductArgumentList? outputProducts = null,
string[]? issues = null,
string? owner = null,
bool plan = false,
@@ -587,10 +581,10 @@ public async Task Bundle(
string? repo = null,
string? report = null,
bool? resolve = null,
- bool noResolve = false,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
+ var ctx = ct;
await using var serviceInvoker = new ServiceInvoker(collector);
var service = new ChangelogBundlingService(logFactory, configurationContext);
@@ -611,7 +605,7 @@ public async Task Bundle(
{
// Precedence: --repo CLI > bundle.repo config; --owner CLI > bundle.owner config > "elastic"
var bundleConfig = await new ChangelogConfigurationLoader(logFactory, configurationContext, _fileSystem)
- .LoadChangelogConfiguration(collector, config, ctx);
+ .LoadChangelogConfiguration(collector, config?.FullName, ctx);
var resolvedRepo = !string.IsNullOrWhiteSpace(repo) ? repo : bundleConfig?.Bundle?.Repo;
var resolvedOwner = owner ?? bundleConfig?.Bundle?.Owner ?? "elastic";
@@ -671,14 +665,12 @@ public async Task Bundle(
if (!string.IsNullOrWhiteSpace(owner))
forbidden.Add("--owner");
if (resolve.HasValue)
- forbidden.Add("--resolve");
- if (noResolve)
- forbidden.Add("--no-resolve");
+ forbidden.Add("--resolve / --no-resolve");
if (hideFeatures is { Length: > 0 })
forbidden.Add("--hide-features");
- if (!string.IsNullOrWhiteSpace(config))
+ if (config != null)
forbidden.Add("--config");
- if (!string.IsNullOrWhiteSpace(directory))
+ if (directory != null)
forbidden.Add("--directory");
if (!string.IsNullOrWhiteSpace(description))
forbidden.Add("--description");
@@ -832,7 +824,7 @@ public async Task Bundle(
Output = processedOutput,
Profile = profile,
ProfileArgument = profileArg,
- Config = config,
+ Config = config?.FullName,
Description = description
};
var planResult = await service.PlanBundleAsync(collector, planInput, releaseVersion != null, ctx);
@@ -876,13 +868,13 @@ public async Task Bundle(
}
// Determine resolve: CLI --no-resolve and --resolve override config. null = use config default.
- var shouldResolve = noResolve ? false : resolve;
+ var shouldResolve = resolve;
var allFeatureIdsForBundle = ExpandCommaSeparated(hideFeatures);
var input = new BundleChangelogsArguments
{
- Directory = directory,
+ Directory = directory?.FullName,
Output = processedOutput,
All = all,
InputProducts = inputProducts,
@@ -896,7 +888,7 @@ public async Task Bundle(
ProfileArgument = profileArg,
ProfileReport = isProfileMode ? profileReport : null,
Report = !isProfileMode ? report : null,
- Config = config,
+ Config = config?.FullName,
HideFeatures = allFeatureIdsForBundle.Count > 0 ? allFeatureIdsForBundle.ToArray() : null,
Description = description,
ReleaseDate = releaseDate,
@@ -910,11 +902,11 @@ async static (s, collector, state, ctx) => await s.BundleChangelogs(collector, s
return await serviceInvoker.InvokeAsync(ctx);
}
- ///
- /// Remove changelog files. Can use either profile-based removal (e.g., "remove elasticsearch-release 9.2.0") or raw flags (e.g., "remove --all").
- /// When a file is referenced by an unresolved bundle, the command blocks by default to prevent breaking
- /// the {changelog} directive. Use --force to override.
- ///
+ /// Delete changelog entry files matching a filter.
+ ///
+ /// Blocks when a file is referenced by an unresolved bundle to avoid breaking the {changelog}
+ /// directive in published documentation. Pass --force to override.
+ ///
/// Optional: Profile name from bundle.profiles in config (for example, "elasticsearch-release"). When specified, the second argument is the version or promotion report URL.
/// Optional: Version number or promotion report URL/path when using a profile (for example, "9.2.0" or "https://buildkite.../promotion-report.html")
/// Optional: Promotion report or URL list file when also providing a version. When provided, the second argument must be a version string and this is the PR/issue filter source.
@@ -932,27 +924,28 @@ async static (s, collector, state, ctx) => await s.BundleChangelogs(collector, s
/// GitHub repository name, which is used when PRs or issues are specified as numbers or when --release-version is used. Falls back to bundle.repo in changelog.yml when not specified. If that value is also absent, the product ID is used.
/// Optional (option-based mode only): URL or file path to a promotion report. Extracts PR URLs and uses them as the filter. Mutually exclusive with --all, --products, --prs, --release-version, and --issues.
///
- [Command("remove")]
+ [NoOptionsInjection]
public async Task Remove(
[Argument] string? profile = null,
[Argument] string? profileArg = null,
[Argument] string? profileReport = null,
bool all = false,
- string? bundlesDir = null,
- string? config = null,
- string? directory = null,
+ [ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? bundlesDir = null,
+ [Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo? config = null,
+ [ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? directory = null,
bool dryRun = false,
bool force = false,
string[]? issues = null,
string? owner = null,
- [ProductInfoParser] List? products = null,
+ [ArgumentParser(typeof(ProductInfoParser))] ProductArgumentList? products = null,
string[]? prs = null,
string? releaseVersion = null,
string? repo = null,
string? report = null,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
+ var ctx = ct;
await using var serviceInvoker = new ServiceInvoker(collector);
var service = new ChangelogRemoveService(logFactory, configurationContext);
@@ -971,7 +964,7 @@ public async Task Remove(
// Precedence: --repo CLI > bundle.repo config; --owner CLI > bundle.owner config > "elastic"
var bundleConfig = await new ChangelogConfigurationLoader(logFactory, configurationContext, _fileSystem)
- .LoadChangelogConfiguration(collector, config, ctx);
+ .LoadChangelogConfiguration(collector, config?.FullName, ctx);
var resolvedRepo = !string.IsNullOrWhiteSpace(repo) ? repo : bundleConfig?.Bundle?.Repo;
var resolvedOwner = owner ?? bundleConfig?.Bundle?.Owner ?? "elastic";
@@ -1113,9 +1106,7 @@ public async Task Remove(
// In profile mode, directory is derived from the changelog config (not from CLI).
// In raw mode, pass null when --directory is not specified so ApplyConfigDefaults can consult
// bundle.directory before falling back to CWD.
- var resolvedDirectory = isProfileMode || string.IsNullOrWhiteSpace(directory)
- ? null
- : NormalizePath(directory);
+ var resolvedDirectory = isProfileMode ? null : directory?.FullName;
var input = new ChangelogRemoveArguments
{
@@ -1127,9 +1118,9 @@ public async Task Remove(
Owner = owner,
Repo = repo,
DryRun = dryRun,
- BundlesDir = string.IsNullOrWhiteSpace(bundlesDir) ? null : NormalizePath(bundlesDir),
+ BundlesDir = bundlesDir?.FullName,
Force = force,
- Config = string.IsNullOrWhiteSpace(config) ? null : NormalizePath(config),
+ Config = config?.FullName,
Profile = isProfileMode ? profile : null,
ProfileArgument = isProfileMode ? profileArg : null,
ProfileReport = isProfileMode ? profileReport : null,
@@ -1143,9 +1134,7 @@ async static (s, collector, state, ctx) => await s.RemoveChangelogs(collector, s
return await serviceInvoker.InvokeAsync(ctx);
}
- ///
- /// Render bundled changelog(s) to markdown or asciidoc files
- ///
+ /// Render one or more changelog bundles to Markdown or AsciiDoc.
/// Required: Bundle input(s) in format "bundle-file-path|changelog-file-path|repo|link-visibility" (use pipe as delimiter). To merge multiple bundles, separate them with commas. Only bundle-file-path is required. link-visibility can be "hide-links" or "keep-links" (default). Use "hide-links" for private repositories; when set, all PR and issue links for each affected entry are hidden (entries may have multiple links via the prs and issues arrays). Paths support tilde (~) expansion and relative paths.
/// Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml'
/// Optional: Output file type. Valid values: "markdown" or "asciidoc". Defaults to "markdown"
@@ -1154,18 +1143,19 @@ async static (s, collector, state, ctx) => await s.RemoveChangelogs(collector, s
/// Optional: Group entries by area/component in subsections. For breaking changes with a subtype, groups by subtype instead of area. Defaults to false
/// Optional: Title to use for section headers in output files. Defaults to version from first bundle
///
- [Command("render")]
+ [NoOptionsInjection]
public async Task Render(
string[]? input = null,
- string? config = null,
+ [Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo? config = null,
string? fileType = "markdown",
string[]? hideFeatures = null,
string? output = null,
bool subsections = false,
string? title = null,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
+ var ctx = ct;
await using var serviceInvoker = new ServiceInvoker(collector);
var service = new ChangelogRenderingService(logFactory, configurationContext);
@@ -1195,7 +1185,7 @@ public async Task Render(
Subsections = subsections,
HideFeatures = allFeatureIds.Count > 0 ? allFeatureIds.ToArray() : null,
FileType = ft.Value,
- Config = config
+ Config = config?.FullName
};
serviceInvoker.AddCommand(service, renderInput,
@@ -1205,36 +1195,35 @@ async static (s, collector, state, ctx) => await s.RenderChangelogs(collector, s
return await serviceInvoker.InvokeAsync(ctx);
}
- ///
- /// Create changelogs from a GitHub release
- ///
+ /// Create changelog entries from the PRs referenced in a GitHub release.
/// Required: GitHub repository in owner/repo format (e.g., "elastic/elasticsearch" or just "elasticsearch" which defaults to elastic/elasticsearch)
/// Optional: Version tag to fetch (e.g., "v9.0.0", "9.0.0"). Defaults to "latest"
/// Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml'
- /// Optional: Bundle description text with placeholder support. Supports {version}, {lifecycle}, {owner}, and {repo} placeholders. Overrides bundle.description from config.
+ /// Optional: Bundle description text with placeholder support. Supports VERSION, LIFECYCLE, OWNER, and REPO placeholders. Overrides bundle.description from config.
/// Optional: Output directory for changelog files. Falls back to bundle.directory in changelog.yml when not specified. Defaults to './changelogs'
/// Optional: Explicit release date for the bundle in YYYY-MM-DD format. Overrides GitHub release published date.
/// Optional: Remove square brackets and text within them from the beginning of PR titles (e.g., "[Inference API] Title" becomes "Title")
/// Optional: Warn when the type inferred from release notes section headers doesn't match the type derived from PR labels. Defaults to true
///
- [Command("gh-release")]
- public async Task GitHubRelease(
+ [NoOptionsInjection]
+ public async Task GhRelease(
[Argument] string repo,
[Argument] string version = "latest",
- string? config = null,
+ [Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo? config = null,
string? description = null,
string? output = null,
string? releaseDate = null,
bool stripTitlePrefix = false,
bool warnOnTypeMismatch = true,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
+ var ctx = ct;
await using var serviceInvoker = new ServiceInvoker(collector);
// --output CLI > bundle.directory config > ./changelogs (service default)
var bundleConfig = await new ChangelogConfigurationLoader(logFactory, configurationContext, _fileSystem)
- .LoadChangelogConfiguration(collector, config, ctx);
+ .LoadChangelogConfiguration(collector, config?.FullName, ctx);
var resolvedOutput = !string.IsNullOrWhiteSpace(output) ? output : bundleConfig?.Bundle?.Directory;
IGitHubReleaseService releaseService = new GitHubReleaseService(logFactory);
@@ -1255,7 +1244,7 @@ public async Task GitHubRelease(
{
Repository = repo,
Version = version,
- Config = config,
+ Config = config?.FullName,
Output = resolvedOutput,
StripTitlePrefix = stripTitlePrefixResolved,
WarnOnTypeMismatch = warnOnTypeMismatch,
@@ -1270,23 +1259,20 @@ async static (s, collector, state, ctx) => await s.CreateChangelogsFromRelease(c
return await serviceInvoker.InvokeAsync(ctx);
}
- ///
- /// Amend a bundle with additional changelog entries, creating an immutable .amend-N.yaml file.
- ///
+ /// Append additional changelog entries to a published bundle without modifying it.
+ /// Creates an immutable .amend-N.yaml sidecar file alongside the original bundle.
/// Required: Path to the original bundle file to amend
/// Required: Path(s) to changelog YAML file(s) to add as comma-separated values (e.g., --add "file1.yaml,file2.yaml"). Supports tilde (~) expansion and relative paths.
- /// Optional: Copy the contents of each changelog file into the entries array. When not specified, inferred from the original bundle.
- /// Optional: Explicitly turn off resolve (overrides inference from original bundle).
- ///
- [Command("bundle-amend")]
+ /// Optional: Copy the contents of each changelog file into the entries array. Use --no-resolve to explicitly turn off resolve (overrides inference from original bundle).
+ [NoOptionsInjection]
public async Task BundleAmend(
- [Argument] string bundlePath,
+ [Argument, Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo bundlePath,
string[]? add = null,
bool? resolve = null,
- bool noResolve = false,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
+ var ctx = ct;
await using var serviceInvoker = new ServiceInvoker(collector);
var service = new ChangelogBundleAmendService(logFactory, configurationContext: configurationContext);
@@ -1301,14 +1287,14 @@ public async Task BundleAmend(
}
// Normalize the bundle path
- var normalizedBundlePath = NormalizePath(bundlePath);
+ var normalizedBundlePath = bundlePath.FullName;
var normalizedAddFiles = ExpandCommaSeparated(add)
.Select(NormalizePath)
.ToList();
// Determine resolve: CLI --no-resolve takes precedence, then CLI --resolve, then infer from bundle
- var shouldResolve = noResolve ? false : resolve;
+ var shouldResolve = resolve;
var input = new AmendBundleArguments
{
@@ -1324,9 +1310,12 @@ async static (s, collector, state, ctx) => await s.AmendBundle(collector, state,
return await serviceInvoker.InvokeAsync(ctx);
}
- ///
- /// (CI) Evaluate a PR for changelog generation eligibility. Performs pre-flight checks (body-only edit, bot loop, manual edit), loads config, checks label rules, resolves title/type, and sets GitHub Actions outputs.
- ///
+ /// (CI) Evaluate a pull request for changelog generation eligibility and set GitHub Actions outputs.
+ ///
+ /// Runs pre-flight checks (body-only edit, bot loop, manual edit), applies label rules from
+ /// changelog.yml, and resolves the entry type and title. Designed to be called from a
+ /// GitHub Actions workflow step.
+ ///
/// Path to the changelog.yml configuration file
/// GitHub repository owner
/// GitHub repository name
@@ -1341,9 +1330,9 @@ async static (s, collector, state, ctx) => await s.AmendBundle(collector, state,
/// Remove square-bracket prefixes from the PR title
/// Bot login name for loop detection
///
- [Command("evaluate-pr")]
+ [NoOptionsInjection]
public async Task EvaluatePr(
- string config,
+ [FileExtensions(Extensions = "yml,yaml")] FileInfo config,
string owner,
string repo,
int prNumber,
@@ -1356,9 +1345,10 @@ public async Task EvaluatePr(
bool bodyChanged = false,
bool stripTitlePrefix = false,
string botName = "github-actions[bot]",
- Cancel ctx = default
+ CancellationToken ct = default
)
{
+ var ctx = ct;
await using var serviceInvoker = new ServiceInvoker(collector);
IGitHubPrService prService = new GitHubPrService(logFactory);
@@ -1368,7 +1358,7 @@ public async Task EvaluatePr(
var args = new EvaluatePrArguments
{
- Config = config,
+ Config = config.FullName,
Owner = owner,
Repo = repo,
PrNumber = prNumber,
@@ -1391,11 +1381,11 @@ async static (s, collector, state, ctx) => await s.EvaluatePr(collector, state,
return await serviceInvoker.InvokeAsync(ctx);
}
- ///
- /// (CI) Package changelog artifact for cross-workflow transfer. Resolves final status from
- /// evaluate-pr + changelog add outcomes, copies generated YAML, writes metadata.json, and
- /// sets GitHub Actions outputs. Always succeeds (exit 0) so the upload step runs.
- ///
+ /// (CI) Package changelog artifact for cross-workflow transfer.
+ ///
+ /// Resolves final status from evaluate-pr + changelog add outcomes, copies generated YAML,
+ /// writes metadata.json, and sets GitHub Actions outputs. Always succeeds (exit 0) so the upload step runs.
+ ///
/// Directory where changelog add wrote the generated YAML
/// Directory to write the artifact (metadata.json + YAML)
/// Status output from the evaluate-pr step
@@ -1412,8 +1402,7 @@ async static (s, collector, state, ctx) => await s.EvaluatePr(collector, state,
/// Optional: comma-separated skip labels from evaluate-pr
/// Optional: path to changelog.yml
/// Optional: filename of a previously committed changelog for this PR
- ///
- [Command("prepare-artifact")]
+ [NoOptionsInjection]
public async Task PrepareArtifact(
string stagingDir,
string outputDir,
@@ -1431,9 +1420,10 @@ public async Task PrepareArtifact(
string? skipLabels = null,
string? config = null,
string? existingChangelogFilename = null,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
+ var ctx = ct;
await using var serviceInvoker = new ServiceInvoker(collector);
var fs = FileSystemFactory.RealGitRootForPathWrite(null, outputDir);
@@ -1466,22 +1456,23 @@ async static (s, collector, state, ctx) => await s.PrepareArtifact(collector, st
return await serviceInvoker.InvokeAsync(ctx);
}
- ///
- /// (CI) Evaluate downloaded artifact in the resolving workflow. Reads metadata, validates
- /// PR state (SHA, labels), and sets GitHub Actions outputs for downstream steps (commit, comment).
- ///
+ /// (CI) Evaluate downloaded artifact in the resolving workflow.
+ ///
+ /// Reads metadata, validates PR state (SHA, labels), and sets GitHub Actions outputs
+ /// for downstream steps (commit, comment).
+ ///
/// Path to the downloaded metadata.json file
/// GitHub repository owner
/// GitHub repository name
- ///
- [Command("evaluate-artifact")]
+ [NoOptionsInjection]
public async Task EvaluateArtifact(
string metadata,
string owner,
string repo,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
+ var ctx = ct;
await using var serviceInvoker = new ServiceInvoker(collector);
var fs = FileSystemFactory.RealGitRootForPathWrite(null, metadata);
@@ -1502,10 +1493,7 @@ async static (s, collector, state, ctx) => await s.EvaluateArtifact(collector, s
return await serviceInvoker.InvokeAsync(ctx);
}
- ///
- /// Expands a CLI array parameter where each element may be comma-separated into a flat list of values.
- /// Filters out blank entries.
- ///
+
private static List ExpandCommaSeparated(string[]? values)
{
if (values is not { Length: > 0 })
@@ -1522,10 +1510,6 @@ private static List ExpandCommaSeparated(string[]? values)
return result;
}
- ///
- /// Returns a path suitable for changelog.yml config (relative to repo when possible, forward slashes).
- /// Quotes the value if it contains YAML-special characters.
- ///
private static string GetPathForConfig(string repoPath, string targetPath)
{
var relativePath = Path.GetRelativePath(repoPath, targetPath);
@@ -1555,25 +1539,24 @@ private string ApplyChangelogInitBundleRepoSeed(string content, string? ownerCli
return ChangelogTemplateSeeder.ApplyBundleRepoSeed(content, ownerCli, repoCli, gitOwner, gitRepo);
}
- ///
- /// Upload changelog or bundle artifacts to S3 or Elasticsearch.
- /// Uses content-hash–based incremental upload: only files whose content has changed are transferred.
- ///
+ /// Upload changelog entries or bundle artifacts to S3 or Elasticsearch.
+ /// Uses content-hash–based incremental transfer — only changed files are uploaded.
/// Artifact type to upload: 'changelog' (individual entries) or 'bundle' (consolidated bundles).
/// Upload destination: 's3' or 'elasticsearch'.
/// S3 bucket name (required when target is 's3').
/// Path to changelog.yml configuration file. Defaults to docs/changelog.yml.
/// Override changelog directory instead of reading it from config.
- [Command("upload")]
+ [NoOptionsInjection]
public async Task Upload(
string artifactType,
string target,
string s3BucketName = "",
- string? config = null,
- string? directory = null,
- Cancel ctx = default
+ [Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo? config = null,
+ [ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? directory = null,
+ CancellationToken ct = default
)
{
+ var ctx = ct;
if (!Enum.TryParse(artifactType, ignoreCase: true, out var parsedArtifactType))
{
collector.EmitError(string.Empty, $"Invalid artifact type '{artifactType}'. Valid values: changelog, bundle");
@@ -1592,8 +1575,8 @@ public async Task Upload(
return 1;
}
- var resolvedDirectory = directory != null ? NormalizePath(directory) : null;
- var resolvedConfig = config != null ? NormalizePath(config) : null;
+ var resolvedDirectory = directory != null ? directory?.FullName : null;
+ var resolvedConfig = config != null ? config?.FullName : null;
await using var serviceInvoker = new ServiceInvoker(collector);
var service = new ChangelogUploadService(logFactory, configurationContext);
diff --git a/src/tooling/docs-builder/Commands/Codex/CodexCommands.cs b/src/tooling/docs-builder/Commands/Codex/CodexCommands.cs
index a5d62a18cb..fd5ab16bca 100644
--- a/src/tooling/docs-builder/Commands/Codex/CodexCommands.cs
+++ b/src/tooling/docs-builder/Commands/Codex/CodexCommands.cs
@@ -2,13 +2,14 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
+using System.ComponentModel.DataAnnotations;
using System.IO.Abstractions;
using Actions.Core.Services;
-using ConsoleAppFramework;
using Documentation.Builder.Http;
using Elastic.Codex;
using Elastic.Codex.Building;
using Elastic.Codex.Sourcing;
+using Elastic.Documentation;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Codex;
using Elastic.Documentation.Diagnostics;
@@ -16,12 +17,18 @@
using Elastic.Documentation.LinkIndex;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Codex;
-///
-/// Commands for building documentation codexes from multiple isolated documentation sets.
-///
+/// Build a documentation portal over multiple independent documentation sets, each with its own navigation.
+///
+///
+/// A codex is a portal composed of several documentation sets. Unlike the assembler, each set retains
+/// its own navigation structure — there is no merged global navigation tree. The codex configuration
+/// (codex.yml) lists which repositories to include and how to compose the portal.
+///
+///
internal sealed class CodexCommands(
ILoggerFactory logFactory,
IDiagnosticsCollector collector,
@@ -30,36 +37,36 @@ internal sealed class CodexCommands(
IEnvironmentVariables environmentVariables
)
{
- ///
- /// Clone and build a documentation codex in one step.
- ///
- /// Path to the codex.yml configuration file.
- /// Treat warnings as errors and fail on warnings.
- /// Fetch the latest commit even if already cloned.
- /// Assume repositories are already cloned.
- /// Output directory for the built codex.
- /// Serve the documentation on port 4000 after build.
- /// Cancellation token.
- [Command("")]
+ /// Clone all repositories and build the portal in one step.
+ ///
+ ///
+ /// Path to the codex.yml configuration file.
+ /// Treat warnings as errors.
+ /// Fetch the HEAD of each branch instead of the pinned ref.
+ /// Skip cloning; assume repositories are already on disk.
+ /// Output directory for the built portal. Defaults to .artifacts/codex/.
+ /// Serve the portal on port 4000 after a successful build.
+ [DefaultCommand]
public async Task CloneAndBuild(
- [Argument] string config,
+ GlobalCliOptions _,
+ [Argument, Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo config,
bool strict = false,
bool fetchLatest = false,
bool assumeCloned = false,
- string? output = null,
+ [ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? output = null,
bool serve = false,
- Cancel ctx = default)
+ CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var fs = FileSystemFactory.RealRead;
- // Load codex configuration
- var configPath = fs.Path.GetFullPath(config);
- var configFile = fs.FileInfo.New(configPath);
+
+
+ var configFile = fs.FileInfo.New(config.FullName);
if (!configFile.Exists)
{
- collector.EmitGlobalError($"Codex configuration file not found: {configPath}");
+ collector.EmitGlobalError($"Codex configuration file not found: {config.FullName}");
return 1;
}
@@ -71,7 +78,7 @@ public async Task CloneAndBuild(
return 1;
}
- var codexContext = new CodexContext(codexConfig, configFile, collector, fs, fs, null, output);
+ var codexContext = new CodexContext(codexConfig, configFile, collector, fs, fs, null, output?.FullName);
using var linkIndexReader = new GitLinkIndexReader(codexConfig.Environment);
var cloneService = new CodexCloneService(logFactory, linkIndexReader);
@@ -84,55 +91,52 @@ public async Task CloneAndBuild(
return cloneResult.Checkouts.Count > 0;
});
- // Build service
var isolatedBuildService = new IsolatedBuildService(logFactory, configurationContext, githubActionsService, environmentVariables);
var buildService = new CodexBuildService(logFactory, configurationContext, isolatedBuildService);
serviceInvoker.AddCommand(buildService, (codexContext, cloneResult, fs), strict,
async (s, col, state, c) =>
{
- if (cloneResult == null)
+ if (state.cloneResult == null)
return false;
- var result = await s.BuildAll(state.codexContext, cloneResult, state.fs, c);
+ var result = await s.BuildAll(state.codexContext, state.cloneResult, state.fs, c);
return result.DocumentationSets.Count > 0;
});
- var result = await serviceInvoker.InvokeAsync(ctx);
+ var result = await serviceInvoker.InvokeAsync(ct);
if (serve && result == 0)
{
var host = new StaticWebHost(4000, codexContext.OutputDirectory.FullName);
- await host.RunAsync(ctx);
- await host.StopAsync(ctx);
+ await host.RunAsync(ct);
+ await host.StopAsync(ct);
}
return result;
}
- ///
- /// Clone all repositories defined in the codex configuration.
- ///
- /// Path to the codex.yml configuration file.
- /// Treat warnings as errors and fail on warnings.
- /// Fetch the latest commit even if already cloned.
- /// Assume repositories are already cloned.
- /// Cancellation token.
- [Command("clone")]
+ /// Clone all repositories listed in the codex configuration.
+ /// Path to the codex.yml configuration file.
+ /// Treat warnings as errors.
+ /// Fetch the HEAD of each branch instead of the pinned ref.
+ /// Skip cloning; assume repositories are already on disk.
+ [NoOptionsInjection]
public async Task Clone(
- [Argument] string config,
+ [Argument, Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo config,
bool strict = false,
bool fetchLatest = false,
bool assumeCloned = false,
- Cancel ctx = default)
+ CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var fs = FileSystemFactory.RealRead;
- var configPath = fs.Path.GetFullPath(config);
- var configFile = fs.FileInfo.New(configPath);
+
+
+ var configFile = fs.FileInfo.New(config.FullName);
if (!configFile.Exists)
{
- collector.EmitGlobalError($"Codex configuration file not found: {configPath}");
+ collector.EmitGlobalError($"Codex configuration file not found: {config.FullName}");
return 1;
}
@@ -155,32 +159,31 @@ public async Task Clone(
return result.Checkouts.Count > 0;
});
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
- ///
- /// Build all documentation sets from already cloned repositories.
- ///
- /// Path to the codex.yml configuration file.
- /// Treat warnings as errors and fail on warnings.
- /// Output directory for the built codex.
- /// Cancellation token.
- [Command("build")]
+ /// Build the portal from previously cloned repositories.
+ /// Run after codex clone.
+ /// Path to the codex.yml configuration file.
+ /// Treat warnings as errors.
+ /// Output directory. Defaults to .artifacts/codex/.
+ [NoOptionsInjection]
public async Task Build(
- [Argument] string config,
+ [Argument, Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo config,
bool strict = false,
- string? output = null,
- Cancel ctx = default)
+ [ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? output = null,
+ CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var fs = FileSystemFactory.RealRead;
- var configPath = fs.Path.GetFullPath(config);
- var configFile = fs.FileInfo.New(configPath);
+
+
+ var configFile = fs.FileInfo.New(config.FullName);
if (!configFile.Exists)
{
- collector.EmitGlobalError($"Codex configuration file not found: {configPath}");
+ collector.EmitGlobalError($"Codex configuration file not found: {config.FullName}");
return 1;
}
@@ -192,9 +195,8 @@ public async Task Build(
return 1;
}
- var codexContext = new CodexContext(codexConfig, configFile, collector, fs, fs, null, output);
-
- var cloneResult = await CodexCloneService.DiscoverCheckouts(codexContext, logFactory, ctx);
+ var codexContext = new CodexContext(codexConfig, configFile, collector, fs, fs, null, output?.FullName);
+ var cloneResult = await CodexCloneService.DiscoverCheckouts(codexContext, logFactory, ct);
if (cloneResult == null || cloneResult.Checkouts.Count == 0)
{
@@ -211,30 +213,22 @@ public async Task Build(
return result.DocumentationSets.Count > 0;
});
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
- ///
- /// Serve the built codex documentation.
- ///
- /// Port to serve on.
- /// Path to the codex output directory.
- /// Cancellation token.
- [Command("serve")]
- public async Task Serve(
- int port = 4000,
- string? path = null,
- Cancel ctx = default)
+ /// Serve the built portal at http://localhost:4000.
+ /// Run after codex build. Does not rebuild on file changes.
+ /// Port to listen on. Default: 4000.
+ /// Path to the portal output. Defaults to .artifacts/codex/docs/.
+ [NoOptionsInjection]
+ public async Task Serve(int port = 4000, [Existing, ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? path = null, CancellationToken ct = default)
{
var fs = FileSystemFactory.RealRead;
- var servePath = path ?? fs.Path.Join(
- Environment.CurrentDirectory, ".artifacts", "codex", "docs");
+ var servePath = path?.FullName ?? fs.Path.Join(Environment.CurrentDirectory, ".artifacts", "codex", "docs");
var host = new StaticWebHost(port, servePath);
- await host.RunAsync(ctx);
- await host.StopAsync(ctx);
-
- // Since this command doesn't use ServiceInvoker, stop collector manually
- await collector.StopAsync(ctx);
+ await host.RunAsync(ct);
+ await host.StopAsync(ct);
+ await collector.StopAsync(ct);
}
}
diff --git a/src/tooling/docs-builder/Commands/Codex/CodexIndexCommand.cs b/src/tooling/docs-builder/Commands/Codex/CodexIndexCommand.cs
index 65d2b08ea9..3e14db4f90 100644
--- a/src/tooling/docs-builder/Commands/Codex/CodexIndexCommand.cs
+++ b/src/tooling/docs-builder/Commands/Codex/CodexIndexCommand.cs
@@ -2,12 +2,14 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
+using System.ComponentModel.DataAnnotations;
using System.IO.Abstractions;
using Actions.Core.Services;
-using ConsoleAppFramework;
+
using Elastic.Codex;
using Elastic.Codex.Indexing;
using Elastic.Codex.Sourcing;
+using Elastic.Documentation;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Codex;
using Elastic.Documentation.Diagnostics;
@@ -15,12 +17,11 @@
using Elastic.Documentation.LinkIndex;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Codex;
-///
-/// Command for indexing codex documentation into Elasticsearch.
-///
+/// Index codex documentation into Elasticsearch.
internal sealed class CodexIndexCommand(
ILoggerFactory logFactory,
IDiagnosticsCollector collector,
@@ -29,79 +30,25 @@ internal sealed class CodexIndexCommand(
IEnvironmentVariables environmentVariables
)
{
- ///
- /// Index codex documentation to Elasticsearch.
- ///
- /// Path to the codex configuration file.
- /// -es, Elasticsearch endpoint, alternatively set env DOCUMENTATION_ELASTIC_URL
- /// Elasticsearch API key, alternatively set env DOCUMENTATION_ELASTIC_APIKEY
- /// Elasticsearch username (basic auth), alternatively set env DOCUMENTATION_ELASTIC_USERNAME
- /// Elasticsearch password (basic auth), alternatively set env DOCUMENTATION_ELASTIC_PASSWORD
- /// Disable AI enrichment of documents using LLM-generated metadata (enabled by default)
- /// The number of search threads the inference endpoint should use. Defaults: 8
- /// The number of index threads the inference endpoint should use. Defaults: 8
- /// Do not use the Elastic Inference Service, bootstrap inference endpoint
- /// Force reindex strategy to semantic index
- /// Timeout in minutes for the inference endpoint creation. Defaults: 4
- /// The number of documents to send to ES as part of the bulk. Defaults: 100
- /// The number of times failed bulk items should be retried. Defaults: 3
- /// Buffer ES request/responses for better error messages and pass ?pretty to all requests
- /// Route requests through a proxy server
- /// Proxy server password
- /// Proxy server username
- /// Disable SSL certificate validation (EXPERT OPTION)
- /// Pass a self-signed certificate fingerprint to validate the SSL connection
- /// Pass a self-signed certificate to validate the SSL connection
- /// If the certificate is not root but only part of the validation chain pass this
- ///
- ///
- [Command("")]
+ /// Index the built portal documentation into Elasticsearch.
+ ///
+ /// Run after codex build. Streams documents from all included documentation sets to the cluster.
+ ///
+ /// Path to the codex.yml configuration file.
public async Task Index(
- [Argument] string config,
- string? endpoint = null,
- string? apiKey = null,
- string? username = null,
- string? password = null,
-
- // inference options
- bool? noAiEnrichment = null,
- int? searchNumThreads = null,
- int? indexNumThreads = null,
- bool? noEis = null,
- int? bootstrapTimeout = null,
-
- // index options
- bool? forceReindex = null,
-
- // channel buffer options
- int? bufferSize = null,
- int? maxRetries = null,
-
- // connection options
- bool? debugMode = null,
-
- // proxy options
- string? proxyAddress = null,
- string? proxyPassword = null,
- string? proxyUsername = null,
-
- // certificate options
- bool? disableSslVerification = null,
- string? certificateFingerprint = null,
- string? certificatePath = null,
- bool? certificateNotRoot = null,
- Cancel ctx = default
+ GlobalCliOptions _,
+ [Argument, Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo config,
+ [AsParameters] ElasticsearchIndexOptions es,
+ CancellationToken ct = default
)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var fs = FileSystemFactory.RealRead;
-
- var configPath = fs.Path.GetFullPath(config);
- var configFile = fs.FileInfo.New(configPath);
+ var configFile = fs.FileInfo.New(config.FullName);
if (!configFile.Exists)
{
- collector.EmitGlobalError($"Codex configuration file not found: {configPath}");
+ collector.EmitGlobalError($"Codex configuration file not found: {config.FullName}");
return 1;
}
@@ -117,7 +64,7 @@ public async Task Index(
using var linkIndexReader = new GitLinkIndexReader(codexConfig.Environment);
var cloneService = new CodexCloneService(logFactory, linkIndexReader);
- var cloneResult = await cloneService.CloneAll(codexContext, fetchLatest: false, assumeCloned: true, ctx);
+ var cloneResult = await cloneService.CloneAll(codexContext, fetchLatest: false, assumeCloned: true, ct);
if (cloneResult.Checkouts.Count == 0)
{
@@ -125,37 +72,13 @@ public async Task Index(
return 1;
}
- var esOptions = new ElasticsearchIndexOptions
- {
- Endpoint = endpoint,
- ApiKey = apiKey,
- Username = username,
- Password = password,
- NoAiEnrichment = noAiEnrichment,
- SearchNumThreads = searchNumThreads,
- IndexNumThreads = indexNumThreads,
- NoEis = noEis,
- BootstrapTimeout = bootstrapTimeout,
- ForceReindex = forceReindex,
- BufferSize = bufferSize,
- MaxRetries = maxRetries,
- DebugMode = debugMode,
- ProxyAddress = proxyAddress,
- ProxyPassword = proxyPassword,
- ProxyUsername = proxyUsername,
- DisableSslVerification = disableSslVerification,
- CertificateFingerprint = certificateFingerprint,
- CertificatePath = certificatePath,
- CertificateNotRoot = certificateNotRoot
- };
-
var isolatedBuildService = new IsolatedBuildService(logFactory, configurationContext, githubActionsService, environmentVariables);
var service = new CodexIndexService(logFactory, configurationContext, isolatedBuildService);
- serviceInvoker.AddCommand(service, (codexContext, cloneResult, fs, esOptions),
+ serviceInvoker.AddCommand(service, (codexContext, cloneResult, fs, es),
static async (s, col, state, c) =>
- await s.Index(state.codexContext, state.cloneResult, state.fs, state.esOptions, c)
+ await s.Index(state.codexContext, state.cloneResult, state.fs, state.es, c)
);
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
}
diff --git a/src/tooling/docs-builder/Commands/Codex/CodexUpdateRedirectsCommand.cs b/src/tooling/docs-builder/Commands/Codex/CodexUpdateRedirectsCommand.cs
index d888afcfc7..5795bd54a8 100644
--- a/src/tooling/docs-builder/Commands/Codex/CodexUpdateRedirectsCommand.cs
+++ b/src/tooling/docs-builder/Commands/Codex/CodexUpdateRedirectsCommand.cs
@@ -2,46 +2,45 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
+using System.ComponentModel.DataAnnotations;
using System.IO.Abstractions;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Assembler.Deploying;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Codex;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands.Codex;
-///
-/// Command for updating CloudFront KeyValueStore redirects for codex.
-///
+/// Update CloudFront KeyValueStore redirects for a codex deployment.
internal sealed class CodexUpdateRedirectsCommand(
IDiagnosticsCollector collector,
ILoggerFactory logFactory
)
{
- /// Refreshes the redirects mapping in CloudFront's KeyValueStore for codex.
- /// Path to the codex configuration file (used to resolve environment).
- /// The environment to deploy to. Defaults to config or ENVIRONMENT env var.
- /// Path to the redirects mapping. Defaults to .artifacts/codex/docs/redirects.json.
- ///
- [Command("")]
- public async Task Run(
- [Argument] string config,
+ /// Push the codex redirects mapping to CloudFront's KeyValueStore.
+ /// Run after codex build produces a redirects.json.
+ /// Path to the codex.yml configuration file (used to resolve the environment).
+ /// Named deployment target. Defaults to the value in codex.yml or the ENVIRONMENT env var.
+ /// Path to redirects.json. Defaults to .artifacts/codex/docs/redirects.json.
+ public async Task UpdateRedirects(
+ GlobalCliOptions _,
+ [Argument, Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "yml,yaml")] FileInfo config,
string? environment = null,
- string? redirectsFile = null,
- Cancel ctx = default)
+ [Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "json")] FileInfo? redirectsFile = null,
+ CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var fs = FileSystemFactory.RealRead;
- var configPath = fs.Path.GetFullPath(config);
- var configFile = fs.FileInfo.New(configPath);
+ var configFile = fs.FileInfo.New(config.FullName);
if (!configFile.Exists)
{
- collector.EmitGlobalError($"Codex configuration file not found: {configPath}");
+ collector.EmitGlobalError($"Codex configuration file not found: {config.FullName}");
return 1;
}
@@ -53,8 +52,8 @@ public async Task Run(
var service = new DeployUpdateRedirectsService(logFactory, fs);
serviceInvoker.AddCommand(service, (environment: resolvedEnvironment, redirectsFile, kvsNamePrefix: "codex", defaultRedirectsFile: ".artifacts/codex/docs/redirects.json"),
- static async (s, col, state, c) => await s.UpdateRedirects(col, state.environment, state.redirectsFile, state.kvsNamePrefix, state.defaultRedirectsFile, c)
+ static async (s, col, state, c) => await s.UpdateRedirects(col, state.environment, state.redirectsFile?.FullName, state.kvsNamePrefix, state.defaultRedirectsFile, c)
);
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
}
diff --git a/src/tooling/docs-builder/Commands/DiffCommands.cs b/src/tooling/docs-builder/Commands/DiffCommands.cs
index c3d1d01097..e6196a3ca4 100644
--- a/src/tooling/docs-builder/Commands/DiffCommands.cs
+++ b/src/tooling/docs-builder/Commands/DiffCommands.cs
@@ -3,28 +3,31 @@
// See the LICENSE file in the project root for more information
using System.IO.Abstractions;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Refactor.Tracking;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands;
-internal sealed class DiffCommands(
+internal sealed class DiffCommand(
ILoggerFactory logFactory,
IDiagnosticsCollector collector,
IConfigurationContext configurationContext
)
{
- ///
- /// Validates redirect updates in the current branch using the redirect file against changes reported by git.
- ///
- /// -p, Defaults to the`{pwd}/docs` folder
- ///
- [Command("validate")]
- public async Task ValidateRedirects(string? path = null, Cancel ctx = default)
+ /// Verify every renamed or removed page in the current branch has a redirect entry.
+ ///
+ /// Compares the git diff of the working branch against the redirect file. Exits 1 if any moved
+ /// or deleted page is missing a redirect entry. Run before merging to catch broken links early.
+ ///
+ /// -p, Root of the documentation source. Defaults to cwd/docs.
+ [NoOptionsInjection]
+ [CommandName("diff")]
+ public async Task Validate(string? path = null, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
@@ -32,9 +35,8 @@ public async Task ValidateRedirects(string? path = null, Cancel ctx = defau
var fs = FileSystemFactory.RealGitRootForPath(path);
serviceInvoker.AddCommand(service, (path, fs),
- async static (s, collector, state, _) => await s.ValidateRedirects(collector, state.path, state.fs)
+ async static (s, collector, state, _) => await s.ValidateRedirects(collector, state.path, state.fs)
);
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
-
}
diff --git a/src/tooling/docs-builder/Commands/FormatCommand.cs b/src/tooling/docs-builder/Commands/FormatCommand.cs
deleted file mode 100644
index 2849bd3901..0000000000
--- a/src/tooling/docs-builder/Commands/FormatCommand.cs
+++ /dev/null
@@ -1,53 +0,0 @@
-// Licensed to Elasticsearch B.V under one or more agreements.
-// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
-// See the LICENSE file in the project root for more information
-
-using System.IO.Abstractions;
-using ConsoleAppFramework;
-using Elastic.Documentation.Configuration;
-using Elastic.Documentation.Diagnostics;
-using Elastic.Documentation.Refactor;
-using Elastic.Documentation.Services;
-using Microsoft.Extensions.Logging;
-
-namespace Documentation.Builder.Commands;
-
-internal sealed class FormatCommand(
- ILoggerFactory logFactory,
- IDiagnosticsCollector collector,
- IConfigurationContext configurationContext
-)
-{
- ///
- /// Format documentation files by fixing common issues like irregular space
- ///
- /// -p, Path to the documentation folder, defaults to pwd
- /// Check if files need formatting without modifying them (exits with code 1 if formatting needed)
- /// Write formatting changes to files
- ///
- [Command("")]
- public async Task Format(
- string? path = null,
- bool check = false,
- bool write = false,
- Cancel ctx = default
- )
- {
- // Validate that exactly one of --check or --write is specified
- if (check == write)
- {
- collector.EmitError(string.Empty, "Must specify exactly one of --check or --write");
- return 1;
- }
-
- await using var serviceInvoker = new ServiceInvoker(collector);
-
- var service = new FormatService(logFactory, configurationContext);
- var fs = FileSystemFactory.RealGitRootForPath(path);
-
- serviceInvoker.AddCommand(service, (path, check, fs),
- async static (s, collector, state, ctx) => await s.Format(collector, state.path, state.check, state.fs, ctx)
- );
- return await serviceInvoker.InvokeAsync(ctx);
- }
-}
diff --git a/src/tooling/docs-builder/Commands/InboundLinkCommands.cs b/src/tooling/docs-builder/Commands/InboundLinkCommands.cs
index 1bdc3220a7..e208eea64f 100644
--- a/src/tooling/docs-builder/Commands/InboundLinkCommands.cs
+++ b/src/tooling/docs-builder/Commands/InboundLinkCommands.cs
@@ -2,57 +2,66 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
+using System.ComponentModel.DataAnnotations;
using System.IO.Abstractions;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Links.InboundLinks;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands;
+/// Validate cross-doc-set links against the published link registry.
+///
+///
+/// Every documentation set publishes a links.json file containing the URLs of all its pages.
+/// These files are aggregated into a shared link registry. Inbound-links commands validate that
+/// cross-links between documentation sets resolve to real pages in the registry.
+///
+///
internal sealed class InboundLinkCommands(ILoggerFactory logFactory, IDiagnosticsCollector collector)
{
private readonly LinkIndexService _linkIndexService = new(logFactory, FileSystemFactory.RealRead);
- /// Validate all published cross_links in all published links.json files.
- ///
- [Command("validate-all")]
- public async Task ValidateAllInboundLinks(Cancel ctx = default)
+ /// Validate all cross-links across every published links.json in the registry.
+ [NoOptionsInjection]
+ public async Task ValidateAll(CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
serviceInvoker.AddCommand(_linkIndexService, static async (s, collector, ctx) => await s.CheckAll(collector, ctx));
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
- /// Validate all published cross_links in all published links.json files.
- ///
- ///
- ///
- [Command("validate")]
- public async Task ValidateRepoInboundLinks(string? from = null, string? to = null, Cancel ctx = default)
+ /// Validate all cross-links originating from or targeting a specific repository.
+ /// Only check links published by this repository slug.
+ /// Only check links that point to this repository slug.
+ [NoOptionsInjection]
+ public async Task Validate(string? from = null, string? to = null, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
serviceInvoker.AddCommand(_linkIndexService, (to, from),
static async (s, collector, state, ctx) => await s.CheckRepository(collector, state.to, state.from, ctx)
);
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
- ///
- /// Validate a locally published links.json file against all published links.json files in the registry
- ///
- /// Path to `links.json` defaults to '.artifacts/docs/html/links.json'
- /// -p, Defaults to the `{pwd}` folder
- ///
- [Command("validate-link-reference")]
- public async Task ValidateLocalLinkReference(string? file = null, string? path = null, Cancel ctx = default)
+ /// <summary>Validate a locally built links.json against the published link registry.</summary>
+ /// <remarks>
+ /// Use this to verify cross-links before publishing. The local links.json is checked against
+ /// all currently published registries to ensure every outbound cross-link resolves.
+ /// </remarks>
+ /// <param name="file">Path to links.json. Defaults to .artifacts/docs/html/links.json.</param>
+ /// <param name="path">-p, Root of the documentation source. Defaults to cwd.</param>
+ [NoOptionsInjection]
+ public async Task ValidateLinkReference([Existing, ExpandUserProfile, RejectSymbolicLinks, FileExtensions(Extensions = "json")] FileInfo? file = null, string? path = null, CancellationToken ct = default)
{
await using var serviceInvoker = new ServiceInvoker(collector);
serviceInvoker.AddCommand(_linkIndexService, (file, path),
- static async (s, collector, state, ctx) => await s.CheckWithLocalLinksJson(collector, state.file, state.path, ctx)
+ static async (s, collector, state, ctx) => await s.CheckWithLocalLinksJson(collector, state.file?.FullName, state.path, ctx)
);
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
}
diff --git a/src/tooling/docs-builder/Commands/IndexCommand.cs b/src/tooling/docs-builder/Commands/IndexCommand.cs
index 556489eb0f..ad0004a648 100644
--- a/src/tooling/docs-builder/Commands/IndexCommand.cs
+++ b/src/tooling/docs-builder/Commands/IndexCommand.cs
@@ -2,14 +2,14 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
-using System.IO.Abstractions;
using Actions.Core.Services;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Isolated;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands;
@@ -21,100 +21,27 @@ internal sealed class IndexCommand(
IEnvironmentVariables environmentVariables
)
{
- ///
- /// Index a single documentation set to Elasticsearch, calls `docs-builder --exporters elasticsearch`. Exposes more options
- ///
- /// -es, Elasticsearch endpoint, alternatively set env DOCUMENTATION_ELASTIC_URL
- /// path to the documentation folder, defaults to pwd.
- /// Elasticsearch API key, alternatively set env DOCUMENTATION_ELASTIC_APIKEY
- /// Elasticsearch username (basic auth), alternatively set env DOCUMENTATION_ELASTIC_USERNAME
- /// Elasticsearch password (basic auth), alternatively set env DOCUMENTATION_ELASTIC_PASSWORD
- /// Disable AI enrichment of documents using LLM-generated metadata (enabled by default)
- /// The number of search threads the inference endpoint should use. Defaults: 8
- /// The number of index threads the inference endpoint should use. Defaults: 8
- /// Do not use the Elastic Inference Service, bootstrap inference endpoint
- /// Force reindex strategy to semantic index
- /// Timeout in minutes for the inference endpoint creation. Defaults: 4
- /// The number of documents to send to ES as part of the bulk. Defaults: 100
- /// The number of times failed bulk items should be retried. Defaults: 3
- /// Buffer ES request/responses for better error messages and pass ?pretty to all requests
- /// Route requests through a proxy server
- /// Proxy server password
- /// Proxy server username
- /// Disable SSL certificate validation (EXPERT OPTION)
- /// Pass a self-signed certificate fingerprint to validate the SSL connection
- /// Pass a self-signed certificate to validate the SSL connection
- /// If the certificate is not root but only part of the validation chain pass this
- ///
- ///
- [Command("")]
+ /// <summary>Index a single documentation set into Elasticsearch.</summary>
+ /// <remarks>
+ /// <para>
+ /// Builds the documentation set in metadata-only mode and streams the output to Elasticsearch.
+ /// Does not write HTML to disk. Requires a running cluster and valid credentials.
+ /// </para>
+ /// </remarks>
+ [CommandName("index")]
public async Task Index(
- string? endpoint = null,
+ GlobalCliOptions _,
+ [AsParameters] ElasticsearchIndexOptions es,
string? path = null,
- string? apiKey = null,
- string? username = null,
- string? password = null,
-
- // inference options
- bool? noAiEnrichment = null,
- int? searchNumThreads = null,
- int? indexNumThreads = null,
- bool? noEis = null,
- int? bootstrapTimeout = null,
-
- // index options
- bool? forceReindex = null,
-
- // channel buffer options
- int? bufferSize = null,
- int? maxRetries = null,
-
- // connection options
- bool? debugMode = null,
-
- // proxy options
- string? proxyAddress = null,
- string? proxyPassword = null,
- string? proxyUsername = null,
-
- // certificate options
- bool? disableSslVerification = null,
- string? certificateFingerprint = null,
- string? certificatePath = null,
- bool? certificateNotRoot = null,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var fs = FileSystemFactory.RealGitRootForPath(path);
var service = new IsolatedIndexService(logFactory, configurationContext, githubActionsService, environmentVariables);
- var state = (fs, path,
- // endpoint options
- endpoint, apiKey, username, password,
- // inference options
- noAiEnrichment, indexNumThreads, noEis, searchNumThreads, bootstrapTimeout,
- // channel and connection options
- forceReindex, bufferSize, maxRetries, debugMode,
- // proxy options
- proxyAddress, proxyPassword, proxyUsername,
- // certificate options
- disableSslVerification, certificateFingerprint, certificatePath, certificateNotRoot
- );
- serviceInvoker.AddCommand(service, state,
- static async (s, collector, state, ctx) => await s.Index(collector, state.fs, state.path,
- // endpoint options
- state.endpoint, state.apiKey, state.username, state.password,
- // inference options
- state.noAiEnrichment, state.searchNumThreads, state.indexNumThreads, state.noEis, state.bootstrapTimeout,
- // channel and connection options
- state.forceReindex, state.bufferSize, state.maxRetries, state.debugMode,
- // proxy options
- state.proxyAddress, state.proxyPassword, state.proxyUsername,
- // certificate options
- state.disableSslVerification, state.certificateFingerprint, state.certificatePath, state.certificateNotRoot
- , ctx)
+ serviceInvoker.AddCommand(service,
+ async (s, col, ctx) => await s.Index(col, fs, es, path, ctx)
);
-
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
}
diff --git a/src/tooling/docs-builder/Commands/IsolatedBuildCommand.cs b/src/tooling/docs-builder/Commands/IsolatedBuildCommand.cs
index a1474e3505..b39cdcb43e 100644
--- a/src/tooling/docs-builder/Commands/IsolatedBuildCommand.cs
+++ b/src/tooling/docs-builder/Commands/IsolatedBuildCommand.cs
@@ -4,14 +4,13 @@
using System.IO.Abstractions;
using Actions.Core.Services;
-using ConsoleAppFramework;
-using Documentation.Builder.Arguments;
using Elastic.Documentation;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Isolated;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands;
@@ -23,59 +22,30 @@ internal sealed class IsolatedBuildCommand(
IEnvironmentVariables environmentVariables
)
{
- ///
- /// Builds a source documentation set folder.
- /// global options:
- /// --log-level level
- ///
- /// -p, Defaults to the`{pwd}/docs` folder
- /// -o, Defaults to `.artifacts/html`
- /// Specifies the path prefix for urls
- /// Force a full rebuild of the destination folder
- /// Treat warnings as errors and fail the build on warnings
- /// Allow indexing and following of HTML files
- /// Only emit documentation metadata to output, ignored if 'exporters' is also set
- /// Set available exporters:
- /// html, es, config, links, state, llm, redirect, metadata, none.
- /// Defaults to (html, config, links, state, redirect) or 'default'.
- ///
- /// The base URL for the canonical url tag
- /// Run the build in memory without writing to disk
- /// Skip OpenAPI documentation generation for faster builds
- ///
- [Command("")]
+ /// <summary>Build a single documentation set from source.</summary>
+ /// <remarks>
+ /// Locates the documentation root by searching for a docset.yml file starting at <see cref="IsolatedBuildOptions.Path"/>.
+ /// The output directory is wiped and rebuilt on each run unless incremental build detects no changes.
+ /// </remarks>
+ [DefaultCommand]
+ [CommandName("build")]
public async Task Build(
- string? path = null,
- string? output = null,
- string? pathPrefix = null,
- bool? force = null,
- bool? strict = null,
- bool? allowIndexing = null,
- bool? metadataOnly = null,
- [ExporterParser] IReadOnlySet? exporters = null,
- string? canonicalBaseUrl = null,
+ GlobalCliOptions _,
+ [AsParameters] IsolatedBuildOptions options,
bool inMemory = false,
- bool skipApi = false,
- Cancel ctx = default
+ CancellationToken ct = default
)
{
await using var serviceInvoker = new ServiceInvoker(collector);
var service = new IsolatedBuildService(logFactory, configurationContext, githubActionsService, environmentVariables);
- var readFs = inMemory ? FileSystemFactory.InMemory() : FileSystemFactory.RealGitRootForPath(path);
- // For real builds supply an explicit write FS without .git access; for in-memory null falls back to readFs
- var writeFs = inMemory ? null : FileSystemFactory.RealGitRootForPathWrite(path, output);
- var strictCommand = service.IsStrict(strict);
+ var readFs = inMemory ? FileSystemFactory.InMemory() : FileSystemFactory.RealGitRootForPath(options.Path?.FullName);
+ var writeFs = inMemory ? null : FileSystemFactory.RealGitRootForPathWrite(options.Path?.FullName, options.Output?.FullName);
+ var strictCommand = service.IsStrict(options.Strict);
- serviceInvoker.AddCommand(service,
- (path, output, pathPrefix, force, strict, allowIndexing, metadataOnly, exporters, canonicalBaseUrl, readFs, writeFs, skipApi), strictCommand,
- async static (s, collector, state, ctx) => await s.Build(
- collector, state.readFs, state.path, state.output, state.pathPrefix,
- state.force, state.strict, state.allowIndexing, state.metadataOnly,
- state.exporters, state.canonicalBaseUrl, state.writeFs, state.skipApi, false, ctx
- )
+ serviceInvoker.AddCommand(service, (options, readFs, writeFs), strictCommand,
+ static async (s, col, state, ctx) => await s.Build(col, state.readFs, state.options, state.writeFs, ctx)
);
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
}
-
}
diff --git a/src/tooling/docs-builder/Commands/MoveCommand.cs b/src/tooling/docs-builder/Commands/MoveCommand.cs
index 540e7da3a7..45ee274887 100644
--- a/src/tooling/docs-builder/Commands/MoveCommand.cs
+++ b/src/tooling/docs-builder/Commands/MoveCommand.cs
@@ -3,36 +3,35 @@
// See the LICENSE file in the project root for more information
using System.IO.Abstractions;
-using ConsoleAppFramework;
+using Elastic.Documentation;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.Refactor;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands;
-internal sealed class MoveCommand(
+internal sealed class RefactorCommands(
ILoggerFactory logFactory,
IDiagnosticsCollector collector,
IConfigurationContext configurationContext
)
{
- ///
- /// Move a file from one location to another and update all links in the documentation
- ///
- /// The source file or folder path to move from
- /// The target file or folder path to move to
- /// -p, Defaults to the`{pwd}` folder
- /// Dry run the move operation
- ///
- [Command("")]
+ /// <summary>Move a file or folder and rewrite all inbound links across the documentation set.</summary>
+ /// <param name="source">Source file or folder path.</param>
+ /// <param name="target">Destination file or folder path.</param>
+ /// <param name="path">-p, Documentation root. Defaults to cwd.</param>
+ /// <param name="dryRun">Print the changes that would be made without applying them.</param>
+ [CommandName("mv")]
public async Task Move(
+ GlobalCliOptions _,
[Argument] string source,
[Argument] string target,
bool? dryRun = null,
string? path = null,
- Cancel ctx = default
+ Cancel ct = default
)
{
await using var serviceInvoker = new ServiceInvoker(collector);
@@ -43,6 +42,37 @@ public async Task Move(
serviceInvoker.AddCommand(service, (source, target, dryRun, path, fs),
async static (s, collector, state, ctx) => await s.Move(collector, state.source, state.target, state.dryRun, state.path, state.fs, ctx)
);
- return await serviceInvoker.InvokeAsync(ctx);
+ return await serviceInvoker.InvokeAsync(ct);
+ }
+
+ /// <summary>Fix common formatting issues (irregular spacing, trailing whitespace) across documentation files.</summary>
+ /// <remarks>Exactly one of --check or --write must be specified.</remarks>
+ /// <param name="path">-p, Documentation root. Defaults to cwd.</param>
+ /// <param name="check">Report files that need formatting without modifying them. Exits 1 when any file is out of format.</param>
+ /// <param name="write">Apply formatting changes in place.</param>
+ [CommandName("format")]
+ public async Task Format(
+ GlobalCliOptions _,
+ string? path = null,
+ bool check = false,
+ bool write = false,
+ Cancel ct = default
+ )
+ {
+ if (check == write)
+ {
+ collector.EmitError(string.Empty, "Must specify exactly one of --check or --write");
+ return 1;
+ }
+
+ await using var serviceInvoker = new ServiceInvoker(collector);
+
+ var service = new FormatService(logFactory, configurationContext);
+ var fs = FileSystemFactory.RealGitRootForPath(path);
+
+ serviceInvoker.AddCommand(service, (path, check, fs),
+ async static (s, collector, state, ctx) => await s.Format(collector, state.path, state.check, state.fs, ctx)
+ );
+ return await serviceInvoker.InvokeAsync(ct);
}
}
diff --git a/src/tooling/docs-builder/Commands/ServeCommand.cs b/src/tooling/docs-builder/Commands/ServeCommand.cs
index 6b3b3ba883..10f55ed6ec 100644
--- a/src/tooling/docs-builder/Commands/ServeCommand.cs
+++ b/src/tooling/docs-builder/Commands/ServeCommand.cs
@@ -3,10 +3,11 @@
// See the LICENSE file in the project root for more information
using System.IO.Abstractions;
-using ConsoleAppFramework;
using Documentation.Builder.Http;
+using Elastic.Documentation;
using Elastic.Documentation.Configuration;
using Microsoft.Extensions.Logging;
+using Nullean.Argh;
namespace Documentation.Builder.Commands;
@@ -14,25 +15,19 @@ internal sealed class ServeCommand(ILoggerFactory logFactory, IConfigurationCont
{
private readonly ILogger _logger = logFactory.CreateLogger();
- ///
- /// Continuously serve a documentation folder at http://localhost:3000.
- /// File systems changes will be reflected without having to restart the server.
- ///
- /// -p, Path to serve the documentation.
- /// Defaults to the`{pwd}/docs` folder
- ///
- /// Port to serve the documentation.
- /// special flag for dotnet watch optimizations during development
- ///
- [Command("")]
- public async Task Serve(string? path = null, int port = 3000, bool watch = false, Cancel ctx = default)
+ /// <summary>Serve a documentation folder at http://localhost:3000 with live reload.</summary>
+ /// <remarks>File-system changes are reflected without restarting the server.</remarks>
+ /// <param name="path">-p, Documentation source directory. Defaults to the cwd/docs folder.</param>
+ /// <param name="port">Port to serve the documentation. Default: 3000</param>
+ /// <param name="watch">Special flag for dotnet watch optimizations during development</param>
+ [CommandName("serve")]
+ public async Task Serve(GlobalCliOptions _, [Existing, ExpandUserProfile, RejectSymbolicLinks] DirectoryInfo? path = null, int port = 3000, bool watch = false, CancellationToken ct = default)
{
- var host = new DocumentationWebHost(logFactory, path, port, FileSystemFactory.RealGitRootForPath(path), FileSystemFactory.InMemory(), configurationContext, watch);
- await host.RunAsync(ctx);
+ var host = new DocumentationWebHost(logFactory, path?.FullName, port, FileSystemFactory.RealGitRootForPath(path?.FullName), FileSystemFactory.InMemory(), configurationContext, watch);
+ await host.RunAsync(ct);
_logger.LogInformation("Find your documentation at http://localhost:{Port}/{Path}", port,
host.GeneratorState.Generator.DocumentationSet.FirstInterestingUrl.TrimStart('/')
);
- await host.StopAsync(ctx);
+ await host.StopAsync(ct);
}
-
}
diff --git a/src/tooling/docs-builder/DocumentationTooling.cs b/src/tooling/docs-builder/DocumentationTooling.cs
index 0aee7f55d3..cfc9448786 100644
--- a/src/tooling/docs-builder/DocumentationTooling.cs
+++ b/src/tooling/docs-builder/DocumentationTooling.cs
@@ -39,9 +39,6 @@ public static TBuilder AddDocumentationToolingDefaults(this TBuilder b
{
var logFactory = sp.GetRequiredService();
var githubActionsService = sp.GetRequiredService();
- var globalArgs = sp.GetRequiredService();
- if (globalArgs.IsHelpOrVersion || globalArgs.IsMcp)
- return new DiagnosticsCollector([]);
return new ConsoleDiagnosticsCollector(logFactory, githubActionsService);
})
.AddSingleton(_ =>
diff --git a/src/tooling/docs-builder/Filters/CatchExceptionFilter.cs b/src/tooling/docs-builder/Filters/CatchExceptionFilter.cs
deleted file mode 100644
index ce0ceccfc7..0000000000
--- a/src/tooling/docs-builder/Filters/CatchExceptionFilter.cs
+++ /dev/null
@@ -1,40 +0,0 @@
-// Licensed to Elasticsearch B.V under one or more agreements.
-// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
-// See the LICENSE file in the project root for more information
-
-using ConsoleAppFramework;
-using Elastic.Documentation.Diagnostics;
-using Microsoft.Extensions.Logging;
-
-namespace Documentation.Builder.Filters;
-
-
-internal sealed class CatchExceptionFilter(ConsoleAppFilter next, ILogger logger, IDiagnosticsCollector collector)
- : ConsoleAppFilter(next)
-{
- private bool _cancelKeyPressed;
- public override async Task InvokeAsync(ConsoleAppContext context, Cancel cancellationToken)
- {
- Console.CancelKeyPress += (_, _) =>
- {
- logger.LogInformation("Received CTRL+C cancelling");
- _cancelKeyPressed = true;
- };
- try
- {
- await Next.InvokeAsync(context, cancellationToken);
- }
- catch (Exception ex)
- {
- if (ex is OperationCanceledException && cancellationToken.IsCancellationRequested && _cancelKeyPressed)
- {
- logger.LogInformation("Cancellation requested, exiting.");
- return;
- }
- _ = collector.StartAsync(cancellationToken);
- collector.EmitGlobalError($"Global unhandled exception: {ex.Message}", ex);
- await collector.StopAsync(cancellationToken);
- Environment.ExitCode = 1;
- }
- }
-}
diff --git a/src/tooling/docs-builder/Filters/CheckForUpdatesFilter.cs b/src/tooling/docs-builder/Filters/CheckForUpdatesFilter.cs
deleted file mode 100644
index 23eb693ad2..0000000000
--- a/src/tooling/docs-builder/Filters/CheckForUpdatesFilter.cs
+++ /dev/null
@@ -1,92 +0,0 @@
-// Licensed to Elasticsearch B.V under one or more agreements.
-// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
-// See the LICENSE file in the project root for more information
-
-using System.IO.Abstractions;
-using System.Reflection;
-using ConsoleAppFramework;
-using Elastic.Documentation;
-using Elastic.Documentation.Configuration;
-
-namespace Documentation.Builder.Filters;
-
-internal sealed class CheckForUpdatesFilter(ConsoleAppFilter next, GlobalCliArgs cli) : ConsoleAppFilter(next)
-{
- // Only accesses ApplicationData — no workspace access needed
- private static readonly IFileSystem Fs = FileSystemFactory.AppData;
- private readonly IFileInfo _stateFile = Fs.FileInfo.New(Path.Join(Paths.ApplicationData.FullName, "docs-build-check.state"));
-
- public override async Task InvokeAsync(ConsoleAppContext context, Cancel ctx)
- {
- await Next.InvokeAsync(context, ctx);
- if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI")))
- return;
- if (cli.IsHelpOrVersion || cli.IsMcp)
- return;
-
- var latestVersionUrl = await GetLatestVersion(ctx);
- if (latestVersionUrl is null)
- ConsoleApp.LogError("Unable to determine latest version");
- else
- CompareWithAssemblyVersion(latestVersionUrl);
- }
-
- private static void CompareWithAssemblyVersion(Uri latestVersionUrl)
- {
- var versionPath = latestVersionUrl.AbsolutePath.Split('/').Last();
- if (!SemVersion.TryParse(versionPath, out var latestVersion))
- {
- ConsoleApp.LogError($"Unable to parse latest version from {latestVersionUrl}");
- return;
- }
-
- var assemblyVersion = Assembly.GetExecutingAssembly().GetCustomAttributes()
- .FirstOrDefault()?.InformationalVersion;
- if (SemVersion.TryParse(assemblyVersion ?? "", out var currentSemVersion))
- {
- var currentVersion = new SemVersion(currentSemVersion.Major, currentSemVersion.Minor, currentSemVersion.Patch);
- if (latestVersion <= currentVersion)
- return;
- ConsoleApp.Log("");
- ConsoleApp.Log($"A new version of docs-builder is available: {latestVersion} currently on version {currentSemVersion}");
- ConsoleApp.Log("");
- ConsoleApp.Log($" {latestVersionUrl}");
- ConsoleApp.Log("");
- ConsoleApp.Log("Read more about updating here:");
- ConsoleApp.Log(" https://elastic.github.io/docs-builder/contribute/locally#step-one ");
- ConsoleApp.Log("");
- return;
- }
-
- ConsoleApp.LogError($"Unable to parse current version from docs-builder binary");
- }
-
- private async ValueTask GetLatestVersion(Cancel ctx)
- {
- // only check for new versions once per hour
- if (_stateFile.Exists && _stateFile.LastWriteTimeUtc >= DateTime.UtcNow.Subtract(TimeSpan.FromHours(1)))
- {
- var url = await Fs.File.ReadAllTextAsync(_stateFile.FullName, ctx);
- if (Uri.TryCreate(url, UriKind.Absolute, out var uri))
- return uri;
- }
-
- try
- {
- var httpClient = new HttpClient(new HttpClientHandler { AllowAutoRedirect = false });
- var response = await httpClient.GetAsync("https://github.com/elastic/docs-builder/releases/latest", ctx);
- var redirectUrl = response.Headers.Location;
- if (redirectUrl is not null && _stateFile.Directory is not null)
- {
- // ensure the 'elastic' folder exists.
- if (!Fs.Directory.Exists(_stateFile.Directory.FullName))
- _ = Fs.Directory.CreateDirectory(_stateFile.Directory.FullName);
- await Fs.File.WriteAllTextAsync(_stateFile.FullName, redirectUrl.ToString(), ctx);
- }
- return redirectUrl;
- }
- // ReSharper disable once RedundantEmptyFinallyBlock
- // ignore on purpose
- finally { }
- }
-}
diff --git a/src/tooling/docs-builder/Filters/InfoLoggerFilter.cs b/src/tooling/docs-builder/Filters/InfoLoggerFilter.cs
deleted file mode 100644
index 2a803d94d6..0000000000
--- a/src/tooling/docs-builder/Filters/InfoLoggerFilter.cs
+++ /dev/null
@@ -1,36 +0,0 @@
-// Licensed to Elasticsearch B.V under one or more agreements.
-// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
-// See the LICENSE file in the project root for more information
-
-using System.Reflection;
-using ConsoleAppFramework;
-using Elastic.Documentation;
-using Elastic.Documentation.Configuration;
-using Microsoft.Extensions.Logging;
-
-namespace Documentation.Builder.Filters;
-
-internal sealed class InfoLoggerFilter(
- ConsoleAppFilter next,
- ILogger logger,
- ConfigurationFileProvider fileProvider,
- GlobalCliArgs cli
-)
- : ConsoleAppFilter(next)
-{
- public override async Task InvokeAsync(ConsoleAppContext context, Cancel cancellationToken)
- {
- var assemblyVersion = Assembly.GetExecutingAssembly().GetCustomAttributes()
- .FirstOrDefault()?.InformationalVersion;
- if (cli.IsHelpOrVersion)
- {
- await Next.InvokeAsync(context, cancellationToken);
- return;
- }
- logger.LogInformation("Configuration source: {ConfigurationSource}", fileProvider.ConfigurationSource.ToStringFast(true));
- if (fileProvider.ConfigurationSource == ConfigurationSource.Remote)
- logger.LogInformation("Configuration source git reference: {ConfigurationSourceGitReference}", fileProvider.GitReference);
- logger.LogInformation("Version: {Version}", assemblyVersion);
- await Next.InvokeAsync(context, cancellationToken);
- }
-}
diff --git a/src/tooling/docs-builder/Filters/ReplaceLogFilter.cs b/src/tooling/docs-builder/Filters/ReplaceLogFilter.cs
deleted file mode 100644
index d1522eb683..0000000000
--- a/src/tooling/docs-builder/Filters/ReplaceLogFilter.cs
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed to Elasticsearch B.V under one or more agreements.
-// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
-// See the LICENSE file in the project root for more information
-
-using System.Diagnostics.CodeAnalysis;
-using ConsoleAppFramework;
-using Elastic.Documentation;
-using Microsoft.Extensions.Logging;
-
-namespace Documentation.Builder.Filters;
-
-internal sealed class ReplaceLogFilter(ConsoleAppFilter next, ILogger logger, GlobalCliArgs cli)
- : ConsoleAppFilter(next)
-{
- [SuppressMessage("Usage", "CA2254:Template should be a static expression")]
- public override Task InvokeAsync(ConsoleAppContext context, Cancel cancellationToken)
- {
- if (cli.IsMcp)
- {
- ConsoleApp.Log = _ => { };
- ConsoleApp.LogError = _ => { };
- }
- else if (!cli.IsHelpOrVersion)
- {
- ConsoleApp.Log = msg => logger.LogInformation(msg);
- ConsoleApp.LogError = msg => logger.LogError(msg);
- }
-
- return Next.InvokeAsync(context, cancellationToken);
- }
-}
diff --git a/src/tooling/docs-builder/Filters/StopwatchFilter.cs b/src/tooling/docs-builder/Filters/StopwatchFilter.cs
deleted file mode 100644
index ff0bc2fb75..0000000000
--- a/src/tooling/docs-builder/Filters/StopwatchFilter.cs
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed to Elasticsearch B.V under one or more agreements.
-// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
-// See the LICENSE file in the project root for more information
-
-using System.Diagnostics;
-using ConsoleAppFramework;
-using Microsoft.Extensions.Logging;
-
-namespace Documentation.Builder.Filters;
-
-internal sealed class StopwatchFilter(ConsoleAppFilter next, ILogger logger) : ConsoleAppFilter(next)
-{
- public override async Task InvokeAsync(ConsoleAppContext context, Cancel cancellationToken)
- {
- var isHelpOrVersion = context.Arguments.Any(a => a is "--help" or "-h" or "--version");
- var name = string.IsNullOrWhiteSpace(context.CommandName) ? "generate" : context.CommandName;
- var startTime = Stopwatch.GetTimestamp();
- if (!isHelpOrVersion)
- logger.LogInformation("{Name} :: Starting...", name);
- try
- {
- await Next.InvokeAsync(context, cancellationToken);
- }
- finally
- {
- var endTime = Stopwatch.GetElapsedTime(startTime);
- if (!isHelpOrVersion)
- logger.LogInformation("{Name} :: Finished in '{EndTime}'", name, endTime);
- }
- }
-}
diff --git a/src/tooling/docs-builder/Http/InMemoryBuildState.cs b/src/tooling/docs-builder/Http/InMemoryBuildState.cs
index 43154396c9..628b752b67 100644
--- a/src/tooling/docs-builder/Http/InMemoryBuildState.cs
+++ b/src/tooling/docs-builder/Http/InMemoryBuildState.cs
@@ -177,18 +177,18 @@ private async Task ExecuteBuildAsync(string sourcePath, Cancel ct)
_ = await service.Build(
streamingCollector,
readFs,
- sourcePath,
- null, // output
- null, // pathPrefix
- true, // force - always rebuild for validation
- false, // strict
- false, // allowIndexing
- false, // metadataOnly
- ExportOptions.Default,
- null, // canonicalBaseUrl
+ new IsolatedBuildOptions
+ {
+ Path = new DirectoryInfo(sourcePath),
+ Force = true,
+ Strict = false,
+ AllowIndexing = false,
+ MetadataOnly = false,
+ Exporters = ExportOptions.Default,
+ SkipApi = true,
+ SkipCrossLinks = false
+ },
_writeFs, // reuse MockFileSystem across builds for caching
- true, // skipOpenApi - skip for faster validation builds
- false, // skipCrossLinks - enable cross-links (cached in MockFileSystem)
ct
);
diff --git a/src/tooling/docs-builder/Middleware/CatchExceptionMiddleware.cs b/src/tooling/docs-builder/Middleware/CatchExceptionMiddleware.cs
new file mode 100644
index 0000000000..c5a93e9991
--- /dev/null
+++ b/src/tooling/docs-builder/Middleware/CatchExceptionMiddleware.cs
@@ -0,0 +1,42 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using Elastic.Documentation.Diagnostics;
+using Microsoft.Extensions.Logging;
+using Nullean.Argh.Middleware;
+
+namespace Documentation.Builder.Middleware;
+
+internal sealed class CatchExceptionMiddleware(ILogger logger, IDiagnosticsCollector collector)
+ : ICommandMiddleware
+{
+ private bool _cancelKeyPressed;
+
+ public async ValueTask InvokeAsync(CommandContext context, CommandMiddlewareDelegate next)
+ {
+ Console.CancelKeyPress += (_, args) =>
+ {
+ // Suppress OS termination so the OperationCanceledException path below can run gracefully.
+ args.Cancel = true;
+ logger.LogInformation("Received CTRL+C cancelling");
+ _cancelKeyPressed = true;
+ };
+ try
+ {
+ await next(context);
+ }
+ catch (Exception ex)
+ {
+ if (ex is OperationCanceledException && context.CancellationToken.IsCancellationRequested && _cancelKeyPressed)
+ {
+ logger.LogInformation("Cancellation requested, exiting.");
+ return;
+ }
+ _ = collector.StartAsync(context.CancellationToken);
+ collector.EmitGlobalError($"Global unhandled exception: {ex.Message}", ex);
+ await collector.StopAsync(context.CancellationToken);
+ context.ExitCode = 1;
+ }
+ }
+}
diff --git a/src/tooling/docs-builder/Middleware/CheckForUpdatesMiddleware.cs b/src/tooling/docs-builder/Middleware/CheckForUpdatesMiddleware.cs
new file mode 100644
index 0000000000..34f41953ff
--- /dev/null
+++ b/src/tooling/docs-builder/Middleware/CheckForUpdatesMiddleware.cs
@@ -0,0 +1,94 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.IO.Abstractions;
+using System.Reflection;
+using Elastic.Documentation;
+using Elastic.Documentation.Configuration;
+using Microsoft.Extensions.Logging;
+using Nullean.Argh.Middleware;
+
+namespace Documentation.Builder.Middleware;
+
+internal sealed class CheckForUpdatesMiddleware(ILogger logger) : ICommandMiddleware
+{
+ // Only accesses ApplicationData — no workspace access needed
+ private static readonly IFileSystem Fs = FileSystemFactory.AppData;
+ private readonly IFileInfo _stateFile = Fs.FileInfo.New(Path.Join(Paths.ApplicationData.FullName, "docs-build-check.state"));
+ private readonly ILogger _logger = logger;
+
+ public async ValueTask InvokeAsync(CommandContext context, CommandMiddlewareDelegate next)
+ {
+ await next(context);
+ if (context.CancellationToken.IsCancellationRequested || context.ExitCode != 0)
+ return;
+ if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("CI")))
+ return;
+
+ try
+ {
+ var latestVersionUrl = await GetLatestVersion(context.CancellationToken);
+ if (latestVersionUrl is null)
+ _logger.LogWarning("Unable to determine latest version");
+ else
+ CompareWithAssemblyVersion(latestVersionUrl);
+ }
+ catch (Exception ex)
+ {
+ // Best-effort: a network failure here must never break the command that just succeeded.
+ _logger.LogDebug(ex, "Update check failed");
+ }
+ }
+
+ private void CompareWithAssemblyVersion(Uri latestVersionUrl)
+ {
+ var versionPath = latestVersionUrl.AbsolutePath.Split('/').Last();
+ if (!SemVersion.TryParse(versionPath, out var latestVersion))
+ {
+ _logger.LogWarning("Unable to parse latest version from {LatestVersionUrl}", latestVersionUrl);
+ return;
+ }
+
+ var assemblyVersion = Assembly.GetExecutingAssembly()
+ .GetCustomAttributes()
+ .FirstOrDefault()?.InformationalVersion;
+
+ if (!SemVersion.TryParse(assemblyVersion ?? "", out var currentSemVersion))
+ {
+ _logger.LogWarning("Unable to parse current version from docs-builder binary");
+ return;
+ }
+
+ var currentVersion = new SemVersion(currentSemVersion.Major, currentSemVersion.Minor, currentSemVersion.Patch);
+ if (latestVersion <= currentVersion)
+ return;
+
+ _logger.LogInformation("");
+ _logger.LogInformation("A new version of docs-builder is available: {Latest} (currently on {Current})", latestVersion, currentSemVersion);
+ _logger.LogInformation(" {LatestVersionUrl}", latestVersionUrl);
+ _logger.LogInformation("Read more about updating: https://elastic.github.io/docs-builder/contribute/locally#step-one");
+ }
+
+ private async ValueTask GetLatestVersion(CancellationToken ct)
+ {
+ // only check for new versions once per hour
+ if (_stateFile.Exists && _stateFile.LastWriteTimeUtc >= DateTime.UtcNow.Subtract(TimeSpan.FromHours(1)))
+ {
+ var url = await Fs.File.ReadAllTextAsync(_stateFile.FullName, ct);
+ if (Uri.TryCreate(url, UriKind.Absolute, out var uri))
+ return uri;
+ }
+
+ using var httpClient = new HttpClient(new HttpClientHandler { AllowAutoRedirect = false });
+ using var response = await httpClient.GetAsync("https://github.com/elastic/docs-builder/releases/latest", ct);
+ var redirectUrl = response.Headers.Location;
+ if (redirectUrl is not null && _stateFile.Directory is not null)
+ {
+ if (!Fs.Directory.Exists(_stateFile.Directory.FullName))
+ _ = Fs.Directory.CreateDirectory(_stateFile.Directory.FullName);
+ await Fs.File.WriteAllTextAsync(_stateFile.FullName, redirectUrl.ToString(), ct);
+ }
+ return redirectUrl;
+ }
+}
diff --git a/src/tooling/docs-builder/Middleware/InfoLoggerMiddleware.cs b/src/tooling/docs-builder/Middleware/InfoLoggerMiddleware.cs
new file mode 100644
index 0000000000..8121f4ef08
--- /dev/null
+++ b/src/tooling/docs-builder/Middleware/InfoLoggerMiddleware.cs
@@ -0,0 +1,28 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.Reflection;
+using Elastic.Documentation.Configuration;
+using Microsoft.Extensions.Logging;
+using Nullean.Argh.Middleware;
+
+namespace Documentation.Builder.Middleware;
+
+internal sealed class InfoLoggerMiddleware(ILogger logger, ConfigurationFileProvider fileProvider)
+ : ICommandMiddleware
+{
+ public async ValueTask InvokeAsync(CommandContext context, CommandMiddlewareDelegate next)
+ {
+ var assemblyVersion = Assembly.GetExecutingAssembly()
+ .GetCustomAttributes()
+ .FirstOrDefault()?.InformationalVersion;
+
+ logger.LogInformation("Configuration source: {ConfigurationSource}", fileProvider.ConfigurationSource);
+ if (fileProvider.ConfigurationSource == Elastic.Documentation.ConfigurationSource.Remote)
+ logger.LogInformation("Configuration source git reference: {ConfigurationSourceGitReference}", fileProvider.GitReference);
+ logger.LogInformation("Version: {Version}", assemblyVersion);
+
+ await next(context);
+ }
+}
diff --git a/src/tooling/docs-builder/Middleware/StopwatchMiddleware.cs b/src/tooling/docs-builder/Middleware/StopwatchMiddleware.cs
new file mode 100644
index 0000000000..03955bf9a8
--- /dev/null
+++ b/src/tooling/docs-builder/Middleware/StopwatchMiddleware.cs
@@ -0,0 +1,28 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.Diagnostics;
+using Microsoft.Extensions.Logging;
+using Nullean.Argh.Middleware;
+
+namespace Documentation.Builder.Middleware;
+
+internal sealed class StopwatchMiddleware(ILogger logger) : ICommandMiddleware
+{
+ public async ValueTask InvokeAsync(CommandContext context, CommandMiddlewareDelegate next)
+ {
+ var name = context.CommandName.Length == 0 ? "generate" : context.CommandName;
+ var startTime = Stopwatch.GetTimestamp();
+ logger.LogInformation("{Name} :: Starting...", name);
+ try
+ {
+ await next(context);
+ }
+ finally
+ {
+ var elapsed = Stopwatch.GetElapsedTime(startTime);
+ logger.LogInformation("{Name} :: Finished in '{Elapsed}'", name, elapsed);
+ }
+ }
+}
diff --git a/src/tooling/docs-builder/Program.cs b/src/tooling/docs-builder/Program.cs
index 0829f83e42..cfd5cec3db 100644
--- a/src/tooling/docs-builder/Program.cs
+++ b/src/tooling/docs-builder/Program.cs
@@ -2,57 +2,72 @@
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information
-using ConsoleAppFramework;
using Documentation.Builder;
using Documentation.Builder.Commands;
using Documentation.Builder.Commands.Assembler;
using Documentation.Builder.Commands.Codex;
-using Documentation.Builder.Filters;
+using Documentation.Builder.Middleware;
+using Elastic.Documentation;
using Elastic.Documentation.Configuration.Assembler;
using Elastic.Documentation.ServiceDefaults;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
+using Nullean.Argh;
+using Nullean.Argh.Hosting;
+// Pre-host fast path: run --help, --version, __schema, __completion directly and exit
+// before the host (and its startup logs) are ever constructed.
+await ArghApp.TryArghIntrinsicCommand(args);
+
+_ = GlobalCliOptions.TryParseArgh(args, out var cliOptions);
var builder = Host.CreateApplicationBuilder()
- .AddDocumentationServiceDefaults(ref args, (s, p) =>
+ .AddDocumentationServiceDefaults(cliOptions ?? new GlobalCliOptions(), (s, p) =>
{
_ = s.AddSingleton(AssemblyConfiguration.Create(p));
})
.AddDocumentationToolingDefaults()
.AddOpenTelemetryDefaults();
-var app = builder.ToConsoleAppBuilder();
-
-app.UseFilter();
-app.UseFilter();
-app.UseFilter();
-app.UseFilter();
-app.UseFilter();
-
-app.Add();
-app.Add("inbound-links");
-app.Add("diff");
-app.Add("mv");
-app.Add("serve");
-app.Add("index");
-app.Add("format");
-app.Add("changelog");
-
-//assembler commands
-
-app.Add("assembler content-source");
-app.Add("assembler deploy");
-app.Add("assembler bloom-filter");
-app.Add("assembler navigation");
-app.Add("assembler config");
-app.Add("assembler index");
-app.Add("assembler sitemap");
-app.Add("assembler");
-app.Add("assemble");
-
-//codex commands
-app.Add("codex update-redirects");
-app.Add("codex index");
-app.Add("codex");
-
-await app.RunAsync(args).ConfigureAwait(false);
+_ = builder.Services.AddArgh(args, app =>
+{
+ _ = app.UseGlobalOptions();
+
+ _ = app.UseMiddleware();
+ _ = app.UseMiddleware();
+ _ = app.UseMiddleware();
+ _ = app.UseMiddleware();
+
+ // `docs-builder build` as a named command AND root default (`docs-builder` with no sub-command).
+ _ = app.MapAndRootAlias();
+
+ _ = app.Map();
+ _ = app.Map();
+ _ = app.Map();
+ _ = app.Map();
+ _ = app.MapNamespace("changelog");
+ _ = app.MapNamespace("inbound-links");
+
+ _ = app.Map();
+
+ // assembler commands (assemble merged into assembler default)
+ _ = app.MapNamespace("assembler", g =>
+ {
+ _ = g.MapNamespace("content-source");
+ _ = g.MapNamespace("deploy");
+ _ = g.MapNamespace("bloom-filter");
+ _ = g.MapNamespace("navigation");
+ _ = g.MapNamespace("config");
+ _ = g.Map();
+ _ = g.Map();
+ });
+
+ // codex commands
+ _ = app.MapNamespace("codex", g =>
+ {
+ _ = g.Map