From 91aa9ca89323ecad646970c4220e6b4feefa2dd8 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 12:48:13 +0330 Subject: [PATCH 01/36] docs: add runnable examples for Allows reference page Add four new App examples covering scenarios from the Allows docs that lacked runnable code: CSV flat records, event bus with sealed trait auto-unwrap, GraphQL/tree structures with Self recursion, and sealed trait auto-unwrap with nested hierarchies. Also add a "Running the Examples" section to allows.md. Co-Authored-By: Claude Opus 4.6 --- docs/reference/allows.md | 51 +++++++++ .../scala/comptime/AllowsCsvExample.scala | 98 +++++++++++++++++ .../comptime/AllowsEventBusExample.scala | 92 ++++++++++++++++ .../comptime/AllowsGraphQLTreeExample.scala | 103 ++++++++++++++++++ .../comptime/AllowsSealedTraitExample.scala | 87 +++++++++++++++ 5 files changed, 431 insertions(+) create mode 100644 schema-examples/src/main/scala/comptime/AllowsCsvExample.scala create mode 100644 schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala create mode 100644 schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala create mode 100644 schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 88c04af35f..ac7e43535b 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -350,3 +350,54 @@ val ev: Allows[EmptyEvent.type, Record[Primitive]] = implicitly // vacuously tr | Derivation keyword | `Schema.derived` implicit | `Schema.derived` or `derives Schema` | Both Scala versions produce the same macro behavior and the same error messages. + +## Running the Examples + +All code from this guide is available as runnable examples in the `schema-examples` module. + +**1. Clone the repository and navigate to the project:** + +```bash +git clone https://github.com/zio/zio-blocks.git +cd zio-blocks +``` + +**2. Run individual examples with sbt:** + +**CSV serializer with flat record compile-time constraints** +([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/AllowsCsvExample.scala)) + +```bash +sbt "schema-examples/runMain comptime.AllowsCsvExample" +``` + +**Event bus with sealed trait auto-unwrap and nested hierarchies** +([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala)) + +```bash +sbt "schema-examples/runMain comptime.AllowsEventBusExample" +``` + +**GraphQL / tree structures using Self for recursive grammars** +([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala)) + +```bash +sbt "schema-examples/runMain comptime.AllowsGraphQLTreeExample" +``` + +**Sealed trait auto-unwrap with nested hierarchies and case objects** +([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala)) + +```bash +sbt "schema-examples/runMain comptime.AllowsSealedTraitExample" +``` + +**RDBMS library with CREATE TABLE and INSERT using flat record constraints** +([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/RdbmsExample.scala)) + +> This example is a compile-time demonstration — the `RdbmsDemo` object validates that compatible types compile and incompatible types are rejected. Review the source to see the constraint in action. + +**JSON document store with specific primitives and recursive Self grammar** +([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/DocumentStoreExample.scala)) + +> This example is a compile-time demonstration — the `DocumentStoreDemo` object validates that JSON-compatible types compile and types with non-JSON scalars are rejected. Review the source to see the constraint in action. diff --git a/schema-examples/src/main/scala/comptime/AllowsCsvExample.scala b/schema-examples/src/main/scala/comptime/AllowsCsvExample.scala new file mode 100644 index 0000000000..3959587427 --- /dev/null +++ b/schema-examples/src/main/scala/comptime/AllowsCsvExample.scala @@ -0,0 +1,98 @@ +package comptime + +import zio.blocks.schema._ +import zio.blocks.schema.comptime.Allows +import Allows.{Primitive, Record, `|`} +import Allows.{Optional => AOptional} +import util.ShowExpr.show + +// --------------------------------------------------------------------------- +// CSV serializer example using Allows[A, S] compile-time shape constraints +// +// A CSV row is a flat record: every field must be a primitive scalar or an +// optional primitive (for nullable columns). Nested records, sequences, and +// maps are all rejected at compile time. +// --------------------------------------------------------------------------- + +// Compatible: flat record of primitives and optional primitives +case class Employee(name: String, department: String, salary: BigDecimal, active: Boolean) +object Employee { implicit val schema: Schema[Employee] = Schema.derived } + +case class SensorReading(sensorId: String, timestamp: Long, value: Double, unit: Option[String]) +object SensorReading { implicit val schema: Schema[SensorReading] = Schema.derived } + +object CsvSerializer { + + type FlatRow = Primitive | AOptional[Primitive] + + /** Serialize a sequence of flat records to CSV format. */ + def toCsv[A](rows: Seq[A])(implicit schema: Schema[A], ev: Allows[A, Record[FlatRow]]): String = { + val reflect = schema.reflect.asRecord.get + val header = reflect.fields.map(_.name).mkString(",") + val lines = rows.map { row => + val dv = schema.toDynamicValue(row) + dv match { + case DynamicValue.Record(fields) => + fields.map { case (_, v) => csvEscape(dvToString(v)) }.mkString(",") + case _ => "" + } + } + (header +: lines).mkString("\n") + } + + private def dvToString(dv: DynamicValue): String = dv match { + case DynamicValue.Primitive(PrimitiveValue.String(s)) => s + case DynamicValue.Primitive(PrimitiveValue.Boolean(b)) => b.toString + case DynamicValue.Primitive(PrimitiveValue.Int(n)) => n.toString + case DynamicValue.Primitive(PrimitiveValue.Long(n)) => n.toString + case DynamicValue.Primitive(PrimitiveValue.Double(n)) => n.toString + case DynamicValue.Primitive(PrimitiveValue.Float(n)) => n.toString + case DynamicValue.Primitive(PrimitiveValue.BigDecimal(n)) => n.toString + case DynamicValue.Primitive(v) => v.toString + case DynamicValue.Null => "" + case DynamicValue.Variant(tag, inner) if tag == "Some" => dvToString(inner) + case DynamicValue.Variant(tag, _) if tag == "None" => "" + case DynamicValue.Record(fields) => + fields.headOption.map { case (_, v) => dvToString(v) }.getOrElse("") + case other => other.toString + } + + private def csvEscape(s: String): String = + if (s.contains(",") || s.contains("\"") || s.contains("\n")) + "\"" + s.replace("\"", "\"\"") + "\"" + else s +} + +// --------------------------------------------------------------------------- +// Demonstration +// --------------------------------------------------------------------------- + +object AllowsCsvExample extends App { + + // Flat records of primitives — compiles fine + val employees = Seq( + Employee("Alice", "Engineering", BigDecimal("120000.00"), true), + Employee("Bob", "Marketing", BigDecimal("95000.50"), true), + Employee("Carol", "Engineering", BigDecimal("115000.00"), false) + ) + + // CSV output for a flat record of primitives + show(CsvSerializer.toCsv(employees)) + + // Flat record with optional fields — also compiles + val readings = Seq( + SensorReading("temp-01", 1709712000L, 23.5, Some("celsius")), + SensorReading("temp-02", 1709712060L, 72.1, None) + ) + + // Optional fields become empty CSV cells when None + show(CsvSerializer.toCsv(readings)) + + // The following would NOT compile — uncomment to see the error: + // + // case class Nested(name: String, address: Address) + // object Nested { implicit val schema: Schema[Nested] = Schema.derived } + // CsvSerializer.toCsv(Seq(Nested("Alice", Address("1 Main St", "NY", "10001")))) + // [error] Schema shape violation at Nested.address: found Record(Address), + // required Primitive | Optional[Primitive] +} diff --git a/schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala b/schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala new file mode 100644 index 0000000000..ce64c02fb3 --- /dev/null +++ b/schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala @@ -0,0 +1,92 @@ +package comptime + +import zio.blocks.schema._ +import zio.blocks.schema.comptime.Allows +import Allows.{Primitive, Record, Sequence, `|`} +import Allows.{Optional => AOptional} +import util.ShowExpr.show + +// --------------------------------------------------------------------------- +// Event bus / message broker example using Allows[A, S] +// +// Published events are typically sealed traits of flat record cases. Sealed +// traits are automatically unwrapped by the Allows macro — each case is +// checked individually against the grammar. No Variant node is needed. +// +// This example also shows nested sealed traits (auto-unwrap is recursive). +// --------------------------------------------------------------------------- + +// Domain events — a sealed trait hierarchy +sealed trait AccountEvent +case class AccountOpened(accountId: String, owner: String, initialBalance: BigDecimal) extends AccountEvent +case class FundsDeposited(accountId: String, amount: BigDecimal) extends AccountEvent +case class FundsWithdrawn(accountId: String, amount: BigDecimal) extends AccountEvent +case class AccountClosed(accountId: String, reason: Option[String]) extends AccountEvent +object AccountEvent { implicit val schema: Schema[AccountEvent] = Schema.derived } + +// Nested sealed trait — InventoryEvent has a sub-hierarchy +sealed trait InventoryEvent +case class ItemAdded(sku: String, quantity: Int) extends InventoryEvent +case class ItemRemoved(sku: String, quantity: Int) extends InventoryEvent + +sealed trait InventoryAlert extends InventoryEvent +case class LowStock(sku: String, remaining: Int) extends InventoryAlert +case class OutOfStock(sku: String) extends InventoryAlert + +object InventoryEvent { implicit val schema: Schema[InventoryEvent] = Schema.derived } + +// Event with sequence fields (e.g. tags or batch items) +sealed trait BatchEvent +case class BatchImport(batchId: String, itemIds: List[String]) extends BatchEvent +case class BatchComplete(batchId: String, count: Int) extends BatchEvent +object BatchEvent { implicit val schema: Schema[BatchEvent] = Schema.derived } + +object EventBus { + + type EventShape = Primitive | AOptional[Primitive] + + /** Publish a domain event. All cases of the sealed trait must be flat records. */ + def publish[A](event: A)(implicit schema: Schema[A], ev: Allows[A, Record[EventShape]]): String = { + val dv = schema.toDynamicValue(event) + val (typeName, payload) = dv match { + case DynamicValue.Variant(name, inner) => (name, inner.toJson.toString) + case _ => (schema.reflect.typeId.name, dv.toJson.toString) + } + s"PUBLISH topic=${schema.reflect.typeId.name} type=$typeName payload=$payload" + } + + /** Publish events that may contain sequence fields (e.g. batch operations). */ + def publishBatch[A](event: A)(implicit + schema: Schema[A], + ev: Allows[A, Record[Primitive | Sequence[Primitive]]] + ): String = { + val dv = schema.toDynamicValue(event) + val (typeName, payload) = dv match { + case DynamicValue.Variant(name, inner) => (name, inner.toJson.toString) + case _ => (schema.reflect.typeId.name, dv.toJson.toString) + } + s"PUBLISH topic=${schema.reflect.typeId.name} type=$typeName payload=$payload" + } +} + +// --------------------------------------------------------------------------- +// Demonstration +// --------------------------------------------------------------------------- + +object AllowsEventBusExample extends App { + + // Flat sealed trait — all cases are records of primitives/optionals + show(EventBus.publish[AccountEvent](AccountOpened("acc-001", "Alice", BigDecimal("1000.00")))) + show(EventBus.publish[AccountEvent](FundsDeposited("acc-001", BigDecimal("500.00")))) + show(EventBus.publish[AccountEvent](AccountClosed("acc-001", Some("customer request")))) + + // Nested sealed trait — auto-unwrap is recursive + // InventoryAlert extends InventoryEvent, both are unwrapped + show(EventBus.publish[InventoryEvent](ItemAdded("SKU-100", 50))) + show(EventBus.publish[InventoryEvent](LowStock("SKU-100", 3))) + show(EventBus.publish[InventoryEvent](OutOfStock("SKU-100"))) + + // Events with sequence fields use a wider grammar + show(EventBus.publishBatch[BatchEvent](BatchImport("batch-42", List("item-1", "item-2", "item-3")))) + show(EventBus.publishBatch[BatchEvent](BatchComplete("batch-42", 3))) +} diff --git a/schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala b/schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala new file mode 100644 index 0000000000..223c25371c --- /dev/null +++ b/schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala @@ -0,0 +1,103 @@ +package comptime + +import zio.blocks.schema._ +import zio.blocks.schema.comptime.Allows +import Allows.{Primitive, Record, Sequence, `|`} +import Allows.{Optional => AOptional, Self => ASelf} +import util.ShowExpr.show + +// --------------------------------------------------------------------------- +// GraphQL / tree structure example using Self for recursive grammars +// +// Self refers back to the entire enclosing Allows[A, S] grammar, allowing +// the constraint to describe recursive data structures like trees, linked +// lists, and nested menus. +// +// Non-recursive types also satisfy Self-containing grammars — the Self +// position is never reached, so the constraint is vacuously satisfied. +// --------------------------------------------------------------------------- + +// Recursive tree: children reference the same type +case class TreeNode(value: Int, children: List[TreeNode]) +object TreeNode { implicit val schema: Schema[TreeNode] = Schema.derived } + +// Recursive category hierarchy (common in e-commerce, CMS, etc.) +case class NavCategory(name: String, slug: String, subcategories: List[NavCategory]) +object NavCategory { implicit val schema: Schema[NavCategory] = Schema.derived } + +// Linked list via Optional[Self] +case class Chain(label: String, next: Option[Chain]) +object Chain { implicit val schema: Schema[Chain] = Schema.derived } + +// Non-recursive type — satisfies Self-containing grammars vacuously +case class FlatNode(id: Int, label: String) +object FlatNode { implicit val schema: Schema[FlatNode] = Schema.derived } + +object GraphQL { + + type TreeShape = Primitive | Sequence[ASelf] | AOptional[ASelf] + + /** Generate a simplified GraphQL type definition for a recursive type. */ + def graphqlType[A](implicit schema: Schema[A], ev: Allows[A, Record[TreeShape]]): String = { + val reflect = schema.reflect.asRecord.get + val fields = reflect.fields.map { f => + s" ${f.name}: ${gqlType(resolve(f.value), schema.reflect.typeId.name)}" + } + s"type ${schema.reflect.typeId.name} {\n${fields.mkString("\n")}\n}" + } + + /** Unwrap Deferred to get the actual Reflect node. */ + private def resolve(r: Reflect.Bound[_]): Reflect.Bound[_] = r match { + case d: Reflect.Deferred[_, _] => resolve(d.value.asInstanceOf[Reflect.Bound[_]]) + case other => other + } + + private def gqlType(r: Reflect.Bound[_], selfName: String): String = r match { + case _: Reflect.Sequence[_, _, _] => s"[$selfName]" + case p: Reflect.Primitive[_, _] => + p.primitiveType match { + case PrimitiveType.Int(_) => "Int" + case PrimitiveType.Long(_) => "Int" + case PrimitiveType.Float(_) => "Float" + case PrimitiveType.Double(_) => "Float" + case PrimitiveType.String(_) => "String" + case PrimitiveType.Boolean(_) => "Boolean" + case _ => "String" + } + case _ => selfName + } +} + +// --------------------------------------------------------------------------- +// Demonstration +// --------------------------------------------------------------------------- + +object AllowsGraphQLTreeExample extends App { + + // Recursive tree with Sequence[Self] + show(GraphQL.graphqlType[TreeNode]) + + // Recursive categories — same grammar, different domain + show(GraphQL.graphqlType[NavCategory]) + + // Linked list via Optional[Self] + show(GraphQL.graphqlType[Chain]) + + // Non-recursive type also satisfies the grammar (vacuously — Self is never reached) + show(GraphQL.graphqlType[FlatNode]) + + // Show that recursive data actually works at runtime + val tree = TreeNode(1, List( + TreeNode(2, List(TreeNode(4, Nil), TreeNode(5, Nil))), + TreeNode(3, Nil) + )) + show(Schema[TreeNode].toDynamicValue(tree).toJson.toString) + + val nav = NavCategory("Electronics", "electronics", List( + NavCategory("Phones", "phones", Nil), + NavCategory("Laptops", "laptops", List( + NavCategory("Gaming", "gaming", Nil) + )) + )) + show(Schema[NavCategory].toDynamicValue(nav).toJson.toString) +} diff --git a/schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala b/schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala new file mode 100644 index 0000000000..22bb512b67 --- /dev/null +++ b/schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala @@ -0,0 +1,87 @@ +package comptime + +import zio.blocks.schema._ +import zio.blocks.schema.comptime.Allows +import Allows.{Primitive, Record} +import util.ShowExpr.show + +// --------------------------------------------------------------------------- +// Sealed trait auto-unwrap example +// +// Sealed traits and enums are automatically unwrapped by the Allows macro. +// Each case is checked individually against the grammar — no Variant node +// is needed. +// +// Auto-unwrap is recursive: if a case is itself a sealed trait, its cases +// are unwrapped too, to any depth. +// +// Zero-field records (case objects) are vacuously true for any Record[A] +// constraint. +// --------------------------------------------------------------------------- + +// Simple sealed trait with case classes and a case object +sealed trait Shape +case class Circle(radius: Double) extends Shape +case class Rectangle(width: Double, height: Double) extends Shape +case object Point extends Shape +object Shape { implicit val schema: Schema[Shape] = Schema.derived } + +// Nested sealed trait hierarchy — two levels deep +sealed trait Expr +sealed trait BinaryOp extends Expr +case class Add(left: Double, right: Double) extends BinaryOp +case class Multiply(left: Double, right: Double) extends BinaryOp +case class Literal(value: Double) extends Expr +case object Zero extends Expr +object Expr { implicit val schema: Schema[Expr] = Schema.derived } + +// All-singleton enum (all case objects) +sealed trait Color +case object Red extends Color +case object Green extends Color +case object Blue extends Color +object Color { implicit val schema: Schema[Color] = Schema.derived } + +object SealedTraitValidator { + + /** Validate that a value's type has a flat record structure. */ + def validate[A](value: A)(implicit schema: Schema[A], ev: Allows[A, Record[Primitive]]): String = { + val dv = schema.toDynamicValue(value) + dv match { + case DynamicValue.Variant(caseName, inner) => + s"Valid variant case '$caseName': ${inner.toJson}" + case DynamicValue.Record(fields) => + s"Valid record with ${fields.size} field(s): ${fields.map(_._1).mkString(", ")}" + case _ => + s"Valid: ${dv.toJson}" + } + } +} + +// --------------------------------------------------------------------------- +// Demonstration +// --------------------------------------------------------------------------- + +object AllowsSealedTraitExample extends App { + + // Simple sealed trait — all cases checked against Record[Primitive] + // Circle: Record(radius: Double) — satisfies Record[Primitive] + // Rectangle: Record(width: Double, height: Double) — satisfies Record[Primitive] + // Point: zero-field case object — vacuously true + show(SealedTraitValidator.validate[Shape](Circle(3.14))) + show(SealedTraitValidator.validate[Shape](Rectangle(4.0, 5.0))) + show(SealedTraitValidator.validate[Shape](Point)) + + // Nested sealed trait — auto-unwrap is recursive + // BinaryOp is itself sealed with Add and Multiply + // All leaf cases have only Double fields — satisfies Record[Primitive] + show(SealedTraitValidator.validate[Expr](Add(1.0, 2.0))) + show(SealedTraitValidator.validate[Expr](Multiply(3.0, 4.0))) + show(SealedTraitValidator.validate[Expr](Literal(42.0))) + show(SealedTraitValidator.validate[Expr](Zero)) + + // All-singleton enum — every case is a zero-field record (vacuously true) + show(SealedTraitValidator.validate[Color](Red)) + show(SealedTraitValidator.validate[Color](Green)) + show(SealedTraitValidator.validate[Color](Blue)) +} From f19b5256c8df19c3e7b645c26724493f2a94a609 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 14:57:11 +0330 Subject: [PATCH 02/36] docs: embed example files via SourceFile.print in allows.md Add mdoc:passthrough blocks using SourceFile.print to embed full example source files directly in the Allows reference page. Move SourceFile to package docs so mdoc can import it. Update docs-data-type-ref skill with the SourceFile.print convention and correct import pattern. Co-Authored-By: Claude Opus 4.6 --- .claude/skills/docs-data-type-ref/SKILL.md | 50 +++++++++++++++++ docs/reference/allows.md | 36 ++++++++++++- .../src/main/scala/SourceFile.scala | 54 +++++++++++++++++++ 3 files changed, 138 insertions(+), 2 deletions(-) create mode 100644 zio-blocks-docs/src/main/scala/SourceFile.scala diff --git a/.claude/skills/docs-data-type-ref/SKILL.md b/.claude/skills/docs-data-type-ref/SKILL.md index e0bc059f1f..b84d2b2d00 100644 --- a/.claude/skills/docs-data-type-ref/SKILL.md +++ b/.claude/skills/docs-data-type-ref/SKILL.md @@ -243,6 +243,56 @@ Rules for this section: - The bolded description must be a short plain-English description of what that specific `App` demonstrates — not the object name rephrased. - Keep the two numbered steps (clone, run individually) in that order; do not add or remove steps. - If no example `App` objects were written (rare), omit this section entirely. +- When the full example source is also **embedded earlier in the document** via `SourceFile.print`, + the `([source](...))` link in this section serves as a convenient shortcut to the GitHub file; + there is no need to embed the source again here. + +### Embedding Example Files with `SourceFile` + +When the documentation needs to show a **full example file** from the `schema-examples` project +(written in Step 3), **do not copy-paste the code inline**. Instead, use `mdoc:passthrough` with +the `SourceFile.print` helper to include it by reference. This keeps the doc and the example in +sync — any change to the example file automatically appears in the rendered docs on the next +mdoc build. + +Use this pattern: + +````markdown +```scala mdoc:passthrough +import docs.SourceFile + +SourceFile.print("schema-examples/src/main/scala//.scala") +``` +```` + +**Important:** Import as `import docs.SourceFile` and call `SourceFile.print(...)` — do NOT use +`import docs.SourceFile._` with bare `print(...)` because `print` conflicts with `Predef.print` +inside mdoc sessions. + +`SourceFile.print(path)` reads the file at mdoc compile time and emits a fenced code block with +the file path shown as the title. The path is relative to the repository root (the helper tries +`../` first, then ``). + +**When to use `SourceFile.print`:** +- Showing a complete, runnable `App` example from `schema-examples/` +- Showing a large, self-contained example that would be unwieldy to maintain in two places + +**When NOT to use it — use regular mdoc blocks instead:** +- Short inline snippets (< 20 lines) that illustrate a single method or concept +- Code that needs `mdoc` evaluated output (e.g., `// res0: Int = 42`) +- Code that is documentation-specific and doesn't exist as a standalone file + +**Optional parameters:** +- `lines = Seq((from, to))` — include only specific line ranges (1-indexed): + ````markdown + ```scala mdoc:passthrough + import docs.SourceFile + + SourceFile.print("schema-examples/src/main/scala/into/IntoNumericExample.scala", lines = Seq((10, 25))) + ``` + ```` +- `showLineNumbers = true` — render with line numbers in the output +- `comment = false` — suppress the file path title ### Compile-Checked Code Blocks with mdoc diff --git a/docs/reference/allows.md b/docs/reference/allows.md index ac7e43535b..ff2c05bd00 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -371,6 +371,12 @@ cd zio-blocks sbt "schema-examples/runMain comptime.AllowsCsvExample" ``` +```scala mdoc:passthrough +import docs.SourceFile + +SourceFile.print("schema-examples/src/main/scala/comptime/AllowsCsvExample.scala") +``` + **Event bus with sealed trait auto-unwrap and nested hierarchies** ([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala)) @@ -378,6 +384,12 @@ sbt "schema-examples/runMain comptime.AllowsCsvExample" sbt "schema-examples/runMain comptime.AllowsEventBusExample" ``` +```scala mdoc:passthrough +import docs.SourceFile + +SourceFile.print("schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala") +``` + **GraphQL / tree structures using Self for recursive grammars** ([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala)) @@ -385,6 +397,12 @@ sbt "schema-examples/runMain comptime.AllowsEventBusExample" sbt "schema-examples/runMain comptime.AllowsGraphQLTreeExample" ``` +```scala mdoc:passthrough +import docs.SourceFile + +SourceFile.print("schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala") +``` + **Sealed trait auto-unwrap with nested hierarchies and case objects** ([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala)) @@ -392,12 +410,26 @@ sbt "schema-examples/runMain comptime.AllowsGraphQLTreeExample" sbt "schema-examples/runMain comptime.AllowsSealedTraitExample" ``` +```scala mdoc:passthrough +import docs.SourceFile + +SourceFile.print("schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala") +``` + **RDBMS library with CREATE TABLE and INSERT using flat record constraints** ([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/RdbmsExample.scala)) -> This example is a compile-time demonstration — the `RdbmsDemo` object validates that compatible types compile and incompatible types are rejected. Review the source to see the constraint in action. +```scala mdoc:passthrough +import docs.SourceFile + +SourceFile.print("schema-examples/src/main/scala/comptime/RdbmsExample.scala") +``` **JSON document store with specific primitives and recursive Self grammar** ([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/DocumentStoreExample.scala)) -> This example is a compile-time demonstration — the `DocumentStoreDemo` object validates that JSON-compatible types compile and types with non-JSON scalars are rejected. Review the source to see the constraint in action. +```scala mdoc:passthrough +import docs.SourceFile + +SourceFile.print("schema-examples/src/main/scala/comptime/DocumentStoreExample.scala") +``` diff --git a/zio-blocks-docs/src/main/scala/SourceFile.scala b/zio-blocks-docs/src/main/scala/SourceFile.scala new file mode 100644 index 0000000000..feb503f973 --- /dev/null +++ b/zio-blocks-docs/src/main/scala/SourceFile.scala @@ -0,0 +1,54 @@ +package docs + +import scala.io.Source + +object SourceFile { + + def read(path: String, lines: Seq[(Int, Int)]): String = { + def readFile(path: String) = + try { + Source.fromFile("../" + path) + } catch { + case _: Throwable => Source.fromFile(path) + } + + if (lines.isEmpty) { + val content = readFile(path).getLines().mkString("\n") + content + } else { + val chunks = for { + (from, to) <- lines + } yield readFile(path) + .getLines() + .toArray[String] + .slice(from - 1, to) + .mkString("\n") + + chunks.mkString("\n\n") + } + } + + def fileExtension(path: String): String = { + val javaPath = java.nio.file.Paths.get(path) + val fileExtension = + javaPath.getFileName.toString + .split('.') + .lastOption + .getOrElse("") + fileExtension + } + + def print( + path: String, + lines: Seq[(Int, Int)] = Seq.empty, + comment: Boolean = true, + showLineNumbers: Boolean = false, + ) = { + val title = if (comment) s"""title="$path"""" else "" + val showLines = if (showLineNumbers) "showLineNumbers" else "" + println(s"""```${fileExtension(path)} $title $showLines""") + println(read(path, lines)) + println("```") + } + +} From 52267e3221ef98e1b25280813bcb75aa60a41d25 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 20:09:40 +0330 Subject: [PATCH 03/36] Update zio-blocks-docs/src/main/scala/SourceFile.scala Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- zio-blocks-docs/src/main/scala/SourceFile.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/zio-blocks-docs/src/main/scala/SourceFile.scala b/zio-blocks-docs/src/main/scala/SourceFile.scala index feb503f973..c41528836c 100644 --- a/zio-blocks-docs/src/main/scala/SourceFile.scala +++ b/zio-blocks-docs/src/main/scala/SourceFile.scala @@ -41,10 +41,10 @@ object SourceFile { def print( path: String, lines: Seq[(Int, Int)] = Seq.empty, - comment: Boolean = true, + showTitle: Boolean = true, showLineNumbers: Boolean = false, ) = { - val title = if (comment) s"""title="$path"""" else "" + val title = if (showTitle) s"""title="$path"""" else "" val showLines = if (showLineNumbers) "showLineNumbers" else "" println(s"""```${fileExtension(path)} $title $showLines""") println(read(path, lines)) From 112503586ae7fe2cbc0468576ec8f75c96b0dc03 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 16:28:48 +0000 Subject: [PATCH 04/36] Documentation of Structural Types (#1171) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: add Structural Types reference page and examples Implements documentation for the ToStructural[A] type class and structural types feature (Issue #517 / PR #614). Includes: - New reference page (docs/reference/structural-types.md) covering: - Type signature and use cases for structural types - Motivation (duck typing, cross-system interop) - Construction via .structural extension method - Supported conversions (products, tuples, sum types) - Integration with DynamicValue and Schema Evolution - Compile-time errors and limitations - Example applications in schema-examples/: - StructuralProductExample: simple and nested case classes - StructuralSumTypeExample: sealed traits and enums - StructuralTupleExample: tuple structural schemas - StructuralIntegrationExample: DynamicValue manipulation - Sidebar navigation updated to include new reference page All documentation compiles with mdoc and examples run without errors. Co-Authored-By: Claude Haiku 4.5 * docs: enrich Structural Types reference page with gh-query findings Enriched the structural types documentation with insights from GitHub issues and PRs: 1. **Scala 2 Deeply Nested Types Limitation** (#612): In Scala 2, accessing fields on deeply nested structural types requires explicit casting to StructuralRecord, as reflection limitations prevent field chaining. Added workaround with example. 2. **Alphabetical Field Ordering Rationale**: Clarified that alphabetical sorting ensures deterministic, normalized type identity for predictable schema evolution and cross-system interop. 3. **Binding.of Integration** (#883): Added section documenting support for structural types in Binding.of macro for high-performance JVM-only serialization of anonymous structural types. 4. **Platform Detection**: Noted that implementation uses Platform.supportsReflection to detect and reject structural types at compile time on non-JVM platforms. All examples compile with mdoc, 0 errors. Co-Authored-By: Claude Haiku 4.5 * fix: correct runMain command in StructuralIntegrationExample documentation comment Co-Authored-By: Claude Haiku 4.5 * fix: correct object name in StructuralIntegrationExample to match filename Co-Authored-By: Claude Haiku 4.5 * docs: enrich Round-tripping Through DynamicValue section with cross-type conversion motivation The section now explains why structural types matter for DynamicValue round-tripping: - Shows the real benefit: cross-type conversion (Person → DynamicValue → Employee) - Adds concrete integration scenarios (API gateways, message brokers, data pipelines) - Includes comparison table for when to use structural vs nominal schemas - Example demonstrates encoding with one schema and decoding with another This replaces the thin example that only showed encoding/decoding the same type. Co-Authored-By: Claude Haiku 4.5 * docs: ensure structural-types.md compliance with writing style and mdoc conventions - Add mdoc modifiers (compile-only, silent, mdoc) to all Scala code blocks - Add introductory prose before "Type Signature" heading and code block - Remove emojis (✅, ❌) from comments; replace with plain English descriptions - Add missing imports (Schema, SchemaError) in code blocks - Refactor cross-type conversion example: split into setup (mdoc:silent) + evaluated output (mdoc) blocks - Ensure all code blocks are self-contained and compile-checked Fixes compliance violations in: - docs-writing-style (emoji removal, prose requirements) - docs-mdoc-conventions (modifier requirements, import requirements, setup + output pattern) Co-Authored-By: Claude Haiku 4.5 * docs: enforce sbt docs/mdoc compilation verification in documentation workflows Add mandatory mdoc compilation checks across all documentation skills: - docs-integrate: Convert Step 4 to explicit compilation gate with error checks - docs-data-type-ref: Add mdoc verification to Step 5 Review - docs-how-to-guide: Add mdoc check to Technical Accuracy checklist in Step 6 - docs-enrich-section: Add final Verification step with mdoc check All Scala code blocks in docs are now compile-verified before completion. Co-Authored-By: Claude Haiku 4.5 * docs: remove irrelevant "With DynamicValue" section from structural-types reference The "With DynamicValue" subsection demonstrated general Schema features (toDynamicValue, fromDynamicValue, DynamicOptic) using a nominal schema, not a structural one. This content is not specific to structural types and was off-topic. The bottom "See Also" section already links to DynamicValue documentation. Co-Authored-By: Claude Haiku 4.5 * More efficient encoding and decoding of `java.time._` in different codecs (#1163) * Update zio-json and zio-schema dependencies (#1165) * Documentation of As/Into Derivation for DynamicValue (#1168) * Add docs-document-pr skill for generating documentation from pull requests Generates docs by gathering PR metadata, linked issues, and commits, then creates a new reference/guide page or adds a subsection to existing docs based on content type. Co-Authored-By: Claude Haiku 4.5 * refactor(docs-document-pr): Delegate to specialized documentation skills Phase 3 now leverages existing ZIO Blocks documentation skills: - docs-data-type-ref for new reference/API pages - docs-how-to-guide for new how-to guides - docs-writing-style for prose conventions - docs-mdoc-conventions for code block formatting Phase 4 uses docs-integrate skill for sidebar management. This reduces duplication, ensures consistency across docs, and makes the skill more modular by composing with domain-specific expertise. Co-Authored-By: Claude Haiku 4.5 * feat(docs-document-pr): Add YAML frontmatter with metadata Add skill frontmatter containing: - name: docs-document-pr - description: Comprehensive skill purpose - triggers: keyword patterns for skill auto-invocation This enables proper skill discovery and auto-triggering based on user intent patterns like "document PR #1234". Co-Authored-By: Claude Haiku 4.5 * docs: add comprehensive reference for Into and As type classes (PR #1007) Created docs/reference/into-as.md covering: - Into[A, B]: one-way schema-driven conversions - As[A, B]: bidirectional conversions with round-trip guarantees - DynamicValue integration for polyglot serialization - Numeric widening/narrowing with validation - Container conversions (Option, Either, Map, List, Array, etc.) - Schema evolution patterns and custom validations - Error handling and cross-format support Integrated into documentation: - Added to sidebars.js in Reference section - Updated docs/index.md with link in Data Operations - Added cross-references from Schema and DynamicValue pages Also integrated existing schema-examples for Into and As: - IntoDomainBoundaryExample: custom validations - AsNumericRoundTripExample: bidirectional numeric conversions - AsManualConstructionExample: combining Into instances - AsSchemaEvolutionExample: schema evolution patterns - AsReverseExample: reversing conversions Addresses documentation gap for PR #1007 which adds DynamicValue support to Into/As macros, enabling seamless conversions between typed data and semi-structured representations. Co-Authored-By: Claude Haiku 4.5 * Revert "docs: add comprehensive reference for Into and As type classes (PR #1007)" This reverts commit da0fd3c6307aebc71b8441397f9bea55fb06ab41. * docs(schema-evolution): Add DynamicValue conversion sections (PR #1007) Enhanced Into and As documentation with comprehensive DynamicValue support sections covering the new macro capabilities added in PR #1007. Added to docs/reference/schema-evolution/into.md: - Converting to DynamicValue (type-safe → semi-structured) - Converting from DynamicValue (semi-structured → type-safe) - Collections and DynamicValue conversions (List, Map, etc.) - Motivation: polyglot data handling and format independence Added to docs/reference/schema-evolution/as.md: - Bidirectional DynamicValue conversions with round-trip guarantees - Use cases: polyglot configuration systems and schema-driven migrations - Chained migration patterns (A → DynamicValue → B) - Scala 2/3 differences including ambiguity detection These additions document the DynamicValue feature that enables seamless conversions between type-safe domain models and semi-structured data representations, critical for polyglot serialization workflows. Co-Authored-By: Claude Haiku 4.5 * docs(schema-evolution): Enhance DynamicValue examples with JSON round-tripping Updated DynamicValue conversion sections to show complete polyglot workflows: - Type → DynamicValue → JSON → DynamicValue → Type - Demonstrates full round-trip cycle with actual serialization - Shows practical use cases with real data flow In docs/reference/schema-evolution/into.md: - Added JSON serialization to "Converting to DynamicValue" section - Added round-trip examples for collections (List, Map) - Demonstrates format-independent conversions via JsonFormat In docs/reference/schema-evolution/as.md: - Added full polyglot cycle example (Config case) - Enhanced polyglot configuration use case with JSON I/O - Added bidirectional migration example with rollback support - Shows practical data flow for configuration management Examples now show: 1. Forward/reverse conversion operations 2. JSON encoding/decoding integration 3. Round-trip data preservation 4. Practical rollback and validation patterns Co-Authored-By: Claude Haiku 4.5 * docs(schema-evolution): Simplify DynamicValue examples using mdoc evaluation Replaced verbose JsonFormat.encode/decode examples with clean mdoc evaluation: - Use _.toJsonString for JSON serialization - Rely on mdoc for evaluation instead of ShowExpr.show - Split setup and evaluation into separate code blocks - Cleaner, more idiomatic examples Updated both into.md and as.md DynamicValue conversion sections. Co-Authored-By: Claude Haiku 4.5 * docs(schema-evolution): Add reverse JSON → DynamicValue → Config example Enhanced "Bidirectional DynamicValue Support with JSON Round-Trip" section to show both directions: Forward: Config → DynamicValue → JSON - Uses _.toJsonString for serialization Reverse: JSON → DynamicValue → Config - Uses DynamicValue.fromJsonString to parse JSON - Uses As[Config, DynamicValue].from to deserialize to typed Config Demonstrates complete round-trip cycle with full type safety. Co-Authored-By: Claude Haiku 4.5 * fix: correct JSON parsing in As documentation example Use .fromJson[DynamicValue] extension method instead of non-existent DynamicValue.fromJsonString() to properly parse JSON strings in mdoc examples. Co-Authored-By: Claude Haiku 4.5 * docs(as): rewrite polyglot configuration example with realistic read-modify-write cycle Replace the incomplete example that only showed forward conversion (Config → DynamicValue) with a realistic scenario where a config service: 1. Reads JSON from external storage (Consul/etcd/file) → DynamicValue 2. Hydrates typed DatabaseConfig using As#from 3. Applies business logic (update timeout) 4. Writes back to store using As#into This demonstrates why As (bidirectional) is needed instead of just Into (one-way), showing the full read-modify-write cycle in a single for-comprehension. Co-Authored-By: Claude Haiku 4.5 * docs(as): expand polyglot configuration example with problem statement Clarify the real-world problem: configuration systems require reading from external storage, modifying in-place, and writing back. Without As, you need two separate conversions (Into[DynamicValue, Config] and Into[Config, DynamicValue]) with no guarantee they align. As solves this by guaranteeing a faithful round-trip via macro validation. Add step-by-step breakdown (read → hydrate → modify → serialize) to show why As is essential for mutable config pipelines. Co-Authored-By: Claude Haiku 4.5 * docs: restructure use cases with #### headings and update style guide - Update docs-writing-style to explicitly allow and encourage #### level headings - Add guidance on when to use #### for grouping related items (use cases, examples) - Restructure as.md Use Cases section to use #### for "Polyglot configuration systems" and "Schema-driven bidirectional migrations" - Add introductory sentence explaining when As is the right choice Co-Authored-By: Claude Haiku 4.5 * docs(as): enrich schema-driven migrations section with motivation and rollback example Replace toy example with realistic database migration scenario: - Explain the gap: naive one-way Into doesn't support rollback - Add contrast: As for bidirectional/rollback migrations vs Into for permanent ones - Show realistic use case: user database schema upgrade with rollback capability - Demonstrate round-trip guarantee: PersonOld → PersonNew → PersonOld preserves data This clarifies when As is essential for safe, reversible schema evolution. Co-Authored-By: Claude Haiku 4.5 * docs(as): remove schema-driven bidirectional migrations section Remove the #### Schema-driven bidirectional migrations section and keep only the Polyglot configuration systems use case. Co-Authored-By: Claude Haiku 4.5 * remove: delete empty IntoDynamicValue example Remove the empty IntoDynamicValue.scala placeholder file that contains only an empty object definition. Co-Authored-By: Claude Haiku 4.5 * Update docs/reference/schema-evolution/as.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --------- Co-authored-by: Claude Haiku 4.5 Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Fix parsing of `java.time.Year` to parse similar to the latest JVMs + remove raw codecs for `java.time.Year`, `java.time.YearMonth`, `java.time.ZoneId`, and `java.time.ZoneOffset` because decoding/encoding by the latest JVMs performs better (#1167) * Preparing to migration on Scala 3.8.x (#1169) * docs: complete JsonPatch reference page and add cross-reference from Patch (#1161) * docs: add JsonPatch reference documentation Co-Authored-By: Claude Sonnet 4.6 * docs: rewrite JsonPatch reference page following skill structure - Add opening definition with structural type shape - Add Motivation section with ASCII diagram and hello-world example - Use #### headings for each method per skill guidelines - Add full Op hierarchy diagram with all subtypes - Add PatchMode section with comparison table - Add Operation Types section covering all Op/StringOp/ArrayOp/ObjectOp cases - Add Diffing Algorithm table - Add JsonPatch vs RFC 6902 comparison table - Add Advanced Usage examples (change log, composing sub-patches) - Use DynamicOptic.root.field(...).at(...) idioms throughout Co-Authored-By: Claude Sonnet 4.6 * docs: add JsonPatch runnable examples and link from reference page - Step1DiffAndApply: diff/apply, extension methods, roundtrip guarantee - Step2ManualPatches: JsonPatch.root, path-based apply, empty - Step3PatchOperations: all Op types (Set, NumberDelta, StringEdit, ArrayEdit, ObjectEdit) - Step4CompositionAndModes: ++, PatchMode, toDynamicPatch, fromDynamicPatch - CompleteJsonPatchExample: collaborative editor with patch log, replay, and sync - Add Examples section to json-patch.md linking all five files Co-Authored-By: Claude Sonnet 4.6 * fix: fix compilation errors in CompleteJsonPatchExample - Explicitly type var doc as Json to accept patch.apply's return type - Use asObj() helper to cast Json → Json.Object before navigation - Fix contentSource extraction: JsonSelection#one returns Either, not Option - Fix foldLeft accumulators with explicit : Json type annotation Co-Authored-By: Claude Sonnet 4.6 * docs: add explanatory comments to CompleteJsonPatchExample - Explain the append-only log design and its event-sourcing analogy - Explain why doc is typed as Json (patch.apply return type widening) - Explain why PatchMode.Strict is the default choice - Explain the ++ composition strategy and why publishPatch uses NumberDelta instead of a literal value (concurrent increment robustness) - Explain ArrayOp.Append safety (non-destructive) vs Insert/Delete - Explain why contentSource is extracted from live doc, not a snapshot - Explain foldLeft with JsonPatch.empty as the identity element - Explain each PatchMode behaviour (Strict/Lenient/Clobber) in context - Explain toDynamicPatch widening of NumberDelta to BigDecimalDelta - Add production serialization comment showing the typical codec workflow Co-Authored-By: Claude Sonnet 4.6 * docs: update JsonPatch reference to use mdoc evaluated output and show() examples - Replace mdoc:compile-only blocks (with manual // result comments) with mdoc:silent:reset + mdoc pairs for live evaluated output throughout the doc - Fix bare subheader violations: add intro prose before the first ### in Creating Patches, Core Operations, Advanced Usage, and Integration sections - Fix lone subheader violations: merge #### ++ into ### Composing Patches and #### toDynamicPatch into ### Converting - Rename Step1–Step4 example files to scenario-based names and rewrite all println() calls as util.ShowExpr.show() for expression + result output Co-Authored-By: Claude Sonnet 4.6 * fix: prefix every line of multi-line show() results with // Co-Authored-By: Claude Sonnet 4.6 * Update sourcecode to 0.4.4 (#1148) Co-authored-by: zio-scala-steward[bot] <145262613+zio-scala-steward[bot]@users.noreply.github.com> * Update zio-sbt-gh-query, zio-sbt-website to 0.5.0 (#1149) Co-authored-by: zio-scala-steward[bot] <145262613+zio-scala-steward[bot]@users.noreply.github.com> * Safe and correct (to match `java.time.*` value formatting) validation of predefined JSON schema formats + code cleanup (#1151) * More test coverage for XML codecs + tune visibility of private methods for Scala 2 (#1152) * Tune visibility for private methods in the chunk, schema and schema-thrift modules * More test coverage for XML codecs * feat(schema): add DynamicValue support to Into/As macros (#1007) * feat(schema): add DynamicValue support to Into/As macros - Into[A, DynamicValue] converts any type A to DynamicValue via Schema - Into[DynamicValue, A] converts DynamicValue to any type A via Schema - As[A, DynamicValue] provides bidirectional conversion - As[DynamicValue, A] provides bidirectional conversion - Schema[A] is summoned if available, otherwise derived - Added tests for DynamicValue conversions in IntoSpec and AsSpec Closes #981 * refactor(test): improve DynamicValue test assertion pattern Use assert(result)(isRight(equalTo(expected))) instead of assertTrue with manual field checking for clearer failure messages. * test(schema): add comprehensive DynamicValue tests for Into/As macros - Add tests for List, Map, Option, and sealed trait conversions - Replace failing Either tests with working sealed trait tests - Add ambiguity detection in Scala 3 findImplicitOrDeriveSchema * style(test): use assertTrue in DynamicValue tests per review feedback * fix(schema): detect ambiguous Schema implicits in Scala 2 DynamicValue macros Previously, findImplicitOrDeriveSchema in Scala 2 Into/As macros used inferImplicitValue(silent=true), which returns EmptyTree for both 'not found' and 'ambiguous' cases, silently ignoring ambiguity. Now uses a two-pass approach: first silent=true, then silent=false in a try/catch to distinguish ambiguous/diverging from genuinely absent. This matches the Scala 3 behavior which inspects failure.explanation. * Hardened a regex for email validation during checking JSON Schema conformance to avoid SSRF when routing emails directly to IP addresses (#1160) * Update sbt, sbt-dependency-tree, ... to 1.12.5 (#1159) Co-authored-by: zio-scala-steward[bot] <145262613+zio-scala-steward[bot]@users.noreply.github.com> * docs: refactor documentation skills and add BindingResolver + DynamicSchema reference pages (#1146) * feat(skills): add shared docs-mdoc-conventions skill * fix(skills): clarify mdoc:invisible guidance in docs-mdoc-conventions * feat(skills): add shared docs-writing-style skill * feat(skills): add shared docs-integrate skill * refactor(skills): write-data-type-ref delegates to shared doc skills Co-Authored-By: Claude Sonnet 4.6 * refactor(skills): write-how-to-guide delegates to shared doc skills Co-Authored-By: Claude Sonnet 4.6 * fix(skills): add prose line-break and bullet capitalization rules to docs-writing-style Co-Authored-By: Claude Sonnet 4.6 * docs: add BindingResolver reference page and examples Co-Authored-By: Claude Sonnet 4.6 * docs: integrate BindingResolver page into site navigation and cross-references Co-Authored-By: Claude Sonnet 4.6 * fix(skills): require type-qualified references for all methods and constructors Co-Authored-By: Claude Sonnet 4.6 * docs(binding-resolver): qualify all method and constructor references with their type name Co-Authored-By: Claude Sonnet 4.6 * docs(binding-resolver): add schema registry example showing real-world rebinding use case Co-Authored-By: Claude Sonnet 4.6 * docs(dynamic-schema): add DynamicSchema reference page and companion examples - Add docs/reference/dynamic-schema.md covering: opening definition with ASCII flow diagram, creation via toDynamicSchema/fromDynamicValue, serialization via toDynamicValue/fromDynamicValue, validation (check, conforms), rebinding, structural navigation via DynamicOptic, metadata access, toSchema, and integration cross-references - Add four runnable companion examples in schema-examples/dynamicschema/: validation, serialization round-trip, rebind pipeline, schema registry - Wire into sidebars.js (after dynamic-value), docs/index.md, and add See Also cross-references from dynamic-value.md and dynamic-optic.md Co-Authored-By: Claude Sonnet 4.6 * fix(skills): require docs-writing-style as a mandatory sub-skill in write-data-type-ref Change the passive "See the docs-writing-style skill" reference to an active REQUIRED SUB-SKILL declaration, matching the convention used by other skills in the system. Co-Authored-By: Claude Sonnet 4.6 * docs(dynamic-schema): sync page with writing style rules - Capitalize all bullet sentences (holds/can → Holds/Can) - Add introductory prose after bare ## headings before first ### - Remove lone ### subheaders (Serializing, Rebinding, Structural Navigation, Converting) — content placed directly under ## - Add bridging sentences between consecutive code blocks in toDynamicValue, check, conforms, get, doc, and getDefaultValue sections Co-Authored-By: Claude Sonnet 4.6 * fix(skills): require introductory sentences before code blocks to end with a colon Co-Authored-By: Claude Sonnet 4.6 * fix(skills): ban hardcoded expression output comments in favour of mdoc evaluation Co-Authored-By: Claude Sonnet 4.6 * docs(dynamic-schema): expand "Converting to a Typed Schema" with motivation and use-cases Replace the thin toSchema section (signature + toy Slot example) with: - Opening sentence stating the return type and one-line when-to-use rule - Motivation paragraph naming the gap: post-transport scenarios where Scala types are unavailable, making rebind impractical - Explicit contrast: toSchema vs rebind[A] in prose - Bridging sentence before signature block - Realistic gateway example: receive DynamicSchema from registry, call toSchema, validate an incoming payload with fromDynamicValue Co-Authored-By: Claude Sonnet 4.6 * feat(skills): add docs-enrich-section skill New skill for enriching thin documentation sections that show only a signature and a toy example without explaining motivation or when to prefer this API over alternatives. Covers: thin-section signals, source research checklist, five-part expansion pattern (opening → motivation → contrast → signature → realistic example), and a mistakes table with fixes. Co-Authored-By: Claude Sonnet 4.6 * refactor(skills): rename docs skills to use consistent docs-* prefix - write-data-type-ref → docs-data-type-ref - write-how-to-guide → docs-how-to-guide - finding-undocumented → docs-find-documentation-gaps Name field in each SKILL.md frontmatter updated to match. Co-Authored-By: Claude Sonnet 4.6 * fix(skills): clarify that fixing hardcoded output requires restructuring, not just deletion The previous rule said "never hardcode output in comments" but left the remedy ambiguous. Removing the comment from a compile-only block still hides the result — the correct fix is to restructure the block: 1. Move setup into mdoc:silent:reset 2. Add a bridging sentence ending in ':' 3. Put evaluated expressions in a bare mdoc block Adds explicit steps and a cross-reference to docs-mdoc-conventions. Co-Authored-By: Claude Sonnet 4.6 * docs(dynamic-schema): apply mdoc:silent:reset + mdoc pattern to evaluated examples Converts all examples that previously showed hardcoded output comments (or had output stripped) to the proper two-block pattern: - mdoc:silent:reset — type definitions and setup vals - bare mdoc — expressions whose results are shown to the reader Sections converted: fromDynamicValue, toDynamicValue (Serializing), check (3 outcomes), conforms, rebind (decoded round-trip), get (streetSchema.map), typeId, doc, getDefaultValue, toSchema gateway. Also fixes remaining style violations: - adds ':' after Motivation paragraph before ASCII art block - removes step 3 ("compile all") from Running the Examples Co-Authored-By: Claude Sonnet 4.6 * docs: update docs-data-type-ref skill and sync dynamic-schema.md - Update docs-data-type-ref SKILL.md: add per-example source links to "Running the Examples" template - Each example now shows bold description, clickable source link, and separate bash block - Makes GitHub links clickable in Docusaurus (links in bash blocks are not clickable) - Update Rules section to document the new three-part structure - Sync dynamic-schema.md with skill requirements: - Fix hardcoded output in code block: replace mdoc:compile-only with proper mdoc:silent:reset + mdoc pattern - Update "Running the Examples" section to use per-example template with source links - All code blocks now properly evaluated and displayed per writing-style rules Co-Authored-By: Claude Haiku 4.5 * style: apply scalafmt formatting Co-Authored-By: Claude Haiku 4.5 * fix(docs): correct sidebars.js path and diagram notation - Update docs-integrate skill to reference correct path: docs/sidebars.js (was incorrectly referencing website/sidebars.js) - Fix ASCII diagram notation in dynamic-schema.md to use '.' for companion object methods (DynamicSchema.toDynamicValue) instead of '#' notation Co-Authored-By: Claude Haiku 4.5 * fix(schema-examples): remove unused example and correct method reference - Remove empty MyExample.scala placeholder (unused, no implementation or docs) - Fix comment in BindingResolverSchemaRegistryExample.scala to correctly reference Schema[A]#toDynamicSchema instead of DynamicSchema#toDynamicSchema Co-Authored-By: Claude Haiku 4.5 * Update .claude/skills/docs-writing-style/SKILL.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update docs/reference/dynamic-schema.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * fix(pr-1146-review): address remaining review comments - Add trailing newline and closing separator to docs-writing-style skill for consistency - Restructure long run-on sentence in dynamic-schema.md toSchema section: split into two shorter sentences and move contrast into warning admonition - Refactor DynamicSchemaRegistryExample to use immutable patterns: replace var registry/queue with functions that take and return updated collections, adhering to the 'Prefer val over var' style rule Co-Authored-By: Claude Haiku 4.5 --------- Co-authored-by: Claude Sonnet 4.6 Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * docs: add cross-reference from Patch to JsonPatch in Patching reference Add a note in the Patching page that directs readers to JsonPatch when they need untyped JSON-specific patching. This helps distinguish between Patch[S] (typed, schema-based) and JsonPatch (untyped, JSON-specific). Co-Authored-By: Claude Haiku 4.5 * fix: format JsonPatch example files with scalafmt Format the 4 JsonPatch example files to comply with project style guidelines: - CompleteJsonPatchExample.scala - JsonPatchCompositionExample.scala - JsonPatchManualBuildExample.scala - JsonPatchOperationsExample.scala Co-Authored-By: Claude Haiku 4.5 * Update schema-examples/src/main/scala/jsonpatch/JsonPatchCompositionExample.scala Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update schema-examples/src/main/scala/jsonpatch/CompleteJsonPatchExample.scala Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update schema-examples/src/main/scala/util/ShowExpr.scala Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * fix: address review comments on example files - JsonPatchCompositionExample: Remove Scala 3-only 'as' import syntax for Scala 2.13 compatibility - JsonPatchCompositionExample: Use concrete Either[SchemaError, JsonPatch] instead of Either[_, JsonPatch] - JsonPatchCompositionExample: Add SchemaError import - CompleteJsonPatchExample: Explicitly handle patch application errors instead of silently falling back - ShowExpr: Minimize memory allocation by only loading lines up to the call site Addresses Copilot review comments for improved code quality and compatibility. Co-Authored-By: Claude Haiku 4.5 * docs: apply docs-writing-style rules to json-patch.md - Qualify bare method names with JsonPatch prefix (diff, root, apply, empty) - Add colons to intro sentences before code blocks (required by style guide) - Update docs-writing-style skill to reference docs-mdoc-conventions in description Co-Authored-By: Claude Haiku 4.5 * docs: complete docs-writing-style updates for json-patch.md - Qualify all bare method references (diff → JsonPatch.diff, apply → JsonPatch#apply) - Add intro sentence before type signature code block - Add missing colon before Composing Patches code block Co-Authored-By: Claude Haiku 4.5 * docs: fix all remaining docs-writing-style rule violations in json-patch.md - Add missing colons to 8 intro sentences before code blocks (lines 188, 394, 421, 429, 452, 489, 529, 569) - Qualify bare method names: empty → JsonPatch.empty, isEmpty → JsonPatch#isEmpty (lines 160, 174) All docs-writing-style rules now fully compliant. Co-Authored-By: Claude Haiku 4.5 --------- Co-authored-by: Claude Sonnet 4.6 Co-authored-by: zio-scala-steward[bot] <145262613+zio-scala-steward[bot]@users.noreply.github.com> Co-authored-by: Andriy Plokhotnyuk Co-authored-by: Nabil Abdel-Hafeez <7283535+987Nabil@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Fix #1166 by adding numeric widening support when converting from dynamic values (#1170) * Check Compliance Skill (#1174) * docs: integrate XML module reference into sidebar Add reference/xml to docs/sidebars.js in the Reference section after json-schema to make the XML documentation page accessible in the site navigation. PR #1016 includes comprehensive XML module documentation that was missing from the sidebar. Co-Authored-By: Claude Haiku 4.5 * docs: enhance XML module reference with comprehensive coverage of all PR #1016 features Add comprehensive documentation for all aspects of PR #1016: Major additions: - XmlEncoder and XmlDecoder traits with custom encoder/decoder examples - XmlBinaryCodec low-level codec interface and usage - Enhanced XmlError documentation with line/column position information - Real-world examples: RSS feed parsing, Atom feed, Sitemap XML - Implementation details section covering zero dependencies - Cross-Scala (2.13, 3.x) compatibility notes - Performance characteristics (time/space complexity) - Comparison table with Java XML libraries (JAXB, DOM4j) Sections now include: - Overview, Installation, Basic Usage - XML AST with node types - XmlBuilder for programmatic construction - WriterConfig and ReaderConfig with tables - @xmlAttribute and @xmlNamespace annotations - XmlSelection with navigation, filtering, and combinators - XmlPatch with position options and composition - XmlEncoder/XmlDecoder with implicit derivation and combinators - Extension syntax (.toXml, .fromXml) - Printing XML with various configurations - Type testing and access patterns - Comprehensive supported types list - XmlBinaryCodec usage examples - Error handling and XmlError details - Cross-platform (JVM/Scala.js) support - Multiple real-world examples - Implementation approach and performance analysis - Comparison with alternatives This ensures all features, APIs, and patterns from PR #1016 are thoroughly documented and discoverable by users. Co-Authored-By: Claude Haiku 4.5 * docs(xml): fix encoding examples to use mdoc instead of manual output comments Split the encoding example into setup block with mdoc:silent and separate mdoc blocks for each encoding operation. This follows the convention of never manually writing output comments when mdoc can show real evaluated output. * docs(xml): fix attributes example to use mdoc instead of manual output comments Split the attributes example into setup block with mdoc:silent and separate mdoc block to show the encoding result. Removes manually written output comment. * docs(xml): fix namespace examples to use mdoc instead of manual output comments Split both namespace examples (with and without prefix) into setup blocks with mdoc:silent/mdoc:silent:nest and separate mdoc blocks for encoding. Uses mdoc:silent:nest for the second example since it redefines Feed. * docs(xml): fix printing example to use mdoc instead of manual output comments Split the printing example into setup block with mdoc:silent and separate mdoc blocks for compact and pretty-printed output. Removes manually written output comments from the compile-only block. * docs(xml): fix Decoding from XML to use mdoc:silent:nest when redefining Person * docs(xml): fix Attributes to use mdoc:silent:nest when redefining Person * docs(xml): fix Namespaces to use mdoc:silent:nest when redefining Feed * docs(xml): fix XmlDecoder to use mdoc:silent:nest when redefining Person * docs(xml): fix missing colon before code block in Installation section * docs(xml): fix heading hierarchy in Printing XML section * docs(xml): fix heading hierarchy in Error Handling section * feat: add docs-check-compliance skill for flexible documentation auditing - Accepts two arguments: doc file and rule skill name - Loads rule skill dynamically - Checks and fixes each rule with separate commits - Verifies compilation with 'sbt docs/mdoc' - Reusable across any rule skill and documentation file Co-Authored-By: Claude Haiku 4.5 * docs(xml): fix Navigation section to use mdoc:silent:nest when redefining xml * docs(xml): fix remaining mdoc violations for compilation - Add missing Chunk import to custom decoders section - Fix Message round-trip example with syntax extension import - Fix xmlStr variable rename in Complete Example section - Fix XmlBinaryCodec derivation method call - Simplify XmlError example to work with actual API * Update .claude/skills/docs-check-compliance/SKILL.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * chore: remove settings.local.json from repository - Remove tracked .claude/settings.local.json to avoid weakening sandboxing - Add to .gitignore to prevent future commits - Users can create their own settings.local.json if needed locally Co-Authored-By: Claude Haiku 4.5 * docs(xml): add Opening Definition section with type signature [docs-data-type-ref compliance] * docs: clarify Opening Definition section should not have explicit heading * docs(xml): linter formatting adjustments --------- Co-authored-by: Claude Haiku 4.5 Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * docs(structural-types): fix type reference notation [referencing types rule] * docs(structural-types): fix Schema reference notation in Binding.of section [referencing types rule] * docs(structural-types): add prose before Supported Conversions subheader [bare subheader rule] * docs(structural-types): add prose before Integration subheader [bare subheader rule] * fix(skills): restore argument-hint and allowed-tools to docs-document-pr skill * fix: remove unused structural schema variables from sum type examples Remove unused structuralSchema, colorStructural, statusStructural, and fruitStructural variables that trigger -Wunused:all warnings in Scala 3. * fix: clarify that example creates nominal Person, not anonymous structural value * fix: remove misleading Into macro reference from integration example The example demonstrates DynamicValue manipulation and cross-type interop, not schema evolution with the Into macro. Update header and section title to reflect actual functionality. * docs(structural-types): clarify nested products keep nominal types Reword to avoid implying recursive structuralization that doesn't happen. Nested fields retain their nominal types; only the outer product is structuralized. * refactor: use show() to display structural schema representations Instead of computing unused structural schema variables, use ShowExpr.show() to print their evaluation. This demonstrates to users what the structural schema looks like for each sum type variant (Shape, Color, Status, Fruit). * style: format with scalafmt * docs(structural-types): workaround Scala 3.7.4 compiler crash in mdoc Convert code blocks from `mdoc:compile-only` to plain `scala` to work around a Scala 3.7.4 compiler crash (erasure phase) triggered by structural type syntax. TEMPORARY WORKAROUND: Code examples in this file are NOT compiled/type-checked until Scala 3.8.x is adopted (see PR #1169). When Scala 3.8.x becomes the default: 1. Revert mdoc:compile-only modifiers 2. Re-enable compilation checks with `sbt docs/mdoc` 3. Delete the blocking issue comment Fixes: CI failures in docs/mdoc task Related: PR #1169 "Preparing to migration on Scala 3.8.x" Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Claude Haiku 4.5 Co-authored-by: Andriy Plokhotnyuk Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: zio-scala-steward[bot] <145262613+zio-scala-steward[bot]@users.noreply.github.com> Co-authored-by: Nabil Abdel-Hafeez <7283535+987Nabil@users.noreply.github.com> --- .claude/skills/docs-data-type-ref/SKILL.md | 21 +- .claude/skills/docs-document-pr/SKILL.md | 2 + .claude/skills/docs-enrich-section/SKILL.md | 12 + .claude/skills/docs-how-to-guide/SKILL.md | 1 + .claude/skills/docs-integrate/SKILL.md | 32 +- docs/reference/structural-types.md | 362 ++++++++++++++++++ docs/sidebars.js | 1 + .../StructuralIntegrationExample.scala | 95 +++++ .../structural/StructuralProductExample.scala | 94 +++++ .../structural/StructuralSumTypeExample.scala | 156 ++++++++ .../structural/StructuralTupleExample.scala | 45 +++ 11 files changed, 816 insertions(+), 5 deletions(-) create mode 100644 docs/reference/structural-types.md create mode 100644 schema-examples/src/main/scala/structural/StructuralIntegrationExample.scala create mode 100644 schema-examples/src/main/scala/structural/StructuralProductExample.scala create mode 100644 schema-examples/src/main/scala/structural/StructuralSumTypeExample.scala create mode 100644 schema-examples/src/main/scala/structural/StructuralTupleExample.scala diff --git a/.claude/skills/docs-data-type-ref/SKILL.md b/.claude/skills/docs-data-type-ref/SKILL.md index b84d2b2d00..882319d03e 100644 --- a/.claude/skills/docs-data-type-ref/SKILL.md +++ b/.claude/skills/docs-data-type-ref/SKILL.md @@ -393,11 +393,28 @@ cross-references, link verification). Additional note for reference pages: if creating a new file, place it in the appropriate `docs/reference/` subdirectory based on where it logically belongs. -## Step 5: Review +## Step 5: Review and Verify Compilation After writing, re-read the document and verify: - All method signatures match the actual source code -- All code examples would compile with `mdoc` - The frontmatter `id` matches what `sidebars.js` expects (if an entry exists) - The document is self-contained—a reader shouldn't need to look at the source code to understand the type's API - The example file compiles and runs without errors + +### Mandatory: Run mdoc Compilation Check + +Before claiming the page is complete, run the full mdoc compilation check: + +```bash +sbt docs/mdoc +``` + +**Success criterion:** The output contains **zero `[error]` lines**. Warnings are acceptable. + +**What this verifies:** +- All Scala code blocks in the page are syntactically correct and type-check +- Imports and type references are valid +- Cross-references to other documentation pages are unbroken +- Readers can copy-paste any example from the page without errors + +**If mdoc reports errors:** Fix them immediately. Do not mark the page as complete until all errors are resolved. diff --git a/.claude/skills/docs-document-pr/SKILL.md b/.claude/skills/docs-document-pr/SKILL.md index eeb7682154..05538c47aa 100644 --- a/.claude/skills/docs-document-pr/SKILL.md +++ b/.claude/skills/docs-document-pr/SKILL.md @@ -7,6 +7,8 @@ description: > content type and scope. Delegates to specialized documentation skills (docs-data-type-ref, docs-how-to-guide) to ensure consistent style and formatting across all ZIO Blocks docs. +argument-hint: "[PR number (e.g., #1016 or 1016)]" +allowed-tools: Read, Glob, Grep, Bash(gh:*) triggers: - "document PR" - "doc this PR" diff --git a/.claude/skills/docs-enrich-section/SKILL.md b/.claude/skills/docs-enrich-section/SKILL.md index 0c752f1ce0..005dd593ef 100644 --- a/.claude/skills/docs-enrich-section/SKILL.md +++ b/.claude/skills/docs-enrich-section/SKILL.md @@ -81,3 +81,15 @@ Checklist for the example: | Example uses the same toy type as before | Create a new type that reflects the motivated use-case | | Prose sentence before code does not end with `:` | Every sentence immediately before a code fence must end with `:` | | Added output comments to show what expressions return | Delete them — mdoc evaluates and renders output automatically | + +## Verification + +After enriching the section, run the mdoc compilation check to ensure all code examples are syntactically correct and type-check: + +```bash +sbt docs/mdoc +``` + +**Success criterion:** The output contains **zero `[error]` lines**. Warnings are acceptable. + +**If mdoc reports errors:** Fix them immediately before marking the enrichment as complete. Do not commit or claim the work is done until all errors are resolved. diff --git a/.claude/skills/docs-how-to-guide/SKILL.md b/.claude/skills/docs-how-to-guide/SKILL.md index dae5a6f99f..a852a48dc9 100644 --- a/.claude/skills/docs-how-to-guide/SKILL.md +++ b/.claude/skills/docs-how-to-guide/SKILL.md @@ -457,6 +457,7 @@ After writing, verify every item on this checklist: - [ ] Imports are complete and correct in every code block - [ ] The sbt dependency in Prerequisites is correct - [ ] No deprecated methods or outdated patterns are used +- [ ] Run `sbt docs/mdoc` and confirm zero `[error]` lines (this is mandatory before claiming the guide is done) ### Companion Examples - [ ] A package directory exists in `schema-examples/src/main/scala//` diff --git a/.claude/skills/docs-integrate/SKILL.md b/.claude/skills/docs-integrate/SKILL.md index 7f91badb4b..0497518d3e 100644 --- a/.claude/skills/docs-integrate/SKILL.md +++ b/.claude/skills/docs-integrate/SKILL.md @@ -43,11 +43,37 @@ Add links from related existing docs to the new page: - If you wrote a guide that uses a specific type (e.g., `Schema`, `DynamicOptic`), add a cross-reference from the type's reference page to the guide. -## Step 4: Verify All Links +## Step 4: Verify Compilation and Links (Mandatory Gate) -Check that all relative links in the new page and in any updated pages are correct: +This is a **mandatory compilation gate**. All code examples in documentation are compile-checked via mdoc. + +### Check Relative Links + +Verify that all relative links in the new page and in any updated pages are correct: - Internal links use relative paths: `[TypeName](./type-name.md)`. - Anchor links match actual heading text (Docusaurus converts headings to lowercase kebab-case anchors). -- Run `sbt docs/mdoc` to catch broken mdoc links (they appear as `[error] Unknown link '...'`). + +### Run mdoc Compilation Check + +Run the full mdoc compilation check: + +```bash +sbt docs/mdoc +``` + +**Success criterion:** The output contains **zero `[error]` lines**. Warnings are acceptable. + +**What to look for:** +- Type errors in Scala code blocks (mismatched types, undefined names, missing imports) +- Broken cross-references (mdoc reports these as `[error] Unknown link '...'`) +- Unresolved imports or package references + +**If mdoc reports errors:** Fix them immediately. Do not proceed to commit or claim the work +is done until all errors are resolved. + +The compilation check ensures: +- Code examples are syntactically correct +- Readers can copy-paste examples without errors +- Cross-references are valid and unbroken diff --git a/docs/reference/structural-types.md b/docs/reference/structural-types.md new file mode 100644 index 0000000000..012a66cb00 --- /dev/null +++ b/docs/reference/structural-types.md @@ -0,0 +1,362 @@ +--- +id: structural-types +title: "Structural Types" +--- + + + +Structural types enable **duck typing** with ZIO Blocks schemas. Instead of requiring a nominal type name (like `class Person`), a structural schema validates based on the **shape** of an object — the fields it provides, regardless of how it was defined. + +## Motivation + +Consider a common integration scenario: + +```scala +// Your system +case class Person(name: String, age: Int) + +// External system (same data, different class) +case class User(name: String, age: Int) +``` + +Without structural types, converting between `Person` and `User` requires manual translation. With structural types, they both have the same structural schema: + +```scala +import scala.language.reflectiveCalls +import zio.blocks.schema.Schema + +case class Person(name: String, age: Int) +case class User(name: String, age: Int) + +val personSchema = Schema.derived[Person] +val personStructural = personSchema.structural +// Schema[{ def name: String; def age: Int }] + +val userSchema = Schema.derived[User] +val userStructural = userSchema.structural +// Schema[{ def name: String; def age: Int }] + +// Both schemas accept the same data shape +``` + +## Construction: `Schema#structural` + +Use the `Schema#structural` method on any schema to get the corresponding structural schema. + +**Scala 3:** Using transparent inline — the return type is inferred to the full refinement type: + +```scala +import zio.blocks.schema.Schema + +case class Person(name: String, age: Int) +object Person { + implicit val schema: Schema[Person] = Schema.derived[Person] +} + +val personSchema: Schema[Person] = Schema.derived[Person] +val structuralSchema: Schema[{ def name: String; def age: Int }] = personSchema.structural +``` + +**Scala 2:** Implicit derivation — returns `Schema[ts.StructuralType]` (path-dependent type): + +```scala +import zio.blocks.schema.Schema + +case class Person(name: String, age: Int) +object Person { + implicit val schema: Schema[Person] = Schema.derived[Person] +} + +val personSchema: Schema[Person] = Schema.derived[Person] +val structuralSchema = personSchema.structural +// Type: Schema[ts.StructuralType] (structural type inferred from macro) +``` + +## Supported Conversions + +The following type categories can be converted to structural schemas: + +### Product Types (Case Classes) + +Both Scala 2 and 3 support structural conversion of case classes: + +```scala +import zio.blocks.schema.Schema + +case class Address(street: String, city: String, zipCode: Int) +object Address { + implicit val schema: Schema[Address] = Schema.derived[Address] +} + +val schema = Schema.derived[Address] +val structural = schema.structural +// Schema[{ def street: String; def city: String; def zipCode: Int }] +``` + +### Tuples + +Tuples convert to structural records with field names derived from positions: + +```scala +import zio.blocks.schema.Schema + +type StringIntBool = (String, Int, Boolean) +implicit val schema: Schema[StringIntBool] = Schema.derived[StringIntBool] + +val tupleSchema = Schema.derived[(String, Int, Boolean)] +val structuralSchema = tupleSchema.structural +// Schema[{ def _1: String; def _2: Int; def _3: Boolean }] +``` + +### Nested Products + +Nested product fields keep their nominal types; only the outer product is structuralized: + +```scala +import zio.blocks.schema.Schema + +case class Address(street: String, city: String) +object Address { + implicit val schema: Schema[Address] = Schema.derived[Address] +} + +case class Person(name: String, age: Int, address: Address) +object Person { + implicit val schema: Schema[Person] = Schema.derived[Person] +} + +val personSchema = Schema.derived[Person] +val structuralSchema = personSchema.structural +// Schema[{ +// def name: String +// def age: Int +// def address: Address +// }] +``` + +### Opaque Types (Scala 3) + +Opaque type aliases are unwrapped to their underlying type: + +```scala +import zio.blocks.schema.Schema + +opaque type UserId = String + +case class User(id: UserId, name: String) +object User { + implicit val schema: Schema[User] = Schema.derived[User] +} + +val schema = Schema.derived[User] +val structural = schema.structural +// Schema[{ def id: String; def name: String }] +// (UserId unwrapped to String) +``` + +### Sum Types / Sealed Traits (Scala 3) + +Sealed traits and enums convert to union types with nested method syntax: + +```scala +import zio.blocks.schema.Schema + +sealed trait Shape +object Shape { + case class Circle(radius: Double) extends Shape + case class Rectangle(width: Double, height: Double) extends Shape + implicit val schema: Schema[Shape] = Schema.derived[Shape] +} + +val schema = Schema.derived[Shape] +val structural = schema.structural +// Schema[ +// { def Circle: { def radius: Double } } | +// { def Rectangle: { def height: Double; def width: Double } } +// ] +// (cases sorted alphabetically) +``` + +**Enum syntax** (Scala 3): + +```scala +import zio.blocks.schema.Schema + +enum Color { + case Red, Green, Blue +} +object Color { + implicit val schema: Schema[Color] = Schema.derived[Color] +} + +val schema = Schema.derived[Color] +val structural = schema.structural +// Schema[ +// { def Blue: {} } | +// { def Green: {} } | +// { def Red: {} } +// ] +``` + +Cases appear in **alphabetical order** in the union type. This alphabetical ordering (applied to fields in products and case names in unions) ensures **deterministic, normalized type identity**: two structural types with the same fields but different declaration order produce the same structural type and normalized name. This is essential for predictable schema evolution and cross-system interop. + +## Direct Structural Derivation (Scala 3) + +Create a schema directly for a structural type without a nominal base: + +```scala +import zio.blocks.schema.Schema + +// No case class needed — define the schema for the shape directly +val personStructural = Schema.derived[{ def name: String; def age: Int }] + +// The schema is ready to use with values matching that structural shape +``` + +This is only supported in **Scala 3** with the right macro machinery. + +## Round-tripping Through DynamicValue + +Structural schemas enable **cross-type conversion through `DynamicValue`** — encode a value of one nominal type and decode it as a *different* nominal type with the same structural shape. This is the core benefit of structural types for system integration. + +### Motivation + +In real integrations, you often receive data from an external system shaped like one type, but you need to work with it as a different type in your system. Without structural types, field-by-field translation is required. With structural types, if both types have identical shape, `DynamicValue` acts as the seamless bridge. + +Common scenarios: +- **API gateways** — receive a `PersonDTO` from an external API, decode as your internal `Person` type +- **Message brokers** — consume an event shaped like `UserEvent`, convert to your domain `Account` type +- **Data pipelines** — records with identical fields but different class names from different services + +### Cross-type conversion in action + +Set up two types with identical structural shape: + +```scala mdoc:silent +import zio.blocks.schema.Schema +import zio.blocks.schema.SchemaError + +case class Person(name: String, age: Int) +object Person { + implicit val schema: Schema[Person] = Schema.derived[Person] +} + +case class Employee(name: String, age: Int) +object Employee { + implicit val schema: Schema[Employee] = Schema.derived[Employee] +} + +val personSchema = Schema.derived[Person] +val employeeSchema = Schema.derived[Employee] +``` + +Now encode a `Person` to `DynamicValue` and decode it as an `Employee`: + +```scala mdoc +val person = Person("Alice", 30) +val dynamic = personSchema.toDynamicValue(person) + +val employee: Either[SchemaError, Employee] = + employeeSchema.fromDynamicValue(dynamic) +``` + +The structural shape guarantee ensures type-safe conversion: at compile time, you know both schemas accept the same fields, so round-tripping through `DynamicValue` is safe and zero-cost. + +## Integration + +Structural types integrate seamlessly with ZIO Blocks' broader ecosystem: + +### With Schema Evolution Macros + +Structural schemas work with [Schema Evolution](./schema-evolution/into.md) macros for cross-type conversion. When two types share the same structural shape, the conversion machinery can work across type boundaries: + +```scala +import zio.blocks.schema.Schema + +case class Person(name: String, age: Int) +object Person { + implicit val schema: Schema[Person] = Schema.derived[Person] +} + +case class PersonDTO(name: String, age: Int) +object PersonDTO { + implicit val schema: Schema[PersonDTO] = Schema.derived[PersonDTO] +} + +// Both types have identical structural schemas +val personSchema = Schema.derived[Person] +val dtoSchema = Schema.derived[PersonDTO] + +// They share the same structural shape: +// Schema[{ def name: String; def age: Int }] +``` + +### With Binding.of (Serialization) + +Structural types are also supported by the `Binding.of` macro for high-performance serialization via register-based encoding: + +```scala +import zio.blocks.schema.binding.Binding + +// Direct structural type serialization (JVM only) +val binding = Binding.of[{ def name: String; def age: Int }] + +// Works with nested structural types +val nestedBinding = Binding.of[{ + def name: String + def address: { def street: String; def city: String } +}] + +// Works with containers +val containerBinding = Binding.of[{ + def name: String + def emails: List[String] +}] +``` + +This enables anonymous structural types to benefit from ZIO Blocks' high-performance serialization without requiring nominal case class definitions. Like `Schema#structural`, this is **JVM-only**. + +See [Binding](./binding.md) for detailed serialization documentation. + +## Running the Examples + +Example applications demonstrating structural types are available in `schema-examples`: + +```sh +# Simple product type +sbt "schema-examples/runMain structural.StructuralSimpleProductExample" + +# Nested products +sbt "schema-examples/runMain structural.StructuralNestedProductExample" + +# Sealed trait (Scala 3) +sbt "schema-examples/runMain structural.StructuralSealedTraitExample" + +# Enum (Scala 3) +sbt "schema-examples/runMain structural.StructuralEnumExample" + +# Tuples +sbt "schema-examples/runMain structural.StructuralTupleExample" + +# Integration with Into macro +sbt "schema-examples/runMain structural.StructuralIntoExample" +``` diff --git a/docs/sidebars.js b/docs/sidebars.js index fcc0c0b251..8dd7de0297 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -15,6 +15,7 @@ const sidebars = { "reference/modifier", "reference/dynamic-value", "reference/dynamic-schema", + "reference/structural-types", "reference/optics", "reference/schema-expr", "reference/dynamic-optic", diff --git a/schema-examples/src/main/scala/structural/StructuralIntegrationExample.scala b/schema-examples/src/main/scala/structural/StructuralIntegrationExample.scala new file mode 100644 index 0000000000..08181c3b63 --- /dev/null +++ b/schema-examples/src/main/scala/structural/StructuralIntegrationExample.scala @@ -0,0 +1,95 @@ +package structural + +import zio.blocks.schema._ +import util.ShowExpr.show + +/** + * Structural Types Reference — Integration + * + * Demonstrates how structural types enable cross-type interoperability through + * DynamicValue manipulation, focusing on structural schemas and dynamic value + * handling. + * + * Run with: sbt "schema-examples/runMain + * structural.StructuralIntegrationExample" + */ + +object StructuralIntegrationExample extends App { + + // Two nominally different types with the same shape + case class Person(name: String, age: Int) + object Person { + implicit val schema: Schema[Person] = Schema.derived[Person] + } + + case class Employee(name: String, age: Int) + object Employee { + implicit val schema: Schema[Employee] = Schema.derived[Employee] + } + + println("=== Structural Types Enable Cross-Type Interop ===\n") + + // Both types have identical structural shape + val personSchema: Schema[Person] = Schema.derived[Person] + val employeeSchema: Schema[Employee] = Schema.derived[Employee] + + val personStructural = personSchema.structural + val employeeStructural = employeeSchema.structural + + println("Both types convert to the same structural shape:") + show("Person structural type fields") + val person = Person("Alice", 30) + val personDynamic = personStructural.toDynamicValue(person) + show(personDynamic) + + println("\nEmployee structural type fields:") + show("Employee structural type fields") + val employee = Employee("Bob", 28) + val employeeDynamic = employeeStructural.toDynamicValue(employee) + show(employeeDynamic) + + // Both types share the same structural shape - can be used interchangeably + println("\n=== Cross-Type Interop via Structural Types ===\n") + + println("Person (nominal) and Employee (nominal) are different types at compile time:") + println("But they share the same structural representation.") + println("\nYou can use structural types for duck typing and schema-based interop.") + + // Demonstrate DynamicValue manipulation with structural schemas + println("\n=== Manipulating Structural Values via DynamicValue ===\n") + + val original = Person("Charlie", 35) + val originalDynamic = personStructural.toDynamicValue(original) + + println("Original person:") + show(originalDynamic) + + // Modify age field + val updated = originalDynamic.set( + DynamicOptic.root.field("age"), + DynamicValue.int(36) + ) + + println("\nAfter increasing age by 1:") + show(updated) + + // Decode back to typed value + val updatedPerson = personStructural.fromDynamicValue(updated) + println("\nDecoded updated person:") + show(updatedPerson) + + // Demonstrate that structural validation ignores nominal type + println("\n=== Structural Validation Ignores Nominal Type ===\n") + + val anonymousEmployee: { def name: String; def age: Int } = new { + def name: String = "Diana" + def age: Int = 27 + } + + // Even though it's an anonymous object, it has the right structural shape + val anomDynamic = employeeStructural.toDynamicValue(anonymousEmployee) + println("Anonymous object with employee shape:") + show(anomDynamic) + + println("\nThis anonymous value can be used wherever an Employee structural type is expected.") +} diff --git a/schema-examples/src/main/scala/structural/StructuralProductExample.scala b/schema-examples/src/main/scala/structural/StructuralProductExample.scala new file mode 100644 index 0000000000..78a80e975d --- /dev/null +++ b/schema-examples/src/main/scala/structural/StructuralProductExample.scala @@ -0,0 +1,94 @@ +package structural + +import zio.blocks.schema._ +import util.ShowExpr.show + +/** + * Structural Types Reference — Product Types + * + * Demonstrates converting case classes to structural schemas, and working with + * anonymous objects that match the structural shape. + * + * Run with: sbt "schema-examples/runMain + * structural.StructuralSimpleProductExample" sbt "schema-examples/runMain + * structural.StructuralNestedProductExample" + */ + +// ────────────────────────────────────────────────────────────────────────── +// Simple Product +// ────────────────────────────────────────────────────────────────────────── + +object StructuralSimpleProductExample extends App { + + case class Person(name: String, age: Int) + object Person { + implicit val schema: Schema[Person] = Schema.derived[Person] + } + + val nominalSchema: Schema[Person] = Schema.derived[Person] + val structuralSchema = nominalSchema.structural + + println("=== Simple Product: Person ===\n") + + // Create a nominal Person instance that matches the structural schema shape + val person: Person = Person("Alice", 30) + + // Encode to DynamicValue + val dynamic = structuralSchema.toDynamicValue(person) + + println("Structural schema representation of Person:") + show(dynamic) + + // Decode back + val decoded = structuralSchema.fromDynamicValue(dynamic) + println("\nDecoded result:") + show(decoded) +} + +// ────────────────────────────────────────────────────────────────────────── +// Nested Product +// ────────────────────────────────────────────────────────────────────────── + +object StructuralNestedProductExample extends App { + + case class Address(street: String, city: String, zip: Int) + object Address { + implicit val schema: Schema[Address] = Schema.derived[Address] + } + + case class Person(name: String, age: Int, address: Address) + object Person { + implicit val schema: Schema[Person] = Schema.derived[Person] + } + + val nominalSchema: Schema[Person] = Schema.derived[Person] + val structuralSchema = nominalSchema.structural + + println("=== Nested Product: Person with Address ===\n") + + val person = Person( + "Bob", + 25, + Address("123 Main St", "Springfield", 12345) + ) + + // Encode nested structure + val dynamic = structuralSchema.toDynamicValue(person) + + println("Structural schema with nested address:") + show(dynamic) + + // Decode + val decoded = structuralSchema.fromDynamicValue(dynamic) + println("\nDecoded nested result:") + show(decoded) + + // Modify nested address using DynamicOptic + val updated = dynamic.set( + DynamicOptic.root.field("address").field("city"), + DynamicValue.string("New York") + ) + + println("\nAfter modifying city field:") + show(updated) +} diff --git a/schema-examples/src/main/scala/structural/StructuralSumTypeExample.scala b/schema-examples/src/main/scala/structural/StructuralSumTypeExample.scala new file mode 100644 index 0000000000..2ede6aa428 --- /dev/null +++ b/schema-examples/src/main/scala/structural/StructuralSumTypeExample.scala @@ -0,0 +1,156 @@ +package structural + +import zio.blocks.schema._ +import util.ShowExpr.show + +/** + * Structural Types Reference — Sum Types + * + * Demonstrates converting sealed traits and enums to structural union schemas. + * This example is Scala 3 only. + * + * Run with: sbt "schema-examples/runMain + * structural.StructuralSealedTraitExample" sbt "schema-examples/runMain + * structural.StructuralEnumExample" + */ + +// ────────────────────────────────────────────────────────────────────────── +// Sealed Trait +// ────────────────────────────────────────────────────────────────────────── + +object StructuralSealedTraitExample extends App { + + sealed trait Shape + object Shape { + case class Circle(radius: Double) extends Shape + case class Rectangle(width: Double, height: Double) extends Shape + } + + implicit val shapeSchema: Schema[Shape] = Schema.derived[Shape] + + val nominalSchema: Schema[Shape] = Schema.derived[Shape] + val structuralSchema = nominalSchema.structural + + println("=== Sum Type: Shape (Sealed Trait) ===\n") + + println("Structural schema representation:") + show(structuralSchema) + + // Encode a Circle variant using nominal schema + val circle: Shape = Shape.Circle(5.0) + val circleDynamic = nominalSchema.toDynamicValue(circle) + + println("Circle variant as DynamicValue:") + show(circleDynamic) + + // Encode a Rectangle variant using nominal schema + val rectangle: Shape = Shape.Rectangle(10.0, 20.0) + val rectangleDynamic = nominalSchema.toDynamicValue(rectangle) + + println("\nRectangle variant as DynamicValue:") + show(rectangleDynamic) + + // Decode from DynamicValue using nominal schema + val decodedCircle = nominalSchema.fromDynamicValue(circleDynamic) + println("\nDecoded Circle:") + show(decodedCircle) + + // Show the structural schema representation + println("\nStructural schema representation:") + show("Union of Circle and Rectangle with method syntax") + + // Pattern match on decoded result + decodedCircle match { + case Right(_) => + println("\nSuccessfully decoded shape") + case Left(error) => + println(s"\nDecoding failed: $error") + } +} + +// ────────────────────────────────────────────────────────────────────────── +// Enum (Scala 3) +// ────────────────────────────────────────────────────────────────────────── + +object StructuralEnumExample extends App { + + enum Color { + case Red, Green, Blue + } + object Color { + implicit val schema: Schema[Color] = Schema.derived[Color] + } + + enum Status { + case Active, Inactive, Suspended + } + object Status { + implicit val schema: Schema[Status] = Schema.derived[Status] + } + + enum Fruit { + case Apple(variety: String) + case Orange(juicy: Boolean) + case Banana(length: Int) + } + object Fruit { + implicit val schema: Schema[Fruit] = Schema.derived[Fruit] + } + + println("=== Sum Type: Color (Simple Enum) ===\n") + + val colorSchema: Schema[Color] = Schema.derived[Color] + val colorStructural = colorSchema.structural + + println("Structural schema representation:") + show(colorStructural) + + val red: Color = Color.Red + val redDynamic = colorSchema.toDynamicValue(red) + + println("Color.Red as DynamicValue:") + show(redDynamic) + + println("\n=== Sum Type: Status (Simple Enum) ===\n") + + val statusSchema: Schema[Status] = Schema.derived[Status] + val statusStructural = statusSchema.structural + + println("Structural schema representation:") + show(statusStructural) + + val active: Status = Status.Active + val activeDynamic = statusSchema.toDynamicValue(active) + + println("Status.Active as DynamicValue:") + show(activeDynamic) + + println("\n=== Sum Type: Fruit (Parameterized Enum) ===\n") + + val fruitSchema: Schema[Fruit] = Schema.derived[Fruit] + val fruitStructural = fruitSchema.structural + + println("Structural schema representation:") + show(fruitStructural) + + // Parameterized enum cases + val apple: Fruit = Fruit.Apple("Granny Smith") + val appleDynamic = fruitSchema.toDynamicValue(apple) + + println("Fruit.Apple as DynamicValue:") + show(appleDynamic) + + val orange: Fruit = Fruit.Orange(true) + val orangeDynamic = fruitSchema.toDynamicValue(orange) + + println("\nFruit.Orange as DynamicValue:") + show(orangeDynamic) + + // Decode back + val decodedFruit = fruitSchema.fromDynamicValue(appleDynamic) + println("\nDecoded Apple:") + show(decodedFruit) + + // Show structural representation + println("\nStructural schema represents sum types as unions with method syntax") +} diff --git a/schema-examples/src/main/scala/structural/StructuralTupleExample.scala b/schema-examples/src/main/scala/structural/StructuralTupleExample.scala new file mode 100644 index 0000000000..ac81d2d8c8 --- /dev/null +++ b/schema-examples/src/main/scala/structural/StructuralTupleExample.scala @@ -0,0 +1,45 @@ +package structural + +import zio.blocks.schema._ +import util.ShowExpr.show + +/** + * Structural Types Reference — Tuples + * + * Demonstrates converting tuples to structural schemas. Tuples are converted to + * records with positional field names (_1, _2, _3, ...). + * + * Run with: sbt "schema-examples/runMain structural.StructuralTupleExample" + */ + +object StructuralTupleExample extends App { + + println("=== Structural Tuples ===\n") + + // Tuple of String, Int, Boolean + val tupleSchema = Schema.derived[(String, Int, Boolean)] + val structuralSchema = tupleSchema.structural + + val tuple: (String, Int, Boolean) = ("Alice", 30, true) + + println("Simple tuple (String, Int, Boolean):") + val dynamic1 = structuralSchema.toDynamicValue(tuple) + show(dynamic1) + + // Nested tuple + println("\n=== Nested Tuples ===\n") + + val nestedTupleSchema = Schema.derived[((String, Int), (Double, Boolean))] + val nestedStructural = nestedTupleSchema.structural + + val nestedTuple: ((String, Int), (Double, Boolean)) = (("Bob", 25), (3.14, false)) + + println("Nested tuple ((String, Int), (Double, Boolean)):") + val dynamic2 = nestedStructural.toDynamicValue(nestedTuple) + show(dynamic2) + + // Decode back + val decoded = structuralSchema.fromDynamicValue(dynamic1) + println("\nDecoded simple tuple:") + show(decoded) +} From c61e4f2e78f122b3c7c08a7618d072d9f384491a Mon Sep 17 00:00:00 2001 From: "John A. De Goes" Date: Fri, 6 Mar 2026 12:46:16 -0500 Subject: [PATCH 05/36] fix(Allows): harden Scala 2 macro type extraction and add regression test for issue #1145 (#1182) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add c.enclosingImplicits fallback in the Scala 2 whitebox macro so that A and S are resolved from the implicit search's expected type when c.macroApplication.tpe is null or unresolved. Add Issue1145Reproducer — a top-level object with import Allows._ matching the reporter's exact code — as a regression guard. --- .../AllowsCompanionVersionSpecific.scala | 49 +++++++++++++++++-- .../blocks/schema/comptime/AllowsSpec.scala | 28 +++++++++++ 2 files changed, 74 insertions(+), 3 deletions(-) diff --git a/schema/shared/src/main/scala-2/zio/blocks/schema/comptime/AllowsCompanionVersionSpecific.scala b/schema/shared/src/main/scala-2/zio/blocks/schema/comptime/AllowsCompanionVersionSpecific.scala index bb49981998..88dd334ead 100644 --- a/schema/shared/src/main/scala-2/zio/blocks/schema/comptime/AllowsCompanionVersionSpecific.scala +++ b/schema/shared/src/main/scala-2/zio/blocks/schema/comptime/AllowsCompanionVersionSpecific.scala @@ -37,9 +37,52 @@ private[comptime] object AllowsMacroImpl { // In Scala 2 whitebox macros, weakTypeOf[S] and weakTypeOf[A] are abstract type // variables. Extract the concrete types from the macro application's return type. - val appTpe = c.macroApplication.tpe - val sTpe = appTpe.typeArgs.last // Allows[A, S] — S is second type arg - val rootTpe0 = appTpe.typeArgs.head // Allows[A, S] — A is first type arg + // + // c.macroApplication.tpe is the primary source: it holds the return type of the + // macro call as determined by the typer before expansion. When the macro is + // used as an implicit and Scala 2's typer has already committed the expected + // type (which is the normal case), this gives us Allows[ConcreteA, ConcreteS] + // with both type arguments fully resolved. + // + // In rare cases c.macroApplication.tpe can be null (e.g. some incremental + // compilation states) or its type arguments can contain unresolved type variables + // (isAbstract type parameters or existential skolems). The fallback reads + // c.enclosingImplicits: when the macro is triggered as part of an implicit + // search, c.enclosingImplicits.head.pt is the "point type" — the expected + // type the search is trying to satisfy — which is always concrete. + val allowsTypeCon = typeOf[Allows[_, _]].typeConstructor + + // Returns Some((A, S)) only when both type arguments are fully concrete — + // i.e. neither is an unresolved type parameter (typeSymbol.isParameter is true + // for type-parameter symbols such as the A and S in derived[S, A], but false + // for concrete types including abstract classes used as grammar nodes). + def isConcrete(t: Type): Boolean = + !t.typeSymbol.isParameter + + def typeArgsFromPt(pt: Type): Option[(Type, Type)] = { + val dealiased = pt.dealias + if (dealiased.typeConstructor =:= allowsTypeCon && dealiased.typeArgs.length == 2) { + val a = dealiased.typeArgs.head + val s = dealiased.typeArgs.last + if (isConcrete(a) && isConcrete(s)) Some((a, s)) else None + } else None + } + + val (rootTpe0, sTpe): (Type, Type) = { + val fromApp: Option[(Type, Type)] = + Option(c.macroApplication.tpe).flatMap(t => typeArgsFromPt(t)) + val fromImplicits: Option[(Type, Type)] = + c.enclosingImplicits.headOption.flatMap(ic => typeArgsFromPt(ic.pt)) + fromApp + .orElse(fromImplicits) + .getOrElse( + c.abort( + c.enclosingPosition, + "Allows macro: could not determine concrete type arguments A and S. " + + "Make sure the full Allows[A, S] type is fully inferred at the call site." + ) + ) + } val primitiveBase = typeOf[Allows.Primitive] val dynamicBase = typeOf[Allows.Dynamic] diff --git a/schema/shared/src/test/scala/zio/blocks/schema/comptime/AllowsSpec.scala b/schema/shared/src/test/scala/zio/blocks/schema/comptime/AllowsSpec.scala index 02aad4040d..ceac752b83 100644 --- a/schema/shared/src/test/scala/zio/blocks/schema/comptime/AllowsSpec.scala +++ b/schema/shared/src/test/scala/zio/blocks/schema/comptime/AllowsSpec.scala @@ -153,6 +153,28 @@ object AllowsFixtures { object OrderRow { implicit val schema: Schema[OrderRow] = Schema.derived } } +// --------------------------------------------------------------------------- +// Regression: issue #1145 +// Reproduced verbatim from the bug report. DO NOT MODIFY THIS OBJECT. +// Allows must be derivable when A is inferred at a call site in a user object +// that imports Allows._ (wildcard) from an external namespace. +// --------------------------------------------------------------------------- + +object Issue1145Reproducer { + import zio.blocks.schema.Schema + import zio.blocks.schema.comptime.Allows + import Allows._ + + def writeCsv[A: Schema](rows: Seq[A])(implicit ev: Allows[A, Record[Primitive | Optional[Primitive]]]): Unit = () + + final case class Person(age: Int) + object Person { + implicit val schema: Schema[Person] = Schema.derived + } + + val result: Unit = writeCsv(Seq(Person(42))) +} + // --------------------------------------------------------------------------- // Positive tests (positive evidence derivation must compile) // Use `implicitly` which works on both Scala 2 and Scala 3. @@ -400,6 +422,12 @@ object AllowsSpec extends SchemaBaseSpec { val nilUUID = new java.util.UUID(0L, 0L) val event: DomainEvent = AccountOpened(nilUUID, "Alice") assertTrue(publish(event) == "ok") + }, + // Regression test for https://github.com/zio/zio-blocks/issues/1145 + // Issue1145Reproducer is a top-level object with `import Allows._` (wildcard), + // exactly matching the reporter's code. The val initialiser is evaluated here. + test("Allows is derivable with inferred A under wildcard import (issue #1145)") { + assertTrue(Issue1145Reproducer.result == (())) } ) ) From c97beafeb189cd1d9559edc66f6b00dc894f1b44 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 23:13:55 +0330 Subject: [PATCH 06/36] docs(allows): add structural type definition --- docs/reference/allows.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index ff2c05bd00..b465606aef 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -7,6 +7,14 @@ title: "Allows" `Allows` does **not** require or use `Schema[A]`. It inspects the Scala type structure of `A` directly at compile time, using nothing but the Scala type system. Any `Schema[A]` that appears alongside `Allows` in examples is the library author's own separate constraint — it is not imposed by `Allows` itself. +```scala +sealed abstract class Allows[A, S <: Allows.Structural] +``` + +## Overview + +The gap `Allows` fills is **structural preconditions** at the call site, at compile time, with precise error messages. + ## Motivation ZIO Blocks (ZIO Schema 2) gives library authors a powerful way to build data-oriented DSLs. A library can accept `A: Schema` and use the schema at runtime to serialize, deserialize, query, or transform values of `A`. But `Allows` is useful even without a Schema — it can enforce structural preconditions on *any* generic function. From 9548af800e65b3b6cdac73e68405790702131ab9 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 23:19:26 +0330 Subject: [PATCH 07/36] docs(allows): add Creating Instances section --- docs/reference/allows.md | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index b465606aef..30fdc379df 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -39,6 +39,37 @@ Today, these constraints can only be checked at runtime, producing confusing err // UserRow has no Option fields — the Optional branch is simply never needed. ``` +## Creating Instances + +`Allows[A, S]` is not instantiated directly. Instead, you summon an evidence value at the point where you need the constraint. The macro automatically verifies the constraint at compile time. + +In **Scala 3**, use the `using` syntax to summon an implicit: + +```scala mdoc:compile-only +import zio.blocks.schema.comptime.Allows +import Allows._ + +def toJson[A](doc: A)(using Allows[A, Record[Primitive]]): String = ??? + +// Calling the function: +case class Person(name: String, age: Int) +val json = toJson(Person("Alice", 30)) // Compiles if Person satisfies Record[Primitive] +``` + +In **Scala 2**, use `implicit` parameter with `implicitly`: + +```scala mdoc:compile-only +import zio.blocks.schema.comptime.Allows +import Allows._ + +def toJson[A](doc: A)(implicit ev: Allows[A, Record[Primitive]]): String = ??? + +// Or summon at the call site: +val evidence = implicitly[Allows[Int, Primitive]] +``` + +The constraint is checked once, at the call site. If the type `A` does not satisfy `S`, you get a compile-time error with a precise message showing exactly which field violates the grammar. + ## Grammar Nodes All grammar nodes extend `Allows.Structural`. From 8697b7a28db6150cc48f4656808a1bb0036169d4 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 23:19:40 +0330 Subject: [PATCH 08/36] docs(allows): add Core Operations section --- docs/reference/allows.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 30fdc379df..d001198b98 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -103,6 +103,16 @@ All grammar nodes extend `Allows.Structural`. Every specific `Primitive.Xxx` node also satisfies the catch-all `Primitive`. This means a type annotated with `Primitive.Int` is valid wherever `Primitive` or `Primitive | Primitive.Long` is required. +## Core Operations + +`Allows[A, S]` is a **proof token**, not an ordinary value. It carries zero public methods that you call directly. Instead, you use it in three ways: + +1. **As a constraint in function signatures** — Declare `Allows[A, S]` as an implicit/using parameter to require that callers pass only types satisfying the grammar. +2. **To summon evidence** — Use `implicitly[Allows[A, S]]` (Scala 2) or `summon[Allows[A, S]]` (Scala 3) at a call site to check the constraint and get an error message if it fails. +3. **In type aliases** — Define type aliases like `type FlatRecord = Allows[?, Record[Primitive | Optional[Primitive]]]` to name constraints and reuse them across functions. + +The macro that powers `Allows` checks the constraint **at compile time** and emits nothing but a reference to a single private singleton at runtime, so there is zero per-call-site overhead. + ## Specific Primitives The `Primitive` parent class is the catch-all: it accepts any of the 30 Schema 2 primitive types. For stricter control — such as when the target serialisation format only supports a subset — use the specific subtype nodes in `Allows.Primitive`: From 4392469bafa846596b8fa33a3175124e3546a33d Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 23:19:53 +0330 Subject: [PATCH 09/36] docs(allows): add Integration section with Schema --- docs/reference/allows.md | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index d001198b98..80a24a7783 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -400,6 +400,40 @@ val ev: Allows[EmptyEvent.type, Record[Primitive]] = implicitly // vacuously tr Both Scala versions produce the same macro behavior and the same error messages. +## Integration with Schema + +`Allows` and `Schema` are complementary but independent: + +- **`Schema[A]`** describes what an `A` looks like at runtime — how to serialize, deserialize, introspect, or transform it. It requires explicit derivation and handles the full type signature. +- **`Allows[A, S]`** describes what an `A` *may* look like at compile time — a structural grammar that `A` must satisfy. It requires no schema and uses only the Scala type system. + +You can use `Allows` **without** `Schema`: + +```scala mdoc:compile-only +import zio.blocks.schema.comptime.Allows +import Allows._ + +// Pure shape constraint, no Schema required +def writeCsv[A](rows: Seq[A])(using Allows[A, Record[Primitive | Optional[Primitive]]]): Unit = ??? +``` + +Or combine them when runtime encoding **and** shape validation are both needed: + +```scala mdoc:compile-only +import zio.blocks.schema.Schema +import zio.blocks.schema.comptime.Allows +import Allows._ + +// Shape constraint + runtime encoding +def writeCsv[A: Schema](rows: Seq[A])(using + Allows[A, Record[Primitive | Optional[Primitive]]] +): Unit = ??? +``` + +When combined, `Allows` enforces the structural guarantee that `Schema` can use — for example, a CSV serializer can assume that every field is a primitive or optional primitive and skip defensive type checks. + +See [Schema](./schema.md) for more on runtime encoding and decoding with schemas. + ## Running the Examples All code from this guide is available as runnable examples in the `schema-examples` module. From d81765c3f3be7bfa4ebe74ca52019fa0978a8ca0 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 23:23:20 +0330 Subject: [PATCH 10/36] docs(allows): fix prose introduction for error message and code blocks --- docs/reference/allows.md | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 80a24a7783..9dccb6f120 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -167,7 +167,7 @@ def toJson[A](doc: A)(using Allows[A, Json]): String = ??? `Self` recurses back to `Json` at every nested position, so `List[String]` satisfies `Sequence[JsonPrimitive | Self]` (String is JsonPrimitive), `List[Author]` satisfies it too (Author satisfies `Record[JsonPrimitive | Self]` via Self), and top-level arrays work directly. -A type with a UUID or Instant field fails at compile time: +A type with a UUID or Instant field fails at compile time with this error: ``` [error] Schema shape violation at WithUUID.id: found Primitive(java.util.UUID), @@ -223,7 +223,7 @@ def insert[A: Schema](value: A)(using ): String = ??? ``` -If a user passes a type with nested records, they get a precise compile-time error: +If a user passes a type with nested records, they get a precise compile-time error like this: ``` [error] Schema shape violation at UserWithAddress.address: found Record(Address), @@ -245,7 +245,7 @@ def publish[A: Schema](event: A)(using ): Unit = ??? ``` -If a case of the sealed trait has a nested record field, the error names that case and field: +If a case of the sealed trait has a nested record field, the error names that case and field like this: ``` [error] Schema shape violation at DomainEvent.OrderPlaced.items.: @@ -298,7 +298,7 @@ object TreeNode { implicit val schema: Schema[TreeNode] = Schema.derived } **Non-recursive types** satisfy `Self`-containing grammars without issue: if no field ever recurses back to the root type, the `Self` position is never reached, and the constraint is vacuously satisfied. -**Mutual recursion** between two or more distinct types is a compile-time error: +**Mutual recursion** between two or more distinct types is a compile-time error reported as: ``` [error] Mutually recursive types are not supported by Allows. @@ -307,7 +307,7 @@ object TreeNode { implicit val schema: Schema[TreeNode] = Schema.derived } ## `Wrapped[A]` and Newtypes -The `Wrapped[A]` node matches ZIO Prelude `Newtype` and `Subtype` wrappers. The underlying type must satisfy `A`. +The `Wrapped[A]` node matches ZIO Prelude `Newtype` and `Subtype` wrappers. The underlying type must satisfy `A`. Here's an example: ```scala // ZIO Prelude Newtype pattern: @@ -322,7 +322,7 @@ given Schema[ProductCode] = val ev: Allows[ProductCode, Wrapped[Primitive]] = implicitly ``` -**Scala 3 opaque types** are resolved to their underlying type by the macro (they are transparent), so `opaque type UserId = UUID` satisfies `Primitive` (not `Wrapped[Primitive]`): +**Scala 3 opaque types** are resolved to their underlying type by the macro (they are transparent), so an opaque alias like this satisfies `Primitive` directly: ```scala opaque type UserId = java.util.UUID @@ -359,9 +359,7 @@ When a type does not satisfy the grammar, the macro reports: 3. **What was required**: `Primitive | Sequence[Primitive]` 4. **A hint** where applicable -Multiple violations are reported in a single compilation pass — the user sees all problems at once. - -Example: +Multiple violations are reported in a single compilation pass — the user sees all problems at once, for example: ``` [error] Schema shape violation at UserWithAddress.address: found Record(Address), From 64d0a720136c94f79297ce34350e6cc7abdda014 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 23:23:32 +0330 Subject: [PATCH 11/36] docs(allows): remove redundancy from Motivation section --- docs/reference/allows.md | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 9dccb6f120..193457a9cb 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -17,18 +17,16 @@ The gap `Allows` fills is **structural preconditions** at the call site, at comp ## Motivation -ZIO Blocks (ZIO Schema 2) gives library authors a powerful way to build data-oriented DSLs. A library can accept `A: Schema` and use the schema at runtime to serialize, deserialize, query, or transform values of `A`. But `Allows` is useful even without a Schema — it can enforce structural preconditions on *any* generic function. +ZIO Blocks (ZIO Schema 2) gives library authors a powerful way to build data-oriented DSLs. A library can accept `A: Schema` and use the schema at runtime to serialize, deserialize, query, or transform values of `A`. But many generic functions have **structural preconditions** that don't require a schema. -The gap is **structural preconditions**. Many generic functions only make sense for a subset of types: +Consider these real-world scenarios: -- A CSV serializer requires flat records of scalars. -- An RDBMS layer cannot handle nested records as column values. -- An event bus expects a sealed trait of flat record cases. -- A JSON document store allows arbitrarily nested records but not `DynamicValue` leaves. +- A CSV serializer requires flat records of scalars — nested records should fail at the call site, not deep inside the serializer. +- An RDBMS layer cannot handle nested records as column values — the error should name the problematic field. +- An event bus expects a sealed trait of flat record cases — violations should be caught before publishing. +- A JSON document store allows arbitrarily nested records but not `DynamicValue` leaves — the schema validation should be precise. -Today, these constraints can only be checked at runtime, producing confusing errors deep inside library internals. - -`Allows[A, S]` closes this gap: the constraint is verified at the **call site**, at compile time, with precise, path-aware error messages and concrete fix suggestions. +Without `Allows`, these constraints can only be checked at runtime, producing confusing errors deep inside library internals. With `Allows[A, S]`, the constraint is verified at the **call site**, at compile time, with precise, path-aware error messages and concrete fix suggestions. ## The Upper Bound Semantics From 6a674068042ecb74c71654bd5aa52e1e0bb2fe0e Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 23:31:45 +0330 Subject: [PATCH 12/36] docs(allows): fix mdoc modifiers and Scala 2/3 syntax consistency --- docs/reference/allows.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 193457a9cb..0466f18812 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -190,12 +190,12 @@ def writeCsv[A](rows: Seq[A])(using **Scala 2** uses the `` `\|` `` infix operator from `Allows`: -```scala +```scala mdoc:compile-only import zio.blocks.schema.comptime.Allows import Allows._ def writeCsv[A](rows: Seq[A])(implicit - ev: Allows[A, Record[Primitive | Optional[Primitive]]] + ev: Allows[A, Record[Primitive `\|` Optional[Primitive]]] ): Unit = ??? ``` @@ -262,7 +262,7 @@ import Allows._ type JsonDocument = Record[Primitive | Self | Optional[Primitive | Self] | Sequence[Primitive | Self] | Allows.Map[Primitive, Primitive | Self]] -def toJson[A: Schema](doc: A)(implicit ev: Allows[A, JsonDocument]): String = ??? +def toJson[A: Schema](doc: A)(using Allows[A, JsonDocument]): String = ??? ``` This grammar allows: @@ -307,7 +307,7 @@ object TreeNode { implicit val schema: Schema[TreeNode] = Schema.derived } The `Wrapped[A]` node matches ZIO Prelude `Newtype` and `Subtype` wrappers. The underlying type must satisfy `A`. Here's an example: -```scala +```scala mdoc:compile-only // ZIO Prelude Newtype pattern: import zio.prelude.Newtype object ProductCode extends Newtype[String] @@ -322,7 +322,7 @@ val ev: Allows[ProductCode, Wrapped[Primitive]] = implicitly **Scala 3 opaque types** are resolved to their underlying type by the macro (they are transparent), so an opaque alias like this satisfies `Primitive` directly: -```scala +```scala mdoc:compile-only opaque type UserId = java.util.UUID // UserId satisfies Allows[UserId, Primitive] — resolved to UUID (a primitive) ``` From e121865fbbc57044240733c194d958895ba495d2 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 23:32:50 +0330 Subject: [PATCH 13/36] docs(allows): fix code block compilation errors (escape and imports) --- docs/reference/allows.md | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 0466f18812..07bcb1ffeb 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -188,14 +188,14 @@ def writeCsv[A](rows: Seq[A])(using ): Unit = ??? ``` -**Scala 2** uses the `` `\|` `` infix operator from `Allows`: +**Scala 2** uses the infix operator `` Primitive `|` Optional[Primitive] `` from `Allows`: ```scala mdoc:compile-only import zio.blocks.schema.comptime.Allows import Allows._ def writeCsv[A](rows: Seq[A])(implicit - ev: Allows[A, Record[Primitive `\|` Optional[Primitive]]] + ev: Allows[A, Record[Primitive | Optional[Primitive]]] ): Unit = ??? ``` @@ -308,8 +308,12 @@ object TreeNode { implicit val schema: Schema[TreeNode] = Schema.derived } The `Wrapped[A]` node matches ZIO Prelude `Newtype` and `Subtype` wrappers. The underlying type must satisfy `A`. Here's an example: ```scala mdoc:compile-only -// ZIO Prelude Newtype pattern: import zio.prelude.Newtype +import zio.blocks.schema.Schema +import zio.blocks.schema.comptime.Allows +import Allows._ + +// ZIO Prelude Newtype pattern: object ProductCode extends Newtype[String] type ProductCode = ProductCode.Type From 8cabc8ce4128bc2d94b66db5cee173bd2c713efb Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Fri, 6 Mar 2026 23:43:30 +0330 Subject: [PATCH 14/36] docs: integrate allows.md into sidebar and documentation index --- docs/index.md | 1 + docs/sidebars.js | 1 + 2 files changed, 2 insertions(+) diff --git a/docs/index.md b/docs/index.md index 2ddc490bbe..d21bb11e2d 100644 --- a/docs/index.md +++ b/docs/index.md @@ -536,6 +536,7 @@ ZIO Blocks supports **Scala 2.13** and **Scala 3.x** with full source compatibil ### Core Schema Concepts - [Schema](./reference/schema.md) - Core schema definitions and derivation +- [Allows](./reference/allows.md) - Compile-time structural grammar constraints - [Reflect](./reference/reflect.md) - Structural reflection API - [Binding](./reference/binding.md) - Runtime constructors and deconstructors - [BindingResolver](./reference/binding-resolver.md) - Binding lookup and schema rebinding diff --git a/docs/sidebars.js b/docs/sidebars.js index 8dd7de0297..4ec40a2d0c 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -7,6 +7,7 @@ const sidebars = { link: { type: "doc", id: "index" }, items: [ "reference/schema", + "reference/allows", "reference/reflect", "reference/binding", "reference/binding-resolver", From 3217cd35dd80ed86891575056e81402f1fc99ec7 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 06:34:38 +0330 Subject: [PATCH 15/36] docs: add scalafmt lint step to documentation skills Add mandatory lint checks (sbt fmt + sbt check) to documentation skills that create Scala example files: - docs-data-type-ref: lint step between Step 3 (Write Examples) and Step 5 (Integrate) - docs-how-to-guide: lint step 4f after example compilation - docs-document-pr: Phase 6 for verification after documentation generation This prevents CI failures due to unformatted Scala files in schema-examples/. Co-Authored-By: Claude Haiku 4.5 --- .claude/skills/docs-data-type-ref/SKILL.md | 27 +++++++++++++++++++-- .claude/skills/docs-document-pr/SKILL.md | 28 ++++++++++++++++++++++ .claude/skills/docs-how-to-guide/SKILL.md | 23 ++++++++++++++++++ 3 files changed, 76 insertions(+), 2 deletions(-) diff --git a/.claude/skills/docs-data-type-ref/SKILL.md b/.claude/skills/docs-data-type-ref/SKILL.md index 882319d03e..62af6bfdc1 100644 --- a/.claude/skills/docs-data-type-ref/SKILL.md +++ b/.claude/skills/docs-data-type-ref/SKILL.md @@ -385,7 +385,30 @@ object IntoSchemaEvolutionExample extends App { } ``` -## Step 4: Integrate +## Step 4: Lint Check (Mandatory Before Integration) + +After creating all example files, ensure all Scala files pass the CI formatting gate: + +```bash +sbt fmt +``` + +If any files were reformatted, commit the changes immediately: + +```bash +git add -A +git commit -m "docs(): apply scalafmt to examples" +``` + +Verify the CI lint gate locally: + +```bash +sbt check +``` + +**Success criterion:** zero formatting violations reported. + +## Step 5: Integrate See the **`docs-integrate`** skill for the complete integration checklist (sidebars.js, index.md, cross-references, link verification). @@ -393,7 +416,7 @@ cross-references, link verification). Additional note for reference pages: if creating a new file, place it in the appropriate `docs/reference/` subdirectory based on where it logically belongs. -## Step 5: Review and Verify Compilation +## Step 6: Review and Verify Compilation After writing, re-read the document and verify: - All method signatures match the actual source code diff --git a/.claude/skills/docs-document-pr/SKILL.md b/.claude/skills/docs-document-pr/SKILL.md index 05538c47aa..2ae2bff9e6 100644 --- a/.claude/skills/docs-document-pr/SKILL.md +++ b/.claude/skills/docs-document-pr/SKILL.md @@ -272,6 +272,33 @@ Once documentation is written, tell the user: --- +## Phase 6: Verify Lint (If Examples Created) + +If documentation involved creating or modifying `.scala` example files in `schema-examples/`, verify that all Scala code passes the CI formatting gate before reporting completion: + +```bash +sbt fmt +``` + +If any files were reformatted, commit the changes: + +```bash +git add -A +git commit -m "docs(): apply scalafmt to examples" +``` + +Then verify the CI lint gate locally: + +```bash +sbt check +``` + +**Success criterion:** zero formatting violations reported. + +**If no `.scala` files were created or modified**, skip this phase. + +--- + ## Implementation Checklist When you invoke this skill: @@ -285,6 +312,7 @@ When you invoke this skill: - [ ] **Phase 3c:** If subsection → manually edit existing page, consult `docs-writing-style` and `docs-mdoc-conventions` skills - [ ] **Phase 4:** If new page → invoke `docs-integrate` skill to update sidebar - [ ] **Phase 5:** Report findings and file paths to user +- [ ] **Phase 6:** If `.scala` examples were created, run `sbt fmt` and `sbt check` to verify lint compliance --- diff --git a/.claude/skills/docs-how-to-guide/SKILL.md b/.claude/skills/docs-how-to-guide/SKILL.md index a852a48dc9..a49105fc8c 100644 --- a/.claude/skills/docs-how-to-guide/SKILL.md +++ b/.claude/skills/docs-how-to-guide/SKILL.md @@ -420,6 +420,29 @@ sbt "schema-examples/compile" If any example fails to compile, fix it before proceeding. The examples must compile successfully. +### 4f. Lint Check (Mandatory Before Integration) + +After all examples compile, run Scalafmt to ensure all Scala files pass the CI formatting gate: + +```bash +sbt fmt +``` + +If any files were reformatted, commit the changes immediately: + +```bash +git add -A +git commit -m "docs(): apply scalafmt to examples" +``` + +Verify the CI lint gate locally: + +```bash +sbt check +``` + +**Success criterion:** zero formatting violations reported. + --- ## Step 5: Integrate From 3f938a299830ec1aa2533aa7031ee94105d6f223 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 06:36:18 +0330 Subject: [PATCH 16/36] docs(allows): apply scalafmt to examples --- .../scala/comptime/AllowsCsvExample.scala | 4 +-- .../comptime/AllowsEventBusExample.scala | 22 +++++++----- .../comptime/AllowsGraphQLTreeExample.scala | 35 ++++++++++++------- .../comptime/AllowsSealedTraitExample.scala | 10 +++--- .../src/main/scala/SourceFile.scala | 2 +- 5 files changed, 44 insertions(+), 29 deletions(-) diff --git a/schema-examples/src/main/scala/comptime/AllowsCsvExample.scala b/schema-examples/src/main/scala/comptime/AllowsCsvExample.scala index 3959587427..8dd6c63c4e 100644 --- a/schema-examples/src/main/scala/comptime/AllowsCsvExample.scala +++ b/schema-examples/src/main/scala/comptime/AllowsCsvExample.scala @@ -29,7 +29,7 @@ object CsvSerializer { def toCsv[A](rows: Seq[A])(implicit schema: Schema[A], ev: Allows[A, Record[FlatRow]]): String = { val reflect = schema.reflect.asRecord.get val header = reflect.fields.map(_.name).mkString(",") - val lines = rows.map { row => + val lines = rows.map { row => val dv = schema.toDynamicValue(row) dv match { case DynamicValue.Record(fields) => @@ -52,7 +52,7 @@ object CsvSerializer { case DynamicValue.Null => "" case DynamicValue.Variant(tag, inner) if tag == "Some" => dvToString(inner) case DynamicValue.Variant(tag, _) if tag == "None" => "" - case DynamicValue.Record(fields) => + case DynamicValue.Record(fields) => fields.headOption.map { case (_, v) => dvToString(v) }.getOrElse("") case other => other.toString } diff --git a/schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala b/schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala index ce64c02fb3..fe44f3bfd5 100644 --- a/schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala +++ b/schema-examples/src/main/scala/comptime/AllowsEventBusExample.scala @@ -26,12 +26,12 @@ object AccountEvent { implicit val schema: Schema[AccountEvent] = Schema.derived // Nested sealed trait — InventoryEvent has a sub-hierarchy sealed trait InventoryEvent -case class ItemAdded(sku: String, quantity: Int) extends InventoryEvent -case class ItemRemoved(sku: String, quantity: Int) extends InventoryEvent +case class ItemAdded(sku: String, quantity: Int) extends InventoryEvent +case class ItemRemoved(sku: String, quantity: Int) extends InventoryEvent -sealed trait InventoryAlert extends InventoryEvent -case class LowStock(sku: String, remaining: Int) extends InventoryAlert -case class OutOfStock(sku: String) extends InventoryAlert +sealed trait InventoryAlert extends InventoryEvent +case class LowStock(sku: String, remaining: Int) extends InventoryAlert +case class OutOfStock(sku: String) extends InventoryAlert object InventoryEvent { implicit val schema: Schema[InventoryEvent] = Schema.derived } @@ -45,9 +45,11 @@ object EventBus { type EventShape = Primitive | AOptional[Primitive] - /** Publish a domain event. All cases of the sealed trait must be flat records. */ + /** + * Publish a domain event. All cases of the sealed trait must be flat records. + */ def publish[A](event: A)(implicit schema: Schema[A], ev: Allows[A, Record[EventShape]]): String = { - val dv = schema.toDynamicValue(event) + val dv = schema.toDynamicValue(event) val (typeName, payload) = dv match { case DynamicValue.Variant(name, inner) => (name, inner.toJson.toString) case _ => (schema.reflect.typeId.name, dv.toJson.toString) @@ -55,12 +57,14 @@ object EventBus { s"PUBLISH topic=${schema.reflect.typeId.name} type=$typeName payload=$payload" } - /** Publish events that may contain sequence fields (e.g. batch operations). */ + /** + * Publish events that may contain sequence fields (e.g. batch operations). + */ def publishBatch[A](event: A)(implicit schema: Schema[A], ev: Allows[A, Record[Primitive | Sequence[Primitive]]] ): String = { - val dv = schema.toDynamicValue(event) + val dv = schema.toDynamicValue(event) val (typeName, payload) = dv match { case DynamicValue.Variant(name, inner) => (name, inner.toJson.toString) case _ => (schema.reflect.typeId.name, dv.toJson.toString) diff --git a/schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala b/schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala index 223c25371c..60b940b54e 100644 --- a/schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala +++ b/schema-examples/src/main/scala/comptime/AllowsGraphQLTreeExample.scala @@ -40,7 +40,7 @@ object GraphQL { /** Generate a simplified GraphQL type definition for a recursive type. */ def graphqlType[A](implicit schema: Schema[A], ev: Allows[A, Record[TreeShape]]): String = { val reflect = schema.reflect.asRecord.get - val fields = reflect.fields.map { f => + val fields = reflect.fields.map { f => s" ${f.name}: ${gqlType(resolve(f.value), schema.reflect.typeId.name)}" } s"type ${schema.reflect.typeId.name} {\n${fields.mkString("\n")}\n}" @@ -54,7 +54,7 @@ object GraphQL { private def gqlType(r: Reflect.Bound[_], selfName: String): String = r match { case _: Reflect.Sequence[_, _, _] => s"[$selfName]" - case p: Reflect.Primitive[_, _] => + case p: Reflect.Primitive[_, _] => p.primitiveType match { case PrimitiveType.Int(_) => "Int" case PrimitiveType.Long(_) => "Int" @@ -87,17 +87,28 @@ object AllowsGraphQLTreeExample extends App { show(GraphQL.graphqlType[FlatNode]) // Show that recursive data actually works at runtime - val tree = TreeNode(1, List( - TreeNode(2, List(TreeNode(4, Nil), TreeNode(5, Nil))), - TreeNode(3, Nil) - )) + val tree = TreeNode( + 1, + List( + TreeNode(2, List(TreeNode(4, Nil), TreeNode(5, Nil))), + TreeNode(3, Nil) + ) + ) show(Schema[TreeNode].toDynamicValue(tree).toJson.toString) - val nav = NavCategory("Electronics", "electronics", List( - NavCategory("Phones", "phones", Nil), - NavCategory("Laptops", "laptops", List( - NavCategory("Gaming", "gaming", Nil) - )) - )) + val nav = NavCategory( + "Electronics", + "electronics", + List( + NavCategory("Phones", "phones", Nil), + NavCategory( + "Laptops", + "laptops", + List( + NavCategory("Gaming", "gaming", Nil) + ) + ) + ) + ) show(Schema[NavCategory].toDynamicValue(nav).toJson.toString) } diff --git a/schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala b/schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala index 22bb512b67..4c613388a3 100644 --- a/schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala +++ b/schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala @@ -28,11 +28,11 @@ object Shape { implicit val schema: Schema[Shape] = Schema.derived } // Nested sealed trait hierarchy — two levels deep sealed trait Expr -sealed trait BinaryOp extends Expr -case class Add(left: Double, right: Double) extends BinaryOp -case class Multiply(left: Double, right: Double) extends BinaryOp -case class Literal(value: Double) extends Expr -case object Zero extends Expr +sealed trait BinaryOp extends Expr +case class Add(left: Double, right: Double) extends BinaryOp +case class Multiply(left: Double, right: Double) extends BinaryOp +case class Literal(value: Double) extends Expr +case object Zero extends Expr object Expr { implicit val schema: Schema[Expr] = Schema.derived } // All-singleton enum (all case objects) diff --git a/zio-blocks-docs/src/main/scala/SourceFile.scala b/zio-blocks-docs/src/main/scala/SourceFile.scala index c41528836c..d8b057f301 100644 --- a/zio-blocks-docs/src/main/scala/SourceFile.scala +++ b/zio-blocks-docs/src/main/scala/SourceFile.scala @@ -42,7 +42,7 @@ object SourceFile { path: String, lines: Seq[(Int, Int)] = Seq.empty, showTitle: Boolean = true, - showLineNumbers: Boolean = false, + showLineNumbers: Boolean = false ) = { val title = if (showTitle) s"""title="$path"""" else "" val showLines = if (showLineNumbers) "showLineNumbers" else "" From f3569f299d4cd6d3949f956c82497d43055e00a8 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 06:56:19 +0330 Subject: [PATCH 17/36] docs(skills): stage example files before lint check to catch untracked files - Add 'git add' step before 'sbt fmtChanged' in all three documentation skills - Ensures newly created (untracked) example files are properly linted - Resolves issue where untracked files were missed by git diff-based filtering - Applies to: docs-data-type-ref, docs-how-to-guide, docs-document-pr skills Co-Authored-By: Claude Haiku 4.5 --- .claude/skills/docs-data-type-ref/SKILL.md | 5 +++-- .claude/skills/docs-document-pr/SKILL.md | 5 +++-- .claude/skills/docs-how-to-guide/SKILL.md | 5 +++-- build.sbt | 4 ++++ 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.claude/skills/docs-data-type-ref/SKILL.md b/.claude/skills/docs-data-type-ref/SKILL.md index 62af6bfdc1..4447872286 100644 --- a/.claude/skills/docs-data-type-ref/SKILL.md +++ b/.claude/skills/docs-data-type-ref/SKILL.md @@ -387,10 +387,11 @@ object IntoSchemaEvolutionExample extends App { ## Step 4: Lint Check (Mandatory Before Integration) -After creating all example files, ensure all Scala files pass the CI formatting gate: +After creating all example files, stage them in git first, then ensure all Scala files pass the CI formatting gate: ```bash -sbt fmt +git add schema-examples/src/main/scala/**/*.scala +sbt fmtChanged ``` If any files were reformatted, commit the changes immediately: diff --git a/.claude/skills/docs-document-pr/SKILL.md b/.claude/skills/docs-document-pr/SKILL.md index 2ae2bff9e6..ba6a7c0ac7 100644 --- a/.claude/skills/docs-document-pr/SKILL.md +++ b/.claude/skills/docs-document-pr/SKILL.md @@ -274,10 +274,11 @@ Once documentation is written, tell the user: ## Phase 6: Verify Lint (If Examples Created) -If documentation involved creating or modifying `.scala` example files in `schema-examples/`, verify that all Scala code passes the CI formatting gate before reporting completion: +If documentation involved creating or modifying `.scala` example files in `schema-examples/`, stage them in git first, then verify that all Scala code passes the CI formatting gate before reporting completion: ```bash -sbt fmt +git add schema-examples/src/main/scala/**/*.scala +sbt fmtChanged ``` If any files were reformatted, commit the changes: diff --git a/.claude/skills/docs-how-to-guide/SKILL.md b/.claude/skills/docs-how-to-guide/SKILL.md index a49105fc8c..774051dd2f 100644 --- a/.claude/skills/docs-how-to-guide/SKILL.md +++ b/.claude/skills/docs-how-to-guide/SKILL.md @@ -422,10 +422,11 @@ If any example fails to compile, fix it before proceeding. The examples must com ### 4f. Lint Check (Mandatory Before Integration) -After all examples compile, run Scalafmt to ensure all Scala files pass the CI formatting gate: +After all examples compile, stage them in git first, then run Scalafmt to ensure all Scala files pass the CI formatting gate: ```bash -sbt fmt +git add schema-examples/src/main/scala/**/*.scala +sbt fmtChanged ``` If any files were reformatted, commit the changes immediately: diff --git a/build.sbt b/build.sbt index 0019229f5b..9cc5d39536 100644 --- a/build.sbt +++ b/build.sbt @@ -31,6 +31,10 @@ com.github.sbt.git.SbtGit.useReadableConsoleGit addCommandAlias("build", "; fmt; coverage; root/test; coverageReport") addCommandAlias("fmt", "all root/scalafmtSbt root/scalafmtAll") addCommandAlias("fmtCheck", "all root/scalafmtSbtCheck root/scalafmtCheckAll") +addCommandAlias( + "fmtChanged", + "; set scalafmtFilter in ThisBuild := \"diff-ref=main\"; scalafmtAll; set scalafmtFilter in ThisBuild := \"\"" +) addCommandAlias("check", "; scalafmtSbtCheck; scalafmtCheckAll") addCommandAlias("mimaChecks", "all schemaJVM/mimaReportBinaryIssues") addCommandAlias( From 8d30f53977ef8fe8ff256b01d99b8b8a13c0e576 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 07:06:56 +0330 Subject: [PATCH 18/36] docs(allows): clarify compile-only examples and remove misleading sbt runMain hint Address Copilot review comment: The RDBMS and DocumentStore examples are compile-only demonstrations of Allows constraints, not runnable Apps. Remove runMain commands and add explicit '(compile-only)' labels. Update section text to clarify these examples show compile-time validation patterns. This prevents user confusion about why sbt runMain would fail on files that don't extend App. Co-Authored-By: Claude Haiku 4.5 --- docs/reference/allows.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 07bcb1ffeb..7cd8ae259d 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -499,18 +499,22 @@ import docs.SourceFile SourceFile.print("schema-examples/src/main/scala/comptime/AllowsSealedTraitExample.scala") ``` -**RDBMS library with CREATE TABLE and INSERT using flat record constraints** +**RDBMS library with CREATE TABLE and INSERT using flat record constraints** (compile-only) ([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/RdbmsExample.scala)) +Demonstrates how Allows constraints are verified at compile time — the code below shows valid examples that compile successfully, and includes comments showing which patterns would be rejected: + ```scala mdoc:passthrough import docs.SourceFile SourceFile.print("schema-examples/src/main/scala/comptime/RdbmsExample.scala") ``` -**JSON document store with specific primitives and recursive Self grammar** +**JSON document store with specific primitives and recursive Self grammar** (compile-only) ([source](https://github.com/zio/zio-blocks/blob/main/schema-examples/src/main/scala/comptime/DocumentStoreExample.scala)) +Demonstrates how Allows enforces recursive schema constraints at compile time: + ```scala mdoc:passthrough import docs.SourceFile From fc8c1d34deed1bbc8bc1492c1cfc8b53084cf2aa Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 07:07:03 +0330 Subject: [PATCH 19/36] fix(docs): improve SourceFile resource management and error handling Address three related Copilot review comments: 1. Fix resource leak: Wrap Source.fromFile() with Using.resource to ensure file handles are reliably closed in all code paths, including when slicing specific line ranges. 2. Narrow exception handling: Replace overly broad 'catch Throwable' with 'catch NonFatal' to avoid silently hiding unexpected failures like InterruptedException or VirtualMachineError. 3. Optimize file reads: Read the entire file once into a Vector at the start, then slice multiple line ranges from the in-memory copy. Previously, the file was unnecessarily re-opened and fully read for each (from, to) range. Combined, these changes eliminate file handle leaks, improve error visibility, and reduce I/O overhead for multi-range operations. Co-Authored-By: Claude Haiku 4.5 --- .../src/main/scala/SourceFile.scala | 32 ++++++++++--------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/zio-blocks-docs/src/main/scala/SourceFile.scala b/zio-blocks-docs/src/main/scala/SourceFile.scala index d8b057f301..9131330bca 100644 --- a/zio-blocks-docs/src/main/scala/SourceFile.scala +++ b/zio-blocks-docs/src/main/scala/SourceFile.scala @@ -1,30 +1,32 @@ package docs import scala.io.Source +import scala.util.Using +import scala.util.control.NonFatal object SourceFile { def read(path: String, lines: Seq[(Int, Int)]): String = { - def readFile(path: String) = + def openSource(path: String): Source = try { Source.fromFile("../" + path) } catch { - case _: Throwable => Source.fromFile(path) + case NonFatal(_) => Source.fromFile(path) } - if (lines.isEmpty) { - val content = readFile(path).getLines().mkString("\n") - content - } else { - val chunks = for { - (from, to) <- lines - } yield readFile(path) - .getLines() - .toArray[String] - .slice(from - 1, to) - .mkString("\n") - - chunks.mkString("\n\n") + Using.resource(openSource(path)) { source => + val allLines = source.getLines().toVector + if (lines.isEmpty) { + allLines.mkString("\n") + } else { + val chunks = for { + (from, to) <- lines + } yield allLines + .slice(from - 1, to) + .mkString("\n") + + chunks.mkString("\n\n") + } } } From 240f91baff20e0fffa33c599307b5eb1acc7e3e8 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 14:35:34 +0330 Subject: [PATCH 20/36] docs(allows): explain capability token concept --- docs/reference/allows.md | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 5b94f0c196..9955ae7526 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -3,7 +3,7 @@ id: allows title: "Allows" --- -`Allows[A, S]` is a compile-time capability token that proves, at the call site, that type `A` satisfies the structural grammar `S`. +`Allows[A, S]` is a compile-time capability token that proves, at the call site, that type `A` satisfies the structural grammar `S`. A capability token is a compile-time phantom proof value — it carries no runtime data and exists solely to pass evidence through the type system that a structural constraint has been satisfied. `Allows` does **not** require or use `Schema[A]`. It inspects the Scala type structure of `A` directly at compile time, using nothing but the Scala type system. Any `Schema[A]` that appears alongside `Allows` in examples is the library author's own separate constraint — it is not imposed by `Allows` itself. @@ -15,10 +15,15 @@ sealed abstract class Allows[A, S <: Allows.Structural] The gap `Allows` fills is **structural preconditions** at the call site, at compile time, with precise error messages. +[//]: # (explain what structural preconditions are, and how they differ from runtime checks) + ## Motivation ZIO Blocks (ZIO Schema 2) gives library authors a powerful way to build data-oriented DSLs. A library can accept `A: Schema` and use the schema at runtime to serialize, deserialize, query, or transform values of `A`. But many generic functions have **structural preconditions** that don't require a schema. +[//]: # (Do not use ZIO Schema 2 as alternative to ZIO Blocks) +[//]: # (Explain what data-oriented DSLs are, and how they differ from ordinary APIs) + Consider these real-world scenarios: - A CSV serializer requires flat records of scalars — nested records should fail at the call site, not deep inside the serializer. @@ -26,17 +31,23 @@ Consider these real-world scenarios: - An event bus expects a sealed trait of flat record cases — violations should be caught before publishing. - A JSON document store allows arbitrarily nested records but not `DynamicValue` leaves — the schema validation should be precise. +[//]: # (Explain why json document shouldn't have DynamicValue leaves) + Without `Allows`, these constraints can only be checked at runtime, producing confusing errors deep inside library internals. With `Allows[A, S]`, the constraint is verified at the **call site**, at compile time, with precise, path-aware error messages and concrete fix suggestions. ## The Upper Bound Semantics `Allows[A, S]` is an upper bound. A type `A` that uses only a strict subset of what `S` permits also satisfies it — just as `A <: Foo` does not require that `A` uses every method of `Foo`. +[//]: # (Explain why upper bound semantics is the right choice for a grammar constraint and why the Allows[A, S] is an upper bound rather than a lower bound or exact match) + ```scala // Allows[UserRow, Record[Primitive | Optional[Primitive]]] is satisfied even if // UserRow has no Option fields — the Optional branch is simply never needed. ``` +[//]: # (The above code blocks doesn't have enough context to be meaningful on its own — consider adding a more complete example showing the upper bound semantics in action) + ## Creating Instances `Allows[A, S]` is not instantiated directly. Instead, you summon an evidence value at the point where you need the constraint. The macro automatically verifies the constraint at compile time. @@ -66,6 +77,8 @@ def toJson[A](doc: A)(implicit ev: Allows[A, Record[Primitive]]): String = ??? val evidence = implicitly[Allows[Int, Primitive]] ``` +[//]: # (Please research abot tabbed code blocks: https://docusaurus.io/docs/markdown-features/tabs and plan how to use them to show Scala 2 and Scala 3 examples side by side) + The constraint is checked once, at the call site. If the type `A` does not satisfy `S`, you get a compile-time error with a precise message showing exactly which field violates the grammar. ## Grammar Nodes @@ -105,8 +118,12 @@ All grammar nodes extend `Allows.Structural`. | `Self` | Recursive self-reference back to the entire enclosing `Allows[A, S]` grammar | | `` `\|` `` | Union of two grammar nodes: `A \| B`. In Scala 2 write `` A `\|` B `` in infix position. | +[//]: # (Please reconsider the presentation of the grammar nodes - is it required to list them? if so, should we list all of them? Why it is good to show them in documentation? then decide how what to include/not include here) + Every specific `Primitive.Xxx` node also satisfies the catch-all `Primitive`. This means a type annotated with `Primitive.Int` is valid wherever `Primitive` or `Primitive | Primitive.Long` is required. +[//]: # (The above sentence is a bit technical and may not be clear to all readers — consider adding more context what do you mean by catch-all) + ## Core Operations `Allows[A, S]` is a **proof token**, not an ordinary value. It carries zero public methods that you call directly. Instead, you use it in three ways: From 02d42f6b52c2bf1b88928c6f2a73f5a124c86ff2 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 14:35:59 +0330 Subject: [PATCH 21/36] docs(allows): explain structural preconditions vs runtime checks --- docs/reference/allows.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 9955ae7526..dcb95ad804 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -13,9 +13,7 @@ sealed abstract class Allows[A, S <: Allows.Structural] ## Overview -The gap `Allows` fills is **structural preconditions** at the call site, at compile time, with precise error messages. - -[//]: # (explain what structural preconditions are, and how they differ from runtime checks) +The gap `Allows` fills is **structural preconditions** at the call site, at compile time, with precise error messages. Structural preconditions are constraints on the shape of a type's fields (e.g., "all fields must be scalars"), unlike runtime checks which happen during execution and produce exceptions or errors. ## Motivation From af4116428b4d96a4c10154f2149475c081e3e279 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 14:36:09 +0330 Subject: [PATCH 22/36] docs(allows): replace 'ZIO Schema 2' with 'ZIO Blocks' and explain data-oriented DSLs --- docs/reference/allows.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index dcb95ad804..6b917482f4 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -17,10 +17,7 @@ The gap `Allows` fills is **structural preconditions** at the call site, at comp ## Motivation -ZIO Blocks (ZIO Schema 2) gives library authors a powerful way to build data-oriented DSLs. A library can accept `A: Schema` and use the schema at runtime to serialize, deserialize, query, or transform values of `A`. But many generic functions have **structural preconditions** that don't require a schema. - -[//]: # (Do not use ZIO Schema 2 as alternative to ZIO Blocks) -[//]: # (Explain what data-oriented DSLs are, and how they differ from ordinary APIs) +ZIO Blocks gives library authors a powerful way to build data-oriented DSLs. A library can accept `A: Schema` and use the schema at runtime to serialize, deserialize, query, or transform values of `A`. A data-oriented DSL is a generic API built around a data description (Schema) rather than a fixed interface, allowing one function to serialize, validate, or transform any conforming type. Many generic functions have **structural preconditions** that don't require a schema. Consider these real-world scenarios: From d7bae0cf406b97b139e611ed125184ef3ab11541 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 14:36:23 +0330 Subject: [PATCH 23/36] docs(allows): explain DynamicValue in JSON context --- docs/reference/allows.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 6b917482f4..19239cf4b5 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -24,9 +24,7 @@ Consider these real-world scenarios: - A CSV serializer requires flat records of scalars — nested records should fail at the call site, not deep inside the serializer. - An RDBMS layer cannot handle nested records as column values — the error should name the problematic field. - An event bus expects a sealed trait of flat record cases — violations should be caught before publishing. -- A JSON document store allows arbitrarily nested records but not `DynamicValue` leaves — the schema validation should be precise. - -[//]: # (Explain why json document shouldn't have DynamicValue leaves) +- A JSON document store allows arbitrarily nested records but not `DynamicValue` leaves — the schema validation should be precise. DynamicValue is the schema-less escape hatch that can hold arbitrary data — a DynamicValue leaf bypasses compile-time checking entirely, making it impossible for the compiler to enforce any structural grammar. Without `Allows`, these constraints can only be checked at runtime, producing confusing errors deep inside library internals. With `Allows[A, S]`, the constraint is verified at the **call site**, at compile time, with precise, path-aware error messages and concrete fix suggestions. From 39e2c21280d33e0e4c7f085dc6c1172796e31826 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 14:36:34 +0330 Subject: [PATCH 24/36] docs(allows): explain upper bound semantics rationale --- docs/reference/allows.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 19239cf4b5..eafd675054 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -30,9 +30,7 @@ Without `Allows`, these constraints can only be checked at runtime, producing co ## The Upper Bound Semantics -`Allows[A, S]` is an upper bound. A type `A` that uses only a strict subset of what `S` permits also satisfies it — just as `A <: Foo` does not require that `A` uses every method of `Foo`. - -[//]: # (Explain why upper bound semantics is the right choice for a grammar constraint and why the Allows[A, S] is an upper bound rather than a lower bound or exact match) +`Allows[A, S]` is an upper bound. A type `A` that uses only a strict subset of what `S` permits also satisfies it — just as `A <: Foo` does not require that `A` uses every method of `Foo`. Upper bound semantics is the right choice because a lower bound would require using every shape (impractical), exact matching would require naming every shape used (too rigid), whereas upper bound says "your type may use any of these shapes" — a permission, not a mandate. ```scala // Allows[UserRow, Record[Primitive | Optional[Primitive]]] is satisfied even if From 11ea3f4c70cd73d3b46ba0017be3232b6cb4f542 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 14:36:48 +0330 Subject: [PATCH 25/36] docs(allows): improve upper bound code example with UserRow cases --- docs/reference/allows.md | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index eafd675054..fa285f58eb 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -32,12 +32,21 @@ Without `Allows`, these constraints can only be checked at runtime, producing co `Allows[A, S]` is an upper bound. A type `A` that uses only a strict subset of what `S` permits also satisfies it — just as `A <: Foo` does not require that `A` uses every method of `Foo`. Upper bound semantics is the right choice because a lower bound would require using every shape (impractical), exact matching would require naming every shape used (too rigid), whereas upper bound says "your type may use any of these shapes" — a permission, not a mandate. -```scala -// Allows[UserRow, Record[Primitive | Optional[Primitive]]] is satisfied even if -// UserRow has no Option fields — the Optional branch is simply never needed. -``` +```scala mdoc:compile-only +import zio.blocks.schema.comptime.Allows +import Allows._ -[//]: # (The above code blocks doesn't have enough context to be meaningful on its own — consider adding a more complete example showing the upper bound semantics in action) +// Both satisfy Record[Primitive | Optional[Primitive]] — the upper bound + +case class UserRow(name: String, age: Int) +// UserRow satisfies the grammar: all fields are Primitive + +case class UserRowOpt(name: String, age: Int, email: Option[String]) +// UserRowOpt also satisfies the grammar: all fields are Primitive or Optional[Primitive] + +val ev1: Allows[UserRow, Record[Primitive | Optional[Primitive]]] = implicitly +val ev2: Allows[UserRowOpt, Record[Primitive | Optional[Primitive]]] = implicitly +``` ## Creating Instances From 35a3a6af132932feae4f1ddf971cb9293c5cafa7 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 14:36:55 +0330 Subject: [PATCH 26/36] docs(allows): remove tabbed code blocks comment --- docs/reference/allows.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index fa285f58eb..a4eaa83af0 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -77,8 +77,6 @@ def toJson[A](doc: A)(implicit ev: Allows[A, Record[Primitive]]): String = ??? val evidence = implicitly[Allows[Int, Primitive]] ``` -[//]: # (Please research abot tabbed code blocks: https://docusaurus.io/docs/markdown-features/tabs and plan how to use them to show Scala 2 and Scala 3 examples side by side) - The constraint is checked once, at the call site. If the type `A` does not satisfy `S`, you get a compile-time error with a precise message showing exactly which field violates the grammar. ## Grammar Nodes From eb3dbb8bbdbfe958842c291b2969284eea5ec8ea Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 14:37:11 +0330 Subject: [PATCH 27/36] docs(allows): restructure grammar nodes table into three groups --- docs/reference/allows.md | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index a4eaa83af0..af1ff5f319 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -101,23 +101,18 @@ All grammar nodes extend `Allows.Structural`. | `Primitive.UUID` | `java.util.UUID` only | | `Primitive.Currency` | `java.util.Currency` only | | `Primitive.Instant` / `LocalDate` / `LocalDateTime` / … | Each specific `java.time.*` type | +| | | | `Record[A]` | A case class / product type whose every field satisfies `A`. Vacuously true for zero-field records. Sealed traits and enums are **automatically unwrapped**: each case is checked individually, so no `Variant` node is needed. | | `Sequence[A]` | Any collection (`List`, `Vector`, `Set`, `Array`, `Chunk`, …) whose element type satisfies `A` | -| `Sequence.List[A]` | `scala.collection.immutable.List` only, element type satisfies `A` | -| `Sequence.Vector[A]` | `scala.collection.immutable.Vector` only, element type satisfies `A` | -| `Sequence.Set[A]` | `scala.collection.immutable.Set` only, element type satisfies `A` | -| `Sequence.Array[A]` | `scala.Array` only, element type satisfies `A` | -| `Sequence.Chunk[A]` | `zio.blocks.chunk.Chunk` only, element type satisfies `A` | -| `IsType[A]` | Exact nominal type match: satisfied only when the checked type is exactly `A` (`=:=`) | | `Map[K, V]` | `Map`, `HashMap`, … whose key satisfies `K` and value satisfies `V` | | `Optional[A]` | `Option[X]` where the inner type `X` satisfies `A` | | `Wrapped[A]` | A ZIO Prelude `Newtype`/`Subtype` wrapper whose underlying type satisfies `A` | -| `Dynamic` | `DynamicValue` — the schema-less escape hatch | +| | | | `Self` | Recursive self-reference back to the entire enclosing `Allows[A, S]` grammar | +| `Dynamic` | `DynamicValue` — the schema-less escape hatch | +| `IsType[A]` | Exact nominal type match: satisfied only when the checked type is exactly `A` (`=:=`) | | `` `\|` `` | Union of two grammar nodes: `A \| B`. In Scala 2 write `` A `\|` B `` in infix position. | -[//]: # (Please reconsider the presentation of the grammar nodes - is it required to list them? if so, should we list all of them? Why it is good to show them in documentation? then decide how what to include/not include here) - Every specific `Primitive.Xxx` node also satisfies the catch-all `Primitive`. This means a type annotated with `Primitive.Int` is valid wherever `Primitive` or `Primitive | Primitive.Long` is required. [//]: # (The above sentence is a bit technical and may not be clear to all readers — consider adding more context what do you mean by catch-all) From d8951dc7e6bc90db4dfa8292bca665f0c6aa8350 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 14:37:18 +0330 Subject: [PATCH 28/36] docs(allows): clarify 'catch-all Primitive' terminology --- docs/reference/allows.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index af1ff5f319..2316d0944e 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -113,9 +113,7 @@ All grammar nodes extend `Allows.Structural`. | `IsType[A]` | Exact nominal type match: satisfied only when the checked type is exactly `A` (`=:=`) | | `` `\|` `` | Union of two grammar nodes: `A \| B`. In Scala 2 write `` A `\|` B `` in infix position. | -Every specific `Primitive.Xxx` node also satisfies the catch-all `Primitive`. This means a type annotated with `Primitive.Int` is valid wherever `Primitive` or `Primitive | Primitive.Long` is required. - -[//]: # (The above sentence is a bit technical and may not be clear to all readers — consider adding more context what do you mean by catch-all) +Every specific `Primitive.Xxx` node also satisfies the top-level `Primitive` node (which matches any of the 30 primitive types). This means a type annotated with `Primitive.Int` is valid wherever `Primitive` or `Primitive | Primitive.Long` is required. ## Core Operations From 8e7731905d2afbe90df773fcb4c42b38486fa463 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 15:08:10 +0330 Subject: [PATCH 29/36] docs(allows): add MDX Tabs imports --- docs/reference/allows.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 2316d0944e..8fe85b3f02 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -3,6 +3,9 @@ id: allows title: "Allows" --- +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + `Allows[A, S]` is a compile-time capability token that proves, at the call site, that type `A` satisfies the structural grammar `S`. A capability token is a compile-time phantom proof value — it carries no runtime data and exists solely to pass evidence through the type system that a structural constraint has been satisfied. `Allows` does **not** require or use `Schema[A]`. It inspects the Scala type structure of `A` directly at compile time, using nothing but the Scala type system. Any `Schema[A]` that appears alongside `Allows` in examples is the library author's own separate constraint — it is not imposed by `Allows` itself. From dbcfc6c1e6686d3a05daa39824582eb998589c03 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 15:11:43 +0330 Subject: [PATCH 30/36] docs(allows): convert 'Creating Instances' examples to tabs --- docs/reference/allows.md | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 8fe85b3f02..67830f7e1a 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -55,31 +55,36 @@ val ev2: Allows[UserRowOpt, Record[Primitive | Optional[Primitive]]] = implicitl `Allows[A, S]` is not instantiated directly. Instead, you summon an evidence value at the point where you need the constraint. The macro automatically verifies the constraint at compile time. -In **Scala 3**, use the `using` syntax to summon an implicit: + + ```scala mdoc:compile-only import zio.blocks.schema.comptime.Allows import Allows._ -def toJson[A](doc: A)(using Allows[A, Record[Primitive]]): String = ??? +def toJson[A](doc: A)(implicit ev: Allows[A, Record[Primitive]]): String = ??? -// Calling the function: -case class Person(name: String, age: Int) -val json = toJson(Person("Alice", 30)) // Compiles if Person satisfies Record[Primitive] +// Or summon at the call site: +val evidence = implicitly[Allows[Int, Primitive]] ``` -In **Scala 2**, use `implicit` parameter with `implicitly`: + + ```scala mdoc:compile-only import zio.blocks.schema.comptime.Allows import Allows._ -def toJson[A](doc: A)(implicit ev: Allows[A, Record[Primitive]]): String = ??? +def toJson[A](doc: A)(using Allows[A, Record[Primitive]]): String = ??? -// Or summon at the call site: -val evidence = implicitly[Allows[Int, Primitive]] +// Calling the function: +case class Person(name: String, age: Int) +val json = toJson(Person("Alice", 30)) // Compiles if Person satisfies Record[Primitive] ``` + + + The constraint is checked once, at the call site. If the type `A` does not satisfy `S`, you get a compile-time error with a precise message showing exactly which field violates the grammar. ## Grammar Nodes From 7c3bcc181b9b5c0b80543aad6d77f2cee47b840e Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 15:15:17 +0330 Subject: [PATCH 31/36] docs(allows): convert 'Union Syntax' examples to tabs --- docs/reference/allows.md | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 67830f7e1a..5dbe7cea26 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -199,28 +199,37 @@ A type with a UUID or Instant field fails at compile time with this error: Union types express "or" in the grammar. -**Scala 3** uses native union type syntax: + + + +Uses the infix operator `` Primitive `|` Optional[Primitive] `` from `Allows`: ```scala mdoc:compile-only import zio.blocks.schema.comptime.Allows import Allows._ -def writeCsv[A](rows: Seq[A])(using - Allows[A, Record[Primitive | Optional[Primitive]]] +def writeCsv[A](rows: Seq[A])(implicit + ev: Allows[A, Record[Primitive | Optional[Primitive]]] ): Unit = ??? ``` -**Scala 2** uses the infix operator `` Primitive `|` Optional[Primitive] `` from `Allows`: + + + +Uses native union type syntax: ```scala mdoc:compile-only import zio.blocks.schema.comptime.Allows import Allows._ -def writeCsv[A](rows: Seq[A])(implicit - ev: Allows[A, Record[Primitive | Optional[Primitive]]] +def writeCsv[A](rows: Seq[A])(using + Allows[A, Record[Primitive | Optional[Primitive]]] ): Unit = ??? ``` + + + Both spellings compile and produce the same semantic behavior. The grammar is identical — the only difference is how the union type is expressed. ## Use Cases From 5cc8a18b214681fb5d79233f65d1323a7aab0658 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 15:35:53 +0330 Subject: [PATCH 32/36] docs(skills): add Scala 2/3 tabs pattern to docs-mdoc-conventions --- .claude/skills/docs-mdoc-conventions/SKILL.md | 53 +++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/.claude/skills/docs-mdoc-conventions/SKILL.md b/.claude/skills/docs-mdoc-conventions/SKILL.md index e30983b53b..8d978fca04 100644 --- a/.claude/skills/docs-mdoc-conventions/SKILL.md +++ b/.claude/skills/docs-mdoc-conventions/SKILL.md @@ -106,6 +106,59 @@ modifiers are used more than in reference pages: --- +## Tabbed Scala 2 / Scala 3 Examples + +When a section shows syntax that differs between Scala 2 and Scala 3, use Docusaurus tabs +instead of sequential prose blocks. This lets readers pick their version once and have all +tab groups on the page sync together. + +### Required MDX imports + +Add these two lines at the top of any `.md` file that uses tabs (right after the closing +`---` of the frontmatter, before any prose): + +```mdx +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; +``` + +### Tab structure + +```mdx + + + +```scala mdoc:compile-only +// Scala 2 syntax here +``` + + + + +```scala mdoc:compile-only +// Scala 3 syntax here +``` + + + +``` + +### Rules + +- Always use `groupId="scala-version"` — this syncs all tab groups on the page when the + reader picks a version. +- Always use `defaultValue="scala2"` — Scala 2 is shown first by default. +- Blank lines inside `` are required for mdoc to process fenced code blocks + correctly. +- `mdoc:compile-only` is the correct modifier for code inside tabs (same as everywhere + else). +- mdoc passes JSX components through unchanged — only fenced `scala mdoc:*` blocks are + rewritten. +- Do **not** use tabs for examples that are identical in both versions — only use them + when the syntax genuinely differs. + +--- + ## Docusaurus Admonitions Use Docusaurus admonition syntax for callouts: From 8b6d95c7a19f13361f88a1e3ad8e1c36c76e0343 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 15:51:28 +0330 Subject: [PATCH 33/36] docs(skills): update Scala version rule to allow tabs for version-specific syntax --- .claude/skills/docs-writing-style/SKILL.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.claude/skills/docs-writing-style/SKILL.md b/.claude/skills/docs-writing-style/SKILL.md index 7c81fdcc56..6b0eb82979 100644 --- a/.claude/skills/docs-writing-style/SKILL.md +++ b/.claude/skills/docs-writing-style/SKILL.md @@ -82,7 +82,12 @@ Apply these conventions consistently in all prose, section headings, and inline ## Scala Version -All code in documentation and companion example files **must use Scala 2.13.x syntax**. When in -doubt, check the companion example files — they are the source of truth for syntax style. +All code in documentation and companion example files **defaults to Scala 2.13.x syntax**. +When in doubt, check the companion example files — they are the source of truth for syntax style. + +When a section shows syntax that genuinely differs between Scala 2 and Scala 3 (e.g., `using` +vs `implicit`, native union types vs backtick infix), use tabbed code blocks instead of +sequential prose. See `docs-mdoc-conventions` for the exact tab structure. Scala 2 is always +the default tab (`defaultValue="scala2"`). --- From e9ed13780e270cb39c2db7ef251d3f226d297df3 Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 16:32:08 +0330 Subject: [PATCH 34/36] Update .claude/skills/docs-data-type-ref/SKILL.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .claude/skills/docs-data-type-ref/SKILL.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.claude/skills/docs-data-type-ref/SKILL.md b/.claude/skills/docs-data-type-ref/SKILL.md index 4447872286..7ddbe11189 100644 --- a/.claude/skills/docs-data-type-ref/SKILL.md +++ b/.claude/skills/docs-data-type-ref/SKILL.md @@ -292,7 +292,7 @@ the file path shown as the title. The path is relative to the repository root (t ``` ```` - `showLineNumbers = true` — render with line numbers in the output -- `comment = false` — suppress the file path title +- `showTitle = false` — suppress the file path title ### Compile-Checked Code Blocks with mdoc From 0b93ec259b4b4cb46a0582a48fb806170d55aa6c Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 16:33:31 +0330 Subject: [PATCH 35/36] Update docs/reference/allows.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- docs/reference/allows.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/allows.md b/docs/reference/allows.md index 5dbe7cea26..e2a408e510 100644 --- a/docs/reference/allows.md +++ b/docs/reference/allows.md @@ -129,7 +129,7 @@ Every specific `Primitive.Xxx` node also satisfies the top-level `Primitive` nod 1. **As a constraint in function signatures** — Declare `Allows[A, S]` as an implicit/using parameter to require that callers pass only types satisfying the grammar. 2. **To summon evidence** — Use `implicitly[Allows[A, S]]` (Scala 2) or `summon[Allows[A, S]]` (Scala 3) at a call site to check the constraint and get an error message if it fails. -3. **In type aliases** — Define type aliases like `type FlatRecord = Allows[?, Record[Primitive | Optional[Primitive]]]` to name constraints and reuse them across functions. +3. **In type aliases** — Define type aliases like `type FlatRecord = Allows[_, Record[Primitive | Optional[Primitive]]]` to name constraints and reuse them across functions. The macro that powers `Allows` checks the constraint **at compile time** and emits nothing but a reference to a single private singleton at runtime, so there is zero per-call-site overhead. From e8e3aa96b00cd5423b5be9d45ea2e68205b5b9bc Mon Sep 17 00:00:00 2001 From: Milad Khajavi Date: Sat, 7 Mar 2026 16:44:06 +0330 Subject: [PATCH 36/36] docs(skills): fix nested fence in tabbed code example Use 4 backticks for outer mdx fence to properly contain inner scala fences --- .claude/skills/docs-mdoc-conventions/SKILL.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.claude/skills/docs-mdoc-conventions/SKILL.md b/.claude/skills/docs-mdoc-conventions/SKILL.md index 8d978fca04..bef1cd2a6d 100644 --- a/.claude/skills/docs-mdoc-conventions/SKILL.md +++ b/.claude/skills/docs-mdoc-conventions/SKILL.md @@ -124,7 +124,7 @@ import TabItem from '@theme/TabItem'; ### Tab structure -```mdx +````mdx @@ -141,7 +141,7 @@ import TabItem from '@theme/TabItem'; -``` +```` ### Rules