Skip to content

Commit

Permalink
Merge pull request #22 from writer/release-please--branches--main--ch…
Browse files Browse the repository at this point in the history
…anges--next

release: 0.3.0
  • Loading branch information
ramedina86 authored Jul 12, 2024
2 parents b3166e5 + 74b3e16 commit ab70916
Show file tree
Hide file tree
Showing 17 changed files with 159 additions and 150 deletions.
1 change: 1 addition & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ on:
pull_request:
branches:
- main
- next

jobs:
lint:
Expand Down
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "0.2.0"
".": "0.3.0"
}
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
# Changelog

## 0.3.0 (2024-07-12)

Full Changelog: [v0.2.0...v0.3.0](https://github.com/writer/writer-python/compare/v0.2.0...v0.3.0)

### Features

* **api:** update via SDK Studio ([#20](https://github.com/writer/writer-python/issues/20)) ([018271a](https://github.com/writer/writer-python/commit/018271a7c8a4a7a98937e708fe02f0b8b944df5d))

## 0.2.0 (2024-07-10)

Full Changelog: [v0.1.2...v0.2.0](https://github.com/writer/writer-python/compare/v0.1.2...v0.2.0)
Expand Down
14 changes: 7 additions & 7 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ client = Writer(
chat = client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand Down Expand Up @@ -69,7 +69,7 @@ async def main() -> None:
chat = await client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand Down Expand Up @@ -149,7 +149,7 @@ try:
client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand Down Expand Up @@ -200,7 +200,7 @@ client = Writer(
client.with_options(max_retries=5).chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand Down Expand Up @@ -231,7 +231,7 @@ client = Writer(
client.with_options(timeout=5.0).chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand Down Expand Up @@ -277,7 +277,7 @@ from writerai import Writer
client = Writer()
response = client.chat.with_raw_response.chat(
messages=[{
"content": "string",
"content": "content",
"role": "user",
}],
model="palmyra-x-002-32k",
Expand All @@ -302,7 +302,7 @@ To stream the response body, use `.with_streaming_response` instead, which requi
with client.chat.with_streaming_response.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "writer-sdk"
version = "0.2.0"
version = "0.3.0"
description = "The official Python library for the writer API"
dynamic = ["readme"]
license = "Apache-2.0"
Expand Down
6 changes: 5 additions & 1 deletion requirements-dev.lock
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,11 @@ idna==3.4
importlib-metadata==7.0.0
iniconfig==2.0.0
# via pytest
mypy==1.7.1
markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
mypy==1.10.1
mypy-extensions==1.0.0
# via mypy
nodeenv==1.8.0
Expand Down
8 changes: 8 additions & 0 deletions src/writerai/_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -616,6 +616,14 @@ def validate_type(*, type_: type[_T], value: object) -> _T:
return cast(_T, _validate_non_model_type(type_=type_, value=value))


def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None:
    """Attach a pydantic config to *typ* by setting ``__pydantic_config__``.

    Pydantic v2 reads this attribute when building models from plain
    classes; under Pydantic v1 the attribute is simply ignored, so the
    call is a no-op there.
    """
    # Dynamic attribute set keeps static checkers quiet about assigning a
    # dunder on an arbitrary type object.
    setattr(typ, "__pydantic_config__", config)  # noqa: B010


# our use of subclassing here causes weirdness for type checkers,
# so we just pretend that we don't subclass
if TYPE_CHECKING:
Expand Down
2 changes: 1 addition & 1 deletion src/writerai/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "writerai"
__version__ = "0.2.0" # x-release-please-version
__version__ = "0.3.0" # x-release-please-version
4 changes: 1 addition & 3 deletions src/writerai/resources/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,7 @@
)
from .._streaming import Stream, AsyncStream
from ..types.chat import Chat
from .._base_client import (
make_request_options,
)
from .._base_client import make_request_options
from ..types.chat_streaming_data import ChatStreamingData

__all__ = ["ChatResource", "AsyncChatResource"]
Expand Down
4 changes: 1 addition & 3 deletions src/writerai/resources/completions.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,7 @@
async_to_streamed_response_wrapper,
)
from .._streaming import Stream, AsyncStream
from .._base_client import (
make_request_options,
)
from .._base_client import make_request_options
from ..types.completion import Completion
from ..types.streaming_data import StreamingData

Expand Down
5 changes: 1 addition & 4 deletions src/writerai/resources/files.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,7 @@
)
from ..pagination import SyncCursorPage, AsyncCursorPage
from ..types.file import File
from .._base_client import (
AsyncPaginator,
make_request_options,
)
from .._base_client import AsyncPaginator, make_request_options
from ..types.file_delete_response import FileDeleteResponse

__all__ = ["FilesResource", "AsyncFilesResource"]
Expand Down
5 changes: 1 addition & 4 deletions src/writerai/resources/graphs.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,7 @@
from ..pagination import SyncCursorPage, AsyncCursorPage
from ..types.file import File
from ..types.graph import Graph
from .._base_client import (
AsyncPaginator,
make_request_options,
)
from .._base_client import AsyncPaginator, make_request_options
from ..types.graph_create_response import GraphCreateResponse
from ..types.graph_delete_response import GraphDeleteResponse
from ..types.graph_update_response import GraphUpdateResponse
Expand Down
4 changes: 1 addition & 3 deletions src/writerai/resources/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,7 @@
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
from .._base_client import (
make_request_options,
)
from .._base_client import make_request_options
from ..types.model_list_response import ModelListResponse

__all__ = ["ModelsResource", "AsyncModelsResource"]
Expand Down
40 changes: 20 additions & 20 deletions tests/api_resources/test_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def test_method_chat_overload_1(self, client: Writer) -> None:
chat = client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -35,9 +35,9 @@ def test_method_chat_with_all_params_overload_1(self, client: Writer) -> None:
chat = client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
"name": "string",
"name": "name",
}
],
model="palmyra-x-002-32k",
Expand All @@ -55,7 +55,7 @@ def test_raw_response_chat_overload_1(self, client: Writer) -> None:
response = client.chat.with_raw_response.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -72,7 +72,7 @@ def test_streaming_response_chat_overload_1(self, client: Writer) -> None:
with client.chat.with_streaming_response.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -91,7 +91,7 @@ def test_method_chat_overload_2(self, client: Writer) -> None:
chat_stream = client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -105,9 +105,9 @@ def test_method_chat_with_all_params_overload_2(self, client: Writer) -> None:
chat_stream = client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
"name": "string",
"name": "name",
}
],
model="palmyra-x-002-32k",
Expand All @@ -125,7 +125,7 @@ def test_raw_response_chat_overload_2(self, client: Writer) -> None:
response = client.chat.with_raw_response.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -142,7 +142,7 @@ def test_streaming_response_chat_overload_2(self, client: Writer) -> None:
with client.chat.with_streaming_response.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -166,7 +166,7 @@ async def test_method_chat_overload_1(self, async_client: AsyncWriter) -> None:
chat = await async_client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -179,9 +179,9 @@ async def test_method_chat_with_all_params_overload_1(self, async_client: AsyncW
chat = await async_client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
"name": "string",
"name": "name",
}
],
model="palmyra-x-002-32k",
Expand All @@ -199,7 +199,7 @@ async def test_raw_response_chat_overload_1(self, async_client: AsyncWriter) ->
response = await async_client.chat.with_raw_response.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -216,7 +216,7 @@ async def test_streaming_response_chat_overload_1(self, async_client: AsyncWrite
async with async_client.chat.with_streaming_response.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -235,7 +235,7 @@ async def test_method_chat_overload_2(self, async_client: AsyncWriter) -> None:
chat_stream = await async_client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -249,9 +249,9 @@ async def test_method_chat_with_all_params_overload_2(self, async_client: AsyncW
chat_stream = await async_client.chat.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
"name": "string",
"name": "name",
}
],
model="palmyra-x-002-32k",
Expand All @@ -269,7 +269,7 @@ async def test_raw_response_chat_overload_2(self, async_client: AsyncWriter) ->
response = await async_client.chat.with_raw_response.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand All @@ -286,7 +286,7 @@ async def test_streaming_response_chat_overload_2(self, async_client: AsyncWrite
async with async_client.chat.with_streaming_response.chat(
messages=[
{
"content": "string",
"content": "content",
"role": "user",
}
],
Expand Down
Loading

0 comments on commit ab70916

Please sign in to comment.