chore: go live (#5)

stainless-app[bot] committed Feb 16, 2024
1 parent 4de80db · commit 75ea081
Showing 9 changed files with 140 additions and 44 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -2,10 +2,10 @@ name: CI
 on:
   push:
     branches:
-      - main
+      - stainless
   pull_request:
     branches:
-      - main
+      - stainless
 
 jobs:
   lint:
4 changes: 2 additions & 2 deletions README.md
@@ -261,9 +261,9 @@ completion = response.parse()  # get the object that `chat.completions.create()`
 print(completion.id)
 ```
 
-These methods return an [`APIResponse`](https://github.com/groq/groq-python/tree/main/src/groq/_response.py) object.
+These methods return an [`APIResponse`](https://github.com/groq/groq-python/tree/stainless/src/groq/_response.py) object.
 
-The async client returns an [`AsyncAPIResponse`](https://github.com/groq/groq-python/tree/main/src/groq/_response.py) with the same structure, the only difference being `await`able methods for reading the response content.
+The async client returns an [`AsyncAPIResponse`](https://github.com/groq/groq-python/tree/stainless/src/groq/_response.py) with the same structure, the only difference being `await`able methods for reading the response content.
 
 #### `.with_streaming_response`
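The raw-response pattern shown in this README excerpt combines with the arguments this commit makes required as follows; a minimal sketch, assuming `GROQ_API_KEY` is set in the environment and using an illustrative model name not taken from this diff:

```python
from groq import Groq

client = Groq()  # assumes GROQ_API_KEY is set in the environment

response = client.chat.completions.with_raw_response.create(
    messages=[{"role": "user", "content": "Hello!"}],
    model="mixtral-8x7b-32768",  # illustrative model name
)
completion = response.parse()  # the object `chat.completions.create()` would have returned
print(completion.id)
```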
4 changes: 2 additions & 2 deletions bin/check-release-environment
@@ -6,9 +6,9 @@ if [ -z "${PYPI_TOKEN}" ]; then
   errors+=("The GROQ_PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.")
 fi
 
-len=${#errors[@]}
+lenErrors=${#errors[@]}
 
-if [[ len -gt 0 ]]; then
+if [[ lenErrors -gt 0 ]]; then
   echo -e "Found the following errors in the release environment:\n"
 
   for error in "${errors[@]}"; do
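A plausible reading of this rename: `[[ len -gt 0 ]]` worked only because `-gt` arithmetically evaluates its operands, resolving the bare word `len` as a variable; `lenErrors` makes that variable reference explicit and harder to mistake for a builtin. (The commit itself does not state a motivation.)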
Empty file modified: bin/check-test-server (mode 100644 → 100755)
Empty file modified: bin/test (mode 100644 → 100755)
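The 100644 → 100755 mode change sets the executable bit on both helper scripts, so they can be invoked directly (e.g. `./bin/test`) instead of through an explicit interpreter.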
16 changes: 8 additions & 8 deletions src/groq/resources/chat/completions.py
@@ -36,12 +36,12 @@ def with_streaming_response(self) -> CompletionsWithStreamingResponse:
     def create(
         self,
         *,
+        messages: Iterable[completion_create_params.Message],
+        model: str,
         frequency_penalty: float | NotGiven = NOT_GIVEN,
         logit_bias: Dict[str, int] | NotGiven = NOT_GIVEN,
         logprobs: bool | NotGiven = NOT_GIVEN,
         max_tokens: int | NotGiven = NOT_GIVEN,
-        messages: Iterable[completion_create_params.Message] | NotGiven = NOT_GIVEN,
-        model: str | NotGiven = NOT_GIVEN,
         n: int | NotGiven = NOT_GIVEN,
         presence_penalty: float | NotGiven = NOT_GIVEN,
         response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
@@ -80,12 +80,12 @@ def create(
             "/openai/v1/chat/completions",
             body=maybe_transform(
                 {
+                    "messages": messages,
+                    "model": model,
                     "frequency_penalty": frequency_penalty,
                     "logit_bias": logit_bias,
                     "logprobs": logprobs,
                     "max_tokens": max_tokens,
-                    "messages": messages,
-                    "model": model,
                     "n": n,
                     "presence_penalty": presence_penalty,
                     "response_format": response_format,
@@ -120,12 +120,12 @@ def with_streaming_response(self) -> AsyncCompletionsWithStreamingResponse:
     async def create(
         self,
         *,
+        messages: Iterable[completion_create_params.Message],
+        model: str,
         frequency_penalty: float | NotGiven = NOT_GIVEN,
         logit_bias: Dict[str, int] | NotGiven = NOT_GIVEN,
         logprobs: bool | NotGiven = NOT_GIVEN,
         max_tokens: int | NotGiven = NOT_GIVEN,
-        messages: Iterable[completion_create_params.Message] | NotGiven = NOT_GIVEN,
-        model: str | NotGiven = NOT_GIVEN,
         n: int | NotGiven = NOT_GIVEN,
         presence_penalty: float | NotGiven = NOT_GIVEN,
         response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
@@ -164,12 +164,12 @@ async def create(
             "/openai/v1/chat/completions",
             body=maybe_transform(
                 {
+                    "messages": messages,
+                    "model": model,
                     "frequency_penalty": frequency_penalty,
                     "logit_bias": logit_bias,
                     "logprobs": logprobs,
                     "max_tokens": max_tokens,
-                    "messages": messages,
-                    "model": model,
                     "n": n,
                     "presence_penalty": presence_penalty,
                     "response_format": response_format,
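Net effect of the four hunks above: `messages` and `model` lose their `NOT_GIVEN` defaults and become required keyword arguments on both the sync and async `create()`. A minimal sketch of a call against the new signature (client setup and model name are illustrative assumptions, not part of this diff):

```python
from groq import Groq

client = Groq()  # assumes GROQ_API_KEY is set in the environment

# Both keyword arguments below are now mandatory; omitting either is a type
# error rather than a request silently sent without them.
completion = client.chat.completions.create(
    messages=[{"role": "user", "content": "Say hello."}],
    model="mixtral-8x7b-32768",  # illustrative model name
)
print(completion.choices[0].message.content)
```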
16 changes: 8 additions & 8 deletions src/groq/types/chat/chat_completion.py
@@ -54,21 +54,21 @@ class ChoiceMessageToolCall(BaseModel):
 
 
 class ChoiceMessage(BaseModel):
-    content: Optional[str] = None
+    content: str
 
-    role: Optional[str] = None
+    role: str
 
     tool_calls: Optional[List[ChoiceMessageToolCall]] = None
 
 
 class Choice(BaseModel):
-    finish_reason: Optional[str] = None
+    finish_reason: str
 
-    index: Optional[int] = None
+    index: int
 
-    logprobs: Optional[ChoiceLogprobs] = None
+    logprobs: ChoiceLogprobs
 
-    message: Optional[ChoiceMessage] = None
+    message: ChoiceMessage
 
 
 class Usage(BaseModel):
@@ -86,9 +86,9 @@ class Usage(BaseModel):
 
 
 class ChatCompletion(BaseModel):
-    id: Optional[str] = None
+    choices: List[Choice]
 
-    choices: Optional[List[Choice]] = None
+    id: Optional[str] = None
 
     created: Optional[int] = None
 
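With `Choice` and `ChoiceMessage` fields now non-optional, a parsed response can be read without `None` checks on `choices`, `finish_reason`, `index`, `logprobs`, `message`, or `content`; only `tool_calls` and top-level metadata such as `id` and `created` stay `Optional`. A small sketch, reusing the `completion` object from the previous example:

```python
choice = completion.choices[0]             # `choices` is now a required List[Choice]
print(choice.index, choice.finish_reason)  # now plain int / str, never None
print(choice.message.content)              # ChoiceMessage.content is now a plain str

if choice.message.tool_calls:              # tool_calls remains Optional
    print(len(choice.message.tool_calls), "tool call(s)")
```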
16 changes: 8 additions & 8 deletions src/groq/types/chat/completion_create_params.py
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 from typing import Dict, List, Union, Iterable, Optional
-from typing_extensions import Annotated, TypedDict
+from typing_extensions import Required, Annotated, TypedDict
 
 from ..._utils import PropertyInfo
 
@@ -22,6 +22,10 @@
 
 
 class CompletionCreateParams(TypedDict, total=False):
+    messages: Required[Iterable[Message]]
+
+    model: Required[str]
+
     frequency_penalty: float
 
     logit_bias: Dict[str, int]
@@ -30,10 +34,6 @@ class CompletionCreateParams(TypedDict, total=False):
 
     max_tokens: int
 
-    messages: Iterable[Message]
-
-    model: str
-
     n: int
 
     presence_penalty: float
@@ -78,11 +78,11 @@ class MessageToolCall(TypedDict, total=False):
 
 
 class Message(TypedDict, total=False):
-    content: str
+    content: Required[str]
 
-    name: str
+    role: Required[str]
 
-    role: str
+    name: str
 
     tool_call_id: str
     """ToolMessage Fields"""
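The new `Required[...]` annotations rely on PEP 655 semantics: in a `TypedDict` declared with `total=False`, every key is optional unless wrapped in `Required`, which lets one class mix mandatory and optional request fields. A standalone sketch of that behavior (illustrative only, not code from this repository):

```python
from typing_extensions import Required, TypedDict


class Params(TypedDict, total=False):
    messages: Required[list]  # must be present despite total=False
    model: Required[str]      # must be present
    max_tokens: int           # may be omitted


ok: Params = {"messages": [], "model": "string"}  # accepted
# bad: Params = {"messages": []}  # rejected by a type checker: "model" is Required
```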
124 changes: 110 additions & 14 deletions tests/api_resources/chat/test_completions.py
@@ -19,16 +19,28 @@ class TestCompletions:
 
     @parametrize
     def test_method_create(self, client: Groq) -> None:
-        completion = client.chat.completions.create()
+        completion = client.chat.completions.create(
+            messages=[
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+            ],
+            model="string",
+        )
         assert_matches_type(ChatCompletion, completion, path=["response"])
 
     @parametrize
     def test_method_create_with_all_params(self, client: Groq) -> None:
         completion = client.chat.completions.create(
-            frequency_penalty=0,
-            logit_bias={"foo": 0},
-            logprobs=True,
-            max_tokens=0,
             messages=[
                 {
                     "content": "string",
@@ -128,6 +140,10 @@ def test_method_create_with_all_params(self, client: Groq) -> None:
                 },
             ],
             model="string",
+            frequency_penalty=0,
+            logit_bias={"foo": 0},
+            logprobs=True,
+            max_tokens=0,
             n=0,
             presence_penalty=0,
             response_format={"type": "string"},
@@ -176,7 +192,23 @@ def test_method_create_with_all_params(self, client: Groq) -> None:
 
     @parametrize
     def test_raw_response_create(self, client: Groq) -> None:
-        response = client.chat.completions.with_raw_response.create()
+        response = client.chat.completions.with_raw_response.create(
+            messages=[
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+            ],
+            model="string",
+        )
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -185,7 +217,23 @@ def test_raw_response_create(self, client: Groq) -> None:
 
     @parametrize
     def test_streaming_response_create(self, client: Groq) -> None:
-        with client.chat.completions.with_streaming_response.create() as response:
+        with client.chat.completions.with_streaming_response.create(
+            messages=[
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+            ],
+            model="string",
+        ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
 
@@ -200,16 +248,28 @@ class TestAsyncCompletions:
 
     @parametrize
     async def test_method_create(self, async_client: AsyncGroq) -> None:
-        completion = await async_client.chat.completions.create()
+        completion = await async_client.chat.completions.create(
+            messages=[
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+            ],
+            model="string",
+        )
         assert_matches_type(ChatCompletion, completion, path=["response"])
 
     @parametrize
     async def test_method_create_with_all_params(self, async_client: AsyncGroq) -> None:
         completion = await async_client.chat.completions.create(
-            frequency_penalty=0,
-            logit_bias={"foo": 0},
-            logprobs=True,
-            max_tokens=0,
             messages=[
                 {
                     "content": "string",
@@ -309,6 +369,10 @@ async def test_method_create_with_all_params(self, async_client: AsyncGroq) -> None:
                 },
             ],
             model="string",
+            frequency_penalty=0,
+            logit_bias={"foo": 0},
+            logprobs=True,
+            max_tokens=0,
            n=0,
             presence_penalty=0,
             response_format={"type": "string"},
@@ -357,7 +421,23 @@ async def test_method_create_with_all_params(self, async_client: AsyncGroq) -> None:
 
     @parametrize
     async def test_raw_response_create(self, async_client: AsyncGroq) -> None:
-        response = await async_client.chat.completions.with_raw_response.create()
+        response = await async_client.chat.completions.with_raw_response.create(
+            messages=[
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+            ],
+            model="string",
+        )
 
         assert response.is_closed is True
         assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -366,7 +446,23 @@ async def test_raw_response_create(self, async_client: AsyncGroq) -> None:
 
     @parametrize
     async def test_streaming_response_create(self, async_client: AsyncGroq) -> None:
-        async with async_client.chat.completions.with_streaming_response.create() as response:
+        async with async_client.chat.completions.with_streaming_response.create(
+            messages=[
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+                {
+                    "content": "string",
+                    "role": "string",
+                },
+            ],
+            model="string",
+        ) as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
