Skip to content

Commit

Permalink
Chat stream usage: update annotations and add test
Browse files Browse the repository at this point in the history
  • Loading branch information
gehrisandro committed May 27, 2024
1 parent 769be96 commit faddd9b
Show file tree
Hide file tree
Showing 3 changed files with 43 additions and 5 deletions.
15 changes: 10 additions & 5 deletions src/Responses/Chat/CreateStreamedResponse.php
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,12 @@
use OpenAI\Testing\Responses\Concerns\FakeableForStreamedResponse;

/**
* @implements ResponseContract<array{id: string, object: string, created: int, model: string, choices: array<int, array{index: int, delta: array{role?: string, content?: string}|array{role?: string, content: null, function_call: array{name?: string, arguments?: string}}, finish_reason: string|null}>}>
* @implements ResponseContract<array{id: string, object: string, created: int, model: string, choices: array<int, array{index: int, delta: array{role?: string, content?: string}|array{role?: string, content: null, function_call: array{name?: string, arguments?: string}}, finish_reason: string|null}>, usage?: array{prompt_tokens: int, completion_tokens: int|null, total_tokens: int}}>
*/
final class CreateStreamedResponse implements ResponseContract
{
/**
* @use ArrayAccessible<array{id: string, object: string, created: int, model: string, choices: array<int, array{index: int, delta: array{role?: string, content?: string}|array{role?: string, content: null, function_call: array{name?: string, arguments?: string}}, finish_reason: string|null}>}>
* @use ArrayAccessible<array{id: string, object: string, created: int, model: string, choices: array<int, array{index: int, delta: array{role?: string, content?: string}|array{role?: string, content: null, function_call: array{name?: string, arguments?: string}}, finish_reason: string|null}>, usage?: array{prompt_tokens: int, completion_tokens: int|null, total_tokens: int}}>
*/
use ArrayAccessible;

Expand All @@ -36,7 +36,7 @@ private function __construct(
/**
* Acts as static factory, and returns a new Response instance.
*
* @param array{id: string, object: string, created: int, model: string, choices: array<int, array{index: int, delta: array{role?: string, content?: string}, finish_reason: string|null}>} $attributes
* @param array{id: string, object: string, created: int, model: string, choices: array<int, array{index: int, delta: array{role?: string, content?: string}, finish_reason: string|null}>, usage?: array{prompt_tokens: int, completion_tokens: int|null, total_tokens: int}} $attributes
*/
public static function from(array $attributes): self
{
Expand All @@ -59,7 +59,7 @@ public static function from(array $attributes): self
*/
public function toArray(): array
{
return [
$data = [
'id' => $this->id,
'object' => $this->object,
'created' => $this->created,
Expand All @@ -68,7 +68,12 @@ public function toArray(): array
static fn (CreateStreamedResponseChoice $result): array => $result->toArray(),
$this->choices,
),
'usage' => $this->usage?->toArray() ?? null,
];

if ($this->usage instanceof \OpenAI\Responses\Chat\CreateResponseUsage) {
$data['usage'] = $this->usage->toArray();
}

return $data;
}
}
16 changes: 16 additions & 0 deletions tests/Fixtures/Chat.php
Original file line number Diff line number Diff line change
Expand Up @@ -206,6 +206,22 @@ function chatCompletionStreamContentChunk(): array
];
}

/**
 * Fixture: a streamed chat-completion chunk that carries only usage data.
 *
 * NOTE(review): this looks like the trailing chunk OpenAI emits when
 * `stream_options.include_usage` is enabled — `choices` is empty and the
 * aggregated token counts ride along in `usage`; confirm against the API docs.
 *
 * @return array{id: string, object: string, created: int, model: string, choices: array<never>, usage: array{prompt_tokens: int, completion_tokens: int, total_tokens: int}}
 */
function chatCompletionStreamUsageChunk(): array
{
    // Aggregated token counts for the whole streamed completion.
    $usage = [
        'prompt_tokens' => 9,
        'completion_tokens' => 12,
        'total_tokens' => 21,
    ];

    return [
        'id' => 'chatcmpl-6wdIE4DsUtqf1srdMTsfkJp0VWZgz',
        'object' => 'chat.completion.chunk',
        'created' => 1679432086,
        'model' => 'gpt-4-0314',
        'choices' => [],
        'usage' => $usage,
    ];
}

function chatCompletionStreamFunctionCallChunk(): array
{
return [
Expand Down
17 changes: 17 additions & 0 deletions tests/Responses/Chat/CreateStreamedResponse.php
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
<?php

use OpenAI\Responses\Chat\CreateResponseUsage;
use OpenAI\Responses\Chat\CreateStreamedResponse;
use OpenAI\Responses\Chat\CreateStreamedResponseChoice;

Expand All @@ -16,6 +17,22 @@
->choices->each->toBeInstanceOf(CreateStreamedResponseChoice::class);
});

// Verifies that a usage-only stream chunk (empty `choices`, populated `usage`)
// hydrates into a CreateStreamedResponse whose `usage` property is a
// CreateResponseUsage value object with the fixture's token counts.
test('from usage chunk', function () {
    $completion = CreateStreamedResponse::from(chatCompletionStreamUsageChunk());

    expect($completion)
        ->toBeInstanceOf(CreateStreamedResponse::class)
        ->id->toBe('chatcmpl-6wdIE4DsUtqf1srdMTsfkJp0VWZgz')
        ->object->toBe('chat.completion.chunk')
        ->created->toBe(1679432086)
        ->model->toBe('gpt-4-0314')
        // Usage chunks carry no choices at all.
        ->choices->toBeArray()->toHaveCount(0)
        ->usage->toBeInstanceOf(CreateResponseUsage::class)
        ->usage->promptTokens->toBe(9)
        ->usage->completionTokens->toBe(12)
        ->usage->totalTokens->toBe(21);
});

test('as array accessible', function () {
$completion = CreateStreamedResponse::from(chatCompletionStreamFirstChunk());

Expand Down

0 comments on commit faddd9b

Please sign in to comment.