You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
Copy file name to clipboard. Expand all lines: packages/ai/openai/src/Generated.ts
+3 −3 — Lines changed: 3 additions & 3 deletions
Original file line number
Diff line number
Diff line change
@@ -2466,7 +2466,7 @@ export class ChatCompletionFunctions extends S.Class<ChatCompletionFunctions>("C
2466
2466
/**
2467
2467
* The retention policy for the prompt cache. Set to `24h` to enable extended prompt caching, which keeps cached prefixes active for longer, up to a maximum of 24 hours. [Learn more](https://platform.openai.com/docs/guides/prompt-caching#prompt-cache-retention).
2468
2468
*/
2469
-
export class CreateChatCompletionRequestPromptCacheRetentionEnum extends S.Literal("in-memory", "24h") {}
2469
+
/** Allowed `prompt_cache_retention` values on a chat-completion request: `in_memory` or `24h` (extended prompt caching, up to 24 hours). */
export class CreateChatCompletionRequestPromptCacheRetentionEnum extends S.Literal("in_memory", "24h") {}
2470
2470
2471
2471
export class CreateChatCompletionRequest extends S.Class<CreateChatCompletionRequest>("CreateChatCompletionRequest")({
2472
2472
/**
@@ -14969,7 +14969,7 @@ export class CreateResponseTruncationEnum extends S.Literal("auto", "disabled")
14969
14969
/**
14970
14970
* The retention policy for the prompt cache. Set to `24h` to enable extended prompt caching, which keeps cached prefixes active for longer, up to a maximum of 24 hours. [Learn more](https://platform.openai.com/docs/guides/prompt-caching#prompt-cache-retention).
14971
14971
*/
14972
-
export class CreateResponsePromptCacheRetentionEnum extends S.Literal("in-memory", "24h") {}
14972
+
/** Allowed `prompt_cache_retention` values on a Responses-API create request: `in_memory` or `24h` (extended prompt caching, up to 24 hours). */
export class CreateResponsePromptCacheRetentionEnum extends S.Literal("in_memory", "24h") {}
14973
14973
14974
14974
export class CreateResponse extends S.Class<CreateResponse>("CreateResponse")({
* The retention policy for the prompt cache. Set to `24h` to enable extended prompt caching, which keeps cached prefixes active for longer, up to a maximum of 24 hours. [Learn more](https://platform.openai.com/docs/guides/prompt-caching#prompt-cache-retention).
15183
15183
*/
15184
-
export class ResponsePromptCacheRetentionEnum extends S.Literal("in-memory", "24h") {}
15184
+
/** Allowed `prompt_cache_retention` values echoed on a Response object: `in_memory` or `24h` (extended prompt caching, up to 24 hours). */
export class ResponsePromptCacheRetentionEnum extends S.Literal("in_memory", "24h") {}
15185
15185
15186
15186
export class Response extends S.Class<Response>("Response")({
0 commit comments