Commit b799b5a

feat(api): OpenAPI spec update via Stainless API (#132)
1 parent 406cbe0 commit b799b5a

5 files changed: +24 -1 lines changed

.stats.yml (+1 -1)

@@ -1,2 +1,2 @@
 configured_endpoints: 21
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-46c4a5f130f6e345a9aea686def3f4b271fd4f0a5c32393e8187c9f8903383c4.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-5d4722a755a01f8917b975ab7e6528e590f53d09891baac758abba1e28df15d1.yml

src/prompt_foundry_python_sdk/types/prompt_configuration.py (+3)

@@ -75,6 +75,9 @@ class Parameters(BaseModel):

     tool_choice: Optional[str] = FieldInfo(alias="toolChoice", default=None)

+    top_k: Optional[float] = FieldInfo(alias="topK", default=None)
+    """Example: 50"""
+
     top_p: float = FieldInfo(alias="topP")
     """Example: 1"""

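For context, a minimal standalone sketch (not part of this commit) of the alias pattern the generated response model uses: the wire field "topK" is parsed into the snake_case attribute top_k. Plain pydantic Field stands in for the SDK's FieldInfo helper, and only the two fields visible in this hunk are modeled.

from typing import Optional

from pydantic import BaseModel, Field


class Parameters(BaseModel):
    # Optional on the response model, mirroring the hunk above; example value 50.
    top_k: Optional[float] = Field(default=None, alias="topK")
    # Required on the response model; example value 1.
    top_p: float = Field(alias="topP")


# Payloads arrive with camelCase keys and are exposed as snake_case attributes.
params = Parameters(topK=50, topP=1)
print(params.top_k, params.top_p)  # 50.0 1.0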

src/prompt_foundry_python_sdk/types/prompt_create_params.py (+3)

@@ -85,6 +85,9 @@ class Parameters(TypedDict, total=False):

     tool_choice: Required[Annotated[Optional[str], PropertyInfo(alias="toolChoice")]]

+    top_k: Required[Annotated[Optional[float], PropertyInfo(alias="topK")]]
+    """Example: 50"""
+
     top_p: Required[Annotated[float, PropertyInfo(alias="topP")]]
     """Example: 1"""


src/prompt_foundry_python_sdk/types/prompt_update_params.py (+3)

@@ -85,6 +85,9 @@ class Parameters(TypedDict, total=False):

     tool_choice: Required[Annotated[Optional[str], PropertyInfo(alias="toolChoice")]]

+    top_k: Required[Annotated[Optional[float], PropertyInfo(alias="topK")]]
+    """Example: 50"""
+
     top_p: Required[Annotated[float, PropertyInfo(alias="topP")]]
     """Example: 1"""

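The create and update param TypedDicts receive the same addition. A hedged sketch of that request-side pattern follows, with a plain string standing in for the SDK's PropertyInfo metadata: the key is Required but Optional-typed, so callers always include "top_k" and may send None.

from typing import Optional

from typing_extensions import Annotated, Required, TypedDict


class Parameters(TypedDict, total=False):
    # Required key with an Optional value; serialized to the API as "topK".
    top_k: Required[Annotated[Optional[float], "alias=topK"]]
    # Required key; serialized as "topP".
    top_p: Required[Annotated[float, "alias=topP"]]


params: Parameters = {"top_k": 50, "top_p": 1}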

tests/api_resources/test_prompts.py (+14)

@@ -127,6 +127,7 @@ def test_method_create(self, client: PromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -244,6 +245,7 @@ def test_raw_response_create(self, client: PromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -365,6 +367,7 @@ def test_streaming_response_create(self, client: PromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -489,6 +492,7 @@ def test_method_update(self, client: PromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -607,6 +611,7 @@ def test_raw_response_update(self, client: PromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -729,6 +734,7 @@ def test_streaming_response_update(self, client: PromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -854,6 +860,7 @@ def test_path_params_update(self, client: PromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -1312,6 +1319,7 @@ async def test_method_create(self, async_client: AsyncPromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -1429,6 +1437,7 @@ async def test_raw_response_create(self, async_client: AsyncPromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -1550,6 +1559,7 @@ async def test_streaming_response_create(self, async_client: AsyncPromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -1674,6 +1684,7 @@ async def test_method_update(self, async_client: AsyncPromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -1792,6 +1803,7 @@ async def test_raw_response_update(self, async_client: AsyncPromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -1914,6 +1926,7 @@ async def test_streaming_response_update(self, async_client: AsyncPromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,

@@ -2039,6 +2052,7 @@ async def test_path_params_update(self, async_client: AsyncPromptFoundry) -> None:
                 "response_format": "JSON",
                 "temperature": 0,
                 "top_p": 0,
+                "top_k": 1,
                 "frequency_penalty": 0,
                 "presence_penalty": 0,
                 "max_tokens": 0,
