Skip to content

Commit c78eef7

Browse files
feat(api): Add support for claude-mythos-preview
1 parent ac91863 commit c78eef7

File tree

6 files changed

+36
-34
lines changed

6 files changed

+36
-34
lines changed

.stats.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
configured_endpoints: 34
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic%2Fanthropic-efe26b096126c693462514b8cbd3ec3e376569232becbfb730cd26fb31c7c7e3.yml
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic%2Fanthropic-04225437444990f383d0581df2f07022baab6ad510de0f3a8bd6b07c38d83cc9.yml
33
openapi_spec_hash: cae9199aabfd7b87f0ff2dcc10760c92
4-
config_hash: e1ebf6470ea96e5b7bf02520fbeeed0a
4+
config_hash: fcc34074db6eaf64bc99b578c6c82c61

src/anthropic/types/model.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77

88
Model: TypeAlias = Union[
99
Literal[
10+
"claude-mythos-preview",
1011
"claude-opus-4-6",
1112
"claude-sonnet-4-6",
1213
"claude-haiku-4-5",

src/anthropic/types/model_param.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99

1010
ModelParam: TypeAlias = Union[
1111
Literal[
12+
"claude-mythos-preview",
1213
"claude-opus-4-6",
1314
"claude-sonnet-4-6",
1415
"claude-haiku-4-5",

tests/api_resources/beta/test_messages.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -396,7 +396,7 @@ def test_method_count_tokens(self, client: Anthropic) -> None:
396396
"role": "user",
397397
}
398398
],
399-
model="claude-opus-4-6",
399+
model="claude-mythos-preview",
400400
)
401401
assert_matches_type(BetaMessageTokensCount, message, path=["response"])
402402

@@ -409,7 +409,7 @@ def test_method_count_tokens_with_all_params(self, client: Anthropic) -> None:
409409
"role": "user",
410410
}
411411
],
412-
model="claude-opus-4-6",
412+
model="claude-mythos-preview",
413413
cache_control={
414414
"type": "ephemeral",
415415
"ttl": "5m",
@@ -520,7 +520,7 @@ def test_raw_response_count_tokens(self, client: Anthropic) -> None:
520520
"role": "user",
521521
}
522522
],
523-
model="claude-opus-4-6",
523+
model="claude-mythos-preview",
524524
)
525525

526526
assert response.is_closed is True
@@ -537,7 +537,7 @@ def test_streaming_response_count_tokens(self, client: Anthropic) -> None:
537537
"role": "user",
538538
}
539539
],
540-
model="claude-opus-4-6",
540+
model="claude-mythos-preview",
541541
) as response:
542542
assert not response.is_closed
543543
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -945,7 +945,7 @@ async def test_method_count_tokens(self, async_client: AsyncAnthropic) -> None:
945945
"role": "user",
946946
}
947947
],
948-
model="claude-opus-4-6",
948+
model="claude-mythos-preview",
949949
)
950950
assert_matches_type(BetaMessageTokensCount, message, path=["response"])
951951

@@ -958,7 +958,7 @@ async def test_method_count_tokens_with_all_params(self, async_client: AsyncAnth
958958
"role": "user",
959959
}
960960
],
961-
model="claude-opus-4-6",
961+
model="claude-mythos-preview",
962962
cache_control={
963963
"type": "ephemeral",
964964
"ttl": "5m",
@@ -1069,7 +1069,7 @@ async def test_raw_response_count_tokens(self, async_client: AsyncAnthropic) ->
10691069
"role": "user",
10701070
}
10711071
],
1072-
model="claude-opus-4-6",
1072+
model="claude-mythos-preview",
10731073
)
10741074

10751075
assert response.is_closed is True
@@ -1086,7 +1086,7 @@ async def test_streaming_response_count_tokens(self, async_client: AsyncAnthropi
10861086
"role": "user",
10871087
}
10881088
],
1089-
model="claude-opus-4-6",
1089+
model="claude-mythos-preview",
10901090
) as response:
10911091
assert not response.is_closed
10921092
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

tests/api_resources/test_completions.py

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ class TestCompletions:
2121
def test_method_create_overload_1(self, client: Anthropic) -> None:
2222
completion = client.completions.create(
2323
max_tokens_to_sample=256,
24-
model="claude-opus-4-6",
24+
model="claude-mythos-preview",
2525
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
2626
)
2727
assert_matches_type(Completion, completion, path=["response"])
@@ -30,7 +30,7 @@ def test_method_create_overload_1(self, client: Anthropic) -> None:
3030
def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> None:
3131
completion = client.completions.create(
3232
max_tokens_to_sample=256,
33-
model="claude-opus-4-6",
33+
model="claude-mythos-preview",
3434
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
3535
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
3636
stop_sequences=["string"],
@@ -46,7 +46,7 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No
4646
def test_raw_response_create_overload_1(self, client: Anthropic) -> None:
4747
response = client.completions.with_raw_response.create(
4848
max_tokens_to_sample=256,
49-
model="claude-opus-4-6",
49+
model="claude-mythos-preview",
5050
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
5151
)
5252

@@ -59,7 +59,7 @@ def test_raw_response_create_overload_1(self, client: Anthropic) -> None:
5959
def test_streaming_response_create_overload_1(self, client: Anthropic) -> None:
6060
with client.completions.with_streaming_response.create(
6161
max_tokens_to_sample=256,
62-
model="claude-opus-4-6",
62+
model="claude-mythos-preview",
6363
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
6464
) as response:
6565
assert not response.is_closed
@@ -74,7 +74,7 @@ def test_streaming_response_create_overload_1(self, client: Anthropic) -> None:
7474
def test_method_create_overload_2(self, client: Anthropic) -> None:
7575
completion_stream = client.completions.create(
7676
max_tokens_to_sample=256,
77-
model="claude-opus-4-6",
77+
model="claude-mythos-preview",
7878
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
7979
stream=True,
8080
)
@@ -84,7 +84,7 @@ def test_method_create_overload_2(self, client: Anthropic) -> None:
8484
def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> None:
8585
completion_stream = client.completions.create(
8686
max_tokens_to_sample=256,
87-
model="claude-opus-4-6",
87+
model="claude-mythos-preview",
8888
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
8989
stream=True,
9090
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
@@ -100,7 +100,7 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No
100100
def test_raw_response_create_overload_2(self, client: Anthropic) -> None:
101101
response = client.completions.with_raw_response.create(
102102
max_tokens_to_sample=256,
103-
model="claude-opus-4-6",
103+
model="claude-mythos-preview",
104104
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
105105
stream=True,
106106
)
@@ -113,7 +113,7 @@ def test_raw_response_create_overload_2(self, client: Anthropic) -> None:
113113
def test_streaming_response_create_overload_2(self, client: Anthropic) -> None:
114114
with client.completions.with_streaming_response.create(
115115
max_tokens_to_sample=256,
116-
model="claude-opus-4-6",
116+
model="claude-mythos-preview",
117117
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
118118
stream=True,
119119
) as response:
@@ -135,7 +135,7 @@ class TestAsyncCompletions:
135135
async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> None:
136136
completion = await async_client.completions.create(
137137
max_tokens_to_sample=256,
138-
model="claude-opus-4-6",
138+
model="claude-mythos-preview",
139139
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
140140
)
141141
assert_matches_type(Completion, completion, path=["response"])
@@ -144,7 +144,7 @@ async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> N
144144
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncAnthropic) -> None:
145145
completion = await async_client.completions.create(
146146
max_tokens_to_sample=256,
147-
model="claude-opus-4-6",
147+
model="claude-mythos-preview",
148148
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
149149
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
150150
stop_sequences=["string"],
@@ -160,7 +160,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
160160
async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic) -> None:
161161
response = await async_client.completions.with_raw_response.create(
162162
max_tokens_to_sample=256,
163-
model="claude-opus-4-6",
163+
model="claude-mythos-preview",
164164
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
165165
)
166166

@@ -173,7 +173,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic
173173
async def test_streaming_response_create_overload_1(self, async_client: AsyncAnthropic) -> None:
174174
async with async_client.completions.with_streaming_response.create(
175175
max_tokens_to_sample=256,
176-
model="claude-opus-4-6",
176+
model="claude-mythos-preview",
177177
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
178178
) as response:
179179
assert not response.is_closed
@@ -188,7 +188,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncAnt
188188
async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> None:
189189
completion_stream = await async_client.completions.create(
190190
max_tokens_to_sample=256,
191-
model="claude-opus-4-6",
191+
model="claude-mythos-preview",
192192
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
193193
stream=True,
194194
)
@@ -198,7 +198,7 @@ async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> N
198198
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncAnthropic) -> None:
199199
completion_stream = await async_client.completions.create(
200200
max_tokens_to_sample=256,
201-
model="claude-opus-4-6",
201+
model="claude-mythos-preview",
202202
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
203203
stream=True,
204204
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
@@ -214,7 +214,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
214214
async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic) -> None:
215215
response = await async_client.completions.with_raw_response.create(
216216
max_tokens_to_sample=256,
217-
model="claude-opus-4-6",
217+
model="claude-mythos-preview",
218218
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
219219
stream=True,
220220
)
@@ -227,7 +227,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic
227227
async def test_streaming_response_create_overload_2(self, async_client: AsyncAnthropic) -> None:
228228
async with async_client.completions.with_streaming_response.create(
229229
max_tokens_to_sample=256,
230-
model="claude-opus-4-6",
230+
model="claude-mythos-preview",
231231
prompt="\n\nHuman: Hello, world!\n\nAssistant:",
232232
stream=True,
233233
) as response:

tests/api_resources/test_messages.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -318,7 +318,7 @@ def test_method_count_tokens(self, client: Anthropic) -> None:
318318
"role": "user",
319319
}
320320
],
321-
model="claude-opus-4-6",
321+
model="claude-mythos-preview",
322322
)
323323
assert_matches_type(MessageTokensCount, message, path=["response"])
324324

@@ -331,7 +331,7 @@ def test_method_count_tokens_with_all_params(self, client: Anthropic) -> None:
331331
"role": "user",
332332
}
333333
],
334-
model="claude-opus-4-6",
334+
model="claude-mythos-preview",
335335
cache_control={
336336
"type": "ephemeral",
337337
"ttl": "5m",
@@ -407,7 +407,7 @@ def test_raw_response_count_tokens(self, client: Anthropic) -> None:
407407
"role": "user",
408408
}
409409
],
410-
model="claude-opus-4-6",
410+
model="claude-mythos-preview",
411411
)
412412

413413
assert response.is_closed is True
@@ -424,7 +424,7 @@ def test_streaming_response_count_tokens(self, client: Anthropic) -> None:
424424
"role": "user",
425425
}
426426
],
427-
model="claude-opus-4-6",
427+
model="claude-mythos-preview",
428428
) as response:
429429
assert not response.is_closed
430430
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -737,7 +737,7 @@ async def test_method_count_tokens(self, async_client: AsyncAnthropic) -> None:
737737
"role": "user",
738738
}
739739
],
740-
model="claude-opus-4-6",
740+
model="claude-mythos-preview",
741741
)
742742
assert_matches_type(MessageTokensCount, message, path=["response"])
743743

@@ -750,7 +750,7 @@ async def test_method_count_tokens_with_all_params(self, async_client: AsyncAnth
750750
"role": "user",
751751
}
752752
],
753-
model="claude-opus-4-6",
753+
model="claude-mythos-preview",
754754
cache_control={
755755
"type": "ephemeral",
756756
"ttl": "5m",
@@ -826,7 +826,7 @@ async def test_raw_response_count_tokens(self, async_client: AsyncAnthropic) ->
826826
"role": "user",
827827
}
828828
],
829-
model="claude-opus-4-6",
829+
model="claude-mythos-preview",
830830
)
831831

832832
assert response.is_closed is True
@@ -843,7 +843,7 @@ async def test_streaming_response_count_tokens(self, async_client: AsyncAnthropi
843843
"role": "user",
844844
}
845845
],
846-
model="claude-opus-4-6",
846+
model="claude-mythos-preview",
847847
) as response:
848848
assert not response.is_closed
849849
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

0 commit comments

Comments (0)