@@ -21,7 +21,7 @@ class TestCompletions:
     def test_method_create_overload_1(self, client: Anthropic) -> None:
         completion = client.completions.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
         )
         assert_matches_type(Completion, completion, path=["response"])
@@ -30,7 +30,7 @@ def test_method_create_overload_1(self, client: Anthropic) -> None:
     def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> None:
         completion = client.completions.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
             metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
             stop_sequences=["string"],
@@ -46,7 +46,7 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No
     def test_raw_response_create_overload_1(self, client: Anthropic) -> None:
         response = client.completions.with_raw_response.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
         )

@@ -59,7 +59,7 @@ def test_raw_response_create_overload_1(self, client: Anthropic) -> None:
     def test_streaming_response_create_overload_1(self, client: Anthropic) -> None:
         with client.completions.with_streaming_response.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
         ) as response:
             assert not response.is_closed
@@ -74,7 +74,7 @@ def test_streaming_response_create_overload_1(self, client: Anthropic) -> None:
     def test_method_create_overload_2(self, client: Anthropic) -> None:
         completion_stream = client.completions.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
             stream=True,
         )
@@ -84,7 +84,7 @@ def test_method_create_overload_2(self, client: Anthropic) -> None:
     def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> None:
         completion_stream = client.completions.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
             stream=True,
             metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
@@ -100,7 +100,7 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No
     def test_raw_response_create_overload_2(self, client: Anthropic) -> None:
         response = client.completions.with_raw_response.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
             stream=True,
         )
@@ -113,7 +113,7 @@ def test_raw_response_create_overload_2(self, client: Anthropic) -> None:
     def test_streaming_response_create_overload_2(self, client: Anthropic) -> None:
         with client.completions.with_streaming_response.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
             stream=True,
         ) as response:
@@ -135,7 +135,7 @@ class TestAsyncCompletions:
     async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> None:
         completion = await async_client.completions.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
         )
         assert_matches_type(Completion, completion, path=["response"])
@@ -144,7 +144,7 @@ async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> N
     async def test_method_create_with_all_params_overload_1(self, async_client: AsyncAnthropic) -> None:
         completion = await async_client.completions.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
             metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
             stop_sequences=["string"],
@@ -160,7 +160,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
     async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic) -> None:
         response = await async_client.completions.with_raw_response.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
         )

@@ -173,7 +173,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic
     async def test_streaming_response_create_overload_1(self, async_client: AsyncAnthropic) -> None:
         async with async_client.completions.with_streaming_response.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
         ) as response:
             assert not response.is_closed
@@ -188,7 +188,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncAnt
     async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> None:
         completion_stream = await async_client.completions.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
             stream=True,
         )
@@ -198,7 +198,7 @@ async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> N
     async def test_method_create_with_all_params_overload_2(self, async_client: AsyncAnthropic) -> None:
         completion_stream = await async_client.completions.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
             stream=True,
             metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
@@ -214,7 +214,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
     async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic) -> None:
         response = await async_client.completions.with_raw_response.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
             stream=True,
         )
@@ -227,7 +227,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic
     async def test_streaming_response_create_overload_2(self, async_client: AsyncAnthropic) -> None:
         async with async_client.completions.with_streaming_response.create(
             max_tokens_to_sample=256,
-            model="claude-opus-4-6",
+            model="claude-mythos-preview",
             prompt="\n\nHuman: Hello, world!\n\nAssistant:",
             stream=True,
         ) as response:
0 commit comments