@@ -22,7 +22,8 @@
 
 import vertexai
 from google.cloud.aiplatform import initializer
-from vertexai.preview import generative_models
+from vertexai import generative_models
+from vertexai.preview import generative_models as preview_generative_models
 from vertexai.generative_models._generative_models import (
     prediction_service,
     gapic_prediction_service_types,
@@ -231,7 +232,11 @@ def teardown_method(self):
         attribute="generate_content",
         new=mock_generate_content,
     )
-    def test_generate_content(self):
+    @pytest.mark.parametrize(
+        "generative_models",
+        [generative_models, preview_generative_models],
+    )
+    def test_generate_content(self, generative_models: generative_models):
         model = generative_models.GenerativeModel("gemini-pro")
         response = model.generate_content("Why is sky blue?")
         assert response.text
@@ -254,7 +259,11 @@ def test_generate_content(self):
         attribute="stream_generate_content",
         new=mock_stream_generate_content,
     )
-    def test_generate_content_(self):
+    @pytest.mark.parametrize(
+        "generative_models",
+        [generative_models, preview_generative_models],
+    )
+    def test_generate_content_(self, generative_models: generative_models):
         model = generative_models.GenerativeModel("gemini-pro")
         stream = model.generate_content("Why is sky blue?", stream=True)
         for chunk in stream:
@@ -265,7 +274,11 @@ def test_generate_content_(self):
         attribute="generate_content",
         new=mock_generate_content,
     )
-    def test_chat_send_message(self):
+    @pytest.mark.parametrize(
+        "generative_models",
+        [generative_models, preview_generative_models],
+    )
+    def test_chat_send_message(self, generative_models: generative_models):
         model = generative_models.GenerativeModel("gemini-pro")
         chat = model.start_chat()
         response1 = chat.send_message("Why is sky blue?")
@@ -278,7 +291,11 @@ def test_chat_send_message(self):
         attribute="generate_content",
         new=mock_generate_content,
     )
-    def test_chat_function_calling(self):
+    @pytest.mark.parametrize(
+        "generative_models",
+        [generative_models, preview_generative_models],
+    )
+    def test_chat_function_calling(self, generative_models: generative_models):
         get_current_weather_func = generative_models.FunctionDeclaration(
             name="get_current_weather",
             description="Get the current weather in a given location",
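The pattern in the hunks above is the same in every test: `@pytest.mark.parametrize` shadows the module-level `generative_models` name with either the GA module or the preview module object, so a single test body runs once against each namespace. A minimal, self-contained sketch of the same idea, using the standard-library `math` and `cmath` modules as stand-ins for the two Vertex AI namespaces (names here are illustrative only, not from the diff):

import math
import cmath

import pytest


# pytest injects each module object from the list as the `mod` argument,
# so the one test body executes once per namespace.
@pytest.mark.parametrize("mod", [math, cmath])
def test_sqrt_is_callable(mod):
    assert callable(mod.sqrt)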