83 | 83 | _ML_EMBED_TEXT_STATUS = "ml_embed_text_status"
84 | 84 | _ML_GENERATE_EMBEDDING_STATUS = "ml_generate_embedding_status"
85 | 85 |
| 86 | +_MODEL_NOT_SUPPORTED_WARNING = (
| 87 | +    "Model name '{model_name}' is not supported. "
| 88 | +    "We are currently aware of the following models: {known_models}. "
| 89 | +    "However, model names can change, and the supported models may be outdated. "
| 90 | +    "You should use this model name only if you are sure that it is supported in BigQuery."
| 91 | +)
| 92 | +
86 | 93 |
87 | 94 | @typing_extensions.deprecated(
88 | 95 |     "PaLM2TextGenerator is going to be deprecated. Use GeminiTextGenerator(https://cloud.google.com/python/docs/reference/bigframes/latest/bigframes.ml.llm.GeminiTextGenerator) instead. ",

@@ -154,8 +161,11 @@ def _create_bqml_model(self):
154 | 161 |         )
155 | 162 |
156 | 163 |         if self.model_name not in _TEXT_GENERATOR_ENDPOINTS:
157 | | -            raise ValueError(
158 | | -                f"Model name {self.model_name} is not supported. We only support {', '.join(_TEXT_GENERATOR_ENDPOINTS)}."
| 164 | +            warnings.warn(
| 165 | +                _MODEL_NOT_SUPPORTED_WARNING.format(
| 166 | +                    model_name=self.model_name,
| 167 | +                    known_models=", ".join(_TEXT_GENERATOR_ENDPOINTS),
| 168 | +                )
159 | 169 |             )
160 | 170 |
161 | 171 |         options = {

@@ -484,8 +494,11 @@ def _create_bqml_model(self):
484 | 494 |         )
485 | 495 |
486 | 496 |         if self.model_name not in _PALM2_EMBEDDING_GENERATOR_ENDPOINTS:
487 | | -            raise ValueError(
488 | | -                f"Model name {self.model_name} is not supported. We only support {', '.join(_PALM2_EMBEDDING_GENERATOR_ENDPOINTS)}."
| 497 | +            warnings.warn(
| 498 | +                _MODEL_NOT_SUPPORTED_WARNING.format(
| 499 | +                    model_name=self.model_name,
| 500 | +                    known_models=", ".join(_PALM2_EMBEDDING_GENERATOR_ENDPOINTS),
| 501 | +                )
489 | 502 |             )
490 | 503 |
491 | 504 |         endpoint = (

@@ -644,8 +657,11 @@ def _create_bqml_model(self):
644 | 657 |         )
645 | 658 |
646 | 659 |         if self.model_name not in _TEXT_EMBEDDING_ENDPOINTS:
647 | | -            raise ValueError(
648 | | -                f"Model name {self.model_name} is not supported. We only support {', '.join(_TEXT_EMBEDDING_ENDPOINTS)}."
| 660 | +            warnings.warn(
| 661 | +                _MODEL_NOT_SUPPORTED_WARNING.format(
| 662 | +                    model_name=self.model_name,
| 663 | +                    known_models=", ".join(_TEXT_EMBEDDING_ENDPOINTS),
| 664 | +                )
649 | 665 |             )
650 | 666 |
651 | 667 |         options = {

@@ -801,8 +817,11 @@ def _create_bqml_model(self):
801 | 817 |         )
802 | 818 |
803 | 819 |         if self.model_name not in _GEMINI_ENDPOINTS:
804 | | -            raise ValueError(
805 | | -                f"Model name {self.model_name} is not supported. We only support {', '.join(_GEMINI_ENDPOINTS)}."
| 820 | +            warnings.warn(
| 821 | +                _MODEL_NOT_SUPPORTED_WARNING.format(
| 822 | +                    model_name=self.model_name,
| 823 | +                    known_models=", ".join(_GEMINI_ENDPOINTS),
| 824 | +                )
806 | 825 |             )
807 | 826 |
808 | 827 |         options = {"endpoint": self.model_name}

@@ -1118,8 +1137,11 @@ def _create_bqml_model(self):
1118 | 1137 |         )
1119 | 1138 |
1120 | 1139 |         if self.model_name not in _CLAUDE_3_ENDPOINTS:
1121 | | -            raise ValueError(
1122 | | -                f"Model name {self.model_name} is not supported. We only support {', '.join(_CLAUDE_3_ENDPOINTS)}."
| 1140 | +            warnings.warn(
| 1141 | +                _MODEL_NOT_SUPPORTED_WARNING.format(
| 1142 | +                    model_name=self.model_name,
| 1143 | +                    known_models=", ".join(_CLAUDE_3_ENDPOINTS),
| 1144 | +                )
1123 | 1145 |             )
1124 | 1146 |
1125 | 1147 |         options = {
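
For context on the behavioural change above: each `_create_bqml_model` now emits a warning for an unrecognized model name instead of raising `ValueError`, so the request is still forwarded to BigQuery. Below is a minimal, self-contained sketch of that pattern; the `_KNOWN_ENDPOINTS` tuple and the `check_model_name` helper are illustrative stand-ins rather than bigframes API, while the warning text is the constant added at lines 86-91.

import warnings

# Stand-in endpoint list; the real code uses module constants such as
# _TEXT_GENERATOR_ENDPOINTS, _GEMINI_ENDPOINTS, _CLAUDE_3_ENDPOINTS, etc.
_KNOWN_ENDPOINTS = ("model-a", "model-b")

_MODEL_NOT_SUPPORTED_WARNING = (
    "Model name '{model_name}' is not supported. "
    "We are currently aware of the following models: {known_models}. "
    "However, model names can change, and the supported models may be outdated. "
    "You should use this model name only if you are sure that it is supported in BigQuery."
)


def check_model_name(model_name: str) -> None:
    """Hypothetical helper mirroring the check inside _create_bqml_model."""
    if model_name not in _KNOWN_ENDPOINTS:
        # Previously a ValueError; now only a UserWarning, so model creation proceeds.
        warnings.warn(
            _MODEL_NOT_SUPPORTED_WARNING.format(
                model_name=model_name,
                known_models=", ".join(_KNOWN_ENDPOINTS),
            )
        )


# Callers who prefer the old fail-fast behaviour can escalate the warning:
with warnings.catch_warnings():
    warnings.simplefilter("error")
    try:
        check_model_name("model-c")  # unknown name -> UserWarning raised as an error
    except UserWarning as exc:
        print(exc)

The trade-off, as the new warning text itself notes, is that hard-coded endpoint lists go stale faster than releases; warning instead of raising keeps newly launched model names usable, at the caller's own risk.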