File Name

GoogleCloudPlatform/generative-ai/blob/main/gemini/use-cases/retrieval-augmented-generation/utils/intro_multimodal_rag_utils.py

What happened?

Running `from utils.intro_multimodal_rag_utils import get_document_metadata` fails with this error:

NotFound: 404 Publisher Model `publishers/google/models/textembedding-gecko@latest` is not found.

Changing `latest` to `003` resolves the issue:

text_embedding_model = TextEmbeddingModel.from_pretrained("textembedding-gecko@003")

As per the documentation, we shouldn't be using `latest`.
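For context, a minimal sketch of what the patched module-level setup in intro_multimodal_rag_utils.py could look like, assuming the imports shown in the traceback below and that Vertex AI has already been initialized for the project; the pinned "@003" version is the one that worked for me:

# Sketch only, not the exact upstream diff: pin an explicit published
# version instead of the "@latest" alias, which the API rejects with a 404.
from vertexai.language_models import TextEmbeddingModel
from vertexai.vision_models import MultiModalEmbeddingModel

text_embedding_model = TextEmbeddingModel.from_pretrained("textembedding-gecko@003")
multimodal_embedding_model = MultiModalEmbeddingModel.from_pretrained(
    "multimodalembedding@001"
)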
Relevant log output

---------------------------------------------------------------------------
_InactiveRpcError                         Traceback (most recent call last)
File /opt/conda/lib/python3.10/site-packages/google/api_core/grpc_helpers.py:65, in _wrap_unary_errors.<locals>.error_remapped_callable(*args, **kwargs)
     64 try:
---> 65     return callable_(*args, **kwargs)
     66 except grpc.RpcError as exc:

File /opt/conda/lib/python3.10/site-packages/grpc/_interceptor.py:277, in _UnaryUnaryMultiCallable.__call__(self, request, timeout, metadata, credentials, wait_for_ready, compression)
    268 def __call__(
    269     self,
    270     request: Any,
   (...)
    275     compression: Optional[grpc.Compression] = None,
    276 ) -> Any:
--> 277     response, ignored_call = self._with_call(
    278         request,
    279         timeout=timeout,
    280         metadata=metadata,
    281         credentials=credentials,
    282         wait_for_ready=wait_for_ready,
    283         compression=compression,
    284     )
    285     return response

File /opt/conda/lib/python3.10/site-packages/grpc/_interceptor.py:332, in _UnaryUnaryMultiCallable._with_call(self, request, timeout, metadata, credentials, wait_for_ready, compression)
    329 call = self._interceptor.intercept_unary_unary(
    330     continuation, client_call_details, request
    331 )
--> 332 return call.result(), call

File /opt/conda/lib/python3.10/site-packages/grpc/_channel.py:440, in _InactiveRpcError.result(self, timeout)
    439 """See grpc.Future.result."""
--> 440 raise self

File /opt/conda/lib/python3.10/site-packages/grpc/_interceptor.py:315, in _UnaryUnaryMultiCallable._with_call.<locals>.continuation(new_details, request)
    314 try:
--> 315     response, call = self._thunk(new_method).with_call(
    316         request,
    317         timeout=new_timeout,
    318         metadata=new_metadata,
    319         credentials=new_credentials,
    320         wait_for_ready=new_wait_for_ready,
    321         compression=new_compression,
    322     )
    323     return _UnaryOutcome(response, call)

File /opt/conda/lib/python3.10/site-packages/grpc/_channel.py:1198, in _UnaryUnaryMultiCallable.with_call(self, request, timeout, metadata, credentials, wait_for_ready, compression)
   1192 (
   1193     state,
   1194     call,
   1195 ) = self._blocking(
   1196     request, timeout, metadata, credentials, wait_for_ready, compression
   1197 )
-> 1198 return _end_unary_response_blocking(state, call, True, None)

File /opt/conda/lib/python3.10/site-packages/grpc/_channel.py:1006, in _end_unary_response_blocking(state, call, with_call, deadline)
   1005 else:
-> 1006     raise _InactiveRpcError(state)

_InactiveRpcError: <_InactiveRpcError of RPC that terminated with:
	status = StatusCode.NOT_FOUND
	details = "Publisher Model `publishers/google/models/textembedding-gecko@latest` is not found."
	debug_error_string = "UNKNOWN:Error received from peer ipv4:74.125.26.95:443 {created_time:"2024-12-21T21:39:53.643907811+00:00", grpc_status:5, grpc_message:"Publisher Model `publishers/google/models/textembedding-gecko@latest` is not found."}"
>

The above exception was the direct cause of the following exception:

NotFound                                  Traceback (most recent call last)
Cell In[11], line 4
      1 # "RUN THIS CELL AS IS"
      2
      3 # Import helper functions from utils.
----> 4 from utils.intro_multimodal_rag_utils import get_document_metadata

File ~/utils/intro_multimodal_rag_utils.py:23
     20 from vertexai.vision_models import Image as vision_model_Image
     21 from vertexai.vision_models import MultiModalEmbeddingModel
---> 23 text_embedding_model = TextEmbeddingModel.from_pretrained("textembedding-gecko@latest")
     24 multimodal_embedding_model = MultiModalEmbeddingModel.from_pretrained(
     25     "multimodalembedding@001"
     26 )
     29 # Functions for getting text and image embeddings

File ~/.local/lib/python3.10/site-packages/vertexai/_model_garden/_model_garden_models.py:289, in _ModelGardenModel.from_pretrained(cls, model_name)
    278 credential_exception_str = (
    279     "\nUnable to authenticate your request."
    280     "\nDepending on your runtime environment, you can complete authentication by:"
   (...)
    285     "\n- if in service account or other: please follow guidance in https://cloud.google.com/docs/authentication"
    286 )
    288 try:
--> 289     return _from_pretrained(interface_class=cls, model_name=model_name)
    290 except auth_exceptions.GoogleAuthError as e:
    291     raise auth_exceptions.GoogleAuthError(credential_exception_str) from e

File ~/.local/lib/python3.10/site-packages/vertexai/_model_garden/_model_garden_models.py:206, in _from_pretrained(interface_class, model_name, publisher_model, tuned_vertex_model)
    201 if not interface_class._INSTANCE_SCHEMA_URI:
    202     raise ValueError(
    203         f"Class {interface_class} is not a correct model interface class since it does not have an instance schema URI."
    204     )
--> 206 model_info = _get_model_info(
    207     model_id=model_name,
    208     schema_to_class_map={interface_class._INSTANCE_SCHEMA_URI: interface_class},
    209 )
    211 else:
    212     schema_uri = publisher_model._gca_resource.predict_schemata.instance_schema_uri

File ~/.local/lib/python3.10/site-packages/vertexai/_model_garden/_model_garden_models.py:122, in _get_model_info(model_id, schema_to_class_map, interface_class, publisher_model_res, tuned_vertex_model)
    118     model_id = "publishers/google/models/" + model_id
    120 if not publisher_model_res:
    121     publisher_model_res = (
--> 122         _publisher_models._PublisherModel(  # pylint: disable=protected-access
    123             resource_name=model_id
    124         )._gca_resource
    125     )
    127 if not publisher_model_res.name.startswith("publishers/google/models/"):
    128     raise ValueError(
    129         f"Only Google models are currently supported. {publisher_model_res.name}"
    130     )

File ~/.local/lib/python3.10/site-packages/google/cloud/aiplatform/_publisher_models.py:77, in _PublisherModel.__init__(self, resource_name, project, location, credentials)
     71 else:
     72     raise ValueError(
     73         f"`{resource_name}` is not a valid PublisherModel resource "
     74         "name or model garden id."
     75     )
---> 77 self._gca_resource = getattr(self.api_client, self._getter_method)(
     78     name=full_resource_name, retry=base._DEFAULT_RETRY
     79 )

File ~/.local/lib/python3.10/site-packages/google/cloud/aiplatform_v1/services/model_garden_service/client.py:844, in ModelGardenServiceClient.get_publisher_model(self, request, name, retry, timeout, metadata)
    841 self._validate_universe_domain()
    843 # Send the request.
--> 844 response = rpc(
    845     request,
    846     retry=retry,
    847     timeout=timeout,
    848     metadata=metadata,
    849 )
    851 # Done; return the response.
    852 return response

File /opt/conda/lib/python3.10/site-packages/google/api_core/gapic_v1/method.py:113, in _GapicCallable.__call__(self, timeout, retry, *args, **kwargs)
    110     metadata.extend(self._metadata)
    111     kwargs["metadata"] = metadata
--> 113 return wrapped_func(*args, **kwargs)

File /opt/conda/lib/python3.10/site-packages/google/api_core/retry.py:349, in Retry.__call__.<locals>.retry_wrapped_func(*args, **kwargs)
    345 target = functools.partial(func, *args, **kwargs)
    346 sleep_generator = exponential_sleep_generator(
    347     self._initial, self._maximum, multiplier=self._multiplier
    348 )
--> 349 return retry_target(
    350     target,
    351     self._predicate,
    352     sleep_generator,
    353     self._timeout,
    354     on_error=on_error,
    355 )

File /opt/conda/lib/python3.10/site-packages/google/api_core/retry.py:191, in retry_target(target, predicate, sleep_generator, timeout, on_error, **kwargs)
    189 for sleep in sleep_generator:
    190     try:
--> 191         return target()
    193 # pylint: disable=broad-except
    194 # This function explicitly must deal with broad exceptions.
    195 except Exception as exc:

File /opt/conda/lib/python3.10/site-packages/google/api_core/grpc_helpers.py:67, in _wrap_unary_errors.<locals>.error_remapped_callable(*args, **kwargs)
     65     return callable_(*args, **kwargs)
     66 except grpc.RpcError as exc:
---> 67     raise exceptions.from_grpc_error(exc) from exc

NotFound: 404 Publisher Model `publishers/google/models/textembedding-gecko@latest` is not found.
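As a quick smoke test (a hypothetical snippet, not part of the notebook; the project ID and location below are placeholders), the pinned version loads and returns embeddings, while the `@latest` alias raises the NotFound error above:

# Hypothetical check that the pinned gecko version resolves correctly.
import vertexai
from vertexai.language_models import TextEmbeddingModel

vertexai.init(project="your-project-id", location="us-central1")  # placeholders

model = TextEmbeddingModel.from_pretrained("textembedding-gecko@003")
embeddings = model.get_embeddings(["hello world"])
print(len(embeddings[0].values))  # textembedding-gecko returns 768-dimensional vectors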
PR: #1572