diff --git a/google/cloud/aiplatform_v1/__init__.py b/google/cloud/aiplatform_v1/__init__.py index 9c54ee4c48..79d0422665 100644 --- a/google/cloud/aiplatform_v1/__init__.py +++ b/google/cloud/aiplatform_v1/__init__.py @@ -563,6 +563,8 @@ from .types.io import SharePointSources from .types.io import SlackSource from .types.io import TFRecordDestination +from .types.io import VertexMultimodalDatasetDestination +from .types.io import VertexMultimodalDatasetSource from .types.job_service import CancelBatchPredictionJobRequest from .types.job_service import CancelCustomJobRequest from .types.job_service import CancelDataLabelingJobRequest @@ -2198,6 +2200,8 @@ def _get_version(dependency_name): "Value", "VertexAISearch", "VertexAiSearchConfig", + "VertexMultimodalDatasetDestination", + "VertexMultimodalDatasetSource", "VertexRagDataServiceClient", "VertexRagServiceClient", "VertexRagStore", diff --git a/google/cloud/aiplatform_v1/services/dataset_service/async_client.py b/google/cloud/aiplatform_v1/services/dataset_service/async_client.py index f321aca6be..4df4fc103f 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/async_client.py @@ -507,7 +507,6 @@ async def sample_get_dataset(): request (Optional[Union[google.cloud.aiplatform_v1.types.GetDatasetRequest, dict]]): The request object. Request message for [DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset]. - Next ID: 4 name (:class:`str`): Required. The name of the Dataset resource. @@ -1704,7 +1703,6 @@ async def sample_get_dataset_version(): request (Optional[Union[google.cloud.aiplatform_v1.types.GetDatasetVersionRequest, dict]]): The request object. Request message for [DatasetService.GetDatasetVersion][google.cloud.aiplatform.v1.DatasetService.GetDatasetVersion]. - Next ID: 4 name (:class:`str`): Required. The resource name of the Dataset version to delete. 
Format: @@ -2643,10 +2641,7 @@ async def list_annotations( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListAnnotationsAsyncPager: - r"""Lists Annotations belongs to a dataitem - This RPC is only available in InternalDatasetService. It - is only used for exporting conversation data to CCAI - Insights. + r"""Lists Annotations belongs to a dataitem. .. code-block:: python diff --git a/google/cloud/aiplatform_v1/services/dataset_service/client.py b/google/cloud/aiplatform_v1/services/dataset_service/client.py index 9e0e03ff4f..8af5856750 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/client.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/client.py @@ -1091,7 +1091,6 @@ def sample_get_dataset(): request (Union[google.cloud.aiplatform_v1.types.GetDatasetRequest, dict]): The request object. Request message for [DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset]. - Next ID: 4 name (str): Required. The name of the Dataset resource. @@ -2261,7 +2260,6 @@ def sample_get_dataset_version(): request (Union[google.cloud.aiplatform_v1.types.GetDatasetVersionRequest, dict]): The request object. Request message for [DatasetService.GetDatasetVersion][google.cloud.aiplatform.v1.DatasetService.GetDatasetVersion]. - Next ID: 4 name (str): Required. The resource name of the Dataset version to delete. Format: @@ -3177,10 +3175,7 @@ def list_annotations( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListAnnotationsPager: - r"""Lists Annotations belongs to a dataitem - This RPC is only available in InternalDatasetService. It - is only used for exporting conversation data to CCAI - Insights. + r"""Lists Annotations belongs to a dataitem. .. 
code-block:: python diff --git a/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc.py index 8410f7f790..baa6b4d9ba 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc.py @@ -853,10 +853,7 @@ def list_annotations( ]: r"""Return a callable for the list annotations method over gRPC. - Lists Annotations belongs to a dataitem - This RPC is only available in InternalDatasetService. It - is only used for exporting conversation data to CCAI - Insights. + Lists Annotations belongs to a dataitem. Returns: Callable[[~.ListAnnotationsRequest], diff --git a/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc_asyncio.py index 1cdabd6e2f..4ecd09f9ff 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/transports/grpc_asyncio.py @@ -880,10 +880,7 @@ def list_annotations( ]: r"""Return a callable for the list annotations method over gRPC. - Lists Annotations belongs to a dataitem - This RPC is only available in InternalDatasetService. It - is only used for exporting conversation data to CCAI - Insights. + Lists Annotations belongs to a dataitem. Returns: Callable[[~.ListAnnotationsRequest], diff --git a/google/cloud/aiplatform_v1/services/dataset_service/transports/rest.py b/google/cloud/aiplatform_v1/services/dataset_service/transports/rest.py index e084918200..98fb0171c2 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/transports/rest.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/transports/rest.py @@ -4569,7 +4569,6 @@ def __call__( request (~.dataset_service.GetDatasetRequest): The request object. 
Request message for [DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset]. - Next ID: 4 retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4720,7 +4719,6 @@ def __call__( request (~.dataset_service.GetDatasetVersionRequest): The request object. Request message for [DatasetService.GetDatasetVersion][google.cloud.aiplatform.v1.DatasetService.GetDatasetVersion]. - Next ID: 4 retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_asyncio.py b/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_asyncio.py index ddc755b15b..378a0a7ef6 100644 --- a/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_asyncio.py +++ b/google/cloud/aiplatform_v1/services/dataset_service/transports/rest_asyncio.py @@ -2839,7 +2839,6 @@ async def __call__( request (~.dataset_service.GetDatasetRequest): The request object. Request message for [DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset]. - Next ID: 4 retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2999,7 +2998,6 @@ async def __call__( request (~.dataset_service.GetDatasetVersionRequest): The request object. Request message for [DatasetService.GetDatasetVersion][google.cloud.aiplatform.v1.DatasetService.GetDatasetVersion]. - Next ID: 4 retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
diff --git a/google/cloud/aiplatform_v1/services/migration_service/client.py b/google/cloud/aiplatform_v1/services/migration_service/client.py index e626e873a0..8755475c2d 100644 --- a/google/cloud/aiplatform_v1/services/migration_service/client.py +++ b/google/cloud/aiplatform_v1/services/migration_service/client.py @@ -271,40 +271,40 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]: @staticmethod def dataset_path( project: str, + location: str, dataset: str, ) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/datasets/{dataset}".format( + return "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, + location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod def dataset_path( project: str, - location: str, dataset: str, ) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/locations/{location}/datasets/{dataset}".format( + return "projects/{project}/datasets/{dataset}".format( project=project, - location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", - path, - ) + m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) return m.groupdict() if m else {} @staticmethod diff --git a/google/cloud/aiplatform_v1/types/__init__.py b/google/cloud/aiplatform_v1/types/__init__.py index 0bfadf9248..4f2c8b83bd 100644 --- a/google/cloud/aiplatform_v1/types/__init__.py +++ b/google/cloud/aiplatform_v1/types/__init__.py @@ -566,6 +566,8 @@ SharePointSources, SlackSource, 
TFRecordDestination, + VertexMultimodalDatasetDestination, + VertexMultimodalDatasetSource, ) from .job_service import ( CancelBatchPredictionJobRequest, @@ -1665,6 +1667,8 @@ "SharePointSources", "SlackSource", "TFRecordDestination", + "VertexMultimodalDatasetDestination", + "VertexMultimodalDatasetSource", "CancelBatchPredictionJobRequest", "CancelCustomJobRequest", "CancelDataLabelingJobRequest", diff --git a/google/cloud/aiplatform_v1/types/batch_prediction_job.py b/google/cloud/aiplatform_v1/types/batch_prediction_job.py index e1c263e295..8a2d6ac15a 100644 --- a/google/cloud/aiplatform_v1/types/batch_prediction_job.py +++ b/google/cloud/aiplatform_v1/types/batch_prediction_job.py @@ -273,6 +273,11 @@ class InputConfig(proto.Message): additional columns that are not described by the schema, and they will be ignored. + This field is a member of `oneof`_ ``source``. + vertex_multimodal_dataset_source (google.cloud.aiplatform_v1.types.VertexMultimodalDatasetSource): + A Vertex Managed Dataset. Currently, only + datasets of type Multimodal are supported. + This field is a member of `oneof`_ ``source``. instances_format (str): Required. The format in which instances are given, must be @@ -293,6 +298,14 @@ class InputConfig(proto.Message): oneof="source", message=io.BigQuerySource, ) + vertex_multimodal_dataset_source: io.VertexMultimodalDatasetSource = ( + proto.Field( + proto.MESSAGE, + number=4, + oneof="source", + message=io.VertexMultimodalDatasetSource, + ) + ) instances_format: str = proto.Field( proto.STRING, number=1, @@ -489,6 +502,11 @@ class OutputConfig(proto.Message): [google.rpc.Status][google.rpc.Status] represented as a STRUCT, and containing only ``code`` and ``message``. + This field is a member of `oneof`_ ``destination``. + vertex_multimodal_dataset_destination (google.cloud.aiplatform_v1.types.VertexMultimodalDatasetDestination): + The details for a Vertex Multimodal Dataset + that will be created for the output. 
+ This field is a member of `oneof`_ ``destination``. predictions_format (str): Required. The format in which Vertex AI gives the @@ -509,6 +527,14 @@ class OutputConfig(proto.Message): oneof="destination", message=io.BigQueryDestination, ) + vertex_multimodal_dataset_destination: io.VertexMultimodalDatasetDestination = ( + proto.Field( + proto.MESSAGE, + number=6, + oneof="destination", + message=io.VertexMultimodalDatasetDestination, + ) + ) predictions_format: str = proto.Field( proto.STRING, number=1, @@ -537,6 +563,13 @@ class OutputInfo(proto.Message): ``bq://projectId.bqDatasetId`` format, into which the prediction output is written. + This field is a member of `oneof`_ ``output_location``. + vertex_multimodal_dataset_name (str): + Output only. The resource name of the Vertex Managed Dataset + created, into which the prediction output is written. + Format: + ``projects/{project}/locations/{location}/datasets/{dataset}`` + This field is a member of `oneof`_ ``output_location``. bigquery_output_table (str): Output only. The name of the BigQuery table created, in @@ -555,6 +588,11 @@ class OutputInfo(proto.Message): number=2, oneof="output_location", ) + vertex_multimodal_dataset_name: str = proto.Field( + proto.STRING, + number=5, + oneof="output_location", + ) bigquery_output_table: str = proto.Field( proto.STRING, number=4, diff --git a/google/cloud/aiplatform_v1/types/dataset.py b/google/cloud/aiplatform_v1/types/dataset.py index c810b9997b..55c0a49670 100644 --- a/google/cloud/aiplatform_v1/types/dataset.py +++ b/google/cloud/aiplatform_v1/types/dataset.py @@ -43,8 +43,9 @@ class Dataset(proto.Message): Attributes: name (str): - Output only. Identifier. The resource name of - the Dataset. + Output only. Identifier. The resource name of the Dataset. + Format: + ``projects/{project}/locations/{location}/datasets/{dataset}`` display_name (str): Required. The user-defined name of the Dataset. 
The name can be up to 128 characters diff --git a/google/cloud/aiplatform_v1/types/dataset_service.py b/google/cloud/aiplatform_v1/types/dataset_service.py index aa594f5304..858d1ec4e6 100644 --- a/google/cloud/aiplatform_v1/types/dataset_service.py +++ b/google/cloud/aiplatform_v1/types/dataset_service.py @@ -112,7 +112,6 @@ class CreateDatasetOperationMetadata(proto.Message): class GetDatasetRequest(proto.Message): r"""Request message for [DatasetService.GetDataset][google.cloud.aiplatform.v1.DatasetService.GetDataset]. - Next ID: 4 Attributes: name (str): @@ -489,7 +488,6 @@ class DeleteDatasetVersionRequest(proto.Message): class GetDatasetVersionRequest(proto.Message): r"""Request message for [DatasetService.GetDatasetVersion][google.cloud.aiplatform.v1.DatasetService.GetDatasetVersion]. - Next ID: 4 Attributes: name (str): diff --git a/google/cloud/aiplatform_v1/types/dataset_version.py b/google/cloud/aiplatform_v1/types/dataset_version.py index 5a03fe5aa8..22affcb419 100644 --- a/google/cloud/aiplatform_v1/types/dataset_version.py +++ b/google/cloud/aiplatform_v1/types/dataset_version.py @@ -36,8 +36,9 @@ class DatasetVersion(proto.Message): Attributes: name (str): - Output only. Identifier. The resource name of - the DatasetVersion. + Output only. Identifier. The resource name of the + DatasetVersion. Format: + ``projects/{project}/locations/{location}/datasets/{dataset}/datasetVersions/{dataset_version}`` create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp when this DatasetVersion was created. 
diff --git a/google/cloud/aiplatform_v1/types/io.py b/google/cloud/aiplatform_v1/types/io.py index 07d89f0bea..ab5f96aecd 100644 --- a/google/cloud/aiplatform_v1/types/io.py +++ b/google/cloud/aiplatform_v1/types/io.py @@ -32,6 +32,8 @@ "GcsDestination", "BigQuerySource", "BigQueryDestination", + "VertexMultimodalDatasetSource", + "VertexMultimodalDatasetDestination", "CsvDestination", "TFRecordDestination", "ContainerRegistryDestination", @@ -152,6 +154,46 @@ class BigQueryDestination(proto.Message): ) +class VertexMultimodalDatasetSource(proto.Message): + r"""The Vertex Multimodal Dataset for the input content. + + Attributes: + dataset_name (str): + Required. The resource name of the Vertex Dataset. Format: + ``projects/{project}/locations/{location}/datasets/{dataset}`` + """ + + dataset_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class VertexMultimodalDatasetDestination(proto.Message): + r"""The details for a Vertex Multimodal Dataset output. + + Attributes: + bigquery_destination (google.cloud.aiplatform_v1.types.BigQueryDestination): + Optional. The destination of the underlying + BigQuery table that will be created for the + output Multimodal Dataset. If not specified, the + BigQuery table will be created in a default + BigQuery dataset. + display_name (str): + Optional. Display name of the output dataset. + """ + + bigquery_destination: "BigQueryDestination" = proto.Field( + proto.MESSAGE, + number=1, + message="BigQueryDestination", + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + class CsvDestination(proto.Message): r"""The storage details for CSV output content. 
diff --git a/google/cloud/aiplatform_v1beta1/__init__.py b/google/cloud/aiplatform_v1beta1/__init__.py index 759601dc07..cef7b426de 100644 --- a/google/cloud/aiplatform_v1beta1/__init__.py +++ b/google/cloud/aiplatform_v1beta1/__init__.py @@ -292,6 +292,7 @@ from .types.evaluation_service import CometInstance from .types.evaluation_service import CometResult from .types.evaluation_service import CometSpec +from .types.evaluation_service import ComputationBasedMetricSpec from .types.evaluation_service import ContentMap from .types.evaluation_service import CustomOutput from .types.evaluation_service import CustomOutputFormatConfig @@ -318,7 +319,9 @@ from .types.evaluation_service import GroundednessInstance from .types.evaluation_service import GroundednessResult from .types.evaluation_service import GroundednessSpec +from .types.evaluation_service import LLMBasedMetricSpec from .types.evaluation_service import Metric +from .types.evaluation_service import MetricResult from .types.evaluation_service import MetricxInput from .types.evaluation_service import MetricxInstance from .types.evaluation_service import MetricxResult @@ -341,6 +344,7 @@ from .types.evaluation_service import PointwiseMetricInstance from .types.evaluation_service import PointwiseMetricResult from .types.evaluation_service import PointwiseMetricSpec +from .types.evaluation_service import PredefinedMetricSpec from .types.evaluation_service import QuestionAnsweringCorrectnessInput from .types.evaluation_service import QuestionAnsweringCorrectnessInstance from .types.evaluation_service import QuestionAnsweringCorrectnessResult @@ -697,6 +701,8 @@ from .types.io import SharePointSources from .types.io import SlackSource from .types.io import TFRecordDestination +from .types.io import VertexMultimodalDatasetDestination +from .types.io import VertexMultimodalDatasetSource from .types.job_service import CancelBatchPredictionJobRequest from .types.job_service import CancelCustomJobRequest from 
.types.job_service import CancelDataLabelingJobRequest @@ -1560,6 +1566,7 @@ def _get_version(dependency_name): "CometSpec", "CompleteTrialRequest", "CompletionStats", + "ComputationBasedMetricSpec", "ComputeTokensRequest", "ComputeTokensResponse", "ContainerRegistryDestination", @@ -2003,6 +2010,7 @@ def _get_version(dependency_name): "JiraSource", "JobServiceClient", "JobState", + "LLMBasedMetricSpec", "LargeModelReference", "LineageSubgraph", "ListAnnotationsRequest", @@ -2146,6 +2154,7 @@ def _get_version(dependency_name): "MetadataServiceClient", "MetadataStore", "Metric", + "MetricResult", "MetricxInput", "MetricxInstance", "MetricxResult", @@ -2262,6 +2271,7 @@ def _get_version(dependency_name): "PostStartupScriptConfig", "PreTunedModel", "PrebuiltVoiceConfig", + "PredefinedMetricSpec", "PredefinedSplit", "PredictLongRunningMetadata", "PredictLongRunningResponse", @@ -2664,6 +2674,8 @@ def _get_version(dependency_name): "VeoTuningSpec", "VertexAISearch", "VertexAiSearchConfig", + "VertexMultimodalDatasetDestination", + "VertexMultimodalDatasetSource", "VertexRagDataServiceClient", "VertexRagServiceClient", "VertexRagStore", diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py index c69e57edea..0e45ac6915 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py @@ -119,6 +119,8 @@ class DatasetServiceAsyncClient: parse_rag_corpus_path = staticmethod(DatasetServiceClient.parse_rag_corpus_path) saved_query_path = staticmethod(DatasetServiceClient.saved_query_path) parse_saved_query_path = staticmethod(DatasetServiceClient.parse_saved_query_path) + template_path = staticmethod(DatasetServiceClient.template_path) + parse_template_path = staticmethod(DatasetServiceClient.parse_template_path) common_billing_account_path = staticmethod( 
DatasetServiceClient.common_billing_account_path ) diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py index fa2c36195d..694acd755b 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py @@ -469,6 +469,28 @@ def parse_saved_query_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def template_path( + project: str, + location: str, + template: str, + ) -> str: + """Returns a fully-qualified template string.""" + return "projects/{project}/locations/{location}/templates/{template}".format( + project=project, + location=location, + template=template, + ) + + @staticmethod + def parse_template_path(path: str) -> Dict[str, str]: + """Parses a template path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/templates/(?P