# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Code generated by the Google Gen AI SDK generator DO NOT EDIT.

import asyncio
import json
import logging
import time
from typing import Any, Optional, Union
from urllib.parse import urlencode

from google.genai import _api_module
from google.genai import _common
from google.genai import types as genai_types
from google.genai._common import get_value_by_path as getv
from google.genai._common import set_value_by_path as setv

from . import _datasets_utils
from . import types


logger = logging.getLogger("vertexai_genai.datasets")


def _AssembleDatasetParameters_to_vertex(
    from_object: Union[dict[str, Any], object],
    parent_object: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """Converts assemble-dataset parameters to their Vertex wire format.

    ``name`` is routed into the request's ``_url`` section; the remaining
    fields are renamed from snake_case to camelCase. Fields whose value is
    None are omitted from the result.
    """
    to_object: dict[str, Any] = {}
    # (source path, destination path) pairs, applied in declaration order.
    mappings = (
        (["name"], ["_url", "name"]),
        (["gemini_request_read_config"], ["geminiRequestReadConfig"]),
        (["config"], ["config"]),
    )
    for source_path, dest_path in mappings:
        value = getv(from_object, source_path)
        if value is not None:
            setv(to_object, dest_path, value)
    return to_object


def _AssessDatasetParameters_to_vertex(
    from_object: Union[dict[str, Any], object],
    parent_object: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """Converts assess-dataset parameters to their Vertex wire format.

    ``name`` is routed into the request's ``_url`` section; the assessment
    config fields are renamed from snake_case to camelCase. Fields whose
    value is None are omitted from the result.
    """
    to_object: dict[str, Any] = {}
    # (source path, destination path) pairs, applied in declaration order.
    mappings = (
        (["name"], ["_url", "name"]),
        (["gemini_request_read_config"], ["geminiRequestReadConfig"]),
        (
            ["tuning_resource_usage_assessment_config"],
            ["tuningResourceUsageAssessmentConfig"],
        ),
        (
            ["tuning_validation_assessment_config"],
            ["tuningValidationAssessmentConfig"],
        ),
        (
            ["batch_prediction_resource_usage_assessment_config"],
            ["batchPredictionResourceUsageAssessmentConfig"],
        ),
        (
            ["batch_prediction_validation_assessment_config"],
            ["batchPredictionValidationAssessmentConfig"],
        ),
        (["config"], ["config"]),
    )
    for source_path, dest_path in mappings:
        value = getv(from_object, source_path)
        if value is not None:
            setv(to_object, dest_path, value)
    return to_object


def _CreateMultimodalDatasetParameters_to_vertex(
    from_object: Union[dict[str, Any], object],
    parent_object: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """Converts create-dataset parameters to their Vertex wire format.

    All fields land in the request body (none are URL parameters); names are
    renamed from snake_case to camelCase. Fields whose value is None are
    omitted from the result.
    """
    to_object: dict[str, Any] = {}
    # (source path, destination path) pairs, applied in declaration order.
    mappings = (
        (["name"], ["name"]),
        (["display_name"], ["displayName"]),
        (["metadata_schema_uri"], ["metadataSchemaUri"]),
        (["metadata"], ["metadata"]),
        (["description"], ["description"]),
        (["encryption_spec"], ["encryptionSpec"]),
        (["config"], ["config"]),
    )
    for source_path, dest_path in mappings:
        value = getv(from_object, source_path)
        if value is not None:
            setv(to_object, dest_path, value)
    return to_object


def _DeleteMultimodalDatasetRequestParameters_to_vertex(
    from_object: Union[dict[str, Any], object],
    parent_object: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """Converts delete-dataset parameters to their Vertex wire format.

    ``name`` is routed into the request's ``_url`` section. Fields whose
    value is None are omitted from the result.
    """
    to_object: dict[str, Any] = {}
    for source_path, dest_path in (
        (["name"], ["_url", "name"]),
        (["config"], ["config"]),
    ):
        value = getv(from_object, source_path)
        if value is not None:
            setv(to_object, dest_path, value)
    return to_object


def _GetMultimodalDatasetOperationParameters_to_vertex(
    from_object: Union[dict[str, Any], object],
    parent_object: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """Converts get-operation parameters to their Vertex wire format.

    Both identifiers are routed into the request's ``_url`` section. Fields
    whose value is None are omitted from the result.
    """
    to_object: dict[str, Any] = {}
    for source_path, dest_path in (
        (["dataset_id"], ["_url", "dataset_id"]),
        (["operation_id"], ["_url", "operation_id"]),
        (["config"], ["config"]),
    ):
        value = getv(from_object, source_path)
        if value is not None:
            setv(to_object, dest_path, value)
    return to_object


def _GetMultimodalDatasetParameters_to_vertex(
    from_object: Union[dict[str, Any], object],
    parent_object: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """Converts get-dataset parameters to their Vertex wire format.

    ``name`` is routed into the request's ``_url`` section. Fields whose
    value is None are omitted from the result.
    """
    to_object: dict[str, Any] = {}
    for source_path, dest_path in (
        (["name"], ["_url", "name"]),
        (["config"], ["config"]),
    ):
        value = getv(from_object, source_path)
        if value is not None:
            setv(to_object, dest_path, value)
    return to_object


def _ListMultimodalDatasetsConfig_to_vertex(
    from_object: Union[dict[str, Any], object],
    parent_object: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """Converts a list-datasets config into Vertex query parameters.

    NOTE: unlike the other converters, non-None values are written onto
    ``parent_object``'s ``_query`` section (so they become URL query
    parameters of the enclosing request); the returned dict is always empty.
    """
    to_object: dict[str, Any] = {}
    for source_path, dest_path in (
        (["page_size"], ["_query", "pageSize"]),
        (["page_token"], ["_query", "pageToken"]),
        (["filter"], ["_query", "filter"]),
    ):
        value = getv(from_object, source_path)
        if value is not None:
            setv(parent_object, dest_path, value)
    return to_object


def _ListMultimodalDatasetsRequestParameters_to_vertex(
    from_object: Union[dict[str, Any], object],
    parent_object: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """Converts list-datasets request parameters to the Vertex wire format."""
    to_object: dict[str, Any] = {}
    config_value = getv(from_object, ["config"])
    if config_value is not None:
        # The config converter writes its query parameters directly onto
        # to_object's "_query" section and returns an empty dict, which is
        # stored under "config" (and later popped before the request is sent).
        converted_config = _ListMultimodalDatasetsConfig_to_vertex(
            config_value, to_object
        )
        setv(to_object, ["config"], converted_config)
    return to_object


def _UpdateMultimodalDatasetParameters_to_vertex(
    from_object: Union[dict[str, Any], object],
    parent_object: Optional[dict[str, Any]] = None,
) -> dict[str, Any]:
    """Converts update-dataset parameters to their Vertex wire format.

    ``name`` is routed into the request's ``_url`` section; the remaining
    fields are renamed from snake_case to camelCase. Fields whose value is
    None are omitted from the result.
    """
    to_object: dict[str, Any] = {}
    # (source path, destination path) pairs, applied in declaration order.
    mappings = (
        (["name"], ["_url", "name"]),
        (["display_name"], ["displayName"]),
        (["metadata"], ["metadata"]),
        (["description"], ["description"]),
        (["encryption_spec"], ["encryptionSpec"]),
        (["config"], ["config"]),
    )
    for source_path, dest_path in mappings:
        value = getv(from_object, source_path)
        if value is not None:
            setv(to_object, dest_path, value)
    return to_object


class Datasets(_api_module.BaseModule):

    def _assemble_multimodal_dataset(
        self,
        *,
        name: str,
        gemini_request_read_config: Optional[
            types.GeminiRequestReadConfigOrDict
        ] = None,
        config: Optional[types.AssembleDatasetConfigOrDict] = None,
    ) -> types.MultimodalDatasetOperation:
        """Assembles a multimodal dataset resource.

        Sends a POST to ``datasets/{name}:assemble`` and returns the
        resulting long-running operation. Vertex AI clients only.
        """
        parameter_model = types._AssembleDatasetParameters(
            name=name,
            gemini_request_read_config=gemini_request_read_config,
            config=config,
        )

        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")

        request_dict = _AssembleDatasetParameters_to_vertex(parameter_model)
        path_template = "datasets/{name}:assemble"
        url_values = request_dict.get("_url")
        path = path_template.format_map(url_values) if url_values else path_template

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options ride in on the config, when given.
        http_options: Optional[types.HttpOptions] = None
        if parameter_model.config is not None:
            http_options = parameter_model.config.http_options

        request_body = _common.encode_unserializable_types(
            _common.convert_to_dict(request_dict)
        )
        response = self._api_client.request("post", path, request_body, http_options)
        response_dict = json.loads(response.body) if response.body else {}

        result = types.MultimodalDatasetOperation._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )
        self._api_client._verify_response(result)
        return result

    def _assess_multimodal_dataset(
        self,
        *,
        name: str,
        gemini_request_read_config: Optional[
            types.GeminiRequestReadConfigOrDict
        ] = None,
        tuning_resource_usage_assessment_config: Optional[
            types.TuningResourceUsageAssessmentConfigOrDict
        ] = None,
        tuning_validation_assessment_config: Optional[
            types.TuningValidationAssessmentConfigOrDict
        ] = None,
        batch_prediction_resource_usage_assessment_config: Optional[
            types.BatchPredictionResourceUsageAssessmentConfigOrDict
        ] = None,
        batch_prediction_validation_assessment_config: Optional[
            types.BatchPredictionValidationAssessmentConfigOrDict
        ] = None,
        config: Optional[types.AssessDatasetConfigOrDict] = None,
    ) -> types.MultimodalDatasetOperation:
        """Assesses a multimodal dataset resource.

        Sends a POST to ``datasets/{name}:assess`` and returns the resulting
        long-running operation. Vertex AI clients only.
        """
        parameter_model = types._AssessDatasetParameters(
            name=name,
            gemini_request_read_config=gemini_request_read_config,
            tuning_resource_usage_assessment_config=tuning_resource_usage_assessment_config,
            tuning_validation_assessment_config=tuning_validation_assessment_config,
            batch_prediction_resource_usage_assessment_config=batch_prediction_resource_usage_assessment_config,
            batch_prediction_validation_assessment_config=batch_prediction_validation_assessment_config,
            config=config,
        )

        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")

        request_dict = _AssessDatasetParameters_to_vertex(parameter_model)
        path_template = "datasets/{name}:assess"
        url_values = request_dict.get("_url")
        path = path_template.format_map(url_values) if url_values else path_template

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options ride in on the config, when given.
        http_options: Optional[types.HttpOptions] = None
        if parameter_model.config is not None:
            http_options = parameter_model.config.http_options

        request_body = _common.encode_unserializable_types(
            _common.convert_to_dict(request_dict)
        )
        response = self._api_client.request("post", path, request_body, http_options)
        response_dict = json.loads(response.body) if response.body else {}

        result = types.MultimodalDatasetOperation._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )
        self._api_client._verify_response(result)
        return result

    def _create_multimodal_dataset(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
        metadata_schema_uri: Optional[str] = None,
        metadata: Optional[types.SchemaTablesDatasetMetadataOrDict] = None,
        description: Optional[str] = None,
        encryption_spec: Optional[genai_types.EncryptionSpecOrDict] = None,
        config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDatasetOperation:
        """Creates a dataset resource to store multimodal datasets.

        Sends a POST to ``datasets`` and returns the resulting long-running
        operation. Vertex AI clients only.
        """
        parameter_model = types._CreateMultimodalDatasetParameters(
            name=name,
            display_name=display_name,
            metadata_schema_uri=metadata_schema_uri,
            metadata=metadata,
            description=description,
            encryption_spec=encryption_spec,
            config=config,
        )

        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")

        request_dict = _CreateMultimodalDatasetParameters_to_vertex(parameter_model)
        path_template = "datasets"
        url_values = request_dict.get("_url")
        path = path_template.format_map(url_values) if url_values else path_template

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options ride in on the config, when given.
        http_options: Optional[types.HttpOptions] = None
        if parameter_model.config is not None:
            http_options = parameter_model.config.http_options

        request_body = _common.encode_unserializable_types(
            _common.convert_to_dict(request_dict)
        )
        response = self._api_client.request("post", path, request_body, http_options)
        response_dict = json.loads(response.body) if response.body else {}

        result = types.MultimodalDatasetOperation._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )
        self._api_client._verify_response(result)
        return result

    def _delete_multimodal_dataset(
        self, *, name: str, config: Optional[types.VertexBaseConfigOrDict] = None
    ) -> types.MultimodalDatasetOperation:
        """Deletes a multimodal dataset resource.

        Sends a DELETE to ``datasets/{name}`` and returns the resulting
        long-running operation. Vertex AI clients only.
        """
        parameter_model = types._DeleteMultimodalDatasetRequestParameters(
            name=name,
            config=config,
        )

        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")

        request_dict = _DeleteMultimodalDatasetRequestParameters_to_vertex(
            parameter_model
        )
        path_template = "datasets/{name}"
        url_values = request_dict.get("_url")
        path = path_template.format_map(url_values) if url_values else path_template

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options ride in on the config, when given.
        http_options: Optional[types.HttpOptions] = None
        if parameter_model.config is not None:
            http_options = parameter_model.config.http_options

        request_body = _common.encode_unserializable_types(
            _common.convert_to_dict(request_dict)
        )
        response = self._api_client.request("delete", path, request_body, http_options)
        response_dict = json.loads(response.body) if response.body else {}

        result = types.MultimodalDatasetOperation._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )
        self._api_client._verify_response(result)
        return result

    def _get_multimodal_dataset(
        self,
        *,
        name: Optional[str] = None,
        config: Optional[types.VertexBaseConfigOrDict] = None,
    ) -> types.MultimodalDataset:
        """Gets a multimodal dataset resource.

        Sends a GET to ``datasets/{name}`` and returns the dataset.
        Vertex AI clients only.
        """
        parameter_model = types._GetMultimodalDatasetParameters(
            name=name,
            config=config,
        )

        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")

        request_dict = _GetMultimodalDatasetParameters_to_vertex(parameter_model)
        path_template = "datasets/{name}"
        url_values = request_dict.get("_url")
        path = path_template.format_map(url_values) if url_values else path_template

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options ride in on the config, when given.
        http_options: Optional[types.HttpOptions] = None
        if parameter_model.config is not None:
            http_options = parameter_model.config.http_options

        request_body = _common.encode_unserializable_types(
            _common.convert_to_dict(request_dict)
        )
        response = self._api_client.request("get", path, request_body, http_options)
        response_dict = json.loads(response.body) if response.body else {}

        result = types.MultimodalDataset._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )
        self._api_client._verify_response(result)
        return result

    def _get_multimodal_dataset_operation(
        self,
        *,
        dataset_id: Optional[str] = None,
        operation_id: Optional[str] = None,
        config: Optional[types.GetMultimodalDatasetOperationConfigOrDict] = None,
    ) -> types.MultimodalDatasetOperation:
        """Gets the operation from creating a multimodal dataset.

        Sends a GET to ``datasets/{dataset_id}/operations/{operation_id}``
        and returns the operation. Vertex AI clients only.
        """
        parameter_model = types._GetMultimodalDatasetOperationParameters(
            dataset_id=dataset_id,
            operation_id=operation_id,
            config=config,
        )

        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")

        request_dict = _GetMultimodalDatasetOperationParameters_to_vertex(
            parameter_model
        )
        path_template = "datasets/{dataset_id}/operations/{operation_id}"
        url_values = request_dict.get("_url")
        path = path_template.format_map(url_values) if url_values else path_template

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options ride in on the config, when given.
        http_options: Optional[types.HttpOptions] = None
        if parameter_model.config is not None:
            http_options = parameter_model.config.http_options

        request_body = _common.encode_unserializable_types(
            _common.convert_to_dict(request_dict)
        )
        response = self._api_client.request("get", path, request_body, http_options)
        response_dict = json.loads(response.body) if response.body else {}

        result = types.MultimodalDatasetOperation._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )
        self._api_client._verify_response(result)
        return result

    def _list_multimodal_datasets(
        self, *, config: Optional[types.ListMultimodalDatasetsConfigOrDict] = None
    ) -> types.ListMultimodalDatasetsResponse:
        """Lists multimodal datasets.

        Sends a GET to ``datasets`` with any pagination/filter settings from
        ``config`` encoded as URL query parameters. Vertex AI clients only.
        """
        parameter_model = types._ListMultimodalDatasetsRequestParameters(
            config=config,
        )

        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")

        request_dict = _ListMultimodalDatasetsRequestParameters_to_vertex(
            parameter_model
        )
        path_template = "datasets"
        url_values = request_dict.get("_url")
        path = path_template.format_map(url_values) if url_values else path_template

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options ride in on the config, when given.
        http_options: Optional[types.HttpOptions] = None
        if parameter_model.config is not None:
            http_options = parameter_model.config.http_options

        request_body = _common.encode_unserializable_types(
            _common.convert_to_dict(request_dict)
        )
        response = self._api_client.request("get", path, request_body, http_options)
        response_dict = json.loads(response.body) if response.body else {}

        result = types.ListMultimodalDatasetsResponse._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )
        self._api_client._verify_response(result)
        return result

    def _update_multimodal_dataset(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
        metadata: Optional[types.SchemaTablesDatasetMetadataOrDict] = None,
        description: Optional[str] = None,
        encryption_spec: Optional[genai_types.EncryptionSpecOrDict] = None,
        config: Optional[types.UpdateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDataset:
        """Updates a multimodal dataset resource.

        Sends a PATCH to ``datasets/{name}`` and returns the updated dataset.
        Vertex AI clients only.
        """
        parameter_model = types._UpdateMultimodalDatasetParameters(
            name=name,
            display_name=display_name,
            metadata=metadata,
            description=description,
            encryption_spec=encryption_spec,
            config=config,
        )

        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")

        request_dict = _UpdateMultimodalDatasetParameters_to_vertex(parameter_model)
        path_template = "datasets/{name}"
        url_values = request_dict.get("_url")
        path = path_template.format_map(url_values) if url_values else path_template

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options ride in on the config, when given.
        http_options: Optional[types.HttpOptions] = None
        if parameter_model.config is not None:
            http_options = parameter_model.config.http_options

        request_body = _common.encode_unserializable_types(
            _common.convert_to_dict(request_dict)
        )
        response = self._api_client.request("patch", path, request_body, http_options)
        response_dict = json.loads(response.body) if response.body else {}

        result = types.MultimodalDataset._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )
        self._api_client._verify_response(result)
        return result

    def _wait_for_operation(
        self,
        operation: types.MultimodalDatasetOperation,
        timeout_seconds: int,
    ) -> dict[str, Any]:
        """Waits for a multimodal or assemble dataset operation to complete.

        Polls the operation with exponential backoff, starting at 5 seconds
        and doubling up to a 60-second cap, until it reports done or the
        timeout elapses.

        Args:
          operation: The multimodal or assemble dataset operation to wait for.
          timeout_seconds: The maximum time in seconds to wait for the operation
          to complete.

        Returns:
            A dict containing the operation response.

        Raises:
          TimeoutError: If the operation does not complete within the timeout.
          ValueError: If the operation name is empty, the operation reports an
            error, or the operation completes without a response.
        """
        response_operation_name = operation.name
        if response_operation_name is None:
            raise ValueError("Dataset operation name is empty.")
        # Operation names look like ".../datasets/{dataset_id}/operations/{operation_id}".
        dataset_id = response_operation_name.split("/datasets/")[1].split("/")[0]
        operation_id = response_operation_name.split("/")[-1]

        start_time = time.time()
        sleep_duration_seconds = 5
        wait_multiplier = 2
        max_wait_time_seconds = 60

        while (time.time() - start_time) < timeout_seconds:
            operation = self._get_multimodal_dataset_operation(
                dataset_id=dataset_id,
                operation_id=operation_id,
            )
            if operation.done:
                break
            # Never sleep past the caller's deadline.
            remaining_seconds = timeout_seconds - (time.time() - start_time)
            time.sleep(min(sleep_duration_seconds, max(remaining_seconds, 0)))
            sleep_duration_seconds = min(
                sleep_duration_seconds * wait_multiplier, max_wait_time_seconds
            )
        else:
            raise TimeoutError(
                "The operation did not complete within the"
                f" specified timeout of {timeout_seconds} seconds."
            )
        # Surface the server-reported error first: a failed operation typically
        # carries an error and no response, and checking response first would
        # mask the real error behind a generic "response is None" failure.
        if getattr(operation, "error", None) is not None:
            raise ValueError(f"Error running the operation {operation.error}")
        if operation.response is None:
            logger.error(f"Error running the operation {operation.response}.")
            raise ValueError(f"Error running the operation {operation.response}.")
        return operation.response

    def create_from_bigquery(
        self,
        *,
        multimodal_dataset: types.MultimodalDatasetOrDict,
        config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDataset:
        """Creates a multimodal dataset from a BigQuery table.

        Validates that the dataset's metadata carries a BigQuery source URI,
        normalizes the URI to the ``bq://`` scheme (mutating the passed-in
        dataset object in place), issues the create request, and blocks until
        the resulting operation completes.

        Args:
          multimodal_dataset:
            Required. A representation of a multimodal dataset.
          config:
            Optional. A configuration for creating the multimodal dataset. If not
            provided, the default configuration will be used.

        Returns:
          A types.MultimodalDataset object representing a multimodal dataset.
        """
        if isinstance(multimodal_dataset, dict):
            multimodal_dataset = types.MultimodalDataset(**multimodal_dataset)

        metadata = getattr(multimodal_dataset, "metadata", None)
        if metadata is None:
            raise ValueError("Multimodal dataset metadata is required.")
        input_config = getattr(metadata, "input_config", None)
        if input_config is None:
            raise ValueError("Multimodal dataset input config is required.")
        bigquery_source = getattr(input_config, "bigquery_source", None)
        if bigquery_source is None:
            raise ValueError(
                "Multimodal dataset input config bigquery source is required."
            )
        if getattr(bigquery_source, "uri", None) is None:
            raise ValueError(
                "Multimodal dataset input config bigquery source uri is required."
            )
        if not bigquery_source.uri.startswith("bq://"):
            bigquery_source.uri = f"bq://{bigquery_source.uri}"

        if isinstance(config, dict):
            config = types.CreateMultimodalDatasetConfig(**config)
        elif not config:
            config = types.CreateMultimodalDatasetConfig()

        dataset_operation = self._create_multimodal_dataset(
            config=config,
            display_name=multimodal_dataset.display_name,
            metadata_schema_uri=_datasets_utils.METADATA_SCHEMA_URI,
            metadata=multimodal_dataset.metadata,
        )
        operation_response = self._wait_for_operation(
            operation=dataset_operation,
            timeout_seconds=config.timeout,
        )
        return _datasets_utils.create_from_response(
            types.MultimodalDataset, operation_response
        )

    def update_multimodal_dataset(
        self,
        *,
        multimodal_dataset: types.MultimodalDatasetOrDict,
        config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDataset:
        """Updates a multimodal dataset.

        Updatable fields include:
        - display_name
        - description

        Args:
          multimodal_dataset:
            Required. A representation of a multimodal dataset.
          config:
            Optional. A configuration for updating the multimodal dataset. If not
            provided, the default configuration will be used.

        Returns:
          A types.MultimodalDataset object representing the retrieved multimodal
          dataset.
        """
        if isinstance(multimodal_dataset, dict):
            multimodal_dataset = types.MultimodalDataset(**multimodal_dataset)

        # Walk down to the BigQuery source URI, failing fast with a
        # field-specific message at the first missing link in the chain.
        metadata = getattr(multimodal_dataset, "metadata", None)
        if metadata is None:
            raise ValueError("Multimodal dataset metadata is required.")
        input_config = getattr(metadata, "input_config", None)
        if input_config is None:
            raise ValueError("Multimodal dataset input config is required.")
        bigquery_source = getattr(input_config, "bigquery_source", None)
        if bigquery_source is None:
            raise ValueError(
                "Multimodal dataset input config bigquery source is required."
            )
        uri = getattr(bigquery_source, "uri", None)
        if uri is None:
            raise ValueError(
                "Multimodal dataset input config bigquery source uri is required."
            )
        # Normalize the URI to the "bq://" scheme in place.
        if not uri.startswith("bq://"):
            bigquery_source.uri = f"bq://{uri}"

        if isinstance(config, dict):
            config = types.CreateMultimodalDatasetConfig(**config)
        else:
            config = config or types.CreateMultimodalDatasetConfig()

        return self._update_multimodal_dataset(
            config=config,
            name=multimodal_dataset.name,
            display_name=multimodal_dataset.display_name,
            description=multimodal_dataset.description,
            metadata=multimodal_dataset.metadata,
        )

    def get_multimodal_dataset(
        self,
        *,
        name: str,
        config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDataset:
        """Gets a multimodal dataset.

        Args:
          name:
            Required. name of a multimodal dataset.
          config:
            Optional. A configuration for getting the multimodal dataset. If not
            provided, the default configuration will be used.

        Returns:
          A types.MultimodalDataset object representing the retrieved multimodal
          dataset.
        """
        # Coerce a dict config into the typed model, or fall back to defaults.
        if isinstance(config, dict):
            resolved_config = types.CreateMultimodalDatasetConfig(**config)
        else:
            resolved_config = config or types.CreateMultimodalDatasetConfig()
        return self._get_multimodal_dataset(config=resolved_config, name=name)

    def delete_multimodal_dataset(
        self,
        *,
        name: str,
        config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDatasetOperation:
        """Deletes a multimodal dataset.

        Args:
          name:
            Required. name of a multimodal dataset.
          config:
            Optional. A configuration for deleting the multimodal dataset. If not
            provided, the default configuration will be used.

        Returns:
          A types.MultimodalDatasetOperation object representing the delete
          multimodal dataset operation.
        """
        # Coerce a dict config into the typed model, or fall back to defaults.
        if isinstance(config, dict):
            resolved_config = types.CreateMultimodalDatasetConfig(**config)
        else:
            resolved_config = config or types.CreateMultimodalDatasetConfig()
        return self._delete_multimodal_dataset(config=resolved_config, name=name)

    def assemble(
        self,
        *,
        name: str,
        template_config: Optional[types.GeminiTemplateConfigOrDict] = None,
        config: Optional[types.AssembleDatasetConfigOrDict] = None,
    ) -> str:
        """Assemble the dataset into a BigQuery table.

        Waits for the assemble operation to complete before returning.

        Args:
          name:
            Required. The name of the dataset to assemble. The name should be in
            the format of "projects/{project}/locations/{location}/datasets/{dataset}".
          template_config:
            Optional. The template config to use to assemble the dataset. If
            not provided, the template config attached to the dataset will be
            used.
          config:
            Optional. A configuration for assembling the dataset. If not
            provided, the default configuration will be used.

        Returns:
            The URI of the bigquery table of the assembled dataset.
        """
        # Coerce a dict config into the typed model, or fall back to defaults.
        if isinstance(config, dict):
            assemble_config = types.AssembleDatasetConfig(**config)
        else:
            assemble_config = config or types.AssembleDatasetConfig()

        # Kick off the long-running assemble operation, then block on it.
        operation = self._assemble_multimodal_dataset(
            name=name,
            gemini_request_read_config={"template_config": template_config},
            config=assemble_config,
        )
        response = self._wait_for_operation(
            operation=operation,
            timeout_seconds=assemble_config.timeout,
        )
        return response["bigqueryDestination"]

    def assess_tuning_resources(
        self,
        *,
        dataset_name: str,
        model_name: str,
        template_config: Optional[types.GeminiTemplateConfigOrDict] = None,
        config: Optional[types.AssessDatasetConfigOrDict] = None,
    ) -> types.TuningResourceUsageAssessmentResult:
        """Assess the tuning resources required for a given model.

        Args:
          dataset_name:
            Required. The name of the dataset to assess the tuning resources
            for.
          model_name:
            Required. The name of the model to assess the tuning resources
            for.
          template_config:
            Optional. The template config used to assemble the dataset
            before assessing the tuning resources. If not provided, the
            template config attached to the dataset will be used. Required
            if no template config is attached to the dataset.
          config:
            Optional. A configuration for assessing the tuning resources. If not
            provided, the default configuration will be used.

        Returns:
          A types.TuningResourceUsageAssessmentResult object representing the
          tuning resource usage assessment result.
        """
        # Coerce a dict config into the typed model, or fall back to defaults.
        if isinstance(config, dict):
            assess_config = types.AssessDatasetConfig(**config)
        else:
            assess_config = config or types.AssessDatasetConfig()

        usage_config = types.TuningResourceUsageAssessmentConfig(
            model_name=model_name
        )
        read_config = types.GeminiRequestReadConfig(
            template_config=template_config,
        )
        # Kick off the long-running assess operation, then block on it.
        operation = self._assess_multimodal_dataset(
            name=dataset_name,
            tuning_resource_usage_assessment_config=usage_config,
            gemini_request_read_config=read_config,
            config=assess_config,
        )
        response = self._wait_for_operation(
            operation=operation,
            timeout_seconds=assess_config.timeout,
        )
        return _datasets_utils.create_from_response(
            types.TuningResourceUsageAssessmentResult,
            response["tuningResourceUsageAssessmentResult"],
        )


class AsyncDatasets(_api_module.BaseModule):

    async def _assemble_multimodal_dataset(
        self,
        *,
        name: str,
        gemini_request_read_config: Optional[
            types.GeminiRequestReadConfigOrDict
        ] = None,
        config: Optional[types.AssembleDatasetConfigOrDict] = None,
    ) -> types.MultimodalDatasetOperation:
        """Assembles a multimodal dataset resource.

        Issues a POST to the Vertex AI ``datasets/{name}:assemble`` endpoint
        and returns the resulting long-running operation.

        Raises:
          ValueError: If the client is not a Vertex AI client.
        """

        parameter_model = types._AssembleDatasetParameters(
            name=name,
            gemini_request_read_config=gemini_request_read_config,
            config=config,
        )

        request_url_dict: Optional[dict[str, str]]
        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")
        else:
            request_dict = _AssembleDatasetParameters_to_vertex(parameter_model)
            # "_url" holds path-template substitutions produced by the converter.
            request_url_dict = request_dict.get("_url")
            if request_url_dict:
                path = "datasets/{name}:assemble".format_map(request_url_dict)
            else:
                path = "datasets/{name}:assemble"

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options (if any) override the client defaults.
        http_options: Optional[types.HttpOptions] = None
        if (
            parameter_model.config is not None
            and parameter_model.config.http_options is not None
        ):
            http_options = parameter_model.config.http_options

        request_dict = _common.convert_to_dict(request_dict)
        request_dict = _common.encode_unserializable_types(request_dict)

        response = await self._api_client.async_request(
            "post", path, request_dict, http_options
        )

        # An empty response body is treated as an empty dict.
        response_dict = {} if not response.body else json.loads(response.body)

        return_value = types.MultimodalDatasetOperation._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )

        self._api_client._verify_response(return_value)
        return return_value

    async def _assess_multimodal_dataset(
        self,
        *,
        name: str,
        gemini_request_read_config: Optional[
            types.GeminiRequestReadConfigOrDict
        ] = None,
        tuning_resource_usage_assessment_config: Optional[
            types.TuningResourceUsageAssessmentConfigOrDict
        ] = None,
        tuning_validation_assessment_config: Optional[
            types.TuningValidationAssessmentConfigOrDict
        ] = None,
        batch_prediction_resource_usage_assessment_config: Optional[
            types.BatchPredictionResourceUsageAssessmentConfigOrDict
        ] = None,
        batch_prediction_validation_assessment_config: Optional[
            types.BatchPredictionValidationAssessmentConfigOrDict
        ] = None,
        config: Optional[types.AssessDatasetConfigOrDict] = None,
    ) -> types.MultimodalDatasetOperation:
        """Assesses a multimodal dataset resource.

        Issues a POST to the Vertex AI ``datasets/{name}:assess`` endpoint
        with whichever assessment configs were provided and returns the
        resulting long-running operation.

        Raises:
          ValueError: If the client is not a Vertex AI client.
        """

        parameter_model = types._AssessDatasetParameters(
            name=name,
            gemini_request_read_config=gemini_request_read_config,
            tuning_resource_usage_assessment_config=tuning_resource_usage_assessment_config,
            tuning_validation_assessment_config=tuning_validation_assessment_config,
            batch_prediction_resource_usage_assessment_config=batch_prediction_resource_usage_assessment_config,
            batch_prediction_validation_assessment_config=batch_prediction_validation_assessment_config,
            config=config,
        )

        request_url_dict: Optional[dict[str, str]]
        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")
        else:
            request_dict = _AssessDatasetParameters_to_vertex(parameter_model)
            # "_url" holds path-template substitutions produced by the converter.
            request_url_dict = request_dict.get("_url")
            if request_url_dict:
                path = "datasets/{name}:assess".format_map(request_url_dict)
            else:
                path = "datasets/{name}:assess"

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options (if any) override the client defaults.
        http_options: Optional[types.HttpOptions] = None
        if (
            parameter_model.config is not None
            and parameter_model.config.http_options is not None
        ):
            http_options = parameter_model.config.http_options

        request_dict = _common.convert_to_dict(request_dict)
        request_dict = _common.encode_unserializable_types(request_dict)

        response = await self._api_client.async_request(
            "post", path, request_dict, http_options
        )

        # An empty response body is treated as an empty dict.
        response_dict = {} if not response.body else json.loads(response.body)

        return_value = types.MultimodalDatasetOperation._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )

        self._api_client._verify_response(return_value)
        return return_value

    async def _create_multimodal_dataset(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
        metadata_schema_uri: Optional[str] = None,
        metadata: Optional[types.SchemaTablesDatasetMetadataOrDict] = None,
        description: Optional[str] = None,
        encryption_spec: Optional[genai_types.EncryptionSpecOrDict] = None,
        config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDatasetOperation:
        """Creates a dataset resource to store multimodal datasets.

        Issues a POST to the Vertex AI ``datasets`` endpoint and returns the
        resulting long-running operation.

        Raises:
          ValueError: If the client is not a Vertex AI client.
        """

        parameter_model = types._CreateMultimodalDatasetParameters(
            name=name,
            display_name=display_name,
            metadata_schema_uri=metadata_schema_uri,
            metadata=metadata,
            description=description,
            encryption_spec=encryption_spec,
            config=config,
        )

        request_url_dict: Optional[dict[str, str]]
        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")
        else:
            request_dict = _CreateMultimodalDatasetParameters_to_vertex(parameter_model)
            # "_url" holds path-template substitutions produced by the converter.
            request_url_dict = request_dict.get("_url")
            if request_url_dict:
                path = "datasets".format_map(request_url_dict)
            else:
                path = "datasets"

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options (if any) override the client defaults.
        http_options: Optional[types.HttpOptions] = None
        if (
            parameter_model.config is not None
            and parameter_model.config.http_options is not None
        ):
            http_options = parameter_model.config.http_options

        request_dict = _common.convert_to_dict(request_dict)
        request_dict = _common.encode_unserializable_types(request_dict)

        response = await self._api_client.async_request(
            "post", path, request_dict, http_options
        )

        # An empty response body is treated as an empty dict.
        response_dict = {} if not response.body else json.loads(response.body)

        return_value = types.MultimodalDatasetOperation._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )

        self._api_client._verify_response(return_value)
        return return_value

    async def _delete_multimodal_dataset(
        self, *, name: str, config: Optional[types.VertexBaseConfigOrDict] = None
    ) -> types.MultimodalDatasetOperation:
        """Deletes a multimodal dataset resource.

        Issues a DELETE to the Vertex AI ``datasets/{name}`` endpoint and
        returns the resulting long-running operation.

        Raises:
          ValueError: If the client is not a Vertex AI client.
        """

        parameter_model = types._DeleteMultimodalDatasetRequestParameters(
            name=name,
            config=config,
        )

        request_url_dict: Optional[dict[str, str]]
        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")
        else:
            request_dict = _DeleteMultimodalDatasetRequestParameters_to_vertex(
                parameter_model
            )
            # "_url" holds path-template substitutions produced by the converter.
            request_url_dict = request_dict.get("_url")
            if request_url_dict:
                path = "datasets/{name}".format_map(request_url_dict)
            else:
                path = "datasets/{name}"

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options (if any) override the client defaults.
        http_options: Optional[types.HttpOptions] = None
        if (
            parameter_model.config is not None
            and parameter_model.config.http_options is not None
        ):
            http_options = parameter_model.config.http_options

        request_dict = _common.convert_to_dict(request_dict)
        request_dict = _common.encode_unserializable_types(request_dict)

        response = await self._api_client.async_request(
            "delete", path, request_dict, http_options
        )

        # An empty response body is treated as an empty dict.
        response_dict = {} if not response.body else json.loads(response.body)

        return_value = types.MultimodalDatasetOperation._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )

        self._api_client._verify_response(return_value)
        return return_value

    async def _get_multimodal_dataset(
        self,
        *,
        name: Optional[str] = None,
        config: Optional[types.VertexBaseConfigOrDict] = None,
    ) -> types.MultimodalDataset:
        """Gets a multimodal dataset resource.

        Issues a GET to the Vertex AI ``datasets/{name}`` endpoint and parses
        the response into a ``types.MultimodalDataset``.

        Raises:
          ValueError: If the client is not a Vertex AI client.
        """

        parameter_model = types._GetMultimodalDatasetParameters(
            name=name,
            config=config,
        )

        request_url_dict: Optional[dict[str, str]]
        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")
        else:
            request_dict = _GetMultimodalDatasetParameters_to_vertex(parameter_model)
            # "_url" holds path-template substitutions produced by the converter.
            request_url_dict = request_dict.get("_url")
            if request_url_dict:
                path = "datasets/{name}".format_map(request_url_dict)
            else:
                path = "datasets/{name}"

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options (if any) override the client defaults.
        http_options: Optional[types.HttpOptions] = None
        if (
            parameter_model.config is not None
            and parameter_model.config.http_options is not None
        ):
            http_options = parameter_model.config.http_options

        request_dict = _common.convert_to_dict(request_dict)
        request_dict = _common.encode_unserializable_types(request_dict)

        response = await self._api_client.async_request(
            "get", path, request_dict, http_options
        )

        # An empty response body is treated as an empty dict.
        response_dict = {} if not response.body else json.loads(response.body)

        return_value = types.MultimodalDataset._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )

        self._api_client._verify_response(return_value)
        return return_value

    async def _get_multimodal_dataset_operation(
        self,
        *,
        dataset_id: Optional[str] = None,
        operation_id: Optional[str] = None,
        config: Optional[types.GetMultimodalDatasetOperationConfigOrDict] = None,
    ) -> types.MultimodalDatasetOperation:
        """Gets the operation from creating a multimodal dataset.

        Issues a GET to the Vertex AI
        ``datasets/{dataset_id}/operations/{operation_id}`` endpoint to poll
        the current state of a long-running dataset operation.

        Raises:
          ValueError: If the client is not a Vertex AI client.
        """

        parameter_model = types._GetMultimodalDatasetOperationParameters(
            dataset_id=dataset_id,
            operation_id=operation_id,
            config=config,
        )

        request_url_dict: Optional[dict[str, str]]
        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")
        else:
            request_dict = _GetMultimodalDatasetOperationParameters_to_vertex(
                parameter_model
            )
            # "_url" holds path-template substitutions produced by the converter.
            request_url_dict = request_dict.get("_url")
            if request_url_dict:
                path = "datasets/{dataset_id}/operations/{operation_id}".format_map(
                    request_url_dict
                )
            else:
                path = "datasets/{dataset_id}/operations/{operation_id}"

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options (if any) override the client defaults.
        http_options: Optional[types.HttpOptions] = None
        if (
            parameter_model.config is not None
            and parameter_model.config.http_options is not None
        ):
            http_options = parameter_model.config.http_options

        request_dict = _common.convert_to_dict(request_dict)
        request_dict = _common.encode_unserializable_types(request_dict)

        response = await self._api_client.async_request(
            "get", path, request_dict, http_options
        )

        # An empty response body is treated as an empty dict.
        response_dict = {} if not response.body else json.loads(response.body)

        return_value = types.MultimodalDatasetOperation._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )

        self._api_client._verify_response(return_value)
        return return_value

    async def _list_multimodal_datasets(
        self, *, config: Optional[types.ListMultimodalDatasetsConfigOrDict] = None
    ) -> types.ListMultimodalDatasetsResponse:
        """Lists multimodal datasets.

        Issues a GET to the Vertex AI ``datasets`` endpoint and parses the
        response into a ``types.ListMultimodalDatasetsResponse``.

        Raises:
          ValueError: If the client is not a Vertex AI client.
        """

        parameter_model = types._ListMultimodalDatasetsRequestParameters(
            config=config,
        )

        request_url_dict: Optional[dict[str, str]]
        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")
        else:
            request_dict = _ListMultimodalDatasetsRequestParameters_to_vertex(
                parameter_model
            )
            # "_url" holds path-template substitutions produced by the converter.
            request_url_dict = request_dict.get("_url")
            if request_url_dict:
                path = "datasets".format_map(request_url_dict)
            else:
                path = "datasets"

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options (if any) override the client defaults.
        http_options: Optional[types.HttpOptions] = None
        if (
            parameter_model.config is not None
            and parameter_model.config.http_options is not None
        ):
            http_options = parameter_model.config.http_options

        request_dict = _common.convert_to_dict(request_dict)
        request_dict = _common.encode_unserializable_types(request_dict)

        response = await self._api_client.async_request(
            "get", path, request_dict, http_options
        )

        # An empty response body is treated as an empty dict.
        response_dict = {} if not response.body else json.loads(response.body)

        return_value = types.ListMultimodalDatasetsResponse._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )

        self._api_client._verify_response(return_value)
        return return_value

    async def _update_multimodal_dataset(
        self,
        *,
        name: Optional[str] = None,
        display_name: Optional[str] = None,
        metadata: Optional[types.SchemaTablesDatasetMetadataOrDict] = None,
        description: Optional[str] = None,
        encryption_spec: Optional[genai_types.EncryptionSpecOrDict] = None,
        config: Optional[types.UpdateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDataset:
        """Updates a multimodal dataset resource.

        Issues a PATCH to the Vertex AI ``datasets/{name}`` endpoint and
        parses the response into a ``types.MultimodalDataset``.

        Raises:
          ValueError: If the client is not a Vertex AI client.
        """

        parameter_model = types._UpdateMultimodalDatasetParameters(
            name=name,
            display_name=display_name,
            metadata=metadata,
            description=description,
            encryption_spec=encryption_spec,
            config=config,
        )

        request_url_dict: Optional[dict[str, str]]
        if not self._api_client.vertexai:
            raise ValueError("This method is only supported in the Vertex AI client.")
        else:
            request_dict = _UpdateMultimodalDatasetParameters_to_vertex(parameter_model)
            # "_url" holds path-template substitutions produced by the converter.
            request_url_dict = request_dict.get("_url")
            if request_url_dict:
                path = "datasets/{name}".format_map(request_url_dict)
            else:
                path = "datasets/{name}"

        query_params = request_dict.get("_query")
        if query_params:
            path = f"{path}?{urlencode(query_params)}"
        # TODO: remove the hack that pops config.
        request_dict.pop("config", None)

        # Per-request HTTP options (if any) override the client defaults.
        http_options: Optional[types.HttpOptions] = None
        if (
            parameter_model.config is not None
            and parameter_model.config.http_options is not None
        ):
            http_options = parameter_model.config.http_options

        request_dict = _common.convert_to_dict(request_dict)
        request_dict = _common.encode_unserializable_types(request_dict)

        response = await self._api_client.async_request(
            "patch", path, request_dict, http_options
        )

        # An empty response body is treated as an empty dict.
        response_dict = {} if not response.body else json.loads(response.body)

        return_value = types.MultimodalDataset._from_response(
            response=response_dict, kwargs=parameter_model.model_dump()
        )

        self._api_client._verify_response(return_value)
        return return_value

    async def _wait_for_operation(
        self,
        operation: types.MultimodalDatasetOperation,
        timeout_seconds: int,
    ) -> dict[str, Any]:
        """Waits for a multimodal dataset operation to complete.

        Polls the operation with exponential backoff (starting at 5 seconds,
        doubling up to a 60-second cap) until it reports done or the timeout
        elapses.

        Args:
          operation: The multimodal dataset operation to wait for.
          timeout_seconds: The maximum time in seconds to wait for the operation
          to complete.

        Returns:
            A dict containing the operation response.

        Raises:
          TimeoutError: If the operation does not complete within the timeout.
          ValueError: If the operation fails or completes without a response.
        """
        response_operation_name = operation.name
        if response_operation_name is None:
            raise ValueError("Dataset operation name is empty.")
        # Operation names have the form
        # ".../datasets/{dataset_id}/operations/{operation_id}".
        dataset_id = response_operation_name.split("/datasets/")[1].split("/")[0]
        operation_id = response_operation_name.split("/")[-1]

        start_time = time.time()
        sleep_duration_seconds = 5
        wait_multiplier = 2
        max_wait_time_seconds = 60

        while (time.time() - start_time) < timeout_seconds:
            operation = await self._get_multimodal_dataset_operation(
                dataset_id=dataset_id,
                operation_id=operation_id,
            )
            if operation.done:
                break
            await asyncio.sleep(sleep_duration_seconds)
            sleep_duration_seconds = min(
                sleep_duration_seconds * wait_multiplier, max_wait_time_seconds
            )
        else:
            # The loop ran to exhaustion without hitting `break`.
            raise TimeoutError(
                "The operation did not complete within the"
                f" specified timeout of {timeout_seconds} seconds."
            )
        # Check for an explicit operation error BEFORE the missing-response
        # case: a failed operation carries `error` but no `response`, so
        # checking `response` first would mask the actual failure message
        # behind an uninformative "Error running the operation None.".
        if hasattr(operation, "error") and operation.error is not None:
            raise ValueError(f"Error running the operation {operation.error}")
        if not operation or operation.response is None:
            logger.error("Error running the operation %s.", operation.response)
            raise ValueError(f"Error running the operation {operation.response}.")
        return operation.response

    async def create_from_bigquery(
        self,
        *,
        multimodal_dataset: types.MultimodalDatasetOrDict,
        config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDataset:
        """Creates a multimodal dataset from a BigQuery table.

        Args:
          multimodal_dataset:
            Required. A representation of a multimodal dataset.
          config:
            Optional. A configuration for creating the multimodal dataset. If not
            provided, the default configuration will be used.

        Returns:
          A types.MultimodalDataset object representing a multimodal dataset.
        """
        if isinstance(multimodal_dataset, dict):
            multimodal_dataset = types.MultimodalDataset(**multimodal_dataset)

        # Walk down to the BigQuery source URI, failing fast with a
        # field-specific message at the first missing link in the chain.
        metadata = getattr(multimodal_dataset, "metadata", None)
        if metadata is None:
            raise ValueError("Multimodal dataset metadata is required.")
        input_config = getattr(metadata, "input_config", None)
        if input_config is None:
            raise ValueError("Multimodal dataset input config is required.")
        bigquery_source = getattr(input_config, "bigquery_source", None)
        if bigquery_source is None:
            raise ValueError(
                "Multimodal dataset input config bigquery source is required."
            )
        uri = getattr(bigquery_source, "uri", None)
        if uri is None:
            raise ValueError(
                "Multimodal dataset input config bigquery source uri is required."
            )
        # Normalize the URI to the "bq://" scheme in place.
        if not uri.startswith("bq://"):
            bigquery_source.uri = f"bq://{uri}"

        if isinstance(config, dict):
            config = types.CreateMultimodalDatasetConfig(**config)
        else:
            config = config or types.CreateMultimodalDatasetConfig()

        # Create the dataset, then block until the operation completes.
        multimodal_dataset_operation = await self._create_multimodal_dataset(
            config=config,
            display_name=multimodal_dataset.display_name,
            metadata_schema_uri=_datasets_utils.METADATA_SCHEMA_URI,
            metadata=multimodal_dataset.metadata,
        )
        response = await self._wait_for_operation(
            operation=multimodal_dataset_operation,
            timeout_seconds=config.timeout,
        )
        return _datasets_utils.create_from_response(types.MultimodalDataset, response)

    async def update_multimodal_dataset(
        self,
        *,
        multimodal_dataset: types.MultimodalDatasetOrDict,
        config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDataset:
        """Updates a multimodal dataset.

        Args:
          multimodal_dataset:
            Required. A representation of a multimodal dataset.
          config:
            Optional. A configuration for updating the multimodal dataset. If not
            provided, the default configuration will be used.

        Returns:
          A types.MultimodalDataset object representing the updated multimodal
          dataset.
        """
        if isinstance(multimodal_dataset, dict):
            multimodal_dataset = types.MultimodalDataset(**multimodal_dataset)

        # Walk down to the BigQuery source URI, failing fast with a
        # field-specific message at the first missing link in the chain.
        metadata = getattr(multimodal_dataset, "metadata", None)
        if metadata is None:
            raise ValueError("Multimodal dataset metadata is required.")
        input_config = getattr(metadata, "input_config", None)
        if input_config is None:
            raise ValueError("Multimodal dataset input config is required.")
        bigquery_source = getattr(input_config, "bigquery_source", None)
        if bigquery_source is None:
            raise ValueError(
                "Multimodal dataset input config bigquery source is required."
            )
        uri = getattr(bigquery_source, "uri", None)
        if uri is None:
            raise ValueError(
                "Multimodal dataset input config bigquery source uri is required."
            )
        # Normalize the URI to the "bq://" scheme in place.
        if not uri.startswith("bq://"):
            bigquery_source.uri = f"bq://{uri}"

        if isinstance(config, dict):
            config = types.CreateMultimodalDatasetConfig(**config)
        else:
            config = config or types.CreateMultimodalDatasetConfig()

        return await self._update_multimodal_dataset(
            config=config,
            name=multimodal_dataset.name,
            display_name=multimodal_dataset.display_name,
            description=multimodal_dataset.description,
            metadata=multimodal_dataset.metadata,
        )

    async def get_multimodal_dataset(
        self,
        *,
        name: str,
        config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDataset:
        """Gets a multimodal dataset.

        Args:
          name:
            Required. name of a multimodal dataset.
          config:
            Optional. A configuration for getting the multimodal dataset. If not
            provided, the default configuration will be used.

        Returns:
          A types.MultimodalDataset object representing the requested multimodal
          dataset.
        """
        # Coerce the config into a concrete object, falling back to defaults.
        if isinstance(config, dict):
            resolved_config = types.CreateMultimodalDatasetConfig(**config)
        else:
            resolved_config = config or types.CreateMultimodalDatasetConfig()

        return await self._get_multimodal_dataset(config=resolved_config, name=name)

    async def delete_multimodal_dataset(
        self,
        *,
        name: str,
        config: Optional[types.CreateMultimodalDatasetConfigOrDict] = None,
    ) -> types.MultimodalDatasetOperation:
        """Deletes a multimodal dataset.

        Args:
          name:
            Required. name of a multimodal dataset.
          config:
            Optional. A configuration for deleting the multimodal dataset. If not
            provided, the default configuration will be used.

        Returns:
          A types.MultimodalDatasetOperation object representing the delete
          multimodal dataset operation.
        """
        # Coerce the config into a concrete object, falling back to defaults.
        if isinstance(config, dict):
            resolved_config = types.CreateMultimodalDatasetConfig(**config)
        else:
            resolved_config = config or types.CreateMultimodalDatasetConfig()

        return await self._delete_multimodal_dataset(config=resolved_config, name=name)

    async def assemble(
        self,
        *,
        name: str,
        template_config: Optional[types.GeminiTemplateConfigOrDict] = None,
        config: Optional[types.AssembleDatasetConfigOrDict] = None,
    ) -> str:
        """Assemble the dataset into a BigQuery table.

        Waits for the assemble operation to complete before returning.

        Args:
          name:
            Required. The name of the dataset to assemble. The name should be in
            the format of "projects/{project}/locations/{location}/datasets/{dataset}".
          template_config:
            Optional. The template config to use to assemble the dataset. If
            not provided, the template config attached to the dataset will be
            used.
          config:
            Optional. A configuration for assembling the dataset. If not
            provided, the default configuration will be used.

        Returns:
            The URI of the bigquery table of the assembled dataset.
        """
        if isinstance(config, dict):
            config = types.AssembleDatasetConfig(**config)
        elif not config:
            config = types.AssembleDatasetConfig()

        operation = await self._assemble_multimodal_dataset(
            name=name,
            # Wrap the template config in a typed GeminiRequestReadConfig for
            # consistency with assess_tuning_resources (the request converter
            # reads either a dict or an object via getv).
            gemini_request_read_config=types.GeminiRequestReadConfig(
                template_config=template_config,
            ),
            config=config,
        )
        # Block until the LRO finishes (bounded by config.timeout, in seconds).
        response = await self._wait_for_operation(
            operation=operation,
            timeout_seconds=config.timeout,
        )
        return response["bigqueryDestination"]

    async def assess_tuning_resources(
        self,
        *,
        dataset_name: str,
        model_name: str,
        template_config: Optional[types.GeminiTemplateConfigOrDict] = None,
        config: Optional[types.AssessDatasetConfigOrDict] = None,
    ) -> types.TuningResourceUsageAssessmentResult:
        """Assess the tuning resources required for a given model.

        Args:
          dataset_name:
            Required. The name of the dataset to assess the tuning resources
            for.
          model_name:
            Required. The name of the model to assess the tuning resources
            for.
          template_config:
            Optional. The template config used to assemble the dataset
            before assessing the tuning resources. If not provided, the
            template config attached to the dataset will be used. Required
            if no template config is attached to the dataset.
          config:
            Optional. A configuration for assessing the tuning resources. If not
            provided, the default configuration will be used.

        Returns:
          A types.TuningResourceUsageAssessmentResult object representing the
          tuning resource usage assessment result.
        """
        # Coerce the config into a concrete object, falling back to defaults.
        if isinstance(config, dict):
            config = types.AssessDatasetConfig(**config)
        else:
            config = config or types.AssessDatasetConfig()

        assessment_config = types.TuningResourceUsageAssessmentConfig(
            model_name=model_name
        )
        read_config = types.GeminiRequestReadConfig(
            template_config=template_config,
        )
        operation = await self._assess_multimodal_dataset(
            name=dataset_name,
            tuning_resource_usage_assessment_config=assessment_config,
            gemini_request_read_config=read_config,
            config=config,
        )
        # Block until the LRO finishes (bounded by config.timeout, in seconds),
        # then unwrap the assessment result payload.
        response = await self._wait_for_operation(
            operation=operation,
            timeout_seconds=config.timeout,
        )
        return _datasets_utils.create_from_response(
            types.TuningResourceUsageAssessmentResult,
            response["tuningResourceUsageAssessmentResult"],
        )