Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions google/cloud/aiplatform/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -504,7 +504,7 @@ def _get_and_validate_project_location(
location(str): The location of the resource noun.

Raises:
RuntimeError if location is different from resource location
RuntimeError: If location is different from resource location
"""

fields = utils.extract_fields_from_resource_name(
Expand Down Expand Up @@ -604,7 +604,7 @@ def _assert_gca_resource_is_available(self) -> None:
"""Helper method to raise when property is not accessible.

Raises:
RuntimeError if _gca_resource is has not been created.
            RuntimeError: If _gca_resource has not been created.
"""
if self._gca_resource is None:
raise RuntimeError(
Expand Down Expand Up @@ -1115,7 +1115,7 @@ def _wait_for_resource_creation(self) -> None:
job.run(sync=False, ...)
job._wait_for_resource_creation()
Raises:
RuntimeError if the resource has not been scheduled to be created.
RuntimeError: If the resource has not been scheduled to be created.
"""

# If the user calls this but didn't actually invoke an API to create
Expand All @@ -1141,7 +1141,7 @@ def _assert_gca_resource_is_available(self) -> None:
resource creation has failed asynchronously.

Raises:
RuntimeError when resource has not been created.
RuntimeError: When resource has not been created.
"""
if not getattr(self._gca_resource, "name", None):
raise RuntimeError(
Expand Down
4 changes: 2 additions & 2 deletions google/cloud/aiplatform/datasets/_datasources.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def __init__(
"bq://project.dataset.table_name"

Raises:
ValueError if source configuration is not valid.
ValueError: If source configuration is not valid.
"""

dataset_metadata = None
Expand Down Expand Up @@ -215,7 +215,7 @@ def create_datasource(
datasource (Datasource)

Raises:
ValueError when below scenarios happen
ValueError: When below scenarios happen:
- import_schema_uri is identified for creating TabularDatasource
- either import_schema_uri or gcs_source is missing for creating NonTabularDatasourceImportable
"""
Expand Down
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/datasets/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ def _validate_metadata_schema_uri(self) -> None:
"""Validate the metadata_schema_uri of retrieved dataset resource.

Raises:
ValueError if the dataset type of the retrieved dataset resource is
ValueError: If the dataset type of the retrieved dataset resource is
not supported by the class.
"""
if self._supported_metadata_schema_uris and (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def __init__(
signature_name) specifies multiple outputs.

Raises:
ValueError if outputs_to_explain contains more than 1 element or
ValueError: If outputs_to_explain contains more than 1 element or
signature contains multiple outputs.
"""
if outputs_to_explain:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,8 @@ def __init__(
Any keyword arguments to be passed to tf.saved_model.save() function.

Raises:
ValueError if outputs_to_explain contains more than 1 element.
ImportError if tf is not imported.
ValueError: If outputs_to_explain contains more than 1 element.
ImportError: If tf is not imported.
"""
if outputs_to_explain and len(outputs_to_explain) > 1:
raise ValueError(
Expand Down Expand Up @@ -91,7 +91,7 @@ def _infer_metadata_entries_from_model(
Inferred input metadata and output metadata from the model.

Raises:
ValueError if specified name is not found in signature outputs.
ValueError: If specified name is not found in signature outputs.
"""

loaded_sig = self._loaded_model.signatures[signature_name]
Expand Down
4 changes: 2 additions & 2 deletions google/cloud/aiplatform/jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -1049,7 +1049,7 @@ def __init__(
staging_bucket set in aiplatform.init.

Raises:
RuntimeError is not staging bucket was set using aiplatfrom.init and a staging
RuntimeError: If staging bucket was not set using aiplatform.init and a staging
bucket was not passed in.
"""

Expand Down Expand Up @@ -1241,7 +1241,7 @@ def from_local_script(
staging_bucket set in aiplatform.init.

Raises:
RuntimeError is not staging bucket was set using aiplatfrom.init and a staging
RuntimeError: If staging bucket was not set using aiplatform.init and a staging
bucket was not passed in.
"""

Expand Down
6 changes: 3 additions & 3 deletions google/cloud/aiplatform/metadata/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -157,8 +157,8 @@ def log_metrics(self, metrics: Dict[str, Union[float, int]]):
metrics (Dict):
                Required. Metrics key/value pairs. Only float and int are supported formats for values.
Raises:
TypeError if value contains unsupported types.
ValueError if Experiment or Run is not set.
TypeError: If value contains unsupported types.
ValueError: If Experiment or Run is not set.
"""

self._validate_experiment_and_run(method_name="log_metrics")
Expand Down Expand Up @@ -265,7 +265,7 @@ def _validate_metrics_value_type(metrics: Dict[str, Union[float, int]]):
metrics (Dict):
                Required. Metrics key/value pairs. Only float and int are supported formats for values.
Raises:
TypeError if value contains unsupported types.
TypeError: If value contains unsupported types.
"""

for key, value in metrics.items():
Expand Down
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/metadata/resource.py
Original file line number Diff line number Diff line change
Expand Up @@ -451,7 +451,7 @@ def _extract_metadata_store_id(resource_name, resource_noun) -> str:
metadata_store_id (str):
The metadata store id for the particular resource name.
Raises:
ValueError if it does not exist.
ValueError: If it does not exist.
"""
pattern = re.compile(
r"^projects\/(?P<project>[\w-]+)\/locations\/(?P<location>[\w-]+)\/metadataStores\/(?P<store>[\w-]+)\/"
Expand Down
6 changes: 3 additions & 3 deletions google/cloud/aiplatform/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -786,7 +786,7 @@ def _deploy(
will be executed in concurrent Future and any downstream object will
be immediately returned and synced when the Future has completed.
Raises:
ValueError if there is not current traffic split and traffic percentage
            ValueError: If there is no current traffic split and traffic percentage
is not 0 or 100.
"""
_LOGGER.log_action_start_against_resource(
Expand Down Expand Up @@ -2366,9 +2366,9 @@ def export_model(
Details of the completed export with output destination paths to
the artifacts or container image.
Raises:
ValueError if model does not support exporting.
ValueError: If model does not support exporting.

ValueError if invalid arguments or export formats are provided.
ValueError: If invalid arguments or export formats are provided.
"""

# Model does not support exporting
Expand Down
4 changes: 2 additions & 2 deletions google/cloud/aiplatform/training_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -4060,7 +4060,7 @@ def run(
produce a Vertex AI Model.

Raises:
RuntimeError if Training job has already been run or is waiting to run.
RuntimeError: If Training job has already been run or is waiting to run.
"""

if model_display_name:
Expand Down Expand Up @@ -4269,7 +4269,7 @@ def _run_with_experiments(
produce a Vertex AI Model.

Raises:
RuntimeError if Training job has already been run or is waiting to run.
RuntimeError: If Training job has already been run or is waiting to run.
"""

if additional_experiments:
Expand Down
4 changes: 2 additions & 2 deletions google/cloud/aiplatform/utils/featurestore_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def validate_and_get_entity_type_resource_ids(
Tuple[str, str] - featurestore ID and entity_type ID

Raises:
ValueError if the provided entity_type_name is not in form of a fully-qualified
        ValueError: If the provided entity_type_name is not in the form of a fully-qualified
entityType resource name nor an entity_type ID with featurestore_id passed.
"""
match = CompatFeaturestoreServiceClient.parse_entity_type_path(
Expand Down Expand Up @@ -91,7 +91,7 @@ def validate_and_get_feature_resource_ids(
Tuple[str, str, str] - featurestore ID, entity_type ID, and feature ID

Raises:
ValueError if the provided feature_name is not in form of a fully-qualified
        ValueError: If the provided feature_name is not in the form of a fully-qualified
feature resource name nor a feature ID with featurestore_id and entity_type_id passed.
"""

Expand Down
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/utils/tensorboard_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def _parse_experiment_name(experiment_name: str) -> Dict[str, str]:
Components of the experiment name.

Raises:
ValueError if the experiment_name is invalid.
ValueError: If the experiment_name is invalid.
"""
matched = TensorboardServiceClient.parse_tensorboard_experiment_path(
experiment_name
Expand Down