@@ -183,19 +183,16 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
return m.groupdict() if m else {}

@staticmethod
def dataset_path(project: str, location: str, dataset: str,) -> str:
def dataset_path(project: str, dataset: str,) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project, location=location, dataset=dataset,
return "projects/{project}/datasets/{dataset}".format(
project=project, dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
return m.groupdict() if m else {}

@staticmethod
@@ -215,16 +212,19 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
return m.groupdict() if m else {}

@staticmethod
def dataset_path(project: str, dataset: str,) -> str:
def dataset_path(project: str, location: str, dataset: str,) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/datasets/{dataset}".format(
project=project, dataset=dataset,
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project, location=location, dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
return m.groupdict() if m else {}

@staticmethod
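
The two hunks above swap the dataset path helpers between two generated clients: the first drops the locations/{location} segment from dataset_path and parse_dataset_path, the second adds it. For reference, a minimal, self-contained sketch of the non-location variant as it reads after the first hunk (written as module-level functions rather than the client's @staticmethods; the project and dataset values are placeholders):

import re
from typing import Dict

def dataset_path(project: str, dataset: str) -> str:
    """Returns a fully-qualified dataset string."""
    return "projects/{project}/datasets/{dataset}".format(
        project=project, dataset=dataset,
    )

def parse_dataset_path(path: str) -> Dict[str, str]:
    """Parses a dataset path into its component segments."""
    m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
    return m.groupdict() if m else {}

# Round trip: build a path, then parse it back into its segments.
path = dataset_path("my-project", "my-dataset")
assert path == "projects/my-project/datasets/my-dataset"
assert parse_dataset_path(path) == {"project": "my-project", "dataset": "my-dataset"}
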
@@ -112,6 +112,7 @@
from .types.event import Event
from .types.execution import Execution
from .types.explanation import Attribution
from .types.explanation import BlurBaselineConfig
from .types.explanation import Explanation
from .types.explanation import ExplanationMetadataOverride
from .types.explanation import ExplanationParameters
@@ -189,6 +190,9 @@
from .types.index_endpoint_service import GetIndexEndpointRequest
from .types.index_endpoint_service import ListIndexEndpointsRequest
from .types.index_endpoint_service import ListIndexEndpointsResponse
from .types.index_endpoint_service import MutateDeployedIndexOperationMetadata
from .types.index_endpoint_service import MutateDeployedIndexRequest
from .types.index_endpoint_service import MutateDeployedIndexResponse
from .types.index_endpoint_service import UndeployIndexOperationMetadata
from .types.index_endpoint_service import UndeployIndexRequest
from .types.index_endpoint_service import UndeployIndexResponse
@@ -447,12 +451,14 @@
from .types.training_pipeline import FractionSplit
from .types.training_pipeline import InputDataConfig
from .types.training_pipeline import PredefinedSplit
from .types.training_pipeline import StratifiedSplit
from .types.training_pipeline import TimestampSplit
from .types.training_pipeline import TrainingPipeline
from .types.types import BoolArray
from .types.types import DoubleArray
from .types.types import Int64Array
from .types.types import StringArray
from .types.unmanaged_container_model import UnmanagedContainerModel
from .types.user_action_reference import UserActionReference
from .types.value import Value
from .types.vizier_service import AddTrialMeasurementRequest
@@ -529,6 +535,7 @@
"BatchReadTensorboardTimeSeriesDataResponse",
"BigQueryDestination",
"BigQuerySource",
"BlurBaselineConfig",
"BoolArray",
"CancelBatchPredictionJobRequest",
"CancelCustomJobRequest",
@@ -813,6 +820,9 @@
"ModelMonitoringObjectiveConfig",
"ModelMonitoringStatsAnomalies",
"ModelServiceClient",
"MutateDeployedIndexOperationMetadata",
"MutateDeployedIndexRequest",
"MutateDeployedIndexResponse",
"NearestNeigrSearchOperationMetadata",
"PauseModelDeploymentMonitoringJobRequest",
"PipelineJob",
@@ -866,6 +876,7 @@
"SpecialistPool",
"SpecialistPoolServiceClient",
"StopTrialRequest",
"StratifiedSplit",
"ReadFeatureValuesRequest",
"StringArray",
"Study",
@@ -895,6 +906,7 @@
"UndeployModelOperationMetadata",
"UndeployModelRequest",
"UndeployModelResponse",
"UnmanagedContainerModel",
"UpdateArtifactRequest",
"UpdateContextRequest",
"UpdateDatasetRequest",
@@ -481,6 +481,11 @@
"list_index_endpoints"
]
},
"MutateDeployedIndex": {
"methods": [
"mutate_deployed_index"
]
},
"UndeployIndex": {
"methods": [
"undeploy_index"
@@ -521,6 +526,11 @@
"list_index_endpoints"
]
},
"MutateDeployedIndex": {
"methods": [
"mutate_deployed_index"
]
},
"UndeployIndex": {
"methods": [
"undeploy_index"
@@ -191,6 +191,7 @@ async def create_endpoint(
*,
parent: str = None,
endpoint: gca_endpoint.Endpoint = None,
endpoint_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -214,6 +215,21 @@ async def create_endpoint(
This corresponds to the ``endpoint`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
endpoint_id (:class:`str`):
Immutable. The ID to use for endpoint, which will become
the final component of the endpoint resource name. If
not provided, Vertex AI will generate a value for this
ID.

This value should be 1-10 characters, and valid
characters are /[0-9]/. When using HTTP/JSON, this field
is populated based on a query string argument, such as
``?endpoint_id=12345``. This is the fallback for fields
that are not included in either the URI or the body.

This corresponds to the ``endpoint_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -231,7 +247,7 @@ async def create_endpoint(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, endpoint])
has_flattened_params = any([parent, endpoint, endpoint_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -246,6 +262,8 @@ async def create_endpoint(
request.parent = parent
if endpoint is not None:
request.endpoint = endpoint
if endpoint_id is not None:
request.endpoint_id = endpoint_id

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
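
The hunks above thread a new flattened endpoint_id argument through the async create_endpoint wrapper: it is documented, included in the has_flattened_params check, and copied onto the request. A minimal async usage sketch, assuming the EndpointServiceAsyncClient class exported by google.cloud.aiplatform_v1beta1 (the diff does not name the package) and placeholder resource names:

import asyncio
from google.cloud import aiplatform_v1beta1  # assumed package/version

async def main() -> None:
    client = aiplatform_v1beta1.EndpointServiceAsyncClient()

    # The new flattened endpoint_id fixes the final component of the Endpoint
    # resource name (1-10 characters, digits only, per the docstring above).
    operation = await client.create_endpoint(
        parent="projects/my-project/locations/us-central1",  # placeholder
        endpoint=aiplatform_v1beta1.Endpoint(display_name="my-endpoint"),
        endpoint_id="12345",
    )
    endpoint = await operation.result()
    print(endpoint.name)  # final path component should be the chosen endpoint_id

asyncio.run(main())
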
@@ -424,6 +424,7 @@ def create_endpoint(
*,
parent: str = None,
endpoint: gca_endpoint.Endpoint = None,
endpoint_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -447,6 +448,21 @@ def create_endpoint(
This corresponds to the ``endpoint`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
endpoint_id (str):
Immutable. The ID to use for endpoint, which will become
the final component of the endpoint resource name. If
not provided, Vertex AI will generate a value for this
ID.

This value should be 1-10 characters, and valid
characters are /[0-9]/. When using HTTP/JSON, this field
is populated based on a query string argument, such as
``?endpoint_id=12345``. This is the fallback for fields
that are not included in either the URI or the body.

This corresponds to the ``endpoint_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
@@ -464,7 +480,7 @@ def create_endpoint(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, endpoint])
has_flattened_params = any([parent, endpoint, endpoint_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
@@ -483,6 +499,8 @@ def create_endpoint(
request.parent = parent
if endpoint is not None:
request.endpoint = endpoint
if endpoint_id is not None:
request.endpoint_id = endpoint_id

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
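
The synchronous client receives the same endpoint_id plumbing. One detail worth illustrating is the guard visible in both hunks: a full request object cannot be combined with flattened arguments, and endpoint_id now counts as one. A small sketch under the same assumptions about the client class, package name, and placeholder resource names:

from google.cloud import aiplatform_v1beta1  # assumed package/version

client = aiplatform_v1beta1.EndpointServiceClient()

request = aiplatform_v1beta1.CreateEndpointRequest(
    parent="projects/my-project/locations/us-central1",  # placeholder
    endpoint=aiplatform_v1beta1.Endpoint(display_name="my-endpoint"),
)

# Either pass a request object...
operation = client.create_endpoint(request=request)

# ...or flattened arguments, but not both: because endpoint_id is now part of
# has_flattened_params, combining it with `request` raises ValueError.
try:
    client.create_endpoint(request=request, endpoint_id="12345")
except ValueError as exc:
    print(exc)

endpoint = operation.result()  # waits for the long-running operation
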