@@ -26,8 +26,8 @@ class ImageClassificationPredictionInstance(proto.Message):
     r"""Prediction input format for Image Classification.
     Attributes:
         content (str):
-            The image bytes or GCS URI to make the
-            prediction on.
+            The image bytes or Cloud Storage URI to make
+            the prediction on.
         mime_type (str):
             The MIME type of the content of the image.
             Only the images in below listed MIME types are
@@ -26,8 +26,8 @@ class ImageObjectDetectionPredictionInstance(proto.Message):
     r"""Prediction input format for Image Object Detection.
     Attributes:
         content (str):
-            The image bytes or GCS URI to make the
-            prediction on.
+            The image bytes or Cloud Storage URI to make
+            the prediction on.
         mime_type (str):
             The MIME type of the content of the image.
             Only the images in below listed MIME types are
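Both renames are docstring-only, but for context, here is a minimal hedged sketch of how a caller supplies that `content` field (the `gapic.schema` helper path is an assumption; this view does not show file names):

```python
# Sketch only: assumes the google-cloud-aiplatform SDK's schema helpers.
from google.cloud.aiplatform.gapic.schema import predict

# Per the updated docstring, `content` is either the image bytes or a
# Cloud Storage URI pointing at the image.
instance = predict.instance.ImageClassificationPredictionInstance(
    content="gs://my-bucket/images/example.jpg",  # placeholder URI
    mime_type="image/jpeg",
)
```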
@@ -36,7 +36,7 @@ class TextExtractionPredictionInstance(proto.Message):
             If a key is provided, the batch prediction
             result will by mapped to this key. If omitted,
             then the batch prediction result will contain
-            the entire input instance. AI Platform will not
+            the entire input instance. Vertex AI will not
             check if keys in the request are duplicates, so
             it is up to the caller to ensure the keys are
             unique.
@@ -139,6 +139,9 @@ class AutoMlTablesInputs(proto.Message):
             predictions to a BigQuery table. If this
             configuration is absent, then the export is not
             performed.
+        additional_experiments (Sequence[str]):
+            Additional experiment flags for the Tables
+            training pipeline.
     """

     class Transformation(proto.Message):
@@ -401,6 +404,7 @@ class TextArrayTransformation(proto.Message):
         number=10,
         message=gcastd_export_evaluated_data_items_config.ExportEvaluatedDataItemsConfig,
     )
+    additional_experiments = proto.RepeatedField(proto.STRING, number=11,)


 class AutoMlTablesMetadata(proto.Message):
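A hedged sketch of setting the new field (the import path and flag value are assumptions; the diff shows only the type definition):

```python
# Sketch only: assumes the schema helpers expose AutoMlTablesInputs here.
from google.cloud.aiplatform.gapic.schema import trainingjob

inputs = trainingjob.definition.AutoMlTablesInputs(
    target_column="label",  # placeholder column name
    # New in this change: repeated string of experiment flags
    # (the value below is hypothetical).
    additional_experiments=["some_experiment_flag"],
)
```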
@@ -48,6 +48,8 @@ class ModelType(proto.Enum):
         MODEL_TYPE_UNSPECIFIED = 0
         CLOUD = 1
         MOBILE_VERSATILE_1 = 2
+        MOBILE_JETSON_VERSATILE_1 = 3
+        MOBILE_CORAL_VERSATILE_1 = 4

     model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,)

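The two new values target edge accelerators (NVIDIA Jetson and Coral boards). A hedged sketch of selecting one; the enclosing message is not visible in this view, so `AutoMlVideoActionRecognitionInputs` is an assumption based on the surrounding enum values:

```python
# Sketch only: the enclosing Inputs message is an assumption.
from google.cloud.aiplatform.gapic.schema import trainingjob

ModelType = trainingjob.definition.AutoMlVideoActionRecognitionInputs.ModelType
inputs = trainingjob.definition.AutoMlVideoActionRecognitionInputs(
    model_type=ModelType.MOBILE_JETSON_VERSATILE_1,  # new in this change
)
```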
@@ -33,7 +33,6 @@ class ExportEvaluatedDataItemsConfig(proto.Message):

         If not specified, then results are exported to the following
         auto-created BigQuery table:
-
         <project_id>:export_evaluated_examples_<model_name>_<yyyy_MM_dd'T'HH_mm_ss_SSS'Z'>.evaluated_examples
         override_existing_table (bool):
             If true and an export destination is
@@ -34,8 +34,10 @@
 from .types.accelerator_type import AcceleratorType
 from .types.annotation import Annotation
 from .types.annotation_spec import AnnotationSpec
+from .types.artifact import Artifact
 from .types.batch_prediction_job import BatchPredictionJob
 from .types.completion_stats import CompletionStats
+from .types.context import Context
 from .types.custom_job import ContainerSpec
 from .types.custom_job import CustomJob
 from .types.custom_job import CustomJobSpec
@@ -86,6 +88,7 @@
 from .types.endpoint_service import UndeployModelResponse
 from .types.endpoint_service import UpdateEndpointRequest
 from .types.env_var import EnvVar
+from .types.execution import Execution
 from .types.hyperparameter_tuning_job import HyperparameterTuningJob
 from .types.io import BigQueryDestination
 from .types.io import BigQuerySource
@@ -118,6 +121,7 @@
 from .types.job_service import ListHyperparameterTuningJobsResponse
 from .types.job_state import JobState
 from .types.machine_resources import AutomaticResources
+from .types.machine_resources import AutoscalingMetricSpec
 from .types.machine_resources import BatchDedicatedResources
 from .types.machine_resources import DedicatedResources
 from .types.machine_resources import DiskSpec
@@ -157,10 +161,20 @@
 from .types.model_service import UploadModelResponse
 from .types.operation import DeleteOperationMetadata
 from .types.operation import GenericOperationMetadata
+from .types.pipeline_job import PipelineJob
+from .types.pipeline_job import PipelineJobDetail
+from .types.pipeline_job import PipelineTaskDetail
+from .types.pipeline_job import PipelineTaskExecutorDetail
+from .types.pipeline_service import CancelPipelineJobRequest
 from .types.pipeline_service import CancelTrainingPipelineRequest
+from .types.pipeline_service import CreatePipelineJobRequest
 from .types.pipeline_service import CreateTrainingPipelineRequest
+from .types.pipeline_service import DeletePipelineJobRequest
 from .types.pipeline_service import DeleteTrainingPipelineRequest
+from .types.pipeline_service import GetPipelineJobRequest
 from .types.pipeline_service import GetTrainingPipelineRequest
+from .types.pipeline_service import ListPipelineJobsRequest
+from .types.pipeline_service import ListPipelineJobsResponse
 from .types.pipeline_service import ListTrainingPipelinesRequest
 from .types.pipeline_service import ListTrainingPipelinesResponse
 from .types.pipeline_state import PipelineState
@@ -185,6 +199,7 @@
 from .types.training_pipeline import TimestampSplit
 from .types.training_pipeline import TrainingPipeline
 from .types.user_action_reference import UserActionReference
+from .types.value import Value

 __all__ = (
     "DatasetServiceAsyncClient",
@@ -199,7 +214,9 @@
     "ActiveLearningConfig",
     "Annotation",
     "AnnotationSpec",
+    "Artifact",
     "AutomaticResources",
+    "AutoscalingMetricSpec",
     "BatchDedicatedResources",
     "BatchMigrateResourcesOperationMetadata",
     "BatchMigrateResourcesRequest",
@@ -211,10 +228,12 @@
     "CancelCustomJobRequest",
     "CancelDataLabelingJobRequest",
     "CancelHyperparameterTuningJobRequest",
+    "CancelPipelineJobRequest",
     "CancelTrainingPipelineRequest",
     "CompletionStats",
     "ContainerRegistryDestination",
     "ContainerSpec",
+    "Context",
     "CreateBatchPredictionJobRequest",
     "CreateCustomJobRequest",
     "CreateDataLabelingJobRequest",
@@ -223,6 +242,7 @@
     "CreateEndpointOperationMetadata",
     "CreateEndpointRequest",
     "CreateHyperparameterTuningJobRequest",
+    "CreatePipelineJobRequest",
     "CreateSpecialistPoolOperationMetadata",
     "CreateSpecialistPoolRequest",
     "CreateTrainingPipelineRequest",
@@ -241,6 +261,7 @@
     "DeleteHyperparameterTuningJobRequest",
     "DeleteModelRequest",
     "DeleteOperationMetadata",
+    "DeletePipelineJobRequest",
     "DeleteSpecialistPoolRequest",
     "DeleteTrainingPipelineRequest",
     "DeployModelOperationMetadata",
@@ -253,6 +274,7 @@
     "Endpoint",
     "EndpointServiceClient",
     "EnvVar",
+    "Execution",
     "ExportDataConfig",
     "ExportDataOperationMetadata",
     "ExportDataRequest",
@@ -275,6 +297,7 @@
     "GetModelEvaluationRequest",
     "GetModelEvaluationSliceRequest",
     "GetModelRequest",
+    "GetPipelineJobRequest",
     "GetSpecialistPoolRequest",
     "GetTrainingPipelineRequest",
     "HyperparameterTuningJob",
@@ -307,6 +330,8 @@
     "ListModelEvaluationsResponse",
     "ListModelsRequest",
     "ListModelsResponse",
+    "ListPipelineJobsRequest",
+    "ListPipelineJobsResponse",
     "ListSpecialistPoolsRequest",
     "ListSpecialistPoolsResponse",
     "ListTrainingPipelinesRequest",
@@ -323,8 +348,12 @@
     "ModelEvaluation",
     "ModelEvaluationSlice",
     "ModelServiceClient",
+    "PipelineJob",
+    "PipelineJobDetail",
     "PipelineServiceClient",
     "PipelineState",
+    "PipelineTaskDetail",
+    "PipelineTaskExecutorDetail",
     "Port",
     "PredefinedSplit",
     "PredictRequest",
@@ -356,5 +385,6 @@
     "UploadModelRequest",
     "UploadModelResponse",
     "UserActionReference",
+    "Value",
     "WorkerPoolSpec",
 )
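Net effect for users: the pipeline-job, metadata, and autoscaling types become importable from the versioned package root. A hedged sketch (the `aiplatform_v1beta1` package name is an assumption; this view does not show which versioned `__init__.py` is being edited):

```python
# Sketch only: versioned package name is an assumption.
from google.cloud.aiplatform_v1beta1 import (
    Artifact,
    AutoscalingMetricSpec,
    Context,
    Execution,
    PipelineJob,
    Value,
)
```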
@@ -570,26 +570,51 @@
       "grpc": {
         "libraryClient": "PipelineServiceClient",
         "rpcs": {
+          "CancelPipelineJob": {
+            "methods": [
+              "cancel_pipeline_job"
+            ]
+          },
           "CancelTrainingPipeline": {
             "methods": [
               "cancel_training_pipeline"
             ]
           },
+          "CreatePipelineJob": {
+            "methods": [
+              "create_pipeline_job"
+            ]
+          },
           "CreateTrainingPipeline": {
             "methods": [
               "create_training_pipeline"
             ]
           },
+          "DeletePipelineJob": {
+            "methods": [
+              "delete_pipeline_job"
+            ]
+          },
           "DeleteTrainingPipeline": {
             "methods": [
               "delete_training_pipeline"
             ]
           },
+          "GetPipelineJob": {
+            "methods": [
+              "get_pipeline_job"
+            ]
+          },
           "GetTrainingPipeline": {
             "methods": [
               "get_training_pipeline"
             ]
           },
+          "ListPipelineJobs": {
+            "methods": [
+              "list_pipeline_jobs"
+            ]
+          },
           "ListTrainingPipelines": {
             "methods": [
               "list_training_pipelines"
@@ -600,26 +625,51 @@
       "grpc-async": {
         "libraryClient": "PipelineServiceAsyncClient",
         "rpcs": {
+          "CancelPipelineJob": {
+            "methods": [
+              "cancel_pipeline_job"
+            ]
+          },
           "CancelTrainingPipeline": {
             "methods": [
               "cancel_training_pipeline"
             ]
           },
+          "CreatePipelineJob": {
+            "methods": [
+              "create_pipeline_job"
+            ]
+          },
           "CreateTrainingPipeline": {
             "methods": [
               "create_training_pipeline"
             ]
           },
+          "DeletePipelineJob": {
+            "methods": [
+              "delete_pipeline_job"
+            ]
+          },
           "DeleteTrainingPipeline": {
             "methods": [
               "delete_training_pipeline"
             ]
           },
+          "GetPipelineJob": {
+            "methods": [
+              "get_pipeline_job"
+            ]
+          },
           "GetTrainingPipeline": {
             "methods": [
               "get_training_pipeline"
             ]
           },
+          "ListPipelineJobs": {
+            "methods": [
+              "list_pipeline_jobs"
+            ]
+          },
           "ListTrainingPipelines": {
             "methods": [
               "list_training_pipelines"
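Each new mapping pairs an RPC with a generated client method. A hedged usage sketch of the sync client (the project, region, and versioned module name are placeholders/assumptions):

```python
# Sketch only: versioned module name is an assumption.
from google.cloud import aiplatform_v1beta1

client = aiplatform_v1beta1.PipelineServiceClient()
parent = "projects/my-project/locations/us-central1"  # placeholder

# list_pipeline_jobs is the method mapped from ListPipelineJobs above;
# the generated method returns a pager over PipelineJob messages.
for job in client.list_pipeline_jobs(parent=parent):
    print(job.name, job.state)
```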
@@ -285,6 +285,21 @@ def parse_model_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}

+    @staticmethod
+    def network_path(project: str, network: str,) -> str:
+        """Returns a fully-qualified network string."""
+        return "projects/{project}/global/networks/{network}".format(
+            project=project, network=network,
+        )
+
+    @staticmethod
+    def parse_network_path(path: str) -> Dict[str, str]:
+        """Parses a network path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def trial_path(project: str, location: str, study: str, trial: str,) -> str:
         """Returns a fully-qualified trial string."""
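These mirror the existing `*_path`/`parse_*_path` helper pattern. A quick hedged usage sketch; the client class owning these helpers is not visible in this view, so `JobServiceClient` is an assumption:

```python
# Sketch only: the enclosing client class is an assumption.
from google.cloud import aiplatform_v1beta1

path = aiplatform_v1beta1.JobServiceClient.network_path(
    project="my-project", network="my-vpc",  # placeholders
)
assert path == "projects/my-project/global/networks/my-vpc"

parsed = aiplatform_v1beta1.JobServiceClient.parse_network_path(path)
assert parsed == {"project": "my-project", "network": "my-vpc"}
```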