
feat: Expose task_unique_name in pipeline task details for pipeline rerun #5497


Closed
wants to merge 9 commits
18 changes: 9 additions & 9 deletions google/cloud/aiplatform_v1/services/migration_service/client.py
@@ -243,40 +243,40 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
    @staticmethod
    def dataset_path(
        project: str,
-        location: str,
        dataset: str,
    ) -> str:
        """Returns a fully-qualified dataset string."""
-        return "projects/{project}/locations/{location}/datasets/{dataset}".format(
+        return "projects/{project}/datasets/{dataset}".format(
            project=project,
-            location=location,
            dataset=dataset,
        )

    @staticmethod
    def parse_dataset_path(path: str) -> Dict[str, str]:
        """Parses a dataset path into its component segments."""
-        m = re.match(
-            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
-            path,
-        )
+        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def dataset_path(
        project: str,
+        location: str,
        dataset: str,
    ) -> str:
        """Returns a fully-qualified dataset string."""
-        return "projects/{project}/datasets/{dataset}".format(
+        return "projects/{project}/locations/{location}/datasets/{dataset}".format(
            project=project,
+            location=location,
            dataset=dataset,
        )

    @staticmethod
    def parse_dataset_path(path: str) -> Dict[str, str]:
        """Parses a dataset path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
+            path,
+        )
        return m.groupdict() if m else {}

    @staticmethod
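Because Python keeps only the last definition when a method name is bound twice, swapping the order of the two generated dataset_path overloads changes which path format callers actually get. A minimal sketch of the post-change behavior, with placeholder resource IDs:

from google.cloud.aiplatform_v1.services.migration_service import (
    MigrationServiceClient,
)

# The location-qualified variant is now defined last, so it is the one
# callers get. "my-project", "us-central1", "my-dataset" are placeholders.
path = MigrationServiceClient.dataset_path("my-project", "us-central1", "my-dataset")
# -> "projects/my-project/locations/us-central1/datasets/my-dataset"

# parse_dataset_path likewise matches the location-qualified pattern;
# a path without the locations/ segment yields an empty dict.
assert MigrationServiceClient.parse_dataset_path(path) == {
    "project": "my-project",
    "location": "us-central1",
    "dataset": "my-dataset",
}
assert MigrationServiceClient.parse_dataset_path("projects/p/datasets/d") == {}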
37 changes: 37 additions & 0 deletions google/cloud/aiplatform_v1/types/tool.py
@@ -95,6 +95,11 @@ class Tool(proto.Message):
        url_context (google.cloud.aiplatform_v1.types.UrlContext):
            Optional. Tool to support URL context
            retrieval.
+        computer_use (google.cloud.aiplatform_v1.types.Tool.ComputerUse):
+            Optional. Tool to support the model
+            interacting directly with the computer. If
+            enabled, it automatically populates computer-use
+            specific Function Declarations.
    """

    class GoogleSearch(proto.Message):
@@ -112,6 +117,33 @@ class CodeExecution(proto.Message):

    """

+    class ComputerUse(proto.Message):
+        r"""Tool to support computer use.
+
+        Attributes:
+            environment (google.cloud.aiplatform_v1.types.Tool.ComputerUse.Environment):
+                Required. The environment being operated.
+        """
+
+        class Environment(proto.Enum):
+            r"""Represents the environment being operated, such as a web
+            browser.
+
+            Values:
+                ENVIRONMENT_UNSPECIFIED (0):
+                    Defaults to browser.
+                ENVIRONMENT_BROWSER (1):
+                    Operates in a web browser.
+            """
+            ENVIRONMENT_UNSPECIFIED = 0
+            ENVIRONMENT_BROWSER = 1
+
+        environment: "Tool.ComputerUse.Environment" = proto.Field(
+            proto.ENUM,
+            number=1,
+            enum="Tool.ComputerUse.Environment",
+        )
+
    function_declarations: MutableSequence["FunctionDeclaration"] = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
@@ -147,6 +179,11 @@ class CodeExecution(proto.Message):
        number=8,
        message="UrlContext",
    )
+    computer_use: ComputerUse = proto.Field(
+        proto.MESSAGE,
+        number=11,
+        message=ComputerUse,
+    )


class UrlContext(proto.Message):
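For orientation, a minimal sketch of constructing the new nested message with the regenerated proto-plus types; proto-plus accepts dicts for message fields and integers for enums, which is exactly the shape the REST tests below send:

from google.cloud.aiplatform_v1.types import tool

# Dict form, mirroring the REST test payloads: 1 == ENVIRONMENT_BROWSER.
t = tool.Tool(computer_use={"environment": 1})
assert (
    t.computer_use.environment
    == tool.Tool.ComputerUse.Environment.ENVIRONMENT_BROWSER
)

# Equivalent explicit construction.
t = tool.Tool(
    computer_use=tool.Tool.ComputerUse(
        environment=tool.Tool.ComputerUse.Environment.ENVIRONMENT_BROWSER
    )
)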
@@ -265,40 +265,40 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
    @staticmethod
    def dataset_path(
        project: str,
-        location: str,
        dataset: str,
    ) -> str:
        """Returns a fully-qualified dataset string."""
-        return "projects/{project}/locations/{location}/datasets/{dataset}".format(
+        return "projects/{project}/datasets/{dataset}".format(
            project=project,
-            location=location,
            dataset=dataset,
        )

    @staticmethod
    def parse_dataset_path(path: str) -> Dict[str, str]:
        """Parses a dataset path into its component segments."""
-        m = re.match(
-            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
-            path,
-        )
+        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def dataset_path(
        project: str,
+        location: str,
        dataset: str,
    ) -> str:
        """Returns a fully-qualified dataset string."""
-        return "projects/{project}/datasets/{dataset}".format(
+        return "projects/{project}/locations/{location}/datasets/{dataset}".format(
            project=project,
+            location=location,
            dataset=dataset,
        )

    @staticmethod
    def parse_dataset_path(path: str) -> Dict[str, str]:
        """Parses a dataset path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
+            path,
+        )
        return m.groupdict() if m else {}

    @staticmethod
14 changes: 14 additions & 0 deletions google/cloud/aiplatform_v1beta1/types/pipeline_job.py
@@ -573,6 +573,16 @@ class PipelineTaskDetail(proto.Message):
        outputs (MutableMapping[str, google.cloud.aiplatform_v1beta1.types.PipelineTaskDetail.ArtifactList]):
            Output only. The runtime output artifacts of
            the task.
+        task_unique_name (str):
+            Output only. The unique name of a task. This field is used
+            by pipeline job reruns. Console UI and Vertex AI SDK will
+            support triggering pipeline job reruns. The name is
+            constructed by concatenating all the parent tasks' names
+            with the task name. For example, if a task named
+            "child_task" has a parent task named "parent_task_1" and
+            parent task 1 has a parent task named "parent_task_2", the
+            task unique name will be
+            "parent_task_2.parent_task_1.child_task".
    """

    class State(proto.Enum):
@@ -726,6 +736,10 @@ class ArtifactList(proto.Message):
        number=11,
        message=ArtifactList,
    )
+    task_unique_name: str = proto.Field(
+        proto.STRING,
+        number=14,
+    )


class PipelineTaskExecutorDetail(proto.Message):
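As a hedged illustration of how a rerun-oriented caller might surface the new field once the regenerated v1beta1 client is installed (the project, location, and job ID below are placeholders):

from google.cloud import aiplatform_v1beta1

client = aiplatform_v1beta1.PipelineServiceClient()
job = client.get_pipeline_job(
    name="projects/my-project/locations/us-central1/pipelineJobs/my-job"
)
for task in job.job_detail.task_details:
    # For a doubly nested task this prints something like
    # "parent_task_2.parent_task_1.child_task".
    print(task.task_id, task.task_unique_name)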
37 changes: 37 additions & 0 deletions google/cloud/aiplatform_v1beta1/types/tool.py
@@ -96,6 +96,11 @@ class Tool(proto.Message):
        url_context (google.cloud.aiplatform_v1beta1.types.UrlContext):
            Optional. Tool to support URL context
            retrieval.
+        computer_use (google.cloud.aiplatform_v1beta1.types.Tool.ComputerUse):
+            Optional. Tool to support the model
+            interacting directly with the computer. If
+            enabled, it automatically populates computer-use
+            specific Function Declarations.
    """

    class GoogleSearch(proto.Message):
@@ -113,6 +118,33 @@ class CodeExecution(proto.Message):

    """

+    class ComputerUse(proto.Message):
+        r"""Tool to support computer use.
+
+        Attributes:
+            environment (google.cloud.aiplatform_v1beta1.types.Tool.ComputerUse.Environment):
+                Required. The environment being operated.
+        """
+
+        class Environment(proto.Enum):
+            r"""Represents the environment being operated, such as a web
+            browser.
+
+            Values:
+                ENVIRONMENT_UNSPECIFIED (0):
+                    Defaults to browser.
+                ENVIRONMENT_BROWSER (1):
+                    Operates in a web browser.
+            """
+            ENVIRONMENT_UNSPECIFIED = 0
+            ENVIRONMENT_BROWSER = 1
+
+        environment: "Tool.ComputerUse.Environment" = proto.Field(
+            proto.ENUM,
+            number=1,
+            enum="Tool.ComputerUse.Environment",
+        )
+
    function_declarations: MutableSequence["FunctionDeclaration"] = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
@@ -148,6 +180,11 @@ class CodeExecution(proto.Message):
        number=8,
        message="UrlContext",
    )
+    computer_use: ComputerUse = proto.Field(
+        proto.MESSAGE,
+        number=11,
+        message=ComputerUse,
+    )


class UrlContext(proto.Message):
@@ -8,7 +8,7 @@
    ],
    "language": "PYTHON",
    "name": "google-cloud-aiplatform",
-    "version": "1.100.0"
+    "version": "0.1.0"
  },
  "snippets": [
    {
@@ -8,7 +8,7 @@
    ],
    "language": "PYTHON",
    "name": "google-cloud-aiplatform",
-    "version": "1.100.0"
+    "version": "0.1.0"
  },
  "snippets": [
    {
4 changes: 4 additions & 0 deletions tests/unit/gapic/aiplatform_v1/test_gen_ai_cache_service.py
@@ -4721,6 +4721,7 @@ def test_create_cached_content_rest_call_success(request_type):
                "enterprise_web_search": {},
                "code_execution": {},
                "url_context": {},
+                "computer_use": {"environment": 1},
            }
        ],
        "tool_config": {
@@ -5214,6 +5215,7 @@ def test_update_cached_content_rest_call_success(request_type):
                "enterprise_web_search": {},
                "code_execution": {},
                "url_context": {},
+                "computer_use": {"environment": 1},
            }
        ],
        "tool_config": {
@@ -6569,6 +6571,7 @@ async def test_create_cached_content_rest_asyncio_call_success(request_type):
                "enterprise_web_search": {},
                "code_execution": {},
                "url_context": {},
+                "computer_use": {"environment": 1},
            }
        ],
        "tool_config": {
@@ -7094,6 +7097,7 @@ async def test_update_cached_content_rest_asyncio_call_success(request_type):
                "enterprise_web_search": {},
                "code_execution": {},
                "url_context": {},
+                "computer_use": {"environment": 1},
            }
        ],
        "tool_config": {
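The generated REST tests above feed the tool as a plain dict through the transport; the same coercion can be checked directly against the public types. A small sketch under that assumption, avoiding the tests' private transport plumbing:

from google.cloud.aiplatform_v1.types import cached_content, tool

cc = cached_content.CachedContent(tools=[{"computer_use": {"environment": 1}}])
assert (
    cc.tools[0].computer_use.environment
    == tool.Tool.ComputerUse.Environment.ENVIRONMENT_BROWSER
)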
26 changes: 13 additions & 13 deletions tests/unit/gapic/aiplatform_v1/test_migration_service.py
@@ -5398,22 +5398,19 @@ def test_parse_annotated_dataset_path():

def test_dataset_path():
    project = "cuttlefish"
-    location = "mussel"
-    dataset = "winkle"
-    expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
+    dataset = "mussel"
+    expected = "projects/{project}/datasets/{dataset}".format(
        project=project,
-        location=location,
        dataset=dataset,
    )
-    actual = MigrationServiceClient.dataset_path(project, location, dataset)
+    actual = MigrationServiceClient.dataset_path(project, dataset)
    assert expected == actual


def test_parse_dataset_path():
    expected = {
-        "project": "nautilus",
-        "location": "scallop",
-        "dataset": "abalone",
+        "project": "winkle",
+        "dataset": "nautilus",
    }
    path = MigrationServiceClient.dataset_path(**expected)

@@ -5423,19 +5420,22 @@ def test_parse_dataset_path():


def test_dataset_path():
-    project = "squid"
-    dataset = "clam"
-    expected = "projects/{project}/datasets/{dataset}".format(
+    project = "scallop"
+    location = "abalone"
+    dataset = "squid"
+    expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
        project=project,
+        location=location,
        dataset=dataset,
    )
-    actual = MigrationServiceClient.dataset_path(project, dataset)
+    actual = MigrationServiceClient.dataset_path(project, location, dataset)
    assert expected == actual


def test_parse_dataset_path():
    expected = {
-        "project": "whelk",
+        "project": "clam",
+        "location": "whelk",
        "dataset": "octopus",
    }
    path = MigrationServiceClient.dataset_path(**expected)
@@ -4732,6 +4732,7 @@ def test_create_cached_content_rest_call_success(request_type):
                "enterprise_web_search": {},
                "code_execution": {},
                "url_context": {},
+                "computer_use": {"environment": 1},
            }
        ],
        "tool_config": {
@@ -5236,6 +5237,7 @@ def test_update_cached_content_rest_call_success(request_type):
                "enterprise_web_search": {},
                "code_execution": {},
                "url_context": {},
+                "computer_use": {"environment": 1},
            }
        ],
        "tool_config": {
@@ -6602,6 +6604,7 @@ async def test_create_cached_content_rest_asyncio_call_success(request_type):
                "enterprise_web_search": {},
                "code_execution": {},
                "url_context": {},
+                "computer_use": {"environment": 1},
            }
        ],
        "tool_config": {
@@ -7138,6 +7141,7 @@ async def test_update_cached_content_rest_asyncio_call_success(request_type):
                "enterprise_web_search": {},
                "code_execution": {},
                "url_context": {},
+                "computer_use": {"environment": 1},
            }
        ],
        "tool_config": {