Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 32 additions & 0 deletions src/a2a/helpers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,32 +3,64 @@
# Re-export the helper API at package level so callers can write
# ``from a2a.helpers import new_text_message`` instead of reaching into
# the individual submodules.
from a2a.helpers.agent_card import display_agent_card
from a2a.helpers.proto_helpers import (
    get_artifact_text,
    get_data_parts,
    get_message_text,
    get_raw_parts,
    get_stream_response_text,
    get_text_parts,
    get_url_parts,
    new_artifact,
    new_data_artifact,
    new_data_artifact_update_event,
    new_data_message,
    new_data_part,
    new_message,
    new_raw_artifact,
    new_raw_artifact_update_event,
    new_raw_message,
    new_raw_part,
    new_task,
    new_task_from_user_message,
    new_text_artifact,
    new_text_artifact_update_event,
    new_text_message,
    new_text_part,
    new_text_status_update_event,
    new_url_artifact,
    new_url_artifact_update_event,
    new_url_message,
    new_url_part,
)


# Explicit public API: controls ``from a2a.helpers import *`` and signals
# to linters that the imports above are intentional re-exports.
# Kept alphabetically sorted, mirroring the import list above.
__all__ = [
    'display_agent_card',
    'get_artifact_text',
    'get_data_parts',
    'get_message_text',
    'get_raw_parts',
    'get_stream_response_text',
    'get_text_parts',
    'get_url_parts',
    'new_artifact',
    'new_data_artifact',
    'new_data_artifact_update_event',
    'new_data_message',
    'new_data_part',
    'new_message',
    'new_raw_artifact',
    'new_raw_artifact_update_event',
    'new_raw_message',
    'new_raw_part',
    'new_task',
    'new_task_from_user_message',
    'new_text_artifact',
    'new_text_artifact_update_event',
    'new_text_message',
    'new_text_part',
    'new_text_status_update_event',
    'new_url_artifact',
    'new_url_artifact_update_event',
    'new_url_message',
    'new_url_part',
]
162 changes: 162 additions & 0 deletions src/a2a/helpers/proto_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -401,6 +401,45 @@ def get_text_parts(parts: Sequence[Part]) -> list[str]:
return [part.text for part in parts if part.HasField('text')]


def get_data_parts(parts: Sequence[Part]) -> list[Any]:
    """Collects the deserialized payload of every data Part.

    Parts that do not carry a ``data`` field are skipped; the relative
    order of the remaining payloads is preserved.

    Args:
        parts: A sequence of ``Part`` objects.

    Returns:
        A list of deserialized data values from any data Parts found.
    """
    collected: list[Any] = []
    for candidate in parts:
        if candidate.HasField('data'):
            collected.append(candidate.data)
    return collected


def get_raw_parts(parts: Sequence[Part]) -> list[bytes]:
    """Collects the raw bytes content of every raw Part.

    Args:
        parts: A sequence of ``Part`` objects.

    Returns:
        A list of ``bytes`` from any raw Parts found.
    """
    extracted: list[bytes] = []
    for entry in parts:
        # Skip anything that is not a raw-bytes Part.
        if not entry.HasField('raw'):
            continue
        extracted.append(entry.raw)
    return extracted


def get_url_parts(parts: Sequence[Part]) -> list[str]:
    """Collects the URL string of every URL Part.

    Args:
        parts: A sequence of ``Part`` objects.

    Returns:
        A list of URL strings from any URL Parts found.
    """
    found: list[str] = []
    for item in parts:
        if item.HasField('url'):
            found.append(item.url)
    return found


# --- Event & Stream Helpers ---


Expand Down Expand Up @@ -447,6 +486,129 @@ def new_text_artifact_update_event( # noqa: PLR0913
)


def new_data_artifact_update_event(  # noqa: PLR0913
    task_id: str,
    context_id: str,
    name: str,
    data: Any,
    media_type: str | None = None,
    append: bool = False,
    last_chunk: bool = False,
    artifact_id: str | None = None,
) -> TaskArtifactUpdateEvent:
    """Creates a TaskArtifactUpdateEvent with a single data artifact.

    Args:
        task_id: The task ID.
        context_id: The context ID.
        name: The name of the artifact.
        data: JSON-serializable data to embed (dict, list, str, etc.).
        media_type: Optional MIME type of the part content.
        append: Whether to append to the existing artifact.
        last_chunk: Whether this is the last chunk.
        artifact_id: Optional artifact ID (auto-generated if not provided).

    Returns:
        A TaskArtifactUpdateEvent with a single data artifact.
    """
    # Build the wrapped artifact first, then attach it to the event.
    data_artifact = new_data_artifact(
        name=name,
        data=data,
        media_type=media_type,
        artifact_id=artifact_id,
    )
    return TaskArtifactUpdateEvent(
        task_id=task_id,
        context_id=context_id,
        artifact=data_artifact,
        append=append,
        last_chunk=last_chunk,
    )


def new_raw_artifact_update_event(  # noqa: PLR0913
    task_id: str,
    context_id: str,
    name: str,
    raw: bytes,
    media_type: str | None = None,
    filename: str | None = None,
    append: bool = False,
    last_chunk: bool = False,
    artifact_id: str | None = None,
) -> TaskArtifactUpdateEvent:
    """Creates a TaskArtifactUpdateEvent with a single raw bytes artifact.

    Args:
        task_id: The task ID.
        context_id: The context ID.
        name: The name of the artifact.
        raw: The raw bytes content.
        media_type: Optional MIME type (e.g. 'image/png').
        filename: Optional filename.
        append: Whether to append to the existing artifact.
        last_chunk: Whether this is the last chunk.
        artifact_id: Optional artifact ID (auto-generated if not provided).

    Returns:
        A TaskArtifactUpdateEvent with a single raw artifact.
    """
    # Construct the raw-bytes artifact separately for readability.
    raw_artifact = new_raw_artifact(
        name=name,
        raw=raw,
        media_type=media_type,
        filename=filename,
        artifact_id=artifact_id,
    )
    return TaskArtifactUpdateEvent(
        task_id=task_id,
        context_id=context_id,
        artifact=raw_artifact,
        append=append,
        last_chunk=last_chunk,
    )


def new_url_artifact_update_event(  # noqa: PLR0913
    task_id: str,
    context_id: str,
    name: str,
    url: str,
    media_type: str | None = None,
    filename: str | None = None,
    append: bool = False,
    last_chunk: bool = False,
    artifact_id: str | None = None,
) -> TaskArtifactUpdateEvent:
    """Creates a TaskArtifactUpdateEvent with a single URL artifact.

    Args:
        task_id: The task ID.
        context_id: The context ID.
        name: The name of the artifact.
        url: The URL pointing to the file content.
        media_type: Optional MIME type (e.g. 'image/png').
        filename: Optional filename.
        append: Whether to append to the existing artifact.
        last_chunk: Whether this is the last chunk.
        artifact_id: Optional artifact ID (auto-generated if not provided).

    Returns:
        A TaskArtifactUpdateEvent with a single URL artifact.
    """
    # Assemble the URL-backed artifact, then wrap it in the update event.
    url_artifact = new_url_artifact(
        name=name,
        url=url,
        media_type=media_type,
        filename=filename,
        artifact_id=artifact_id,
    )
    return TaskArtifactUpdateEvent(
        task_id=task_id,
        context_id=context_id,
        artifact=url_artifact,
        append=append,
        last_chunk=last_chunk,
    )


def get_stream_response_text(
response: StreamResponse, delimiter: str = '\n'
) -> str:
Expand Down
Loading
Loading