From 0b425ce8c2ae6f1e4513c74253f7fba8ae0d7816 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Sat, 21 Sep 2024 20:07:53 +0100 Subject: [PATCH 01/33] works Signed-off-by: Elena Kolevska --- dapr/clients/grpc/client.py | 18 +- dapr/clients/grpc/subscription.py | 102 +++++ dapr/proto/common/v1/common_pb2.py | 12 +- dapr/proto/common/v1/common_pb2_grpc.py | 9 +- dapr/proto/runtime/v1/appcallback_pb2.py | 12 +- dapr/proto/runtime/v1/appcallback_pb2_grpc.py | 12 +- dapr/proto/runtime/v1/dapr_pb2.py | 400 +++++++++--------- dapr/proto/runtime/v1/dapr_pb2.pyi | 115 ++++- dapr/proto/runtime/v1/dapr_pb2_grpc.py | 17 +- examples/pubsub_streaming/publisher.py | 68 +++ examples/pubsub_streaming/subscriber.py | 45 ++ 11 files changed, 568 insertions(+), 242 deletions(-) create mode 100644 dapr/clients/grpc/subscription.py create mode 100644 examples/pubsub_streaming/publisher.py create mode 100644 examples/pubsub_streaming/subscriber.py diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index 64a26408..71009b83 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -41,11 +41,12 @@ from dapr.clients.grpc._state import StateOptions, StateItem from dapr.clients.grpc._helpers import getWorkflowRuntimeStatus from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions +from dapr.clients.grpc.subscription import Subscription from dapr.clients.grpc.interceptors import DaprClientInterceptor, DaprClientTimeoutInterceptor from dapr.clients.health import DaprHealth from dapr.clients.retry import RetryPolicy from dapr.conf import settings -from dapr.proto import api_v1, api_service_v1, common_v1 +from dapr.proto import api_v1, api_service_v1, common_v1, appcallback_v1 from dapr.proto.runtime.v1.dapr_pb2 import UnsubscribeConfigurationResponse from dapr.version import __version__ @@ -481,6 +482,21 @@ def publish_event( return DaprResponse(call.initial_metadata()) + # def subscribe(self, pubsub_name, topic, metadata=None, dead_letter_topic=None): + # stream = self._stub.SubscribeTopicEventsAlpha1() + # + # # Send InitialRequest + # initial_request = api_v1.SubscribeTopicEventsInitialRequestAlpha1(pubsub_name=pubsub_name, topic=topic, metadata=metadata, dead_letter_topic=dead_letter_topic) + # request = api_v1.SubscribeTopicEventsRequestAlpha1(initial_request=initial_request) + # stream.write(request) + # + # return stream + + def subscribe(self, pubsub_name, topic, metadata=None, dead_letter_topic=None): + subscription = Subscription(self._stub, pubsub_name, topic, metadata, dead_letter_topic) + subscription.start() + return subscription + def get_state( self, store_name: str, diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py new file mode 100644 index 00000000..c4082411 --- /dev/null +++ b/dapr/clients/grpc/subscription.py @@ -0,0 +1,102 @@ +import grpc +from dapr.proto import api_v1, appcallback_v1 +import queue +import threading + + +def success(): + return appcallback_v1.TopicEventResponse.SUCCESS + + +def retry(): + return appcallback_v1.TopicEventResponse.RETRY + + +def drop(): + return appcallback_v1.TopicEventResponse.DROP + + +class Subscription: + def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None): + self._stub = stub + self.pubsub_name = pubsub_name + self.topic = topic + self.metadata = metadata or {} + self.dead_letter_topic = dead_letter_topic or '' + self._stream = None + self._send_queue = queue.Queue() + self._receive_queue = queue.Queue() + self._stream_active = False + + def 
start(self):
+        def request_iterator():
+            try:
+                # Send the InitialRequest needed to establish the stream
+                initial_request = api_v1.SubscribeTopicEventsRequestAlpha1(
+                    initial_request=api_v1.SubscribeTopicEventsRequestInitialAlpha1(
+                        pubsub_name=self.pubsub_name, topic=self.topic, metadata=self.metadata or {},
+                        dead_letter_topic=self.dead_letter_topic or ''))
+                yield initial_request
+
+                while self._stream_active:
+                    try:
+                        request = self._send_queue.get()
+                        if request is None:
+                            break
+
+                        yield request
+                    except queue.Empty:
+                        continue
+            except Exception as e:
+                print(f"Exception in request_iterator: {e}")
+                raise e
+
+        # Mark the stream active, then create the bidirectional stream
+        self._stream_active = True
+        self._stream = self._stub.SubscribeTopicEventsAlpha1(request_iterator())
+
+        # Start a thread to handle incoming messages
+        threading.Thread(target=self._handle_responses, daemon=True).start()
+
+    def _handle_responses(self):
+        try:
+            # The first message dapr sends on the stream is for signalling only, so discard it
+            next(self._stream)
+
+            for msg in self._stream:
+                # Each SubscribeTopicEventsResponseAlpha1 carries the next topic event
+                self._receive_queue.put(msg.event_message)
+        except grpc.RpcError as e:
+            print(f"gRPC error in stream: {e}")
+        except Exception as e:
+            print(f"Unexpected error in stream: {e}")
+        finally:
+            self._stream_active = False
+
+    def next_message(self, timeout=None):
+        # Block until the next event arrives, or until the timeout expires
+        try:
+            return self._receive_queue.get(timeout=timeout)
+        except queue.Empty:
+            # No event was received within the timeout
+            return None
+        except Exception as e:
+            print(f"Exception in next_message: {e}")
+            return None
+
+    def respond(self, message, status):
+        try:
+            status = appcallback_v1.TopicEventResponse(status=status.value)
+            response = api_v1.SubscribeTopicEventsRequestProcessedAlpha1(id=message.id,
+                                                                         status=status)
+            msg = api_v1.SubscribeTopicEventsRequestAlpha1(event_processed=response)
+
+            self._send_queue.put(msg)
+        except Exception as e:
+            print(f"Exception in respond: {e}")
+
+    def close(self):
+        self._stream_active = False
+        self._send_queue.put(None)
+        if self._stream:
+            self._stream.cancel()
\ No newline at end of file
diff --git a/dapr/proto/common/v1/common_pb2.py b/dapr/proto/common/v1/common_pb2.py
index b4f795de..3f7d8f25 100644
--- a/dapr/proto/common/v1/common_pb2.py
+++ b/dapr/proto/common/v1/common_pb2.py
@@ -1,12 +1,22 @@
 # -*- coding: utf-8 -*-
 # Generated by the protocol buffer compiler. DO NOT EDIT!
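For context, here is a minimal usage sketch of the streaming subscription API introduced by this patch (the runnable examples live in examples/pubsub_streaming/, which the patch also adds but which are not shown in this excerpt). The component name 'pubsub', the topic 'TOPIC_A', and the small Enum wrapper are placeholders/assumptions; the wrapper only exists so that the status.value access performed by Subscription.respond() in this commit resolves to a TopicEventResponse status code.

from enum import Enum

from dapr.clients import DaprClient
from dapr.proto import appcallback_v1


class TopicEventResponseStatus(Enum):
    # Hypothetical helper: wraps the raw TopicEventResponse status codes so that
    # status.value, as used by Subscription.respond(), yields the proto enum value.
    success = appcallback_v1.TopicEventResponse.SUCCESS
    retry = appcallback_v1.TopicEventResponse.RETRY
    drop = appcallback_v1.TopicEventResponse.DROP


with DaprClient() as client:
    # 'pubsub' and 'TOPIC_A' are placeholder component/topic names.
    subscription = client.subscribe(pubsub_name='pubsub', topic='TOPIC_A')
    try:
        for _ in range(5):
            # next_message() returns the next TopicEventRequest, or None on timeout.
            message = subscription.next_message(timeout=5)
            if message is None:
                continue
            print(f'Received event id={message.id} data={message.data}')
            # Acknowledge the event so the sidecar does not redeliver it.
            subscription.respond(message, TopicEventResponseStatus.success)
    finally:
        subscription.close()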
+# NO CHECKED-IN PROTOBUF GENCODE # source: dapr/proto/common/v1/common.proto -# Protobuf Python Version: 5.26.1 +# Protobuf Python Version: 5.27.2 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 27, + 2, + '', + 'dapr/proto/common/v1/common.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() diff --git a/dapr/proto/common/v1/common_pb2_grpc.py b/dapr/proto/common/v1/common_pb2_grpc.py index cd86bc6a..310e7b40 100644 --- a/dapr/proto/common/v1/common_pb2_grpc.py +++ b/dapr/proto/common/v1/common_pb2_grpc.py @@ -4,10 +4,8 @@ import warnings -GRPC_GENERATED_VERSION = '1.63.0' +GRPC_GENERATED_VERSION = '1.66.1' GRPC_VERSION = grpc.__version__ -EXPECTED_ERROR_RELEASE = '1.65.0' -SCHEDULED_RELEASE_DATE = 'June 25, 2024' _version_not_supported = False try: @@ -17,13 +15,10 @@ _version_not_supported = True if _version_not_supported: - warnings.warn( + raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' + f' but the generated code in dapr/proto/common/v1/common_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},' - + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.', - RuntimeWarning ) diff --git a/dapr/proto/runtime/v1/appcallback_pb2.py b/dapr/proto/runtime/v1/appcallback_pb2.py index b6f27030..118d1959 100644 --- a/dapr/proto/runtime/v1/appcallback_pb2.py +++ b/dapr/proto/runtime/v1/appcallback_pb2.py @@ -1,12 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE # source: dapr/proto/runtime/v1/appcallback.proto -# Protobuf Python Version: 5.26.1 +# Protobuf Python Version: 5.27.2 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 27, + 2, + '', + 'dapr/proto/runtime/v1/appcallback.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() diff --git a/dapr/proto/runtime/v1/appcallback_pb2_grpc.py b/dapr/proto/runtime/v1/appcallback_pb2_grpc.py index 92a05f46..cd3e63c8 100644 --- a/dapr/proto/runtime/v1/appcallback_pb2_grpc.py +++ b/dapr/proto/runtime/v1/appcallback_pb2_grpc.py @@ -7,10 +7,8 @@ from dapr.proto.runtime.v1 import appcallback_pb2 as dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -GRPC_GENERATED_VERSION = '1.63.0' +GRPC_GENERATED_VERSION = '1.66.1' GRPC_VERSION = grpc.__version__ -EXPECTED_ERROR_RELEASE = '1.65.0' -SCHEDULED_RELEASE_DATE = 'June 25, 2024' _version_not_supported = False try: @@ -20,15 +18,12 @@ _version_not_supported = True if _version_not_supported: - warnings.warn( + raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' + f' but the generated code in dapr/proto/runtime/v1/appcallback_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},' - + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.', - RuntimeWarning ) @@ -147,6 +142,7 @@ def add_AppCallbackServicer_to_server(servicer, server): generic_handler = grpc.method_handlers_generic_handler( 'dapr.proto.runtime.v1.AppCallback', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('dapr.proto.runtime.v1.AppCallback', rpc_method_handlers) # This class is part of an EXPERIMENTAL API. @@ -334,6 +330,7 @@ def add_AppCallbackHealthCheckServicer_to_server(servicer, server): generic_handler = grpc.method_handlers_generic_handler( 'dapr.proto.runtime.v1.AppCallbackHealthCheck', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('dapr.proto.runtime.v1.AppCallbackHealthCheck', rpc_method_handlers) # This class is part of an EXPERIMENTAL API. @@ -429,6 +426,7 @@ def add_AppCallbackAlphaServicer_to_server(servicer, server): generic_handler = grpc.method_handlers_generic_handler( 'dapr.proto.runtime.v1.AppCallbackAlpha', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('dapr.proto.runtime.v1.AppCallbackAlpha', rpc_method_handlers) # This class is part of an EXPERIMENTAL API. diff --git a/dapr/proto/runtime/v1/dapr_pb2.py b/dapr/proto/runtime/v1/dapr_pb2.py index e46a132a..21e766bf 100644 --- a/dapr/proto/runtime/v1/dapr_pb2.py +++ b/dapr/proto/runtime/v1/dapr_pb2.py @@ -1,12 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE # source: dapr/proto/runtime/v1/dapr.proto -# Protobuf Python Version: 5.26.1 +# Protobuf Python Version: 5.27.2 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 27, + 2, + '', + 'dapr/proto/runtime/v1/dapr.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -19,7 +29,7 @@ from dapr.proto.runtime.v1 import appcallback_pb2 as dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n dapr/proto/runtime/v1/dapr.proto\x12\x15\x64\x61pr.proto.runtime.v1\x1a\x19google/protobuf/any.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a!dapr/proto/common/v1/common.proto\x1a\'dapr/proto/runtime/v1/appcallback.proto\"X\n\x14InvokeServiceRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x34\n\x07message\x18\x03 \x01(\x0b\x32#.dapr.proto.common.v1.InvokeRequest\"\xf5\x01\n\x0fGetStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12H\n\x0b\x63onsistency\x18\x03 \x01(\x0e\x32\x33.dapr.proto.common.v1.StateOptions.StateConsistency\x12\x46\n\x08metadata\x18\x04 \x03(\x0b\x32\x34.dapr.proto.runtime.v1.GetStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc9\x01\n\x13GetBulkStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x13\n\x0bparallelism\x18\x03 \x01(\x05\x12J\n\x08metadata\x18\x04 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.GetBulkStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"K\n\x14GetBulkStateResponse\x12\x33\n\x05items\x18\x01 \x03(\x0b\x32$.dapr.proto.runtime.v1.BulkStateItem\"\xbe\x01\n\rBulkStateItem\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\x12\x44\n\x08metadata\x18\x05 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.BulkStateItem.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa8\x01\n\x10GetStateResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x02 \x01(\t\x12G\n\x08metadata\x18\x03 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.GetStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x90\x02\n\x12\x44\x65leteStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12(\n\x04\x65tag\x18\x03 \x01(\x0b\x32\x1a.dapr.proto.common.v1.Etag\x12\x33\n\x07options\x18\x04 \x01(\x0b\x32\".dapr.proto.common.v1.StateOptions\x12I\n\x08metadata\x18\x05 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.DeleteStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x16\x44\x65leteBulkStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12/\n\x06states\x18\x02 \x03(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"W\n\x10SaveStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12/\n\x06states\x18\x02 
\x03(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"\xbc\x01\n\x11QueryStateRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\r\n\x05query\x18\x02 \x01(\t\x12H\n\x08metadata\x18\x03 \x03(\x0b\x32\x36.dapr.proto.runtime.v1.QueryStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"H\n\x0eQueryStateItem\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\"\xd7\x01\n\x12QueryStateResponse\x12\x36\n\x07results\x18\x01 \x03(\x0b\x32%.dapr.proto.runtime.v1.QueryStateItem\x12\r\n\x05token\x18\x02 \x01(\t\x12I\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.QueryStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdf\x01\n\x13PublishEventRequest\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\x19\n\x11\x64\x61ta_content_type\x18\x04 \x01(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.PublishEventRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xf5\x01\n\x12\x42ulkPublishRequest\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12?\n\x07\x65ntries\x18\x03 \x03(\x0b\x32..dapr.proto.runtime.v1.BulkPublishRequestEntry\x12I\n\x08metadata\x18\x04 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.BulkPublishRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xd1\x01\n\x17\x42ulkPublishRequestEntry\x12\x10\n\x08\x65ntry_id\x18\x01 \x01(\t\x12\r\n\x05\x65vent\x18\x02 \x01(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x03 \x01(\t\x12N\n\x08metadata\x18\x04 \x03(\x0b\x32<.dapr.proto.runtime.v1.BulkPublishRequestEntry.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"c\n\x13\x42ulkPublishResponse\x12L\n\rfailedEntries\x18\x01 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.BulkPublishResponseFailedEntry\"A\n\x1e\x42ulkPublishResponseFailedEntry\x12\x10\n\x08\x65ntry_id\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\"\xfb\x01\n!SubscribeTopicEventsRequestAlpha1\x12Z\n\x0finitial_request\x18\x01 \x01(\x0b\x32?.dapr.proto.runtime.v1.SubscribeTopicEventsInitialRequestAlpha1H\x00\x12S\n\x0e\x65vent_response\x18\x02 \x01(\x0b\x32\x39.dapr.proto.runtime.v1.SubscribeTopicEventsResponseAlpha1H\x00\x42%\n#subscribe_topic_events_request_type\"\x96\x02\n(SubscribeTopicEventsInitialRequestAlpha1\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12_\n\x08metadata\x18\x03 \x03(\x0b\x32M.dapr.proto.runtime.v1.SubscribeTopicEventsInitialRequestAlpha1.MetadataEntry\x12\x1e\n\x11\x64\x65\x61\x64_letter_topic\x18\x04 \x01(\tH\x00\x88\x01\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x14\n\x12_dead_letter_topic\"k\n\"SubscribeTopicEventsResponseAlpha1\x12\n\n\x02id\x18\x01 \x01(\t\x12\x39\n\x06status\x18\x02 \x01(\x0b\x32).dapr.proto.runtime.v1.TopicEventResponse\"\xc3\x01\n\x14InvokeBindingRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12K\n\x08metadata\x18\x03 \x03(\x0b\x32\x39.dapr.proto.runtime.v1.InvokeBindingRequest.MetadataEntry\x12\x11\n\toperation\x18\x04 \x01(\t\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa4\x01\n\x15InvokeBindingResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12L\n\x08metadata\x18\x02 \x03(\x0b\x32:.dapr.proto.runtime.v1.InvokeBindingResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb8\x01\n\x10GetSecretRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x0b\n\x03key\x18\x02 \x01(\t\x12G\n\x08metadata\x18\x03 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.GetSecretRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x82\x01\n\x11GetSecretResponse\x12@\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.GetSecretResponse.DataEntry\x1a+\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb3\x01\n\x14GetBulkSecretRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12K\n\x08metadata\x18\x02 \x03(\x0b\x32\x39.dapr.proto.runtime.v1.GetBulkSecretRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x85\x01\n\x0eSecretResponse\x12\x43\n\x07secrets\x18\x01 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.SecretResponse.SecretsEntry\x1a.\n\x0cSecretsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb1\x01\n\x15GetBulkSecretResponse\x12\x44\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x36.dapr.proto.runtime.v1.GetBulkSecretResponse.DataEntry\x1aR\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.dapr.proto.runtime.v1.SecretResponse:\x02\x38\x01\"f\n\x1bTransactionalStateOperation\x12\x15\n\roperationType\x18\x01 \x01(\t\x12\x30\n\x07request\x18\x02 \x01(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"\x83\x02\n\x1e\x45xecuteStateTransactionRequest\x12\x11\n\tstoreName\x18\x01 \x01(\t\x12\x46\n\noperations\x18\x02 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.TransactionalStateOperation\x12U\n\x08metadata\x18\x03 \x03(\x0b\x32\x43.dapr.proto.runtime.v1.ExecuteStateTransactionRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbb\x01\n\x19RegisterActorTimerRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x19\n\x08\x64ue_time\x18\x04 \x01(\tR\x07\x64ueTime\x12\x0e\n\x06period\x18\x05 \x01(\t\x12\x10\n\x08\x63\x61llback\x18\x06 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x07 \x01(\x0c\x12\x0b\n\x03ttl\x18\x08 \x01(\t\"e\n\x1bUnregisterActorTimerRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\"\xac\x01\n\x1cRegisterActorReminderRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x19\n\x08\x64ue_time\x18\x04 \x01(\tR\x07\x64ueTime\x12\x0e\n\x06period\x18\x05 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\x12\x0b\n\x03ttl\x18\x07 \x01(\t\"h\n\x1eUnregisterActorReminderRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\"]\n\x14GetActorStateRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0b\n\x03key\x18\x03 
\x01(\t\"\xa4\x01\n\x15GetActorStateResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12L\n\x08metadata\x18\x02 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetActorStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xac\x01\n#ExecuteActorStateTransactionRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12K\n\noperations\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.TransactionalActorStateOperation\"\xf5\x01\n TransactionalActorStateOperation\x12\x15\n\roperationType\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12#\n\x05value\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any\x12W\n\x08metadata\x18\x04 \x03(\x0b\x32\x45.dapr.proto.runtime.v1.TransactionalActorStateOperation.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xe8\x01\n\x12InvokeActorRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0e\n\x06method\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\x12I\n\x08metadata\x18\x05 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.InvokeActorRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"#\n\x13InvokeActorResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"\x14\n\x12GetMetadataRequest\"\x9b\x06\n\x13GetMetadataResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12Q\n\x13\x61\x63tive_actors_count\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountB\x02\x18\x01R\x06\x61\x63tors\x12V\n\x15registered_components\x18\x03 \x03(\x0b\x32+.dapr.proto.runtime.v1.RegisteredComponentsR\ncomponents\x12\x65\n\x11\x65xtended_metadata\x18\x04 \x03(\x0b\x32@.dapr.proto.runtime.v1.GetMetadataResponse.ExtendedMetadataEntryR\x08\x65xtended\x12O\n\rsubscriptions\x18\x05 \x03(\x0b\x32).dapr.proto.runtime.v1.PubsubSubscriptionR\rsubscriptions\x12R\n\x0ehttp_endpoints\x18\x06 \x03(\x0b\x32+.dapr.proto.runtime.v1.MetadataHTTPEndpointR\rhttpEndpoints\x12j\n\x19\x61pp_connection_properties\x18\x07 \x01(\x0b\x32..dapr.proto.runtime.v1.AppConnectionPropertiesR\x17\x61ppConnectionProperties\x12\'\n\x0fruntime_version\x18\x08 \x01(\tR\x0eruntimeVersion\x12)\n\x10\x65nabled_features\x18\t \x03(\tR\x0f\x65nabledFeatures\x12H\n\ractor_runtime\x18\n \x01(\x0b\x32#.dapr.proto.runtime.v1.ActorRuntimeR\x0c\x61\x63torRuntime\x1a\x37\n\x15\x45xtendedMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x02\n\x0c\x41\x63torRuntime\x12]\n\x0eruntime_status\x18\x01 \x01(\x0e\x32\x36.dapr.proto.runtime.v1.ActorRuntime.ActorRuntimeStatusR\rruntimeStatus\x12M\n\ractive_actors\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountR\x0c\x61\x63tiveActors\x12\x1d\n\nhost_ready\x18\x03 \x01(\x08R\thostReady\x12\x1c\n\tplacement\x18\x04 \x01(\tR\tplacement\"A\n\x12\x41\x63torRuntimeStatus\x12\x10\n\x0cINITIALIZING\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\"0\n\x11\x41\x63tiveActorsCount\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\"Y\n\x14RegisteredComponents\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x04 \x03(\t\"*\n\x14MetadataHTTPEndpoint\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xd1\x01\n\x17\x41ppConnectionProperties\x12\x0c\n\x04port\x18\x01 
\x01(\x05\x12\x10\n\x08protocol\x18\x02 \x01(\t\x12\'\n\x0f\x63hannel_address\x18\x03 \x01(\tR\x0e\x63hannelAddress\x12\'\n\x0fmax_concurrency\x18\x04 \x01(\x05R\x0emaxConcurrency\x12\x44\n\x06health\x18\x05 \x01(\x0b\x32\x34.dapr.proto.runtime.v1.AppConnectionHealthProperties\"\xdc\x01\n\x1d\x41ppConnectionHealthProperties\x12*\n\x11health_check_path\x18\x01 \x01(\tR\x0fhealthCheckPath\x12\x32\n\x15health_probe_interval\x18\x02 \x01(\tR\x13healthProbeInterval\x12\x30\n\x14health_probe_timeout\x18\x03 \x01(\tR\x12healthProbeTimeout\x12)\n\x10health_threshold\x18\x04 \x01(\x05R\x0fhealthThreshold\"\x86\x03\n\x12PubsubSubscription\x12\x1f\n\x0bpubsub_name\x18\x01 \x01(\tR\npubsubname\x12\x14\n\x05topic\x18\x02 \x01(\tR\x05topic\x12S\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.PubsubSubscription.MetadataEntryR\x08metadata\x12\x44\n\x05rules\x18\x04 \x01(\x0b\x32..dapr.proto.runtime.v1.PubsubSubscriptionRulesR\x05rules\x12*\n\x11\x64\x65\x61\x64_letter_topic\x18\x05 \x01(\tR\x0f\x64\x65\x61\x64LetterTopic\x12\x41\n\x04type\x18\x06 \x01(\x0e\x32-.dapr.proto.runtime.v1.PubsubSubscriptionTypeR\x04type\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"W\n\x17PubsubSubscriptionRules\x12<\n\x05rules\x18\x01 \x03(\x0b\x32-.dapr.proto.runtime.v1.PubsubSubscriptionRule\"5\n\x16PubsubSubscriptionRule\x12\r\n\x05match\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"0\n\x12SetMetadataRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\xbc\x01\n\x17GetConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12N\n\x08metadata\x18\x03 \x03(\x0b\x32<.dapr.proto.runtime.v1.GetConfigurationRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x01\n\x18GetConfigurationResponse\x12I\n\x05items\x18\x01 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetConfigurationResponse.ItemsEntry\x1aU\n\nItemsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.dapr.proto.common.v1.ConfigurationItem:\x02\x38\x01\"\xc8\x01\n\x1dSubscribeConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12T\n\x08metadata\x18\x03 \x03(\x0b\x32\x42.dapr.proto.runtime.v1.SubscribeConfigurationRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"A\n\x1fUnsubscribeConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\"\xd4\x01\n\x1eSubscribeConfigurationResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12O\n\x05items\x18\x02 \x03(\x0b\x32@.dapr.proto.runtime.v1.SubscribeConfigurationResponse.ItemsEntry\x1aU\n\nItemsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.dapr.proto.common.v1.ConfigurationItem:\x02\x38\x01\"?\n UnsubscribeConfigurationResponse\x12\n\n\x02ok\x18\x01 \x01(\x08\x12\x0f\n\x07message\x18\x02 \x01(\t\"\x9b\x01\n\x0eTryLockRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x1f\n\x0bresource_id\x18\x02 \x01(\tR\nresourceId\x12\x1d\n\nlock_owner\x18\x03 \x01(\tR\tlockOwner\x12*\n\x11\x65xpiry_in_seconds\x18\x04 \x01(\x05R\x0f\x65xpiryInSeconds\"\"\n\x0fTryLockResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\"n\n\rUnlockRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x1f\n\x0bresource_id\x18\x02 \x01(\tR\nresourceId\x12\x1d\n\nlock_owner\x18\x03 \x01(\tR\tlockOwner\"\xae\x01\n\x0eUnlockResponse\x12<\n\x06status\x18\x01 
\x01(\x0e\x32,.dapr.proto.runtime.v1.UnlockResponse.Status\"^\n\x06Status\x12\x0b\n\x07SUCCESS\x10\x00\x12\x17\n\x13LOCK_DOES_NOT_EXIST\x10\x01\x12\x1a\n\x16LOCK_BELONGS_TO_OTHERS\x10\x02\x12\x12\n\x0eINTERNAL_ERROR\x10\x03\"\xb0\x01\n\x13SubtleGetKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x44\n\x06\x66ormat\x18\x03 \x01(\x0e\x32\x34.dapr.proto.runtime.v1.SubtleGetKeyRequest.KeyFormat\"\x1e\n\tKeyFormat\x12\x07\n\x03PEM\x10\x00\x12\x08\n\x04JSON\x10\x01\"C\n\x14SubtleGetKeyResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1d\n\npublic_key\x18\x02 \x01(\tR\tpublicKey\"\xb6\x01\n\x14SubtleEncryptRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x11\n\tplaintext\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x06 \x01(\x0cR\x0e\x61ssociatedData\"8\n\x15SubtleEncryptResponse\x12\x12\n\nciphertext\x18\x01 \x01(\x0c\x12\x0b\n\x03tag\x18\x02 \x01(\x0c\"\xc4\x01\n\x14SubtleDecryptRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x12\n\nciphertext\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\x0b\n\x03tag\x18\x06 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x07 \x01(\x0cR\x0e\x61ssociatedData\"*\n\x15SubtleDecryptResponse\x12\x11\n\tplaintext\x18\x01 \x01(\x0c\"\xc8\x01\n\x14SubtleWrapKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12#\n\rplaintext_key\x18\x02 \x01(\x0cR\x0cplaintextKey\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x06 \x01(\x0cR\x0e\x61ssociatedData\"E\n\x15SubtleWrapKeyResponse\x12\x1f\n\x0bwrapped_key\x18\x01 \x01(\x0cR\nwrappedKey\x12\x0b\n\x03tag\x18\x02 \x01(\x0c\"\xd3\x01\n\x16SubtleUnwrapKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x1f\n\x0bwrapped_key\x18\x02 \x01(\x0cR\nwrappedKey\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\x0b\n\x03tag\x18\x06 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x07 \x01(\x0cR\x0e\x61ssociatedData\">\n\x17SubtleUnwrapKeyResponse\x12#\n\rplaintext_key\x18\x01 \x01(\x0cR\x0cplaintextKey\"x\n\x11SubtleSignRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0e\n\x06\x64igest\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\"\'\n\x12SubtleSignResponse\x12\x11\n\tsignature\x18\x01 \x01(\x0c\"\x8d\x01\n\x13SubtleVerifyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0e\n\x06\x64igest\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\x11\n\tsignature\x18\x05 \x01(\x0c\"%\n\x14SubtleVerifyResponse\x12\r\n\x05valid\x18\x01 \x01(\x08\"\x85\x01\n\x0e\x45ncryptRequest\x12=\n\x07options\x18\x01 \x01(\x0b\x32,.dapr.proto.runtime.v1.EncryptRequestOptions\x12\x34\n\x07payload\x18\x02 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"\xfe\x01\n\x15\x45ncryptRequestOptions\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x19\n\x08key_name\x18\x02 \x01(\tR\x07keyName\x12\x1a\n\x12key_wrap_algorithm\x18\x03 \x01(\t\x12\x1e\n\x16\x64\x61ta_encryption_cipher\x18\n \x01(\t\x12\x37\n\x18omit_decryption_key_name\x18\x0b 
\x01(\x08R\x15omitDecryptionKeyName\x12.\n\x13\x64\x65\x63ryption_key_name\x18\x0c \x01(\tR\x11\x64\x65\x63ryptionKeyName\"G\n\x0f\x45ncryptResponse\x12\x34\n\x07payload\x18\x01 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"\x85\x01\n\x0e\x44\x65\x63ryptRequest\x12=\n\x07options\x18\x01 \x01(\x0b\x32,.dapr.proto.runtime.v1.DecryptRequestOptions\x12\x34\n\x07payload\x18\x02 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"Y\n\x15\x44\x65\x63ryptRequestOptions\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x19\n\x08key_name\x18\x0c \x01(\tR\x07keyName\"G\n\x0f\x44\x65\x63ryptResponse\x12\x34\n\x07payload\x18\x01 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"d\n\x12GetWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x84\x03\n\x13GetWorkflowResponse\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12#\n\rworkflow_name\x18\x02 \x01(\tR\x0cworkflowName\x12\x39\n\ncreated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x42\n\x0flast_updated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\rlastUpdatedAt\x12%\n\x0eruntime_status\x18\x05 \x01(\tR\rruntimeStatus\x12N\n\nproperties\x18\x06 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetWorkflowResponse.PropertiesEntry\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x95\x02\n\x14StartWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\x12#\n\rworkflow_name\x18\x03 \x01(\tR\x0cworkflowName\x12I\n\x07options\x18\x04 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.StartWorkflowRequest.OptionsEntry\x12\r\n\x05input\x18\x05 \x01(\x0c\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"8\n\x15StartWorkflowResponse\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\"j\n\x18TerminateWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"f\n\x14PauseWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"g\n\x15ResumeWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x9e\x01\n\x19RaiseEventWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\x12\x1d\n\nevent_name\x18\x03 \x01(\tR\teventName\x12\x12\n\nevent_data\x18\x04 \x01(\x0c\"f\n\x14PurgeWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x11\n\x0fShutdownRequest\"\xbb\x01\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\x08schedule\x18\x02 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07repeats\x18\x03 \x01(\rH\x01\x88\x01\x01\x12\x15\n\x08\x64ue_time\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x10\n\x03ttl\x18\x05 \x01(\tH\x03\x88\x01\x01\x12\"\n\x04\x64\x61ta\x18\x06 \x01(\x0b\x32\x14.google.protobuf.AnyB\x0b\n\t_scheduleB\n\n\x08_repeatsB\x0b\n\t_due_timeB\x06\n\x04_ttl\"=\n\x12ScheduleJobRequest\x12\'\n\x03job\x18\x01 \x01(\x0b\x32\x1a.dapr.proto.runtime.v1.Job\"\x15\n\x13ScheduleJobResponse\"\x1d\n\rGetJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"9\n\x0eGetJobResponse\x12\'\n\x03job\x18\x01 \x01(\x0b\x32\x1a.dapr.proto.runtime.v1.Job\" 
\n\x10\x44\x65leteJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x13\n\x11\x44\x65leteJobResponse*W\n\x16PubsubSubscriptionType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x44\x45\x43LARATIVE\x10\x01\x12\x10\n\x0cPROGRAMMATIC\x10\x02\x12\r\n\tSTREAMING\x10\x03\x32\xab\x30\n\x04\x44\x61pr\x12\x64\n\rInvokeService\x12+.dapr.proto.runtime.v1.InvokeServiceRequest\x1a$.dapr.proto.common.v1.InvokeResponse\"\x00\x12]\n\x08GetState\x12&.dapr.proto.runtime.v1.GetStateRequest\x1a\'.dapr.proto.runtime.v1.GetStateResponse\"\x00\x12i\n\x0cGetBulkState\x12*.dapr.proto.runtime.v1.GetBulkStateRequest\x1a+.dapr.proto.runtime.v1.GetBulkStateResponse\"\x00\x12N\n\tSaveState\x12\'.dapr.proto.runtime.v1.SaveStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12i\n\x10QueryStateAlpha1\x12(.dapr.proto.runtime.v1.QueryStateRequest\x1a).dapr.proto.runtime.v1.QueryStateResponse\"\x00\x12R\n\x0b\x44\x65leteState\x12).dapr.proto.runtime.v1.DeleteStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12Z\n\x0f\x44\x65leteBulkState\x12-.dapr.proto.runtime.v1.DeleteBulkStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x17\x45xecuteStateTransaction\x12\x35.dapr.proto.runtime.v1.ExecuteStateTransactionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12T\n\x0cPublishEvent\x12*.dapr.proto.runtime.v1.PublishEventRequest\x1a\x16.google.protobuf.Empty\"\x00\x12q\n\x16\x42ulkPublishEventAlpha1\x12).dapr.proto.runtime.v1.BulkPublishRequest\x1a*.dapr.proto.runtime.v1.BulkPublishResponse\"\x00\x12\x86\x01\n\x1aSubscribeTopicEventsAlpha1\x12\x38.dapr.proto.runtime.v1.SubscribeTopicEventsRequestAlpha1\x1a(.dapr.proto.runtime.v1.TopicEventRequest\"\x00(\x01\x30\x01\x12l\n\rInvokeBinding\x12+.dapr.proto.runtime.v1.InvokeBindingRequest\x1a,.dapr.proto.runtime.v1.InvokeBindingResponse\"\x00\x12`\n\tGetSecret\x12\'.dapr.proto.runtime.v1.GetSecretRequest\x1a(.dapr.proto.runtime.v1.GetSecretResponse\"\x00\x12l\n\rGetBulkSecret\x12+.dapr.proto.runtime.v1.GetBulkSecretRequest\x1a,.dapr.proto.runtime.v1.GetBulkSecretResponse\"\x00\x12`\n\x12RegisterActorTimer\x12\x30.dapr.proto.runtime.v1.RegisterActorTimerRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x64\n\x14UnregisterActorTimer\x12\x32.dapr.proto.runtime.v1.UnregisterActorTimerRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x15RegisterActorReminder\x12\x33.dapr.proto.runtime.v1.RegisterActorReminderRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x17UnregisterActorReminder\x12\x35.dapr.proto.runtime.v1.UnregisterActorReminderRequest\x1a\x16.google.protobuf.Empty\"\x00\x12l\n\rGetActorState\x12+.dapr.proto.runtime.v1.GetActorStateRequest\x1a,.dapr.proto.runtime.v1.GetActorStateResponse\"\x00\x12t\n\x1c\x45xecuteActorStateTransaction\x12:.dapr.proto.runtime.v1.ExecuteActorStateTransactionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x0bInvokeActor\x12).dapr.proto.runtime.v1.InvokeActorRequest\x1a*.dapr.proto.runtime.v1.InvokeActorResponse\"\x00\x12{\n\x16GetConfigurationAlpha1\x12..dapr.proto.runtime.v1.GetConfigurationRequest\x1a/.dapr.proto.runtime.v1.GetConfigurationResponse\"\x00\x12u\n\x10GetConfiguration\x12..dapr.proto.runtime.v1.GetConfigurationRequest\x1a/.dapr.proto.runtime.v1.GetConfigurationResponse\"\x00\x12\x8f\x01\n\x1cSubscribeConfigurationAlpha1\x12\x34.dapr.proto.runtime.v1.SubscribeConfigurationRequest\x1a\x35.dapr.proto.runtime.v1.SubscribeConfigurationResponse\"\x00\x30\x01\x12\x89\x01\n\x16SubscribeConfiguration\x12\x34.dapr.proto.runtime.v1.SubscribeConfigurationRequest\x1a\x35.dapr.proto.runtime.v1.SubscribeConfigurationResponse\"\x00\x30\x01\x12\x93\x01\n\
x1eUnsubscribeConfigurationAlpha1\x12\x36.dapr.proto.runtime.v1.UnsubscribeConfigurationRequest\x1a\x37.dapr.proto.runtime.v1.UnsubscribeConfigurationResponse\"\x00\x12\x8d\x01\n\x18UnsubscribeConfiguration\x12\x36.dapr.proto.runtime.v1.UnsubscribeConfigurationRequest\x1a\x37.dapr.proto.runtime.v1.UnsubscribeConfigurationResponse\"\x00\x12`\n\rTryLockAlpha1\x12%.dapr.proto.runtime.v1.TryLockRequest\x1a&.dapr.proto.runtime.v1.TryLockResponse\"\x00\x12]\n\x0cUnlockAlpha1\x12$.dapr.proto.runtime.v1.UnlockRequest\x1a%.dapr.proto.runtime.v1.UnlockResponse\"\x00\x12\x62\n\rEncryptAlpha1\x12%.dapr.proto.runtime.v1.EncryptRequest\x1a&.dapr.proto.runtime.v1.EncryptResponse(\x01\x30\x01\x12\x62\n\rDecryptAlpha1\x12%.dapr.proto.runtime.v1.DecryptRequest\x1a&.dapr.proto.runtime.v1.DecryptResponse(\x01\x30\x01\x12\x66\n\x0bGetMetadata\x12).dapr.proto.runtime.v1.GetMetadataRequest\x1a*.dapr.proto.runtime.v1.GetMetadataResponse\"\x00\x12R\n\x0bSetMetadata\x12).dapr.proto.runtime.v1.SetMetadataRequest\x1a\x16.google.protobuf.Empty\"\x00\x12m\n\x12SubtleGetKeyAlpha1\x12*.dapr.proto.runtime.v1.SubtleGetKeyRequest\x1a+.dapr.proto.runtime.v1.SubtleGetKeyResponse\x12p\n\x13SubtleEncryptAlpha1\x12+.dapr.proto.runtime.v1.SubtleEncryptRequest\x1a,.dapr.proto.runtime.v1.SubtleEncryptResponse\x12p\n\x13SubtleDecryptAlpha1\x12+.dapr.proto.runtime.v1.SubtleDecryptRequest\x1a,.dapr.proto.runtime.v1.SubtleDecryptResponse\x12p\n\x13SubtleWrapKeyAlpha1\x12+.dapr.proto.runtime.v1.SubtleWrapKeyRequest\x1a,.dapr.proto.runtime.v1.SubtleWrapKeyResponse\x12v\n\x15SubtleUnwrapKeyAlpha1\x12-.dapr.proto.runtime.v1.SubtleUnwrapKeyRequest\x1a..dapr.proto.runtime.v1.SubtleUnwrapKeyResponse\x12g\n\x10SubtleSignAlpha1\x12(.dapr.proto.runtime.v1.SubtleSignRequest\x1a).dapr.proto.runtime.v1.SubtleSignResponse\x12m\n\x12SubtleVerifyAlpha1\x12*.dapr.proto.runtime.v1.SubtleVerifyRequest\x1a+.dapr.proto.runtime.v1.SubtleVerifyResponse\x12r\n\x13StartWorkflowAlpha1\x12+.dapr.proto.runtime.v1.StartWorkflowRequest\x1a,.dapr.proto.runtime.v1.StartWorkflowResponse\"\x00\x12l\n\x11GetWorkflowAlpha1\x12).dapr.proto.runtime.v1.GetWorkflowRequest\x1a*.dapr.proto.runtime.v1.GetWorkflowResponse\"\x00\x12\\\n\x13PurgeWorkflowAlpha1\x12+.dapr.proto.runtime.v1.PurgeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x64\n\x17TerminateWorkflowAlpha1\x12/.dapr.proto.runtime.v1.TerminateWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\\\n\x13PauseWorkflowAlpha1\x12+.dapr.proto.runtime.v1.PauseWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12^\n\x14ResumeWorkflowAlpha1\x12,.dapr.proto.runtime.v1.ResumeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x18RaiseEventWorkflowAlpha1\x12\x30.dapr.proto.runtime.v1.RaiseEventWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12q\n\x12StartWorkflowBeta1\x12+.dapr.proto.runtime.v1.StartWorkflowRequest\x1a,.dapr.proto.runtime.v1.StartWorkflowResponse\"\x00\x12k\n\x10GetWorkflowBeta1\x12).dapr.proto.runtime.v1.GetWorkflowRequest\x1a*.dapr.proto.runtime.v1.GetWorkflowResponse\"\x00\x12[\n\x12PurgeWorkflowBeta1\x12+.dapr.proto.runtime.v1.PurgeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x63\n\x16TerminateWorkflowBeta1\x12/.dapr.proto.runtime.v1.TerminateWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12[\n\x12PauseWorkflowBeta1\x12+.dapr.proto.runtime.v1.PauseWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12]\n\x13ResumeWorkflowBeta1\x12,.dapr.proto.runtime.v1.ResumeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x65\n\x17RaiseEventWorkflowBeta1\x12\x30.dapr.prot
o.runtime.v1.RaiseEventWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12L\n\x08Shutdown\x12&.dapr.proto.runtime.v1.ShutdownRequest\x1a\x16.google.protobuf.Empty\"\x00\x12l\n\x11ScheduleJobAlpha1\x12).dapr.proto.runtime.v1.ScheduleJobRequest\x1a*.dapr.proto.runtime.v1.ScheduleJobResponse\"\x00\x12]\n\x0cGetJobAlpha1\x12$.dapr.proto.runtime.v1.GetJobRequest\x1a%.dapr.proto.runtime.v1.GetJobResponse\"\x00\x12\x66\n\x0f\x44\x65leteJobAlpha1\x12\'.dapr.proto.runtime.v1.DeleteJobRequest\x1a(.dapr.proto.runtime.v1.DeleteJobResponse\"\x00\x42i\n\nio.dapr.v1B\nDaprProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\xaa\x02\x1b\x44\x61pr.Client.Autogen.Grpc.v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n dapr/proto/runtime/v1/dapr.proto\x12\x15\x64\x61pr.proto.runtime.v1\x1a\x19google/protobuf/any.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a!dapr/proto/common/v1/common.proto\x1a\'dapr/proto/runtime/v1/appcallback.proto\"X\n\x14InvokeServiceRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x34\n\x07message\x18\x03 \x01(\x0b\x32#.dapr.proto.common.v1.InvokeRequest\"\xf5\x01\n\x0fGetStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12H\n\x0b\x63onsistency\x18\x03 \x01(\x0e\x32\x33.dapr.proto.common.v1.StateOptions.StateConsistency\x12\x46\n\x08metadata\x18\x04 \x03(\x0b\x32\x34.dapr.proto.runtime.v1.GetStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc9\x01\n\x13GetBulkStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x13\n\x0bparallelism\x18\x03 \x01(\x05\x12J\n\x08metadata\x18\x04 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.GetBulkStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"K\n\x14GetBulkStateResponse\x12\x33\n\x05items\x18\x01 \x03(\x0b\x32$.dapr.proto.runtime.v1.BulkStateItem\"\xbe\x01\n\rBulkStateItem\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\x12\x44\n\x08metadata\x18\x05 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.BulkStateItem.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa8\x01\n\x10GetStateResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x02 \x01(\t\x12G\n\x08metadata\x18\x03 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.GetStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x90\x02\n\x12\x44\x65leteStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12(\n\x04\x65tag\x18\x03 \x01(\x0b\x32\x1a.dapr.proto.common.v1.Etag\x12\x33\n\x07options\x18\x04 \x01(\x0b\x32\".dapr.proto.common.v1.StateOptions\x12I\n\x08metadata\x18\x05 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.DeleteStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x16\x44\x65leteBulkStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12/\n\x06states\x18\x02 \x03(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"W\n\x10SaveStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12/\n\x06states\x18\x02 \x03(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"\xbc\x01\n\x11QueryStateRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\r\n\x05query\x18\x02 
\x01(\t\x12H\n\x08metadata\x18\x03 \x03(\x0b\x32\x36.dapr.proto.runtime.v1.QueryStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"H\n\x0eQueryStateItem\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\"\xd7\x01\n\x12QueryStateResponse\x12\x36\n\x07results\x18\x01 \x03(\x0b\x32%.dapr.proto.runtime.v1.QueryStateItem\x12\r\n\x05token\x18\x02 \x01(\t\x12I\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.QueryStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdf\x01\n\x13PublishEventRequest\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\x19\n\x11\x64\x61ta_content_type\x18\x04 \x01(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.PublishEventRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xf5\x01\n\x12\x42ulkPublishRequest\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12?\n\x07\x65ntries\x18\x03 \x03(\x0b\x32..dapr.proto.runtime.v1.BulkPublishRequestEntry\x12I\n\x08metadata\x18\x04 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.BulkPublishRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xd1\x01\n\x17\x42ulkPublishRequestEntry\x12\x10\n\x08\x65ntry_id\x18\x01 \x01(\t\x12\r\n\x05\x65vent\x18\x02 \x01(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x03 \x01(\t\x12N\n\x08metadata\x18\x04 \x03(\x0b\x32<.dapr.proto.runtime.v1.BulkPublishRequestEntry.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"c\n\x13\x42ulkPublishResponse\x12L\n\rfailedEntries\x18\x01 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.BulkPublishResponseFailedEntry\"A\n\x1e\x42ulkPublishResponseFailedEntry\x12\x10\n\x08\x65ntry_id\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\"\x84\x02\n!SubscribeTopicEventsRequestAlpha1\x12Z\n\x0finitial_request\x18\x01 \x01(\x0b\x32?.dapr.proto.runtime.v1.SubscribeTopicEventsRequestInitialAlpha1H\x00\x12\\\n\x0f\x65vent_processed\x18\x02 \x01(\x0b\x32\x41.dapr.proto.runtime.v1.SubscribeTopicEventsRequestProcessedAlpha1H\x00\x42%\n#subscribe_topic_events_request_type\"\x96\x02\n(SubscribeTopicEventsRequestInitialAlpha1\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12_\n\x08metadata\x18\x03 \x03(\x0b\x32M.dapr.proto.runtime.v1.SubscribeTopicEventsRequestInitialAlpha1.MetadataEntry\x12\x1e\n\x11\x64\x65\x61\x64_letter_topic\x18\x04 \x01(\tH\x00\x88\x01\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x14\n\x12_dead_letter_topic\"s\n*SubscribeTopicEventsRequestProcessedAlpha1\x12\n\n\x02id\x18\x01 \x01(\t\x12\x39\n\x06status\x18\x02 \x01(\x0b\x32).dapr.proto.runtime.v1.TopicEventResponse\"\xed\x01\n\"SubscribeTopicEventsResponseAlpha1\x12\\\n\x10initial_response\x18\x01 \x01(\x0b\x32@.dapr.proto.runtime.v1.SubscribeTopicEventsResponseInitialAlpha1H\x00\x12\x41\n\revent_message\x18\x02 \x01(\x0b\x32(.dapr.proto.runtime.v1.TopicEventRequestH\x00\x42&\n$subscribe_topic_events_response_type\"+\n)SubscribeTopicEventsResponseInitialAlpha1\"\xc3\x01\n\x14InvokeBindingRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12K\n\x08metadata\x18\x03 \x03(\x0b\x32\x39.dapr.proto.runtime.v1.InvokeBindingRequest.MetadataEntry\x12\x11\n\toperation\x18\x04 \x01(\t\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa4\x01\n\x15InvokeBindingResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12L\n\x08metadata\x18\x02 \x03(\x0b\x32:.dapr.proto.runtime.v1.InvokeBindingResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb8\x01\n\x10GetSecretRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x0b\n\x03key\x18\x02 \x01(\t\x12G\n\x08metadata\x18\x03 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.GetSecretRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x82\x01\n\x11GetSecretResponse\x12@\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.GetSecretResponse.DataEntry\x1a+\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb3\x01\n\x14GetBulkSecretRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12K\n\x08metadata\x18\x02 \x03(\x0b\x32\x39.dapr.proto.runtime.v1.GetBulkSecretRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x85\x01\n\x0eSecretResponse\x12\x43\n\x07secrets\x18\x01 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.SecretResponse.SecretsEntry\x1a.\n\x0cSecretsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb1\x01\n\x15GetBulkSecretResponse\x12\x44\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x36.dapr.proto.runtime.v1.GetBulkSecretResponse.DataEntry\x1aR\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.dapr.proto.runtime.v1.SecretResponse:\x02\x38\x01\"f\n\x1bTransactionalStateOperation\x12\x15\n\roperationType\x18\x01 \x01(\t\x12\x30\n\x07request\x18\x02 \x01(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"\x83\x02\n\x1e\x45xecuteStateTransactionRequest\x12\x11\n\tstoreName\x18\x01 \x01(\t\x12\x46\n\noperations\x18\x02 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.TransactionalStateOperation\x12U\n\x08metadata\x18\x03 \x03(\x0b\x32\x43.dapr.proto.runtime.v1.ExecuteStateTransactionRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbb\x01\n\x19RegisterActorTimerRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x19\n\x08\x64ue_time\x18\x04 \x01(\tR\x07\x64ueTime\x12\x0e\n\x06period\x18\x05 \x01(\t\x12\x10\n\x08\x63\x61llback\x18\x06 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x07 \x01(\x0c\x12\x0b\n\x03ttl\x18\x08 \x01(\t\"e\n\x1bUnregisterActorTimerRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\"\xac\x01\n\x1cRegisterActorReminderRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x19\n\x08\x64ue_time\x18\x04 \x01(\tR\x07\x64ueTime\x12\x0e\n\x06period\x18\x05 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\x12\x0b\n\x03ttl\x18\x07 \x01(\t\"h\n\x1eUnregisterActorReminderRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 
\x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\"]\n\x14GetActorStateRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0b\n\x03key\x18\x03 \x01(\t\"\xa4\x01\n\x15GetActorStateResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12L\n\x08metadata\x18\x02 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetActorStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xac\x01\n#ExecuteActorStateTransactionRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12K\n\noperations\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.TransactionalActorStateOperation\"\xf5\x01\n TransactionalActorStateOperation\x12\x15\n\roperationType\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12#\n\x05value\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any\x12W\n\x08metadata\x18\x04 \x03(\x0b\x32\x45.dapr.proto.runtime.v1.TransactionalActorStateOperation.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xe8\x01\n\x12InvokeActorRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0e\n\x06method\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\x12I\n\x08metadata\x18\x05 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.InvokeActorRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"#\n\x13InvokeActorResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"\x14\n\x12GetMetadataRequest\"\x9b\x06\n\x13GetMetadataResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12Q\n\x13\x61\x63tive_actors_count\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountB\x02\x18\x01R\x06\x61\x63tors\x12V\n\x15registered_components\x18\x03 \x03(\x0b\x32+.dapr.proto.runtime.v1.RegisteredComponentsR\ncomponents\x12\x65\n\x11\x65xtended_metadata\x18\x04 \x03(\x0b\x32@.dapr.proto.runtime.v1.GetMetadataResponse.ExtendedMetadataEntryR\x08\x65xtended\x12O\n\rsubscriptions\x18\x05 \x03(\x0b\x32).dapr.proto.runtime.v1.PubsubSubscriptionR\rsubscriptions\x12R\n\x0ehttp_endpoints\x18\x06 \x03(\x0b\x32+.dapr.proto.runtime.v1.MetadataHTTPEndpointR\rhttpEndpoints\x12j\n\x19\x61pp_connection_properties\x18\x07 \x01(\x0b\x32..dapr.proto.runtime.v1.AppConnectionPropertiesR\x17\x61ppConnectionProperties\x12\'\n\x0fruntime_version\x18\x08 \x01(\tR\x0eruntimeVersion\x12)\n\x10\x65nabled_features\x18\t \x03(\tR\x0f\x65nabledFeatures\x12H\n\ractor_runtime\x18\n \x01(\x0b\x32#.dapr.proto.runtime.v1.ActorRuntimeR\x0c\x61\x63torRuntime\x1a\x37\n\x15\x45xtendedMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x02\n\x0c\x41\x63torRuntime\x12]\n\x0eruntime_status\x18\x01 \x01(\x0e\x32\x36.dapr.proto.runtime.v1.ActorRuntime.ActorRuntimeStatusR\rruntimeStatus\x12M\n\ractive_actors\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountR\x0c\x61\x63tiveActors\x12\x1d\n\nhost_ready\x18\x03 \x01(\x08R\thostReady\x12\x1c\n\tplacement\x18\x04 \x01(\tR\tplacement\"A\n\x12\x41\x63torRuntimeStatus\x12\x10\n\x0cINITIALIZING\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\"0\n\x11\x41\x63tiveActorsCount\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\"Y\n\x14RegisteredComponents\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 
\x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x04 \x03(\t\"*\n\x14MetadataHTTPEndpoint\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xd1\x01\n\x17\x41ppConnectionProperties\x12\x0c\n\x04port\x18\x01 \x01(\x05\x12\x10\n\x08protocol\x18\x02 \x01(\t\x12\'\n\x0f\x63hannel_address\x18\x03 \x01(\tR\x0e\x63hannelAddress\x12\'\n\x0fmax_concurrency\x18\x04 \x01(\x05R\x0emaxConcurrency\x12\x44\n\x06health\x18\x05 \x01(\x0b\x32\x34.dapr.proto.runtime.v1.AppConnectionHealthProperties\"\xdc\x01\n\x1d\x41ppConnectionHealthProperties\x12*\n\x11health_check_path\x18\x01 \x01(\tR\x0fhealthCheckPath\x12\x32\n\x15health_probe_interval\x18\x02 \x01(\tR\x13healthProbeInterval\x12\x30\n\x14health_probe_timeout\x18\x03 \x01(\tR\x12healthProbeTimeout\x12)\n\x10health_threshold\x18\x04 \x01(\x05R\x0fhealthThreshold\"\x86\x03\n\x12PubsubSubscription\x12\x1f\n\x0bpubsub_name\x18\x01 \x01(\tR\npubsubname\x12\x14\n\x05topic\x18\x02 \x01(\tR\x05topic\x12S\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.PubsubSubscription.MetadataEntryR\x08metadata\x12\x44\n\x05rules\x18\x04 \x01(\x0b\x32..dapr.proto.runtime.v1.PubsubSubscriptionRulesR\x05rules\x12*\n\x11\x64\x65\x61\x64_letter_topic\x18\x05 \x01(\tR\x0f\x64\x65\x61\x64LetterTopic\x12\x41\n\x04type\x18\x06 \x01(\x0e\x32-.dapr.proto.runtime.v1.PubsubSubscriptionTypeR\x04type\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"W\n\x17PubsubSubscriptionRules\x12<\n\x05rules\x18\x01 \x03(\x0b\x32-.dapr.proto.runtime.v1.PubsubSubscriptionRule\"5\n\x16PubsubSubscriptionRule\x12\r\n\x05match\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"0\n\x12SetMetadataRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\xbc\x01\n\x17GetConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12N\n\x08metadata\x18\x03 \x03(\x0b\x32<.dapr.proto.runtime.v1.GetConfigurationRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x01\n\x18GetConfigurationResponse\x12I\n\x05items\x18\x01 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetConfigurationResponse.ItemsEntry\x1aU\n\nItemsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.dapr.proto.common.v1.ConfigurationItem:\x02\x38\x01\"\xc8\x01\n\x1dSubscribeConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12T\n\x08metadata\x18\x03 \x03(\x0b\x32\x42.dapr.proto.runtime.v1.SubscribeConfigurationRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"A\n\x1fUnsubscribeConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\"\xd4\x01\n\x1eSubscribeConfigurationResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12O\n\x05items\x18\x02 \x03(\x0b\x32@.dapr.proto.runtime.v1.SubscribeConfigurationResponse.ItemsEntry\x1aU\n\nItemsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.dapr.proto.common.v1.ConfigurationItem:\x02\x38\x01\"?\n UnsubscribeConfigurationResponse\x12\n\n\x02ok\x18\x01 \x01(\x08\x12\x0f\n\x07message\x18\x02 \x01(\t\"\x9b\x01\n\x0eTryLockRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x1f\n\x0bresource_id\x18\x02 \x01(\tR\nresourceId\x12\x1d\n\nlock_owner\x18\x03 \x01(\tR\tlockOwner\x12*\n\x11\x65xpiry_in_seconds\x18\x04 \x01(\x05R\x0f\x65xpiryInSeconds\"\"\n\x0fTryLockResponse\x12\x0f\n\x07success\x18\x01 
\x01(\x08\"n\n\rUnlockRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x1f\n\x0bresource_id\x18\x02 \x01(\tR\nresourceId\x12\x1d\n\nlock_owner\x18\x03 \x01(\tR\tlockOwner\"\xae\x01\n\x0eUnlockResponse\x12<\n\x06status\x18\x01 \x01(\x0e\x32,.dapr.proto.runtime.v1.UnlockResponse.Status\"^\n\x06Status\x12\x0b\n\x07SUCCESS\x10\x00\x12\x17\n\x13LOCK_DOES_NOT_EXIST\x10\x01\x12\x1a\n\x16LOCK_BELONGS_TO_OTHERS\x10\x02\x12\x12\n\x0eINTERNAL_ERROR\x10\x03\"\xb0\x01\n\x13SubtleGetKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x44\n\x06\x66ormat\x18\x03 \x01(\x0e\x32\x34.dapr.proto.runtime.v1.SubtleGetKeyRequest.KeyFormat\"\x1e\n\tKeyFormat\x12\x07\n\x03PEM\x10\x00\x12\x08\n\x04JSON\x10\x01\"C\n\x14SubtleGetKeyResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1d\n\npublic_key\x18\x02 \x01(\tR\tpublicKey\"\xb6\x01\n\x14SubtleEncryptRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x11\n\tplaintext\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x06 \x01(\x0cR\x0e\x61ssociatedData\"8\n\x15SubtleEncryptResponse\x12\x12\n\nciphertext\x18\x01 \x01(\x0c\x12\x0b\n\x03tag\x18\x02 \x01(\x0c\"\xc4\x01\n\x14SubtleDecryptRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x12\n\nciphertext\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\x0b\n\x03tag\x18\x06 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x07 \x01(\x0cR\x0e\x61ssociatedData\"*\n\x15SubtleDecryptResponse\x12\x11\n\tplaintext\x18\x01 \x01(\x0c\"\xc8\x01\n\x14SubtleWrapKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12#\n\rplaintext_key\x18\x02 \x01(\x0cR\x0cplaintextKey\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x06 \x01(\x0cR\x0e\x61ssociatedData\"E\n\x15SubtleWrapKeyResponse\x12\x1f\n\x0bwrapped_key\x18\x01 \x01(\x0cR\nwrappedKey\x12\x0b\n\x03tag\x18\x02 \x01(\x0c\"\xd3\x01\n\x16SubtleUnwrapKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x1f\n\x0bwrapped_key\x18\x02 \x01(\x0cR\nwrappedKey\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\x0b\n\x03tag\x18\x06 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x07 \x01(\x0cR\x0e\x61ssociatedData\">\n\x17SubtleUnwrapKeyResponse\x12#\n\rplaintext_key\x18\x01 \x01(\x0cR\x0cplaintextKey\"x\n\x11SubtleSignRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0e\n\x06\x64igest\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\"\'\n\x12SubtleSignResponse\x12\x11\n\tsignature\x18\x01 \x01(\x0c\"\x8d\x01\n\x13SubtleVerifyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0e\n\x06\x64igest\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\x11\n\tsignature\x18\x05 \x01(\x0c\"%\n\x14SubtleVerifyResponse\x12\r\n\x05valid\x18\x01 \x01(\x08\"\x85\x01\n\x0e\x45ncryptRequest\x12=\n\x07options\x18\x01 \x01(\x0b\x32,.dapr.proto.runtime.v1.EncryptRequestOptions\x12\x34\n\x07payload\x18\x02 
\x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"\xfe\x01\n\x15\x45ncryptRequestOptions\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x19\n\x08key_name\x18\x02 \x01(\tR\x07keyName\x12\x1a\n\x12key_wrap_algorithm\x18\x03 \x01(\t\x12\x1e\n\x16\x64\x61ta_encryption_cipher\x18\n \x01(\t\x12\x37\n\x18omit_decryption_key_name\x18\x0b \x01(\x08R\x15omitDecryptionKeyName\x12.\n\x13\x64\x65\x63ryption_key_name\x18\x0c \x01(\tR\x11\x64\x65\x63ryptionKeyName\"G\n\x0f\x45ncryptResponse\x12\x34\n\x07payload\x18\x01 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"\x85\x01\n\x0e\x44\x65\x63ryptRequest\x12=\n\x07options\x18\x01 \x01(\x0b\x32,.dapr.proto.runtime.v1.DecryptRequestOptions\x12\x34\n\x07payload\x18\x02 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"Y\n\x15\x44\x65\x63ryptRequestOptions\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x19\n\x08key_name\x18\x0c \x01(\tR\x07keyName\"G\n\x0f\x44\x65\x63ryptResponse\x12\x34\n\x07payload\x18\x01 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"d\n\x12GetWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x84\x03\n\x13GetWorkflowResponse\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12#\n\rworkflow_name\x18\x02 \x01(\tR\x0cworkflowName\x12\x39\n\ncreated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x42\n\x0flast_updated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\rlastUpdatedAt\x12%\n\x0eruntime_status\x18\x05 \x01(\tR\rruntimeStatus\x12N\n\nproperties\x18\x06 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetWorkflowResponse.PropertiesEntry\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x95\x02\n\x14StartWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\x12#\n\rworkflow_name\x18\x03 \x01(\tR\x0cworkflowName\x12I\n\x07options\x18\x04 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.StartWorkflowRequest.OptionsEntry\x12\r\n\x05input\x18\x05 \x01(\x0c\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"8\n\x15StartWorkflowResponse\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\"j\n\x18TerminateWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"f\n\x14PauseWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"g\n\x15ResumeWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x9e\x01\n\x19RaiseEventWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\x12\x1d\n\nevent_name\x18\x03 \x01(\tR\teventName\x12\x12\n\nevent_data\x18\x04 \x01(\x0c\"f\n\x14PurgeWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x11\n\x0fShutdownRequest\"\xe8\x01\n\x03Job\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1f\n\x08schedule\x18\x02 \x01(\tH\x00R\x08schedule\x88\x01\x01\x12\x1d\n\x07repeats\x18\x03 \x01(\rH\x01R\x07repeats\x88\x01\x01\x12\x1e\n\x08\x64ue_time\x18\x04 \x01(\tH\x02R\x07\x64ueTime\x88\x01\x01\x12\x15\n\x03ttl\x18\x05 \x01(\tH\x03R\x03ttl\x88\x01\x01\x12(\n\x04\x64\x61ta\x18\x06 
\x01(\x0b\x32\x14.google.protobuf.AnyR\x04\x64\x61taB\x0b\n\t_scheduleB\n\n\x08_repeatsB\x0b\n\t_due_timeB\x06\n\x04_ttl\"=\n\x12ScheduleJobRequest\x12\'\n\x03job\x18\x01 \x01(\x0b\x32\x1a.dapr.proto.runtime.v1.Job\"\x15\n\x13ScheduleJobResponse\"\x1d\n\rGetJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"9\n\x0eGetJobResponse\x12\'\n\x03job\x18\x01 \x01(\x0b\x32\x1a.dapr.proto.runtime.v1.Job\" \n\x10\x44\x65leteJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x13\n\x11\x44\x65leteJobResponse*W\n\x16PubsubSubscriptionType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x44\x45\x43LARATIVE\x10\x01\x12\x10\n\x0cPROGRAMMATIC\x10\x02\x12\r\n\tSTREAMING\x10\x03\x32\xbc\x30\n\x04\x44\x61pr\x12\x64\n\rInvokeService\x12+.dapr.proto.runtime.v1.InvokeServiceRequest\x1a$.dapr.proto.common.v1.InvokeResponse\"\x00\x12]\n\x08GetState\x12&.dapr.proto.runtime.v1.GetStateRequest\x1a\'.dapr.proto.runtime.v1.GetStateResponse\"\x00\x12i\n\x0cGetBulkState\x12*.dapr.proto.runtime.v1.GetBulkStateRequest\x1a+.dapr.proto.runtime.v1.GetBulkStateResponse\"\x00\x12N\n\tSaveState\x12\'.dapr.proto.runtime.v1.SaveStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12i\n\x10QueryStateAlpha1\x12(.dapr.proto.runtime.v1.QueryStateRequest\x1a).dapr.proto.runtime.v1.QueryStateResponse\"\x00\x12R\n\x0b\x44\x65leteState\x12).dapr.proto.runtime.v1.DeleteStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12Z\n\x0f\x44\x65leteBulkState\x12-.dapr.proto.runtime.v1.DeleteBulkStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x17\x45xecuteStateTransaction\x12\x35.dapr.proto.runtime.v1.ExecuteStateTransactionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12T\n\x0cPublishEvent\x12*.dapr.proto.runtime.v1.PublishEventRequest\x1a\x16.google.protobuf.Empty\"\x00\x12q\n\x16\x42ulkPublishEventAlpha1\x12).dapr.proto.runtime.v1.BulkPublishRequest\x1a*.dapr.proto.runtime.v1.BulkPublishResponse\"\x00\x12\x97\x01\n\x1aSubscribeTopicEventsAlpha1\x12\x38.dapr.proto.runtime.v1.SubscribeTopicEventsRequestAlpha1\x1a\x39.dapr.proto.runtime.v1.SubscribeTopicEventsResponseAlpha1\"\x00(\x01\x30\x01\x12l\n\rInvokeBinding\x12+.dapr.proto.runtime.v1.InvokeBindingRequest\x1a,.dapr.proto.runtime.v1.InvokeBindingResponse\"\x00\x12`\n\tGetSecret\x12\'.dapr.proto.runtime.v1.GetSecretRequest\x1a(.dapr.proto.runtime.v1.GetSecretResponse\"\x00\x12l\n\rGetBulkSecret\x12+.dapr.proto.runtime.v1.GetBulkSecretRequest\x1a,.dapr.proto.runtime.v1.GetBulkSecretResponse\"\x00\x12`\n\x12RegisterActorTimer\x12\x30.dapr.proto.runtime.v1.RegisterActorTimerRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x64\n\x14UnregisterActorTimer\x12\x32.dapr.proto.runtime.v1.UnregisterActorTimerRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x15RegisterActorReminder\x12\x33.dapr.proto.runtime.v1.RegisterActorReminderRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x17UnregisterActorReminder\x12\x35.dapr.proto.runtime.v1.UnregisterActorReminderRequest\x1a\x16.google.protobuf.Empty\"\x00\x12l\n\rGetActorState\x12+.dapr.proto.runtime.v1.GetActorStateRequest\x1a,.dapr.proto.runtime.v1.GetActorStateResponse\"\x00\x12t\n\x1c\x45xecuteActorStateTransaction\x12:.dapr.proto.runtime.v1.ExecuteActorStateTransactionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x0bInvokeActor\x12).dapr.proto.runtime.v1.InvokeActorRequest\x1a*.dapr.proto.runtime.v1.InvokeActorResponse\"\x00\x12{\n\x16GetConfigurationAlpha1\x12..dapr.proto.runtime.v1.GetConfigurationRequest\x1a/.dapr.proto.runtime.v1.GetConfigurationResponse\"\x00\x12u\n\x10GetConfiguration\x12..dapr.proto.runtime.v1.GetConfigurationRequest\x1a/.dapr.pro
to.runtime.v1.GetConfigurationResponse\"\x00\x12\x8f\x01\n\x1cSubscribeConfigurationAlpha1\x12\x34.dapr.proto.runtime.v1.SubscribeConfigurationRequest\x1a\x35.dapr.proto.runtime.v1.SubscribeConfigurationResponse\"\x00\x30\x01\x12\x89\x01\n\x16SubscribeConfiguration\x12\x34.dapr.proto.runtime.v1.SubscribeConfigurationRequest\x1a\x35.dapr.proto.runtime.v1.SubscribeConfigurationResponse\"\x00\x30\x01\x12\x93\x01\n\x1eUnsubscribeConfigurationAlpha1\x12\x36.dapr.proto.runtime.v1.UnsubscribeConfigurationRequest\x1a\x37.dapr.proto.runtime.v1.UnsubscribeConfigurationResponse\"\x00\x12\x8d\x01\n\x18UnsubscribeConfiguration\x12\x36.dapr.proto.runtime.v1.UnsubscribeConfigurationRequest\x1a\x37.dapr.proto.runtime.v1.UnsubscribeConfigurationResponse\"\x00\x12`\n\rTryLockAlpha1\x12%.dapr.proto.runtime.v1.TryLockRequest\x1a&.dapr.proto.runtime.v1.TryLockResponse\"\x00\x12]\n\x0cUnlockAlpha1\x12$.dapr.proto.runtime.v1.UnlockRequest\x1a%.dapr.proto.runtime.v1.UnlockResponse\"\x00\x12\x62\n\rEncryptAlpha1\x12%.dapr.proto.runtime.v1.EncryptRequest\x1a&.dapr.proto.runtime.v1.EncryptResponse(\x01\x30\x01\x12\x62\n\rDecryptAlpha1\x12%.dapr.proto.runtime.v1.DecryptRequest\x1a&.dapr.proto.runtime.v1.DecryptResponse(\x01\x30\x01\x12\x66\n\x0bGetMetadata\x12).dapr.proto.runtime.v1.GetMetadataRequest\x1a*.dapr.proto.runtime.v1.GetMetadataResponse\"\x00\x12R\n\x0bSetMetadata\x12).dapr.proto.runtime.v1.SetMetadataRequest\x1a\x16.google.protobuf.Empty\"\x00\x12m\n\x12SubtleGetKeyAlpha1\x12*.dapr.proto.runtime.v1.SubtleGetKeyRequest\x1a+.dapr.proto.runtime.v1.SubtleGetKeyResponse\x12p\n\x13SubtleEncryptAlpha1\x12+.dapr.proto.runtime.v1.SubtleEncryptRequest\x1a,.dapr.proto.runtime.v1.SubtleEncryptResponse\x12p\n\x13SubtleDecryptAlpha1\x12+.dapr.proto.runtime.v1.SubtleDecryptRequest\x1a,.dapr.proto.runtime.v1.SubtleDecryptResponse\x12p\n\x13SubtleWrapKeyAlpha1\x12+.dapr.proto.runtime.v1.SubtleWrapKeyRequest\x1a,.dapr.proto.runtime.v1.SubtleWrapKeyResponse\x12v\n\x15SubtleUnwrapKeyAlpha1\x12-.dapr.proto.runtime.v1.SubtleUnwrapKeyRequest\x1a..dapr.proto.runtime.v1.SubtleUnwrapKeyResponse\x12g\n\x10SubtleSignAlpha1\x12(.dapr.proto.runtime.v1.SubtleSignRequest\x1a).dapr.proto.runtime.v1.SubtleSignResponse\x12m\n\x12SubtleVerifyAlpha1\x12*.dapr.proto.runtime.v1.SubtleVerifyRequest\x1a+.dapr.proto.runtime.v1.SubtleVerifyResponse\x12r\n\x13StartWorkflowAlpha1\x12+.dapr.proto.runtime.v1.StartWorkflowRequest\x1a,.dapr.proto.runtime.v1.StartWorkflowResponse\"\x00\x12l\n\x11GetWorkflowAlpha1\x12).dapr.proto.runtime.v1.GetWorkflowRequest\x1a*.dapr.proto.runtime.v1.GetWorkflowResponse\"\x00\x12\\\n\x13PurgeWorkflowAlpha1\x12+.dapr.proto.runtime.v1.PurgeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x64\n\x17TerminateWorkflowAlpha1\x12/.dapr.proto.runtime.v1.TerminateWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\\\n\x13PauseWorkflowAlpha1\x12+.dapr.proto.runtime.v1.PauseWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12^\n\x14ResumeWorkflowAlpha1\x12,.dapr.proto.runtime.v1.ResumeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x18RaiseEventWorkflowAlpha1\x12\x30.dapr.proto.runtime.v1.RaiseEventWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12q\n\x12StartWorkflowBeta1\x12+.dapr.proto.runtime.v1.StartWorkflowRequest\x1a,.dapr.proto.runtime.v1.StartWorkflowResponse\"\x00\x12k\n\x10GetWorkflowBeta1\x12).dapr.proto.runtime.v1.GetWorkflowRequest\x1a*.dapr.proto.runtime.v1.GetWorkflowResponse\"\x00\x12[\n\x12PurgeWorkflowBeta1\x12+.dapr.proto.runtime.v1.PurgeWorkflowRequest\x1a\x16.google.protobuf.Emp
ty\"\x00\x12\x63\n\x16TerminateWorkflowBeta1\x12/.dapr.proto.runtime.v1.TerminateWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12[\n\x12PauseWorkflowBeta1\x12+.dapr.proto.runtime.v1.PauseWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12]\n\x13ResumeWorkflowBeta1\x12,.dapr.proto.runtime.v1.ResumeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x65\n\x17RaiseEventWorkflowBeta1\x12\x30.dapr.proto.runtime.v1.RaiseEventWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12L\n\x08Shutdown\x12&.dapr.proto.runtime.v1.ShutdownRequest\x1a\x16.google.protobuf.Empty\"\x00\x12l\n\x11ScheduleJobAlpha1\x12).dapr.proto.runtime.v1.ScheduleJobRequest\x1a*.dapr.proto.runtime.v1.ScheduleJobResponse\"\x00\x12]\n\x0cGetJobAlpha1\x12$.dapr.proto.runtime.v1.GetJobRequest\x1a%.dapr.proto.runtime.v1.GetJobResponse\"\x00\x12\x66\n\x0f\x44\x65leteJobAlpha1\x12\'.dapr.proto.runtime.v1.DeleteJobRequest\x1a(.dapr.proto.runtime.v1.DeleteJobResponse\"\x00\x42i\n\nio.dapr.v1B\nDaprProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\xaa\x02\x1b\x44\x61pr.Client.Autogen.Grpc.v1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -47,8 +57,8 @@ _globals['_BULKPUBLISHREQUEST_METADATAENTRY']._serialized_options = b'8\001' _globals['_BULKPUBLISHREQUESTENTRY_METADATAENTRY']._loaded_options = None _globals['_BULKPUBLISHREQUESTENTRY_METADATAENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBETOPICEVENTSINITIALREQUESTALPHA1_METADATAENTRY']._loaded_options = None - _globals['_SUBSCRIBETOPICEVENTSINITIALREQUESTALPHA1_METADATAENTRY']._serialized_options = b'8\001' + _globals['_SUBSCRIBETOPICEVENTSREQUESTINITIALALPHA1_METADATAENTRY']._loaded_options = None + _globals['_SUBSCRIBETOPICEVENTSREQUESTINITIALALPHA1_METADATAENTRY']._serialized_options = b'8\001' _globals['_INVOKEBINDINGREQUEST_METADATAENTRY']._loaded_options = None _globals['_INVOKEBINDINGREQUEST_METADATAENTRY']._serialized_options = b'8\001' _globals['_INVOKEBINDINGRESPONSE_METADATAENTRY']._loaded_options = None @@ -89,8 +99,8 @@ _globals['_GETWORKFLOWRESPONSE_PROPERTIESENTRY']._serialized_options = b'8\001' _globals['_STARTWORKFLOWREQUEST_OPTIONSENTRY']._loaded_options = None _globals['_STARTWORKFLOWREQUEST_OPTIONSENTRY']._serialized_options = b'8\001' - _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_start=14739 - _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_end=14826 + _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_start=15086 + _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_end=15173 _globals['_INVOKESERVICEREQUEST']._serialized_start=224 _globals['_INVOKESERVICEREQUEST']._serialized_end=312 _globals['_GETSTATEREQUEST']._serialized_start=315 @@ -146,213 +156,217 @@ _globals['_BULKPUBLISHRESPONSEFAILEDENTRY']._serialized_start=2936 _globals['_BULKPUBLISHRESPONSEFAILEDENTRY']._serialized_end=3001 _globals['_SUBSCRIBETOPICEVENTSREQUESTALPHA1']._serialized_start=3004 - _globals['_SUBSCRIBETOPICEVENTSREQUESTALPHA1']._serialized_end=3255 - _globals['_SUBSCRIBETOPICEVENTSINITIALREQUESTALPHA1']._serialized_start=3258 - _globals['_SUBSCRIBETOPICEVENTSINITIALREQUESTALPHA1']._serialized_end=3536 - _globals['_SUBSCRIBETOPICEVENTSINITIALREQUESTALPHA1_METADATAENTRY']._serialized_start=513 - _globals['_SUBSCRIBETOPICEVENTSINITIALREQUESTALPHA1_METADATAENTRY']._serialized_end=560 - _globals['_SUBSCRIBETOPICEVENTSRESPONSEALPHA1']._serialized_start=3538 - _globals['_SUBSCRIBETOPICEVENTSRESPONSEALPHA1']._serialized_end=3645 - _globals['_INVOKEBINDINGREQUEST']._serialized_start=3648 - 
_globals['_INVOKEBINDINGREQUEST']._serialized_end=3843 + _globals['_SUBSCRIBETOPICEVENTSREQUESTALPHA1']._serialized_end=3264 + _globals['_SUBSCRIBETOPICEVENTSREQUESTINITIALALPHA1']._serialized_start=3267 + _globals['_SUBSCRIBETOPICEVENTSREQUESTINITIALALPHA1']._serialized_end=3545 + _globals['_SUBSCRIBETOPICEVENTSREQUESTINITIALALPHA1_METADATAENTRY']._serialized_start=513 + _globals['_SUBSCRIBETOPICEVENTSREQUESTINITIALALPHA1_METADATAENTRY']._serialized_end=560 + _globals['_SUBSCRIBETOPICEVENTSREQUESTPROCESSEDALPHA1']._serialized_start=3547 + _globals['_SUBSCRIBETOPICEVENTSREQUESTPROCESSEDALPHA1']._serialized_end=3662 + _globals['_SUBSCRIBETOPICEVENTSRESPONSEALPHA1']._serialized_start=3665 + _globals['_SUBSCRIBETOPICEVENTSRESPONSEALPHA1']._serialized_end=3902 + _globals['_SUBSCRIBETOPICEVENTSRESPONSEINITIALALPHA1']._serialized_start=3904 + _globals['_SUBSCRIBETOPICEVENTSRESPONSEINITIALALPHA1']._serialized_end=3947 + _globals['_INVOKEBINDINGREQUEST']._serialized_start=3950 + _globals['_INVOKEBINDINGREQUEST']._serialized_end=4145 _globals['_INVOKEBINDINGREQUEST_METADATAENTRY']._serialized_start=513 _globals['_INVOKEBINDINGREQUEST_METADATAENTRY']._serialized_end=560 - _globals['_INVOKEBINDINGRESPONSE']._serialized_start=3846 - _globals['_INVOKEBINDINGRESPONSE']._serialized_end=4010 + _globals['_INVOKEBINDINGRESPONSE']._serialized_start=4148 + _globals['_INVOKEBINDINGRESPONSE']._serialized_end=4312 _globals['_INVOKEBINDINGRESPONSE_METADATAENTRY']._serialized_start=513 _globals['_INVOKEBINDINGRESPONSE_METADATAENTRY']._serialized_end=560 - _globals['_GETSECRETREQUEST']._serialized_start=4013 - _globals['_GETSECRETREQUEST']._serialized_end=4197 + _globals['_GETSECRETREQUEST']._serialized_start=4315 + _globals['_GETSECRETREQUEST']._serialized_end=4499 _globals['_GETSECRETREQUEST_METADATAENTRY']._serialized_start=513 _globals['_GETSECRETREQUEST_METADATAENTRY']._serialized_end=560 - _globals['_GETSECRETRESPONSE']._serialized_start=4200 - _globals['_GETSECRETRESPONSE']._serialized_end=4330 - _globals['_GETSECRETRESPONSE_DATAENTRY']._serialized_start=4287 - _globals['_GETSECRETRESPONSE_DATAENTRY']._serialized_end=4330 - _globals['_GETBULKSECRETREQUEST']._serialized_start=4333 - _globals['_GETBULKSECRETREQUEST']._serialized_end=4512 + _globals['_GETSECRETRESPONSE']._serialized_start=4502 + _globals['_GETSECRETRESPONSE']._serialized_end=4632 + _globals['_GETSECRETRESPONSE_DATAENTRY']._serialized_start=4589 + _globals['_GETSECRETRESPONSE_DATAENTRY']._serialized_end=4632 + _globals['_GETBULKSECRETREQUEST']._serialized_start=4635 + _globals['_GETBULKSECRETREQUEST']._serialized_end=4814 _globals['_GETBULKSECRETREQUEST_METADATAENTRY']._serialized_start=513 _globals['_GETBULKSECRETREQUEST_METADATAENTRY']._serialized_end=560 - _globals['_SECRETRESPONSE']._serialized_start=4515 - _globals['_SECRETRESPONSE']._serialized_end=4648 - _globals['_SECRETRESPONSE_SECRETSENTRY']._serialized_start=4602 - _globals['_SECRETRESPONSE_SECRETSENTRY']._serialized_end=4648 - _globals['_GETBULKSECRETRESPONSE']._serialized_start=4651 - _globals['_GETBULKSECRETRESPONSE']._serialized_end=4828 - _globals['_GETBULKSECRETRESPONSE_DATAENTRY']._serialized_start=4746 - _globals['_GETBULKSECRETRESPONSE_DATAENTRY']._serialized_end=4828 - _globals['_TRANSACTIONALSTATEOPERATION']._serialized_start=4830 - _globals['_TRANSACTIONALSTATEOPERATION']._serialized_end=4932 - _globals['_EXECUTESTATETRANSACTIONREQUEST']._serialized_start=4935 - _globals['_EXECUTESTATETRANSACTIONREQUEST']._serialized_end=5194 + 
_globals['_SECRETRESPONSE']._serialized_start=4817 + _globals['_SECRETRESPONSE']._serialized_end=4950 + _globals['_SECRETRESPONSE_SECRETSENTRY']._serialized_start=4904 + _globals['_SECRETRESPONSE_SECRETSENTRY']._serialized_end=4950 + _globals['_GETBULKSECRETRESPONSE']._serialized_start=4953 + _globals['_GETBULKSECRETRESPONSE']._serialized_end=5130 + _globals['_GETBULKSECRETRESPONSE_DATAENTRY']._serialized_start=5048 + _globals['_GETBULKSECRETRESPONSE_DATAENTRY']._serialized_end=5130 + _globals['_TRANSACTIONALSTATEOPERATION']._serialized_start=5132 + _globals['_TRANSACTIONALSTATEOPERATION']._serialized_end=5234 + _globals['_EXECUTESTATETRANSACTIONREQUEST']._serialized_start=5237 + _globals['_EXECUTESTATETRANSACTIONREQUEST']._serialized_end=5496 _globals['_EXECUTESTATETRANSACTIONREQUEST_METADATAENTRY']._serialized_start=513 _globals['_EXECUTESTATETRANSACTIONREQUEST_METADATAENTRY']._serialized_end=560 - _globals['_REGISTERACTORTIMERREQUEST']._serialized_start=5197 - _globals['_REGISTERACTORTIMERREQUEST']._serialized_end=5384 - _globals['_UNREGISTERACTORTIMERREQUEST']._serialized_start=5386 - _globals['_UNREGISTERACTORTIMERREQUEST']._serialized_end=5487 - _globals['_REGISTERACTORREMINDERREQUEST']._serialized_start=5490 - _globals['_REGISTERACTORREMINDERREQUEST']._serialized_end=5662 - _globals['_UNREGISTERACTORREMINDERREQUEST']._serialized_start=5664 - _globals['_UNREGISTERACTORREMINDERREQUEST']._serialized_end=5768 - _globals['_GETACTORSTATEREQUEST']._serialized_start=5770 - _globals['_GETACTORSTATEREQUEST']._serialized_end=5863 - _globals['_GETACTORSTATERESPONSE']._serialized_start=5866 - _globals['_GETACTORSTATERESPONSE']._serialized_end=6030 + _globals['_REGISTERACTORTIMERREQUEST']._serialized_start=5499 + _globals['_REGISTERACTORTIMERREQUEST']._serialized_end=5686 + _globals['_UNREGISTERACTORTIMERREQUEST']._serialized_start=5688 + _globals['_UNREGISTERACTORTIMERREQUEST']._serialized_end=5789 + _globals['_REGISTERACTORREMINDERREQUEST']._serialized_start=5792 + _globals['_REGISTERACTORREMINDERREQUEST']._serialized_end=5964 + _globals['_UNREGISTERACTORREMINDERREQUEST']._serialized_start=5966 + _globals['_UNREGISTERACTORREMINDERREQUEST']._serialized_end=6070 + _globals['_GETACTORSTATEREQUEST']._serialized_start=6072 + _globals['_GETACTORSTATEREQUEST']._serialized_end=6165 + _globals['_GETACTORSTATERESPONSE']._serialized_start=6168 + _globals['_GETACTORSTATERESPONSE']._serialized_end=6332 _globals['_GETACTORSTATERESPONSE_METADATAENTRY']._serialized_start=513 _globals['_GETACTORSTATERESPONSE_METADATAENTRY']._serialized_end=560 - _globals['_EXECUTEACTORSTATETRANSACTIONREQUEST']._serialized_start=6033 - _globals['_EXECUTEACTORSTATETRANSACTIONREQUEST']._serialized_end=6205 - _globals['_TRANSACTIONALACTORSTATEOPERATION']._serialized_start=6208 - _globals['_TRANSACTIONALACTORSTATEOPERATION']._serialized_end=6453 + _globals['_EXECUTEACTORSTATETRANSACTIONREQUEST']._serialized_start=6335 + _globals['_EXECUTEACTORSTATETRANSACTIONREQUEST']._serialized_end=6507 + _globals['_TRANSACTIONALACTORSTATEOPERATION']._serialized_start=6510 + _globals['_TRANSACTIONALACTORSTATEOPERATION']._serialized_end=6755 _globals['_TRANSACTIONALACTORSTATEOPERATION_METADATAENTRY']._serialized_start=513 _globals['_TRANSACTIONALACTORSTATEOPERATION_METADATAENTRY']._serialized_end=560 - _globals['_INVOKEACTORREQUEST']._serialized_start=6456 - _globals['_INVOKEACTORREQUEST']._serialized_end=6688 + _globals['_INVOKEACTORREQUEST']._serialized_start=6758 + _globals['_INVOKEACTORREQUEST']._serialized_end=6990 
_globals['_INVOKEACTORREQUEST_METADATAENTRY']._serialized_start=513 _globals['_INVOKEACTORREQUEST_METADATAENTRY']._serialized_end=560 - _globals['_INVOKEACTORRESPONSE']._serialized_start=6690 - _globals['_INVOKEACTORRESPONSE']._serialized_end=6725 - _globals['_GETMETADATAREQUEST']._serialized_start=6727 - _globals['_GETMETADATAREQUEST']._serialized_end=6747 - _globals['_GETMETADATARESPONSE']._serialized_start=6750 - _globals['_GETMETADATARESPONSE']._serialized_end=7545 - _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_start=7490 - _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_end=7545 - _globals['_ACTORRUNTIME']._serialized_start=7548 - _globals['_ACTORRUNTIME']._serialized_end=7864 - _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_start=7799 - _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_end=7864 - _globals['_ACTIVEACTORSCOUNT']._serialized_start=7866 - _globals['_ACTIVEACTORSCOUNT']._serialized_end=7914 - _globals['_REGISTEREDCOMPONENTS']._serialized_start=7916 - _globals['_REGISTEREDCOMPONENTS']._serialized_end=8005 - _globals['_METADATAHTTPENDPOINT']._serialized_start=8007 - _globals['_METADATAHTTPENDPOINT']._serialized_end=8049 - _globals['_APPCONNECTIONPROPERTIES']._serialized_start=8052 - _globals['_APPCONNECTIONPROPERTIES']._serialized_end=8261 - _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_start=8264 - _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_end=8484 - _globals['_PUBSUBSUBSCRIPTION']._serialized_start=8487 - _globals['_PUBSUBSUBSCRIPTION']._serialized_end=8877 + _globals['_INVOKEACTORRESPONSE']._serialized_start=6992 + _globals['_INVOKEACTORRESPONSE']._serialized_end=7027 + _globals['_GETMETADATAREQUEST']._serialized_start=7029 + _globals['_GETMETADATAREQUEST']._serialized_end=7049 + _globals['_GETMETADATARESPONSE']._serialized_start=7052 + _globals['_GETMETADATARESPONSE']._serialized_end=7847 + _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_start=7792 + _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_end=7847 + _globals['_ACTORRUNTIME']._serialized_start=7850 + _globals['_ACTORRUNTIME']._serialized_end=8166 + _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_start=8101 + _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_end=8166 + _globals['_ACTIVEACTORSCOUNT']._serialized_start=8168 + _globals['_ACTIVEACTORSCOUNT']._serialized_end=8216 + _globals['_REGISTEREDCOMPONENTS']._serialized_start=8218 + _globals['_REGISTEREDCOMPONENTS']._serialized_end=8307 + _globals['_METADATAHTTPENDPOINT']._serialized_start=8309 + _globals['_METADATAHTTPENDPOINT']._serialized_end=8351 + _globals['_APPCONNECTIONPROPERTIES']._serialized_start=8354 + _globals['_APPCONNECTIONPROPERTIES']._serialized_end=8563 + _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_start=8566 + _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_end=8786 + _globals['_PUBSUBSUBSCRIPTION']._serialized_start=8789 + _globals['_PUBSUBSUBSCRIPTION']._serialized_end=9179 _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_start=513 _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_end=560 - _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_start=8879 - _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_end=8966 - _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_start=8968 - _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_end=9021 - _globals['_SETMETADATAREQUEST']._serialized_start=9023 - _globals['_SETMETADATAREQUEST']._serialized_end=9071 - 
_globals['_GETCONFIGURATIONREQUEST']._serialized_start=9074 - _globals['_GETCONFIGURATIONREQUEST']._serialized_end=9262 + _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_start=9181 + _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_end=9268 + _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_start=9270 + _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_end=9323 + _globals['_SETMETADATAREQUEST']._serialized_start=9325 + _globals['_SETMETADATAREQUEST']._serialized_end=9373 + _globals['_GETCONFIGURATIONREQUEST']._serialized_start=9376 + _globals['_GETCONFIGURATIONREQUEST']._serialized_end=9564 _globals['_GETCONFIGURATIONREQUEST_METADATAENTRY']._serialized_start=513 _globals['_GETCONFIGURATIONREQUEST_METADATAENTRY']._serialized_end=560 - _globals['_GETCONFIGURATIONRESPONSE']._serialized_start=9265 - _globals['_GETCONFIGURATIONRESPONSE']._serialized_end=9453 - _globals['_GETCONFIGURATIONRESPONSE_ITEMSENTRY']._serialized_start=9368 - _globals['_GETCONFIGURATIONRESPONSE_ITEMSENTRY']._serialized_end=9453 - _globals['_SUBSCRIBECONFIGURATIONREQUEST']._serialized_start=9456 - _globals['_SUBSCRIBECONFIGURATIONREQUEST']._serialized_end=9656 + _globals['_GETCONFIGURATIONRESPONSE']._serialized_start=9567 + _globals['_GETCONFIGURATIONRESPONSE']._serialized_end=9755 + _globals['_GETCONFIGURATIONRESPONSE_ITEMSENTRY']._serialized_start=9670 + _globals['_GETCONFIGURATIONRESPONSE_ITEMSENTRY']._serialized_end=9755 + _globals['_SUBSCRIBECONFIGURATIONREQUEST']._serialized_start=9758 + _globals['_SUBSCRIBECONFIGURATIONREQUEST']._serialized_end=9958 _globals['_SUBSCRIBECONFIGURATIONREQUEST_METADATAENTRY']._serialized_start=513 _globals['_SUBSCRIBECONFIGURATIONREQUEST_METADATAENTRY']._serialized_end=560 - _globals['_UNSUBSCRIBECONFIGURATIONREQUEST']._serialized_start=9658 - _globals['_UNSUBSCRIBECONFIGURATIONREQUEST']._serialized_end=9723 - _globals['_SUBSCRIBECONFIGURATIONRESPONSE']._serialized_start=9726 - _globals['_SUBSCRIBECONFIGURATIONRESPONSE']._serialized_end=9938 - _globals['_SUBSCRIBECONFIGURATIONRESPONSE_ITEMSENTRY']._serialized_start=9368 - _globals['_SUBSCRIBECONFIGURATIONRESPONSE_ITEMSENTRY']._serialized_end=9453 - _globals['_UNSUBSCRIBECONFIGURATIONRESPONSE']._serialized_start=9940 - _globals['_UNSUBSCRIBECONFIGURATIONRESPONSE']._serialized_end=10003 - _globals['_TRYLOCKREQUEST']._serialized_start=10006 - _globals['_TRYLOCKREQUEST']._serialized_end=10161 - _globals['_TRYLOCKRESPONSE']._serialized_start=10163 - _globals['_TRYLOCKRESPONSE']._serialized_end=10197 - _globals['_UNLOCKREQUEST']._serialized_start=10199 - _globals['_UNLOCKREQUEST']._serialized_end=10309 - _globals['_UNLOCKRESPONSE']._serialized_start=10312 - _globals['_UNLOCKRESPONSE']._serialized_end=10486 - _globals['_UNLOCKRESPONSE_STATUS']._serialized_start=10392 - _globals['_UNLOCKRESPONSE_STATUS']._serialized_end=10486 - _globals['_SUBTLEGETKEYREQUEST']._serialized_start=10489 - _globals['_SUBTLEGETKEYREQUEST']._serialized_end=10665 - _globals['_SUBTLEGETKEYREQUEST_KEYFORMAT']._serialized_start=10635 - _globals['_SUBTLEGETKEYREQUEST_KEYFORMAT']._serialized_end=10665 - _globals['_SUBTLEGETKEYRESPONSE']._serialized_start=10667 - _globals['_SUBTLEGETKEYRESPONSE']._serialized_end=10734 - _globals['_SUBTLEENCRYPTREQUEST']._serialized_start=10737 - _globals['_SUBTLEENCRYPTREQUEST']._serialized_end=10919 - _globals['_SUBTLEENCRYPTRESPONSE']._serialized_start=10921 - _globals['_SUBTLEENCRYPTRESPONSE']._serialized_end=10977 - _globals['_SUBTLEDECRYPTREQUEST']._serialized_start=10980 - _globals['_SUBTLEDECRYPTREQUEST']._serialized_end=11176 - 
_globals['_SUBTLEDECRYPTRESPONSE']._serialized_start=11178 - _globals['_SUBTLEDECRYPTRESPONSE']._serialized_end=11220 - _globals['_SUBTLEWRAPKEYREQUEST']._serialized_start=11223 - _globals['_SUBTLEWRAPKEYREQUEST']._serialized_end=11423 - _globals['_SUBTLEWRAPKEYRESPONSE']._serialized_start=11425 - _globals['_SUBTLEWRAPKEYRESPONSE']._serialized_end=11494 - _globals['_SUBTLEUNWRAPKEYREQUEST']._serialized_start=11497 - _globals['_SUBTLEUNWRAPKEYREQUEST']._serialized_end=11708 - _globals['_SUBTLEUNWRAPKEYRESPONSE']._serialized_start=11710 - _globals['_SUBTLEUNWRAPKEYRESPONSE']._serialized_end=11772 - _globals['_SUBTLESIGNREQUEST']._serialized_start=11774 - _globals['_SUBTLESIGNREQUEST']._serialized_end=11894 - _globals['_SUBTLESIGNRESPONSE']._serialized_start=11896 - _globals['_SUBTLESIGNRESPONSE']._serialized_end=11935 - _globals['_SUBTLEVERIFYREQUEST']._serialized_start=11938 - _globals['_SUBTLEVERIFYREQUEST']._serialized_end=12079 - _globals['_SUBTLEVERIFYRESPONSE']._serialized_start=12081 - _globals['_SUBTLEVERIFYRESPONSE']._serialized_end=12118 - _globals['_ENCRYPTREQUEST']._serialized_start=12121 - _globals['_ENCRYPTREQUEST']._serialized_end=12254 - _globals['_ENCRYPTREQUESTOPTIONS']._serialized_start=12257 - _globals['_ENCRYPTREQUESTOPTIONS']._serialized_end=12511 - _globals['_ENCRYPTRESPONSE']._serialized_start=12513 - _globals['_ENCRYPTRESPONSE']._serialized_end=12584 - _globals['_DECRYPTREQUEST']._serialized_start=12587 - _globals['_DECRYPTREQUEST']._serialized_end=12720 - _globals['_DECRYPTREQUESTOPTIONS']._serialized_start=12722 - _globals['_DECRYPTREQUESTOPTIONS']._serialized_end=12811 - _globals['_DECRYPTRESPONSE']._serialized_start=12813 - _globals['_DECRYPTRESPONSE']._serialized_end=12884 - _globals['_GETWORKFLOWREQUEST']._serialized_start=12886 - _globals['_GETWORKFLOWREQUEST']._serialized_end=12986 - _globals['_GETWORKFLOWRESPONSE']._serialized_start=12989 - _globals['_GETWORKFLOWRESPONSE']._serialized_end=13377 - _globals['_GETWORKFLOWRESPONSE_PROPERTIESENTRY']._serialized_start=13328 - _globals['_GETWORKFLOWRESPONSE_PROPERTIESENTRY']._serialized_end=13377 - _globals['_STARTWORKFLOWREQUEST']._serialized_start=13380 - _globals['_STARTWORKFLOWREQUEST']._serialized_end=13657 - _globals['_STARTWORKFLOWREQUEST_OPTIONSENTRY']._serialized_start=13611 - _globals['_STARTWORKFLOWREQUEST_OPTIONSENTRY']._serialized_end=13657 - _globals['_STARTWORKFLOWRESPONSE']._serialized_start=13659 - _globals['_STARTWORKFLOWRESPONSE']._serialized_end=13715 - _globals['_TERMINATEWORKFLOWREQUEST']._serialized_start=13717 - _globals['_TERMINATEWORKFLOWREQUEST']._serialized_end=13823 - _globals['_PAUSEWORKFLOWREQUEST']._serialized_start=13825 - _globals['_PAUSEWORKFLOWREQUEST']._serialized_end=13927 - _globals['_RESUMEWORKFLOWREQUEST']._serialized_start=13929 - _globals['_RESUMEWORKFLOWREQUEST']._serialized_end=14032 - _globals['_RAISEEVENTWORKFLOWREQUEST']._serialized_start=14035 - _globals['_RAISEEVENTWORKFLOWREQUEST']._serialized_end=14193 - _globals['_PURGEWORKFLOWREQUEST']._serialized_start=14195 - _globals['_PURGEWORKFLOWREQUEST']._serialized_end=14297 - _globals['_SHUTDOWNREQUEST']._serialized_start=14299 - _globals['_SHUTDOWNREQUEST']._serialized_end=14316 - _globals['_JOB']._serialized_start=14319 - _globals['_JOB']._serialized_end=14506 - _globals['_SCHEDULEJOBREQUEST']._serialized_start=14508 - _globals['_SCHEDULEJOBREQUEST']._serialized_end=14569 - _globals['_SCHEDULEJOBRESPONSE']._serialized_start=14571 - _globals['_SCHEDULEJOBRESPONSE']._serialized_end=14592 - 
_globals['_GETJOBREQUEST']._serialized_start=14594 - _globals['_GETJOBREQUEST']._serialized_end=14623 - _globals['_GETJOBRESPONSE']._serialized_start=14625 - _globals['_GETJOBRESPONSE']._serialized_end=14682 - _globals['_DELETEJOBREQUEST']._serialized_start=14684 - _globals['_DELETEJOBREQUEST']._serialized_end=14716 - _globals['_DELETEJOBRESPONSE']._serialized_start=14718 - _globals['_DELETEJOBRESPONSE']._serialized_end=14737 - _globals['_DAPR']._serialized_start=14829 - _globals['_DAPR']._serialized_end=21016 + _globals['_UNSUBSCRIBECONFIGURATIONREQUEST']._serialized_start=9960 + _globals['_UNSUBSCRIBECONFIGURATIONREQUEST']._serialized_end=10025 + _globals['_SUBSCRIBECONFIGURATIONRESPONSE']._serialized_start=10028 + _globals['_SUBSCRIBECONFIGURATIONRESPONSE']._serialized_end=10240 + _globals['_SUBSCRIBECONFIGURATIONRESPONSE_ITEMSENTRY']._serialized_start=9670 + _globals['_SUBSCRIBECONFIGURATIONRESPONSE_ITEMSENTRY']._serialized_end=9755 + _globals['_UNSUBSCRIBECONFIGURATIONRESPONSE']._serialized_start=10242 + _globals['_UNSUBSCRIBECONFIGURATIONRESPONSE']._serialized_end=10305 + _globals['_TRYLOCKREQUEST']._serialized_start=10308 + _globals['_TRYLOCKREQUEST']._serialized_end=10463 + _globals['_TRYLOCKRESPONSE']._serialized_start=10465 + _globals['_TRYLOCKRESPONSE']._serialized_end=10499 + _globals['_UNLOCKREQUEST']._serialized_start=10501 + _globals['_UNLOCKREQUEST']._serialized_end=10611 + _globals['_UNLOCKRESPONSE']._serialized_start=10614 + _globals['_UNLOCKRESPONSE']._serialized_end=10788 + _globals['_UNLOCKRESPONSE_STATUS']._serialized_start=10694 + _globals['_UNLOCKRESPONSE_STATUS']._serialized_end=10788 + _globals['_SUBTLEGETKEYREQUEST']._serialized_start=10791 + _globals['_SUBTLEGETKEYREQUEST']._serialized_end=10967 + _globals['_SUBTLEGETKEYREQUEST_KEYFORMAT']._serialized_start=10937 + _globals['_SUBTLEGETKEYREQUEST_KEYFORMAT']._serialized_end=10967 + _globals['_SUBTLEGETKEYRESPONSE']._serialized_start=10969 + _globals['_SUBTLEGETKEYRESPONSE']._serialized_end=11036 + _globals['_SUBTLEENCRYPTREQUEST']._serialized_start=11039 + _globals['_SUBTLEENCRYPTREQUEST']._serialized_end=11221 + _globals['_SUBTLEENCRYPTRESPONSE']._serialized_start=11223 + _globals['_SUBTLEENCRYPTRESPONSE']._serialized_end=11279 + _globals['_SUBTLEDECRYPTREQUEST']._serialized_start=11282 + _globals['_SUBTLEDECRYPTREQUEST']._serialized_end=11478 + _globals['_SUBTLEDECRYPTRESPONSE']._serialized_start=11480 + _globals['_SUBTLEDECRYPTRESPONSE']._serialized_end=11522 + _globals['_SUBTLEWRAPKEYREQUEST']._serialized_start=11525 + _globals['_SUBTLEWRAPKEYREQUEST']._serialized_end=11725 + _globals['_SUBTLEWRAPKEYRESPONSE']._serialized_start=11727 + _globals['_SUBTLEWRAPKEYRESPONSE']._serialized_end=11796 + _globals['_SUBTLEUNWRAPKEYREQUEST']._serialized_start=11799 + _globals['_SUBTLEUNWRAPKEYREQUEST']._serialized_end=12010 + _globals['_SUBTLEUNWRAPKEYRESPONSE']._serialized_start=12012 + _globals['_SUBTLEUNWRAPKEYRESPONSE']._serialized_end=12074 + _globals['_SUBTLESIGNREQUEST']._serialized_start=12076 + _globals['_SUBTLESIGNREQUEST']._serialized_end=12196 + _globals['_SUBTLESIGNRESPONSE']._serialized_start=12198 + _globals['_SUBTLESIGNRESPONSE']._serialized_end=12237 + _globals['_SUBTLEVERIFYREQUEST']._serialized_start=12240 + _globals['_SUBTLEVERIFYREQUEST']._serialized_end=12381 + _globals['_SUBTLEVERIFYRESPONSE']._serialized_start=12383 + _globals['_SUBTLEVERIFYRESPONSE']._serialized_end=12420 + _globals['_ENCRYPTREQUEST']._serialized_start=12423 + _globals['_ENCRYPTREQUEST']._serialized_end=12556 + 
_globals['_ENCRYPTREQUESTOPTIONS']._serialized_start=12559 + _globals['_ENCRYPTREQUESTOPTIONS']._serialized_end=12813 + _globals['_ENCRYPTRESPONSE']._serialized_start=12815 + _globals['_ENCRYPTRESPONSE']._serialized_end=12886 + _globals['_DECRYPTREQUEST']._serialized_start=12889 + _globals['_DECRYPTREQUEST']._serialized_end=13022 + _globals['_DECRYPTREQUESTOPTIONS']._serialized_start=13024 + _globals['_DECRYPTREQUESTOPTIONS']._serialized_end=13113 + _globals['_DECRYPTRESPONSE']._serialized_start=13115 + _globals['_DECRYPTRESPONSE']._serialized_end=13186 + _globals['_GETWORKFLOWREQUEST']._serialized_start=13188 + _globals['_GETWORKFLOWREQUEST']._serialized_end=13288 + _globals['_GETWORKFLOWRESPONSE']._serialized_start=13291 + _globals['_GETWORKFLOWRESPONSE']._serialized_end=13679 + _globals['_GETWORKFLOWRESPONSE_PROPERTIESENTRY']._serialized_start=13630 + _globals['_GETWORKFLOWRESPONSE_PROPERTIESENTRY']._serialized_end=13679 + _globals['_STARTWORKFLOWREQUEST']._serialized_start=13682 + _globals['_STARTWORKFLOWREQUEST']._serialized_end=13959 + _globals['_STARTWORKFLOWREQUEST_OPTIONSENTRY']._serialized_start=13913 + _globals['_STARTWORKFLOWREQUEST_OPTIONSENTRY']._serialized_end=13959 + _globals['_STARTWORKFLOWRESPONSE']._serialized_start=13961 + _globals['_STARTWORKFLOWRESPONSE']._serialized_end=14017 + _globals['_TERMINATEWORKFLOWREQUEST']._serialized_start=14019 + _globals['_TERMINATEWORKFLOWREQUEST']._serialized_end=14125 + _globals['_PAUSEWORKFLOWREQUEST']._serialized_start=14127 + _globals['_PAUSEWORKFLOWREQUEST']._serialized_end=14229 + _globals['_RESUMEWORKFLOWREQUEST']._serialized_start=14231 + _globals['_RESUMEWORKFLOWREQUEST']._serialized_end=14334 + _globals['_RAISEEVENTWORKFLOWREQUEST']._serialized_start=14337 + _globals['_RAISEEVENTWORKFLOWREQUEST']._serialized_end=14495 + _globals['_PURGEWORKFLOWREQUEST']._serialized_start=14497 + _globals['_PURGEWORKFLOWREQUEST']._serialized_end=14599 + _globals['_SHUTDOWNREQUEST']._serialized_start=14601 + _globals['_SHUTDOWNREQUEST']._serialized_end=14618 + _globals['_JOB']._serialized_start=14621 + _globals['_JOB']._serialized_end=14853 + _globals['_SCHEDULEJOBREQUEST']._serialized_start=14855 + _globals['_SCHEDULEJOBREQUEST']._serialized_end=14916 + _globals['_SCHEDULEJOBRESPONSE']._serialized_start=14918 + _globals['_SCHEDULEJOBRESPONSE']._serialized_end=14939 + _globals['_GETJOBREQUEST']._serialized_start=14941 + _globals['_GETJOBREQUEST']._serialized_end=14970 + _globals['_GETJOBRESPONSE']._serialized_start=14972 + _globals['_GETJOBRESPONSE']._serialized_end=15029 + _globals['_DELETEJOBREQUEST']._serialized_start=15031 + _globals['_DELETEJOBREQUEST']._serialized_end=15063 + _globals['_DELETEJOBRESPONSE']._serialized_start=15065 + _globals['_DELETEJOBRESPONSE']._serialized_end=15084 + _globals['_DAPR']._serialized_start=15176 + _globals['_DAPR']._serialized_end=21380 # @@protoc_insertion_point(module_scope) diff --git a/dapr/proto/runtime/v1/dapr_pb2.pyi b/dapr/proto/runtime/v1/dapr_pb2.pyi index dd4a98f2..c9a99f8b 100644 --- a/dapr/proto/runtime/v1/dapr_pb2.pyi +++ b/dapr/proto/runtime/v1/dapr_pb2.pyi @@ -737,33 +737,33 @@ class SubscribeTopicEventsRequestAlpha1(google.protobuf.message.Message): """SubscribeTopicEventsRequestAlpha1 is a message containing the details for subscribing to a topic via streaming. The first message must always be the initial request. All subsequent - messages must be event responses. + messages must be event processed responses. 
""" DESCRIPTOR: google.protobuf.descriptor.Descriptor INITIAL_REQUEST_FIELD_NUMBER: builtins.int - EVENT_RESPONSE_FIELD_NUMBER: builtins.int + EVENT_PROCESSED_FIELD_NUMBER: builtins.int @property - def initial_request(self) -> global___SubscribeTopicEventsInitialRequestAlpha1: ... + def initial_request(self) -> global___SubscribeTopicEventsRequestInitialAlpha1: ... @property - def event_response(self) -> global___SubscribeTopicEventsResponseAlpha1: ... + def event_processed(self) -> global___SubscribeTopicEventsRequestProcessedAlpha1: ... def __init__( self, *, - initial_request: global___SubscribeTopicEventsInitialRequestAlpha1 | None = ..., - event_response: global___SubscribeTopicEventsResponseAlpha1 | None = ..., + initial_request: global___SubscribeTopicEventsRequestInitialAlpha1 | None = ..., + event_processed: global___SubscribeTopicEventsRequestProcessedAlpha1 | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["event_response", b"event_response", "initial_request", b"initial_request", "subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["event_response", b"event_response", "initial_request", b"initial_request", "subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> typing.Literal["initial_request", "event_response"] | None: ... + def HasField(self, field_name: typing.Literal["event_processed", b"event_processed", "initial_request", b"initial_request", "subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["event_processed", b"event_processed", "initial_request", b"initial_request", "subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> typing.Literal["initial_request", "event_processed"] | None: ... global___SubscribeTopicEventsRequestAlpha1 = SubscribeTopicEventsRequestAlpha1 @typing.final -class SubscribeTopicEventsInitialRequestAlpha1(google.protobuf.message.Message): - """SubscribeTopicEventsInitialRequestAlpha1 is the initial message containing the - details for subscribing to a topic via streaming. +class SubscribeTopicEventsRequestInitialAlpha1(google.protobuf.message.Message): + """SubscribeTopicEventsRequestInitialAlpha1 is the initial message containing + the details for subscribing to a topic via streaming. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -816,11 +816,11 @@ class SubscribeTopicEventsInitialRequestAlpha1(google.protobuf.message.Message): def ClearField(self, field_name: typing.Literal["_dead_letter_topic", b"_dead_letter_topic", "dead_letter_topic", b"dead_letter_topic", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "topic", b"topic"]) -> None: ... def WhichOneof(self, oneof_group: typing.Literal["_dead_letter_topic", b"_dead_letter_topic"]) -> typing.Literal["dead_letter_topic"] | None: ... 
-global___SubscribeTopicEventsInitialRequestAlpha1 = SubscribeTopicEventsInitialRequestAlpha1 +global___SubscribeTopicEventsRequestInitialAlpha1 = SubscribeTopicEventsRequestInitialAlpha1 @typing.final -class SubscribeTopicEventsResponseAlpha1(google.protobuf.message.Message): - """SubscribeTopicEventsResponseAlpha1 is a message containing the result of a +class SubscribeTopicEventsRequestProcessedAlpha1(google.protobuf.message.Message): + """SubscribeTopicEventsRequestProcessedAlpha1 is the message containing the subscription to a topic. """ @@ -843,8 +843,48 @@ class SubscribeTopicEventsResponseAlpha1(google.protobuf.message.Message): def HasField(self, field_name: typing.Literal["status", b"status"]) -> builtins.bool: ... def ClearField(self, field_name: typing.Literal["id", b"id", "status", b"status"]) -> None: ... +global___SubscribeTopicEventsRequestProcessedAlpha1 = SubscribeTopicEventsRequestProcessedAlpha1 + +@typing.final +class SubscribeTopicEventsResponseAlpha1(google.protobuf.message.Message): + """SubscribeTopicEventsResponseAlpha1 is a message returned from daprd + when subscribing to a topic via streaming. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INITIAL_RESPONSE_FIELD_NUMBER: builtins.int + EVENT_MESSAGE_FIELD_NUMBER: builtins.int + @property + def initial_response(self) -> global___SubscribeTopicEventsResponseInitialAlpha1: ... + @property + def event_message(self) -> dapr.proto.runtime.v1.appcallback_pb2.TopicEventRequest: ... + def __init__( + self, + *, + initial_response: global___SubscribeTopicEventsResponseInitialAlpha1 | None = ..., + event_message: dapr.proto.runtime.v1.appcallback_pb2.TopicEventRequest | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["event_message", b"event_message", "initial_response", b"initial_response", "subscribe_topic_events_response_type", b"subscribe_topic_events_response_type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["event_message", b"event_message", "initial_response", b"initial_response", "subscribe_topic_events_response_type", b"subscribe_topic_events_response_type"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["subscribe_topic_events_response_type", b"subscribe_topic_events_response_type"]) -> typing.Literal["initial_response", "event_message"] | None: ... + global___SubscribeTopicEventsResponseAlpha1 = SubscribeTopicEventsResponseAlpha1 +@typing.final +class SubscribeTopicEventsResponseInitialAlpha1(google.protobuf.message.Message): + """SubscribeTopicEventsResponseInitialAlpha1 is the initial response from daprd + when subscribing to a topic. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___SubscribeTopicEventsResponseInitialAlpha1 = SubscribeTopicEventsResponseInitialAlpha1 + @typing.final class InvokeBindingRequest(google.protobuf.message.Message): """InvokeBindingRequest is the message to send data to output bindings""" @@ -3088,7 +3128,9 @@ global___ShutdownRequest = ShutdownRequest @typing.final class Job(google.protobuf.message.Message): - """Job is the definition of a job.""" + """Job is the definition of a job. At least one of schedule or due_time must be + provided but can also be provided together. 
+ """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -3101,16 +3143,47 @@ class Job(google.protobuf.message.Message): name: builtins.str """The unique name for the job.""" schedule: builtins.str - """The schedule for the job.""" + """schedule is an optional schedule at which the job is to be run. + Accepts both systemd timer style cron expressions, as well as human + readable '@' prefixed period strings as defined below. + + Systemd timer style cron accepts 6 fields: + seconds | minutes | hours | day of month | month | day of week + 0-59 | 0-59 | 0-23 | 1-31 | 1-12/jan-dec | 0-7/sun-sat + + "0 30 * * * *" - every hour on the half hour + "0 15 3 * * *" - every day at 03:15 + + Period string expressions: + Entry | Description | Equivalent To + ----- | ----------- | ------------- + @every | Run every (e.g. '@every 1h30m') | N/A + @yearly (or @annually) | Run once a year, midnight, Jan. 1st | 0 0 0 1 1 * + @monthly | Run once a month, midnight, first of month | 0 0 0 1 * * + @weekly | Run once a week, midnight on Sunday | 0 0 0 * * 0 + @daily (or @midnight) | Run once a day, midnight | 0 0 0 * * * + @hourly | Run once an hour, beginning of hour | 0 0 * * * * + """ repeats: builtins.int - """Optional: jobs with fixed repeat counts (accounting for Actor Reminders).""" + """repeats is the optional number of times in which the job should be + triggered. If not set, the job will run indefinitely or until expiration. + """ due_time: builtins.str - """Optional: sets time at which or time interval before the callback is invoked for the first time.""" + """due_time is the optional time at which the job should be active, or the + "one shot" time if other scheduling type fields are not provided. Accepts + a "point in time" string in the format of RFC3339, Go duration string + (calculated from job creation time), or non-repeating ISO8601. + """ ttl: builtins.str - """Optional: Time To Live to allow for auto deletes (accounting for Actor Reminders).""" + """ttl is the optional time to live or expiration of the job. Accepts a + "point in time" string in the format of RFC3339, Go duration string + (calculated from job creation time), or non-repeating ISO8601. + """ @property def data(self) -> google.protobuf.any_pb2.Any: - """Job data.""" + """payload is the serialized job payload that will be sent to the recipient + when the job is triggered. + """ def __init__( self, diff --git a/dapr/proto/runtime/v1/dapr_pb2_grpc.py b/dapr/proto/runtime/v1/dapr_pb2_grpc.py index da7ae756..b97d7f02 100644 --- a/dapr/proto/runtime/v1/dapr_pb2_grpc.py +++ b/dapr/proto/runtime/v1/dapr_pb2_grpc.py @@ -4,14 +4,11 @@ import warnings from dapr.proto.common.v1 import common_pb2 as dapr_dot_proto_dot_common_dot_v1_dot_common__pb2 -from dapr.proto.runtime.v1 import appcallback_pb2 as dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2 from dapr.proto.runtime.v1 import dapr_pb2 as dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -GRPC_GENERATED_VERSION = '1.63.0' +GRPC_GENERATED_VERSION = '1.66.1' GRPC_VERSION = grpc.__version__ -EXPECTED_ERROR_RELEASE = '1.65.0' -SCHEDULED_RELEASE_DATE = 'June 25, 2024' _version_not_supported = False try: @@ -21,15 +18,12 @@ _version_not_supported = True if _version_not_supported: - warnings.warn( + raise RuntimeError( f'The grpc package installed is at version {GRPC_VERSION},' + f' but the generated code in dapr/proto/runtime/v1/dapr_pb2_grpc.py depends on' + f' grpcio>={GRPC_GENERATED_VERSION}.' 
+ f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},' - + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.', - RuntimeWarning ) @@ -96,7 +90,7 @@ def __init__(self, channel): self.SubscribeTopicEventsAlpha1 = channel.stream_stream( '/dapr.proto.runtime.v1.Dapr/SubscribeTopicEventsAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeTopicEventsRequestAlpha1.SerializeToString, - response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventRequest.FromString, + response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeTopicEventsResponseAlpha1.FromString, _registered_method=True) self.InvokeBinding = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/InvokeBinding', @@ -803,7 +797,7 @@ def add_DaprServicer_to_server(servicer, server): 'SubscribeTopicEventsAlpha1': grpc.stream_stream_rpc_method_handler( servicer.SubscribeTopicEventsAlpha1, request_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeTopicEventsRequestAlpha1.FromString, - response_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventRequest.SerializeToString, + response_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeTopicEventsResponseAlpha1.SerializeToString, ), 'InvokeBinding': grpc.unary_unary_rpc_method_handler( servicer.InvokeBinding, @@ -1044,6 +1038,7 @@ def add_DaprServicer_to_server(servicer, server): generic_handler = grpc.method_handlers_generic_handler( 'dapr.proto.runtime.v1.Dapr', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('dapr.proto.runtime.v1.Dapr', rpc_method_handlers) # This class is part of an EXPERIMENTAL API. @@ -1337,7 +1332,7 @@ def SubscribeTopicEventsAlpha1(request_iterator, target, '/dapr.proto.runtime.v1.Dapr/SubscribeTopicEventsAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeTopicEventsRequestAlpha1.SerializeToString, - dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventRequest.FromString, + dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeTopicEventsResponseAlpha1.FromString, options, channel_credentials, insecure, diff --git a/examples/pubsub_streaming/publisher.py b/examples/pubsub_streaming/publisher.py new file mode 100644 index 00000000..32e6db51 --- /dev/null +++ b/examples/pubsub_streaming/publisher.py @@ -0,0 +1,68 @@ +# ------------------------------------------------------------ +# Copyright 2022 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------ + +import json +import time + +from dapr.clients import DaprClient + +with DaprClient() as d: + id = 0 + while id < 3: + id += 1 + req_data = {'id': time.time(), 'message': 'hello world'} + + # Create a typed message with content type and body + resp = d.publish_event( + pubsub_name='pubsub', + topic_name='TOPIC_A', + data=json.dumps(req_data), + data_content_type='application/json', + ) + + # Print the request + print(req_data, flush=True) + + time.sleep(1) + + # we can publish events to different topics but handle them with the same method + # by disabling topic validation in the subscriber + # + # id = 3 + # while id < 6: + # id += 1 + # req_data = {'id': id, 'message': 'hello world'} + # resp = d.publish_event( + # pubsub_name='pubsub', + # topic_name=f'topic/{id}', + # data=json.dumps(req_data), + # data_content_type='application/json', + # ) + # + # # Print the request + # print(req_data, flush=True) + # + # time.sleep(0.5) + # + # # This topic will fail - initiate a retry which gets routed to the dead letter topic + # req_data['id'] = 7 + # resp = d.publish_event( + # pubsub_name='pubsub', + # topic_name='TOPIC_D', + # data=json.dumps(req_data), + # data_content_type='application/json', + # publish_metadata={'custommeta': 'somevalue'}, + # ) + # + # # Print the request + # print(req_data, flush=True) diff --git a/examples/pubsub_streaming/subscriber.py b/examples/pubsub_streaming/subscriber.py new file mode 100644 index 00000000..59d95cf8 --- /dev/null +++ b/examples/pubsub_streaming/subscriber.py @@ -0,0 +1,45 @@ +from dapr.clients import DaprClient +from dapr.clients.grpc._response import TopicEventResponse +from dapr.clients.grpc.subscription import success, retry, drop + + +def process_message(message): + # Process the message here + print(f"Processing message: {message.data}") + return TopicEventResponse('success').status + + +def main(): + with DaprClient() as client: + + subscription = client.subscribe(pubsub_name="pubsub", topic="TOPIC_A", dead_letter_topic="TOPIC_A_DEAD") + + try: + while True: + try: + try: + message = subscription.next_message(timeout=5) + except Exception as e: + print(f"An error occurred: {e}") + + if message is None: + print("No message received within timeout period.") + continue + + print(f"Received message with ID: {message.id}") + + # Process the message + try: + subscription.respond(message, process_message(message)) + except Exception as e: + print(f"An error occurred while sending the message: {e}") + except KeyboardInterrupt: + print("Received interrupt, shutting down...") + break + + finally: + subscription.close() + + +if __name__ == "__main__": + main() From b7abfb454c9be262024c18c662f5d49e8289aa49 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Sun, 22 Sep 2024 20:54:48 +0100 Subject: [PATCH 02/33] works Signed-off-by: Elena Kolevska --- dapr/clients/exceptions.py | 27 +++--- dapr/clients/grpc/subscription.py | 114 +++++++++++++++++------- examples/pubsub_streaming/subscriber.py | 31 ++++--- tests/clients/fake_dapr_server.py | 10 ++- tests/clients/test_dapr_grpc_client.py | 41 +++++++-- 5 files changed, 150 insertions(+), 73 deletions(-) diff --git a/dapr/clients/exceptions.py b/dapr/clients/exceptions.py index 91bc04a8..6650ec6a 100644 --- a/dapr/clients/exceptions.py +++ b/dapr/clients/exceptions.py @@ -27,22 +27,15 @@ class DaprInternalError(Exception): """DaprInternalError encapsulates all Dapr exceptions""" - def __init__( - self, - message: Optional[str], - 
error_code: Optional[str] = ERROR_CODE_UNKNOWN, - raw_response_bytes: Optional[bytes] = None, - ): + def __init__(self, message: Optional[str], error_code: Optional[str] = ERROR_CODE_UNKNOWN, + raw_response_bytes: Optional[bytes] = None, ): self._message = message self._error_code = error_code self._raw_response_bytes = raw_response_bytes def as_dict(self): - return { - 'message': self._message, - 'errorCode': self._error_code, - 'raw_response_bytes': self._raw_response_bytes, - } + return {'message': self._message, 'errorCode': self._error_code, + 'raw_response_bytes': self._raw_response_bytes, } class StatusDetails: @@ -119,12 +112,8 @@ def get_grpc_status(self): return self._grpc_status def json(self): - error_details = { - 'status_code': self.code().name, - 'message': self.details(), - 'error_code': self.error_code(), - 'details': self._details.as_dict(), - } + error_details = {'status_code': self.code().name, 'message': self.details(), + 'error_code': self.error_code(), 'details': self._details.as_dict(), } return json.dumps(error_details) @@ -132,3 +121,7 @@ def serialize_status_detail(status_detail): if not status_detail: return None return MessageToDict(status_detail, preserving_proto_field_name=True) + + +class StreamInactiveError(Exception): + pass diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index c4082411..3439fdca 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -1,4 +1,7 @@ import grpc + +from dapr.clients.exceptions import StreamInactiveError +from dapr.clients.grpc._response import TopicEventResponse from dapr.proto import api_v1, appcallback_v1 import queue import threading @@ -24,9 +27,11 @@ def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=No self.metadata = metadata or {} self.dead_letter_topic = dead_letter_topic or '' self._stream = None + self._response_thread = None self._send_queue = queue.Queue() self._receive_queue = queue.Queue() self._stream_active = False + self._stream_lock = threading.Lock() # Protects _stream_active def start(self): def request_iterator(): @@ -38,65 +43,112 @@ def request_iterator(): dead_letter_topic=self.dead_letter_topic or '')) yield initial_request - while self._stream_active: + while self._is_stream_active(): try: - request = self._send_queue.get() - if request is None: - break - - yield request + yield self._send_queue.get() # TODO Should I add a timeout? 
except queue.Empty: continue except Exception as e: - print(f"Exception in request_iterator: {e}") - raise e + raise Exception(f"Error in request iterator: {e}") # Create the bidirectional stream self._stream = self._stub.SubscribeTopicEventsAlpha1(request_iterator()) - self._stream_active = True + self._set_stream_active() # Start a thread to handle incoming messages - threading.Thread(target=self._handle_responses, daemon=True).start() + self._response_thread = threading.Thread(target=self._handle_responses, daemon=True) + self._response_thread.start() def _handle_responses(self): try: # The first message dapr sends on the stream is for signalling only, so discard it next(self._stream) - for msg in self._stream: - print(f"Received message from dapr on stream: {msg.event_message.id}") # SubscribeTopicEventsResponseAlpha1 - self._receive_queue.put(msg.event_message) + # Read messages from the stream and put them in the receive queue + for message in self._stream: + if self._is_stream_active(): + self._receive_queue.put(message.event_message) + else: + break except grpc.RpcError as e: - print(f"gRPC error in stream: {e}") + if e.code() != grpc.StatusCode.CANCELLED: + print(f"gRPC error in stream: {e.details()}, Status Code: {e.code()}") except Exception as e: - print(f"Unexpected error in stream: {e}") + raise Exception(f"Error while handling responses: {e}") finally: - self._stream_active = False + self._set_stream_inactive() - def next_message(self, timeout=None): - print("in next_message") - try: - return self._receive_queue.get(timeout=timeout) - except queue.Empty as e : - print("queue empty", e) - return None - except Exception as e: - print(f"Exception in next_message: {e}") - return None + def next_message(self, timeout=1): + """ + Gets the next message from the receive queue + @param timeout: Timeout in seconds + @return: The next message + """ + return self.read_message_from_queue(self._receive_queue, timeout=timeout) - def respond(self, message, status): + def _respond(self, message, status): try: status = appcallback_v1.TopicEventResponse(status=status.value) response = api_v1.SubscribeTopicEventsRequestProcessedAlpha1(id=message.id, status=status) msg = api_v1.SubscribeTopicEventsRequestAlpha1(event_processed=response) - self._send_queue.put(msg) + self.send_message_to_queue(self._send_queue, msg) except Exception as e: print(f"Exception in send_message: {e}") + def respond_success(self, message): + self._respond(message, TopicEventResponse('success').status) + + def respond_retry(self, message): + self._respond(message, TopicEventResponse('retry').status) + + def respond_drop(self, message): + self._respond(message, TopicEventResponse('drop').status) + + def send_message_to_queue(self, q, message): + if not self._is_stream_active(): + raise StreamInactiveError("Stream is not active") + q.put(message) + + def read_message_from_queue(self, q, timeout): + if not self._is_stream_active(): + raise StreamInactiveError("Stream is not active") + try: + return q.get(timeout=timeout) + except queue.Empty: + return None + + def _set_stream_active(self): + with self._stream_lock: + self._stream_active = True + + def _set_stream_inactive(self): + with self._stream_lock: + self._stream_active = False + + def _is_stream_active(self): + with self._stream_lock: + return self._stream_active + def close(self): - self._stream_active = False - self._send_queue.put(None) + if not self._is_stream_active(): + return + + self._set_stream_inactive() + + # Cancel the stream if self._stream: - 
self._stream.cancel() \ No newline at end of file + try: + self._stream.cancel() + except grpc.RpcError as e: + if e.code() != grpc.StatusCode.CANCELLED: + raise Exception(f"Error while closing stream: {e}") + except Exception as e: + raise Exception(f"Error while closing stream: {e}") + + # Join the response-handling thread to ensure it has finished + if self._response_thread: + self._response_thread.join() + self._response_thread = None + diff --git a/examples/pubsub_streaming/subscriber.py b/examples/pubsub_streaming/subscriber.py index 59d95cf8..d2029c4e 100644 --- a/examples/pubsub_streaming/subscriber.py +++ b/examples/pubsub_streaming/subscriber.py @@ -5,8 +5,8 @@ def process_message(message): # Process the message here - print(f"Processing message: {message.data}") - return TopicEventResponse('success').status + print(f"Processing message: {message}") + return "success" def main(): @@ -15,26 +15,25 @@ def main(): subscription = client.subscribe(pubsub_name="pubsub", topic="TOPIC_A", dead_letter_topic="TOPIC_A_DEAD") try: - while True: + for i in range(5): try: - try: - message = subscription.next_message(timeout=5) - except Exception as e: - print(f"An error occurred: {e}") - + message = subscription.next_message(timeout=0.1) if message is None: print("No message received within timeout period.") continue - print(f"Received message with ID: {message.id}") - # Process the message - try: - subscription.respond(message, process_message(message)) - except Exception as e: - print(f"An error occurred while sending the message: {e}") - except KeyboardInterrupt: - print("Received interrupt, shutting down...") + response_status = process_message(message) + + if response_status == "success": + subscription.respond_success(message) + elif response_status == "retry": + subscription.respond_retry(message) + elif response_status == "drop": + subscription.respond_drop(message) + + except Exception as e: + print(f"Error getting message: {e}") break finally: diff --git a/tests/clients/fake_dapr_server.py b/tests/clients/fake_dapr_server.py index d2f57a82..392be45c 100644 --- a/tests/clients/fake_dapr_server.py +++ b/tests/clients/fake_dapr_server.py @@ -7,7 +7,7 @@ from grpc_status import rpc_status from dapr.clients.grpc._helpers import to_bytes -from dapr.proto import api_service_v1, common_v1, api_v1 +from dapr.proto import api_service_v1, common_v1, api_v1, appcallback_v1 from dapr.proto.common.v1.common_pb2 import ConfigurationItem from dapr.clients.grpc._response import WorkflowRuntimeStatus from dapr.proto.runtime.v1.dapr_pb2 import ( @@ -177,6 +177,14 @@ def PublishEvent(self, request, context): context.set_trailing_metadata(trailers) return empty_pb2.Empty() + def SubscribeTopicEventsAlpha1(self, request_iterator, context): + yield api_v1.SubscribeTopicEventsResponseAlpha1( + initial_response=api_v1.SubscribeTopicEventsResponseInitialAlpha1()) + yield api_v1.SubscribeTopicEventsResponseAlpha1( + event_message=appcallback_v1.TopicEventRequest(id='123', topic="TOPIC_A", data=b'hello1')) + yield api_v1.SubscribeTopicEventsResponseAlpha1( + event_message=appcallback_v1.TopicEventRequest(id='456', topic="TOPIC_A", data=b'hello2')) + def SaveState(self, request, context): self.check_for_exception(context) diff --git a/tests/clients/test_dapr_grpc_client.py b/tests/clients/test_dapr_grpc_client.py index 1ddb8bc2..3588867e 100644 --- a/tests/clients/test_dapr_grpc_client.py +++ b/tests/clients/test_dapr_grpc_client.py @@ -24,7 +24,7 @@ from google.rpc import status_pb2, code_pb2 -from 
dapr.clients.exceptions import DaprGrpcError +from dapr.clients.exceptions import DaprGrpcError, StreamInactiveError from dapr.clients.grpc.client import DaprGrpcClient from dapr.clients import DaprClient from dapr.proto import common_v1 @@ -34,13 +34,9 @@ from dapr.clients.grpc._request import TransactionalStateOperation from dapr.clients.grpc._state import StateOptions, Consistency, Concurrency, StateItem from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc._response import ( - ConfigurationItem, - ConfigurationResponse, - ConfigurationWatcher, - UnlockResponseStatus, - WorkflowRuntimeStatus, -) +from dapr.clients.grpc._response import (ConfigurationItem, ConfigurationResponse, + ConfigurationWatcher, UnlockResponseStatus, + WorkflowRuntimeStatus, TopicEventResponse, ) class DaprGrpcClientTests(unittest.TestCase): @@ -262,6 +258,35 @@ def test_publish_error(self): data=111, ) + def test_subscribe_topic(self): + dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') + subscription = dapr.subscribe(pubsub_name='pubsub', topic='example') + + # First message + message1 = subscription.next_message(timeout=5) + subscription.respond_success(message1) + + self.assertEqual('123', message1.id) + self.assertEqual(b'hello1', message1.data) + self.assertEqual('TOPIC_A', message1.topic) + + # Second message + message2 = subscription.next_message(timeout=5) + subscription.respond_success(message2) + + self.assertEqual('456', message2.id) + self.assertEqual(b'hello2', message2.data) + self.assertEqual('TOPIC_A', message2.topic) + + def test_subscribe_topic_early_close(self): + dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') + subscription = dapr.subscribe(pubsub_name='pubsub', topic='example') + subscription.close() + + with self.assertRaises(StreamInactiveError): + subscription.next_message(timeout=5) + + @patch.object(settings, 'DAPR_API_TOKEN', 'test-token') def test_dapr_api_token_insertion(self): dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') From 19f8c9b5e0461f5b1c8dad57022d35e1c9cd88ea Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 23 Sep 2024 01:29:18 +0100 Subject: [PATCH 03/33] Sync bidi streaming and tests Signed-off-by: Elena Kolevska --- dapr/clients/exceptions.py | 23 ++- dapr/clients/grpc/client.py | 12 +- dapr/clients/grpc/subscription.py | 138 ++++++++++++++---- examples/pubsub-streaming/README.md | 76 ++++++++++ .../publisher.py | 36 +---- .../subscriber.py | 23 +-- tests/clients/fake_dapr_server.py | 33 ++++- tests/clients/test_dapr_grpc_client.py | 42 ++++-- tox.ini | 1 + 9 files changed, 279 insertions(+), 105 deletions(-) create mode 100644 examples/pubsub-streaming/README.md rename examples/{pubsub_streaming => pubsub-streaming}/publisher.py (51%) rename examples/{pubsub_streaming => pubsub-streaming}/subscriber.py (57%) diff --git a/dapr/clients/exceptions.py b/dapr/clients/exceptions.py index 6650ec6a..c872b65a 100644 --- a/dapr/clients/exceptions.py +++ b/dapr/clients/exceptions.py @@ -27,15 +27,22 @@ class DaprInternalError(Exception): """DaprInternalError encapsulates all Dapr exceptions""" - def __init__(self, message: Optional[str], error_code: Optional[str] = ERROR_CODE_UNKNOWN, - raw_response_bytes: Optional[bytes] = None, ): + def __init__( + self, + message: Optional[str], + error_code: Optional[str] = ERROR_CODE_UNKNOWN, + raw_response_bytes: Optional[bytes] = None, + ): self._message = message self._error_code = error_code self._raw_response_bytes = raw_response_bytes def 
as_dict(self): - return {'message': self._message, 'errorCode': self._error_code, - 'raw_response_bytes': self._raw_response_bytes, } + return { + 'message': self._message, + 'errorCode': self._error_code, + 'raw_response_bytes': self._raw_response_bytes, + } class StatusDetails: @@ -112,8 +119,12 @@ def get_grpc_status(self): return self._grpc_status def json(self): - error_details = {'status_code': self.code().name, 'message': self.details(), - 'error_code': self.error_code(), 'details': self._details.as_dict(), } + error_details = { + 'status_code': self.code().name, + 'message': self.details(), + 'error_code': self.error_code(), + 'details': self._details.as_dict(), + } return json.dumps(error_details) diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index 71009b83..81e65c7e 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -46,7 +46,7 @@ from dapr.clients.health import DaprHealth from dapr.clients.retry import RetryPolicy from dapr.conf import settings -from dapr.proto import api_v1, api_service_v1, common_v1, appcallback_v1 +from dapr.proto import api_v1, api_service_v1, common_v1 from dapr.proto.runtime.v1.dapr_pb2 import UnsubscribeConfigurationResponse from dapr.version import __version__ @@ -482,16 +482,6 @@ def publish_event( return DaprResponse(call.initial_metadata()) - # def subscribe(self, pubsub_name, topic, metadata=None, dead_letter_topic=None): - # stream = self._stub.SubscribeTopicEventsAlpha1() - # - # # Send InitialRequest - # initial_request = api_v1.SubscribeTopicEventsInitialRequestAlpha1(pubsub_name=pubsub_name, topic=topic, metadata=metadata, dead_letter_topic=dead_letter_topic) - # request = api_v1.SubscribeTopicEventsRequestAlpha1(initial_request=initial_request) - # stream.write(request) - # - # return stream - def subscribe(self, pubsub_name, topic, metadata=None, dead_letter_topic=None): subscription = Subscription(self._stub, pubsub_name, topic, metadata, dead_letter_topic) subscription.start() diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 3439fdca..1cdf1ef5 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -1,3 +1,5 @@ +import json + import grpc from dapr.clients.exceptions import StreamInactiveError @@ -34,32 +36,45 @@ def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=No self._stream_lock = threading.Lock() # Protects _stream_active def start(self): - def request_iterator(): + def outgoing_request_iterator(): + """ + Generator function to create the request iterator for the stream + """ try: # Send InitialRequest needed to establish the stream initial_request = api_v1.SubscribeTopicEventsRequestAlpha1( initial_request=api_v1.SubscribeTopicEventsRequestInitialAlpha1( - pubsub_name=self.pubsub_name, topic=self.topic, metadata=self.metadata or {}, - dead_letter_topic=self.dead_letter_topic or '')) + pubsub_name=self.pubsub_name, + topic=self.topic, + metadata=self.metadata or {}, + dead_letter_topic=self.dead_letter_topic or '', + ) + ) yield initial_request + # Start sending back acknowledgement messages from the send queue while self._is_stream_active(): try: - yield self._send_queue.get() # TODO Should I add a timeout? 
+ response = self._send_queue.get() + # The above blocks until a message is available or the stream is closed + # so that's why we need to check again if the stream is still active + if not self._is_stream_active(): + break + yield response except queue.Empty: continue except Exception as e: - raise Exception(f"Error in request iterator: {e}") + raise Exception(f'Error in request iterator: {e}') # Create the bidirectional stream - self._stream = self._stub.SubscribeTopicEventsAlpha1(request_iterator()) + self._stream = self._stub.SubscribeTopicEventsAlpha1(outgoing_request_iterator()) self._set_stream_active() # Start a thread to handle incoming messages - self._response_thread = threading.Thread(target=self._handle_responses, daemon=True) + self._response_thread = threading.Thread(target=self._handle_incoming_messages, daemon=True) self._response_thread.start() - def _handle_responses(self): + def _handle_incoming_messages(self): try: # The first message dapr sends on the stream is for signalling only, so discard it next(self._stream) @@ -72,30 +87,31 @@ def _handle_responses(self): break except grpc.RpcError as e: if e.code() != grpc.StatusCode.CANCELLED: - print(f"gRPC error in stream: {e.details()}, Status Code: {e.code()}") + print(f'gRPC error in stream: {e.details()}, Status Code: {e.code()}') except Exception as e: - raise Exception(f"Error while handling responses: {e}") + raise Exception(f'Error while handling responses: {e}') finally: self._set_stream_inactive() - def next_message(self, timeout=1): - """ - Gets the next message from the receive queue - @param timeout: Timeout in seconds - @return: The next message - """ - return self.read_message_from_queue(self._receive_queue, timeout=timeout) + def next_message(self, timeout=None): + msg = self.read_message_from_queue(self._receive_queue, timeout=timeout) + + if msg is None: + return None + + return SubscriptionMessage(msg) def _respond(self, message, status): try: status = appcallback_v1.TopicEventResponse(status=status.value) - response = api_v1.SubscribeTopicEventsRequestProcessedAlpha1(id=message.id, - status=status) + response = api_v1.SubscribeTopicEventsRequestProcessedAlpha1( + id=message.id(), status=status + ) msg = api_v1.SubscribeTopicEventsRequestAlpha1(event_processed=response) self.send_message_to_queue(self._send_queue, msg) except Exception as e: - print(f"Exception in send_message: {e}") + print(f'Exception in send_message: {e}') def respond_success(self, message): self._respond(message, TopicEventResponse('success').status) @@ -108,12 +124,12 @@ def respond_drop(self, message): def send_message_to_queue(self, q, message): if not self._is_stream_active(): - raise StreamInactiveError("Stream is not active") + raise StreamInactiveError('Stream is not active') q.put(message) - def read_message_from_queue(self, q, timeout): + def read_message_from_queue(self, q, timeout=None): if not self._is_stream_active(): - raise StreamInactiveError("Stream is not active") + raise StreamInactiveError('Stream is not active') try: return q.get(timeout=timeout) except queue.Empty: @@ -143,12 +159,84 @@ def close(self): self._stream.cancel() except grpc.RpcError as e: if e.code() != grpc.StatusCode.CANCELLED: - raise Exception(f"Error while closing stream: {e}") + raise Exception(f'Error while closing stream: {e}') except Exception as e: - raise Exception(f"Error while closing stream: {e}") + raise Exception(f'Error while closing stream: {e}') # Join the response-handling thread to ensure it has finished if self._response_thread: 
self._response_thread.join() self._response_thread = None + +class SubscriptionMessage: + def __init__(self, msg): + self._id = msg.id + self._source = msg.source + self._type = msg.type + self._spec_version = msg.spec_version + self._data_content_type = msg.data_content_type + self._topic = msg.topic + self._pubsub_name = msg.pubsub_name + self._raw_data = msg.data + self._extensions = msg.extensions + self._data = None + + # Parse the content based on its media type + if self._raw_data and len(self._raw_data) > 0: + self._parse_data_content() + + def id(self): + return self._id + + def source(self): + return self._source + + def type(self): + return self._type + + def spec_version(self): + return self._spec_version + + def data_content_type(self): + return self._data_content_type + + def topic(self): + return self._topic + + def pubsub_name(self): + return self._pubsub_name + + def raw_data(self): + return self._raw_data + + def extensions(self): + return self._extensions + + def data(self): + return self._data + + def _parse_data_content(self): + try: + if self._data_content_type == 'application/json': + try: + self._data = json.loads(self._raw_data) + except json.JSONDecodeError: + pass # If JSON parsing fails, keep `data` as None + elif self._data_content_type == 'text/plain': + # Assume UTF-8 encoding + try: + self._data = self._raw_data.decode('utf-8') + except UnicodeDecodeError: + pass + elif self._data_content_type.startswith( + 'application/' + ) and self._data_content_type.endswith('+json'): + # Handle custom JSON-based media types (e.g., application/vnd.api+json) + try: + self._data = json.loads(self._raw_data) + except json.JSONDecodeError: + pass # If JSON parsing fails, keep `data` as None + except Exception as e: + # Log or handle any unexpected exceptions + print(f'Error parsing media type: {e}') diff --git a/examples/pubsub-streaming/README.md b/examples/pubsub-streaming/README.md new file mode 100644 index 00000000..5d80cf0b --- /dev/null +++ b/examples/pubsub-streaming/README.md @@ -0,0 +1,76 @@ +# Example - Publish and subscribe to messages + +This example utilizes a publisher and a subscriber to show the bidirectional pubsub pattern. +It creates a publisher and calls the `publish_event` method in the `DaprClient`. +In the s`subscriber.py` file it creates a subscriber object that can call the `next_message` method to get new messages from the stream. After processing the new message, it returns a status to the stream. + + +> **Note:** Make sure to use the latest proto bindings + +## Pre-requisites + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started) +- [Install Python 3.8+](https://www.python.org/downloads/) + +## Install Dapr python-SDK + + + +```bash +pip3 install dapr +``` + +## Run the example + +Run the following command in a terminal/command prompt: + + + +```bash +# 1. Start Subscriber +dapr run --app-id python-subscriber --app-protocol grpc python3 subscriber.py +``` + + + +In another terminal/command prompt run: + + + +```bash +# 2. 
Start Publisher +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +``` + + + +## Cleanup + + diff --git a/examples/pubsub_streaming/publisher.py b/examples/pubsub-streaming/publisher.py similarity index 51% rename from examples/pubsub_streaming/publisher.py rename to examples/pubsub-streaming/publisher.py index 32e6db51..f7af0f10 100644 --- a/examples/pubsub_streaming/publisher.py +++ b/examples/pubsub-streaming/publisher.py @@ -18,9 +18,9 @@ with DaprClient() as d: id = 0 - while id < 3: + while id < 5: id += 1 - req_data = {'id': time.time(), 'message': 'hello world'} + req_data = {'id': id, 'message': 'hello world'} # Create a typed message with content type and body resp = d.publish_event( @@ -34,35 +34,3 @@ print(req_data, flush=True) time.sleep(1) - - # we can publish events to different topics but handle them with the same method - # by disabling topic validation in the subscriber - # - # id = 3 - # while id < 6: - # id += 1 - # req_data = {'id': id, 'message': 'hello world'} - # resp = d.publish_event( - # pubsub_name='pubsub', - # topic_name=f'topic/{id}', - # data=json.dumps(req_data), - # data_content_type='application/json', - # ) - # - # # Print the request - # print(req_data, flush=True) - # - # time.sleep(0.5) - # - # # This topic will fail - initiate a retry which gets routed to the dead letter topic - # req_data['id'] = 7 - # resp = d.publish_event( - # pubsub_name='pubsub', - # topic_name='TOPIC_D', - # data=json.dumps(req_data), - # data_content_type='application/json', - # publish_metadata={'custommeta': 'somevalue'}, - # ) - # - # # Print the request - # print(req_data, flush=True) diff --git a/examples/pubsub_streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py similarity index 57% rename from examples/pubsub_streaming/subscriber.py rename to examples/pubsub-streaming/subscriber.py index d2029c4e..f6f9078a 100644 --- a/examples/pubsub_streaming/subscriber.py +++ b/examples/pubsub-streaming/subscriber.py @@ -5,40 +5,41 @@ def process_message(message): # Process the message here - print(f"Processing message: {message}") - return "success" + print(f'Processing message: {message.data()} from {message.topic()}') + return 'success' def main(): with DaprClient() as client: - - subscription = client.subscribe(pubsub_name="pubsub", topic="TOPIC_A", dead_letter_topic="TOPIC_A_DEAD") + subscription = client.subscribe( + pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + ) try: for i in range(5): try: - message = subscription.next_message(timeout=0.1) + message = subscription.next_message() if message is None: - print("No message received within timeout period.") + print('No message received within timeout period.') continue # Process the message response_status = process_message(message) - if response_status == "success": + if response_status == 'success': subscription.respond_success(message) - elif response_status == "retry": + elif response_status == 'retry': subscription.respond_retry(message) - elif response_status == "drop": + elif response_status == 'drop': subscription.respond_drop(message) except Exception as e: - print(f"Error getting message: {e}") + print(f'Error getting message: {e}') break finally: subscription.close() -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tests/clients/fake_dapr_server.py b/tests/clients/fake_dapr_server.py index 392be45c..1080ab30 100644 --- a/tests/clients/fake_dapr_server.py +++ 
b/tests/clients/fake_dapr_server.py @@ -179,11 +179,34 @@ def PublishEvent(self, request, context): def SubscribeTopicEventsAlpha1(self, request_iterator, context): yield api_v1.SubscribeTopicEventsResponseAlpha1( - initial_response=api_v1.SubscribeTopicEventsResponseInitialAlpha1()) - yield api_v1.SubscribeTopicEventsResponseAlpha1( - event_message=appcallback_v1.TopicEventRequest(id='123', topic="TOPIC_A", data=b'hello1')) - yield api_v1.SubscribeTopicEventsResponseAlpha1( - event_message=appcallback_v1.TopicEventRequest(id='456', topic="TOPIC_A", data=b'hello2')) + initial_response=api_v1.SubscribeTopicEventsResponseInitialAlpha1() + ) + + msg2 = appcallback_v1.TopicEventRequest( + id='123', + topic='TOPIC_A', + data=b'hello2', + source='app1', + data_content_type='text/plain', + type='com.example.type2', + pubsub_name='pubsub', + spec_version='1.0', + ) + yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg2) + + msg3 = appcallback_v1.TopicEventRequest( + id='456', + topic='TOPIC_A', + data=b'{"a": 1}', + source='app1', + data_content_type='application/json', + type='com.example.type2', + pubsub_name='pubsub', + spec_version='1.0', + ) + yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg3) + # Simulate the stream being closed with an error + context.abort(grpc.StatusCode.CANCELLED, 'Stream closed by server') def SaveState(self, request, context): self.check_for_exception(context) diff --git a/tests/clients/test_dapr_grpc_client.py b/tests/clients/test_dapr_grpc_client.py index 3588867e..4ef9e02a 100644 --- a/tests/clients/test_dapr_grpc_client.py +++ b/tests/clients/test_dapr_grpc_client.py @@ -34,9 +34,13 @@ from dapr.clients.grpc._request import TransactionalStateOperation from dapr.clients.grpc._state import StateOptions, Consistency, Concurrency, StateItem from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc._response import (ConfigurationItem, ConfigurationResponse, - ConfigurationWatcher, UnlockResponseStatus, - WorkflowRuntimeStatus, TopicEventResponse, ) +from dapr.clients.grpc._response import ( + ConfigurationItem, + ConfigurationResponse, + ConfigurationWatcher, + UnlockResponseStatus, + WorkflowRuntimeStatus, +) class DaprGrpcClientTests(unittest.TestCase): @@ -263,20 +267,33 @@ def test_subscribe_topic(self): subscription = dapr.subscribe(pubsub_name='pubsub', topic='example') # First message - message1 = subscription.next_message(timeout=5) + message1 = subscription.next_message() subscription.respond_success(message1) - self.assertEqual('123', message1.id) - self.assertEqual(b'hello1', message1.data) - self.assertEqual('TOPIC_A', message1.topic) + self.assertEqual('123', message1.id()) + self.assertEqual('app1', message1.source()) + self.assertEqual('com.example.type2', message1.type()) + self.assertEqual('1.0', message1.spec_version()) + self.assertEqual('text/plain', message1.data_content_type()) + self.assertEqual('TOPIC_A', message1.topic()) + self.assertEqual('pubsub', message1.pubsub_name()) + self.assertEqual(b'hello2', message1.raw_data()) + self.assertEqual('text/plain', message1.data_content_type()) + self.assertEqual('hello2', message1.data()) # Second message - message2 = subscription.next_message(timeout=5) + message2 = subscription.next_message() subscription.respond_success(message2) - self.assertEqual('456', message2.id) - self.assertEqual(b'hello2', message2.data) - self.assertEqual('TOPIC_A', message2.topic) + self.assertEqual('456', message2.id()) + self.assertEqual('app1', 
message2.source()) + self.assertEqual('com.example.type2', message2.type()) + self.assertEqual('1.0', message2.spec_version()) + self.assertEqual('TOPIC_A', message2.topic()) + self.assertEqual('pubsub', message2.pubsub_name()) + self.assertEqual(b'{"a": 1}', message2.raw_data()) + self.assertEqual('application/json', message2.data_content_type()) + self.assertEqual({'a': 1}, message2.data()) def test_subscribe_topic_early_close(self): dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') @@ -284,8 +301,7 @@ def test_subscribe_topic_early_close(self): subscription.close() with self.assertRaises(StreamInactiveError): - subscription.next_message(timeout=5) - + subscription.next_message() @patch.object(settings, 'DAPR_API_TOKEN', 'test-token') def test_dapr_api_token_insertion(self): diff --git a/tox.ini b/tox.ini index e7f9a672..6400e329 100644 --- a/tox.ini +++ b/tox.ini @@ -50,6 +50,7 @@ commands = ./validate.sh metadata ./validate.sh error_handling ./validate.sh pubsub-simple + ./validate.sh pubsub-streaming ./validate.sh state_store ./validate.sh state_store_query ./validate.sh secret_store From aaf4599e99b47eedded95a49ceaeb833f65e3c93 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 23 Sep 2024 01:42:15 +0100 Subject: [PATCH 04/33] example fix Signed-off-by: Elena Kolevska fixes typing Signed-off-by: Elena Kolevska more readable example Signed-off-by: Elena Kolevska linter Signed-off-by: Elena Kolevska --- dapr/clients/grpc/subscription.py | 46 +++++++++++++------------ examples/pubsub-streaming/README.md | 16 ++++----- examples/pubsub-streaming/subscriber.py | 33 ++++++++---------- 3 files changed, 44 insertions(+), 51 deletions(-) diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 1cdf1ef5..5ca30119 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -1,12 +1,13 @@ import json -import grpc +from grpc import StreamStreamMultiCallable, RpcError, StatusCode # type: ignore from dapr.clients.exceptions import StreamInactiveError from dapr.clients.grpc._response import TopicEventResponse from dapr.proto import api_v1, appcallback_v1 import queue import threading +from typing import Optional def success(): @@ -28,11 +29,11 @@ def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=No self.topic = topic self.metadata = metadata or {} self.dead_letter_topic = dead_letter_topic or '' - self._stream = None - self._response_thread = None - self._send_queue = queue.Queue() - self._receive_queue = queue.Queue() - self._stream_active = False + self._stream: Optional[StreamStreamMultiCallable] = None # Type annotation for gRPC stream + self._response_thread: Optional[threading.Thread] = None # Type for thread + self._send_queue: queue.Queue = queue.Queue() # Type annotation for send queue + self._receive_queue: queue.Queue = queue.Queue() # Type annotation for receive queue + self._stream_active: bool = False self._stream_lock = threading.Lock() # Protects _stream_active def start(self): @@ -55,9 +56,8 @@ def outgoing_request_iterator(): # Start sending back acknowledgement messages from the send queue while self._is_stream_active(): try: - response = self._send_queue.get() - # The above blocks until a message is available or the stream is closed - # so that's why we need to check again if the stream is still active + response = self._send_queue.get(timeout=1) + # Check again if the stream is still active if not self._is_stream_active(): break yield response @@ -76,17 +76,19 @@ def 
outgoing_request_iterator(): def _handle_incoming_messages(self): try: - # The first message dapr sends on the stream is for signalling only, so discard it - next(self._stream) - - # Read messages from the stream and put them in the receive queue - for message in self._stream: - if self._is_stream_active(): - self._receive_queue.put(message.event_message) - else: - break - except grpc.RpcError as e: - if e.code() != grpc.StatusCode.CANCELLED: + # Check if the stream is not None + if self._stream is not None: + # The first message dapr sends on the stream is for signalling only, so discard it + next(self._stream) + + # Read messages from the stream and put them in the receive queue + for message in self._stream: + if self._is_stream_active(): + self._receive_queue.put(message.event_message) + else: + break + except RpcError as e: + if e.code() != StatusCode.CANCELLED: print(f'gRPC error in stream: {e.details()}, Status Code: {e.code()}') except Exception as e: raise Exception(f'Error while handling responses: {e}') @@ -157,8 +159,8 @@ def close(self): if self._stream: try: self._stream.cancel() - except grpc.RpcError as e: - if e.code() != grpc.StatusCode.CANCELLED: + except RpcError as e: + if e.code() != StatusCode.CANCELLED: raise Exception(f'Error while closing stream: {e}') except Exception as e: raise Exception(f'Error while closing stream: {e}') diff --git a/examples/pubsub-streaming/README.md b/examples/pubsub-streaming/README.md index 5d80cf0b..f0fe0d93 100644 --- a/examples/pubsub-streaming/README.md +++ b/examples/pubsub-streaming/README.md @@ -27,16 +27,11 @@ Run the following command in a terminal/command prompt: diff --git a/examples/pubsub-streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py index f6f9078a..701f5775 100644 --- a/examples/pubsub-streaming/subscriber.py +++ b/examples/pubsub-streaming/subscriber.py @@ -17,25 +17,20 @@ def main(): try: for i in range(5): - try: - message = subscription.next_message() - if message is None: - print('No message received within timeout period.') - continue - - # Process the message - response_status = process_message(message) - - if response_status == 'success': - subscription.respond_success(message) - elif response_status == 'retry': - subscription.respond_retry(message) - elif response_status == 'drop': - subscription.respond_drop(message) - - except Exception as e: - print(f'Error getting message: {e}') - break + message = subscription.next_message() + if message is None: + print('No message received within timeout period.') + continue + + # Process the message + response_status = process_message(message) + + if response_status == 'success': + subscription.respond_success(message) + elif response_status == 'retry': + subscription.respond_retry(message) + elif response_status == 'drop': + subscription.respond_drop(message) finally: subscription.close() From 2e817b49444d7df735bb62c6beb3329db6958762 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 23 Sep 2024 11:27:25 +0100 Subject: [PATCH 05/33] examples fix Signed-off-by: Elena Kolevska --- examples/invoke-binding/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/invoke-binding/README.md b/examples/invoke-binding/README.md index f1a44417..b95f5d7a 100644 --- a/examples/invoke-binding/README.md +++ b/examples/invoke-binding/README.md @@ -29,7 +29,7 @@ sleep: 30 1. 
Start the kafka containers using docker-compose ```bash -docker-compose -f ./docker-compose-single-kafka.yml up -d +docker compose -f ./docker-compose-single-kafka.yml up -d ``` @@ -91,7 +91,7 @@ dapr stop --app-id receiver For kafka cleanup, run the following code: ```bash -docker-compose -f ./docker-compose-single-kafka.yml down +docker compose -f ./docker-compose-single-kafka.yml down ``` From e34bd9507297c696cbd6caf236befc0d15102fd3 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 23 Sep 2024 13:40:29 +0100 Subject: [PATCH 06/33] Adds support for api token Signed-off-by: Elena Kolevska --- dapr/clients/grpc/interceptors.py | 28 ++++++++++++--- dapr/clients/grpc/subscription.py | 45 ++++++++++++------------- examples/pubsub-streaming/subscriber.py | 2 -- 3 files changed, 46 insertions(+), 29 deletions(-) diff --git a/dapr/clients/grpc/interceptors.py b/dapr/clients/grpc/interceptors.py index 22098f53..adda29c1 100644 --- a/dapr/clients/grpc/interceptors.py +++ b/dapr/clients/grpc/interceptors.py @@ -1,7 +1,7 @@ from collections import namedtuple from typing import List, Tuple -from grpc import UnaryUnaryClientInterceptor, ClientCallDetails # type: ignore +from grpc import UnaryUnaryClientInterceptor, ClientCallDetails, StreamStreamClientInterceptor # type: ignore from dapr.conf import settings @@ -38,7 +38,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): return continuation(client_call_details, request) -class DaprClientInterceptor(UnaryUnaryClientInterceptor): +class DaprClientInterceptor(UnaryUnaryClientInterceptor, StreamStreamClientInterceptor): """The class implements a UnaryUnaryClientInterceptor from grpc to add an interceptor to add additional headers to all calls as needed. @@ -91,8 +91,8 @@ def _intercept_call(self, client_call_details: ClientCallDetails) -> ClientCallD return new_call_details def intercept_unary_unary(self, continuation, client_call_details, request): - """This method intercepts a unary-unary gRPC call. This is the implementation of the - abstract method defined in UnaryUnaryClientInterceptor defined in grpc. This is invoked + """This method intercepts a unary-unary gRPC call. It is the implementation of the + abstract method defined in UnaryUnaryClientInterceptor defined in grpc. It's invoked automatically by grpc based on the order in which interceptors are added to the channel. Args: @@ -108,3 +108,23 @@ def intercept_unary_unary(self, continuation, client_call_details, request): # Call continuation response = continuation(new_call_details, request) return response + + def intercept_stream_stream(self, continuation, client_call_details, request_iterator): + """This method intercepts a stream-stream gRPC call. It is the implementation of the + abstract method defined in StreamStreamClientInterceptor defined in grpc. It's invoked + automatically by grpc based on the order in which interceptors are added to the channel. 
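A minimal sketch of why implementing `StreamStreamClientInterceptor` matters here, assuming (not taken from this patch) that `DaprClientInterceptor` is constructed with a list of `(key, value)` header tuples and that a `dapr-api-token` header is the value being injected: one interceptor instance chained onto the channel now covers both unary-unary calls and the bidirectional subscribe stream.

```python
# Hedged sketch: wiring the interceptor into a gRPC channel so unary-unary and
# stream-stream calls (such as SubscribeTopicEventsAlpha1) share the same headers.
# The list-of-tuples constructor argument and the header value are assumptions.
import grpc
from dapr.clients.grpc.interceptors import DaprClientInterceptor

channel = grpc.insecure_channel('localhost:50001')
interceptor = DaprClientInterceptor([('dapr-api-token', 'my-token')])  # assumed signature
# grpc invokes interceptors in the order they are passed to intercept_channel.
channel = grpc.intercept_channel(channel, interceptor)
```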
+ + Args: + continuation: a callable to be invoked to continue with the RPC or next interceptor + client_call_details: a ClientCallDetails object describing the outgoing RPC + request_iterator: the request value for the RPC + + Returns: + A response object after invoking the continuation callable + """ + # Pre-process or intercept call + + new_call_details = self._intercept_call(client_call_details) + # Call continuation + response = continuation(new_call_details, request_iterator) + return response diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 5ca30119..2c8c18e3 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -1,27 +1,16 @@ import json -from grpc import StreamStreamMultiCallable, RpcError, StatusCode # type: ignore +from grpc import RpcError, StatusCode, Call # type: ignore from dapr.clients.exceptions import StreamInactiveError from dapr.clients.grpc._response import TopicEventResponse +from dapr.clients.health import DaprHealth from dapr.proto import api_v1, appcallback_v1 import queue import threading from typing import Optional -def success(): - return appcallback_v1.TopicEventResponse.SUCCESS - - -def retry(): - return appcallback_v1.TopicEventResponse.RETRY - - -def drop(): - return appcallback_v1.TopicEventResponse.DROP - - class Subscription: def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None): self._stub = stub @@ -29,10 +18,10 @@ def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=No self.topic = topic self.metadata = metadata or {} self.dead_letter_topic = dead_letter_topic or '' - self._stream: Optional[StreamStreamMultiCallable] = None # Type annotation for gRPC stream - self._response_thread: Optional[threading.Thread] = None # Type for thread - self._send_queue: queue.Queue = queue.Queue() # Type annotation for send queue - self._receive_queue: queue.Queue = queue.Queue() # Type annotation for receive queue + self._stream: Optional[Call] = None + self._response_thread: Optional[threading.Thread] = None + self._send_queue: queue.Queue = queue.Queue() + self._receive_queue: queue.Queue = queue.Queue() self._stream_active: bool = False self._stream_lock = threading.Lock() # Protects _stream_active @@ -56,7 +45,7 @@ def outgoing_request_iterator(): # Start sending back acknowledgement messages from the send queue while self._is_stream_active(): try: - response = self._send_queue.get(timeout=1) + response = self._send_queue.get() # Check again if the stream is still active if not self._is_stream_active(): break @@ -75,6 +64,7 @@ def outgoing_request_iterator(): self._response_thread.start() def _handle_incoming_messages(self): + reconnect = False try: # Check if the stream is not None if self._stream is not None: @@ -83,17 +73,26 @@ def _handle_incoming_messages(self): # Read messages from the stream and put them in the receive queue for message in self._stream: - if self._is_stream_active(): - self._receive_queue.put(message.event_message) - else: - break + self._receive_queue.put(message.event_message) except RpcError as e: - if e.code() != StatusCode.CANCELLED: + if e.code() == StatusCode.UNAVAILABLE: + print('Stream unavailable, attempting to reconnect...') + reconnect = True + elif e.code() != StatusCode.CANCELLED: print(f'gRPC error in stream: {e.details()}, Status Code: {e.code()}') + except Exception as e: raise Exception(f'Error while handling responses: {e}') finally: self._set_stream_inactive() + if reconnect: + 
self.reconnect_stream() + + def reconnect_stream(self): + DaprHealth.wait_until_ready() + print('Attempting to reconnect...') + self.close() + self.start() def next_message(self, timeout=None): msg = self.read_message_from_queue(self._receive_queue, timeout=timeout) diff --git a/examples/pubsub-streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py index 701f5775..8b396281 100644 --- a/examples/pubsub-streaming/subscriber.py +++ b/examples/pubsub-streaming/subscriber.py @@ -1,6 +1,4 @@ from dapr.clients import DaprClient -from dapr.clients.grpc._response import TopicEventResponse -from dapr.clients.grpc.subscription import success, retry, drop def process_message(message): From d6ceec206b171aa0b6ae7f2853028fa96f22f607 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Tue, 24 Sep 2024 00:26:23 +0100 Subject: [PATCH 07/33] clean up Signed-off-by: Elena Kolevska --- dapr/clients/grpc/client.py | 12 ++++++++++++ tests/clients/fake_dapr_server.py | 2 -- tests/clients/test_dapr_grpc_client.py | 4 ++-- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index 81e65c7e..63fb9a31 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -483,6 +483,18 @@ def publish_event( return DaprResponse(call.initial_metadata()) def subscribe(self, pubsub_name, topic, metadata=None, dead_letter_topic=None): + """ + Subscribe to a topic with a bidirectional stream + + Args: + pubsub_name (str): The name of the pubsub component. + topic (str): The name of the topic. + metadata (Optional[Dict]): Additional metadata for the subscription. + dead_letter_topic (Optional[str]): Name of the dead-letter topic. + + Returns: + Subscription: The Subscription object managing the stream. 
+ """ subscription = Subscription(self._stub, pubsub_name, topic, metadata, dead_letter_topic) subscription.start() return subscription diff --git a/tests/clients/fake_dapr_server.py b/tests/clients/fake_dapr_server.py index 1080ab30..a910accc 100644 --- a/tests/clients/fake_dapr_server.py +++ b/tests/clients/fake_dapr_server.py @@ -205,8 +205,6 @@ def SubscribeTopicEventsAlpha1(self, request_iterator, context): spec_version='1.0', ) yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg3) - # Simulate the stream being closed with an error - context.abort(grpc.StatusCode.CANCELLED, 'Stream closed by server') def SaveState(self, request, context): self.check_for_exception(context) diff --git a/tests/clients/test_dapr_grpc_client.py b/tests/clients/test_dapr_grpc_client.py index 4ef9e02a..ac411745 100644 --- a/tests/clients/test_dapr_grpc_client.py +++ b/tests/clients/test_dapr_grpc_client.py @@ -266,7 +266,7 @@ def test_subscribe_topic(self): dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') subscription = dapr.subscribe(pubsub_name='pubsub', topic='example') - # First message + # First message - text message1 = subscription.next_message() subscription.respond_success(message1) @@ -281,7 +281,7 @@ def test_subscribe_topic(self): self.assertEqual('text/plain', message1.data_content_type()) self.assertEqual('hello2', message1.data()) - # Second message + # Second message - json message2 = subscription.next_message() subscription.respond_success(message2) From 3cadf3238aa03efab3f65684e4db9cb004b02ead Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Tue, 24 Sep 2024 00:51:28 +0100 Subject: [PATCH 08/33] Adds docs Signed-off-by: Elena Kolevska --- .../en/python-sdk-docs/python-client.md | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/daprdocs/content/en/python-sdk-docs/python-client.md b/daprdocs/content/en/python-sdk-docs/python-client.md index 52d8b2e8..6d646ff0 100644 --- a/daprdocs/content/en/python-sdk-docs/python-client.md +++ b/daprdocs/content/en/python-sdk-docs/python-client.md @@ -255,6 +255,36 @@ def mytopic_important(event: v1.Event) -> None: - For more information about pub/sub, visit [How-To: Publish & subscribe]({{< ref howto-publish-subscribe.md >}}). - Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/pubsub-simple) for code samples and instructions to try out pub/sub. +#### Subscribe to messages with streaming +You can subscribe to messages from a PubSub topic with streaming by using the `subscribe` method. +This method will return a `Subscription` object on which you can call the `next_message` method to +yield messages as they arrive. +When done using the subscription, you should call the `close` method to stop the subscription. + +```python + with DaprClient() as client: + subscription = client.subscribe( + pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + ) + + try: + for i in range(5): + message = subscription.next_message(1) + if message is None: + print('No message received within timeout period.') + continue + + # Process the message + # ... 
+ + # Return the status based on the processing result + subscription.respond_success(message) + # or subscription.respond_retry(message) + # or subscription.respond_drop(message) + + finally: + subscription.close() +``` ### Interact with output bindings From f232478183809996ccee4e873982b2d3cab8fbb6 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Tue, 24 Sep 2024 01:05:32 +0100 Subject: [PATCH 09/33] more small tweaks Signed-off-by: Elena Kolevska --- dapr/clients/exceptions.py | 4 -- dapr/clients/grpc/subscription.py | 12 ++++- .../en/python-sdk-docs/python-client.md | 48 +++++++++++-------- examples/pubsub-streaming/subscriber.py | 15 +++++- tests/clients/test_dapr_grpc_client.py | 7 ++- 5 files changed, 59 insertions(+), 27 deletions(-) diff --git a/dapr/clients/exceptions.py b/dapr/clients/exceptions.py index c872b65a..91bc04a8 100644 --- a/dapr/clients/exceptions.py +++ b/dapr/clients/exceptions.py @@ -132,7 +132,3 @@ def serialize_status_detail(status_detail): if not status_detail: return None return MessageToDict(status_detail, preserving_proto_field_name=True) - - -class StreamInactiveError(Exception): - pass diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 2c8c18e3..b2a88b9d 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -2,7 +2,6 @@ from grpc import RpcError, StatusCode, Call # type: ignore -from dapr.clients.exceptions import StreamInactiveError from dapr.clients.grpc._response import TopicEventResponse from dapr.clients.health import DaprHealth from dapr.proto import api_v1, appcallback_v1 @@ -95,6 +94,13 @@ def reconnect_stream(self): self.start() def next_message(self, timeout=None): + """ + Get the next message from the receive queue. + @param timeout: The time in seconds to wait for a message before returning None. + If None, wait indefinitely. + @return: The next message from the queue, + or None if no message is received within the timeout. + """ msg = self.read_message_from_queue(self._receive_queue, timeout=timeout) if msg is None: @@ -241,3 +247,7 @@ def _parse_data_content(self): except Exception as e: # Log or handle any unexpected exceptions print(f'Error parsing media type: {e}') + + +class StreamInactiveError(Exception): + pass diff --git a/daprdocs/content/en/python-sdk-docs/python-client.md b/daprdocs/content/en/python-sdk-docs/python-client.md index 6d646ff0..900546ed 100644 --- a/daprdocs/content/en/python-sdk-docs/python-client.md +++ b/daprdocs/content/en/python-sdk-docs/python-client.md @@ -263,27 +263,37 @@ When done using the subscription, you should call the `close` method to stop the ```python with DaprClient() as client: - subscription = client.subscribe( - pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' - ) - - try: - for i in range(5): + subscription = client.subscribe( + pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + ) + + try: + i = 0 + while i < 5: + try: message = subscription.next_message(1) - if message is None: - print('No message received within timeout period.') - continue - - # Process the message - # ... - - # Return the status based on the processing result + except StreamInactiveError as e: + print('Stream is inactive. 
Retrying...') + time.sleep(5) + continue + if message is None: + print('No message received within timeout period.') + continue + + # Process the message + response_status = process_message(message) + + if response_status == 'success': subscription.respond_success(message) - # or subscription.respond_retry(message) - # or subscription.respond_drop(message) - - finally: - subscription.close() + elif response_status == 'retry': + subscription.respond_retry(message) + elif response_status == 'drop': + subscription.respond_drop(message) + + i += 1 + + finally: + subscription.close() ``` ### Interact with output bindings diff --git a/examples/pubsub-streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py index 8b396281..c8cc8205 100644 --- a/examples/pubsub-streaming/subscriber.py +++ b/examples/pubsub-streaming/subscriber.py @@ -1,4 +1,7 @@ +import time + from dapr.clients import DaprClient +from dapr.clients.grpc.subscription import StreamInactiveError def process_message(message): @@ -14,8 +17,14 @@ def main(): ) try: - for i in range(5): - message = subscription.next_message() + i = 0 + while i < 5: + try: + message = subscription.next_message(1) + except StreamInactiveError as e: + print('Stream is inactive. Retrying...') + time.sleep(5) + continue if message is None: print('No message received within timeout period.') continue @@ -30,6 +39,8 @@ def main(): elif response_status == 'drop': subscription.respond_drop(message) + i += 1 + finally: subscription.close() diff --git a/tests/clients/test_dapr_grpc_client.py b/tests/clients/test_dapr_grpc_client.py index ac411745..019ea84f 100644 --- a/tests/clients/test_dapr_grpc_client.py +++ b/tests/clients/test_dapr_grpc_client.py @@ -24,9 +24,10 @@ from google.rpc import status_pb2, code_pb2 -from dapr.clients.exceptions import DaprGrpcError, StreamInactiveError +from dapr.clients.exceptions import DaprGrpcError from dapr.clients.grpc.client import DaprGrpcClient from dapr.clients import DaprClient +from dapr.clients.grpc.subscription import StreamInactiveError from dapr.proto import common_v1 from .fake_dapr_server import FakeDaprSidecar from dapr.conf import settings @@ -295,6 +296,10 @@ def test_subscribe_topic(self): self.assertEqual('application/json', message2.data_content_type()) self.assertEqual({'a': 1}, message2.data()) + # Third call with timeout + message3 = subscription.next_message(1) + self.assertIsNone(message3) + def test_subscribe_topic_early_close(self): dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') subscription = dapr.subscribe(pubsub_name='pubsub', topic='example') From cc318b600f4a9a1aac9957d359618ddc71265d36 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Sat, 28 Sep 2024 22:50:18 +0100 Subject: [PATCH 10/33] cleanups and tests Signed-off-by: Elena Kolevska --- dapr/clients/grpc/subscription.py | 10 +++- examples/pubsub-streaming/publisher.py | 1 + examples/pubsub-streaming/subscriber.py | 2 +- tests/clients/test_subscription.py | 69 +++++++++++++++++++++++++ 4 files changed, 79 insertions(+), 3 deletions(-) create mode 100644 tests/clients/test_subscription.py diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index b2a88b9d..efdbebfc 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -177,7 +177,8 @@ def close(self): class SubscriptionMessage: - def __init__(self, msg): + + def __init__(self, msg: TopicEventRequest): self._id = msg.id self._source = msg.source self._type = msg.type @@ -188,6 +189,11 @@ def 
__init__(self, msg): self._raw_data = msg.data self._extensions = msg.extensions self._data = None + try: + self._extensions = MessageToDict(msg.extensions) + except Exception as e: + self._extensions = {} + print(f'Error parsing extensions: {e}') # Parse the content based on its media type if self._raw_data and len(self._raw_data) > 0: @@ -235,7 +241,7 @@ def _parse_data_content(self): try: self._data = self._raw_data.decode('utf-8') except UnicodeDecodeError: - pass + print(f'Error decoding message data from topic {self._topic} as UTF-8') elif self._data_content_type.startswith( 'application/' ) and self._data_content_type.endswith('+json'): diff --git a/examples/pubsub-streaming/publisher.py b/examples/pubsub-streaming/publisher.py index f7af0f10..9c18ac3c 100644 --- a/examples/pubsub-streaming/publisher.py +++ b/examples/pubsub-streaming/publisher.py @@ -28,6 +28,7 @@ topic_name='TOPIC_A', data=json.dumps(req_data), data_content_type='application/json', + publish_metadata={'ttlInSeconds': '100', 'rawPayload': 'false'} ) # Print the request diff --git a/examples/pubsub-streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py index c8cc8205..2f8fcf00 100644 --- a/examples/pubsub-streaming/subscriber.py +++ b/examples/pubsub-streaming/subscriber.py @@ -19,6 +19,7 @@ def main(): try: i = 0 while i < 5: + i += 1 try: message = subscription.next_message(1) except StreamInactiveError as e: @@ -39,7 +40,6 @@ def main(): elif response_status == 'drop': subscription.respond_drop(message) - i += 1 finally: subscription.close() diff --git a/tests/clients/test_subscription.py b/tests/clients/test_subscription.py new file mode 100644 index 00000000..253f4454 --- /dev/null +++ b/tests/clients/test_subscription.py @@ -0,0 +1,69 @@ +from dapr.clients.grpc.subscription import SubscriptionMessage +from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest +from google.protobuf.struct_pb2 import Struct + +import unittest + + +class SubscriptionMessageTests(unittest.TestCase): + def test_subscription_message_init_raw_text(self): + extensions = Struct() + extensions["field1"] = "value1" + extensions["field2"] = 42 + extensions["field3"] = True + + msg = TopicEventRequest(id='id', data=b'hello', data_content_type='text/plain', + topic='topicA', pubsub_name='pubsub_name', source='source', + type='type', spec_version='spec_version', path='path', + extensions=extensions) + subscription_message = SubscriptionMessage(msg=msg) + + self.assertEqual('id', subscription_message.id()) + self.assertEqual('source', subscription_message.source()) + self.assertEqual('type', subscription_message.type()) + self.assertEqual('spec_version', subscription_message.spec_version()) + self.assertEqual('text/plain', subscription_message.data_content_type()) + self.assertEqual('topicA', subscription_message.topic()) + self.assertEqual('pubsub_name', subscription_message.pubsub_name()) + self.assertEqual(b'hello', subscription_message.raw_data()) + self.assertEqual('hello', subscription_message.data()) + self.assertEqual({'field1': 'value1', "field2": 42, "field3": True}, + subscription_message.extensions()) + + def test_subscription_message_init_raw_text_non_utf(self): + msg = TopicEventRequest(id='id', data=b'\x80\x81\x82', data_content_type='text/plain', + topic='topicA', pubsub_name='pubsub_name', source='source', + type='type', spec_version='spec_version', path='path') + subscription_message = SubscriptionMessage(msg=msg) + + self.assertEqual(b'\x80\x81\x82', subscription_message.raw_data()) + 
self.assertIsNone(subscription_message.data()) + + def test_subscription_message_init_json(self): + msg = TopicEventRequest(id='id', data=b'{"a": 1}', data_content_type='application/json', + topic='topicA', pubsub_name='pubsub_name', source='source', + type='type', spec_version='spec_version', path='path') + subscription_message = SubscriptionMessage(msg=msg) + + self.assertEqual(b'{"a": 1}', subscription_message.raw_data()) + self.assertEqual({"a": 1}, subscription_message.data()) + print(subscription_message.data()["a"]) + + def test_subscription_message_init_json_faimly(self): + msg = TopicEventRequest(id='id', data=b'{"a": 1}', + data_content_type='application/vnd.api+json', topic='topicA', + pubsub_name='pubsub_name', source='source', type='type', + spec_version='spec_version', path='path') + subscription_message = SubscriptionMessage(msg=msg) + + self.assertEqual(b'{"a": 1}', subscription_message.raw_data()) + self.assertEqual({"a": 1}, subscription_message.data()) + + def test_subscription_message_init_unknown_content_type(self): + msg = TopicEventRequest(id='id', data=b'{"a": 1}', data_content_type='unknown/content-type', + topic='topicA', pubsub_name='pubsub_name', source='source', + type='type', spec_version='spec_version', path='path') + subscription_message = SubscriptionMessage(msg=msg) + + self.assertEqual(b'{"a": 1}', subscription_message.raw_data()) + self.assertIsNone(subscription_message.data()) From e75436ceb973a17041f9f5ef0131a379f1a52587 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Sun, 29 Sep 2024 00:07:31 +0100 Subject: [PATCH 11/33] Removes receive queue Signed-off-by: Elena Kolevska --- dapr/clients/grpc/client.py | 16 +++- dapr/clients/grpc/subscription.py | 116 +++++++++--------------- examples/pubsub-streaming/subscriber.py | 4 +- tests/clients/fake_dapr_server.py | 28 ++++-- tests/clients/test_dapr_grpc_client.py | 26 +++++- 5 files changed, 100 insertions(+), 90 deletions(-) diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index 63fb9a31..590279be 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -482,20 +482,30 @@ def publish_event( return DaprResponse(call.initial_metadata()) - def subscribe(self, pubsub_name, topic, metadata=None, dead_letter_topic=None): + def subscribe(self, + pubsub_name: str, + topic: str, + metadata: Optional[MetadataTuple] = None, + dead_letter_topic: Optional[str] = None, + timeout: Optional[int] = None + ) -> Subscription: """ Subscribe to a topic with a bidirectional stream Args: pubsub_name (str): The name of the pubsub component. topic (str): The name of the topic. - metadata (Optional[Dict]): Additional metadata for the subscription. + metadata (Optional[MetadataTuple]): Additional metadata for the subscription. dead_letter_topic (Optional[str]): Name of the dead-letter topic. + timeout (Optional[int]): The time in seconds to wait for a message before returning None + If not set, the `next_message` method will block indefinitely + until a message is received. Returns: Subscription: The Subscription object managing the stream. 
""" - subscription = Subscription(self._stub, pubsub_name, topic, metadata, dead_letter_topic) + subscription = Subscription(self._stub, pubsub_name, topic, metadata, dead_letter_topic, + timeout) subscription.start() return subscription diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index efdbebfc..4453d7fb 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -1,5 +1,6 @@ import json +from google.protobuf.json_format import MessageToDict from grpc import RpcError, StatusCode, Call # type: ignore from dapr.clients.grpc._response import TopicEventResponse @@ -9,9 +10,11 @@ import threading from typing import Optional +from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest + class Subscription: - def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None): + def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None, timeout=None): self._stub = stub self.pubsub_name = pubsub_name self.topic = topic @@ -20,34 +23,30 @@ def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=No self._stream: Optional[Call] = None self._response_thread: Optional[threading.Thread] = None self._send_queue: queue.Queue = queue.Queue() - self._receive_queue: queue.Queue = queue.Queue() self._stream_active: bool = False self._stream_lock = threading.Lock() # Protects _stream_active + self._timeout = timeout def start(self): def outgoing_request_iterator(): """ - Generator function to create the request iterator for the stream + Generator function to create the request iterator for the stream. + This sends the initial request to establish the stream. """ try: # Send InitialRequest needed to establish the stream initial_request = api_v1.SubscribeTopicEventsRequestAlpha1( initial_request=api_v1.SubscribeTopicEventsRequestInitialAlpha1( - pubsub_name=self.pubsub_name, - topic=self.topic, + pubsub_name=self.pubsub_name, topic=self.topic, metadata=self.metadata or {}, - dead_letter_topic=self.dead_letter_topic or '', - ) - ) + dead_letter_topic=self.dead_letter_topic or '', )) yield initial_request # Start sending back acknowledgement messages from the send queue - while self._is_stream_active(): + while self._is_stream_active(): # TODO check if this is correct try: - response = self._send_queue.get() - # Check again if the stream is still active - if not self._is_stream_active(): - break + # Wait for responses/acknowledgements to send from the send queue. 
+ response = self._send_queue.get() # TODO check timeout yield response except queue.Empty: continue @@ -55,58 +54,49 @@ def outgoing_request_iterator(): raise Exception(f'Error in request iterator: {e}') # Create the bidirectional stream - self._stream = self._stub.SubscribeTopicEventsAlpha1(outgoing_request_iterator()) + self._stream = self._stub.SubscribeTopicEventsAlpha1(outgoing_request_iterator(), + timeout=self._timeout) self._set_stream_active() - - # Start a thread to handle incoming messages - self._response_thread = threading.Thread(target=self._handle_incoming_messages, daemon=True) - self._response_thread.start() - - def _handle_incoming_messages(self): - reconnect = False - try: - # Check if the stream is not None - if self._stream is not None: - # The first message dapr sends on the stream is for signalling only, so discard it - next(self._stream) - - # Read messages from the stream and put them in the receive queue - for message in self._stream: - self._receive_queue.put(message.event_message) - except RpcError as e: - if e.code() == StatusCode.UNAVAILABLE: - print('Stream unavailable, attempting to reconnect...') - reconnect = True - elif e.code() != StatusCode.CANCELLED: - print(f'gRPC error in stream: {e.details()}, Status Code: {e.code()}') - - except Exception as e: - raise Exception(f'Error while handling responses: {e}') - finally: - self._set_stream_inactive() - if reconnect: - self.reconnect_stream() + next(self._stream) # discard the initial message def reconnect_stream(self): + self.close() DaprHealth.wait_until_ready() print('Attempting to reconnect...') - self.close() self.start() - def next_message(self, timeout=None): + def next_message(self): """ Get the next message from the receive queue. - @param timeout: The time in seconds to wait for a message before returning None. - If None, wait indefinitely. @return: The next message from the queue, or None if no message is received within the timeout. 
""" - msg = self.read_message_from_queue(self._receive_queue, timeout=timeout) + if not self._is_stream_active(): + raise StreamInactiveError("Stream is not active") - if msg is None: - return None + try: + # Read the next message from the stream directly + if self._stream is not None: + message = next(self._stream, None) + if message is None: + return None + return SubscriptionMessage(message.event_message) + except RpcError as e: + if e.code() == StatusCode.UNAVAILABLE: + print( + f'gRPC error while reading from stream: {e.details()}, Status Code: {e.code()}') + self.reconnect_stream() + elif e.code() == StatusCode.DEADLINE_EXCEEDED: + # A message hasn't been received on the stream in `self._timeout` seconds + # so return control back to app + return None + elif e.code() != StatusCode.CANCELLED: + raise Exception(f'gRPC error while reading from subscription stream: {e.details()} ' + f'Status Code: {e.code()}') + except Exception as e: + raise Exception(f'Error while fetching message: {e}') - return SubscriptionMessage(msg) + return None def _respond(self, message, status): try: @@ -115,8 +105,9 @@ def _respond(self, message, status): id=message.id(), status=status ) msg = api_v1.SubscribeTopicEventsRequestAlpha1(event_processed=response) - - self.send_message_to_queue(self._send_queue, msg) + if not self._is_stream_active(): + raise StreamInactiveError('Stream is not active') + self._send_queue.put(msg) except Exception as e: print(f'Exception in send_message: {e}') @@ -129,19 +120,6 @@ def respond_retry(self, message): def respond_drop(self, message): self._respond(message, TopicEventResponse('drop').status) - def send_message_to_queue(self, q, message): - if not self._is_stream_active(): - raise StreamInactiveError('Stream is not active') - q.put(message) - - def read_message_from_queue(self, q, timeout=None): - if not self._is_stream_active(): - raise StreamInactiveError('Stream is not active') - try: - return q.get(timeout=timeout) - except queue.Empty: - return None - def _set_stream_active(self): with self._stream_lock: self._stream_active = True @@ -160,7 +138,6 @@ def close(self): self._set_stream_inactive() - # Cancel the stream if self._stream: try: self._stream.cancel() @@ -170,11 +147,6 @@ def close(self): except Exception as e: raise Exception(f'Error while closing stream: {e}') - # Join the response-handling thread to ensure it has finished - if self._response_thread: - self._response_thread.join() - self._response_thread = None - class SubscriptionMessage: diff --git a/examples/pubsub-streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py index 2f8fcf00..476da9e7 100644 --- a/examples/pubsub-streaming/subscriber.py +++ b/examples/pubsub-streaming/subscriber.py @@ -13,7 +13,7 @@ def process_message(message): def main(): with DaprClient() as client: subscription = client.subscribe( - pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD', timeout=2 ) try: @@ -21,7 +21,7 @@ def main(): while i < 5: i += 1 try: - message = subscription.next_message(1) + message = subscription.next_message() except StreamInactiveError as e: print('Stream is inactive. 
Retrying...') time.sleep(5) diff --git a/tests/clients/fake_dapr_server.py b/tests/clients/fake_dapr_server.py index a910accc..8e75cb27 100644 --- a/tests/clients/fake_dapr_server.py +++ b/tests/clients/fake_dapr_server.py @@ -3,7 +3,8 @@ from concurrent import futures from google.protobuf.any_pb2 import Any as GrpcAny -from google.protobuf import empty_pb2 +from google.protobuf import empty_pb2, struct_pb2 +from google.rpc import status_pb2, code_pb2 from grpc_status import rpc_status from dapr.clients.grpc._helpers import to_bytes @@ -182,29 +183,38 @@ def SubscribeTopicEventsAlpha1(self, request_iterator, context): initial_response=api_v1.SubscribeTopicEventsResponseInitialAlpha1() ) - msg2 = appcallback_v1.TopicEventRequest( - id='123', + extensions = struct_pb2.Struct() + extensions["field1"] = "value1" + extensions["field2"] = 42 + extensions["field3"] = True + + msg1 = appcallback_v1.TopicEventRequest( + id='111', topic='TOPIC_A', data=b'hello2', source='app1', data_content_type='text/plain', type='com.example.type2', pubsub_name='pubsub', - spec_version='1.0', + spec_version='1.0', extensions=extensions ) - yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg2) + yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg1) - msg3 = appcallback_v1.TopicEventRequest( - id='456', + msg2 = appcallback_v1.TopicEventRequest( + id='222', topic='TOPIC_A', data=b'{"a": 1}', source='app1', data_content_type='application/json', type='com.example.type2', pubsub_name='pubsub', - spec_version='1.0', + spec_version='1.0', extensions=extensions ) - yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg3) + yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg2) + + # On the third message simulate a disconnection + status = status_pb2.Status(code=code_pb2.UNAVAILABLE, message='Simulated disconnection') + context.abort_with_status(rpc_status.to_status(status)) def SaveState(self, request, context): self.check_for_exception(context) diff --git a/tests/clients/test_dapr_grpc_client.py b/tests/clients/test_dapr_grpc_client.py index 019ea84f..6ab4861e 100644 --- a/tests/clients/test_dapr_grpc_client.py +++ b/tests/clients/test_dapr_grpc_client.py @@ -264,6 +264,9 @@ def test_publish_error(self): ) def test_subscribe_topic(self): + # The fake server we're using sends two messages and then closes the stream + # The client should be able to read both messages, handle the stream closure and reconnect + # which will result in the reading the same two messages again dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') subscription = dapr.subscribe(pubsub_name='pubsub', topic='example') @@ -271,7 +274,7 @@ def test_subscribe_topic(self): message1 = subscription.next_message() subscription.respond_success(message1) - self.assertEqual('123', message1.id()) + self.assertEqual('111', message1.id()) self.assertEqual('app1', message1.source()) self.assertEqual('com.example.type2', message1.type()) self.assertEqual('1.0', message1.spec_version()) @@ -286,7 +289,7 @@ def test_subscribe_topic(self): message2 = subscription.next_message() subscription.respond_success(message2) - self.assertEqual('456', message2.id()) + self.assertEqual('222', message2.id()) self.assertEqual('app1', message2.source()) self.assertEqual('com.example.type2', message2.type()) self.assertEqual('1.0', message2.spec_version()) @@ -296,10 +299,25 @@ def test_subscribe_topic(self): self.assertEqual('application/json', message2.data_content_type()) self.assertEqual({'a': 1}, message2.data()) 
- # Third call with timeout - message3 = subscription.next_message(1) + # On this call the stream will be closed and return an error, so the message will be none + # but the client will try to reconnect + message3 = subscription.next_message() self.assertIsNone(message3) + # The client already reconnected and will start reading the messages again + # Since we're working with a fake server, the messages will be the same + message4 = subscription.next_message() + self.assertEqual('111', message4.id()) + self.assertEqual('app1', message4.source()) + self.assertEqual('com.example.type2', message4.type()) + self.assertEqual('1.0', message4.spec_version()) + self.assertEqual('text/plain', message4.data_content_type()) + self.assertEqual('TOPIC_A', message4.topic()) + self.assertEqual('pubsub', message4.pubsub_name()) + self.assertEqual(b'hello2', message4.raw_data()) + self.assertEqual('text/plain', message4.data_content_type()) + self.assertEqual('hello2', message4.data()) + def test_subscribe_topic_early_close(self): dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') subscription = dapr.subscribe(pubsub_name='pubsub', topic='example') From bcf116b8bc4be3dd7087346fce6b7f1425b1a15d Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 30 Sep 2024 15:55:07 +0100 Subject: [PATCH 12/33] Adds `subscribe_with_handler` Signed-off-by: Elena Kolevska --- dapr/clients/grpc/client.py | 67 ++++++-- dapr/clients/grpc/subscription.py | 87 +++++----- .../en/python-sdk-docs/python-client.md | 152 +++++++++++++----- examples/pubsub-streaming/README.md | 61 ++++++- .../pubsub-streaming/subscriber-handler.py | 36 +++++ examples/pubsub-streaming/subscriber.py | 19 ++- tests/clients/fake_dapr_server.py | 12 +- tests/clients/test_dapr_grpc_client.py | 64 +++++++- tests/clients/test_subscription.py | 90 ++++++++--- 9 files changed, 454 insertions(+), 134 deletions(-) create mode 100644 examples/pubsub-streaming/subscriber-handler.py diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index 590279be..1af81e90 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -12,7 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. """ - +import threading import time import socket import json @@ -41,7 +41,7 @@ from dapr.clients.grpc._state import StateOptions, StateItem from dapr.clients.grpc._helpers import getWorkflowRuntimeStatus from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc.subscription import Subscription +from dapr.clients.grpc.subscription import Subscription, StreamInactiveError from dapr.clients.grpc.interceptors import DaprClientInterceptor, DaprClientTimeoutInterceptor from dapr.clients.health import DaprHealth from dapr.clients.retry import RetryPolicy @@ -86,6 +86,8 @@ StartWorkflowResponse, EncryptResponse, DecryptResponse, + TopicEventResponseStatus, + TopicEventResponse, ) @@ -482,12 +484,12 @@ def publish_event( return DaprResponse(call.initial_metadata()) - def subscribe(self, - pubsub_name: str, - topic: str, - metadata: Optional[MetadataTuple] = None, - dead_letter_topic: Optional[str] = None, - timeout: Optional[int] = None + def subscribe( + self, + pubsub_name: str, + topic: str, + metadata: Optional[MetadataTuple] = None, + dead_letter_topic: Optional[str] = None, ) -> Subscription: """ Subscribe to a topic with a bidirectional stream @@ -504,11 +506,56 @@ def subscribe(self, Returns: Subscription: The Subscription object managing the stream. 
""" - subscription = Subscription(self._stub, pubsub_name, topic, metadata, dead_letter_topic, - timeout) + subscription = Subscription(self._stub, pubsub_name, topic, metadata, dead_letter_topic) subscription.start() return subscription + def subscribe_with_handler( + self, + pubsub_name: str, + topic: str, + handler_fn: Callable[..., TopicEventResponse], + metadata: Optional[MetadataTuple] = None, + dead_letter_topic: Optional[str] = None, + ) -> Callable: + """ + Subscribe to a topic with a bidirectional stream and a message handler function + + Args: + pubsub_name (str): The name of the pubsub component. + topic (str): The name of the topic. + handler_fn (Callable[..., TopicEventResponseStatus]): The function to call when a message is received. + metadata (Optional[MetadataTuple]): Additional metadata for the subscription. + dead_letter_topic (Optional[str]): Name of the dead-letter topic. + timeout (Optional[int]): The time in seconds to wait for a message before returning None + If not set, the `next_message` method will block indefinitely + until a message is received. + """ + subscription = self.subscribe(pubsub_name, topic, metadata, dead_letter_topic) + + def stream_messages(sub): + while True: + try: + message = sub.next_message() + if message: + # Process the message + response = handler_fn(message) + if response: + subscription._respond(message, response) + else: + # No message received + continue + except StreamInactiveError: + break + + def close_subscription(): + subscription.close() + + streaming_thread = threading.Thread(target=stream_messages, args=(subscription,)) + streaming_thread.start() + + return close_subscription + def get_state( self, store_name: str, diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 4453d7fb..053194ad 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -8,24 +8,27 @@ from dapr.proto import api_v1, appcallback_v1 import queue import threading -from typing import Optional +from typing import Optional, Union from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest class Subscription: - def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None, timeout=None): + SUCCESS = TopicEventResponse('success').status + RETRY = TopicEventResponse('retry').status + DROP = TopicEventResponse('drop').status + + def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None): self._stub = stub - self.pubsub_name = pubsub_name - self.topic = topic - self.metadata = metadata or {} - self.dead_letter_topic = dead_letter_topic or '' + self._pubsub_name = pubsub_name + self._topic = topic + self._metadata = metadata or {} + self._dead_letter_topic = dead_letter_topic or '' self._stream: Optional[Call] = None self._response_thread: Optional[threading.Thread] = None self._send_queue: queue.Queue = queue.Queue() self._stream_active: bool = False self._stream_lock = threading.Lock() # Protects _stream_active - self._timeout = timeout def start(self): def outgoing_request_iterator(): @@ -37,25 +40,27 @@ def outgoing_request_iterator(): # Send InitialRequest needed to establish the stream initial_request = api_v1.SubscribeTopicEventsRequestAlpha1( initial_request=api_v1.SubscribeTopicEventsRequestInitialAlpha1( - pubsub_name=self.pubsub_name, topic=self.topic, - metadata=self.metadata or {}, - dead_letter_topic=self.dead_letter_topic or '', )) + pubsub_name=self._pubsub_name, + topic=self._topic, + metadata=self._metadata or {}, + 
dead_letter_topic=self._dead_letter_topic or '', + ) + ) yield initial_request # Start sending back acknowledgement messages from the send queue - while self._is_stream_active(): # TODO check if this is correct + while self._is_stream_active(): try: # Wait for responses/acknowledgements to send from the send queue. - response = self._send_queue.get() # TODO check timeout + response = self._send_queue.get() yield response except queue.Empty: continue except Exception as e: - raise Exception(f'Error in request iterator: {e}') + raise Exception(f'Error while writing to stream: {e}') # Create the bidirectional stream - self._stream = self._stub.SubscribeTopicEventsAlpha1(outgoing_request_iterator(), - timeout=self._timeout) + self._stream = self._stub.SubscribeTopicEventsAlpha1(outgoing_request_iterator()) self._set_stream_active() next(self._stream) # discard the initial message @@ -72,7 +77,7 @@ def next_message(self): or None if no message is received within the timeout. """ if not self._is_stream_active(): - raise StreamInactiveError("Stream is not active") + raise StreamInactiveError('Stream is not active') try: # Read the next message from the stream directly @@ -84,15 +89,14 @@ def next_message(self): except RpcError as e: if e.code() == StatusCode.UNAVAILABLE: print( - f'gRPC error while reading from stream: {e.details()}, Status Code: {e.code()}') + f'gRPC error while reading from stream: {e.details()}, Status Code: {e.code()}' + ) self.reconnect_stream() - elif e.code() == StatusCode.DEADLINE_EXCEEDED: - # A message hasn't been received on the stream in `self._timeout` seconds - # so return control back to app - return None elif e.code() != StatusCode.CANCELLED: - raise Exception(f'gRPC error while reading from subscription stream: {e.details()} ' - f'Status Code: {e.code()}') + raise Exception( + f'gRPC error while reading from subscription stream: {e.details()} ' + f'Status Code: {e.code()}' + ) except Exception as e: raise Exception(f'Error while fetching message: {e}') @@ -109,16 +113,16 @@ def _respond(self, message, status): raise StreamInactiveError('Stream is not active') self._send_queue.put(msg) except Exception as e: - print(f'Exception in send_message: {e}') + print(f"Can't send message on inactive stream: {e}") def respond_success(self, message): - self._respond(message, TopicEventResponse('success').status) + self._respond(message, self.SUCCESS) def respond_retry(self, message): - self._respond(message, TopicEventResponse('retry').status) + self._respond(message, self.RETRY) def respond_drop(self, message): - self._respond(message, TopicEventResponse('drop').status) + self._respond(message, self.DROP) def _set_stream_active(self): with self._stream_lock: @@ -133,14 +137,10 @@ def _is_stream_active(self): return self._stream_active def close(self): - if not self._is_stream_active(): - return - - self._set_stream_inactive() - if self._stream: try: self._stream.cancel() + self._set_stream_inactive() except RpcError as e: if e.code() != StatusCode.CANCELLED: raise Exception(f'Error while closing stream: {e}') @@ -149,18 +149,17 @@ def close(self): class SubscriptionMessage: - def __init__(self, msg: TopicEventRequest): - self._id = msg.id - self._source = msg.source - self._type = msg.type - self._spec_version = msg.spec_version - self._data_content_type = msg.data_content_type - self._topic = msg.topic - self._pubsub_name = msg.pubsub_name - self._raw_data = msg.data - self._extensions = msg.extensions - self._data = None + self._id: str = msg.id + self._source: str = 
msg.source
+        self._type: str = msg.type
+        self._spec_version: str = msg.spec_version
+        self._data_content_type: str = msg.data_content_type
+        self._topic: str = msg.topic
+        self._pubsub_name: str = msg.pubsub_name
+        self._raw_data: bytes = msg.data
+        self._data: Optional[Union[dict, str]] = None
+
         try:
             self._extensions = MessageToDict(msg.extensions)
         except Exception as e:
@@ -207,6 +206,7 @@ def _parse_data_content(self):
                 try:
                     self._data = json.loads(self._raw_data)
                 except json.JSONDecodeError:
+                    print(f'Error parsing json message data from topic {self._topic}')
                     pass  # If JSON parsing fails, keep `data` as None
             elif self._data_content_type == 'text/plain':
                 # Assume UTF-8 encoding
@@ -221,6 +221,7 @@ def _parse_data_content(self):
                 try:
                     self._data = json.loads(self._raw_data)
                 except json.JSONDecodeError:
+                    print(f'Error parsing json message data from topic {self._topic}')
                     pass  # If JSON parsing fails, keep `data` as None
         except Exception as e:
             # Log or handle any unexpected exceptions
diff --git a/daprdocs/content/en/python-sdk-docs/python-client.md b/daprdocs/content/en/python-sdk-docs/python-client.md
index 900546ed..4f51f945 100644
--- a/daprdocs/content/en/python-sdk-docs/python-client.md
+++ b/daprdocs/content/en/python-sdk-docs/python-client.md
@@ -255,45 +255,125 @@ def mytopic_important(event: v1.Event) -> None:
 - For more information about pub/sub, visit [How-To: Publish & subscribe]({{< ref howto-publish-subscribe.md >}}).
 - Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/pubsub-simple) for code samples and instructions to try out pub/sub.
 
-#### Subscribe to messages with streaming
-You can subscribe to messages from a PubSub topic with streaming by using the `subscribe` method.
-This method will return a `Subscription` object on which you can call the `next_message` method to
-yield messages as they arrive.
-When done using the subscription, you should call the `close` method to stop the subscription.
+#### Streaming message subscription
+
+You can create a streaming subscription to a PubSub topic using either the `subscribe`
+or `subscribe_with_handler` method.
+
+The `subscribe` method returns a `Subscription` object, which allows you to pull messages from the
+stream by calling the `next_message` method. This call blocks on the main thread while waiting for
+messages. When done, you should call the `close` method to terminate the subscription and stop
+receiving messages.
+
+The `subscribe_with_handler` method accepts a callback function that is executed for each message
+received from the stream. It runs in a separate thread, so it doesn't block the main thread. The
+callback should return a `TopicEventResponseStatus`, indicating whether the message was processed
+successfully, should be retried, or should be discarded. You can return these statuses using the
+`Subscription.SUCCESS`, `Subscription.RETRY`, and `Subscription.DROP` class properties. The method
+will automatically manage message acknowledgments based on the returned status. When done, the
+subscription will close automatically, and you don't need to stop it manually.
+
+The call to the `subscribe_with_handler` method returns a close function, which should be called to
+terminate the subscription when you're done.
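+
+Each message delivered on the stream is a `SubscriptionMessage`. As a quick sketch, for illustration
+only (the `message` variable below is assumed to have been obtained from `subscription.next_message()`,
+and the accessors are the ones defined on `SubscriptionMessage` in this patch), you can inspect a
+received message like this:
+
+```python
+# `message` is assumed to be a SubscriptionMessage returned by subscription.next_message()
+print(message.id())                 # CloudEvent id
+print(message.topic())              # topic the message arrived on
+print(message.pubsub_name())        # pubsub component that delivered it
+print(message.data_content_type())  # e.g. 'application/json' or 'text/plain'
+print(message.data())               # parsed payload: dict for JSON, str for text/plain
+print(message.raw_data())           # unparsed payload bytes
+print(message.extensions())         # CloudEvent extension attributes as a dict
+```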
+ +Here's an example of using the `subscribe` method: ```python +import time + +from dapr.clients import DaprClient +from dapr.clients.grpc.subscription import StreamInactiveError + +counter = 0 + + +def process_message(message): + global counter + counter += 1 + # Process the message here + print(f'Processing message: {message.data()} from {message.topic()}...') + return 'success' + + +def main(): with DaprClient() as client: - subscription = client.subscribe( - pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' - ) - - try: - i = 0 - while i < 5: - try: - message = subscription.next_message(1) - except StreamInactiveError as e: - print('Stream is inactive. Retrying...') - time.sleep(5) - continue - if message is None: - print('No message received within timeout period.') - continue - - # Process the message - response_status = process_message(message) - - if response_status == 'success': - subscription.respond_success(message) - elif response_status == 'retry': - subscription.respond_retry(message) - elif response_status == 'drop': - subscription.respond_drop(message) - - i += 1 - - finally: - subscription.close() + global counter + + subscription = client.subscribe( + pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + ) + + try: + while counter < 5: + try: + message = subscription.next_message() + + except StreamInactiveError as e: + print('Stream is inactive. Retrying...') + time.sleep(1) + continue + if message is None: + print('No message received within timeout period.') + continue + + # Process the message + response_status = process_message(message) + + if response_status == 'success': + subscription.respond_success(message) + elif response_status == 'retry': + subscription.respond_retry(message) + elif response_status == 'drop': + subscription.respond_drop(message) + + finally: + print("Closing subscription...") + subscription.close() + + +if __name__ == '__main__': + main() +``` + +And here's an example of using the `subscribe_with_handler` method: + +```python +import time + +from dapr.clients import DaprClient +from dapr.clients.grpc.subscription import Subscription + +counter = 0 + + +def process_message(message): + # Process the message here + global counter + counter += 1 + print(f'Processing message: {message.data()} from {message.topic()}...') + return Subscription.SUCCESS + + +def main(): + with (DaprClient() as client): + # This will start a new thread that will listen for messages + # and process them in the `process_message` function + close_fn = client.subscribe_with_handler( + pubsub_name='pubsub', topic='TOPIC_A', handler_fn=process_message, + dead_letter_topic='TOPIC_A_DEAD' + ) + + while counter < 5: + time.sleep(1) + + print("Closing subscription...") + close_fn() + + +if __name__ == '__main__': + main() ``` ### Interact with output bindings diff --git a/examples/pubsub-streaming/README.md b/examples/pubsub-streaming/README.md index f0fe0d93..4849e791 100644 --- a/examples/pubsub-streaming/README.md +++ b/examples/pubsub-streaming/README.md @@ -20,18 +20,19 @@ In the s`subscriber.py` file it creates a subscriber object that can call the `n pip3 install dapr ``` -## Run the example +## Run example where users control reading messages off the stream Run the following command in a terminal/command prompt: +## Run example with a handler function + +Run the following command in a terminal/command prompt: + + + +```bash +# 1. 
Start Subscriber +dapr run --app-id python-subscriber --app-protocol grpc python3 subscriber-handler.py +``` + + + +In another terminal/command prompt run: + + + +```bash +# 2. Start Publisher +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +``` + + + ## Cleanup diff --git a/examples/pubsub-streaming/subscriber-handler.py b/examples/pubsub-streaming/subscriber-handler.py new file mode 100644 index 00000000..896c00ac --- /dev/null +++ b/examples/pubsub-streaming/subscriber-handler.py @@ -0,0 +1,36 @@ +import time + +from dapr.clients import DaprClient +from dapr.clients.grpc.subscription import Subscription + +counter = 0 + + +def process_message(message): + # Process the message here + global counter + counter += 1 + print(f'Processing message: {message.data()} from {message.topic()}...') + return Subscription.SUCCESS + + +def main(): + with DaprClient() as client: + # This will start a new thread that will listen for messages + # and process them in the `process_message` function + close_fn = client.subscribe_with_handler( + pubsub_name='pubsub', + topic='TOPIC_A', + handler_fn=process_message, + dead_letter_topic='TOPIC_A_DEAD', + ) + + while counter < 5: + time.sleep(1) + + print('Closing subscription...') + close_fn() + + +if __name__ == '__main__': + main() diff --git a/examples/pubsub-streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py index 476da9e7..5716b34c 100644 --- a/examples/pubsub-streaming/subscriber.py +++ b/examples/pubsub-streaming/subscriber.py @@ -3,28 +3,33 @@ from dapr.clients import DaprClient from dapr.clients.grpc.subscription import StreamInactiveError +counter = 0 + def process_message(message): + global counter + counter += 1 # Process the message here - print(f'Processing message: {message.data()} from {message.topic()}') + print(f'Processing message: {message.data()} from {message.topic()}...') return 'success' def main(): with DaprClient() as client: + global counter + subscription = client.subscribe( - pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD', timeout=2 + pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' ) try: - i = 0 - while i < 5: - i += 1 + while counter < 5: try: message = subscription.next_message() + except StreamInactiveError as e: print('Stream is inactive. 
Retrying...') - time.sleep(5) + time.sleep(1) continue if message is None: print('No message received within timeout period.') @@ -40,8 +45,8 @@ def main(): elif response_status == 'drop': subscription.respond_drop(message) - finally: + print('Closing subscription...') subscription.close() diff --git a/tests/clients/fake_dapr_server.py b/tests/clients/fake_dapr_server.py index 8e75cb27..8627ab46 100644 --- a/tests/clients/fake_dapr_server.py +++ b/tests/clients/fake_dapr_server.py @@ -184,9 +184,9 @@ def SubscribeTopicEventsAlpha1(self, request_iterator, context): ) extensions = struct_pb2.Struct() - extensions["field1"] = "value1" - extensions["field2"] = 42 - extensions["field3"] = True + extensions['field1'] = 'value1' + extensions['field2'] = 42 + extensions['field3'] = True msg1 = appcallback_v1.TopicEventRequest( id='111', @@ -196,7 +196,8 @@ def SubscribeTopicEventsAlpha1(self, request_iterator, context): data_content_type='text/plain', type='com.example.type2', pubsub_name='pubsub', - spec_version='1.0', extensions=extensions + spec_version='1.0', + extensions=extensions, ) yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg1) @@ -208,7 +209,8 @@ def SubscribeTopicEventsAlpha1(self, request_iterator, context): data_content_type='application/json', type='com.example.type2', pubsub_name='pubsub', - spec_version='1.0', extensions=extensions + spec_version='1.0', + extensions=extensions, ) yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg2) diff --git a/tests/clients/test_dapr_grpc_client.py b/tests/clients/test_dapr_grpc_client.py index 6ab4861e..fee4eb2d 100644 --- a/tests/clients/test_dapr_grpc_client.py +++ b/tests/clients/test_dapr_grpc_client.py @@ -16,6 +16,7 @@ import json import socket import tempfile +import time import unittest import uuid import asyncio @@ -27,7 +28,7 @@ from dapr.clients.exceptions import DaprGrpcError from dapr.clients.grpc.client import DaprGrpcClient from dapr.clients import DaprClient -from dapr.clients.grpc.subscription import StreamInactiveError +from dapr.clients.grpc.subscription import StreamInactiveError, Subscription from dapr.proto import common_v1 from .fake_dapr_server import FakeDaprSidecar from dapr.conf import settings @@ -266,7 +267,8 @@ def test_publish_error(self): def test_subscribe_topic(self): # The fake server we're using sends two messages and then closes the stream # The client should be able to read both messages, handle the stream closure and reconnect - # which will result in the reading the same two messages again + # which will result in reading the same two messages again. 
+ # That's why message 3 should be the same as message 1 dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') subscription = dapr.subscribe(pubsub_name='pubsub', topic='example') @@ -307,6 +309,7 @@ def test_subscribe_topic(self): # The client already reconnected and will start reading the messages again # Since we're working with a fake server, the messages will be the same message4 = subscription.next_message() + subscription.respond_success(message4) self.assertEqual('111', message4.id()) self.assertEqual('app1', message4.source()) self.assertEqual('com.example.type2', message4.type()) @@ -318,6 +321,8 @@ def test_subscribe_topic(self): self.assertEqual('text/plain', message4.data_content_type()) self.assertEqual('hello2', message4.data()) + subscription.close() + def test_subscribe_topic_early_close(self): dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') subscription = dapr.subscribe(pubsub_name='pubsub', topic='example') @@ -326,6 +331,61 @@ def test_subscribe_topic_early_close(self): with self.assertRaises(StreamInactiveError): subscription.next_message() + def test_subscribe_topic_with_handler(self): + # The fake server we're using sends two messages and then closes the stream + # The client should be able to read both messages, handle the stream closure and reconnect + # which will result in reading the same two messages again. + # That's why message 3 should be the same as message 1 + dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') + counter = 0 + + def handler(message): + nonlocal counter + if counter == 0: + self.assertEqual('111', message.id()) + self.assertEqual('app1', message.source()) + self.assertEqual('com.example.type2', message.type()) + self.assertEqual('1.0', message.spec_version()) + self.assertEqual('text/plain', message.data_content_type()) + self.assertEqual('TOPIC_A', message.topic()) + self.assertEqual('pubsub', message.pubsub_name()) + self.assertEqual(b'hello2', message.raw_data()) + self.assertEqual('text/plain', message.data_content_type()) + self.assertEqual('hello2', message.data()) + elif counter == 1: + self.assertEqual('222', message.id()) + self.assertEqual('app1', message.source()) + self.assertEqual('com.example.type2', message.type()) + self.assertEqual('1.0', message.spec_version()) + self.assertEqual('TOPIC_A', message.topic()) + self.assertEqual('pubsub', message.pubsub_name()) + self.assertEqual(b'{"a": 1}', message.raw_data()) + self.assertEqual('application/json', message.data_content_type()) + self.assertEqual({'a': 1}, message.data()) + elif counter == 2: + self.assertEqual('111', message.id()) + self.assertEqual('app1', message.source()) + self.assertEqual('com.example.type2', message.type()) + self.assertEqual('1.0', message.spec_version()) + self.assertEqual('text/plain', message.data_content_type()) + self.assertEqual('TOPIC_A', message.topic()) + self.assertEqual('pubsub', message.pubsub_name()) + self.assertEqual(b'hello2', message.raw_data()) + self.assertEqual('text/plain', message.data_content_type()) + self.assertEqual('hello2', message.data()) + + counter += 1 + + return Subscription.SUCCESS + + close_fn = dapr.subscribe_with_handler( + pubsub_name='pubsub', topic='example', handler_fn=handler + ) + + while counter < 3: + time.sleep(0.1) # Use sleep to prevent a busy-wait loop + close_fn() + @patch.object(settings, 'DAPR_API_TOKEN', 'test-token') def test_dapr_api_token_insertion(self): dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') diff --git 
a/tests/clients/test_subscription.py b/tests/clients/test_subscription.py index 253f4454..ed2eae3f 100644 --- a/tests/clients/test_subscription.py +++ b/tests/clients/test_subscription.py @@ -8,14 +8,22 @@ class SubscriptionMessageTests(unittest.TestCase): def test_subscription_message_init_raw_text(self): extensions = Struct() - extensions["field1"] = "value1" - extensions["field2"] = 42 - extensions["field3"] = True + extensions['field1'] = 'value1' + extensions['field2'] = 42 + extensions['field3'] = True - msg = TopicEventRequest(id='id', data=b'hello', data_content_type='text/plain', - topic='topicA', pubsub_name='pubsub_name', source='source', - type='type', spec_version='spec_version', path='path', - extensions=extensions) + msg = TopicEventRequest( + id='id', + data=b'hello', + data_content_type='text/plain', + topic='topicA', + pubsub_name='pubsub_name', + source='source', + type='type', + spec_version='spec_version', + path='path', + extensions=extensions, + ) subscription_message = SubscriptionMessage(msg=msg) self.assertEqual('id', subscription_message.id()) @@ -27,42 +35,74 @@ def test_subscription_message_init_raw_text(self): self.assertEqual('pubsub_name', subscription_message.pubsub_name()) self.assertEqual(b'hello', subscription_message.raw_data()) self.assertEqual('hello', subscription_message.data()) - self.assertEqual({'field1': 'value1', "field2": 42, "field3": True}, - subscription_message.extensions()) + self.assertEqual( + {'field1': 'value1', 'field2': 42, 'field3': True}, subscription_message.extensions() + ) def test_subscription_message_init_raw_text_non_utf(self): - msg = TopicEventRequest(id='id', data=b'\x80\x81\x82', data_content_type='text/plain', - topic='topicA', pubsub_name='pubsub_name', source='source', - type='type', spec_version='spec_version', path='path') + msg = TopicEventRequest( + id='id', + data=b'\x80\x81\x82', + data_content_type='text/plain', + topic='topicA', + pubsub_name='pubsub_name', + source='source', + type='type', + spec_version='spec_version', + path='path', + ) subscription_message = SubscriptionMessage(msg=msg) self.assertEqual(b'\x80\x81\x82', subscription_message.raw_data()) self.assertIsNone(subscription_message.data()) def test_subscription_message_init_json(self): - msg = TopicEventRequest(id='id', data=b'{"a": 1}', data_content_type='application/json', - topic='topicA', pubsub_name='pubsub_name', source='source', - type='type', spec_version='spec_version', path='path') + msg = TopicEventRequest( + id='id', + data=b'{"a": 1}', + data_content_type='application/json', + topic='topicA', + pubsub_name='pubsub_name', + source='source', + type='type', + spec_version='spec_version', + path='path', + ) subscription_message = SubscriptionMessage(msg=msg) self.assertEqual(b'{"a": 1}', subscription_message.raw_data()) - self.assertEqual({"a": 1}, subscription_message.data()) - print(subscription_message.data()["a"]) + self.assertEqual({'a': 1}, subscription_message.data()) + print(subscription_message.data()['a']) def test_subscription_message_init_json_faimly(self): - msg = TopicEventRequest(id='id', data=b'{"a": 1}', - data_content_type='application/vnd.api+json', topic='topicA', - pubsub_name='pubsub_name', source='source', type='type', - spec_version='spec_version', path='path') + msg = TopicEventRequest( + id='id', + data=b'{"a": 1}', + data_content_type='application/vnd.api+json', + topic='topicA', + pubsub_name='pubsub_name', + source='source', + type='type', + spec_version='spec_version', + path='path', + ) subscription_message 
= SubscriptionMessage(msg=msg) self.assertEqual(b'{"a": 1}', subscription_message.raw_data()) - self.assertEqual({"a": 1}, subscription_message.data()) + self.assertEqual({'a': 1}, subscription_message.data()) def test_subscription_message_init_unknown_content_type(self): - msg = TopicEventRequest(id='id', data=b'{"a": 1}', data_content_type='unknown/content-type', - topic='topicA', pubsub_name='pubsub_name', source='source', - type='type', spec_version='spec_version', path='path') + msg = TopicEventRequest( + id='id', + data=b'{"a": 1}', + data_content_type='unknown/content-type', + topic='topicA', + pubsub_name='pubsub_name', + source='source', + type='type', + spec_version='spec_version', + path='path', + ) subscription_message = SubscriptionMessage(msg=msg) self.assertEqual(b'{"a": 1}', subscription_message.raw_data()) From f85c0ab4f5b2638b82e0d09c182e47e5b1155c0f Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 30 Sep 2024 15:55:56 +0100 Subject: [PATCH 13/33] Fixes linter Signed-off-by: Elena Kolevska --- dapr/clients/grpc/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index 1af81e90..e6469c4f 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -86,7 +86,6 @@ StartWorkflowResponse, EncryptResponse, DecryptResponse, - TopicEventResponseStatus, TopicEventResponse, ) From 49dde9de826231f119b22562988c164debe2d5e9 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 30 Sep 2024 18:32:18 +0100 Subject: [PATCH 14/33] Fixes linter Signed-off-by: Elena Kolevska --- examples/pubsub-streaming/publisher.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/pubsub-streaming/publisher.py b/examples/pubsub-streaming/publisher.py index 9c18ac3c..fd797470 100644 --- a/examples/pubsub-streaming/publisher.py +++ b/examples/pubsub-streaming/publisher.py @@ -28,7 +28,7 @@ topic_name='TOPIC_A', data=json.dumps(req_data), data_content_type='application/json', - publish_metadata={'ttlInSeconds': '100', 'rawPayload': 'false'} + publish_metadata={'ttlInSeconds': '100', 'rawPayload': 'false'}, ) # Print the request From 920bd24bfb586d88619a22faa73dc79c9af36cfd Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Wed, 9 Oct 2024 17:40:37 +0100 Subject: [PATCH 15/33] Adds async Signed-off-by: Elena Kolevska --- dapr/aio/clients/grpc/client.py | 90 +++++++++++---- dapr/aio/clients/grpc/subscription.py | 109 ++++++++++++++++++ dapr/clients/grpc/client.py | 4 +- dapr/clients/grpc/subscription.py | 102 +--------------- dapr/common/pubsub/subscription.py | 92 +++++++++++++++ .../en/python-sdk-docs/python-client.md | 24 ++-- examples/pubsub-streaming/README.md | 97 ++++++++++++++++ .../async-subscriber-handler.py | 43 +++++++ examples/pubsub-streaming/async-subscriber.py | 54 +++++++++ .../pubsub-streaming/subscriber-handler.py | 4 +- 10 files changed, 485 insertions(+), 134 deletions(-) create mode 100644 dapr/aio/clients/grpc/subscription.py create mode 100644 dapr/common/pubsub/subscription.py create mode 100644 examples/pubsub-streaming/async-subscriber-handler.py create mode 100644 examples/pubsub-streaming/async-subscriber.py diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index f9f53498..d69be23c 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -24,7 +24,7 @@ from warnings import warn -from typing import Callable, Dict, Optional, Text, Union, Sequence, List, Any +from typing import Callable, Dict, Optional, Text, 
Union, Sequence, List, Any, Awaitable from typing_extensions import Self from google.protobuf.message import Message as GrpcMessage @@ -39,12 +39,14 @@ AioRpcError, ) +from dapr.aio.clients.grpc.subscription import Subscription from dapr.clients.exceptions import DaprInternalError, DaprGrpcError from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions from dapr.clients.grpc._state import StateOptions, StateItem from dapr.clients.grpc._helpers import getWorkflowRuntimeStatus from dapr.clients.health import DaprHealth from dapr.clients.retry import RetryPolicy +from dapr.common.pubsub.subscription import StreamInactiveError from dapr.conf.helpers import GrpcEndpoint from dapr.conf import settings from dapr.proto import api_v1, api_service_v1, common_v1 @@ -74,27 +76,14 @@ BindingRequest, TransactionalStateOperation, ) -from dapr.clients.grpc._response import ( - BindingResponse, - DaprResponse, - GetSecretResponse, - GetBulkSecretResponse, - GetMetadataResponse, - InvokeMethodResponse, - UnlockResponseStatus, - StateResponse, - BulkStatesResponse, - BulkStateItem, - ConfigurationResponse, - QueryResponse, - QueryResponseItem, - RegisteredComponents, - ConfigurationWatcher, - TryLockResponse, - UnlockResponse, - GetWorkflowResponse, - StartWorkflowResponse, -) +from dapr.clients.grpc._response import (BindingResponse, DaprResponse, GetSecretResponse, + GetBulkSecretResponse, GetMetadataResponse, + InvokeMethodResponse, UnlockResponseStatus, StateResponse, + BulkStatesResponse, BulkStateItem, ConfigurationResponse, + QueryResponse, QueryResponseItem, RegisteredComponents, + ConfigurationWatcher, TryLockResponse, UnlockResponse, + GetWorkflowResponse, StartWorkflowResponse, + TopicEventResponse, ) class DaprGrpcClientAsync: @@ -482,6 +471,63 @@ async def publish_event( return DaprResponse(await call.initial_metadata()) + async def subscribe(self, pubsub_name: str, topic: str, metadata: Optional[dict] = None, + dead_letter_topic: Optional[str] = None, ) -> Subscription: + """ + Subscribe to a topic with a bidirectional stream + + Args: + pubsub_name (str): The name of the pubsub component. + topic (str): The name of the topic. + metadata (Optional[dict]): Additional metadata for the subscription. + dead_letter_topic (Optional[str]): Name of the dead-letter topic. + + Returns: + Subscription: The Subscription object managing the stream. + """ + subscription = Subscription(self._stub, pubsub_name, topic, metadata, + dead_letter_topic) + await subscription.start() + return subscription + + async def subscribe_with_handler(self, pubsub_name: str, topic: str, + handler_fn: Callable[..., TopicEventResponse], metadata: Optional[dict] = None, + dead_letter_topic: Optional[str] = None, ) -> Callable[[], Awaitable[None]]: + """ + Subscribe to a topic with a bidirectional stream and a message handler function + + Args: + pubsub_name (str): The name of the pubsub component. + topic (str): The name of the topic. + handler_fn (Callable[..., TopicEventResponse]): The function to call when a message is received. + metadata (Optional[dict]): Additional metadata for the subscription. + dead_letter_topic (Optional[str]): Name of the dead-letter topic. + + Returns: + Callable[[], Awaitable[None]]: An async function to close the subscription. 
+ """ + subscription = await self.subscribe(pubsub_name, topic, metadata, dead_letter_topic) + + async def stream_messages(sub: Subscription): + while True: + try: + message = await sub.next_message() + if message: + response = await handler_fn(message) + if response: + await subscription._respond(message, response.status) + else: + continue + except StreamInactiveError: + break + + async def close_subscription(): + await subscription.close() + + asyncio.create_task(stream_messages(subscription)) + + return close_subscription + async def get_state( self, store_name: str, diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py new file mode 100644 index 00000000..b4be6250 --- /dev/null +++ b/dapr/aio/clients/grpc/subscription.py @@ -0,0 +1,109 @@ +import asyncio +from grpc import StatusCode +from grpc.aio import AioRpcError + +from dapr.clients.grpc._response import TopicEventResponse +from dapr.clients.health import DaprHealth +from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage +from dapr.proto import api_v1, appcallback_v1 + +class Subscription: + + def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None): + self._stub = stub + self._pubsub_name = pubsub_name + self._topic = topic + self._metadata = metadata or {} + self._dead_letter_topic = dead_letter_topic or '' + self._stream = None + self._send_queue = asyncio.Queue() + self._stream_active = asyncio.Event() + + async def start(self): + async def outgoing_request_iterator(): + try: + initial_request = api_v1.SubscribeTopicEventsRequestAlpha1( + initial_request=api_v1.SubscribeTopicEventsRequestInitialAlpha1( + pubsub_name=self._pubsub_name, + topic=self._topic, + metadata=self._metadata, + dead_letter_topic=self._dead_letter_topic, + ) + ) + yield initial_request + + while self._stream_active.is_set(): + try: + response = await asyncio.wait_for(self._send_queue.get(), timeout=1.0) + yield response + except asyncio.TimeoutError: + continue + except Exception as e: + raise Exception(f'Error while writing to stream: {e}') + + self._stream = self._stub.SubscribeTopicEventsAlpha1(outgoing_request_iterator()) + self._stream_active.set() + await self._stream.read() # discard the initial message + + async def reconnect_stream(self): + await self.close() + DaprHealth.wait_until_ready() + print('Attempting to reconnect...') + await self.start() + + async def next_message(self): + if not self._stream_active.is_set(): + raise StreamInactiveError('Stream is not active') + + try: + if self._stream is not None: + message = await self._stream.read() + if message is None: + return None + return SubscriptionMessage(message.event_message) + except AioRpcError as e: + if e.code() == StatusCode.UNAVAILABLE: + print(f'gRPC error while reading from stream: {e.details()}, ' + f'Status Code: {e.code()}. 
' + f'Attempting to reconnect...') + await self.reconnect_stream() + elif e.code() != StatusCode.CANCELLED: + raise Exception(f'gRPC error while reading from subscription stream: {e.details()} ' + f'Status Code: {e.code()}') + except Exception as e: + raise Exception(f'Error while fetching message: {e}') + + return None + + async def _respond(self, message, status): + try: + status = appcallback_v1.TopicEventResponse(status=status.value) + response = api_v1.SubscribeTopicEventsRequestProcessedAlpha1( + id=message.id(), status=status + ) + msg = api_v1.SubscribeTopicEventsRequestAlpha1(event_processed=response) + if not self._stream_active.is_set(): + raise StreamInactiveError('Stream is not active') + await self._send_queue.put(msg) + except Exception as e: + print(f"Can't send message on inactive stream: {e}") + + async def respond_success(self, message): + await self._respond(message, TopicEventResponse('success').status) + + async def respond_retry(self, message): + await self._respond(message, TopicEventResponse('retry').status) + + async def respond_drop(self, message): + await self._respond(message, TopicEventResponse('drop').status) + + async def close(self): + if self._stream: + try: + self._stream.cancel() + self._stream_active.clear() + except AioRpcError as e: + if e.code() != StatusCode.CANCELLED: + raise Exception(f'Error while closing stream: {e}') + except Exception as e: + raise Exception(f'Error while closing stream: {e}') \ No newline at end of file diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index e6469c4f..94793907 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -523,7 +523,7 @@ def subscribe_with_handler( Args: pubsub_name (str): The name of the pubsub component. topic (str): The name of the topic. - handler_fn (Callable[..., TopicEventResponseStatus]): The function to call when a message is received. + handler_fn (Callable[..., TopicEventResponse]): The function to call when a message is received. metadata (Optional[MetadataTuple]): Additional metadata for the subscription. dead_letter_topic (Optional[str]): Name of the dead-letter topic. 
timeout (Optional[int]): The time in seconds to wait for a message before returning None @@ -540,7 +540,7 @@ def stream_messages(sub): # Process the message response = handler_fn(message) if response: - subscription._respond(message, response) + subscription.respond(message, response.status) else: # No message received continue diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 053194ad..8b99b34f 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -1,22 +1,16 @@ -import json - -from google.protobuf.json_format import MessageToDict from grpc import RpcError, StatusCode, Call # type: ignore from dapr.clients.grpc._response import TopicEventResponse from dapr.clients.health import DaprHealth +from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage from dapr.proto import api_v1, appcallback_v1 import queue import threading -from typing import Optional, Union +from typing import Optional -from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest class Subscription: - SUCCESS = TopicEventResponse('success').status - RETRY = TopicEventResponse('retry').status - DROP = TopicEventResponse('drop').status def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None): self._stub = stub @@ -102,7 +96,7 @@ def next_message(self): return None - def _respond(self, message, status): + def respond(self, message, status): try: status = appcallback_v1.TopicEventResponse(status=status.value) response = api_v1.SubscribeTopicEventsRequestProcessedAlpha1( @@ -116,13 +110,13 @@ def _respond(self, message, status): print(f"Can't send message on inactive stream: {e}") def respond_success(self, message): - self._respond(message, self.SUCCESS) + self.respond(message, TopicEventResponse('success').status) def respond_retry(self, message): - self._respond(message, self.RETRY) + self.respond(message, TopicEventResponse('retry').status) def respond_drop(self, message): - self._respond(message, self.DROP) + self.respond(message, TopicEventResponse('drop').status) def _set_stream_active(self): with self._stream_lock: @@ -146,87 +140,3 @@ def close(self): raise Exception(f'Error while closing stream: {e}') except Exception as e: raise Exception(f'Error while closing stream: {e}') - - -class SubscriptionMessage: - def __init__(self, msg: TopicEventRequest): - self._id: str = msg.id - self._source: str = msg.source - self._type: str = msg.type - self._spec_version: str = msg.spec_version - self._data_content_type: str = msg.data_content_type - self._topic: str = msg.topic - self._pubsub_name: str = msg.pubsub_name - self._raw_data: bytes = msg.data - self._data: Optional[Union[dict, str]] = None - - try: - self._extensions = MessageToDict(msg.extensions) - except Exception as e: - self._extensions = {} - print(f'Error parsing extensions: {e}') - - # Parse the content based on its media type - if self._raw_data and len(self._raw_data) > 0: - self._parse_data_content() - - def id(self): - return self._id - - def source(self): - return self._source - - def type(self): - return self._type - - def spec_version(self): - return self._spec_version - - def data_content_type(self): - return self._data_content_type - - def topic(self): - return self._topic - - def pubsub_name(self): - return self._pubsub_name - - def raw_data(self): - return self._raw_data - - def extensions(self): - return self._extensions - - def data(self): - return self._data - - def _parse_data_content(self): - try: - if 
self._data_content_type == 'application/json': - try: - self._data = json.loads(self._raw_data) - except json.JSONDecodeError: - print(f'Error parsing json message data from topic {self._topic}') - pass # If JSON parsing fails, keep `data` as None - elif self._data_content_type == 'text/plain': - # Assume UTF-8 encoding - try: - self._data = self._raw_data.decode('utf-8') - except UnicodeDecodeError: - print(f'Error decoding message data from topic {self._topic} as UTF-8') - elif self._data_content_type.startswith( - 'application/' - ) and self._data_content_type.endswith('+json'): - # Handle custom JSON-based media types (e.g., application/vnd.api+json) - try: - self._data = json.loads(self._raw_data) - except json.JSONDecodeError: - print(f'Error parsing json message data from topic {self._topic}') - pass # If JSON parsing fails, keep `data` as None - except Exception as e: - # Log or handle any unexpected exceptions - print(f'Error parsing media type: {e}') - - -class StreamInactiveError(Exception): - pass diff --git a/dapr/common/pubsub/subscription.py b/dapr/common/pubsub/subscription.py new file mode 100644 index 00000000..ac8db973 --- /dev/null +++ b/dapr/common/pubsub/subscription.py @@ -0,0 +1,92 @@ +import json +from google.protobuf.json_format import MessageToDict +from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest +from typing import Optional, Union + +class SubscriptionMessage: + def __init__(self, msg: TopicEventRequest): + self._id: str = msg.id + self._source: str = msg.source + self._type: str = msg.type + self._spec_version: str = msg.spec_version + self._data_content_type: str = msg.data_content_type + self._topic: str = msg.topic + self._pubsub_name: str = msg.pubsub_name + self._raw_data: bytes = msg.data + self._data: Optional[Union[dict, str]] = None + + try: + self._extensions = MessageToDict(msg.extensions) + except Exception as e: + self._extensions = {} + print(f'Error parsing extensions: {e}') + + # Parse the content based on its media type + if self._raw_data and len(self._raw_data) > 0: + self._parse_data_content() + + def id(self): + return self._id + + def source(self): + return self._source + + def type(self): + return self._type + + def spec_version(self): + return self._spec_version + + def data_content_type(self): + return self._data_content_type + + def topic(self): + return self._topic + + def pubsub_name(self): + return self._pubsub_name + + def raw_data(self): + return self._raw_data + + def extensions(self): + return self._extensions + + def data(self): + return self._data + + def _parse_data_content(self): + try: + if self._data_content_type == 'application/json': + try: + self._data = json.loads(self._raw_data) + except json.JSONDecodeError: + print(f'Error parsing json message data from topic {self._topic}') + pass # If JSON parsing fails, keep `data` as None + elif self._data_content_type == 'text/plain': + # Assume UTF-8 encoding + try: + self._data = self._raw_data.decode('utf-8') + except UnicodeDecodeError: + print(f'Error decoding message data from topic {self._topic} as UTF-8') + elif self._data_content_type.startswith( + 'application/' + ) and self._data_content_type.endswith('+json'): + # Handle custom JSON-based media types (e.g., application/vnd.api+json) + try: + self._data = json.loads(self._raw_data) + except json.JSONDecodeError: + print(f'Error parsing json message data from topic {self._topic}') + pass # If JSON parsing fails, keep `data` as None + except Exception as e: + # Log or handle any unexpected 
exceptions + print(f'Error parsing media type: {e}') + + +class StreamInactiveError(Exception): + pass + +class PubSubEventStatus: + SUCCESS = 'success' + RETRY = 'retry' + DROP = 'drop' \ No newline at end of file diff --git a/daprdocs/content/en/python-sdk-docs/python-client.md b/daprdocs/content/en/python-sdk-docs/python-client.md index 4f51f945..b4e92a9b 100644 --- a/daprdocs/content/en/python-sdk-docs/python-client.md +++ b/daprdocs/content/en/python-sdk-docs/python-client.md @@ -216,7 +216,7 @@ with DaprClient() as d: - For a full list of state store query options visit [How-To: Query state]({{< ref howto-state-query-api.md >}}). - Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/state_store_query) for code samples and instructions to try out state store querying. -### Publish & subscribe to messages +### Publish & subscribe #### Publish messages @@ -269,14 +269,11 @@ subscription and stop receiving messages. The `subscribe_with_handler` method accepts a callback function that is executed for each message received from the stream. It runs in a separate thread, so it doesn't block the main thread. The callback should return a -`TopicEventResponseStatus`, indicating whether the message was processed successfully, should be -retried, or should be discarded. You can return these statuses using the `Subscription.SUCCESS`, -`Subscription.RETRY`, and `Subscription.DROP` class properties. The method will automatically manage -message acknowledgments based on the returned status. When done, the subscription will automatically -close, and you don't need to manually stop it. - -The call to `subscribe_with_handler` method returns a close function, which should be called to -terminate the subscription when you're done. +`TopicEventResponse` (ex. `TopicEventResponse('success')`), indicating whether the message was +processed successfully, should be retried, or should be discarded. The method will automatically +manage message acknowledgements based on the returned status. The call to `subscribe_with_handler` +method returns a close function, which should be called to terminate the subscription when you're +done. Here's an example of using the `subscribe` method: @@ -343,7 +340,7 @@ And here's an example of using the `subscribe_with_handler` method: import time from dapr.clients import DaprClient -from dapr.clients.grpc.subscription import Subscription +from dapr.clients.grpc._response import TopicEventResponse counter = 0 @@ -353,7 +350,7 @@ def process_message(message): global counter counter += 1 print(f'Processing message: {message.data()} from {message.topic()}...') - return Subscription.SUCCESS + return TopicEventResponse('success') def main(): @@ -376,6 +373,9 @@ if __name__ == '__main__': main() ``` +- For more information about pub/sub, visit [How-To: Publish & subscribe]({{< ref howto-publish-subscribe.md >}}). +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/pubsub-simple) for code samples and instructions to try out streaming pub/sub. + ### Interact with output bindings ```python @@ -386,7 +386,7 @@ with DaprClient() as d: ``` - For a full guide on output bindings visit [How-To: Use bindings]({{< ref howto-bindings.md >}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/invoke-binding) for code samples and instructions to try out output bindings. 
+- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/invoke-binding) for code samples and instructions to try out output bindings. ### Retrieve secrets diff --git a/examples/pubsub-streaming/README.md b/examples/pubsub-streaming/README.md index 4849e791..4bad7f3c 100644 --- a/examples/pubsub-streaming/README.md +++ b/examples/pubsub-streaming/README.md @@ -116,6 +116,103 @@ dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --e +## Run async example where users control reading messages off the stream + +Run the following command in a terminal/command prompt: + + + +```bash +# 1. Start Subscriber +dapr run --app-id python-subscriber --app-protocol grpc python3 async-subscriber.py +``` + + + +In another terminal/command prompt run: + + + +```bash +# 2. Start Publisher +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +``` + + + +## Run async example with a handler function + +Run the following command in a terminal/command prompt: + + + +```bash +# 1. Start Subscriber +dapr run --app-id python-subscriber --app-protocol grpc python3 async-subscriber-handler.py +``` + + + +In another terminal/command prompt run: + + + +```bash +# 2. Start Publisher +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +``` + + + + ## Cleanup diff --git a/examples/pubsub-streaming/async-subscriber-handler.py b/examples/pubsub-streaming/async-subscriber-handler.py new file mode 100644 index 00000000..75aca19e --- /dev/null +++ b/examples/pubsub-streaming/async-subscriber-handler.py @@ -0,0 +1,43 @@ +import asyncio +from dapr.aio.clients import DaprClient +from dapr.clients.grpc._response import TopicEventResponse + +counter = 0 + + +async def process_message(message) -> TopicEventResponse: + """ + Asynchronously processes the message and returns a TopicEventResponse. + """ + + print(f'Processing message: {message.data()} from {message.topic()}...') + global counter + counter += 1 + return TopicEventResponse('success') + + +async def main(): + """ + Main function to subscribe to a pubsub topic and handle messages asynchronously. 
+ """ + async with DaprClient() as client: + # Subscribe to the pubsub topic with the message handler + close_fn = await client.subscribe_with_handler( + pubsub_name='pubsub', + topic='TOPIC_A', + handler_fn=process_message, + dead_letter_topic='TOPIC_A_DEAD', + ) + + # Wait until 5 messages are processed + global counter + while counter < 5: + print("Counter: ", counter) + await asyncio.sleep(1) + + print('Closing subscription...') + await close_fn() + + +if __name__ == '__main__': + asyncio.run(main()) diff --git a/examples/pubsub-streaming/async-subscriber.py b/examples/pubsub-streaming/async-subscriber.py new file mode 100644 index 00000000..396b3cc2 --- /dev/null +++ b/examples/pubsub-streaming/async-subscriber.py @@ -0,0 +1,54 @@ +import asyncio + +from dapr.aio.clients import DaprClient +from dapr.clients.grpc.subscription import StreamInactiveError + +counter = 0 + + +def process_message(message): + global counter + counter += 1 + # Process the message here + print(f'Processing message: {message.data()} from {message.topic()}...') + return 'success' + + +async def main(): + async with DaprClient() as client: + global counter + subscription = await client.subscribe( + pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + ) + + try: + while counter < 5: + try: + message = await subscription.next_message() + + except StreamInactiveError: + print('Stream is inactive. Retrying...') + await asyncio.sleep(1) + continue + if message is None: + print('No message received within timeout period.') + continue + + # Process the message + response_status = process_message(message) + + if response_status == 'success': + await subscription.respond_success(message) + elif response_status == 'retry': + await subscription.respond_retry(message) + elif response_status == 'drop': + await subscription.respond_drop(message) + + finally: + print('Closing subscription...') + await subscription.close() + + + +if __name__ == '__main__': + asyncio.run(main()) diff --git a/examples/pubsub-streaming/subscriber-handler.py b/examples/pubsub-streaming/subscriber-handler.py index 896c00ac..aab840a4 100644 --- a/examples/pubsub-streaming/subscriber-handler.py +++ b/examples/pubsub-streaming/subscriber-handler.py @@ -1,7 +1,7 @@ import time from dapr.clients import DaprClient -from dapr.clients.grpc.subscription import Subscription +from dapr.clients.grpc._response import TopicEventResponse counter = 0 @@ -11,7 +11,7 @@ def process_message(message): global counter counter += 1 print(f'Processing message: {message.data()} from {message.topic()}...') - return Subscription.SUCCESS + return TopicEventResponse('success') def main(): From fb862e4f7cd23150c317bdf15d6e77e36504df45 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Wed, 9 Oct 2024 21:54:36 +0100 Subject: [PATCH 16/33] Adds tests for async streaming subscription Signed-off-by: Elena Kolevska --- dapr/aio/clients/grpc/client.py | 2 +- dapr/aio/clients/grpc/subscription.py | 13 +- tests/clients/test_dapr_grpc_client.py | 12 +- tests/clients/test_dapr_grpc_client_async.py | 137 +++++++++++++++++-- 4 files changed, 140 insertions(+), 24 deletions(-) diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index d69be23c..626ca697 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -515,7 +515,7 @@ async def stream_messages(sub: Subscription): if message: response = await handler_fn(message) if response: - await subscription._respond(message, response.status) + await 
subscription.respond(message, response.status) else: continue except StreamInactiveError: diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py index b4be6250..7dc10f9a 100644 --- a/dapr/aio/clients/grpc/subscription.py +++ b/dapr/aio/clients/grpc/subscription.py @@ -68,14 +68,13 @@ async def next_message(self): f'Attempting to reconnect...') await self.reconnect_stream() elif e.code() != StatusCode.CANCELLED: - raise Exception(f'gRPC error while reading from subscription stream: {e.details()} ' - f'Status Code: {e.code()}') + raise Exception(f'gRPC error while reading from subscription stream: {e} ') except Exception as e: raise Exception(f'Error while fetching message: {e}') return None - async def _respond(self, message, status): + async def respond(self, message, status): try: status = appcallback_v1.TopicEventResponse(status=status.value) response = api_v1.SubscribeTopicEventsRequestProcessedAlpha1( @@ -86,16 +85,16 @@ async def _respond(self, message, status): raise StreamInactiveError('Stream is not active') await self._send_queue.put(msg) except Exception as e: - print(f"Can't send message on inactive stream: {e}") + print(f"Can't send message: {e}") async def respond_success(self, message): - await self._respond(message, TopicEventResponse('success').status) + await self.respond(message, TopicEventResponse('success').status) async def respond_retry(self, message): - await self._respond(message, TopicEventResponse('retry').status) + await self.respond(message, TopicEventResponse('retry').status) async def respond_drop(self, message): - await self._respond(message, TopicEventResponse('drop').status) + await self.respond(message, TopicEventResponse('drop').status) async def close(self): if self._stream: diff --git a/tests/clients/test_dapr_grpc_client.py b/tests/clients/test_dapr_grpc_client.py index fee4eb2d..6c46d5ec 100644 --- a/tests/clients/test_dapr_grpc_client.py +++ b/tests/clients/test_dapr_grpc_client.py @@ -36,13 +36,9 @@ from dapr.clients.grpc._request import TransactionalStateOperation from dapr.clients.grpc._state import StateOptions, Consistency, Concurrency, StateItem from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc._response import ( - ConfigurationItem, - ConfigurationResponse, - ConfigurationWatcher, - UnlockResponseStatus, - WorkflowRuntimeStatus, -) +from dapr.clients.grpc._response import (ConfigurationItem, ConfigurationResponse, + ConfigurationWatcher, UnlockResponseStatus, + WorkflowRuntimeStatus, TopicEventResponse, ) class DaprGrpcClientTests(unittest.TestCase): @@ -376,7 +372,7 @@ def handler(message): counter += 1 - return Subscription.SUCCESS + return TopicEventResponse("success") close_fn = dapr.subscribe_with_handler( pubsub_name='pubsub', topic='example', handler_fn=handler diff --git a/tests/clients/test_dapr_grpc_client_async.py b/tests/clients/test_dapr_grpc_client_async.py index 8099e3ab..e8f8af3c 100644 --- a/tests/clients/test_dapr_grpc_client_async.py +++ b/tests/clients/test_dapr_grpc_client_async.py @@ -12,13 +12,13 @@ See the License for the specific language governing permissions and limitations under the License. 
""" - +import asyncio import json import socket import tempfile import unittest import uuid - +import time from unittest.mock import patch from google.rpc import status_pb2, code_pb2 @@ -26,6 +26,7 @@ from dapr.aio.clients.grpc.client import DaprGrpcClientAsync from dapr.aio.clients import DaprClient from dapr.clients.exceptions import DaprGrpcError +from dapr.common.pubsub.subscription import StreamInactiveError from dapr.proto import common_v1 from .fake_dapr_server import FakeDaprSidecar from dapr.conf import settings @@ -33,12 +34,9 @@ from dapr.clients.grpc._request import TransactionalStateOperation from dapr.clients.grpc._state import StateOptions, Consistency, Concurrency, StateItem from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc._response import ( - ConfigurationItem, - ConfigurationWatcher, - ConfigurationResponse, - UnlockResponseStatus, -) +from dapr.clients.grpc._response import (ConfigurationItem, ConfigurationWatcher, + ConfigurationResponse, UnlockResponseStatus, + TopicEventResponse, ) class DaprGrpcClientAsyncTests(unittest.IsolatedAsyncioTestCase): @@ -262,6 +260,129 @@ async def test_publish_error(self): data=111, ) + async def test_subscribe_topic(self): + # The fake server we're using sends two messages and then closes the stream + # The client should be able to read both messages, handle the stream closure and reconnect + # which will result in reading the same two messages again. + # That's why message 3 should be the same as message 1 + dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') + subscription = await dapr.subscribe(pubsub_name='pubsub', topic='example') + + # First message - text + message1 = await subscription.next_message() + await subscription.respond_success(message1) + + self.assertEqual('111', message1.id()) + self.assertEqual('app1', message1.source()) + self.assertEqual('com.example.type2', message1.type()) + self.assertEqual('1.0', message1.spec_version()) + self.assertEqual('text/plain', message1.data_content_type()) + self.assertEqual('TOPIC_A', message1.topic()) + self.assertEqual('pubsub', message1.pubsub_name()) + self.assertEqual(b'hello2', message1.raw_data()) + self.assertEqual('text/plain', message1.data_content_type()) + self.assertEqual('hello2', message1.data()) + + # Second message - json + message2 = await subscription.next_message() + await subscription.respond_success(message2) + + self.assertEqual('222', message2.id()) + self.assertEqual('app1', message2.source()) + self.assertEqual('com.example.type2', message2.type()) + self.assertEqual('1.0', message2.spec_version()) + self.assertEqual('TOPIC_A', message2.topic()) + self.assertEqual('pubsub', message2.pubsub_name()) + self.assertEqual(b'{"a": 1}', message2.raw_data()) + self.assertEqual('application/json', message2.data_content_type()) + self.assertEqual({'a': 1}, message2.data()) + + # On this call the stream will be closed and return an error, so the message will be none + # but the client will try to reconnect + message3 = await subscription.next_message() + self.assertIsNone(message3) + + # # The client already reconnected and will start reading the messages again + # # Since we're working with a fake server, the messages will be the same + # message4 = await subscription.next_message() + # await subscription.respond_success(message4) + # self.assertEqual('111', message4.id()) + # self.assertEqual('app1', message4.source()) + # self.assertEqual('com.example.type2', message4.type()) + # self.assertEqual('1.0', 
message4.spec_version()) + # self.assertEqual('text/plain', message4.data_content_type()) + # self.assertEqual('TOPIC_A', message4.topic()) + # self.assertEqual('pubsub', message4.pubsub_name()) + # self.assertEqual(b'hello2', message4.raw_data()) + # self.assertEqual('text/plain', message4.data_content_type()) + # self.assertEqual('hello2', message4.data()) + + await subscription.close() + + async def test_subscribe_topic_early_close(self): + dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') + subscription = await dapr.subscribe(pubsub_name='pubsub', topic='example') + await subscription.close() + + with self.assertRaises(StreamInactiveError): + await subscription.next_message() + + # async def test_subscribe_topic_with_handler(self): + # # The fake server we're using sends two messages and then closes the stream + # # The client should be able to read both messages, handle the stream closure and reconnect + # # which will result in reading the same two messages again. + # # That's why message 3 should be the same as message 1 + # dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') + # counter = 0 + # + # async def handler(message): + # nonlocal counter + # if counter == 0: + # self.assertEqual('111', message.id()) + # self.assertEqual('app1', message.source()) + # self.assertEqual('com.example.type2', message.type()) + # self.assertEqual('1.0', message.spec_version()) + # self.assertEqual('text/plain', message.data_content_type()) + # self.assertEqual('TOPIC_A', message.topic()) + # self.assertEqual('pubsub', message.pubsub_name()) + # self.assertEqual(b'hello2', message.raw_data()) + # self.assertEqual('text/plain', message.data_content_type()) + # self.assertEqual('hello2', message.data()) + # elif counter == 1: + # self.assertEqual('222', message.id()) + # self.assertEqual('app1', message.source()) + # self.assertEqual('com.example.type2', message.type()) + # self.assertEqual('1.0', message.spec_version()) + # self.assertEqual('TOPIC_A', message.topic()) + # self.assertEqual('pubsub', message.pubsub_name()) + # self.assertEqual(b'{"a": 1}', message.raw_data()) + # self.assertEqual('application/json', message.data_content_type()) + # self.assertEqual({'a': 1}, message.data()) + # elif counter == 2: + # self.assertEqual('111', message.id()) + # self.assertEqual('app1', message.source()) + # self.assertEqual('com.example.type2', message.type()) + # self.assertEqual('1.0', message.spec_version()) + # self.assertEqual('text/plain', message.data_content_type()) + # self.assertEqual('TOPIC_A', message.topic()) + # self.assertEqual('pubsub', message.pubsub_name()) + # self.assertEqual(b'hello2', message.raw_data()) + # self.assertEqual('text/plain', message.data_content_type()) + # self.assertEqual('hello2', message.data()) + # + # counter += 1 + # + # return TopicEventResponse("success") + # + # close_fn = await dapr.subscribe_with_handler( + # pubsub_name='pubsub', topic='example', handler_fn=handler + # ) + # + # while counter < 3: + # await asyncio.sleep(0.1) # sleep to prevent a busy loop + # await close_fn() + + @patch.object(settings, 'DAPR_API_TOKEN', 'test-token') async def test_dapr_api_token_insertion(self): dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') From 7ac13de4455394d8af1a0e4ef8e72e984d6fd19e Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Thu, 10 Oct 2024 11:02:09 +0100 Subject: [PATCH 17/33] Linter Signed-off-by: Elena Kolevska --- dapr/aio/clients/grpc/client.py | 53 +++++++++++++------ 
dapr/aio/clients/grpc/subscription.py | 12 +++-- dapr/clients/grpc/subscription.py | 2 - dapr/common/pubsub/subscription.py | 4 +- .../async-subscriber-handler.py | 2 +- examples/pubsub-streaming/async-subscriber.py | 1 - tests/clients/test_dapr_grpc_client.py | 15 ++++-- tests/clients/test_dapr_grpc_client_async.py | 12 ++--- 8 files changed, 65 insertions(+), 36 deletions(-) diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index 626ca697..2b40101c 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -76,14 +76,28 @@ BindingRequest, TransactionalStateOperation, ) -from dapr.clients.grpc._response import (BindingResponse, DaprResponse, GetSecretResponse, - GetBulkSecretResponse, GetMetadataResponse, - InvokeMethodResponse, UnlockResponseStatus, StateResponse, - BulkStatesResponse, BulkStateItem, ConfigurationResponse, - QueryResponse, QueryResponseItem, RegisteredComponents, - ConfigurationWatcher, TryLockResponse, UnlockResponse, - GetWorkflowResponse, StartWorkflowResponse, - TopicEventResponse, ) +from dapr.clients.grpc._response import ( + BindingResponse, + DaprResponse, + GetSecretResponse, + GetBulkSecretResponse, + GetMetadataResponse, + InvokeMethodResponse, + UnlockResponseStatus, + StateResponse, + BulkStatesResponse, + BulkStateItem, + ConfigurationResponse, + QueryResponse, + QueryResponseItem, + RegisteredComponents, + ConfigurationWatcher, + TryLockResponse, + UnlockResponse, + GetWorkflowResponse, + StartWorkflowResponse, + TopicEventResponse, +) class DaprGrpcClientAsync: @@ -471,8 +485,13 @@ async def publish_event( return DaprResponse(await call.initial_metadata()) - async def subscribe(self, pubsub_name: str, topic: str, metadata: Optional[dict] = None, - dead_letter_topic: Optional[str] = None, ) -> Subscription: + async def subscribe( + self, + pubsub_name: str, + topic: str, + metadata: Optional[dict] = None, + dead_letter_topic: Optional[str] = None, + ) -> Subscription: """ Subscribe to a topic with a bidirectional stream @@ -485,14 +504,18 @@ async def subscribe(self, pubsub_name: str, topic: str, metadata: Optional[dict] Returns: Subscription: The Subscription object managing the stream. 
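As a usage illustration, here is a short sketch of the pull-based flow this `subscribe` call enables, modeled on the async subscriber example added later in this series; the component, topic and dead-letter names are taken from that example, and `StreamInactiveError` comes from the shared `dapr.common.pubsub.subscription` module:

```python
import asyncio

from dapr.aio.clients import DaprClient
from dapr.common.pubsub.subscription import StreamInactiveError


async def main():
    async with DaprClient() as client:
        subscription = await client.subscribe(
            pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD'
        )
        processed = 0
        try:
            while processed < 5:
                try:
                    message = await subscription.next_message()
                except StreamInactiveError:
                    # The stream dropped; give the client a moment to reconnect.
                    await asyncio.sleep(1)
                    continue
                if message is None:
                    # Transient read error or reconnect in progress.
                    continue
                print(f'Got {message.id()}: {message.data()}')
                await subscription.respond_success(message)
                processed += 1
        finally:
            await subscription.close()


asyncio.run(main())
```

A `None` return from `next_message` signals a transient read failure or an in-progress reconnect, so the loop simply tries again.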
""" - subscription = Subscription(self._stub, pubsub_name, topic, metadata, - dead_letter_topic) + subscription = Subscription(self._stub, pubsub_name, topic, metadata, dead_letter_topic) await subscription.start() return subscription - async def subscribe_with_handler(self, pubsub_name: str, topic: str, - handler_fn: Callable[..., TopicEventResponse], metadata: Optional[dict] = None, - dead_letter_topic: Optional[str] = None, ) -> Callable[[], Awaitable[None]]: + async def subscribe_with_handler( + self, + pubsub_name: str, + topic: str, + handler_fn: Callable[..., TopicEventResponse], + metadata: Optional[dict] = None, + dead_letter_topic: Optional[str] = None, + ) -> Callable[[], Awaitable[None]]: """ Subscribe to a topic with a bidirectional stream and a message handler function diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py index 7dc10f9a..84542bb4 100644 --- a/dapr/aio/clients/grpc/subscription.py +++ b/dapr/aio/clients/grpc/subscription.py @@ -7,8 +7,8 @@ from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage from dapr.proto import api_v1, appcallback_v1 -class Subscription: +class Subscription: def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None): self._stub = stub self._pubsub_name = pubsub_name @@ -63,9 +63,11 @@ async def next_message(self): return SubscriptionMessage(message.event_message) except AioRpcError as e: if e.code() == StatusCode.UNAVAILABLE: - print(f'gRPC error while reading from stream: {e.details()}, ' - f'Status Code: {e.code()}. ' - f'Attempting to reconnect...') + print( + f'gRPC error while reading from stream: {e.details()}, ' + f'Status Code: {e.code()}. ' + f'Attempting to reconnect...' + ) await self.reconnect_stream() elif e.code() != StatusCode.CANCELLED: raise Exception(f'gRPC error while reading from subscription stream: {e} ') @@ -105,4 +107,4 @@ async def close(self): if e.code() != StatusCode.CANCELLED: raise Exception(f'Error while closing stream: {e}') except Exception as e: - raise Exception(f'Error while closing stream: {e}') \ No newline at end of file + raise Exception(f'Error while closing stream: {e}') diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 8b99b34f..3374a121 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -9,9 +9,7 @@ from typing import Optional - class Subscription: - def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None): self._stub = stub self._pubsub_name = pubsub_name diff --git a/dapr/common/pubsub/subscription.py b/dapr/common/pubsub/subscription.py index ac8db973..0f96ab6b 100644 --- a/dapr/common/pubsub/subscription.py +++ b/dapr/common/pubsub/subscription.py @@ -3,6 +3,7 @@ from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest from typing import Optional, Union + class SubscriptionMessage: def __init__(self, msg: TopicEventRequest): self._id: str = msg.id @@ -86,7 +87,8 @@ def _parse_data_content(self): class StreamInactiveError(Exception): pass + class PubSubEventStatus: SUCCESS = 'success' RETRY = 'retry' - DROP = 'drop' \ No newline at end of file + DROP = 'drop' diff --git a/examples/pubsub-streaming/async-subscriber-handler.py b/examples/pubsub-streaming/async-subscriber-handler.py index 75aca19e..e5f68953 100644 --- a/examples/pubsub-streaming/async-subscriber-handler.py +++ b/examples/pubsub-streaming/async-subscriber-handler.py @@ -32,7 +32,7 @@ async def main(): # Wait until 5 messages 
are processed global counter while counter < 5: - print("Counter: ", counter) + print('Counter: ', counter) await asyncio.sleep(1) print('Closing subscription...') diff --git a/examples/pubsub-streaming/async-subscriber.py b/examples/pubsub-streaming/async-subscriber.py index 396b3cc2..0f7da59b 100644 --- a/examples/pubsub-streaming/async-subscriber.py +++ b/examples/pubsub-streaming/async-subscriber.py @@ -49,6 +49,5 @@ async def main(): await subscription.close() - if __name__ == '__main__': asyncio.run(main()) diff --git a/tests/clients/test_dapr_grpc_client.py b/tests/clients/test_dapr_grpc_client.py index 6c46d5ec..d3eab236 100644 --- a/tests/clients/test_dapr_grpc_client.py +++ b/tests/clients/test_dapr_grpc_client.py @@ -28,7 +28,7 @@ from dapr.clients.exceptions import DaprGrpcError from dapr.clients.grpc.client import DaprGrpcClient from dapr.clients import DaprClient -from dapr.clients.grpc.subscription import StreamInactiveError, Subscription +from dapr.clients.grpc.subscription import StreamInactiveError from dapr.proto import common_v1 from .fake_dapr_server import FakeDaprSidecar from dapr.conf import settings @@ -36,9 +36,14 @@ from dapr.clients.grpc._request import TransactionalStateOperation from dapr.clients.grpc._state import StateOptions, Consistency, Concurrency, StateItem from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc._response import (ConfigurationItem, ConfigurationResponse, - ConfigurationWatcher, UnlockResponseStatus, - WorkflowRuntimeStatus, TopicEventResponse, ) +from dapr.clients.grpc._response import ( + ConfigurationItem, + ConfigurationResponse, + ConfigurationWatcher, + UnlockResponseStatus, + WorkflowRuntimeStatus, + TopicEventResponse, +) class DaprGrpcClientTests(unittest.TestCase): @@ -372,7 +377,7 @@ def handler(message): counter += 1 - return TopicEventResponse("success") + return TopicEventResponse('success') close_fn = dapr.subscribe_with_handler( pubsub_name='pubsub', topic='example', handler_fn=handler diff --git a/tests/clients/test_dapr_grpc_client_async.py b/tests/clients/test_dapr_grpc_client_async.py index e8f8af3c..42bbd830 100644 --- a/tests/clients/test_dapr_grpc_client_async.py +++ b/tests/clients/test_dapr_grpc_client_async.py @@ -12,13 +12,11 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -import asyncio import json import socket import tempfile import unittest import uuid -import time from unittest.mock import patch from google.rpc import status_pb2, code_pb2 @@ -34,9 +32,12 @@ from dapr.clients.grpc._request import TransactionalStateOperation from dapr.clients.grpc._state import StateOptions, Consistency, Concurrency, StateItem from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc._response import (ConfigurationItem, ConfigurationWatcher, - ConfigurationResponse, UnlockResponseStatus, - TopicEventResponse, ) +from dapr.clients.grpc._response import ( + ConfigurationItem, + ConfigurationWatcher, + ConfigurationResponse, + UnlockResponseStatus, +) class DaprGrpcClientAsyncTests(unittest.IsolatedAsyncioTestCase): @@ -382,7 +383,6 @@ async def test_subscribe_topic_early_close(self): # await asyncio.sleep(0.1) # sleep to prevent a busy loop # await close_fn() - @patch.object(settings, 'DAPR_API_TOKEN', 'test-token') async def test_dapr_api_token_insertion(self): dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') From c5e5f4fdef83db8143f8221d88d03f453a30b941 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Thu, 10 Oct 2024 16:25:30 +0100 Subject: [PATCH 18/33] Split sync and async examples Signed-off-by: Elena Kolevska --- examples/pubsub-streaming-async/README.md | 122 ++++++++++++++++++ examples/pubsub-streaming-async/publisher.py | 43 ++++++ .../subscriber-handler.py} | 1 - .../subscriber.py} | 0 examples/pubsub-streaming/README.md | 97 -------------- tox.ini | 1 + 6 files changed, 166 insertions(+), 98 deletions(-) create mode 100644 examples/pubsub-streaming-async/README.md create mode 100644 examples/pubsub-streaming-async/publisher.py rename examples/{pubsub-streaming/async-subscriber-handler.py => pubsub-streaming-async/subscriber-handler.py} (96%) rename examples/{pubsub-streaming/async-subscriber.py => pubsub-streaming-async/subscriber.py} (100%) diff --git a/examples/pubsub-streaming-async/README.md b/examples/pubsub-streaming-async/README.md new file mode 100644 index 00000000..dfa7d27d --- /dev/null +++ b/examples/pubsub-streaming-async/README.md @@ -0,0 +1,122 @@ +# Example - Publish and subscribe to messages + +This example utilizes a publisher and a subscriber to show the bidirectional pubsub pattern. +It creates a publisher and calls the `publish_event` method in the `DaprClient`. +In the s`subscriber.py` file it creates a subscriber object that can call the `next_message` method to get new messages from the stream. After processing the new message, it returns a status to the stream. + + +> **Note:** Make sure to use the latest proto bindings + +## Pre-requisites + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started) +- [Install Python 3.8+](https://www.python.org/downloads/) + +## Install Dapr python-SDK + + + +```bash +pip3 install dapr +``` + +## Run async example where users control reading messages off the stream + +Run the following command in a terminal/command prompt: + + + +```bash +# 1. Start Subscriber +dapr run --app-id python-subscriber --app-protocol grpc python3 subscriber.py +``` + + + +In another terminal/command prompt run: + + + +```bash +# 2. Start Publisher +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +``` + + + +## Run async example with a handler function + +Run the following command in a terminal/command prompt: + + + +```bash +# 1. 
Start Subscriber +dapr run --app-id python-subscriber --app-protocol grpc python3 subscriber-handler.py +``` + + + +In another terminal/command prompt run: + + + +```bash +# 2. Start Publisher +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +``` + + + + +## Cleanup + + diff --git a/examples/pubsub-streaming-async/publisher.py b/examples/pubsub-streaming-async/publisher.py new file mode 100644 index 00000000..7268f16a --- /dev/null +++ b/examples/pubsub-streaming-async/publisher.py @@ -0,0 +1,43 @@ +# ------------------------------------------------------------ +# Copyright 2022 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ------------------------------------------------------------ +import asyncio +import json + +from dapr.aio.clients import DaprClient + +async def publish_events(): + """ + Publishes events to a pubsub topic asynchronously + """ + + async with DaprClient() as d: + id = 0 + while id < 5: + id += 1 + req_data = {'id': id, 'message': 'hello world'} + + # Create a typed message with content type and body + await d.publish_event( + pubsub_name='pubsub', + topic_name='TOPIC_A', + data=json.dumps(req_data), + data_content_type='application/json', + publish_metadata={'ttlInSeconds': '100', 'rawPayload': 'false'}, + ) + + # Print the request + print(req_data, flush=True) + + await asyncio.sleep(1) + +asyncio.run(publish_events()) \ No newline at end of file diff --git a/examples/pubsub-streaming/async-subscriber-handler.py b/examples/pubsub-streaming-async/subscriber-handler.py similarity index 96% rename from examples/pubsub-streaming/async-subscriber-handler.py rename to examples/pubsub-streaming-async/subscriber-handler.py index e5f68953..f9503f06 100644 --- a/examples/pubsub-streaming/async-subscriber-handler.py +++ b/examples/pubsub-streaming-async/subscriber-handler.py @@ -32,7 +32,6 @@ async def main(): # Wait until 5 messages are processed global counter while counter < 5: - print('Counter: ', counter) await asyncio.sleep(1) print('Closing subscription...') diff --git a/examples/pubsub-streaming/async-subscriber.py b/examples/pubsub-streaming-async/subscriber.py similarity index 100% rename from examples/pubsub-streaming/async-subscriber.py rename to examples/pubsub-streaming-async/subscriber.py diff --git a/examples/pubsub-streaming/README.md b/examples/pubsub-streaming/README.md index 4bad7f3c..4849e791 100644 --- a/examples/pubsub-streaming/README.md +++ b/examples/pubsub-streaming/README.md @@ -116,103 +116,6 @@ dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --e -## Run async example where users control reading messages off the stream - -Run the following command in a terminal/command prompt: - - - -```bash -# 1. Start Subscriber -dapr run --app-id python-subscriber --app-protocol grpc python3 async-subscriber.py -``` - - - -In another terminal/command prompt run: - - - -```bash -# 2. 
Start Publisher -dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py -``` - - - -## Run async example with a handler function - -Run the following command in a terminal/command prompt: - - - -```bash -# 1. Start Subscriber -dapr run --app-id python-subscriber --app-protocol grpc python3 async-subscriber-handler.py -``` - - - -In another terminal/command prompt run: - - - -```bash -# 2. Start Publisher -dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py -``` - - - - ## Cleanup diff --git a/tox.ini b/tox.ini index 6400e329..78f23086 100644 --- a/tox.ini +++ b/tox.ini @@ -51,6 +51,7 @@ commands = ./validate.sh error_handling ./validate.sh pubsub-simple ./validate.sh pubsub-streaming + ./validate.sh pubsub-streaming-async ./validate.sh state_store ./validate.sh state_store_query ./validate.sh secret_store From 2d89fef5462157491104a917f5c742b08bcb0a83 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Thu, 10 Oct 2024 16:38:37 +0100 Subject: [PATCH 19/33] linter Signed-off-by: Elena Kolevska --- examples/pubsub-streaming-async/publisher.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/examples/pubsub-streaming-async/publisher.py b/examples/pubsub-streaming-async/publisher.py index 7268f16a..b9702355 100644 --- a/examples/pubsub-streaming-async/publisher.py +++ b/examples/pubsub-streaming-async/publisher.py @@ -15,6 +15,7 @@ from dapr.aio.clients import DaprClient + async def publish_events(): """ Publishes events to a pubsub topic asynchronously @@ -40,4 +41,5 @@ async def publish_events(): await asyncio.sleep(1) -asyncio.run(publish_events()) \ No newline at end of file + +asyncio.run(publish_events()) From a7f2169bdf08b1996845eee15982b76f83626853 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Fri, 11 Oct 2024 19:01:23 +0100 Subject: [PATCH 20/33] Adds interceptors to the async client for bidirectional streaming Signed-off-by: Elena Kolevska --- dapr/aio/clients/grpc/interceptors.py | 25 ++++++++++++++++++++----- dapr/clients/grpc/interceptors.py | 3 --- dapr/clients/grpc/subscription.py | 6 +++++- examples/pubsub-streaming/subscriber.py | 13 ++++++++++--- 4 files changed, 35 insertions(+), 12 deletions(-) diff --git a/dapr/aio/clients/grpc/interceptors.py b/dapr/aio/clients/grpc/interceptors.py index 55ede4b9..346214dc 100644 --- a/dapr/aio/clients/grpc/interceptors.py +++ b/dapr/aio/clients/grpc/interceptors.py @@ -16,7 +16,7 @@ from collections import namedtuple from typing import List, Tuple -from grpc.aio import UnaryUnaryClientInterceptor, ClientCallDetails # type: ignore +from grpc.aio import UnaryUnaryClientInterceptor, StreamStreamClientInterceptor, ClientCallDetails # type: ignore from dapr.conf import settings @@ -50,8 +50,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): return continuation(client_call_details, request) - -class DaprClientInterceptorAsync(UnaryUnaryClientInterceptor): +class DaprClientInterceptorAsync(UnaryUnaryClientInterceptor, StreamStreamClientInterceptor): """The class implements a UnaryUnaryClientInterceptor from grpc to add an interceptor to add additional headers to all calls as needed. 
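To make the stream-stream case concrete, here is a self-contained sketch of the pattern, a hypothetical header-injecting aio interceptor attached at channel creation; the class name, endpoint and header values are placeholders rather than the SDK's actual wiring, and it follows the same continuation pattern used by the methods below:

```python
import grpc
from grpc.aio import ClientCallDetails, StreamStreamClientInterceptor


class HeaderInjectorAsync(StreamStreamClientInterceptor):
    """Adds a fixed header to every stream-stream call on the channel."""

    def __init__(self, key: str, value: str):
        self._key = key
        self._value = value

    async def intercept_stream_stream(self, continuation, client_call_details, request_iterator):
        # Append the extra header to whatever metadata the call already carries.
        metadata = list(client_call_details.metadata or []) + [(self._key, self._value)]
        new_details = ClientCallDetails(
            client_call_details.method,
            client_call_details.timeout,
            metadata,
            client_call_details.credentials,
            client_call_details.wait_for_ready,
        )
        # Hand off to the next interceptor (or the channel itself).
        return await continuation(new_details, request_iterator)


# Placeholder endpoint and header values; a bidirectional stream opened on this
# channel now carries the injected header as well.
channel = grpc.aio.insecure_channel(
    'localhost:50001',
    interceptors=[HeaderInjectorAsync('dapr-api-token', 'example-token')],
)
```

Without a `StreamStreamClientInterceptor` registered on the channel, only unary calls would pick up the injected headers, which is why the bidirectional subscription stream needs this variant.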
@@ -115,9 +114,25 @@ async def intercept_unary_unary(self, continuation, client_call_details, request Returns: A response object after invoking the continuation callable """ - - # Pre-process or intercept call new_call_details = await self._intercept_call(client_call_details) # Call continuation response = await continuation(new_call_details, request) return response + + async def intercept_stream_stream(self, continuation, client_call_details, request): + """This method intercepts a stream-stream gRPC call. This is the implementation of the + abstract method defined in StreamStreamClientInterceptor defined in grpc. This is invoked + automatically by grpc based on the order in which interceptors are added to the channel. + + Args: + continuation: a callable to be invoked to continue with the RPC or next interceptor + client_call_details: a ClientCallDetails object describing the outgoing RPC + request: the request value for the RPC + + Returns: + A response object after invoking the continuation callable + """ + new_call_details = await self._intercept_call(client_call_details) + # Call continuation + response = await continuation(new_call_details, request) + return response \ No newline at end of file diff --git a/dapr/clients/grpc/interceptors.py b/dapr/clients/grpc/interceptors.py index adda29c1..15bde185 100644 --- a/dapr/clients/grpc/interceptors.py +++ b/dapr/clients/grpc/interceptors.py @@ -103,7 +103,6 @@ def intercept_unary_unary(self, continuation, client_call_details, request): Returns: A response object after invoking the continuation callable """ - # Pre-process or intercept call new_call_details = self._intercept_call(client_call_details) # Call continuation response = continuation(new_call_details, request) @@ -122,8 +121,6 @@ def intercept_stream_stream(self, continuation, client_call_details, request_ite Returns: A response object after invoking the continuation callable """ - # Pre-process or intercept call - new_call_details = self._intercept_call(client_call_details) # Call continuation response = continuation(new_call_details, request_iterator) diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 3374a121..a69df125 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -54,7 +54,11 @@ def outgoing_request_iterator(): # Create the bidirectional stream self._stream = self._stub.SubscribeTopicEventsAlpha1(outgoing_request_iterator()) self._set_stream_active() - next(self._stream) # discard the initial message + try: + next(self._stream) # discard the initial message + except Exception as e: + raise Exception(f'Error while initializing stream: {e}') + def reconnect_stream(self): self.close() diff --git a/examples/pubsub-streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py index 5716b34c..4af7ee0a 100644 --- a/examples/pubsub-streaming/subscriber.py +++ b/examples/pubsub-streaming/subscriber.py @@ -18,9 +18,13 @@ def main(): with DaprClient() as client: global counter - subscription = client.subscribe( - pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' - ) + try: + subscription = client.subscribe( + pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + ) + except Exception as e: + print(f'Error occurred: {e}') + return try: while counter < 5: @@ -31,6 +35,9 @@ def main(): print('Stream is inactive. 
Retrying...') time.sleep(1) continue + except Exception as e: + print(f'Error occurred: {e}') + pass if message is None: print('No message received within timeout period.') continue From be4cc406b8819291e837fa92ffd87c1868e4011e Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Fri, 11 Oct 2024 19:05:33 +0100 Subject: [PATCH 21/33] Removes unneeded class Signed-off-by: Elena Kolevska --- dapr/aio/clients/grpc/interceptors.py | 3 ++- dapr/clients/grpc/subscription.py | 1 - dapr/common/pubsub/subscription.py | 6 ------ 3 files changed, 2 insertions(+), 8 deletions(-) diff --git a/dapr/aio/clients/grpc/interceptors.py b/dapr/aio/clients/grpc/interceptors.py index 346214dc..bf83cf56 100644 --- a/dapr/aio/clients/grpc/interceptors.py +++ b/dapr/aio/clients/grpc/interceptors.py @@ -50,6 +50,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): return continuation(client_call_details, request) + class DaprClientInterceptorAsync(UnaryUnaryClientInterceptor, StreamStreamClientInterceptor): """The class implements a UnaryUnaryClientInterceptor from grpc to add an interceptor to add additional headers to all calls as needed. @@ -135,4 +136,4 @@ async def intercept_stream_stream(self, continuation, client_call_details, reque new_call_details = await self._intercept_call(client_call_details) # Call continuation response = await continuation(new_call_details, request) - return response \ No newline at end of file + return response diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index a69df125..b5a87080 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -59,7 +59,6 @@ def outgoing_request_iterator(): except Exception as e: raise Exception(f'Error while initializing stream: {e}') - def reconnect_stream(self): self.close() DaprHealth.wait_until_ready() diff --git a/dapr/common/pubsub/subscription.py b/dapr/common/pubsub/subscription.py index 0f96ab6b..ad6f6f56 100644 --- a/dapr/common/pubsub/subscription.py +++ b/dapr/common/pubsub/subscription.py @@ -86,9 +86,3 @@ def _parse_data_content(self): class StreamInactiveError(Exception): pass - - -class PubSubEventStatus: - SUCCESS = 'success' - RETRY = 'retry' - DROP = 'drop' From 46f2923897a17e09c6699a2b6e2af2c2c5b42afb Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 21 Oct 2024 11:24:35 +0100 Subject: [PATCH 22/33] Removes async client Signed-off-by: Elena Kolevska --- dapr/aio/clients/grpc/client.py | 71 +--------- dapr/aio/clients/grpc/subscription.py | 110 ---------------- .../en/python-sdk-docs/python-client.md | 8 +- examples/pubsub-streaming-async/README.md | 122 ----------------- examples/pubsub-streaming-async/publisher.py | 45 ------- .../subscriber-handler.py | 42 ------ examples/pubsub-streaming-async/subscriber.py | 53 -------- tests/clients/test_dapr_grpc_client_async.py | 123 ------------------ 8 files changed, 5 insertions(+), 569 deletions(-) delete mode 100644 dapr/aio/clients/grpc/subscription.py delete mode 100644 examples/pubsub-streaming-async/README.md delete mode 100644 examples/pubsub-streaming-async/publisher.py delete mode 100644 examples/pubsub-streaming-async/subscriber-handler.py delete mode 100644 examples/pubsub-streaming-async/subscriber.py diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index 2b40101c..f9f53498 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -24,7 +24,7 @@ from warnings import warn -from typing import Callable, Dict, Optional, 
Text, Union, Sequence, List, Any, Awaitable +from typing import Callable, Dict, Optional, Text, Union, Sequence, List, Any from typing_extensions import Self from google.protobuf.message import Message as GrpcMessage @@ -39,14 +39,12 @@ AioRpcError, ) -from dapr.aio.clients.grpc.subscription import Subscription from dapr.clients.exceptions import DaprInternalError, DaprGrpcError from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions from dapr.clients.grpc._state import StateOptions, StateItem from dapr.clients.grpc._helpers import getWorkflowRuntimeStatus from dapr.clients.health import DaprHealth from dapr.clients.retry import RetryPolicy -from dapr.common.pubsub.subscription import StreamInactiveError from dapr.conf.helpers import GrpcEndpoint from dapr.conf import settings from dapr.proto import api_v1, api_service_v1, common_v1 @@ -96,7 +94,6 @@ UnlockResponse, GetWorkflowResponse, StartWorkflowResponse, - TopicEventResponse, ) @@ -485,72 +482,6 @@ async def publish_event( return DaprResponse(await call.initial_metadata()) - async def subscribe( - self, - pubsub_name: str, - topic: str, - metadata: Optional[dict] = None, - dead_letter_topic: Optional[str] = None, - ) -> Subscription: - """ - Subscribe to a topic with a bidirectional stream - - Args: - pubsub_name (str): The name of the pubsub component. - topic (str): The name of the topic. - metadata (Optional[dict]): Additional metadata for the subscription. - dead_letter_topic (Optional[str]): Name of the dead-letter topic. - - Returns: - Subscription: The Subscription object managing the stream. - """ - subscription = Subscription(self._stub, pubsub_name, topic, metadata, dead_letter_topic) - await subscription.start() - return subscription - - async def subscribe_with_handler( - self, - pubsub_name: str, - topic: str, - handler_fn: Callable[..., TopicEventResponse], - metadata: Optional[dict] = None, - dead_letter_topic: Optional[str] = None, - ) -> Callable[[], Awaitable[None]]: - """ - Subscribe to a topic with a bidirectional stream and a message handler function - - Args: - pubsub_name (str): The name of the pubsub component. - topic (str): The name of the topic. - handler_fn (Callable[..., TopicEventResponse]): The function to call when a message is received. - metadata (Optional[dict]): Additional metadata for the subscription. - dead_letter_topic (Optional[str]): Name of the dead-letter topic. - - Returns: - Callable[[], Awaitable[None]]: An async function to close the subscription. 
- """ - subscription = await self.subscribe(pubsub_name, topic, metadata, dead_letter_topic) - - async def stream_messages(sub: Subscription): - while True: - try: - message = await sub.next_message() - if message: - response = await handler_fn(message) - if response: - await subscription.respond(message, response.status) - else: - continue - except StreamInactiveError: - break - - async def close_subscription(): - await subscription.close() - - asyncio.create_task(stream_messages(subscription)) - - return close_subscription - async def get_state( self, store_name: str, diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py deleted file mode 100644 index 84542bb4..00000000 --- a/dapr/aio/clients/grpc/subscription.py +++ /dev/null @@ -1,110 +0,0 @@ -import asyncio -from grpc import StatusCode -from grpc.aio import AioRpcError - -from dapr.clients.grpc._response import TopicEventResponse -from dapr.clients.health import DaprHealth -from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage -from dapr.proto import api_v1, appcallback_v1 - - -class Subscription: - def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None): - self._stub = stub - self._pubsub_name = pubsub_name - self._topic = topic - self._metadata = metadata or {} - self._dead_letter_topic = dead_letter_topic or '' - self._stream = None - self._send_queue = asyncio.Queue() - self._stream_active = asyncio.Event() - - async def start(self): - async def outgoing_request_iterator(): - try: - initial_request = api_v1.SubscribeTopicEventsRequestAlpha1( - initial_request=api_v1.SubscribeTopicEventsRequestInitialAlpha1( - pubsub_name=self._pubsub_name, - topic=self._topic, - metadata=self._metadata, - dead_letter_topic=self._dead_letter_topic, - ) - ) - yield initial_request - - while self._stream_active.is_set(): - try: - response = await asyncio.wait_for(self._send_queue.get(), timeout=1.0) - yield response - except asyncio.TimeoutError: - continue - except Exception as e: - raise Exception(f'Error while writing to stream: {e}') - - self._stream = self._stub.SubscribeTopicEventsAlpha1(outgoing_request_iterator()) - self._stream_active.set() - await self._stream.read() # discard the initial message - - async def reconnect_stream(self): - await self.close() - DaprHealth.wait_until_ready() - print('Attempting to reconnect...') - await self.start() - - async def next_message(self): - if not self._stream_active.is_set(): - raise StreamInactiveError('Stream is not active') - - try: - if self._stream is not None: - message = await self._stream.read() - if message is None: - return None - return SubscriptionMessage(message.event_message) - except AioRpcError as e: - if e.code() == StatusCode.UNAVAILABLE: - print( - f'gRPC error while reading from stream: {e.details()}, ' - f'Status Code: {e.code()}. ' - f'Attempting to reconnect...' 
- ) - await self.reconnect_stream() - elif e.code() != StatusCode.CANCELLED: - raise Exception(f'gRPC error while reading from subscription stream: {e} ') - except Exception as e: - raise Exception(f'Error while fetching message: {e}') - - return None - - async def respond(self, message, status): - try: - status = appcallback_v1.TopicEventResponse(status=status.value) - response = api_v1.SubscribeTopicEventsRequestProcessedAlpha1( - id=message.id(), status=status - ) - msg = api_v1.SubscribeTopicEventsRequestAlpha1(event_processed=response) - if not self._stream_active.is_set(): - raise StreamInactiveError('Stream is not active') - await self._send_queue.put(msg) - except Exception as e: - print(f"Can't send message: {e}") - - async def respond_success(self, message): - await self.respond(message, TopicEventResponse('success').status) - - async def respond_retry(self, message): - await self.respond(message, TopicEventResponse('retry').status) - - async def respond_drop(self, message): - await self.respond(message, TopicEventResponse('drop').status) - - async def close(self): - if self._stream: - try: - self._stream.cancel() - self._stream_active.clear() - except AioRpcError as e: - if e.code() != StatusCode.CANCELLED: - raise Exception(f'Error while closing stream: {e}') - except Exception as e: - raise Exception(f'Error while closing stream: {e}') diff --git a/daprdocs/content/en/python-sdk-docs/python-client.md b/daprdocs/content/en/python-sdk-docs/python-client.md index b4e92a9b..539604dd 100644 --- a/daprdocs/content/en/python-sdk-docs/python-client.md +++ b/daprdocs/content/en/python-sdk-docs/python-client.md @@ -261,10 +261,10 @@ You can create a streaming subscription to a PubSub topic using either the `subs or `subscribe_handler` methods. The `subscribe` method returns a `Subscription` object, which allows you to pull messages from the -stream by -calling the `next_message` method. This will block on the main thread while waiting for messages. -When done, you should call the close method to terminate the -subscription and stop receiving messages. +stream by calling the `next_message` method. This will block on the main thread while waiting for +messages. +When done, you should call the close method to terminate the subscription and stop receiving +messages. The `subscribe_with_handler` method accepts a callback function that is executed for each message received from the stream. diff --git a/examples/pubsub-streaming-async/README.md b/examples/pubsub-streaming-async/README.md deleted file mode 100644 index dfa7d27d..00000000 --- a/examples/pubsub-streaming-async/README.md +++ /dev/null @@ -1,122 +0,0 @@ -# Example - Publish and subscribe to messages - -This example utilizes a publisher and a subscriber to show the bidirectional pubsub pattern. -It creates a publisher and calls the `publish_event` method in the `DaprClient`. -In the s`subscriber.py` file it creates a subscriber object that can call the `next_message` method to get new messages from the stream. After processing the new message, it returns a status to the stream. - - -> **Note:** Make sure to use the latest proto bindings - -## Pre-requisites - -- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started) -- [Install Python 3.8+](https://www.python.org/downloads/) - -## Install Dapr python-SDK - - - -```bash -pip3 install dapr -``` - -## Run async example where users control reading messages off the stream - -Run the following command in a terminal/command prompt: - - - -```bash -# 1. 
Start Subscriber -dapr run --app-id python-subscriber --app-protocol grpc python3 subscriber.py -``` - - - -In another terminal/command prompt run: - - - -```bash -# 2. Start Publisher -dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py -``` - - - -## Run async example with a handler function - -Run the following command in a terminal/command prompt: - - - -```bash -# 1. Start Subscriber -dapr run --app-id python-subscriber --app-protocol grpc python3 subscriber-handler.py -``` - - - -In another terminal/command prompt run: - - - -```bash -# 2. Start Publisher -dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py -``` - - - - -## Cleanup - - diff --git a/examples/pubsub-streaming-async/publisher.py b/examples/pubsub-streaming-async/publisher.py deleted file mode 100644 index b9702355..00000000 --- a/examples/pubsub-streaming-async/publisher.py +++ /dev/null @@ -1,45 +0,0 @@ -# ------------------------------------------------------------ -# Copyright 2022 The Dapr Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ------------------------------------------------------------ -import asyncio -import json - -from dapr.aio.clients import DaprClient - - -async def publish_events(): - """ - Publishes events to a pubsub topic asynchronously - """ - - async with DaprClient() as d: - id = 0 - while id < 5: - id += 1 - req_data = {'id': id, 'message': 'hello world'} - - # Create a typed message with content type and body - await d.publish_event( - pubsub_name='pubsub', - topic_name='TOPIC_A', - data=json.dumps(req_data), - data_content_type='application/json', - publish_metadata={'ttlInSeconds': '100', 'rawPayload': 'false'}, - ) - - # Print the request - print(req_data, flush=True) - - await asyncio.sleep(1) - - -asyncio.run(publish_events()) diff --git a/examples/pubsub-streaming-async/subscriber-handler.py b/examples/pubsub-streaming-async/subscriber-handler.py deleted file mode 100644 index f9503f06..00000000 --- a/examples/pubsub-streaming-async/subscriber-handler.py +++ /dev/null @@ -1,42 +0,0 @@ -import asyncio -from dapr.aio.clients import DaprClient -from dapr.clients.grpc._response import TopicEventResponse - -counter = 0 - - -async def process_message(message) -> TopicEventResponse: - """ - Asynchronously processes the message and returns a TopicEventResponse. - """ - - print(f'Processing message: {message.data()} from {message.topic()}...') - global counter - counter += 1 - return TopicEventResponse('success') - - -async def main(): - """ - Main function to subscribe to a pubsub topic and handle messages asynchronously. 
- """ - async with DaprClient() as client: - # Subscribe to the pubsub topic with the message handler - close_fn = await client.subscribe_with_handler( - pubsub_name='pubsub', - topic='TOPIC_A', - handler_fn=process_message, - dead_letter_topic='TOPIC_A_DEAD', - ) - - # Wait until 5 messages are processed - global counter - while counter < 5: - await asyncio.sleep(1) - - print('Closing subscription...') - await close_fn() - - -if __name__ == '__main__': - asyncio.run(main()) diff --git a/examples/pubsub-streaming-async/subscriber.py b/examples/pubsub-streaming-async/subscriber.py deleted file mode 100644 index 0f7da59b..00000000 --- a/examples/pubsub-streaming-async/subscriber.py +++ /dev/null @@ -1,53 +0,0 @@ -import asyncio - -from dapr.aio.clients import DaprClient -from dapr.clients.grpc.subscription import StreamInactiveError - -counter = 0 - - -def process_message(message): - global counter - counter += 1 - # Process the message here - print(f'Processing message: {message.data()} from {message.topic()}...') - return 'success' - - -async def main(): - async with DaprClient() as client: - global counter - subscription = await client.subscribe( - pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' - ) - - try: - while counter < 5: - try: - message = await subscription.next_message() - - except StreamInactiveError: - print('Stream is inactive. Retrying...') - await asyncio.sleep(1) - continue - if message is None: - print('No message received within timeout period.') - continue - - # Process the message - response_status = process_message(message) - - if response_status == 'success': - await subscription.respond_success(message) - elif response_status == 'retry': - await subscription.respond_retry(message) - elif response_status == 'drop': - await subscription.respond_drop(message) - - finally: - print('Closing subscription...') - await subscription.close() - - -if __name__ == '__main__': - asyncio.run(main()) diff --git a/tests/clients/test_dapr_grpc_client_async.py b/tests/clients/test_dapr_grpc_client_async.py index 42bbd830..754abbeb 100644 --- a/tests/clients/test_dapr_grpc_client_async.py +++ b/tests/clients/test_dapr_grpc_client_async.py @@ -24,7 +24,6 @@ from dapr.aio.clients.grpc.client import DaprGrpcClientAsync from dapr.aio.clients import DaprClient from dapr.clients.exceptions import DaprGrpcError -from dapr.common.pubsub.subscription import StreamInactiveError from dapr.proto import common_v1 from .fake_dapr_server import FakeDaprSidecar from dapr.conf import settings @@ -261,128 +260,6 @@ async def test_publish_error(self): data=111, ) - async def test_subscribe_topic(self): - # The fake server we're using sends two messages and then closes the stream - # The client should be able to read both messages, handle the stream closure and reconnect - # which will result in reading the same two messages again. 
- # That's why message 3 should be the same as message 1 - dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') - subscription = await dapr.subscribe(pubsub_name='pubsub', topic='example') - - # First message - text - message1 = await subscription.next_message() - await subscription.respond_success(message1) - - self.assertEqual('111', message1.id()) - self.assertEqual('app1', message1.source()) - self.assertEqual('com.example.type2', message1.type()) - self.assertEqual('1.0', message1.spec_version()) - self.assertEqual('text/plain', message1.data_content_type()) - self.assertEqual('TOPIC_A', message1.topic()) - self.assertEqual('pubsub', message1.pubsub_name()) - self.assertEqual(b'hello2', message1.raw_data()) - self.assertEqual('text/plain', message1.data_content_type()) - self.assertEqual('hello2', message1.data()) - - # Second message - json - message2 = await subscription.next_message() - await subscription.respond_success(message2) - - self.assertEqual('222', message2.id()) - self.assertEqual('app1', message2.source()) - self.assertEqual('com.example.type2', message2.type()) - self.assertEqual('1.0', message2.spec_version()) - self.assertEqual('TOPIC_A', message2.topic()) - self.assertEqual('pubsub', message2.pubsub_name()) - self.assertEqual(b'{"a": 1}', message2.raw_data()) - self.assertEqual('application/json', message2.data_content_type()) - self.assertEqual({'a': 1}, message2.data()) - - # On this call the stream will be closed and return an error, so the message will be none - # but the client will try to reconnect - message3 = await subscription.next_message() - self.assertIsNone(message3) - - # # The client already reconnected and will start reading the messages again - # # Since we're working with a fake server, the messages will be the same - # message4 = await subscription.next_message() - # await subscription.respond_success(message4) - # self.assertEqual('111', message4.id()) - # self.assertEqual('app1', message4.source()) - # self.assertEqual('com.example.type2', message4.type()) - # self.assertEqual('1.0', message4.spec_version()) - # self.assertEqual('text/plain', message4.data_content_type()) - # self.assertEqual('TOPIC_A', message4.topic()) - # self.assertEqual('pubsub', message4.pubsub_name()) - # self.assertEqual(b'hello2', message4.raw_data()) - # self.assertEqual('text/plain', message4.data_content_type()) - # self.assertEqual('hello2', message4.data()) - - await subscription.close() - - async def test_subscribe_topic_early_close(self): - dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') - subscription = await dapr.subscribe(pubsub_name='pubsub', topic='example') - await subscription.close() - - with self.assertRaises(StreamInactiveError): - await subscription.next_message() - - # async def test_subscribe_topic_with_handler(self): - # # The fake server we're using sends two messages and then closes the stream - # # The client should be able to read both messages, handle the stream closure and reconnect - # # which will result in reading the same two messages again. 
- # # That's why message 3 should be the same as message 1 - # dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') - # counter = 0 - # - # async def handler(message): - # nonlocal counter - # if counter == 0: - # self.assertEqual('111', message.id()) - # self.assertEqual('app1', message.source()) - # self.assertEqual('com.example.type2', message.type()) - # self.assertEqual('1.0', message.spec_version()) - # self.assertEqual('text/plain', message.data_content_type()) - # self.assertEqual('TOPIC_A', message.topic()) - # self.assertEqual('pubsub', message.pubsub_name()) - # self.assertEqual(b'hello2', message.raw_data()) - # self.assertEqual('text/plain', message.data_content_type()) - # self.assertEqual('hello2', message.data()) - # elif counter == 1: - # self.assertEqual('222', message.id()) - # self.assertEqual('app1', message.source()) - # self.assertEqual('com.example.type2', message.type()) - # self.assertEqual('1.0', message.spec_version()) - # self.assertEqual('TOPIC_A', message.topic()) - # self.assertEqual('pubsub', message.pubsub_name()) - # self.assertEqual(b'{"a": 1}', message.raw_data()) - # self.assertEqual('application/json', message.data_content_type()) - # self.assertEqual({'a': 1}, message.data()) - # elif counter == 2: - # self.assertEqual('111', message.id()) - # self.assertEqual('app1', message.source()) - # self.assertEqual('com.example.type2', message.type()) - # self.assertEqual('1.0', message.spec_version()) - # self.assertEqual('text/plain', message.data_content_type()) - # self.assertEqual('TOPIC_A', message.topic()) - # self.assertEqual('pubsub', message.pubsub_name()) - # self.assertEqual(b'hello2', message.raw_data()) - # self.assertEqual('text/plain', message.data_content_type()) - # self.assertEqual('hello2', message.data()) - # - # counter += 1 - # - # return TopicEventResponse("success") - # - # close_fn = await dapr.subscribe_with_handler( - # pubsub_name='pubsub', topic='example', handler_fn=handler - # ) - # - # while counter < 3: - # await asyncio.sleep(0.1) # sleep to prevent a busy loop - # await close_fn() - @patch.object(settings, 'DAPR_API_TOKEN', 'test-token') async def test_dapr_api_token_insertion(self): dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') From df84b63ec662fcdfd6498c71e45f7d755addfe3e Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 21 Oct 2024 06:17:39 +0100 Subject: [PATCH 23/33] Fixes missing docker-compose in examples (#736) Signed-off-by: Elena Kolevska --- examples/invoke-binding/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/invoke-binding/README.md b/examples/invoke-binding/README.md index b95f5d7a..d006f76d 100644 --- a/examples/invoke-binding/README.md +++ b/examples/invoke-binding/README.md @@ -26,7 +26,7 @@ name: Kafka install sleep: 30 --> -1. Start the kafka containers using docker-compose +1. 
Start the kafka containers using docker compose ```bash docker compose -f ./docker-compose-single-kafka.yml up -d From 75c3729298ac07ade5c6d465343bb4fe54579380 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 21 Oct 2024 11:31:11 +0100 Subject: [PATCH 24/33] Removes async examples test Signed-off-by: Elena Kolevska --- tox.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/tox.ini b/tox.ini index 78f23086..6400e329 100644 --- a/tox.ini +++ b/tox.ini @@ -51,7 +51,6 @@ commands = ./validate.sh error_handling ./validate.sh pubsub-simple ./validate.sh pubsub-streaming - ./validate.sh pubsub-streaming-async ./validate.sh state_store ./validate.sh state_store_query ./validate.sh secret_store From 8d4ccd23b16ebb5bf049dc8ead06b041488df1cb Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 21 Oct 2024 11:58:52 +0100 Subject: [PATCH 25/33] Small cleanup Signed-off-by: Elena Kolevska --- dapr/clients/grpc/subscription.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index b5a87080..931be8e1 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -71,17 +71,16 @@ def next_message(self): @return: The next message from the queue, or None if no message is received within the timeout. """ - if not self._is_stream_active(): + if not self._is_stream_active() or self._stream is None: raise StreamInactiveError('Stream is not active') + try: # Read the next message from the stream directly - if self._stream is not None: - message = next(self._stream, None) - if message is None: - return None - return SubscriptionMessage(message.event_message) + message = next(self._stream) + return SubscriptionMessage(message.event_message) except RpcError as e: + # If Dapr can't be reached, wait until it's ready and reconnect the stream if e.code() == StatusCode.UNAVAILABLE: print( f'gRPC error while reading from stream: {e.details()}, Status Code: {e.code()}' @@ -95,7 +94,6 @@ def next_message(self): except Exception as e: raise Exception(f'Error while fetching message: {e}') - return None def respond(self, message, status): try: From 533c3391d97fe0221412658952861e42ab1a70d6 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 21 Oct 2024 14:10:33 +0100 Subject: [PATCH 26/33] Split up topic names between tests Signed-off-by: Elena Kolevska --- examples/pubsub-streaming/README.md | 28 +++++++++---------- examples/pubsub-streaming/publisher.py | 10 +++++-- .../pubsub-streaming/subscriber-handler.py | 11 ++++++-- examples/pubsub-streaming/subscriber.py | 10 ++++++- 4 files changed, 40 insertions(+), 19 deletions(-) diff --git a/examples/pubsub-streaming/README.md b/examples/pubsub-streaming/README.md index 4849e791..15664522 100644 --- a/examples/pubsub-streaming/README.md +++ b/examples/pubsub-streaming/README.md @@ -27,11 +27,11 @@ Run the following command in a terminal/command prompt: @@ -63,7 +63,7 @@ sleep: 15 ```bash # 2. Start Publisher -dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check -- python3 publisher.py --topic=TOPIC_A1 ``` @@ -75,11 +75,11 @@ Run the following command in a terminal/command prompt: @@ -111,7 +111,7 @@ sleep: 15 ```bash # 2. 
Start Publisher -dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check -- python3 publisher.py --topic=TOPIC_A2 ``` diff --git a/examples/pubsub-streaming/publisher.py b/examples/pubsub-streaming/publisher.py index fd797470..6ae68c22 100644 --- a/examples/pubsub-streaming/publisher.py +++ b/examples/pubsub-streaming/publisher.py @@ -10,12 +10,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------ - +import argparse import json import time from dapr.clients import DaprClient +parser = argparse.ArgumentParser(description='Publish events to a Dapr pub/sub topic.') +parser.add_argument('--topic', type=str, required=True, help='The topic name to publish to.') +args = parser.parse_args() + +topic_name = args.topic + with DaprClient() as d: id = 0 while id < 5: @@ -25,7 +31,7 @@ # Create a typed message with content type and body resp = d.publish_event( pubsub_name='pubsub', - topic_name='TOPIC_A', + topic_name=topic_name, data=json.dumps(req_data), data_content_type='application/json', publish_metadata={'ttlInSeconds': '100', 'rawPayload': 'false'}, diff --git a/examples/pubsub-streaming/subscriber-handler.py b/examples/pubsub-streaming/subscriber-handler.py index aab840a4..4409fa5a 100644 --- a/examples/pubsub-streaming/subscriber-handler.py +++ b/examples/pubsub-streaming/subscriber-handler.py @@ -1,3 +1,4 @@ +import argparse import time from dapr.clients import DaprClient @@ -5,6 +6,12 @@ counter = 0 +parser = argparse.ArgumentParser(description='Publish events to a Dapr pub/sub topic.') +parser.add_argument('--topic', type=str, required=True, help='The topic name to publish to.') +args = parser.parse_args() + +topic_name = args.topic +dlq_topic_name = topic_name + '_DEAD' def process_message(message): # Process the message here @@ -20,9 +27,9 @@ def main(): # and process them in the `process_message` function close_fn = client.subscribe_with_handler( pubsub_name='pubsub', - topic='TOPIC_A', + topic=topic_name, handler_fn=process_message, - dead_letter_topic='TOPIC_A_DEAD', + dead_letter_topic=dlq_topic_name, ) while counter < 5: diff --git a/examples/pubsub-streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py index 4af7ee0a..2c79235a 100644 --- a/examples/pubsub-streaming/subscriber.py +++ b/examples/pubsub-streaming/subscriber.py @@ -1,3 +1,4 @@ +import argparse import time from dapr.clients import DaprClient @@ -5,6 +6,13 @@ counter = 0 +parser = argparse.ArgumentParser(description='Publish events to a Dapr pub/sub topic.') +parser.add_argument('--topic', type=str, required=True, help='The topic name to publish to.') +args = parser.parse_args() + +topic_name = args.topic +dlq_topic_name = topic_name + '_DEAD' + def process_message(message): global counter @@ -20,7 +28,7 @@ def main(): try: subscription = client.subscribe( - pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + pubsub_name='pubsub', topic=topic_name, dead_letter_topic=dlq_topic_name ) except Exception as e: print(f'Error occurred: {e}') From c70b927d1484b48ad75b2addbe90c285104446df Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 21 Oct 2024 14:16:08 +0100 Subject: [PATCH 27/33] lint Signed-off-by: Elena Kolevska --- dapr/clients/grpc/subscription.py | 2 -- 
examples/pubsub-streaming/subscriber-handler.py | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 931be8e1..80db4fbf 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -74,7 +74,6 @@ def next_message(self): if not self._is_stream_active() or self._stream is None: raise StreamInactiveError('Stream is not active') - try: # Read the next message from the stream directly message = next(self._stream) @@ -94,7 +93,6 @@ def next_message(self): except Exception as e: raise Exception(f'Error while fetching message: {e}') - def respond(self, message, status): try: status = appcallback_v1.TopicEventResponse(status=status.value) diff --git a/examples/pubsub-streaming/subscriber-handler.py b/examples/pubsub-streaming/subscriber-handler.py index 4409fa5a..3a963fd2 100644 --- a/examples/pubsub-streaming/subscriber-handler.py +++ b/examples/pubsub-streaming/subscriber-handler.py @@ -13,6 +13,7 @@ topic_name = args.topic dlq_topic_name = topic_name + '_DEAD' + def process_message(message): # Process the message here global counter From f7f516227a2d794252bb295bf387dc3cdf37f341 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 21 Oct 2024 14:47:40 +0100 Subject: [PATCH 28/33] Revert "Removes async client" This reverts commit cb4b65b201d41e0a7b65beb3ef199b610e699725. Signed-off-by: Elena Kolevska --- dapr/aio/clients/grpc/client.py | 71 +++++++++- dapr/aio/clients/grpc/subscription.py | 110 ++++++++++++++++ .../en/python-sdk-docs/python-client.md | 8 +- examples/pubsub-streaming-async/README.md | 122 +++++++++++++++++ examples/pubsub-streaming-async/publisher.py | 45 +++++++ .../subscriber-handler.py | 42 ++++++ examples/pubsub-streaming-async/subscriber.py | 53 ++++++++ tests/clients/test_dapr_grpc_client_async.py | 123 ++++++++++++++++++ 8 files changed, 569 insertions(+), 5 deletions(-) create mode 100644 dapr/aio/clients/grpc/subscription.py create mode 100644 examples/pubsub-streaming-async/README.md create mode 100644 examples/pubsub-streaming-async/publisher.py create mode 100644 examples/pubsub-streaming-async/subscriber-handler.py create mode 100644 examples/pubsub-streaming-async/subscriber.py diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index f9f53498..2b40101c 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -24,7 +24,7 @@ from warnings import warn -from typing import Callable, Dict, Optional, Text, Union, Sequence, List, Any +from typing import Callable, Dict, Optional, Text, Union, Sequence, List, Any, Awaitable from typing_extensions import Self from google.protobuf.message import Message as GrpcMessage @@ -39,12 +39,14 @@ AioRpcError, ) +from dapr.aio.clients.grpc.subscription import Subscription from dapr.clients.exceptions import DaprInternalError, DaprGrpcError from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions from dapr.clients.grpc._state import StateOptions, StateItem from dapr.clients.grpc._helpers import getWorkflowRuntimeStatus from dapr.clients.health import DaprHealth from dapr.clients.retry import RetryPolicy +from dapr.common.pubsub.subscription import StreamInactiveError from dapr.conf.helpers import GrpcEndpoint from dapr.conf import settings from dapr.proto import api_v1, api_service_v1, common_v1 @@ -94,6 +96,7 @@ UnlockResponse, GetWorkflowResponse, StartWorkflowResponse, + TopicEventResponse, ) @@ -482,6 +485,72 @@ async def publish_event( return 
DaprResponse(await call.initial_metadata()) + async def subscribe( + self, + pubsub_name: str, + topic: str, + metadata: Optional[dict] = None, + dead_letter_topic: Optional[str] = None, + ) -> Subscription: + """ + Subscribe to a topic with a bidirectional stream + + Args: + pubsub_name (str): The name of the pubsub component. + topic (str): The name of the topic. + metadata (Optional[dict]): Additional metadata for the subscription. + dead_letter_topic (Optional[str]): Name of the dead-letter topic. + + Returns: + Subscription: The Subscription object managing the stream. + """ + subscription = Subscription(self._stub, pubsub_name, topic, metadata, dead_letter_topic) + await subscription.start() + return subscription + + async def subscribe_with_handler( + self, + pubsub_name: str, + topic: str, + handler_fn: Callable[..., TopicEventResponse], + metadata: Optional[dict] = None, + dead_letter_topic: Optional[str] = None, + ) -> Callable[[], Awaitable[None]]: + """ + Subscribe to a topic with a bidirectional stream and a message handler function + + Args: + pubsub_name (str): The name of the pubsub component. + topic (str): The name of the topic. + handler_fn (Callable[..., TopicEventResponse]): The function to call when a message is received. + metadata (Optional[dict]): Additional metadata for the subscription. + dead_letter_topic (Optional[str]): Name of the dead-letter topic. + + Returns: + Callable[[], Awaitable[None]]: An async function to close the subscription. + """ + subscription = await self.subscribe(pubsub_name, topic, metadata, dead_letter_topic) + + async def stream_messages(sub: Subscription): + while True: + try: + message = await sub.next_message() + if message: + response = await handler_fn(message) + if response: + await subscription.respond(message, response.status) + else: + continue + except StreamInactiveError: + break + + async def close_subscription(): + await subscription.close() + + asyncio.create_task(stream_messages(subscription)) + + return close_subscription + async def get_state( self, store_name: str, diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py new file mode 100644 index 00000000..84542bb4 --- /dev/null +++ b/dapr/aio/clients/grpc/subscription.py @@ -0,0 +1,110 @@ +import asyncio +from grpc import StatusCode +from grpc.aio import AioRpcError + +from dapr.clients.grpc._response import TopicEventResponse +from dapr.clients.health import DaprHealth +from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage +from dapr.proto import api_v1, appcallback_v1 + + +class Subscription: + def __init__(self, stub, pubsub_name, topic, metadata=None, dead_letter_topic=None): + self._stub = stub + self._pubsub_name = pubsub_name + self._topic = topic + self._metadata = metadata or {} + self._dead_letter_topic = dead_letter_topic or '' + self._stream = None + self._send_queue = asyncio.Queue() + self._stream_active = asyncio.Event() + + async def start(self): + async def outgoing_request_iterator(): + try: + initial_request = api_v1.SubscribeTopicEventsRequestAlpha1( + initial_request=api_v1.SubscribeTopicEventsRequestInitialAlpha1( + pubsub_name=self._pubsub_name, + topic=self._topic, + metadata=self._metadata, + dead_letter_topic=self._dead_letter_topic, + ) + ) + yield initial_request + + while self._stream_active.is_set(): + try: + response = await asyncio.wait_for(self._send_queue.get(), timeout=1.0) + yield response + except asyncio.TimeoutError: + continue + except Exception as e: + raise 
Exception(f'Error while writing to stream: {e}') + + self._stream = self._stub.SubscribeTopicEventsAlpha1(outgoing_request_iterator()) + self._stream_active.set() + await self._stream.read() # discard the initial message + + async def reconnect_stream(self): + await self.close() + DaprHealth.wait_until_ready() + print('Attempting to reconnect...') + await self.start() + + async def next_message(self): + if not self._stream_active.is_set(): + raise StreamInactiveError('Stream is not active') + + try: + if self._stream is not None: + message = await self._stream.read() + if message is None: + return None + return SubscriptionMessage(message.event_message) + except AioRpcError as e: + if e.code() == StatusCode.UNAVAILABLE: + print( + f'gRPC error while reading from stream: {e.details()}, ' + f'Status Code: {e.code()}. ' + f'Attempting to reconnect...' + ) + await self.reconnect_stream() + elif e.code() != StatusCode.CANCELLED: + raise Exception(f'gRPC error while reading from subscription stream: {e} ') + except Exception as e: + raise Exception(f'Error while fetching message: {e}') + + return None + + async def respond(self, message, status): + try: + status = appcallback_v1.TopicEventResponse(status=status.value) + response = api_v1.SubscribeTopicEventsRequestProcessedAlpha1( + id=message.id(), status=status + ) + msg = api_v1.SubscribeTopicEventsRequestAlpha1(event_processed=response) + if not self._stream_active.is_set(): + raise StreamInactiveError('Stream is not active') + await self._send_queue.put(msg) + except Exception as e: + print(f"Can't send message: {e}") + + async def respond_success(self, message): + await self.respond(message, TopicEventResponse('success').status) + + async def respond_retry(self, message): + await self.respond(message, TopicEventResponse('retry').status) + + async def respond_drop(self, message): + await self.respond(message, TopicEventResponse('drop').status) + + async def close(self): + if self._stream: + try: + self._stream.cancel() + self._stream_active.clear() + except AioRpcError as e: + if e.code() != StatusCode.CANCELLED: + raise Exception(f'Error while closing stream: {e}') + except Exception as e: + raise Exception(f'Error while closing stream: {e}') diff --git a/daprdocs/content/en/python-sdk-docs/python-client.md b/daprdocs/content/en/python-sdk-docs/python-client.md index 539604dd..b4e92a9b 100644 --- a/daprdocs/content/en/python-sdk-docs/python-client.md +++ b/daprdocs/content/en/python-sdk-docs/python-client.md @@ -261,10 +261,10 @@ You can create a streaming subscription to a PubSub topic using either the `subs or `subscribe_handler` methods. The `subscribe` method returns a `Subscription` object, which allows you to pull messages from the -stream by calling the `next_message` method. This will block on the main thread while waiting for -messages. -When done, you should call the close method to terminate the subscription and stop receiving -messages. +stream by +calling the `next_message` method. This will block on the main thread while waiting for messages. +When done, you should call the close method to terminate the +subscription and stop receiving messages. The `subscribe_with_handler` method accepts a callback function that is executed for each message received from the stream. 
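To make the documented flow above concrete, here is a condensed sketch based on the async example added in this series (`dapr.aio.clients`); the pubsub component name `pubsub` and the topic names are just the values used in the examples, and error handling such as `StreamInactiveError` is omitted for brevity:

```python
import asyncio

from dapr.aio.clients import DaprClient


async def main():
    async with DaprClient() as client:
        # Pull-style subscription: read messages off the stream explicitly.
        subscription = await client.subscribe(
            pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD'
        )
        try:
            # Blocks until a message arrives (or returns None if the stream yields nothing).
            message = await subscription.next_message()
            if message is not None:
                print(f'Processing {message.id()}: {message.data()} from {message.topic()}')
                # Acknowledge the message back on the stream.
                await subscription.respond_success(message)
        finally:
            # Close the stream when done to stop receiving messages.
            await subscription.close()


asyncio.run(main())
```

The handler-based variant (`subscribe_with_handler`) wraps this same loop internally: the callback is invoked for each message and its `TopicEventResponse` is sent back on the stream, and the returned function closes the subscription, as shown in the `subscriber-handler.py` example below.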
diff --git a/examples/pubsub-streaming-async/README.md b/examples/pubsub-streaming-async/README.md new file mode 100644 index 00000000..dfa7d27d --- /dev/null +++ b/examples/pubsub-streaming-async/README.md @@ -0,0 +1,122 @@ +# Example - Publish and subscribe to messages + +This example utilizes a publisher and a subscriber to show the bidirectional pubsub pattern. +It creates a publisher and calls the `publish_event` method in the `DaprClient`. +In the s`subscriber.py` file it creates a subscriber object that can call the `next_message` method to get new messages from the stream. After processing the new message, it returns a status to the stream. + + +> **Note:** Make sure to use the latest proto bindings + +## Pre-requisites + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started) +- [Install Python 3.8+](https://www.python.org/downloads/) + +## Install Dapr python-SDK + + + +```bash +pip3 install dapr +``` + +## Run async example where users control reading messages off the stream + +Run the following command in a terminal/command prompt: + + + +```bash +# 1. Start Subscriber +dapr run --app-id python-subscriber --app-protocol grpc python3 subscriber.py +``` + + + +In another terminal/command prompt run: + + + +```bash +# 2. Start Publisher +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +``` + + + +## Run async example with a handler function + +Run the following command in a terminal/command prompt: + + + +```bash +# 1. Start Subscriber +dapr run --app-id python-subscriber --app-protocol grpc python3 subscriber-handler.py +``` + + + +In another terminal/command prompt run: + + + +```bash +# 2. Start Publisher +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +``` + + + + +## Cleanup + + diff --git a/examples/pubsub-streaming-async/publisher.py b/examples/pubsub-streaming-async/publisher.py new file mode 100644 index 00000000..b9702355 --- /dev/null +++ b/examples/pubsub-streaming-async/publisher.py @@ -0,0 +1,45 @@ +# ------------------------------------------------------------ +# Copyright 2022 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ------------------------------------------------------------ +import asyncio +import json + +from dapr.aio.clients import DaprClient + + +async def publish_events(): + """ + Publishes events to a pubsub topic asynchronously + """ + + async with DaprClient() as d: + id = 0 + while id < 5: + id += 1 + req_data = {'id': id, 'message': 'hello world'} + + # Create a typed message with content type and body + await d.publish_event( + pubsub_name='pubsub', + topic_name='TOPIC_A', + data=json.dumps(req_data), + data_content_type='application/json', + publish_metadata={'ttlInSeconds': '100', 'rawPayload': 'false'}, + ) + + # Print the request + print(req_data, flush=True) + + await asyncio.sleep(1) + + +asyncio.run(publish_events()) diff --git a/examples/pubsub-streaming-async/subscriber-handler.py b/examples/pubsub-streaming-async/subscriber-handler.py new file mode 100644 index 00000000..f9503f06 --- /dev/null +++ b/examples/pubsub-streaming-async/subscriber-handler.py @@ -0,0 +1,42 @@ +import asyncio +from dapr.aio.clients import DaprClient +from dapr.clients.grpc._response import TopicEventResponse + +counter = 0 + + +async def process_message(message) -> TopicEventResponse: + """ + Asynchronously processes the message and returns a TopicEventResponse. + """ + + print(f'Processing message: {message.data()} from {message.topic()}...') + global counter + counter += 1 + return TopicEventResponse('success') + + +async def main(): + """ + Main function to subscribe to a pubsub topic and handle messages asynchronously. + """ + async with DaprClient() as client: + # Subscribe to the pubsub topic with the message handler + close_fn = await client.subscribe_with_handler( + pubsub_name='pubsub', + topic='TOPIC_A', + handler_fn=process_message, + dead_letter_topic='TOPIC_A_DEAD', + ) + + # Wait until 5 messages are processed + global counter + while counter < 5: + await asyncio.sleep(1) + + print('Closing subscription...') + await close_fn() + + +if __name__ == '__main__': + asyncio.run(main()) diff --git a/examples/pubsub-streaming-async/subscriber.py b/examples/pubsub-streaming-async/subscriber.py new file mode 100644 index 00000000..0f7da59b --- /dev/null +++ b/examples/pubsub-streaming-async/subscriber.py @@ -0,0 +1,53 @@ +import asyncio + +from dapr.aio.clients import DaprClient +from dapr.clients.grpc.subscription import StreamInactiveError + +counter = 0 + + +def process_message(message): + global counter + counter += 1 + # Process the message here + print(f'Processing message: {message.data()} from {message.topic()}...') + return 'success' + + +async def main(): + async with DaprClient() as client: + global counter + subscription = await client.subscribe( + pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + ) + + try: + while counter < 5: + try: + message = await subscription.next_message() + + except StreamInactiveError: + print('Stream is inactive. 
Retrying...') + await asyncio.sleep(1) + continue + if message is None: + print('No message received within timeout period.') + continue + + # Process the message + response_status = process_message(message) + + if response_status == 'success': + await subscription.respond_success(message) + elif response_status == 'retry': + await subscription.respond_retry(message) + elif response_status == 'drop': + await subscription.respond_drop(message) + + finally: + print('Closing subscription...') + await subscription.close() + + +if __name__ == '__main__': + asyncio.run(main()) diff --git a/tests/clients/test_dapr_grpc_client_async.py b/tests/clients/test_dapr_grpc_client_async.py index 754abbeb..42bbd830 100644 --- a/tests/clients/test_dapr_grpc_client_async.py +++ b/tests/clients/test_dapr_grpc_client_async.py @@ -24,6 +24,7 @@ from dapr.aio.clients.grpc.client import DaprGrpcClientAsync from dapr.aio.clients import DaprClient from dapr.clients.exceptions import DaprGrpcError +from dapr.common.pubsub.subscription import StreamInactiveError from dapr.proto import common_v1 from .fake_dapr_server import FakeDaprSidecar from dapr.conf import settings @@ -260,6 +261,128 @@ async def test_publish_error(self): data=111, ) + async def test_subscribe_topic(self): + # The fake server we're using sends two messages and then closes the stream + # The client should be able to read both messages, handle the stream closure and reconnect + # which will result in reading the same two messages again. + # That's why message 3 should be the same as message 1 + dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') + subscription = await dapr.subscribe(pubsub_name='pubsub', topic='example') + + # First message - text + message1 = await subscription.next_message() + await subscription.respond_success(message1) + + self.assertEqual('111', message1.id()) + self.assertEqual('app1', message1.source()) + self.assertEqual('com.example.type2', message1.type()) + self.assertEqual('1.0', message1.spec_version()) + self.assertEqual('text/plain', message1.data_content_type()) + self.assertEqual('TOPIC_A', message1.topic()) + self.assertEqual('pubsub', message1.pubsub_name()) + self.assertEqual(b'hello2', message1.raw_data()) + self.assertEqual('text/plain', message1.data_content_type()) + self.assertEqual('hello2', message1.data()) + + # Second message - json + message2 = await subscription.next_message() + await subscription.respond_success(message2) + + self.assertEqual('222', message2.id()) + self.assertEqual('app1', message2.source()) + self.assertEqual('com.example.type2', message2.type()) + self.assertEqual('1.0', message2.spec_version()) + self.assertEqual('TOPIC_A', message2.topic()) + self.assertEqual('pubsub', message2.pubsub_name()) + self.assertEqual(b'{"a": 1}', message2.raw_data()) + self.assertEqual('application/json', message2.data_content_type()) + self.assertEqual({'a': 1}, message2.data()) + + # On this call the stream will be closed and return an error, so the message will be none + # but the client will try to reconnect + message3 = await subscription.next_message() + self.assertIsNone(message3) + + # # The client already reconnected and will start reading the messages again + # # Since we're working with a fake server, the messages will be the same + # message4 = await subscription.next_message() + # await subscription.respond_success(message4) + # self.assertEqual('111', message4.id()) + # self.assertEqual('app1', message4.source()) + # self.assertEqual('com.example.type2', 
message4.type()) + # self.assertEqual('1.0', message4.spec_version()) + # self.assertEqual('text/plain', message4.data_content_type()) + # self.assertEqual('TOPIC_A', message4.topic()) + # self.assertEqual('pubsub', message4.pubsub_name()) + # self.assertEqual(b'hello2', message4.raw_data()) + # self.assertEqual('text/plain', message4.data_content_type()) + # self.assertEqual('hello2', message4.data()) + + await subscription.close() + + async def test_subscribe_topic_early_close(self): + dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') + subscription = await dapr.subscribe(pubsub_name='pubsub', topic='example') + await subscription.close() + + with self.assertRaises(StreamInactiveError): + await subscription.next_message() + + # async def test_subscribe_topic_with_handler(self): + # # The fake server we're using sends two messages and then closes the stream + # # The client should be able to read both messages, handle the stream closure and reconnect + # # which will result in reading the same two messages again. + # # That's why message 3 should be the same as message 1 + # dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') + # counter = 0 + # + # async def handler(message): + # nonlocal counter + # if counter == 0: + # self.assertEqual('111', message.id()) + # self.assertEqual('app1', message.source()) + # self.assertEqual('com.example.type2', message.type()) + # self.assertEqual('1.0', message.spec_version()) + # self.assertEqual('text/plain', message.data_content_type()) + # self.assertEqual('TOPIC_A', message.topic()) + # self.assertEqual('pubsub', message.pubsub_name()) + # self.assertEqual(b'hello2', message.raw_data()) + # self.assertEqual('text/plain', message.data_content_type()) + # self.assertEqual('hello2', message.data()) + # elif counter == 1: + # self.assertEqual('222', message.id()) + # self.assertEqual('app1', message.source()) + # self.assertEqual('com.example.type2', message.type()) + # self.assertEqual('1.0', message.spec_version()) + # self.assertEqual('TOPIC_A', message.topic()) + # self.assertEqual('pubsub', message.pubsub_name()) + # self.assertEqual(b'{"a": 1}', message.raw_data()) + # self.assertEqual('application/json', message.data_content_type()) + # self.assertEqual({'a': 1}, message.data()) + # elif counter == 2: + # self.assertEqual('111', message.id()) + # self.assertEqual('app1', message.source()) + # self.assertEqual('com.example.type2', message.type()) + # self.assertEqual('1.0', message.spec_version()) + # self.assertEqual('text/plain', message.data_content_type()) + # self.assertEqual('TOPIC_A', message.topic()) + # self.assertEqual('pubsub', message.pubsub_name()) + # self.assertEqual(b'hello2', message.raw_data()) + # self.assertEqual('text/plain', message.data_content_type()) + # self.assertEqual('hello2', message.data()) + # + # counter += 1 + # + # return TopicEventResponse("success") + # + # close_fn = await dapr.subscribe_with_handler( + # pubsub_name='pubsub', topic='example', handler_fn=handler + # ) + # + # while counter < 3: + # await asyncio.sleep(0.1) # sleep to prevent a busy loop + # await close_fn() + @patch.object(settings, 'DAPR_API_TOKEN', 'test-token') async def test_dapr_api_token_insertion(self): dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') From da09a548a72bf7fac34b6eb3bee5de9f359c06d9 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 21 Oct 2024 14:15:06 +0100 Subject: [PATCH 29/33] Split up topic names between tests Signed-off-by: Elena Kolevska --- 
examples/pubsub-streaming-async/README.md | 28 +++++++++---------- examples/pubsub-streaming-async/publisher.py | 9 +++++- .../subscriber-handler.py | 12 ++++++-- examples/pubsub-streaming-async/subscriber.py | 10 ++++++- 4 files changed, 41 insertions(+), 18 deletions(-) diff --git a/examples/pubsub-streaming-async/README.md b/examples/pubsub-streaming-async/README.md index dfa7d27d..60c1cdef 100644 --- a/examples/pubsub-streaming-async/README.md +++ b/examples/pubsub-streaming-async/README.md @@ -27,11 +27,11 @@ Run the following command in a terminal/command prompt: @@ -63,7 +63,7 @@ sleep: 15 ```bash # 2. Start Publisher -dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check -- python3 publisher.py --topic=TOPIC_B1 ``` @@ -75,11 +75,11 @@ Run the following command in a terminal/command prompt: @@ -111,7 +111,7 @@ sleep: 15 ```bash # 2. Start Publisher -dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check python3 publisher.py +dapr run --app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 --enable-app-health-check -- python3 publisher.py --topic=TOPIC_B2 ``` diff --git a/examples/pubsub-streaming-async/publisher.py b/examples/pubsub-streaming-async/publisher.py index b9702355..e4abf359 100644 --- a/examples/pubsub-streaming-async/publisher.py +++ b/examples/pubsub-streaming-async/publisher.py @@ -10,11 +10,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------ +import argparse import asyncio import json from dapr.aio.clients import DaprClient +parser = argparse.ArgumentParser(description='Publish events to a Dapr pub/sub topic.') +parser.add_argument('--topic', type=str, required=True, help='The topic name to publish to.') +args = parser.parse_args() + +topic_name = args.topic + async def publish_events(): """ @@ -30,7 +37,7 @@ async def publish_events(): # Create a typed message with content type and body await d.publish_event( pubsub_name='pubsub', - topic_name='TOPIC_A', + topic_name=topic_name, data=json.dumps(req_data), data_content_type='application/json', publish_metadata={'ttlInSeconds': '100', 'rawPayload': 'false'}, diff --git a/examples/pubsub-streaming-async/subscriber-handler.py b/examples/pubsub-streaming-async/subscriber-handler.py index f9503f06..34129ee7 100644 --- a/examples/pubsub-streaming-async/subscriber-handler.py +++ b/examples/pubsub-streaming-async/subscriber-handler.py @@ -1,7 +1,15 @@ +import argparse import asyncio from dapr.aio.clients import DaprClient from dapr.clients.grpc._response import TopicEventResponse +parser = argparse.ArgumentParser(description='Publish events to a Dapr pub/sub topic.') +parser.add_argument('--topic', type=str, required=True, help='The topic name to publish to.') +args = parser.parse_args() + +topic_name = args.topic +dlq_topic_name = topic_name + '_DEAD' + counter = 0 @@ -24,9 +32,9 @@ async def main(): # Subscribe to the pubsub topic with the message handler close_fn = await client.subscribe_with_handler( pubsub_name='pubsub', - topic='TOPIC_A', + topic=topic_name, handler_fn=process_message, - dead_letter_topic='TOPIC_A_DEAD', + dead_letter_topic=dlq_topic_name, ) # Wait until 5 messages are processed diff --git a/examples/pubsub-streaming-async/subscriber.py 
b/examples/pubsub-streaming-async/subscriber.py index 0f7da59b..9a0d34a5 100644 --- a/examples/pubsub-streaming-async/subscriber.py +++ b/examples/pubsub-streaming-async/subscriber.py @@ -1,8 +1,16 @@ +import argparse import asyncio from dapr.aio.clients import DaprClient from dapr.clients.grpc.subscription import StreamInactiveError +parser = argparse.ArgumentParser(description='Publish events to a Dapr pub/sub topic.') +parser.add_argument('--topic', type=str, required=True, help='The topic name to publish to.') +args = parser.parse_args() + +topic_name = args.topic +dlq_topic_name = topic_name + '_DEAD' + counter = 0 @@ -18,7 +26,7 @@ async def main(): async with DaprClient() as client: global counter subscription = await client.subscribe( - pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + pubsub_name='pubsub', topic=topic_name, dead_letter_topic=dlq_topic_name ) try: From 8c9ce85aaf83945f1fdb243206f017edda2e30d1 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 21 Oct 2024 14:53:51 +0100 Subject: [PATCH 30/33] updates fake server to wait for confirmation message before sending new message Signed-off-by: Elena Kolevska --- tests/clients/fake_dapr_server.py | 28 ++++++++++++++------ tests/clients/test_dapr_grpc_client_async.py | 24 ++++++++--------- tox.ini | 1 + 3 files changed, 33 insertions(+), 20 deletions(-) diff --git a/tests/clients/fake_dapr_server.py b/tests/clients/fake_dapr_server.py index 8627ab46..9ae39aa1 100644 --- a/tests/clients/fake_dapr_server.py +++ b/tests/clients/fake_dapr_server.py @@ -179,14 +179,15 @@ def PublishEvent(self, request, context): return empty_pb2.Empty() def SubscribeTopicEventsAlpha1(self, request_iterator, context): - yield api_v1.SubscribeTopicEventsResponseAlpha1( - initial_response=api_v1.SubscribeTopicEventsResponseInitialAlpha1() - ) + for request in request_iterator: + if request.HasField('initial_request'): + yield api_v1.SubscribeTopicEventsResponseAlpha1( + initial_response=api_v1.SubscribeTopicEventsResponseInitialAlpha1() + ) + break extensions = struct_pb2.Struct() - extensions['field1'] = 'value1' - extensions['field2'] = 42 - extensions['field3'] = True + extensions.update({'field1': 'value1', 'field2': 42, 'field3': True}) msg1 = appcallback_v1.TopicEventRequest( id='111', @@ -201,6 +202,10 @@ def SubscribeTopicEventsAlpha1(self, request_iterator, context): ) yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg1) + for request in request_iterator: + if request.HasField('event_processed'): + break + msg2 = appcallback_v1.TopicEventRequest( id='222', topic='TOPIC_A', @@ -214,9 +219,16 @@ def SubscribeTopicEventsAlpha1(self, request_iterator, context): ) yield api_v1.SubscribeTopicEventsResponseAlpha1(event_message=msg2) + for request in request_iterator: + if request.HasField('event_processed'): + break + # On the third message simulate a disconnection - status = status_pb2.Status(code=code_pb2.UNAVAILABLE, message='Simulated disconnection') - context.abort_with_status(rpc_status.to_status(status)) + context.abort_with_status( + rpc_status.to_status( + status_pb2.Status(code=code_pb2.UNAVAILABLE, message='Simulated disconnection') + ) + ) def SaveState(self, request, context): self.check_for_exception(context) diff --git a/tests/clients/test_dapr_grpc_client_async.py b/tests/clients/test_dapr_grpc_client_async.py index 42bbd830..f15a2d1a 100644 --- a/tests/clients/test_dapr_grpc_client_async.py +++ b/tests/clients/test_dapr_grpc_client_async.py @@ -305,18 +305,18 @@ async def 
test_subscribe_topic(self): # # The client already reconnected and will start reading the messages again # # Since we're working with a fake server, the messages will be the same - # message4 = await subscription.next_message() - # await subscription.respond_success(message4) - # self.assertEqual('111', message4.id()) - # self.assertEqual('app1', message4.source()) - # self.assertEqual('com.example.type2', message4.type()) - # self.assertEqual('1.0', message4.spec_version()) - # self.assertEqual('text/plain', message4.data_content_type()) - # self.assertEqual('TOPIC_A', message4.topic()) - # self.assertEqual('pubsub', message4.pubsub_name()) - # self.assertEqual(b'hello2', message4.raw_data()) - # self.assertEqual('text/plain', message4.data_content_type()) - # self.assertEqual('hello2', message4.data()) + message4 = await subscription.next_message() + await subscription.respond_success(message4) + self.assertEqual('111', message4.id()) + self.assertEqual('app1', message4.source()) + self.assertEqual('com.example.type2', message4.type()) + self.assertEqual('1.0', message4.spec_version()) + self.assertEqual('text/plain', message4.data_content_type()) + self.assertEqual('TOPIC_A', message4.topic()) + self.assertEqual('pubsub', message4.pubsub_name()) + self.assertEqual(b'hello2', message4.raw_data()) + self.assertEqual('text/plain', message4.data_content_type()) + self.assertEqual('hello2', message4.data()) await subscription.close() diff --git a/tox.ini b/tox.ini index 6400e329..78f23086 100644 --- a/tox.ini +++ b/tox.ini @@ -51,6 +51,7 @@ commands = ./validate.sh error_handling ./validate.sh pubsub-simple ./validate.sh pubsub-streaming + ./validate.sh pubsub-streaming-async ./validate.sh state_store ./validate.sh state_store_query ./validate.sh secret_store From ffde93505fa9cbb21e50200ef2816ff26282e8d6 Mon Sep 17 00:00:00 2001 From: Elena Kolevska Date: Mon, 21 Oct 2024 15:15:55 +0100 Subject: [PATCH 31/33] Updates protos Signed-off-by: Elena Kolevska --- dapr/proto/common/v1/common_pb2.py | 26 +- dapr/proto/common/v1/common_pb2.pyi | 70 +- dapr/proto/common/v1/common_pb2_grpc.py | 20 - dapr/proto/runtime/v1/appcallback_pb2.py | 34 +- dapr/proto/runtime/v1/appcallback_pb2.pyi | 156 ++- dapr/proto/runtime/v1/appcallback_pb2_grpc.py | 167 +-- dapr/proto/runtime/v1/dapr_pb2.py | 172 +-- dapr/proto/runtime/v1/dapr_pb2.pyi | 869 +++++++------ dapr/proto/runtime/v1/dapr_pb2_grpc.py | 1099 ++++------------- tools/requirements.txt | 2 +- 10 files changed, 1008 insertions(+), 1607 deletions(-) diff --git a/dapr/proto/common/v1/common_pb2.py b/dapr/proto/common/v1/common_pb2.py index 3f7d8f25..a1bf2566 100644 --- a/dapr/proto/common/v1/common_pb2.py +++ b/dapr/proto/common/v1/common_pb2.py @@ -1,22 +1,11 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE # source: dapr/proto/common/v1/common.proto -# Protobuf Python Version: 5.27.2 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 27, - 2, - '', - 'dapr/proto/common/v1/common.proto' -) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -30,13 +19,14 @@ _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'dapr.proto.common.v1.common_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\nio.dapr.v1B\014CommonProtosZ/github.com/dapr/dapr/pkg/proto/common/v1;common\252\002\033Dapr.Client.Autogen.Grpc.v1' - _globals['_STATEITEM_METADATAENTRY']._loaded_options = None - _globals['_STATEITEM_METADATAENTRY']._serialized_options = b'8\001' - _globals['_CONFIGURATIONITEM_METADATAENTRY']._loaded_options = None - _globals['_CONFIGURATIONITEM_METADATAENTRY']._serialized_options = b'8\001' +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\nio.dapr.v1B\014CommonProtosZ/github.com/dapr/dapr/pkg/proto/common/v1;common\252\002\033Dapr.Client.Autogen.Grpc.v1' + _STATEITEM_METADATAENTRY._options = None + _STATEITEM_METADATAENTRY._serialized_options = b'8\001' + _CONFIGURATIONITEM_METADATAENTRY._options = None + _CONFIGURATIONITEM_METADATAENTRY._serialized_options = b'8\001' _globals['_HTTPEXTENSION']._serialized_start=87 _globals['_HTTPEXTENSION']._serialized_end=295 _globals['_HTTPEXTENSION_VERB']._serialized_start=181 diff --git a/dapr/proto/common/v1/common_pb2.pyi b/dapr/proto/common/v1/common_pb2.pyi index cc9537fc..0b23ce54 100644 --- a/dapr/proto/common/v1/common_pb2.pyi +++ b/dapr/proto/common/v1/common_pb2.pyi @@ -13,7 +13,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ - import builtins import collections.abc import google.protobuf.any_pb2 @@ -31,13 +30,13 @@ else: DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -@typing.final +@typing_extensions.final class HTTPExtension(google.protobuf.message.Message): """HTTPExtension includes HTTP verb and querystring when Dapr runtime delivers HTTP content. For example, when callers calls http invoke api - POST http://localhost:3500/v1.0/invoke//method/?query1=value1&query2=value2 + `POST http://localhost:3500/v1.0/invoke//method/?query1=value1&query2=value2` Dapr runtime will parse POST as a verb and extract querystring to quersytring map. """ @@ -90,11 +89,11 @@ class HTTPExtension(google.protobuf.message.Message): verb: global___HTTPExtension.Verb.ValueType = ..., querystring: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["querystring", b"querystring", "verb", b"verb"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["querystring", b"querystring", "verb", b"verb"]) -> None: ... 
global___HTTPExtension = HTTPExtension -@typing.final +@typing_extensions.final class InvokeRequest(google.protobuf.message.Message): """InvokeRequest is the message to invoke a method with the data. This message is used in InvokeService of Dapr gRPC Service and OnInvoke @@ -109,18 +108,17 @@ class InvokeRequest(google.protobuf.message.Message): HTTP_EXTENSION_FIELD_NUMBER: builtins.int method: builtins.str """Required. method is a method name which will be invoked by caller.""" - content_type: builtins.str - """The type of data content. - - This field is required if data delivers http request body - Otherwise, this is optional. - """ @property def data(self) -> google.protobuf.any_pb2.Any: """Required in unary RPCs. Bytes value or Protobuf message which caller sent. Dapr treats Any.value as bytes type if Any.type_url is unset. """ + content_type: builtins.str + """The type of data content. + This field is required if data delivers http request body + Otherwise, this is optional. + """ @property def http_extension(self) -> global___HTTPExtension: """HTTP specific fields if request conveys http-compatible request. @@ -128,7 +126,6 @@ class InvokeRequest(google.protobuf.message.Message): This field is required for http-compatible request. Otherwise, this field is optional. """ - def __init__( self, *, @@ -137,12 +134,12 @@ class InvokeRequest(google.protobuf.message.Message): content_type: builtins.str = ..., http_extension: global___HTTPExtension | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["data", b"data", "http_extension", b"http_extension"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["content_type", b"content_type", "data", b"data", "http_extension", b"http_extension", "method", b"method"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["data", b"data", "http_extension", b"http_extension"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["content_type", b"content_type", "data", b"data", "http_extension", b"http_extension", "method", b"method"]) -> None: ... global___InvokeRequest = InvokeRequest -@typing.final +@typing_extensions.final class InvokeResponse(google.protobuf.message.Message): """InvokeResponse is the response message including data and its content type from app callback. @@ -154,24 +151,23 @@ class InvokeResponse(google.protobuf.message.Message): DATA_FIELD_NUMBER: builtins.int CONTENT_TYPE_FIELD_NUMBER: builtins.int - content_type: builtins.str - """Required. The type of data content.""" @property def data(self) -> google.protobuf.any_pb2.Any: """Required in unary RPCs. The content body of InvokeService response.""" - + content_type: builtins.str + """Required. The type of data content.""" def __init__( self, *, data: google.protobuf.any_pb2.Any | None = ..., content_type: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["data", b"data"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["content_type", b"content_type", "data", b"data"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["data", b"data"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["content_type", b"content_type", "data", b"data"]) -> None: ... global___InvokeResponse = InvokeResponse -@typing.final +@typing_extensions.final class StreamPayload(google.protobuf.message.Message): """Chunk of data sent in a streaming request or response. 
This is used in requests including InternalInvokeRequestStream. @@ -195,17 +191,17 @@ class StreamPayload(google.protobuf.message.Message): data: builtins.bytes = ..., seq: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "seq", b"seq"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "seq", b"seq"]) -> None: ... global___StreamPayload = StreamPayload -@typing.final +@typing_extensions.final class StateItem(google.protobuf.message.Message): """StateItem represents state key, value, and additional options to save state.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -219,7 +215,7 @@ class StateItem(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... KEY_FIELD_NUMBER: builtins.int VALUE_FIELD_NUMBER: builtins.int @@ -235,15 +231,12 @@ class StateItem(google.protobuf.message.Message): """The entity tag which represents the specific version of data. The exact ETag format is defined by the corresponding data store. """ - @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be passed to state store component.""" - @property def options(self) -> global___StateOptions: """Options for concurrency and consistency to save the state.""" - def __init__( self, *, @@ -253,12 +246,12 @@ class StateItem(google.protobuf.message.Message): metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., options: global___StateOptions | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["etag", b"etag", "options", b"options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["etag", b"etag", "key", b"key", "metadata", b"metadata", "options", b"options", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["etag", b"etag", "options", b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["etag", b"etag", "key", b"key", "metadata", b"metadata", "options", b"options", "value", b"value"]) -> None: ... global___StateItem = StateItem -@typing.final +@typing_extensions.final class Etag(google.protobuf.message.Message): """Etag represents a state item version""" @@ -272,11 +265,11 @@ class Etag(google.protobuf.message.Message): *, value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... global___Etag = Etag -@typing.final +@typing_extensions.final class StateOptions(google.protobuf.message.Message): """StateOptions configures concurrency and consistency for state operations""" @@ -326,17 +319,17 @@ class StateOptions(google.protobuf.message.Message): concurrency: global___StateOptions.StateConcurrency.ValueType = ..., consistency: global___StateOptions.StateConsistency.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["concurrency", b"concurrency", "consistency", b"consistency"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["concurrency", b"concurrency", "consistency", b"consistency"]) -> None: ... global___StateOptions = StateOptions -@typing.final +@typing_extensions.final class ConfigurationItem(google.protobuf.message.Message): """ConfigurationItem represents all the configuration with its name(key).""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -350,7 +343,7 @@ class ConfigurationItem(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... VALUE_FIELD_NUMBER: builtins.int VERSION_FIELD_NUMBER: builtins.int @@ -362,7 +355,6 @@ class ConfigurationItem(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """the metadata which will be passed to/from configuration store component.""" - def __init__( self, *, @@ -370,6 +362,6 @@ class ConfigurationItem(google.protobuf.message.Message): version: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["metadata", b"metadata", "value", b"value", "version", b"version"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "value", b"value", "version", b"version"]) -> None: ... global___ConfigurationItem = ConfigurationItem diff --git a/dapr/proto/common/v1/common_pb2_grpc.py b/dapr/proto/common/v1/common_pb2_grpc.py index 310e7b40..2daafffe 100644 --- a/dapr/proto/common/v1/common_pb2_grpc.py +++ b/dapr/proto/common/v1/common_pb2_grpc.py @@ -1,24 +1,4 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" import grpc -import warnings - -GRPC_GENERATED_VERSION = '1.66.1' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in dapr/proto/common/v1/common_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) diff --git a/dapr/proto/runtime/v1/appcallback_pb2.py b/dapr/proto/runtime/v1/appcallback_pb2.py index 118d1959..f661ff99 100644 --- a/dapr/proto/runtime/v1/appcallback_pb2.py +++ b/dapr/proto/runtime/v1/appcallback_pb2.py @@ -1,22 +1,11 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE # source: dapr/proto/runtime/v1/appcallback.proto -# Protobuf Python Version: 5.27.2 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 27, - 2, - '', - 'dapr/proto/runtime/v1/appcallback.proto' -) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -33,17 +22,18 @@ _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'dapr.proto.runtime.v1.appcallback_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\nio.dapr.v1B\025DaprAppCallbackProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\252\002 Dapr.AppCallback.Autogen.Grpc.v1' - _globals['_TOPICEVENTBULKREQUESTENTRY_METADATAENTRY']._loaded_options = None - _globals['_TOPICEVENTBULKREQUESTENTRY_METADATAENTRY']._serialized_options = b'8\001' - _globals['_TOPICEVENTBULKREQUEST_METADATAENTRY']._loaded_options = None - _globals['_TOPICEVENTBULKREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_BINDINGEVENTREQUEST_METADATAENTRY']._loaded_options = None - _globals['_BINDINGEVENTREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_TOPICSUBSCRIPTION_METADATAENTRY']._loaded_options = None - _globals['_TOPICSUBSCRIPTION_METADATAENTRY']._serialized_options = b'8\001' +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\nio.dapr.v1B\025DaprAppCallbackProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\252\002 Dapr.AppCallback.Autogen.Grpc.v1' + _TOPICEVENTBULKREQUESTENTRY_METADATAENTRY._options = None + _TOPICEVENTBULKREQUESTENTRY_METADATAENTRY._serialized_options = b'8\001' + _TOPICEVENTBULKREQUEST_METADATAENTRY._options = None + _TOPICEVENTBULKREQUEST_METADATAENTRY._serialized_options = b'8\001' + _BINDINGEVENTREQUEST_METADATAENTRY._options = None + _BINDINGEVENTREQUEST_METADATAENTRY._serialized_options = b'8\001' + _TOPICSUBSCRIPTION_METADATAENTRY._options = None + _TOPICSUBSCRIPTION_METADATAENTRY._serialized_options = b'8\001' _globals['_JOBEVENTREQUEST']._serialized_start=188 _globals['_JOBEVENTREQUEST']._serialized_end=354 _globals['_JOBEVENTRESPONSE']._serialized_start=356 diff --git a/dapr/proto/runtime/v1/appcallback_pb2.pyi b/dapr/proto/runtime/v1/appcallback_pb2.pyi index 6c12dc57..b302559f 100644 --- a/dapr/proto/runtime/v1/appcallback_pb2.pyi +++ b/dapr/proto/runtime/v1/appcallback_pb2.pyi @@ -13,7 +13,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" - import builtins import collections.abc import dapr.proto.common.v1.common_pb2 @@ -33,7 +32,7 @@ else: DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -@typing.final +@typing_extensions.final class JobEventRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -44,6 +43,9 @@ class JobEventRequest(google.protobuf.message.Message): HTTP_EXTENSION_FIELD_NUMBER: builtins.int name: builtins.str """Job name.""" + @property + def data(self) -> google.protobuf.any_pb2.Any: + """Job data to be sent back to app.""" method: builtins.str """Required. method is a method name which will be invoked by caller.""" content_type: builtins.str @@ -52,10 +54,6 @@ class JobEventRequest(google.protobuf.message.Message): This field is required if data delivers http request body Otherwise, this is optional. """ - @property - def data(self) -> google.protobuf.any_pb2.Any: - """Job data to be sent back to app.""" - @property def http_extension(self) -> dapr.proto.common.v1.common_pb2.HTTPExtension: """HTTP specific fields if request conveys http-compatible request. @@ -63,7 +61,6 @@ class JobEventRequest(google.protobuf.message.Message): This field is required for http-compatible request. Otherwise, this field is optional. """ - def __init__( self, *, @@ -73,12 +70,12 @@ class JobEventRequest(google.protobuf.message.Message): content_type: builtins.str = ..., http_extension: dapr.proto.common.v1.common_pb2.HTTPExtension | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["data", b"data", "http_extension", b"http_extension"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["content_type", b"content_type", "data", b"data", "http_extension", b"http_extension", "method", b"method", "name", b"name"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["data", b"data", "http_extension", b"http_extension"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["content_type", b"content_type", "data", b"data", "http_extension", b"http_extension", "method", b"method", "name", b"name"]) -> None: ... global___JobEventRequest = JobEventRequest -@typing.final +@typing_extensions.final class JobEventResponse(google.protobuf.message.Message): """JobEventResponse is the response from the app when a job is triggered.""" @@ -90,7 +87,7 @@ class JobEventResponse(google.protobuf.message.Message): global___JobEventResponse = JobEventResponse -@typing.final +@typing_extensions.final class TopicEventRequest(google.protobuf.message.Message): """TopicEventRequest message is compatible with CloudEvent spec v1.0 https://github.com/cloudevents/spec/blob/v1.0/spec.md @@ -139,7 +136,6 @@ class TopicEventRequest(google.protobuf.message.Message): @property def extensions(self) -> google.protobuf.struct_pb2.Struct: """The map of additional custom properties to be sent to the app. These are considered to be cloud event extensions.""" - def __init__( self, *, @@ -154,12 +150,12 @@ class TopicEventRequest(google.protobuf.message.Message): path: builtins.str = ..., extensions: google.protobuf.struct_pb2.Struct | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["extensions", b"extensions"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["data", b"data", "data_content_type", b"data_content_type", "extensions", b"extensions", "id", b"id", "path", b"path", "pubsub_name", b"pubsub_name", "source", b"source", "spec_version", b"spec_version", "topic", b"topic", "type", b"type"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["extensions", b"extensions"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "data_content_type", b"data_content_type", "extensions", b"extensions", "id", b"id", "path", b"path", "pubsub_name", b"pubsub_name", "source", b"source", "spec_version", b"spec_version", "topic", b"topic", "type", b"type"]) -> None: ... global___TopicEventRequest = TopicEventRequest -@typing.final +@typing_extensions.final class TopicEventResponse(google.protobuf.message.Message): """TopicEventResponse is response from app on published message""" @@ -196,11 +192,11 @@ class TopicEventResponse(google.protobuf.message.Message): *, status: global___TopicEventResponse.TopicEventResponseStatus.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... global___TopicEventResponse = TopicEventResponse -@typing.final +@typing_extensions.final class TopicEventCERequest(google.protobuf.message.Message): """TopicEventCERequest message is compatible with CloudEvent spec v1.0""" @@ -228,7 +224,6 @@ class TopicEventCERequest(google.protobuf.message.Message): @property def extensions(self) -> google.protobuf.struct_pb2.Struct: """Custom attributes which includes cloud event extensions.""" - def __init__( self, *, @@ -240,18 +235,18 @@ class TopicEventCERequest(google.protobuf.message.Message): data: builtins.bytes = ..., extensions: google.protobuf.struct_pb2.Struct | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["extensions", b"extensions"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "data_content_type", b"data_content_type", "extensions", b"extensions", "id", b"id", "source", b"source", "spec_version", b"spec_version", "type", b"type"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["extensions", b"extensions"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "data_content_type", b"data_content_type", "extensions", b"extensions", "id", b"id", "source", b"source", "spec_version", b"spec_version", "type", b"type"]) -> None: ... global___TopicEventCERequest = TopicEventCERequest -@typing.final +@typing_extensions.final class TopicEventBulkRequestEntry(google.protobuf.message.Message): """TopicEventBulkRequestEntry represents a single message inside a bulk request""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -265,7 +260,7 @@ class TopicEventBulkRequestEntry(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
ENTRY_ID_FIELD_NUMBER: builtins.int BYTES_FIELD_NUMBER: builtins.int @@ -275,14 +270,13 @@ class TopicEventBulkRequestEntry(google.protobuf.message.Message): entry_id: builtins.str """Unique identifier for the message.""" bytes: builtins.bytes - content_type: builtins.str - """content type of the event contained.""" @property def cloud_event(self) -> global___TopicEventCERequest: ... + content_type: builtins.str + """content type of the event contained.""" @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata associated with the event.""" - def __init__( self, *, @@ -292,19 +286,19 @@ class TopicEventBulkRequestEntry(google.protobuf.message.Message): content_type: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["bytes", b"bytes", "cloud_event", b"cloud_event", "event", b"event"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["bytes", b"bytes", "cloud_event", b"cloud_event", "content_type", b"content_type", "entry_id", b"entry_id", "event", b"event", "metadata", b"metadata"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["event", b"event"]) -> typing.Literal["bytes", "cloud_event"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["bytes", b"bytes", "cloud_event", b"cloud_event", "event", b"event"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["bytes", b"bytes", "cloud_event", b"cloud_event", "content_type", b"content_type", "entry_id", b"entry_id", "event", b"event", "metadata", b"metadata"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["event", b"event"]) -> typing_extensions.Literal["bytes", "cloud_event"] | None: ... global___TopicEventBulkRequestEntry = TopicEventBulkRequestEntry -@typing.final +@typing_extensions.final class TopicEventBulkRequest(google.protobuf.message.Message): """TopicEventBulkRequest represents request for bulk message""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -318,7 +312,7 @@ class TopicEventBulkRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ID_FIELD_NUMBER: builtins.int ENTRIES_FIELD_NUMBER: builtins.int @@ -329,6 +323,12 @@ class TopicEventBulkRequest(google.protobuf.message.Message): PATH_FIELD_NUMBER: builtins.int id: builtins.str """Unique identifier for the bulk request.""" + @property + def entries(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TopicEventBulkRequestEntry]: + """The list of items inside this bulk request.""" + @property + def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """The metadata associated with the this bulk request.""" topic: builtins.str """The pubsub topic which publisher sent to.""" pubsub_name: builtins.str @@ -339,14 +339,6 @@ class TopicEventBulkRequest(google.protobuf.message.Message): """The matching path from TopicSubscription/routes (if specified) for this event. 
This value is used by OnTopicEvent to "switch" inside the handler. """ - @property - def entries(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TopicEventBulkRequestEntry]: - """The list of items inside this bulk request.""" - - @property - def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: - """The metadata associated with the this bulk request.""" - def __init__( self, *, @@ -358,11 +350,11 @@ class TopicEventBulkRequest(google.protobuf.message.Message): type: builtins.str = ..., path: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entries", b"entries", "id", b"id", "metadata", b"metadata", "path", b"path", "pubsub_name", b"pubsub_name", "topic", b"topic", "type", b"type"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entries", b"entries", "id", b"id", "metadata", b"metadata", "path", b"path", "pubsub_name", b"pubsub_name", "topic", b"topic", "type", b"type"]) -> None: ... global___TopicEventBulkRequest = TopicEventBulkRequest -@typing.final +@typing_extensions.final class TopicEventBulkResponseEntry(google.protobuf.message.Message): """TopicEventBulkResponseEntry Represents single response, as part of TopicEventBulkResponse, to be sent by subscibed App for the corresponding single message during bulk subscribe @@ -382,11 +374,11 @@ class TopicEventBulkResponseEntry(google.protobuf.message.Message): entry_id: builtins.str = ..., status: global___TopicEventResponse.TopicEventResponseStatus.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entry_id", b"entry_id", "status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entry_id", b"entry_id", "status", b"status"]) -> None: ... global___TopicEventBulkResponseEntry = TopicEventBulkResponseEntry -@typing.final +@typing_extensions.final class TopicEventBulkResponse(google.protobuf.message.Message): """AppBulkResponse is response from app on published message""" @@ -396,23 +388,22 @@ class TopicEventBulkResponse(google.protobuf.message.Message): @property def statuses(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TopicEventBulkResponseEntry]: """The list of all responses for the bulk request.""" - def __init__( self, *, statuses: collections.abc.Iterable[global___TopicEventBulkResponseEntry] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["statuses", b"statuses"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["statuses", b"statuses"]) -> None: ... global___TopicEventBulkResponse = TopicEventBulkResponse -@typing.final +@typing_extensions.final class BindingEventRequest(google.protobuf.message.Message): """BindingEventRequest represents input bindings event.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -426,7 +417,7 @@ class BindingEventRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
NAME_FIELD_NUMBER: builtins.int DATA_FIELD_NUMBER: builtins.int @@ -438,7 +429,6 @@ class BindingEventRequest(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata set by the input binging components.""" - def __init__( self, *, @@ -446,11 +436,11 @@ class BindingEventRequest(google.protobuf.message.Message): data: builtins.bytes = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "metadata", b"metadata", "name", b"name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "metadata", b"metadata", "name", b"name"]) -> None: ... global___BindingEventRequest = BindingEventRequest -@typing.final +@typing_extensions.final class BindingEventResponse(google.protobuf.message.Message): """BindingEventResponse includes operations to save state or send data to output bindings optionally. @@ -484,20 +474,18 @@ class BindingEventResponse(google.protobuf.message.Message): CONCURRENCY_FIELD_NUMBER: builtins.int store_name: builtins.str """The name of state store where states are saved.""" - data: builtins.bytes - """The content which will be sent to "to" output bindings.""" - concurrency: global___BindingEventResponse.BindingEventConcurrency.ValueType - """The concurrency of output bindings to send data to - "to" output bindings list. The default is SEQUENTIAL. - """ @property def states(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[dapr.proto.common.v1.common_pb2.StateItem]: """The state key values which will be stored in store_name.""" - @property def to(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """The list of output bindings.""" - + data: builtins.bytes + """The content which will be sent to "to" output bindings.""" + concurrency: global___BindingEventResponse.BindingEventConcurrency.ValueType + """The concurrency of output bindings to send data to + "to" output bindings list. The default is SEQUENTIAL. + """ def __init__( self, *, @@ -507,11 +495,11 @@ class BindingEventResponse(google.protobuf.message.Message): data: builtins.bytes = ..., concurrency: global___BindingEventResponse.BindingEventConcurrency.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["concurrency", b"concurrency", "data", b"data", "states", b"states", "store_name", b"store_name", "to", b"to"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["concurrency", b"concurrency", "data", b"data", "states", b"states", "store_name", b"store_name", "to", b"to"]) -> None: ... global___BindingEventResponse = BindingEventResponse -@typing.final +@typing_extensions.final class ListTopicSubscriptionsResponse(google.protobuf.message.Message): """ListTopicSubscriptionsResponse is the message including the list of the subscribing topics.""" @@ -521,23 +509,22 @@ class ListTopicSubscriptionsResponse(google.protobuf.message.Message): @property def subscriptions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TopicSubscription]: """The list of topics.""" - def __init__( self, *, subscriptions: collections.abc.Iterable[global___TopicSubscription] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["subscriptions", b"subscriptions"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["subscriptions", b"subscriptions"]) -> None: ... global___ListTopicSubscriptionsResponse = ListTopicSubscriptionsResponse -@typing.final +@typing_extensions.final class TopicSubscription(google.protobuf.message.Message): """TopicSubscription represents topic and metadata.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -551,7 +538,7 @@ class TopicSubscription(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... PUBSUB_NAME_FIELD_NUMBER: builtins.int TOPIC_FIELD_NUMBER: builtins.int @@ -563,22 +550,19 @@ class TopicSubscription(google.protobuf.message.Message): """Required. The name of the pubsub containing the topic below to subscribe to.""" topic: builtins.str """Required. The name of topic which will be subscribed""" - dead_letter_topic: builtins.str - """The optional dead letter queue for this topic to send events to.""" @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The optional properties used for this topic's subscription e.g. session id""" - @property def routes(self) -> global___TopicRoutes: """The optional routing rules to match against. In the gRPC interface, OnTopicEvent is still invoked but the matching path is sent in the TopicEventRequest. """ - + dead_letter_topic: builtins.str + """The optional dead letter queue for this topic to send events to.""" @property def bulk_subscribe(self) -> global___BulkSubscribeConfig: """The optional bulk subscribe settings for this topic.""" - def __init__( self, *, @@ -589,34 +573,33 @@ class TopicSubscription(google.protobuf.message.Message): dead_letter_topic: builtins.str = ..., bulk_subscribe: global___BulkSubscribeConfig | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["bulk_subscribe", b"bulk_subscribe", "routes", b"routes"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["bulk_subscribe", b"bulk_subscribe", "dead_letter_topic", b"dead_letter_topic", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "routes", b"routes", "topic", b"topic"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["bulk_subscribe", b"bulk_subscribe", "routes", b"routes"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["bulk_subscribe", b"bulk_subscribe", "dead_letter_topic", b"dead_letter_topic", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "routes", b"routes", "topic", b"topic"]) -> None: ... 
global___TopicSubscription = TopicSubscription -@typing.final +@typing_extensions.final class TopicRoutes(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor RULES_FIELD_NUMBER: builtins.int DEFAULT_FIELD_NUMBER: builtins.int - default: builtins.str - """The default path for this topic.""" @property def rules(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TopicRule]: """The list of rules for this topic.""" - + default: builtins.str + """The default path for this topic.""" def __init__( self, *, rules: collections.abc.Iterable[global___TopicRule] | None = ..., default: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["default", b"default", "rules", b"rules"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["default", b"default", "rules", b"rules"]) -> None: ... global___TopicRoutes = TopicRoutes -@typing.final +@typing_extensions.final class TopicRule(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -638,11 +621,11 @@ class TopicRule(google.protobuf.message.Message): match: builtins.str = ..., path: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["match", b"match", "path", b"path"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["match", b"match", "path", b"path"]) -> None: ... global___TopicRule = TopicRule -@typing.final +@typing_extensions.final class BulkSubscribeConfig(google.protobuf.message.Message): """BulkSubscribeConfig is the message to pass settings for bulk subscribe""" @@ -664,11 +647,11 @@ class BulkSubscribeConfig(google.protobuf.message.Message): max_messages_count: builtins.int = ..., max_await_duration_ms: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["enabled", b"enabled", "max_await_duration_ms", b"max_await_duration_ms", "max_messages_count", b"max_messages_count"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["enabled", b"enabled", "max_await_duration_ms", b"max_await_duration_ms", "max_messages_count", b"max_messages_count"]) -> None: ... global___BulkSubscribeConfig = BulkSubscribeConfig -@typing.final +@typing_extensions.final class ListInputBindingsResponse(google.protobuf.message.Message): """ListInputBindingsResponse is the message including the list of input bindings.""" @@ -678,17 +661,16 @@ class ListInputBindingsResponse(google.protobuf.message.Message): @property def bindings(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """The list of input bindings.""" - def __init__( self, *, bindings: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bindings", b"bindings"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["bindings", b"bindings"]) -> None: ... global___ListInputBindingsResponse = ListInputBindingsResponse -@typing.final +@typing_extensions.final class HealthCheckResponse(google.protobuf.message.Message): """HealthCheckResponse is the message with the response to the health check. This message is currently empty as used as placeholder. 
diff --git a/dapr/proto/runtime/v1/appcallback_pb2_grpc.py b/dapr/proto/runtime/v1/appcallback_pb2_grpc.py index cd3e63c8..b203f7db 100644 --- a/dapr/proto/runtime/v1/appcallback_pb2_grpc.py +++ b/dapr/proto/runtime/v1/appcallback_pb2_grpc.py @@ -1,31 +1,11 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" import grpc -import warnings from dapr.proto.common.v1 import common_pb2 as dapr_dot_proto_dot_common_dot_v1_dot_common__pb2 from dapr.proto.runtime.v1 import appcallback_pb2 as dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -GRPC_GENERATED_VERSION = '1.66.1' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in dapr/proto/runtime/v1/appcallback_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) - class AppCallbackStub(object): """AppCallback V1 allows user application to interact with Dapr runtime. @@ -43,27 +23,27 @@ def __init__(self, channel): '/dapr.proto.runtime.v1.AppCallback/OnInvoke', request_serializer=dapr_dot_proto_dot_common_dot_v1_dot_common__pb2.InvokeRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_common_dot_v1_dot_common__pb2.InvokeResponse.FromString, - _registered_method=True) + ) self.ListTopicSubscriptions = channel.unary_unary( '/dapr.proto.runtime.v1.AppCallback/ListTopicSubscriptions', request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.ListTopicSubscriptionsResponse.FromString, - _registered_method=True) + ) self.OnTopicEvent = channel.unary_unary( '/dapr.proto.runtime.v1.AppCallback/OnTopicEvent', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventResponse.FromString, - _registered_method=True) + ) self.ListInputBindings = channel.unary_unary( '/dapr.proto.runtime.v1.AppCallback/ListInputBindings', request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.ListInputBindingsResponse.FromString, - _registered_method=True) + ) self.OnBindingEvent = channel.unary_unary( '/dapr.proto.runtime.v1.AppCallback/OnBindingEvent', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.BindingEventRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.BindingEventResponse.FromString, - _registered_method=True) + ) class AppCallbackServicer(object): @@ -142,7 +122,6 @@ def add_AppCallbackServicer_to_server(servicer, server): generic_handler = grpc.method_handlers_generic_handler( 'dapr.proto.runtime.v1.AppCallback', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) - 
server.add_registered_method_handlers('dapr.proto.runtime.v1.AppCallback', rpc_method_handlers) # This class is part of an EXPERIMENTAL API. @@ -163,21 +142,11 @@ def OnInvoke(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.AppCallback/OnInvoke', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.AppCallback/OnInvoke', dapr_dot_proto_dot_common_dot_v1_dot_common__pb2.InvokeRequest.SerializeToString, dapr_dot_proto_dot_common_dot_v1_dot_common__pb2.InvokeResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ListTopicSubscriptions(request, @@ -190,21 +159,11 @@ def ListTopicSubscriptions(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.AppCallback/ListTopicSubscriptions', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.AppCallback/ListTopicSubscriptions', google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.ListTopicSubscriptionsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def OnTopicEvent(request, @@ -217,21 +176,11 @@ def OnTopicEvent(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.AppCallback/OnTopicEvent', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.AppCallback/OnTopicEvent', dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ListInputBindings(request, @@ -244,21 +193,11 @@ def ListInputBindings(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.AppCallback/ListInputBindings', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.AppCallback/ListInputBindings', google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.ListInputBindingsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def OnBindingEvent(request, @@ -271,21 +210,11 @@ def OnBindingEvent(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.AppCallback/OnBindingEvent', + return 
grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.AppCallback/OnBindingEvent', dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.BindingEventRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.BindingEventResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) class AppCallbackHealthCheckStub(object): @@ -303,7 +232,7 @@ def __init__(self, channel): '/dapr.proto.runtime.v1.AppCallbackHealthCheck/HealthCheck', request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.HealthCheckResponse.FromString, - _registered_method=True) + ) class AppCallbackHealthCheckServicer(object): @@ -330,7 +259,6 @@ def add_AppCallbackHealthCheckServicer_to_server(servicer, server): generic_handler = grpc.method_handlers_generic_handler( 'dapr.proto.runtime.v1.AppCallbackHealthCheck', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers('dapr.proto.runtime.v1.AppCallbackHealthCheck', rpc_method_handlers) # This class is part of an EXPERIMENTAL API. @@ -350,21 +278,11 @@ def HealthCheck(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.AppCallbackHealthCheck/HealthCheck', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.AppCallbackHealthCheck/HealthCheck', google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.HealthCheckResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) class AppCallbackAlphaStub(object): @@ -382,12 +300,12 @@ def __init__(self, channel): '/dapr.proto.runtime.v1.AppCallbackAlpha/OnBulkTopicEventAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventBulkRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventBulkResponse.FromString, - _registered_method=True) + ) self.OnJobEventAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.AppCallbackAlpha/OnJobEventAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.JobEventRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.JobEventResponse.FromString, - _registered_method=True) + ) class AppCallbackAlphaServicer(object): @@ -426,7 +344,6 @@ def add_AppCallbackAlphaServicer_to_server(servicer, server): generic_handler = grpc.method_handlers_generic_handler( 'dapr.proto.runtime.v1.AppCallbackAlpha', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers('dapr.proto.runtime.v1.AppCallbackAlpha', rpc_method_handlers) # This class is part of an EXPERIMENTAL API. 
@@ -446,21 +363,11 @@ def OnBulkTopicEventAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.AppCallbackAlpha/OnBulkTopicEventAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.AppCallbackAlpha/OnBulkTopicEventAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventBulkRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.TopicEventBulkResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def OnJobEventAlpha1(request, @@ -473,18 +380,8 @@ def OnJobEventAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.AppCallbackAlpha/OnJobEventAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.AppCallbackAlpha/OnJobEventAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.JobEventRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2.JobEventResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/dapr/proto/runtime/v1/dapr_pb2.py b/dapr/proto/runtime/v1/dapr_pb2.py index 21e766bf..e2e9ccbc 100644 --- a/dapr/proto/runtime/v1/dapr_pb2.py +++ b/dapr/proto/runtime/v1/dapr_pb2.py @@ -1,22 +1,11 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# NO CHECKED-IN PROTOBUF GENCODE # source: dapr/proto/runtime/v1/dapr.proto -# Protobuf Python Version: 5.27.2 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 27, - 2, - '', - 'dapr/proto/runtime/v1/dapr.proto' -) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -29,78 +18,85 @@ from dapr.proto.runtime.v1 import appcallback_pb2 as dapr_dot_proto_dot_runtime_dot_v1_dot_appcallback__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n dapr/proto/runtime/v1/dapr.proto\x12\x15\x64\x61pr.proto.runtime.v1\x1a\x19google/protobuf/any.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a!dapr/proto/common/v1/common.proto\x1a\'dapr/proto/runtime/v1/appcallback.proto\"X\n\x14InvokeServiceRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x34\n\x07message\x18\x03 \x01(\x0b\x32#.dapr.proto.common.v1.InvokeRequest\"\xf5\x01\n\x0fGetStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12H\n\x0b\x63onsistency\x18\x03 \x01(\x0e\x32\x33.dapr.proto.common.v1.StateOptions.StateConsistency\x12\x46\n\x08metadata\x18\x04 \x03(\x0b\x32\x34.dapr.proto.runtime.v1.GetStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc9\x01\n\x13GetBulkStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x13\n\x0bparallelism\x18\x03 \x01(\x05\x12J\n\x08metadata\x18\x04 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.GetBulkStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"K\n\x14GetBulkStateResponse\x12\x33\n\x05items\x18\x01 \x03(\x0b\x32$.dapr.proto.runtime.v1.BulkStateItem\"\xbe\x01\n\rBulkStateItem\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\x12\x44\n\x08metadata\x18\x05 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.BulkStateItem.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa8\x01\n\x10GetStateResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x02 \x01(\t\x12G\n\x08metadata\x18\x03 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.GetStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x90\x02\n\x12\x44\x65leteStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12(\n\x04\x65tag\x18\x03 \x01(\x0b\x32\x1a.dapr.proto.common.v1.Etag\x12\x33\n\x07options\x18\x04 \x01(\x0b\x32\".dapr.proto.common.v1.StateOptions\x12I\n\x08metadata\x18\x05 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.DeleteStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x16\x44\x65leteBulkStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12/\n\x06states\x18\x02 \x03(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"W\n\x10SaveStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12/\n\x06states\x18\x02 
\x03(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"\xbc\x01\n\x11QueryStateRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\r\n\x05query\x18\x02 \x01(\t\x12H\n\x08metadata\x18\x03 \x03(\x0b\x32\x36.dapr.proto.runtime.v1.QueryStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"H\n\x0eQueryStateItem\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\"\xd7\x01\n\x12QueryStateResponse\x12\x36\n\x07results\x18\x01 \x03(\x0b\x32%.dapr.proto.runtime.v1.QueryStateItem\x12\r\n\x05token\x18\x02 \x01(\t\x12I\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.QueryStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdf\x01\n\x13PublishEventRequest\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\x19\n\x11\x64\x61ta_content_type\x18\x04 \x01(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.PublishEventRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xf5\x01\n\x12\x42ulkPublishRequest\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12?\n\x07\x65ntries\x18\x03 \x03(\x0b\x32..dapr.proto.runtime.v1.BulkPublishRequestEntry\x12I\n\x08metadata\x18\x04 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.BulkPublishRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xd1\x01\n\x17\x42ulkPublishRequestEntry\x12\x10\n\x08\x65ntry_id\x18\x01 \x01(\t\x12\r\n\x05\x65vent\x18\x02 \x01(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x03 \x01(\t\x12N\n\x08metadata\x18\x04 \x03(\x0b\x32<.dapr.proto.runtime.v1.BulkPublishRequestEntry.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"c\n\x13\x42ulkPublishResponse\x12L\n\rfailedEntries\x18\x01 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.BulkPublishResponseFailedEntry\"A\n\x1e\x42ulkPublishResponseFailedEntry\x12\x10\n\x08\x65ntry_id\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\"\x84\x02\n!SubscribeTopicEventsRequestAlpha1\x12Z\n\x0finitial_request\x18\x01 \x01(\x0b\x32?.dapr.proto.runtime.v1.SubscribeTopicEventsRequestInitialAlpha1H\x00\x12\\\n\x0f\x65vent_processed\x18\x02 \x01(\x0b\x32\x41.dapr.proto.runtime.v1.SubscribeTopicEventsRequestProcessedAlpha1H\x00\x42%\n#subscribe_topic_events_request_type\"\x96\x02\n(SubscribeTopicEventsRequestInitialAlpha1\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12_\n\x08metadata\x18\x03 \x03(\x0b\x32M.dapr.proto.runtime.v1.SubscribeTopicEventsRequestInitialAlpha1.MetadataEntry\x12\x1e\n\x11\x64\x65\x61\x64_letter_topic\x18\x04 \x01(\tH\x00\x88\x01\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x14\n\x12_dead_letter_topic\"s\n*SubscribeTopicEventsRequestProcessedAlpha1\x12\n\n\x02id\x18\x01 \x01(\t\x12\x39\n\x06status\x18\x02 \x01(\x0b\x32).dapr.proto.runtime.v1.TopicEventResponse\"\xed\x01\n\"SubscribeTopicEventsResponseAlpha1\x12\\\n\x10initial_response\x18\x01 \x01(\x0b\x32@.dapr.proto.runtime.v1.SubscribeTopicEventsResponseInitialAlpha1H\x00\x12\x41\n\revent_message\x18\x02 
\x01(\x0b\x32(.dapr.proto.runtime.v1.TopicEventRequestH\x00\x42&\n$subscribe_topic_events_response_type\"+\n)SubscribeTopicEventsResponseInitialAlpha1\"\xc3\x01\n\x14InvokeBindingRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12K\n\x08metadata\x18\x03 \x03(\x0b\x32\x39.dapr.proto.runtime.v1.InvokeBindingRequest.MetadataEntry\x12\x11\n\toperation\x18\x04 \x01(\t\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa4\x01\n\x15InvokeBindingResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12L\n\x08metadata\x18\x02 \x03(\x0b\x32:.dapr.proto.runtime.v1.InvokeBindingResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb8\x01\n\x10GetSecretRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x0b\n\x03key\x18\x02 \x01(\t\x12G\n\x08metadata\x18\x03 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.GetSecretRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x82\x01\n\x11GetSecretResponse\x12@\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.GetSecretResponse.DataEntry\x1a+\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb3\x01\n\x14GetBulkSecretRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12K\n\x08metadata\x18\x02 \x03(\x0b\x32\x39.dapr.proto.runtime.v1.GetBulkSecretRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x85\x01\n\x0eSecretResponse\x12\x43\n\x07secrets\x18\x01 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.SecretResponse.SecretsEntry\x1a.\n\x0cSecretsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb1\x01\n\x15GetBulkSecretResponse\x12\x44\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x36.dapr.proto.runtime.v1.GetBulkSecretResponse.DataEntry\x1aR\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.dapr.proto.runtime.v1.SecretResponse:\x02\x38\x01\"f\n\x1bTransactionalStateOperation\x12\x15\n\roperationType\x18\x01 \x01(\t\x12\x30\n\x07request\x18\x02 \x01(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"\x83\x02\n\x1e\x45xecuteStateTransactionRequest\x12\x11\n\tstoreName\x18\x01 \x01(\t\x12\x46\n\noperations\x18\x02 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.TransactionalStateOperation\x12U\n\x08metadata\x18\x03 \x03(\x0b\x32\x43.dapr.proto.runtime.v1.ExecuteStateTransactionRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbb\x01\n\x19RegisterActorTimerRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x19\n\x08\x64ue_time\x18\x04 \x01(\tR\x07\x64ueTime\x12\x0e\n\x06period\x18\x05 \x01(\t\x12\x10\n\x08\x63\x61llback\x18\x06 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x07 \x01(\x0c\x12\x0b\n\x03ttl\x18\x08 \x01(\t\"e\n\x1bUnregisterActorTimerRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\"\xac\x01\n\x1cRegisterActorReminderRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x19\n\x08\x64ue_time\x18\x04 \x01(\tR\x07\x64ueTime\x12\x0e\n\x06period\x18\x05 
\x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\x12\x0b\n\x03ttl\x18\x07 \x01(\t\"h\n\x1eUnregisterActorReminderRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\"]\n\x14GetActorStateRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0b\n\x03key\x18\x03 \x01(\t\"\xa4\x01\n\x15GetActorStateResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12L\n\x08metadata\x18\x02 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetActorStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xac\x01\n#ExecuteActorStateTransactionRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12K\n\noperations\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.TransactionalActorStateOperation\"\xf5\x01\n TransactionalActorStateOperation\x12\x15\n\roperationType\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12#\n\x05value\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any\x12W\n\x08metadata\x18\x04 \x03(\x0b\x32\x45.dapr.proto.runtime.v1.TransactionalActorStateOperation.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xe8\x01\n\x12InvokeActorRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0e\n\x06method\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\x12I\n\x08metadata\x18\x05 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.InvokeActorRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"#\n\x13InvokeActorResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"\x14\n\x12GetMetadataRequest\"\x9b\x06\n\x13GetMetadataResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12Q\n\x13\x61\x63tive_actors_count\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountB\x02\x18\x01R\x06\x61\x63tors\x12V\n\x15registered_components\x18\x03 \x03(\x0b\x32+.dapr.proto.runtime.v1.RegisteredComponentsR\ncomponents\x12\x65\n\x11\x65xtended_metadata\x18\x04 \x03(\x0b\x32@.dapr.proto.runtime.v1.GetMetadataResponse.ExtendedMetadataEntryR\x08\x65xtended\x12O\n\rsubscriptions\x18\x05 \x03(\x0b\x32).dapr.proto.runtime.v1.PubsubSubscriptionR\rsubscriptions\x12R\n\x0ehttp_endpoints\x18\x06 \x03(\x0b\x32+.dapr.proto.runtime.v1.MetadataHTTPEndpointR\rhttpEndpoints\x12j\n\x19\x61pp_connection_properties\x18\x07 \x01(\x0b\x32..dapr.proto.runtime.v1.AppConnectionPropertiesR\x17\x61ppConnectionProperties\x12\'\n\x0fruntime_version\x18\x08 \x01(\tR\x0eruntimeVersion\x12)\n\x10\x65nabled_features\x18\t \x03(\tR\x0f\x65nabledFeatures\x12H\n\ractor_runtime\x18\n \x01(\x0b\x32#.dapr.proto.runtime.v1.ActorRuntimeR\x0c\x61\x63torRuntime\x1a\x37\n\x15\x45xtendedMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x02\n\x0c\x41\x63torRuntime\x12]\n\x0eruntime_status\x18\x01 \x01(\x0e\x32\x36.dapr.proto.runtime.v1.ActorRuntime.ActorRuntimeStatusR\rruntimeStatus\x12M\n\ractive_actors\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountR\x0c\x61\x63tiveActors\x12\x1d\n\nhost_ready\x18\x03 \x01(\x08R\thostReady\x12\x1c\n\tplacement\x18\x04 
\x01(\tR\tplacement\"A\n\x12\x41\x63torRuntimeStatus\x12\x10\n\x0cINITIALIZING\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\"0\n\x11\x41\x63tiveActorsCount\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\"Y\n\x14RegisteredComponents\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x04 \x03(\t\"*\n\x14MetadataHTTPEndpoint\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xd1\x01\n\x17\x41ppConnectionProperties\x12\x0c\n\x04port\x18\x01 \x01(\x05\x12\x10\n\x08protocol\x18\x02 \x01(\t\x12\'\n\x0f\x63hannel_address\x18\x03 \x01(\tR\x0e\x63hannelAddress\x12\'\n\x0fmax_concurrency\x18\x04 \x01(\x05R\x0emaxConcurrency\x12\x44\n\x06health\x18\x05 \x01(\x0b\x32\x34.dapr.proto.runtime.v1.AppConnectionHealthProperties\"\xdc\x01\n\x1d\x41ppConnectionHealthProperties\x12*\n\x11health_check_path\x18\x01 \x01(\tR\x0fhealthCheckPath\x12\x32\n\x15health_probe_interval\x18\x02 \x01(\tR\x13healthProbeInterval\x12\x30\n\x14health_probe_timeout\x18\x03 \x01(\tR\x12healthProbeTimeout\x12)\n\x10health_threshold\x18\x04 \x01(\x05R\x0fhealthThreshold\"\x86\x03\n\x12PubsubSubscription\x12\x1f\n\x0bpubsub_name\x18\x01 \x01(\tR\npubsubname\x12\x14\n\x05topic\x18\x02 \x01(\tR\x05topic\x12S\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.PubsubSubscription.MetadataEntryR\x08metadata\x12\x44\n\x05rules\x18\x04 \x01(\x0b\x32..dapr.proto.runtime.v1.PubsubSubscriptionRulesR\x05rules\x12*\n\x11\x64\x65\x61\x64_letter_topic\x18\x05 \x01(\tR\x0f\x64\x65\x61\x64LetterTopic\x12\x41\n\x04type\x18\x06 \x01(\x0e\x32-.dapr.proto.runtime.v1.PubsubSubscriptionTypeR\x04type\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"W\n\x17PubsubSubscriptionRules\x12<\n\x05rules\x18\x01 \x03(\x0b\x32-.dapr.proto.runtime.v1.PubsubSubscriptionRule\"5\n\x16PubsubSubscriptionRule\x12\r\n\x05match\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"0\n\x12SetMetadataRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\xbc\x01\n\x17GetConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12N\n\x08metadata\x18\x03 \x03(\x0b\x32<.dapr.proto.runtime.v1.GetConfigurationRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x01\n\x18GetConfigurationResponse\x12I\n\x05items\x18\x01 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetConfigurationResponse.ItemsEntry\x1aU\n\nItemsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.dapr.proto.common.v1.ConfigurationItem:\x02\x38\x01\"\xc8\x01\n\x1dSubscribeConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12T\n\x08metadata\x18\x03 \x03(\x0b\x32\x42.dapr.proto.runtime.v1.SubscribeConfigurationRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"A\n\x1fUnsubscribeConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\"\xd4\x01\n\x1eSubscribeConfigurationResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12O\n\x05items\x18\x02 \x03(\x0b\x32@.dapr.proto.runtime.v1.SubscribeConfigurationResponse.ItemsEntry\x1aU\n\nItemsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.dapr.proto.common.v1.ConfigurationItem:\x02\x38\x01\"?\n UnsubscribeConfigurationResponse\x12\n\n\x02ok\x18\x01 
\x01(\x08\x12\x0f\n\x07message\x18\x02 \x01(\t\"\x9b\x01\n\x0eTryLockRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x1f\n\x0bresource_id\x18\x02 \x01(\tR\nresourceId\x12\x1d\n\nlock_owner\x18\x03 \x01(\tR\tlockOwner\x12*\n\x11\x65xpiry_in_seconds\x18\x04 \x01(\x05R\x0f\x65xpiryInSeconds\"\"\n\x0fTryLockResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\"n\n\rUnlockRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x1f\n\x0bresource_id\x18\x02 \x01(\tR\nresourceId\x12\x1d\n\nlock_owner\x18\x03 \x01(\tR\tlockOwner\"\xae\x01\n\x0eUnlockResponse\x12<\n\x06status\x18\x01 \x01(\x0e\x32,.dapr.proto.runtime.v1.UnlockResponse.Status\"^\n\x06Status\x12\x0b\n\x07SUCCESS\x10\x00\x12\x17\n\x13LOCK_DOES_NOT_EXIST\x10\x01\x12\x1a\n\x16LOCK_BELONGS_TO_OTHERS\x10\x02\x12\x12\n\x0eINTERNAL_ERROR\x10\x03\"\xb0\x01\n\x13SubtleGetKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x44\n\x06\x66ormat\x18\x03 \x01(\x0e\x32\x34.dapr.proto.runtime.v1.SubtleGetKeyRequest.KeyFormat\"\x1e\n\tKeyFormat\x12\x07\n\x03PEM\x10\x00\x12\x08\n\x04JSON\x10\x01\"C\n\x14SubtleGetKeyResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1d\n\npublic_key\x18\x02 \x01(\tR\tpublicKey\"\xb6\x01\n\x14SubtleEncryptRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x11\n\tplaintext\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x06 \x01(\x0cR\x0e\x61ssociatedData\"8\n\x15SubtleEncryptResponse\x12\x12\n\nciphertext\x18\x01 \x01(\x0c\x12\x0b\n\x03tag\x18\x02 \x01(\x0c\"\xc4\x01\n\x14SubtleDecryptRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x12\n\nciphertext\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\x0b\n\x03tag\x18\x06 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x07 \x01(\x0cR\x0e\x61ssociatedData\"*\n\x15SubtleDecryptResponse\x12\x11\n\tplaintext\x18\x01 \x01(\x0c\"\xc8\x01\n\x14SubtleWrapKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12#\n\rplaintext_key\x18\x02 \x01(\x0cR\x0cplaintextKey\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x06 \x01(\x0cR\x0e\x61ssociatedData\"E\n\x15SubtleWrapKeyResponse\x12\x1f\n\x0bwrapped_key\x18\x01 \x01(\x0cR\nwrappedKey\x12\x0b\n\x03tag\x18\x02 \x01(\x0c\"\xd3\x01\n\x16SubtleUnwrapKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x1f\n\x0bwrapped_key\x18\x02 \x01(\x0cR\nwrappedKey\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\x0b\n\x03tag\x18\x06 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x07 \x01(\x0cR\x0e\x61ssociatedData\">\n\x17SubtleUnwrapKeyResponse\x12#\n\rplaintext_key\x18\x01 \x01(\x0cR\x0cplaintextKey\"x\n\x11SubtleSignRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0e\n\x06\x64igest\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\"\'\n\x12SubtleSignResponse\x12\x11\n\tsignature\x18\x01 \x01(\x0c\"\x8d\x01\n\x13SubtleVerifyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0e\n\x06\x64igest\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 
\x01(\tR\x07keyName\x12\x11\n\tsignature\x18\x05 \x01(\x0c\"%\n\x14SubtleVerifyResponse\x12\r\n\x05valid\x18\x01 \x01(\x08\"\x85\x01\n\x0e\x45ncryptRequest\x12=\n\x07options\x18\x01 \x01(\x0b\x32,.dapr.proto.runtime.v1.EncryptRequestOptions\x12\x34\n\x07payload\x18\x02 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"\xfe\x01\n\x15\x45ncryptRequestOptions\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x19\n\x08key_name\x18\x02 \x01(\tR\x07keyName\x12\x1a\n\x12key_wrap_algorithm\x18\x03 \x01(\t\x12\x1e\n\x16\x64\x61ta_encryption_cipher\x18\n \x01(\t\x12\x37\n\x18omit_decryption_key_name\x18\x0b \x01(\x08R\x15omitDecryptionKeyName\x12.\n\x13\x64\x65\x63ryption_key_name\x18\x0c \x01(\tR\x11\x64\x65\x63ryptionKeyName\"G\n\x0f\x45ncryptResponse\x12\x34\n\x07payload\x18\x01 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"\x85\x01\n\x0e\x44\x65\x63ryptRequest\x12=\n\x07options\x18\x01 \x01(\x0b\x32,.dapr.proto.runtime.v1.DecryptRequestOptions\x12\x34\n\x07payload\x18\x02 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"Y\n\x15\x44\x65\x63ryptRequestOptions\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x19\n\x08key_name\x18\x0c \x01(\tR\x07keyName\"G\n\x0f\x44\x65\x63ryptResponse\x12\x34\n\x07payload\x18\x01 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"d\n\x12GetWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x84\x03\n\x13GetWorkflowResponse\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12#\n\rworkflow_name\x18\x02 \x01(\tR\x0cworkflowName\x12\x39\n\ncreated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x42\n\x0flast_updated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\rlastUpdatedAt\x12%\n\x0eruntime_status\x18\x05 \x01(\tR\rruntimeStatus\x12N\n\nproperties\x18\x06 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetWorkflowResponse.PropertiesEntry\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x95\x02\n\x14StartWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\x12#\n\rworkflow_name\x18\x03 \x01(\tR\x0cworkflowName\x12I\n\x07options\x18\x04 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.StartWorkflowRequest.OptionsEntry\x12\r\n\x05input\x18\x05 \x01(\x0c\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"8\n\x15StartWorkflowResponse\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\"j\n\x18TerminateWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"f\n\x14PauseWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"g\n\x15ResumeWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x9e\x01\n\x19RaiseEventWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\x12\x1d\n\nevent_name\x18\x03 \x01(\tR\teventName\x12\x12\n\nevent_data\x18\x04 \x01(\x0c\"f\n\x14PurgeWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x11\n\x0fShutdownRequest\"\xe8\x01\n\x03Job\x12\x12\n\x04name\x18\x01 
\x01(\tR\x04name\x12\x1f\n\x08schedule\x18\x02 \x01(\tH\x00R\x08schedule\x88\x01\x01\x12\x1d\n\x07repeats\x18\x03 \x01(\rH\x01R\x07repeats\x88\x01\x01\x12\x1e\n\x08\x64ue_time\x18\x04 \x01(\tH\x02R\x07\x64ueTime\x88\x01\x01\x12\x15\n\x03ttl\x18\x05 \x01(\tH\x03R\x03ttl\x88\x01\x01\x12(\n\x04\x64\x61ta\x18\x06 \x01(\x0b\x32\x14.google.protobuf.AnyR\x04\x64\x61taB\x0b\n\t_scheduleB\n\n\x08_repeatsB\x0b\n\t_due_timeB\x06\n\x04_ttl\"=\n\x12ScheduleJobRequest\x12\'\n\x03job\x18\x01 \x01(\x0b\x32\x1a.dapr.proto.runtime.v1.Job\"\x15\n\x13ScheduleJobResponse\"\x1d\n\rGetJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"9\n\x0eGetJobResponse\x12\'\n\x03job\x18\x01 \x01(\x0b\x32\x1a.dapr.proto.runtime.v1.Job\" \n\x10\x44\x65leteJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x13\n\x11\x44\x65leteJobResponse*W\n\x16PubsubSubscriptionType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x44\x45\x43LARATIVE\x10\x01\x12\x10\n\x0cPROGRAMMATIC\x10\x02\x12\r\n\tSTREAMING\x10\x03\x32\xbc\x30\n\x04\x44\x61pr\x12\x64\n\rInvokeService\x12+.dapr.proto.runtime.v1.InvokeServiceRequest\x1a$.dapr.proto.common.v1.InvokeResponse\"\x00\x12]\n\x08GetState\x12&.dapr.proto.runtime.v1.GetStateRequest\x1a\'.dapr.proto.runtime.v1.GetStateResponse\"\x00\x12i\n\x0cGetBulkState\x12*.dapr.proto.runtime.v1.GetBulkStateRequest\x1a+.dapr.proto.runtime.v1.GetBulkStateResponse\"\x00\x12N\n\tSaveState\x12\'.dapr.proto.runtime.v1.SaveStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12i\n\x10QueryStateAlpha1\x12(.dapr.proto.runtime.v1.QueryStateRequest\x1a).dapr.proto.runtime.v1.QueryStateResponse\"\x00\x12R\n\x0b\x44\x65leteState\x12).dapr.proto.runtime.v1.DeleteStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12Z\n\x0f\x44\x65leteBulkState\x12-.dapr.proto.runtime.v1.DeleteBulkStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x17\x45xecuteStateTransaction\x12\x35.dapr.proto.runtime.v1.ExecuteStateTransactionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12T\n\x0cPublishEvent\x12*.dapr.proto.runtime.v1.PublishEventRequest\x1a\x16.google.protobuf.Empty\"\x00\x12q\n\x16\x42ulkPublishEventAlpha1\x12).dapr.proto.runtime.v1.BulkPublishRequest\x1a*.dapr.proto.runtime.v1.BulkPublishResponse\"\x00\x12\x97\x01\n\x1aSubscribeTopicEventsAlpha1\x12\x38.dapr.proto.runtime.v1.SubscribeTopicEventsRequestAlpha1\x1a\x39.dapr.proto.runtime.v1.SubscribeTopicEventsResponseAlpha1\"\x00(\x01\x30\x01\x12l\n\rInvokeBinding\x12+.dapr.proto.runtime.v1.InvokeBindingRequest\x1a,.dapr.proto.runtime.v1.InvokeBindingResponse\"\x00\x12`\n\tGetSecret\x12\'.dapr.proto.runtime.v1.GetSecretRequest\x1a(.dapr.proto.runtime.v1.GetSecretResponse\"\x00\x12l\n\rGetBulkSecret\x12+.dapr.proto.runtime.v1.GetBulkSecretRequest\x1a,.dapr.proto.runtime.v1.GetBulkSecretResponse\"\x00\x12`\n\x12RegisterActorTimer\x12\x30.dapr.proto.runtime.v1.RegisterActorTimerRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x64\n\x14UnregisterActorTimer\x12\x32.dapr.proto.runtime.v1.UnregisterActorTimerRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x15RegisterActorReminder\x12\x33.dapr.proto.runtime.v1.RegisterActorReminderRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x17UnregisterActorReminder\x12\x35.dapr.proto.runtime.v1.UnregisterActorReminderRequest\x1a\x16.google.protobuf.Empty\"\x00\x12l\n\rGetActorState\x12+.dapr.proto.runtime.v1.GetActorStateRequest\x1a,.dapr.proto.runtime.v1.GetActorStateResponse\"\x00\x12t\n\x1c\x45xecuteActorStateTransaction\x12:.dapr.proto.runtime.v1.ExecuteActorStateTransactionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x0bInvokeActor\x12).dapr.proto.runti
me.v1.InvokeActorRequest\x1a*.dapr.proto.runtime.v1.InvokeActorResponse\"\x00\x12{\n\x16GetConfigurationAlpha1\x12..dapr.proto.runtime.v1.GetConfigurationRequest\x1a/.dapr.proto.runtime.v1.GetConfigurationResponse\"\x00\x12u\n\x10GetConfiguration\x12..dapr.proto.runtime.v1.GetConfigurationRequest\x1a/.dapr.proto.runtime.v1.GetConfigurationResponse\"\x00\x12\x8f\x01\n\x1cSubscribeConfigurationAlpha1\x12\x34.dapr.proto.runtime.v1.SubscribeConfigurationRequest\x1a\x35.dapr.proto.runtime.v1.SubscribeConfigurationResponse\"\x00\x30\x01\x12\x89\x01\n\x16SubscribeConfiguration\x12\x34.dapr.proto.runtime.v1.SubscribeConfigurationRequest\x1a\x35.dapr.proto.runtime.v1.SubscribeConfigurationResponse\"\x00\x30\x01\x12\x93\x01\n\x1eUnsubscribeConfigurationAlpha1\x12\x36.dapr.proto.runtime.v1.UnsubscribeConfigurationRequest\x1a\x37.dapr.proto.runtime.v1.UnsubscribeConfigurationResponse\"\x00\x12\x8d\x01\n\x18UnsubscribeConfiguration\x12\x36.dapr.proto.runtime.v1.UnsubscribeConfigurationRequest\x1a\x37.dapr.proto.runtime.v1.UnsubscribeConfigurationResponse\"\x00\x12`\n\rTryLockAlpha1\x12%.dapr.proto.runtime.v1.TryLockRequest\x1a&.dapr.proto.runtime.v1.TryLockResponse\"\x00\x12]\n\x0cUnlockAlpha1\x12$.dapr.proto.runtime.v1.UnlockRequest\x1a%.dapr.proto.runtime.v1.UnlockResponse\"\x00\x12\x62\n\rEncryptAlpha1\x12%.dapr.proto.runtime.v1.EncryptRequest\x1a&.dapr.proto.runtime.v1.EncryptResponse(\x01\x30\x01\x12\x62\n\rDecryptAlpha1\x12%.dapr.proto.runtime.v1.DecryptRequest\x1a&.dapr.proto.runtime.v1.DecryptResponse(\x01\x30\x01\x12\x66\n\x0bGetMetadata\x12).dapr.proto.runtime.v1.GetMetadataRequest\x1a*.dapr.proto.runtime.v1.GetMetadataResponse\"\x00\x12R\n\x0bSetMetadata\x12).dapr.proto.runtime.v1.SetMetadataRequest\x1a\x16.google.protobuf.Empty\"\x00\x12m\n\x12SubtleGetKeyAlpha1\x12*.dapr.proto.runtime.v1.SubtleGetKeyRequest\x1a+.dapr.proto.runtime.v1.SubtleGetKeyResponse\x12p\n\x13SubtleEncryptAlpha1\x12+.dapr.proto.runtime.v1.SubtleEncryptRequest\x1a,.dapr.proto.runtime.v1.SubtleEncryptResponse\x12p\n\x13SubtleDecryptAlpha1\x12+.dapr.proto.runtime.v1.SubtleDecryptRequest\x1a,.dapr.proto.runtime.v1.SubtleDecryptResponse\x12p\n\x13SubtleWrapKeyAlpha1\x12+.dapr.proto.runtime.v1.SubtleWrapKeyRequest\x1a,.dapr.proto.runtime.v1.SubtleWrapKeyResponse\x12v\n\x15SubtleUnwrapKeyAlpha1\x12-.dapr.proto.runtime.v1.SubtleUnwrapKeyRequest\x1a..dapr.proto.runtime.v1.SubtleUnwrapKeyResponse\x12g\n\x10SubtleSignAlpha1\x12(.dapr.proto.runtime.v1.SubtleSignRequest\x1a).dapr.proto.runtime.v1.SubtleSignResponse\x12m\n\x12SubtleVerifyAlpha1\x12*.dapr.proto.runtime.v1.SubtleVerifyRequest\x1a+.dapr.proto.runtime.v1.SubtleVerifyResponse\x12r\n\x13StartWorkflowAlpha1\x12+.dapr.proto.runtime.v1.StartWorkflowRequest\x1a,.dapr.proto.runtime.v1.StartWorkflowResponse\"\x00\x12l\n\x11GetWorkflowAlpha1\x12).dapr.proto.runtime.v1.GetWorkflowRequest\x1a*.dapr.proto.runtime.v1.GetWorkflowResponse\"\x00\x12\\\n\x13PurgeWorkflowAlpha1\x12+.dapr.proto.runtime.v1.PurgeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x64\n\x17TerminateWorkflowAlpha1\x12/.dapr.proto.runtime.v1.TerminateWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\\\n\x13PauseWorkflowAlpha1\x12+.dapr.proto.runtime.v1.PauseWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12^\n\x14ResumeWorkflowAlpha1\x12,.dapr.proto.runtime.v1.ResumeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x18RaiseEventWorkflowAlpha1\x12\x30.dapr.proto.runtime.v1.RaiseEventWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12q\n\x12StartWorkflowBeta1\x12+.dapr.proto.runtim
e.v1.StartWorkflowRequest\x1a,.dapr.proto.runtime.v1.StartWorkflowResponse\"\x00\x12k\n\x10GetWorkflowBeta1\x12).dapr.proto.runtime.v1.GetWorkflowRequest\x1a*.dapr.proto.runtime.v1.GetWorkflowResponse\"\x00\x12[\n\x12PurgeWorkflowBeta1\x12+.dapr.proto.runtime.v1.PurgeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x63\n\x16TerminateWorkflowBeta1\x12/.dapr.proto.runtime.v1.TerminateWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12[\n\x12PauseWorkflowBeta1\x12+.dapr.proto.runtime.v1.PauseWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12]\n\x13ResumeWorkflowBeta1\x12,.dapr.proto.runtime.v1.ResumeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x65\n\x17RaiseEventWorkflowBeta1\x12\x30.dapr.proto.runtime.v1.RaiseEventWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12L\n\x08Shutdown\x12&.dapr.proto.runtime.v1.ShutdownRequest\x1a\x16.google.protobuf.Empty\"\x00\x12l\n\x11ScheduleJobAlpha1\x12).dapr.proto.runtime.v1.ScheduleJobRequest\x1a*.dapr.proto.runtime.v1.ScheduleJobResponse\"\x00\x12]\n\x0cGetJobAlpha1\x12$.dapr.proto.runtime.v1.GetJobRequest\x1a%.dapr.proto.runtime.v1.GetJobResponse\"\x00\x12\x66\n\x0f\x44\x65leteJobAlpha1\x12\'.dapr.proto.runtime.v1.DeleteJobRequest\x1a(.dapr.proto.runtime.v1.DeleteJobResponse\"\x00\x42i\n\nio.dapr.v1B\nDaprProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\xaa\x02\x1b\x44\x61pr.Client.Autogen.Grpc.v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n dapr/proto/runtime/v1/dapr.proto\x12\x15\x64\x61pr.proto.runtime.v1\x1a\x19google/protobuf/any.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a!dapr/proto/common/v1/common.proto\x1a\'dapr/proto/runtime/v1/appcallback.proto\"X\n\x14InvokeServiceRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x34\n\x07message\x18\x03 \x01(\x0b\x32#.dapr.proto.common.v1.InvokeRequest\"\xf5\x01\n\x0fGetStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12H\n\x0b\x63onsistency\x18\x03 \x01(\x0e\x32\x33.dapr.proto.common.v1.StateOptions.StateConsistency\x12\x46\n\x08metadata\x18\x04 \x03(\x0b\x32\x34.dapr.proto.runtime.v1.GetStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc9\x01\n\x13GetBulkStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x13\n\x0bparallelism\x18\x03 \x01(\x05\x12J\n\x08metadata\x18\x04 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.GetBulkStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"K\n\x14GetBulkStateResponse\x12\x33\n\x05items\x18\x01 \x03(\x0b\x32$.dapr.proto.runtime.v1.BulkStateItem\"\xbe\x01\n\rBulkStateItem\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\x12\x44\n\x08metadata\x18\x05 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.BulkStateItem.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa8\x01\n\x10GetStateResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x02 \x01(\t\x12G\n\x08metadata\x18\x03 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.GetStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x90\x02\n\x12\x44\x65leteStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12(\n\x04\x65tag\x18\x03 
\x01(\x0b\x32\x1a.dapr.proto.common.v1.Etag\x12\x33\n\x07options\x18\x04 \x01(\x0b\x32\".dapr.proto.common.v1.StateOptions\x12I\n\x08metadata\x18\x05 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.DeleteStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x16\x44\x65leteBulkStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12/\n\x06states\x18\x02 \x03(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"W\n\x10SaveStateRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12/\n\x06states\x18\x02 \x03(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"\xbc\x01\n\x11QueryStateRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\r\n\x05query\x18\x02 \x01(\t\x12H\n\x08metadata\x18\x03 \x03(\x0b\x32\x36.dapr.proto.runtime.v1.QueryStateRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"H\n\x0eQueryStateItem\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\"\xd7\x01\n\x12QueryStateResponse\x12\x36\n\x07results\x18\x01 \x03(\x0b\x32%.dapr.proto.runtime.v1.QueryStateItem\x12\r\n\x05token\x18\x02 \x01(\t\x12I\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.QueryStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdf\x01\n\x13PublishEventRequest\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\x19\n\x11\x64\x61ta_content_type\x18\x04 \x01(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.PublishEventRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xf5\x01\n\x12\x42ulkPublishRequest\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12?\n\x07\x65ntries\x18\x03 \x03(\x0b\x32..dapr.proto.runtime.v1.BulkPublishRequestEntry\x12I\n\x08metadata\x18\x04 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.BulkPublishRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xd1\x01\n\x17\x42ulkPublishRequestEntry\x12\x10\n\x08\x65ntry_id\x18\x01 \x01(\t\x12\r\n\x05\x65vent\x18\x02 \x01(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x03 \x01(\t\x12N\n\x08metadata\x18\x04 \x03(\x0b\x32<.dapr.proto.runtime.v1.BulkPublishRequestEntry.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"c\n\x13\x42ulkPublishResponse\x12L\n\rfailedEntries\x18\x01 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.BulkPublishResponseFailedEntry\"A\n\x1e\x42ulkPublishResponseFailedEntry\x12\x10\n\x08\x65ntry_id\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\"\x84\x02\n!SubscribeTopicEventsRequestAlpha1\x12Z\n\x0finitial_request\x18\x01 \x01(\x0b\x32?.dapr.proto.runtime.v1.SubscribeTopicEventsRequestInitialAlpha1H\x00\x12\\\n\x0f\x65vent_processed\x18\x02 \x01(\x0b\x32\x41.dapr.proto.runtime.v1.SubscribeTopicEventsRequestProcessedAlpha1H\x00\x42%\n#subscribe_topic_events_request_type\"\x96\x02\n(SubscribeTopicEventsRequestInitialAlpha1\x12\x13\n\x0bpubsub_name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12_\n\x08metadata\x18\x03 \x03(\x0b\x32M.dapr.proto.runtime.v1.SubscribeTopicEventsRequestInitialAlpha1.MetadataEntry\x12\x1e\n\x11\x64\x65\x61\x64_letter_topic\x18\x04 
\x01(\tH\x00\x88\x01\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x14\n\x12_dead_letter_topic\"s\n*SubscribeTopicEventsRequestProcessedAlpha1\x12\n\n\x02id\x18\x01 \x01(\t\x12\x39\n\x06status\x18\x02 \x01(\x0b\x32).dapr.proto.runtime.v1.TopicEventResponse\"\xed\x01\n\"SubscribeTopicEventsResponseAlpha1\x12\\\n\x10initial_response\x18\x01 \x01(\x0b\x32@.dapr.proto.runtime.v1.SubscribeTopicEventsResponseInitialAlpha1H\x00\x12\x41\n\revent_message\x18\x02 \x01(\x0b\x32(.dapr.proto.runtime.v1.TopicEventRequestH\x00\x42&\n$subscribe_topic_events_response_type\"+\n)SubscribeTopicEventsResponseInitialAlpha1\"\xc3\x01\n\x14InvokeBindingRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12K\n\x08metadata\x18\x03 \x03(\x0b\x32\x39.dapr.proto.runtime.v1.InvokeBindingRequest.MetadataEntry\x12\x11\n\toperation\x18\x04 \x01(\t\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa4\x01\n\x15InvokeBindingResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12L\n\x08metadata\x18\x02 \x03(\x0b\x32:.dapr.proto.runtime.v1.InvokeBindingResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb8\x01\n\x10GetSecretRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x0b\n\x03key\x18\x02 \x01(\t\x12G\n\x08metadata\x18\x03 \x03(\x0b\x32\x35.dapr.proto.runtime.v1.GetSecretRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x82\x01\n\x11GetSecretResponse\x12@\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.GetSecretResponse.DataEntry\x1a+\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb3\x01\n\x14GetBulkSecretRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12K\n\x08metadata\x18\x02 \x03(\x0b\x32\x39.dapr.proto.runtime.v1.GetBulkSecretRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x85\x01\n\x0eSecretResponse\x12\x43\n\x07secrets\x18\x01 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.SecretResponse.SecretsEntry\x1a.\n\x0cSecretsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb1\x01\n\x15GetBulkSecretResponse\x12\x44\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x36.dapr.proto.runtime.v1.GetBulkSecretResponse.DataEntry\x1aR\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.dapr.proto.runtime.v1.SecretResponse:\x02\x38\x01\"f\n\x1bTransactionalStateOperation\x12\x15\n\roperationType\x18\x01 \x01(\t\x12\x30\n\x07request\x18\x02 \x01(\x0b\x32\x1f.dapr.proto.common.v1.StateItem\"\x83\x02\n\x1e\x45xecuteStateTransactionRequest\x12\x11\n\tstoreName\x18\x01 \x01(\t\x12\x46\n\noperations\x18\x02 \x03(\x0b\x32\x32.dapr.proto.runtime.v1.TransactionalStateOperation\x12U\n\x08metadata\x18\x03 \x03(\x0b\x32\x43.dapr.proto.runtime.v1.ExecuteStateTransactionRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbb\x01\n\x19RegisterActorTimerRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x19\n\x08\x64ue_time\x18\x04 \x01(\tR\x07\x64ueTime\x12\x0e\n\x06period\x18\x05 \x01(\t\x12\x10\n\x08\x63\x61llback\x18\x06 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x07 
\x01(\x0c\x12\x0b\n\x03ttl\x18\x08 \x01(\t\"e\n\x1bUnregisterActorTimerRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\"\xac\x01\n\x1cRegisterActorReminderRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x19\n\x08\x64ue_time\x18\x04 \x01(\tR\x07\x64ueTime\x12\x0e\n\x06period\x18\x05 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\x12\x0b\n\x03ttl\x18\x07 \x01(\t\"h\n\x1eUnregisterActorReminderRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0c\n\x04name\x18\x03 \x01(\t\"]\n\x14GetActorStateRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0b\n\x03key\x18\x03 \x01(\t\"\xa4\x01\n\x15GetActorStateResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12L\n\x08metadata\x18\x02 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetActorStateResponse.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xac\x01\n#ExecuteActorStateTransactionRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12K\n\noperations\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.TransactionalActorStateOperation\"\xf5\x01\n TransactionalActorStateOperation\x12\x15\n\roperationType\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12#\n\x05value\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any\x12W\n\x08metadata\x18\x04 \x03(\x0b\x32\x45.dapr.proto.runtime.v1.TransactionalActorStateOperation.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xe8\x01\n\x12InvokeActorRequest\x12\x1d\n\nactor_type\x18\x01 \x01(\tR\tactorType\x12\x19\n\x08\x61\x63tor_id\x18\x02 \x01(\tR\x07\x61\x63torId\x12\x0e\n\x06method\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\x12I\n\x08metadata\x18\x05 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.InvokeActorRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"#\n\x13InvokeActorResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"\x14\n\x12GetMetadataRequest\"\x9b\x06\n\x13GetMetadataResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12Q\n\x13\x61\x63tive_actors_count\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountB\x02\x18\x01R\x06\x61\x63tors\x12V\n\x15registered_components\x18\x03 \x03(\x0b\x32+.dapr.proto.runtime.v1.RegisteredComponentsR\ncomponents\x12\x65\n\x11\x65xtended_metadata\x18\x04 \x03(\x0b\x32@.dapr.proto.runtime.v1.GetMetadataResponse.ExtendedMetadataEntryR\x08\x65xtended\x12O\n\rsubscriptions\x18\x05 \x03(\x0b\x32).dapr.proto.runtime.v1.PubsubSubscriptionR\rsubscriptions\x12R\n\x0ehttp_endpoints\x18\x06 \x03(\x0b\x32+.dapr.proto.runtime.v1.MetadataHTTPEndpointR\rhttpEndpoints\x12j\n\x19\x61pp_connection_properties\x18\x07 \x01(\x0b\x32..dapr.proto.runtime.v1.AppConnectionPropertiesR\x17\x61ppConnectionProperties\x12\'\n\x0fruntime_version\x18\x08 \x01(\tR\x0eruntimeVersion\x12)\n\x10\x65nabled_features\x18\t \x03(\tR\x0f\x65nabledFeatures\x12H\n\ractor_runtime\x18\n \x01(\x0b\x32#.dapr.proto.runtime.v1.ActorRuntimeR\x0c\x61\x63torRuntime\x1a\x37\n\x15\x45xtendedMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\"\xbc\x02\n\x0c\x41\x63torRuntime\x12]\n\x0eruntime_status\x18\x01 \x01(\x0e\x32\x36.dapr.proto.runtime.v1.ActorRuntime.ActorRuntimeStatusR\rruntimeStatus\x12M\n\ractive_actors\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountR\x0c\x61\x63tiveActors\x12\x1d\n\nhost_ready\x18\x03 \x01(\x08R\thostReady\x12\x1c\n\tplacement\x18\x04 \x01(\tR\tplacement\"A\n\x12\x41\x63torRuntimeStatus\x12\x10\n\x0cINITIALIZING\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\"0\n\x11\x41\x63tiveActorsCount\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\"Y\n\x14RegisteredComponents\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x04 \x03(\t\"*\n\x14MetadataHTTPEndpoint\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xd1\x01\n\x17\x41ppConnectionProperties\x12\x0c\n\x04port\x18\x01 \x01(\x05\x12\x10\n\x08protocol\x18\x02 \x01(\t\x12\'\n\x0f\x63hannel_address\x18\x03 \x01(\tR\x0e\x63hannelAddress\x12\'\n\x0fmax_concurrency\x18\x04 \x01(\x05R\x0emaxConcurrency\x12\x44\n\x06health\x18\x05 \x01(\x0b\x32\x34.dapr.proto.runtime.v1.AppConnectionHealthProperties\"\xdc\x01\n\x1d\x41ppConnectionHealthProperties\x12*\n\x11health_check_path\x18\x01 \x01(\tR\x0fhealthCheckPath\x12\x32\n\x15health_probe_interval\x18\x02 \x01(\tR\x13healthProbeInterval\x12\x30\n\x14health_probe_timeout\x18\x03 \x01(\tR\x12healthProbeTimeout\x12)\n\x10health_threshold\x18\x04 \x01(\x05R\x0fhealthThreshold\"\x86\x03\n\x12PubsubSubscription\x12\x1f\n\x0bpubsub_name\x18\x01 \x01(\tR\npubsubname\x12\x14\n\x05topic\x18\x02 \x01(\tR\x05topic\x12S\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.PubsubSubscription.MetadataEntryR\x08metadata\x12\x44\n\x05rules\x18\x04 \x01(\x0b\x32..dapr.proto.runtime.v1.PubsubSubscriptionRulesR\x05rules\x12*\n\x11\x64\x65\x61\x64_letter_topic\x18\x05 \x01(\tR\x0f\x64\x65\x61\x64LetterTopic\x12\x41\n\x04type\x18\x06 \x01(\x0e\x32-.dapr.proto.runtime.v1.PubsubSubscriptionTypeR\x04type\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"W\n\x17PubsubSubscriptionRules\x12<\n\x05rules\x18\x01 \x03(\x0b\x32-.dapr.proto.runtime.v1.PubsubSubscriptionRule\"5\n\x16PubsubSubscriptionRule\x12\r\n\x05match\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"0\n\x12SetMetadataRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\xbc\x01\n\x17GetConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12N\n\x08metadata\x18\x03 \x03(\x0b\x32<.dapr.proto.runtime.v1.GetConfigurationRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbc\x01\n\x18GetConfigurationResponse\x12I\n\x05items\x18\x01 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetConfigurationResponse.ItemsEntry\x1aU\n\nItemsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.dapr.proto.common.v1.ConfigurationItem:\x02\x38\x01\"\xc8\x01\n\x1dSubscribeConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12T\n\x08metadata\x18\x03 \x03(\x0b\x32\x42.dapr.proto.runtime.v1.SubscribeConfigurationRequest.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"A\n\x1fUnsubscribeConfigurationRequest\x12\x12\n\nstore_name\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 
\x01(\t\"\xd4\x01\n\x1eSubscribeConfigurationResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12O\n\x05items\x18\x02 \x03(\x0b\x32@.dapr.proto.runtime.v1.SubscribeConfigurationResponse.ItemsEntry\x1aU\n\nItemsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.dapr.proto.common.v1.ConfigurationItem:\x02\x38\x01\"?\n UnsubscribeConfigurationResponse\x12\n\n\x02ok\x18\x01 \x01(\x08\x12\x0f\n\x07message\x18\x02 \x01(\t\"\x9b\x01\n\x0eTryLockRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x1f\n\x0bresource_id\x18\x02 \x01(\tR\nresourceId\x12\x1d\n\nlock_owner\x18\x03 \x01(\tR\tlockOwner\x12*\n\x11\x65xpiry_in_seconds\x18\x04 \x01(\x05R\x0f\x65xpiryInSeconds\"\"\n\x0fTryLockResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\"n\n\rUnlockRequest\x12\x1d\n\nstore_name\x18\x01 \x01(\tR\tstoreName\x12\x1f\n\x0bresource_id\x18\x02 \x01(\tR\nresourceId\x12\x1d\n\nlock_owner\x18\x03 \x01(\tR\tlockOwner\"\xae\x01\n\x0eUnlockResponse\x12<\n\x06status\x18\x01 \x01(\x0e\x32,.dapr.proto.runtime.v1.UnlockResponse.Status\"^\n\x06Status\x12\x0b\n\x07SUCCESS\x10\x00\x12\x17\n\x13LOCK_DOES_NOT_EXIST\x10\x01\x12\x1a\n\x16LOCK_BELONGS_TO_OTHERS\x10\x02\x12\x12\n\x0eINTERNAL_ERROR\x10\x03\"\xb0\x01\n\x13SubtleGetKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x44\n\x06\x66ormat\x18\x03 \x01(\x0e\x32\x34.dapr.proto.runtime.v1.SubtleGetKeyRequest.KeyFormat\"\x1e\n\tKeyFormat\x12\x07\n\x03PEM\x10\x00\x12\x08\n\x04JSON\x10\x01\"C\n\x14SubtleGetKeyResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1d\n\npublic_key\x18\x02 \x01(\tR\tpublicKey\"\xb6\x01\n\x14SubtleEncryptRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x11\n\tplaintext\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x06 \x01(\x0cR\x0e\x61ssociatedData\"8\n\x15SubtleEncryptResponse\x12\x12\n\nciphertext\x18\x01 \x01(\x0c\x12\x0b\n\x03tag\x18\x02 \x01(\x0c\"\xc4\x01\n\x14SubtleDecryptRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x12\n\nciphertext\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\x0b\n\x03tag\x18\x06 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x07 \x01(\x0cR\x0e\x61ssociatedData\"*\n\x15SubtleDecryptResponse\x12\x11\n\tplaintext\x18\x01 \x01(\x0c\"\xc8\x01\n\x14SubtleWrapKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12#\n\rplaintext_key\x18\x02 \x01(\x0cR\x0cplaintextKey\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x06 \x01(\x0cR\x0e\x61ssociatedData\"E\n\x15SubtleWrapKeyResponse\x12\x1f\n\x0bwrapped_key\x18\x01 \x01(\x0cR\nwrappedKey\x12\x0b\n\x03tag\x18\x02 \x01(\x0c\"\xd3\x01\n\x16SubtleUnwrapKeyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x1f\n\x0bwrapped_key\x18\x02 \x01(\x0cR\nwrappedKey\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\r\n\x05nonce\x18\x05 \x01(\x0c\x12\x0b\n\x03tag\x18\x06 \x01(\x0c\x12\'\n\x0f\x61ssociated_data\x18\x07 \x01(\x0cR\x0e\x61ssociatedData\">\n\x17SubtleUnwrapKeyResponse\x12#\n\rplaintext_key\x18\x01 \x01(\x0cR\x0cplaintextKey\"x\n\x11SubtleSignRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0e\n\x06\x64igest\x18\x02 
\x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\"\'\n\x12SubtleSignResponse\x12\x11\n\tsignature\x18\x01 \x01(\x0c\"\x8d\x01\n\x13SubtleVerifyRequest\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x0e\n\x06\x64igest\x18\x02 \x01(\x0c\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x19\n\x08key_name\x18\x04 \x01(\tR\x07keyName\x12\x11\n\tsignature\x18\x05 \x01(\x0c\"%\n\x14SubtleVerifyResponse\x12\r\n\x05valid\x18\x01 \x01(\x08\"\x85\x01\n\x0e\x45ncryptRequest\x12=\n\x07options\x18\x01 \x01(\x0b\x32,.dapr.proto.runtime.v1.EncryptRequestOptions\x12\x34\n\x07payload\x18\x02 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"\xfe\x01\n\x15\x45ncryptRequestOptions\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x19\n\x08key_name\x18\x02 \x01(\tR\x07keyName\x12\x1a\n\x12key_wrap_algorithm\x18\x03 \x01(\t\x12\x1e\n\x16\x64\x61ta_encryption_cipher\x18\n \x01(\t\x12\x37\n\x18omit_decryption_key_name\x18\x0b \x01(\x08R\x15omitDecryptionKeyName\x12.\n\x13\x64\x65\x63ryption_key_name\x18\x0c \x01(\tR\x11\x64\x65\x63ryptionKeyName\"G\n\x0f\x45ncryptResponse\x12\x34\n\x07payload\x18\x01 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"\x85\x01\n\x0e\x44\x65\x63ryptRequest\x12=\n\x07options\x18\x01 \x01(\x0b\x32,.dapr.proto.runtime.v1.DecryptRequestOptions\x12\x34\n\x07payload\x18\x02 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"Y\n\x15\x44\x65\x63ryptRequestOptions\x12%\n\x0e\x63omponent_name\x18\x01 \x01(\tR\rcomponentName\x12\x19\n\x08key_name\x18\x0c \x01(\tR\x07keyName\"G\n\x0f\x44\x65\x63ryptResponse\x12\x34\n\x07payload\x18\x01 \x01(\x0b\x32#.dapr.proto.common.v1.StreamPayload\"d\n\x12GetWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x84\x03\n\x13GetWorkflowResponse\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12#\n\rworkflow_name\x18\x02 \x01(\tR\x0cworkflowName\x12\x39\n\ncreated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x42\n\x0flast_updated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\rlastUpdatedAt\x12%\n\x0eruntime_status\x18\x05 \x01(\tR\rruntimeStatus\x12N\n\nproperties\x18\x06 \x03(\x0b\x32:.dapr.proto.runtime.v1.GetWorkflowResponse.PropertiesEntry\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x95\x02\n\x14StartWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\x12#\n\rworkflow_name\x18\x03 \x01(\tR\x0cworkflowName\x12I\n\x07options\x18\x04 \x03(\x0b\x32\x38.dapr.proto.runtime.v1.StartWorkflowRequest.OptionsEntry\x12\r\n\x05input\x18\x05 \x01(\x0c\x1a.\n\x0cOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"8\n\x15StartWorkflowResponse\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\"j\n\x18TerminateWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"f\n\x14PauseWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"g\n\x15ResumeWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x9e\x01\n\x19RaiseEventWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 
\x01(\tR\x11workflowComponent\x12\x1d\n\nevent_name\x18\x03 \x01(\tR\teventName\x12\x12\n\nevent_data\x18\x04 \x01(\x0c\"f\n\x14PurgeWorkflowRequest\x12\x1f\n\x0binstance_id\x18\x01 \x01(\tR\ninstanceID\x12-\n\x12workflow_component\x18\x02 \x01(\tR\x11workflowComponent\"\x11\n\x0fShutdownRequest\"\xe8\x01\n\x03Job\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1f\n\x08schedule\x18\x02 \x01(\tH\x00R\x08schedule\x88\x01\x01\x12\x1d\n\x07repeats\x18\x03 \x01(\rH\x01R\x07repeats\x88\x01\x01\x12\x1e\n\x08\x64ue_time\x18\x04 \x01(\tH\x02R\x07\x64ueTime\x88\x01\x01\x12\x15\n\x03ttl\x18\x05 \x01(\tH\x03R\x03ttl\x88\x01\x01\x12(\n\x04\x64\x61ta\x18\x06 \x01(\x0b\x32\x14.google.protobuf.AnyR\x04\x64\x61taB\x0b\n\t_scheduleB\n\n\x08_repeatsB\x0b\n\t_due_timeB\x06\n\x04_ttl\"=\n\x12ScheduleJobRequest\x12\'\n\x03job\x18\x01 \x01(\x0b\x32\x1a.dapr.proto.runtime.v1.Job\"\x15\n\x13ScheduleJobResponse\"\x1d\n\rGetJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"9\n\x0eGetJobResponse\x12\'\n\x03job\x18\x01 \x01(\x0b\x32\x1a.dapr.proto.runtime.v1.Job\" \n\x10\x44\x65leteJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x13\n\x11\x44\x65leteJobResponse\"\xf9\x03\n\x19\x43onversationAlpha1Request\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\tcontextID\x18\x02 \x01(\tH\x00\x88\x01\x01\x12\x38\n\x06inputs\x18\x03 \x03(\x0b\x32(.dapr.proto.runtime.v1.ConversationInput\x12T\n\nparameters\x18\x04 \x03(\x0b\x32@.dapr.proto.runtime.v1.ConversationAlpha1Request.ParametersEntry\x12P\n\x08metadata\x18\x05 \x03(\x0b\x32>.dapr.proto.runtime.v1.ConversationAlpha1Request.MetadataEntry\x12\x15\n\x08scrubPII\x18\x06 \x01(\x08H\x01\x88\x01\x01\x12\x18\n\x0btemperature\x18\x07 \x01(\x01H\x02\x88\x01\x01\x1aG\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any:\x02\x38\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0c\n\n_contextIDB\x0b\n\t_scrubPIIB\x0e\n\x0c_temperature\"d\n\x11\x43onversationInput\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\x11\n\x04role\x18\x02 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08scrubPII\x18\x03 \x01(\x08H\x01\x88\x01\x01\x42\x07\n\x05_roleB\x0b\n\t_scrubPII\"\xc8\x01\n\x18\x43onversationAlpha1Result\x12\x0e\n\x06result\x18\x01 \x01(\t\x12S\n\nparameters\x18\x02 \x03(\x0b\x32?.dapr.proto.runtime.v1.ConversationAlpha1Result.ParametersEntry\x1aG\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any:\x02\x38\x01\"\x84\x01\n\x1a\x43onversationAlpha1Response\x12\x16\n\tcontextID\x18\x01 \x01(\tH\x00\x88\x01\x01\x12@\n\x07outputs\x18\x02 
\x03(\x0b\x32/.dapr.proto.runtime.v1.ConversationAlpha1ResultB\x0c\n\n_contextID*W\n\x16PubsubSubscriptionType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x44\x45\x43LARATIVE\x10\x01\x12\x10\n\x0cPROGRAMMATIC\x10\x02\x12\r\n\tSTREAMING\x10\x03\x32\xb5\x31\n\x04\x44\x61pr\x12\x64\n\rInvokeService\x12+.dapr.proto.runtime.v1.InvokeServiceRequest\x1a$.dapr.proto.common.v1.InvokeResponse\"\x00\x12]\n\x08GetState\x12&.dapr.proto.runtime.v1.GetStateRequest\x1a\'.dapr.proto.runtime.v1.GetStateResponse\"\x00\x12i\n\x0cGetBulkState\x12*.dapr.proto.runtime.v1.GetBulkStateRequest\x1a+.dapr.proto.runtime.v1.GetBulkStateResponse\"\x00\x12N\n\tSaveState\x12\'.dapr.proto.runtime.v1.SaveStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12i\n\x10QueryStateAlpha1\x12(.dapr.proto.runtime.v1.QueryStateRequest\x1a).dapr.proto.runtime.v1.QueryStateResponse\"\x00\x12R\n\x0b\x44\x65leteState\x12).dapr.proto.runtime.v1.DeleteStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12Z\n\x0f\x44\x65leteBulkState\x12-.dapr.proto.runtime.v1.DeleteBulkStateRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x17\x45xecuteStateTransaction\x12\x35.dapr.proto.runtime.v1.ExecuteStateTransactionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12T\n\x0cPublishEvent\x12*.dapr.proto.runtime.v1.PublishEventRequest\x1a\x16.google.protobuf.Empty\"\x00\x12q\n\x16\x42ulkPublishEventAlpha1\x12).dapr.proto.runtime.v1.BulkPublishRequest\x1a*.dapr.proto.runtime.v1.BulkPublishResponse\"\x00\x12\x97\x01\n\x1aSubscribeTopicEventsAlpha1\x12\x38.dapr.proto.runtime.v1.SubscribeTopicEventsRequestAlpha1\x1a\x39.dapr.proto.runtime.v1.SubscribeTopicEventsResponseAlpha1\"\x00(\x01\x30\x01\x12l\n\rInvokeBinding\x12+.dapr.proto.runtime.v1.InvokeBindingRequest\x1a,.dapr.proto.runtime.v1.InvokeBindingResponse\"\x00\x12`\n\tGetSecret\x12\'.dapr.proto.runtime.v1.GetSecretRequest\x1a(.dapr.proto.runtime.v1.GetSecretResponse\"\x00\x12l\n\rGetBulkSecret\x12+.dapr.proto.runtime.v1.GetBulkSecretRequest\x1a,.dapr.proto.runtime.v1.GetBulkSecretResponse\"\x00\x12`\n\x12RegisterActorTimer\x12\x30.dapr.proto.runtime.v1.RegisterActorTimerRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x64\n\x14UnregisterActorTimer\x12\x32.dapr.proto.runtime.v1.UnregisterActorTimerRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x15RegisterActorReminder\x12\x33.dapr.proto.runtime.v1.RegisterActorReminderRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x17UnregisterActorReminder\x12\x35.dapr.proto.runtime.v1.UnregisterActorReminderRequest\x1a\x16.google.protobuf.Empty\"\x00\x12l\n\rGetActorState\x12+.dapr.proto.runtime.v1.GetActorStateRequest\x1a,.dapr.proto.runtime.v1.GetActorStateResponse\"\x00\x12t\n\x1c\x45xecuteActorStateTransaction\x12:.dapr.proto.runtime.v1.ExecuteActorStateTransactionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x0bInvokeActor\x12).dapr.proto.runtime.v1.InvokeActorRequest\x1a*.dapr.proto.runtime.v1.InvokeActorResponse\"\x00\x12{\n\x16GetConfigurationAlpha1\x12..dapr.proto.runtime.v1.GetConfigurationRequest\x1a/.dapr.proto.runtime.v1.GetConfigurationResponse\"\x00\x12u\n\x10GetConfiguration\x12..dapr.proto.runtime.v1.GetConfigurationRequest\x1a/.dapr.proto.runtime.v1.GetConfigurationResponse\"\x00\x12\x8f\x01\n\x1cSubscribeConfigurationAlpha1\x12\x34.dapr.proto.runtime.v1.SubscribeConfigurationRequest\x1a\x35.dapr.proto.runtime.v1.SubscribeConfigurationResponse\"\x00\x30\x01\x12\x89\x01\n\x16SubscribeConfiguration\x12\x34.dapr.proto.runtime.v1.SubscribeConfigurationRequest\x1a\x35.dapr.proto.runtime.v1.SubscribeConfigurationResponse\"\x00\x30\x01\x12\x93\x01
\n\x1eUnsubscribeConfigurationAlpha1\x12\x36.dapr.proto.runtime.v1.UnsubscribeConfigurationRequest\x1a\x37.dapr.proto.runtime.v1.UnsubscribeConfigurationResponse\"\x00\x12\x8d\x01\n\x18UnsubscribeConfiguration\x12\x36.dapr.proto.runtime.v1.UnsubscribeConfigurationRequest\x1a\x37.dapr.proto.runtime.v1.UnsubscribeConfigurationResponse\"\x00\x12`\n\rTryLockAlpha1\x12%.dapr.proto.runtime.v1.TryLockRequest\x1a&.dapr.proto.runtime.v1.TryLockResponse\"\x00\x12]\n\x0cUnlockAlpha1\x12$.dapr.proto.runtime.v1.UnlockRequest\x1a%.dapr.proto.runtime.v1.UnlockResponse\"\x00\x12\x62\n\rEncryptAlpha1\x12%.dapr.proto.runtime.v1.EncryptRequest\x1a&.dapr.proto.runtime.v1.EncryptResponse(\x01\x30\x01\x12\x62\n\rDecryptAlpha1\x12%.dapr.proto.runtime.v1.DecryptRequest\x1a&.dapr.proto.runtime.v1.DecryptResponse(\x01\x30\x01\x12\x66\n\x0bGetMetadata\x12).dapr.proto.runtime.v1.GetMetadataRequest\x1a*.dapr.proto.runtime.v1.GetMetadataResponse\"\x00\x12R\n\x0bSetMetadata\x12).dapr.proto.runtime.v1.SetMetadataRequest\x1a\x16.google.protobuf.Empty\"\x00\x12m\n\x12SubtleGetKeyAlpha1\x12*.dapr.proto.runtime.v1.SubtleGetKeyRequest\x1a+.dapr.proto.runtime.v1.SubtleGetKeyResponse\x12p\n\x13SubtleEncryptAlpha1\x12+.dapr.proto.runtime.v1.SubtleEncryptRequest\x1a,.dapr.proto.runtime.v1.SubtleEncryptResponse\x12p\n\x13SubtleDecryptAlpha1\x12+.dapr.proto.runtime.v1.SubtleDecryptRequest\x1a,.dapr.proto.runtime.v1.SubtleDecryptResponse\x12p\n\x13SubtleWrapKeyAlpha1\x12+.dapr.proto.runtime.v1.SubtleWrapKeyRequest\x1a,.dapr.proto.runtime.v1.SubtleWrapKeyResponse\x12v\n\x15SubtleUnwrapKeyAlpha1\x12-.dapr.proto.runtime.v1.SubtleUnwrapKeyRequest\x1a..dapr.proto.runtime.v1.SubtleUnwrapKeyResponse\x12g\n\x10SubtleSignAlpha1\x12(.dapr.proto.runtime.v1.SubtleSignRequest\x1a).dapr.proto.runtime.v1.SubtleSignResponse\x12m\n\x12SubtleVerifyAlpha1\x12*.dapr.proto.runtime.v1.SubtleVerifyRequest\x1a+.dapr.proto.runtime.v1.SubtleVerifyResponse\x12r\n\x13StartWorkflowAlpha1\x12+.dapr.proto.runtime.v1.StartWorkflowRequest\x1a,.dapr.proto.runtime.v1.StartWorkflowResponse\"\x00\x12l\n\x11GetWorkflowAlpha1\x12).dapr.proto.runtime.v1.GetWorkflowRequest\x1a*.dapr.proto.runtime.v1.GetWorkflowResponse\"\x00\x12\\\n\x13PurgeWorkflowAlpha1\x12+.dapr.proto.runtime.v1.PurgeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x64\n\x17TerminateWorkflowAlpha1\x12/.dapr.proto.runtime.v1.TerminateWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\\\n\x13PauseWorkflowAlpha1\x12+.dapr.proto.runtime.v1.PauseWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12^\n\x14ResumeWorkflowAlpha1\x12,.dapr.proto.runtime.v1.ResumeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x66\n\x18RaiseEventWorkflowAlpha1\x12\x30.dapr.proto.runtime.v1.RaiseEventWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12q\n\x12StartWorkflowBeta1\x12+.dapr.proto.runtime.v1.StartWorkflowRequest\x1a,.dapr.proto.runtime.v1.StartWorkflowResponse\"\x00\x12k\n\x10GetWorkflowBeta1\x12).dapr.proto.runtime.v1.GetWorkflowRequest\x1a*.dapr.proto.runtime.v1.GetWorkflowResponse\"\x00\x12[\n\x12PurgeWorkflowBeta1\x12+.dapr.proto.runtime.v1.PurgeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x63\n\x16TerminateWorkflowBeta1\x12/.dapr.proto.runtime.v1.TerminateWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12[\n\x12PauseWorkflowBeta1\x12+.dapr.proto.runtime.v1.PauseWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12]\n\x13ResumeWorkflowBeta1\x12,.dapr.proto.runtime.v1.ResumeWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x65\n\x17RaiseEventWorkflowBeta1\x12\x30.dapr.p
roto.runtime.v1.RaiseEventWorkflowRequest\x1a\x16.google.protobuf.Empty\"\x00\x12L\n\x08Shutdown\x12&.dapr.proto.runtime.v1.ShutdownRequest\x1a\x16.google.protobuf.Empty\"\x00\x12l\n\x11ScheduleJobAlpha1\x12).dapr.proto.runtime.v1.ScheduleJobRequest\x1a*.dapr.proto.runtime.v1.ScheduleJobResponse\"\x00\x12]\n\x0cGetJobAlpha1\x12$.dapr.proto.runtime.v1.GetJobRequest\x1a%.dapr.proto.runtime.v1.GetJobResponse\"\x00\x12\x66\n\x0f\x44\x65leteJobAlpha1\x12\'.dapr.proto.runtime.v1.DeleteJobRequest\x1a(.dapr.proto.runtime.v1.DeleteJobResponse\"\x00\x12w\n\x0e\x43onverseAlpha1\x12\x30.dapr.proto.runtime.v1.ConversationAlpha1Request\x1a\x31.dapr.proto.runtime.v1.ConversationAlpha1Response\"\x00\x42i\n\nio.dapr.v1B\nDaprProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\xaa\x02\x1b\x44\x61pr.Client.Autogen.Grpc.v1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'dapr.proto.runtime.v1.dapr_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\nio.dapr.v1B\nDaprProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\252\002\033Dapr.Client.Autogen.Grpc.v1' - _globals['_GETSTATEREQUEST_METADATAENTRY']._loaded_options = None - _globals['_GETSTATEREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_GETBULKSTATEREQUEST_METADATAENTRY']._loaded_options = None - _globals['_GETBULKSTATEREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_BULKSTATEITEM_METADATAENTRY']._loaded_options = None - _globals['_BULKSTATEITEM_METADATAENTRY']._serialized_options = b'8\001' - _globals['_GETSTATERESPONSE_METADATAENTRY']._loaded_options = None - _globals['_GETSTATERESPONSE_METADATAENTRY']._serialized_options = b'8\001' - _globals['_DELETESTATEREQUEST_METADATAENTRY']._loaded_options = None - _globals['_DELETESTATEREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_QUERYSTATEREQUEST_METADATAENTRY']._loaded_options = None - _globals['_QUERYSTATEREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_QUERYSTATERESPONSE_METADATAENTRY']._loaded_options = None - _globals['_QUERYSTATERESPONSE_METADATAENTRY']._serialized_options = b'8\001' - _globals['_PUBLISHEVENTREQUEST_METADATAENTRY']._loaded_options = None - _globals['_PUBLISHEVENTREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_BULKPUBLISHREQUEST_METADATAENTRY']._loaded_options = None - _globals['_BULKPUBLISHREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_BULKPUBLISHREQUESTENTRY_METADATAENTRY']._loaded_options = None - _globals['_BULKPUBLISHREQUESTENTRY_METADATAENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBETOPICEVENTSREQUESTINITIALALPHA1_METADATAENTRY']._loaded_options = None - _globals['_SUBSCRIBETOPICEVENTSREQUESTINITIALALPHA1_METADATAENTRY']._serialized_options = b'8\001' - _globals['_INVOKEBINDINGREQUEST_METADATAENTRY']._loaded_options = None - _globals['_INVOKEBINDINGREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_INVOKEBINDINGRESPONSE_METADATAENTRY']._loaded_options = None - _globals['_INVOKEBINDINGRESPONSE_METADATAENTRY']._serialized_options = b'8\001' - _globals['_GETSECRETREQUEST_METADATAENTRY']._loaded_options = None - _globals['_GETSECRETREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_GETSECRETRESPONSE_DATAENTRY']._loaded_options = None - _globals['_GETSECRETRESPONSE_DATAENTRY']._serialized_options = b'8\001' - 
_globals['_GETBULKSECRETREQUEST_METADATAENTRY']._loaded_options = None - _globals['_GETBULKSECRETREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_SECRETRESPONSE_SECRETSENTRY']._loaded_options = None - _globals['_SECRETRESPONSE_SECRETSENTRY']._serialized_options = b'8\001' - _globals['_GETBULKSECRETRESPONSE_DATAENTRY']._loaded_options = None - _globals['_GETBULKSECRETRESPONSE_DATAENTRY']._serialized_options = b'8\001' - _globals['_EXECUTESTATETRANSACTIONREQUEST_METADATAENTRY']._loaded_options = None - _globals['_EXECUTESTATETRANSACTIONREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_GETACTORSTATERESPONSE_METADATAENTRY']._loaded_options = None - _globals['_GETACTORSTATERESPONSE_METADATAENTRY']._serialized_options = b'8\001' - _globals['_TRANSACTIONALACTORSTATEOPERATION_METADATAENTRY']._loaded_options = None - _globals['_TRANSACTIONALACTORSTATEOPERATION_METADATAENTRY']._serialized_options = b'8\001' - _globals['_INVOKEACTORREQUEST_METADATAENTRY']._loaded_options = None - _globals['_INVOKEACTORREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._loaded_options = None - _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_options = b'8\001' - _globals['_GETMETADATARESPONSE'].fields_by_name['active_actors_count']._loaded_options = None - _globals['_GETMETADATARESPONSE'].fields_by_name['active_actors_count']._serialized_options = b'\030\001' - _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._loaded_options = None - _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_options = b'8\001' - _globals['_GETCONFIGURATIONREQUEST_METADATAENTRY']._loaded_options = None - _globals['_GETCONFIGURATIONREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_GETCONFIGURATIONRESPONSE_ITEMSENTRY']._loaded_options = None - _globals['_GETCONFIGURATIONRESPONSE_ITEMSENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBECONFIGURATIONREQUEST_METADATAENTRY']._loaded_options = None - _globals['_SUBSCRIBECONFIGURATIONREQUEST_METADATAENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBECONFIGURATIONRESPONSE_ITEMSENTRY']._loaded_options = None - _globals['_SUBSCRIBECONFIGURATIONRESPONSE_ITEMSENTRY']._serialized_options = b'8\001' - _globals['_GETWORKFLOWRESPONSE_PROPERTIESENTRY']._loaded_options = None - _globals['_GETWORKFLOWRESPONSE_PROPERTIESENTRY']._serialized_options = b'8\001' - _globals['_STARTWORKFLOWREQUEST_OPTIONSENTRY']._loaded_options = None - _globals['_STARTWORKFLOWREQUEST_OPTIONSENTRY']._serialized_options = b'8\001' - _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_start=15086 - _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_end=15173 +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\nio.dapr.v1B\nDaprProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\252\002\033Dapr.Client.Autogen.Grpc.v1' + _GETSTATEREQUEST_METADATAENTRY._options = None + _GETSTATEREQUEST_METADATAENTRY._serialized_options = b'8\001' + _GETBULKSTATEREQUEST_METADATAENTRY._options = None + _GETBULKSTATEREQUEST_METADATAENTRY._serialized_options = b'8\001' + _BULKSTATEITEM_METADATAENTRY._options = None + _BULKSTATEITEM_METADATAENTRY._serialized_options = b'8\001' + _GETSTATERESPONSE_METADATAENTRY._options = None + _GETSTATERESPONSE_METADATAENTRY._serialized_options = b'8\001' + _DELETESTATEREQUEST_METADATAENTRY._options = None + _DELETESTATEREQUEST_METADATAENTRY._serialized_options = b'8\001' + 
_QUERYSTATEREQUEST_METADATAENTRY._options = None + _QUERYSTATEREQUEST_METADATAENTRY._serialized_options = b'8\001' + _QUERYSTATERESPONSE_METADATAENTRY._options = None + _QUERYSTATERESPONSE_METADATAENTRY._serialized_options = b'8\001' + _PUBLISHEVENTREQUEST_METADATAENTRY._options = None + _PUBLISHEVENTREQUEST_METADATAENTRY._serialized_options = b'8\001' + _BULKPUBLISHREQUEST_METADATAENTRY._options = None + _BULKPUBLISHREQUEST_METADATAENTRY._serialized_options = b'8\001' + _BULKPUBLISHREQUESTENTRY_METADATAENTRY._options = None + _BULKPUBLISHREQUESTENTRY_METADATAENTRY._serialized_options = b'8\001' + _SUBSCRIBETOPICEVENTSREQUESTINITIALALPHA1_METADATAENTRY._options = None + _SUBSCRIBETOPICEVENTSREQUESTINITIALALPHA1_METADATAENTRY._serialized_options = b'8\001' + _INVOKEBINDINGREQUEST_METADATAENTRY._options = None + _INVOKEBINDINGREQUEST_METADATAENTRY._serialized_options = b'8\001' + _INVOKEBINDINGRESPONSE_METADATAENTRY._options = None + _INVOKEBINDINGRESPONSE_METADATAENTRY._serialized_options = b'8\001' + _GETSECRETREQUEST_METADATAENTRY._options = None + _GETSECRETREQUEST_METADATAENTRY._serialized_options = b'8\001' + _GETSECRETRESPONSE_DATAENTRY._options = None + _GETSECRETRESPONSE_DATAENTRY._serialized_options = b'8\001' + _GETBULKSECRETREQUEST_METADATAENTRY._options = None + _GETBULKSECRETREQUEST_METADATAENTRY._serialized_options = b'8\001' + _SECRETRESPONSE_SECRETSENTRY._options = None + _SECRETRESPONSE_SECRETSENTRY._serialized_options = b'8\001' + _GETBULKSECRETRESPONSE_DATAENTRY._options = None + _GETBULKSECRETRESPONSE_DATAENTRY._serialized_options = b'8\001' + _EXECUTESTATETRANSACTIONREQUEST_METADATAENTRY._options = None + _EXECUTESTATETRANSACTIONREQUEST_METADATAENTRY._serialized_options = b'8\001' + _GETACTORSTATERESPONSE_METADATAENTRY._options = None + _GETACTORSTATERESPONSE_METADATAENTRY._serialized_options = b'8\001' + _TRANSACTIONALACTORSTATEOPERATION_METADATAENTRY._options = None + _TRANSACTIONALACTORSTATEOPERATION_METADATAENTRY._serialized_options = b'8\001' + _INVOKEACTORREQUEST_METADATAENTRY._options = None + _INVOKEACTORREQUEST_METADATAENTRY._serialized_options = b'8\001' + _GETMETADATARESPONSE_EXTENDEDMETADATAENTRY._options = None + _GETMETADATARESPONSE_EXTENDEDMETADATAENTRY._serialized_options = b'8\001' + _GETMETADATARESPONSE.fields_by_name['active_actors_count']._options = None + _GETMETADATARESPONSE.fields_by_name['active_actors_count']._serialized_options = b'\030\001' + _PUBSUBSUBSCRIPTION_METADATAENTRY._options = None + _PUBSUBSUBSCRIPTION_METADATAENTRY._serialized_options = b'8\001' + _GETCONFIGURATIONREQUEST_METADATAENTRY._options = None + _GETCONFIGURATIONREQUEST_METADATAENTRY._serialized_options = b'8\001' + _GETCONFIGURATIONRESPONSE_ITEMSENTRY._options = None + _GETCONFIGURATIONRESPONSE_ITEMSENTRY._serialized_options = b'8\001' + _SUBSCRIBECONFIGURATIONREQUEST_METADATAENTRY._options = None + _SUBSCRIBECONFIGURATIONREQUEST_METADATAENTRY._serialized_options = b'8\001' + _SUBSCRIBECONFIGURATIONRESPONSE_ITEMSENTRY._options = None + _SUBSCRIBECONFIGURATIONRESPONSE_ITEMSENTRY._serialized_options = b'8\001' + _GETWORKFLOWRESPONSE_PROPERTIESENTRY._options = None + _GETWORKFLOWRESPONSE_PROPERTIESENTRY._serialized_options = b'8\001' + _STARTWORKFLOWREQUEST_OPTIONSENTRY._options = None + _STARTWORKFLOWREQUEST_OPTIONSENTRY._serialized_options = b'8\001' + _CONVERSATIONALPHA1REQUEST_PARAMETERSENTRY._options = None + _CONVERSATIONALPHA1REQUEST_PARAMETERSENTRY._serialized_options = b'8\001' + _CONVERSATIONALPHA1REQUEST_METADATAENTRY._options = None + 
_CONVERSATIONALPHA1REQUEST_METADATAENTRY._serialized_options = b'8\001' + _CONVERSATIONALPHA1RESULT_PARAMETERSENTRY._options = None + _CONVERSATIONALPHA1RESULT_PARAMETERSENTRY._serialized_options = b'8\001' + _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_start=16034 + _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_end=16121 _globals['_INVOKESERVICEREQUEST']._serialized_start=224 _globals['_INVOKESERVICEREQUEST']._serialized_end=312 _globals['_GETSTATEREQUEST']._serialized_start=315 @@ -367,6 +363,20 @@ _globals['_DELETEJOBREQUEST']._serialized_end=15063 _globals['_DELETEJOBRESPONSE']._serialized_start=15065 _globals['_DELETEJOBRESPONSE']._serialized_end=15084 - _globals['_DAPR']._serialized_start=15176 - _globals['_DAPR']._serialized_end=21380 + _globals['_CONVERSATIONALPHA1REQUEST']._serialized_start=15087 + _globals['_CONVERSATIONALPHA1REQUEST']._serialized_end=15592 + _globals['_CONVERSATIONALPHA1REQUEST_PARAMETERSENTRY']._serialized_start=15429 + _globals['_CONVERSATIONALPHA1REQUEST_PARAMETERSENTRY']._serialized_end=15500 + _globals['_CONVERSATIONALPHA1REQUEST_METADATAENTRY']._serialized_start=513 + _globals['_CONVERSATIONALPHA1REQUEST_METADATAENTRY']._serialized_end=560 + _globals['_CONVERSATIONINPUT']._serialized_start=15594 + _globals['_CONVERSATIONINPUT']._serialized_end=15694 + _globals['_CONVERSATIONALPHA1RESULT']._serialized_start=15697 + _globals['_CONVERSATIONALPHA1RESULT']._serialized_end=15897 + _globals['_CONVERSATIONALPHA1RESULT_PARAMETERSENTRY']._serialized_start=15429 + _globals['_CONVERSATIONALPHA1RESULT_PARAMETERSENTRY']._serialized_end=15500 + _globals['_CONVERSATIONALPHA1RESPONSE']._serialized_start=15900 + _globals['_CONVERSATIONALPHA1RESPONSE']._serialized_end=16032 + _globals['_DAPR']._serialized_start=16124 + _globals['_DAPR']._serialized_end=22449 # @@protoc_insertion_point(module_scope) diff --git a/dapr/proto/runtime/v1/dapr_pb2.pyi b/dapr/proto/runtime/v1/dapr_pb2.pyi index c9a99f8b..11f695a5 100644 --- a/dapr/proto/runtime/v1/dapr_pb2.pyi +++ b/dapr/proto/runtime/v1/dapr_pb2.pyi @@ -13,7 +13,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ - import builtins import collections.abc import dapr.proto.common.v1.common_pb2 @@ -62,7 +61,7 @@ STREAMING: PubsubSubscriptionType.ValueType # 3 """Bidirectional Streaming subscription""" global___PubsubSubscriptionType = PubsubSubscriptionType -@typing.final +@typing_extensions.final class InvokeServiceRequest(google.protobuf.message.Message): """InvokeServiceRequest represents the request message for Service invocation.""" @@ -75,25 +74,24 @@ class InvokeServiceRequest(google.protobuf.message.Message): @property def message(self) -> dapr.proto.common.v1.common_pb2.InvokeRequest: """Required. message which will be delivered to callee.""" - def __init__( self, *, id: builtins.str = ..., message: dapr.proto.common.v1.common_pb2.InvokeRequest | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["message", b"message"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "message", b"message"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["message", b"message"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "message", b"message"]) -> None: ... 
global___InvokeServiceRequest = InvokeServiceRequest -@typing.final +@typing_extensions.final class GetStateRequest(google.protobuf.message.Message): """GetStateRequest is the message to get key-value states from specific state store.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -107,7 +105,7 @@ class GetStateRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... STORE_NAME_FIELD_NUMBER: builtins.int KEY_FIELD_NUMBER: builtins.int @@ -122,7 +120,6 @@ class GetStateRequest(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to state store components.""" - def __init__( self, *, @@ -131,17 +128,17 @@ class GetStateRequest(google.protobuf.message.Message): consistency: dapr.proto.common.v1.common_pb2.StateOptions.StateConsistency.ValueType = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["consistency", b"consistency", "key", b"key", "metadata", b"metadata", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["consistency", b"consistency", "key", b"key", "metadata", b"metadata", "store_name", b"store_name"]) -> None: ... global___GetStateRequest = GetStateRequest -@typing.final +@typing_extensions.final class GetBulkStateRequest(google.protobuf.message.Message): """GetBulkStateRequest is the message to get a list of key-value states from specific state store.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -155,7 +152,7 @@ class GetBulkStateRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... STORE_NAME_FIELD_NUMBER: builtins.int KEYS_FIELD_NUMBER: builtins.int @@ -163,16 +160,14 @@ class GetBulkStateRequest(google.protobuf.message.Message): METADATA_FIELD_NUMBER: builtins.int store_name: builtins.str """The name of state store.""" - parallelism: builtins.int - """The number of parallel operations executed on the state store for a get operation.""" @property def keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """The keys to get.""" - + parallelism: builtins.int + """The number of parallel operations executed on the state store for a get operation.""" @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to state store components.""" - def __init__( self, *, @@ -181,11 +176,11 @@ class GetBulkStateRequest(google.protobuf.message.Message): parallelism: builtins.int = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["keys", b"keys", "metadata", b"metadata", "parallelism", b"parallelism", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys", "metadata", b"metadata", "parallelism", b"parallelism", "store_name", b"store_name"]) -> None: ... global___GetBulkStateRequest = GetBulkStateRequest -@typing.final +@typing_extensions.final class GetBulkStateResponse(google.protobuf.message.Message): """GetBulkStateResponse is the response conveying the list of state values.""" @@ -195,17 +190,16 @@ class GetBulkStateResponse(google.protobuf.message.Message): @property def items(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BulkStateItem]: """The list of items containing the keys to get values for.""" - def __init__( self, *, items: collections.abc.Iterable[global___BulkStateItem] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["items", b"items"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["items", b"items"]) -> None: ... global___GetBulkStateResponse = GetBulkStateResponse -@typing.final +@typing_extensions.final class BulkStateItem(google.protobuf.message.Message): """BulkStateItem is the response item for a bulk get operation. Return values include the item key, data and etag. @@ -213,7 +207,7 @@ class BulkStateItem(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -227,7 +221,7 @@ class BulkStateItem(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... KEY_FIELD_NUMBER: builtins.int DATA_FIELD_NUMBER: builtins.int @@ -247,7 +241,6 @@ class BulkStateItem(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to app.""" - def __init__( self, *, @@ -257,17 +250,17 @@ class BulkStateItem(google.protobuf.message.Message): error: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "error", b"error", "etag", b"etag", "key", b"key", "metadata", b"metadata"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "error", b"error", "etag", b"etag", "key", b"key", "metadata", b"metadata"]) -> None: ... global___BulkStateItem = BulkStateItem -@typing.final +@typing_extensions.final class GetStateResponse(google.protobuf.message.Message): """GetStateResponse is the response conveying the state value and etag.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -281,7 +274,7 @@ class GetStateResponse(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... DATA_FIELD_NUMBER: builtins.int ETAG_FIELD_NUMBER: builtins.int @@ -295,7 +288,6 @@ class GetStateResponse(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to app.""" - def __init__( self, *, @@ -303,17 +295,17 @@ class GetStateResponse(google.protobuf.message.Message): etag: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "etag", b"etag", "metadata", b"metadata"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "etag", b"etag", "metadata", b"metadata"]) -> None: ... global___GetStateResponse = GetStateResponse -@typing.final +@typing_extensions.final class DeleteStateRequest(google.protobuf.message.Message): """DeleteStateRequest is the message to delete key-value states in the specific state store.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -327,7 +319,7 @@ class DeleteStateRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... STORE_NAME_FIELD_NUMBER: builtins.int KEY_FIELD_NUMBER: builtins.int @@ -343,17 +335,14 @@ class DeleteStateRequest(google.protobuf.message.Message): """The entity tag which represents the specific version of data. The exact ETag format is defined by the corresponding data store. """ - @property def options(self) -> dapr.proto.common.v1.common_pb2.StateOptions: """State operation options which includes concurrency/ consistency/retry_policy. """ - @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to state store components.""" - def __init__( self, *, @@ -363,12 +352,12 @@ class DeleteStateRequest(google.protobuf.message.Message): options: dapr.proto.common.v1.common_pb2.StateOptions | None = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["etag", b"etag", "options", b"options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["etag", b"etag", "key", b"key", "metadata", b"metadata", "options", b"options", "store_name", b"store_name"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["etag", b"etag", "options", b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["etag", b"etag", "key", b"key", "metadata", b"metadata", "options", b"options", "store_name", b"store_name"]) -> None: ... 
global___DeleteStateRequest = DeleteStateRequest -@typing.final +@typing_extensions.final class DeleteBulkStateRequest(google.protobuf.message.Message): """DeleteBulkStateRequest is the message to delete a list of key-value states from specific state store.""" @@ -381,18 +370,17 @@ class DeleteBulkStateRequest(google.protobuf.message.Message): @property def states(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[dapr.proto.common.v1.common_pb2.StateItem]: """The array of the state key values.""" - def __init__( self, *, store_name: builtins.str = ..., states: collections.abc.Iterable[dapr.proto.common.v1.common_pb2.StateItem] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["states", b"states", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["states", b"states", "store_name", b"store_name"]) -> None: ... global___DeleteBulkStateRequest = DeleteBulkStateRequest -@typing.final +@typing_extensions.final class SaveStateRequest(google.protobuf.message.Message): """SaveStateRequest is the message to save multiple states into state store.""" @@ -405,24 +393,23 @@ class SaveStateRequest(google.protobuf.message.Message): @property def states(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[dapr.proto.common.v1.common_pb2.StateItem]: """The array of the state key values.""" - def __init__( self, *, store_name: builtins.str = ..., states: collections.abc.Iterable[dapr.proto.common.v1.common_pb2.StateItem] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["states", b"states", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["states", b"states", "store_name", b"store_name"]) -> None: ... global___SaveStateRequest = SaveStateRequest -@typing.final +@typing_extensions.final class QueryStateRequest(google.protobuf.message.Message): """QueryStateRequest is the message to query state store.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -436,7 +423,7 @@ class QueryStateRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... STORE_NAME_FIELD_NUMBER: builtins.int QUERY_FIELD_NUMBER: builtins.int @@ -448,7 +435,6 @@ class QueryStateRequest(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to state store components.""" - def __init__( self, *, @@ -456,11 +442,11 @@ class QueryStateRequest(google.protobuf.message.Message): query: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["metadata", b"metadata", "query", b"query", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "query", b"query", "store_name", b"store_name"]) -> None: ... 
global___QueryStateRequest = QueryStateRequest -@typing.final +@typing_extensions.final class QueryStateItem(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -486,17 +472,17 @@ class QueryStateItem(google.protobuf.message.Message): etag: builtins.str = ..., error: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "error", b"error", "etag", b"etag", "key", b"key"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "error", b"error", "etag", b"etag", "key", b"key"]) -> None: ... global___QueryStateItem = QueryStateItem -@typing.final +@typing_extensions.final class QueryStateResponse(google.protobuf.message.Message): """QueryStateResponse is the response conveying the query results.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -510,21 +496,19 @@ class QueryStateResponse(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... RESULTS_FIELD_NUMBER: builtins.int TOKEN_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int - token: builtins.str - """Pagination token.""" @property def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___QueryStateItem]: """An array of query results.""" - + token: builtins.str + """Pagination token.""" @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to app.""" - def __init__( self, *, @@ -532,17 +516,17 @@ class QueryStateResponse(google.protobuf.message.Message): token: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["metadata", b"metadata", "results", b"results", "token", b"token"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "results", b"results", "token", b"token"]) -> None: ... global___QueryStateResponse = QueryStateResponse -@typing.final +@typing_extensions.final class PublishEventRequest(google.protobuf.message.Message): """PublishEventRequest is the message to publish event data to pubsub topic""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -556,7 +540,7 @@ class PublishEventRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... PUBSUB_NAME_FIELD_NUMBER: builtins.int TOPIC_FIELD_NUMBER: builtins.int @@ -578,7 +562,6 @@ class PublishEventRequest(google.protobuf.message.Message): metadata property: - key : the key of the message. 
""" - def __init__( self, *, @@ -588,17 +571,17 @@ class PublishEventRequest(google.protobuf.message.Message): data_content_type: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "data_content_type", b"data_content_type", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "topic", b"topic"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "data_content_type", b"data_content_type", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "topic", b"topic"]) -> None: ... global___PublishEventRequest = PublishEventRequest -@typing.final +@typing_extensions.final class BulkPublishRequest(google.protobuf.message.Message): """BulkPublishRequest is the message to bulk publish events to pubsub topic""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -612,7 +595,7 @@ class BulkPublishRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... PUBSUB_NAME_FIELD_NUMBER: builtins.int TOPIC_FIELD_NUMBER: builtins.int @@ -625,11 +608,9 @@ class BulkPublishRequest(google.protobuf.message.Message): @property def entries(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BulkPublishRequestEntry]: """The entries which contain the individual events and associated details to be published""" - @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The request level metadata passing to to the pubsub components""" - def __init__( self, *, @@ -638,17 +619,17 @@ class BulkPublishRequest(google.protobuf.message.Message): entries: collections.abc.Iterable[global___BulkPublishRequestEntry] | None = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entries", b"entries", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "topic", b"topic"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entries", b"entries", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "topic", b"topic"]) -> None: ... global___BulkPublishRequest = BulkPublishRequest -@typing.final +@typing_extensions.final class BulkPublishRequestEntry(google.protobuf.message.Message): """BulkPublishRequestEntry is the message containing the event to be bulk published""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -662,7 +643,7 @@ class BulkPublishRequestEntry(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
ENTRY_ID_FIELD_NUMBER: builtins.int EVENT_FIELD_NUMBER: builtins.int @@ -677,7 +658,6 @@ class BulkPublishRequestEntry(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The event level metadata passing to the pubsub component""" - def __init__( self, *, @@ -686,11 +666,11 @@ class BulkPublishRequestEntry(google.protobuf.message.Message): content_type: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["content_type", b"content_type", "entry_id", b"entry_id", "event", b"event", "metadata", b"metadata"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["content_type", b"content_type", "entry_id", b"entry_id", "event", b"event", "metadata", b"metadata"]) -> None: ... global___BulkPublishRequestEntry = BulkPublishRequestEntry -@typing.final +@typing_extensions.final class BulkPublishResponse(google.protobuf.message.Message): """BulkPublishResponse is the message returned from a BulkPublishEvent call""" @@ -700,17 +680,16 @@ class BulkPublishResponse(google.protobuf.message.Message): @property def failedEntries(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BulkPublishResponseFailedEntry]: """The entries for different events that failed publish in the BulkPublishEvent call""" - def __init__( self, *, failedEntries: collections.abc.Iterable[global___BulkPublishResponseFailedEntry] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["failedEntries", b"failedEntries"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["failedEntries", b"failedEntries"]) -> None: ... global___BulkPublishResponse = BulkPublishResponse -@typing.final +@typing_extensions.final class BulkPublishResponseFailedEntry(google.protobuf.message.Message): """BulkPublishResponseFailedEntry is the message containing the entryID and error of a failed event in BulkPublishEvent call""" @@ -728,11 +707,11 @@ class BulkPublishResponseFailedEntry(google.protobuf.message.Message): entry_id: builtins.str = ..., error: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entry_id", b"entry_id", "error", b"error"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entry_id", b"entry_id", "error", b"error"]) -> None: ... global___BulkPublishResponseFailedEntry = BulkPublishResponseFailedEntry -@typing.final +@typing_extensions.final class SubscribeTopicEventsRequestAlpha1(google.protobuf.message.Message): """SubscribeTopicEventsRequestAlpha1 is a message containing the details for subscribing to a topic via streaming. @@ -754,13 +733,13 @@ class SubscribeTopicEventsRequestAlpha1(google.protobuf.message.Message): initial_request: global___SubscribeTopicEventsRequestInitialAlpha1 | None = ..., event_processed: global___SubscribeTopicEventsRequestProcessedAlpha1 | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["event_processed", b"event_processed", "initial_request", b"initial_request", "subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["event_processed", b"event_processed", "initial_request", b"initial_request", "subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> None: ... 
- def WhichOneof(self, oneof_group: typing.Literal["subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> typing.Literal["initial_request", "event_processed"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["event_processed", b"event_processed", "initial_request", b"initial_request", "subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["event_processed", b"event_processed", "initial_request", b"initial_request", "subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["subscribe_topic_events_request_type", b"subscribe_topic_events_request_type"]) -> typing_extensions.Literal["initial_request", "event_processed"] | None: ... global___SubscribeTopicEventsRequestAlpha1 = SubscribeTopicEventsRequestAlpha1 -@typing.final +@typing_extensions.final class SubscribeTopicEventsRequestInitialAlpha1(google.protobuf.message.Message): """SubscribeTopicEventsRequestInitialAlpha1 is the initial message containing the details for subscribing to a topic via streaming. @@ -768,7 +747,7 @@ class SubscribeTopicEventsRequestInitialAlpha1(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -782,7 +761,7 @@ class SubscribeTopicEventsRequestInitialAlpha1(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... PUBSUB_NAME_FIELD_NUMBER: builtins.int TOPIC_FIELD_NUMBER: builtins.int @@ -792,10 +771,6 @@ class SubscribeTopicEventsRequestInitialAlpha1(google.protobuf.message.Message): """The name of the pubsub component""" topic: builtins.str """The pubsub topic""" - dead_letter_topic: builtins.str - """dead_letter_topic is the topic to which messages that fail to be processed - are sent. - """ @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata passing to pub components @@ -803,7 +778,10 @@ class SubscribeTopicEventsRequestInitialAlpha1(google.protobuf.message.Message): metadata property: - key : the key of the message. """ - + dead_letter_topic: builtins.str + """dead_letter_topic is the topic to which messages that fail to be processed + are sent. + """ def __init__( self, *, @@ -812,13 +790,13 @@ class SubscribeTopicEventsRequestInitialAlpha1(google.protobuf.message.Message): metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., dead_letter_topic: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_dead_letter_topic", b"_dead_letter_topic", "dead_letter_topic", b"dead_letter_topic"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_dead_letter_topic", b"_dead_letter_topic", "dead_letter_topic", b"dead_letter_topic", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "topic", b"topic"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["_dead_letter_topic", b"_dead_letter_topic"]) -> typing.Literal["dead_letter_topic"] | None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["_dead_letter_topic", b"_dead_letter_topic", "dead_letter_topic", b"dead_letter_topic"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_dead_letter_topic", b"_dead_letter_topic", "dead_letter_topic", b"dead_letter_topic", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "topic", b"topic"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_dead_letter_topic", b"_dead_letter_topic"]) -> typing_extensions.Literal["dead_letter_topic"] | None: ... global___SubscribeTopicEventsRequestInitialAlpha1 = SubscribeTopicEventsRequestInitialAlpha1 -@typing.final +@typing_extensions.final class SubscribeTopicEventsRequestProcessedAlpha1(google.protobuf.message.Message): """SubscribeTopicEventsRequestProcessedAlpha1 is the message containing the subscription to a topic. @@ -833,19 +811,18 @@ class SubscribeTopicEventsRequestProcessedAlpha1(google.protobuf.message.Message @property def status(self) -> dapr.proto.runtime.v1.appcallback_pb2.TopicEventResponse: """status is the result of the subscription request.""" - def __init__( self, *, id: builtins.str = ..., status: dapr.proto.runtime.v1.appcallback_pb2.TopicEventResponse | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["status", b"status"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "status", b"status"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["status", b"status"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "status", b"status"]) -> None: ... global___SubscribeTopicEventsRequestProcessedAlpha1 = SubscribeTopicEventsRequestProcessedAlpha1 -@typing.final +@typing_extensions.final class SubscribeTopicEventsResponseAlpha1(google.protobuf.message.Message): """SubscribeTopicEventsResponseAlpha1 is a message returned from daprd when subscribing to a topic via streaming. @@ -865,13 +842,13 @@ class SubscribeTopicEventsResponseAlpha1(google.protobuf.message.Message): initial_response: global___SubscribeTopicEventsResponseInitialAlpha1 | None = ..., event_message: dapr.proto.runtime.v1.appcallback_pb2.TopicEventRequest | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["event_message", b"event_message", "initial_response", b"initial_response", "subscribe_topic_events_response_type", b"subscribe_topic_events_response_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["event_message", b"event_message", "initial_response", b"initial_response", "subscribe_topic_events_response_type", b"subscribe_topic_events_response_type"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["subscribe_topic_events_response_type", b"subscribe_topic_events_response_type"]) -> typing.Literal["initial_response", "event_message"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["event_message", b"event_message", "initial_response", b"initial_response", "subscribe_topic_events_response_type", b"subscribe_topic_events_response_type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["event_message", b"event_message", "initial_response", b"initial_response", "subscribe_topic_events_response_type", b"subscribe_topic_events_response_type"]) -> None: ... 
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["subscribe_topic_events_response_type", b"subscribe_topic_events_response_type"]) -> typing_extensions.Literal["initial_response", "event_message"] | None: ... global___SubscribeTopicEventsResponseAlpha1 = SubscribeTopicEventsResponseAlpha1 -@typing.final +@typing_extensions.final class SubscribeTopicEventsResponseInitialAlpha1(google.protobuf.message.Message): """SubscribeTopicEventsResponseInitialAlpha1 is the initial response from daprd when subscribing to a topic. @@ -885,13 +862,13 @@ class SubscribeTopicEventsResponseInitialAlpha1(google.protobuf.message.Message) global___SubscribeTopicEventsResponseInitialAlpha1 = SubscribeTopicEventsResponseInitialAlpha1 -@typing.final +@typing_extensions.final class InvokeBindingRequest(google.protobuf.message.Message): """InvokeBindingRequest is the message to send data to output bindings""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -905,7 +882,7 @@ class InvokeBindingRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... NAME_FIELD_NUMBER: builtins.int DATA_FIELD_NUMBER: builtins.int @@ -915,19 +892,19 @@ class InvokeBindingRequest(google.protobuf.message.Message): """The name of the output binding to invoke.""" data: builtins.bytes """The data which will be sent to output binding.""" - operation: builtins.str - """The name of the operation type for the binding to invoke""" @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata passing to output binding components Common metadata property: - ttlInSeconds : the time to live in seconds for the message. + If set in the binding definition will cause all messages to have a default time to live. The message ttl overrides any value in the binding definition. """ - + operation: builtins.str + """The name of the operation type for the binding to invoke""" def __init__( self, *, @@ -936,17 +913,17 @@ class InvokeBindingRequest(google.protobuf.message.Message): metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., operation: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "metadata", b"metadata", "name", b"name", "operation", b"operation"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "metadata", b"metadata", "name", b"name", "operation", b"operation"]) -> None: ... global___InvokeBindingRequest = InvokeBindingRequest -@typing.final +@typing_extensions.final class InvokeBindingResponse(google.protobuf.message.Message): """InvokeBindingResponse is the message returned from an output binding invocation""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -960,7 +937,7 @@ class InvokeBindingResponse(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... DATA_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int @@ -969,24 +946,23 @@ class InvokeBindingResponse(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata returned from an external system""" - def __init__( self, *, data: builtins.bytes = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "metadata", b"metadata"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "metadata", b"metadata"]) -> None: ... global___InvokeBindingResponse = InvokeBindingResponse -@typing.final +@typing_extensions.final class GetSecretRequest(google.protobuf.message.Message): """GetSecretRequest is the message to get secret from secret store.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1000,7 +976,7 @@ class GetSecretRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... STORE_NAME_FIELD_NUMBER: builtins.int KEY_FIELD_NUMBER: builtins.int @@ -1012,7 +988,6 @@ class GetSecretRequest(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to secret store components.""" - def __init__( self, *, @@ -1020,17 +995,17 @@ class GetSecretRequest(google.protobuf.message.Message): key: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "metadata", b"metadata", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "metadata", b"metadata", "store_name", b"store_name"]) -> None: ... global___GetSecretRequest = GetSecretRequest -@typing.final +@typing_extensions.final class GetSecretResponse(google.protobuf.message.Message): """GetSecretResponse is the response message to convey the requested secret.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class DataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1044,7 +1019,7 @@ class GetSecretResponse(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... DATA_FIELD_NUMBER: builtins.int @property @@ -1052,23 +1027,22 @@ class GetSecretResponse(google.protobuf.message.Message): """data is the secret value. Some secret store, such as kubernetes secret store, can save multiple secrets for single secret key. """ - def __init__( self, *, data: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___GetSecretResponse = GetSecretResponse -@typing.final +@typing_extensions.final class GetBulkSecretRequest(google.protobuf.message.Message): """GetBulkSecretRequest is the message to get the secrets from secret store.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1082,7 +1056,7 @@ class GetBulkSecretRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... STORE_NAME_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int @@ -1091,24 +1065,23 @@ class GetBulkSecretRequest(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to secret store components.""" - def __init__( self, *, store_name: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["metadata", b"metadata", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "store_name", b"store_name"]) -> None: ... global___GetBulkSecretRequest = GetBulkSecretRequest -@typing.final +@typing_extensions.final class SecretResponse(google.protobuf.message.Message): """SecretResponse is a map of decrypted string/string values""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class SecretsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1122,7 +1095,7 @@ class SecretResponse(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... SECRETS_FIELD_NUMBER: builtins.int @property @@ -1132,17 +1105,17 @@ class SecretResponse(google.protobuf.message.Message): *, secrets: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["secrets", b"secrets"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["secrets", b"secrets"]) -> None: ... global___SecretResponse = SecretResponse -@typing.final +@typing_extensions.final class GetBulkSecretResponse(google.protobuf.message.Message): """GetBulkSecretResponse is the response message to convey the requested secrets.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class DataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1157,8 +1130,8 @@ class GetBulkSecretResponse(google.protobuf.message.Message): key: builtins.str = ..., value: global___SecretResponse | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... DATA_FIELD_NUMBER: builtins.int @property @@ -1166,17 +1139,16 @@ class GetBulkSecretResponse(google.protobuf.message.Message): """data hold the secret values. Some secret store, such as kubernetes secret store, can save multiple secrets for single secret key. """ - def __init__( self, *, data: collections.abc.Mapping[builtins.str, global___SecretResponse] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___GetBulkSecretResponse = GetBulkSecretResponse -@typing.final +@typing_extensions.final class TransactionalStateOperation(google.protobuf.message.Message): """TransactionalStateOperation is the message to execute a specified operation with a key-value pair.""" @@ -1189,25 +1161,24 @@ class TransactionalStateOperation(google.protobuf.message.Message): @property def request(self) -> dapr.proto.common.v1.common_pb2.StateItem: """State values to be operated on""" - def __init__( self, *, operationType: builtins.str = ..., request: dapr.proto.common.v1.common_pb2.StateItem | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["request", b"request"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["operationType", b"operationType", "request", b"request"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["request", b"request"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["operationType", b"operationType", "request", b"request"]) -> None: ... global___TransactionalStateOperation = TransactionalStateOperation -@typing.final +@typing_extensions.final class ExecuteStateTransactionRequest(google.protobuf.message.Message): """ExecuteStateTransactionRequest is the message to execute multiple operations on a specified store.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1221,7 +1192,7 @@ class ExecuteStateTransactionRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... STORENAME_FIELD_NUMBER: builtins.int OPERATIONS_FIELD_NUMBER: builtins.int @@ -1231,11 +1202,9 @@ class ExecuteStateTransactionRequest(google.protobuf.message.Message): @property def operations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TransactionalStateOperation]: """Required. 
transactional operation list.""" - @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata used for transactional operations.""" - def __init__( self, *, @@ -1243,11 +1212,11 @@ class ExecuteStateTransactionRequest(google.protobuf.message.Message): operations: collections.abc.Iterable[global___TransactionalStateOperation] | None = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["metadata", b"metadata", "operations", b"operations", "storeName", b"storeName"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "operations", b"operations", "storeName", b"storeName"]) -> None: ... global___ExecuteStateTransactionRequest = ExecuteStateTransactionRequest -@typing.final +@typing_extensions.final class RegisterActorTimerRequest(google.protobuf.message.Message): """RegisterActorTimerRequest is the message to register a timer for an actor of a given type and id.""" @@ -1281,11 +1250,11 @@ class RegisterActorTimerRequest(google.protobuf.message.Message): data: builtins.bytes = ..., ttl: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "callback", b"callback", "data", b"data", "due_time", b"due_time", "name", b"name", "period", b"period", "ttl", b"ttl"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "callback", b"callback", "data", b"data", "due_time", b"due_time", "name", b"name", "period", b"period", "ttl", b"ttl"]) -> None: ... global___RegisterActorTimerRequest = RegisterActorTimerRequest -@typing.final +@typing_extensions.final class UnregisterActorTimerRequest(google.protobuf.message.Message): """UnregisterActorTimerRequest is the message to unregister an actor timer""" @@ -1304,11 +1273,11 @@ class UnregisterActorTimerRequest(google.protobuf.message.Message): actor_id: builtins.str = ..., name: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "name", b"name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "name", b"name"]) -> None: ... global___UnregisterActorTimerRequest = UnregisterActorTimerRequest -@typing.final +@typing_extensions.final class RegisterActorReminderRequest(google.protobuf.message.Message): """RegisterActorReminderRequest is the message to register a reminder for an actor of a given type and id.""" @@ -1339,11 +1308,11 @@ class RegisterActorReminderRequest(google.protobuf.message.Message): data: builtins.bytes = ..., ttl: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "data", b"data", "due_time", b"due_time", "name", b"name", "period", b"period", "ttl", b"ttl"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "data", b"data", "due_time", b"due_time", "name", b"name", "period", b"period", "ttl", b"ttl"]) -> None: ... 
global___RegisterActorReminderRequest = RegisterActorReminderRequest -@typing.final +@typing_extensions.final class UnregisterActorReminderRequest(google.protobuf.message.Message): """UnregisterActorReminderRequest is the message to unregister an actor reminder.""" @@ -1362,11 +1331,11 @@ class UnregisterActorReminderRequest(google.protobuf.message.Message): actor_id: builtins.str = ..., name: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "name", b"name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "name", b"name"]) -> None: ... global___UnregisterActorReminderRequest = UnregisterActorReminderRequest -@typing.final +@typing_extensions.final class GetActorStateRequest(google.protobuf.message.Message): """GetActorStateRequest is the message to get key-value states from specific actor.""" @@ -1385,17 +1354,17 @@ class GetActorStateRequest(google.protobuf.message.Message): actor_id: builtins.str = ..., key: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "key", b"key"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "key", b"key"]) -> None: ... global___GetActorStateRequest = GetActorStateRequest -@typing.final +@typing_extensions.final class GetActorStateResponse(google.protobuf.message.Message): """GetActorStateResponse is the response conveying the actor's state value.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1409,7 +1378,7 @@ class GetActorStateResponse(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... DATA_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int @@ -1417,18 +1386,17 @@ class GetActorStateResponse(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to app.""" - def __init__( self, *, data: builtins.bytes = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "metadata", b"metadata"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "metadata", b"metadata"]) -> None: ... global___GetActorStateResponse = GetActorStateResponse -@typing.final +@typing_extensions.final class ExecuteActorStateTransactionRequest(google.protobuf.message.Message): """ExecuteActorStateTransactionRequest is the message to execute multiple operations on a specified actor.""" @@ -1448,17 +1416,17 @@ class ExecuteActorStateTransactionRequest(google.protobuf.message.Message): actor_id: builtins.str = ..., operations: collections.abc.Iterable[global___TransactionalActorStateOperation] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "operations", b"operations"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "operations", b"operations"]) -> None: ... global___ExecuteActorStateTransactionRequest = ExecuteActorStateTransactionRequest -@typing.final +@typing_extensions.final class TransactionalActorStateOperation(google.protobuf.message.Message): """TransactionalActorStateOperation is the message to execute a specified operation with a key-value pair.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1472,7 +1440,7 @@ class TransactionalActorStateOperation(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... OPERATIONTYPE_FIELD_NUMBER: builtins.int KEY_FIELD_NUMBER: builtins.int @@ -1489,7 +1457,6 @@ class TransactionalActorStateOperation(google.protobuf.message.Message): Common metadata property: - ttlInSeconds : the time to live in seconds for the stored value. """ - def __init__( self, *, @@ -1498,18 +1465,18 @@ class TransactionalActorStateOperation(google.protobuf.message.Message): value: google.protobuf.any_pb2.Any | None = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "metadata", b"metadata", "operationType", b"operationType", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "metadata", b"metadata", "operationType", b"operationType", "value", b"value"]) -> None: ... global___TransactionalActorStateOperation = TransactionalActorStateOperation -@typing.final +@typing_extensions.final class InvokeActorRequest(google.protobuf.message.Message): """InvokeActorRequest is the message to call an actor.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1523,7 +1490,7 @@ class InvokeActorRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ACTOR_TYPE_FIELD_NUMBER: builtins.int ACTOR_ID_FIELD_NUMBER: builtins.int @@ -1545,11 +1512,11 @@ class InvokeActorRequest(google.protobuf.message.Message): data: builtins.bytes = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "data", b"data", "metadata", b"metadata", "method", b"method"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["actor_id", b"actor_id", "actor_type", b"actor_type", "data", b"data", "metadata", b"metadata", "method", b"method"]) -> None: ... 
global___InvokeActorRequest = InvokeActorRequest -@typing.final +@typing_extensions.final class InvokeActorResponse(google.protobuf.message.Message): """InvokeActorResponse is the method that returns an actor invocation response.""" @@ -1562,11 +1529,11 @@ class InvokeActorResponse(google.protobuf.message.Message): *, data: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___InvokeActorResponse = InvokeActorResponse -@typing.final +@typing_extensions.final class GetMetadataRequest(google.protobuf.message.Message): """GetMetadataRequest is the message for the GetMetadata request. Empty @@ -1580,13 +1547,13 @@ class GetMetadataRequest(google.protobuf.message.Message): global___GetMetadataRequest = GetMetadataRequest -@typing.final +@typing_extensions.final class GetMetadataResponse(google.protobuf.message.Message): """GetMetadataResponse is a message that is returned on GetMetadata rpc call.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class ExtendedMetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1600,7 +1567,7 @@ class GetMetadataResponse(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ID_FIELD_NUMBER: builtins.int ACTIVE_ACTORS_COUNT_FIELD_NUMBER: builtins.int @@ -1613,11 +1580,9 @@ class GetMetadataResponse(google.protobuf.message.Message): ENABLED_FEATURES_FIELD_NUMBER: builtins.int ACTOR_RUNTIME_FIELD_NUMBER: builtins.int id: builtins.str - runtime_version: builtins.str @property def active_actors_count(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ActiveActorsCount]: """Deprecated alias for actor_runtime.active_actors.""" - @property def registered_components(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RegisteredComponents]: ... @property @@ -1628,12 +1593,12 @@ class GetMetadataResponse(google.protobuf.message.Message): def http_endpoints(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetadataHTTPEndpoint]: ... @property def app_connection_properties(self) -> global___AppConnectionProperties: ... + runtime_version: builtins.str @property def enabled_features(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... @property def actor_runtime(self) -> global___ActorRuntime: """TODO: Cassie: probably add scheduler runtime status""" - def __init__( self, *, @@ -1648,12 +1613,12 @@ class GetMetadataResponse(google.protobuf.message.Message): enabled_features: collections.abc.Iterable[builtins.str] | None = ..., actor_runtime: global___ActorRuntime | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["actor_runtime", b"actor_runtime", "app_connection_properties", b"app_connection_properties"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["active_actors_count", b"active_actors_count", "actor_runtime", b"actor_runtime", "app_connection_properties", b"app_connection_properties", "enabled_features", b"enabled_features", "extended_metadata", b"extended_metadata", "http_endpoints", b"http_endpoints", "id", b"id", "registered_components", b"registered_components", "runtime_version", b"runtime_version", "subscriptions", b"subscriptions"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["actor_runtime", b"actor_runtime", "app_connection_properties", b"app_connection_properties"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["active_actors_count", b"active_actors_count", "actor_runtime", b"actor_runtime", "app_connection_properties", b"app_connection_properties", "enabled_features", b"enabled_features", "extended_metadata", b"extended_metadata", "http_endpoints", b"http_endpoints", "id", b"id", "registered_components", b"registered_components", "runtime_version", b"runtime_version", "subscriptions", b"subscriptions"]) -> None: ... global___GetMetadataResponse = GetMetadataResponse -@typing.final +@typing_extensions.final class ActorRuntime(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1688,14 +1653,13 @@ class ActorRuntime(google.protobuf.message.Message): PLACEMENT_FIELD_NUMBER: builtins.int runtime_status: global___ActorRuntime.ActorRuntimeStatus.ValueType """Contains an enum indicating whether the actor runtime has been initialized.""" + @property + def active_actors(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ActiveActorsCount]: + """Count of active actors per type.""" host_ready: builtins.bool """Indicates whether the actor runtime is ready to host actors.""" placement: builtins.str """Custom message from the placement provider.""" - @property - def active_actors(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ActiveActorsCount]: - """Count of active actors per type.""" - def __init__( self, *, @@ -1704,11 +1668,11 @@ class ActorRuntime(google.protobuf.message.Message): host_ready: builtins.bool = ..., placement: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["active_actors", b"active_actors", "host_ready", b"host_ready", "placement", b"placement", "runtime_status", b"runtime_status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["active_actors", b"active_actors", "host_ready", b"host_ready", "placement", b"placement", "runtime_status", b"runtime_status"]) -> None: ... global___ActorRuntime = ActorRuntime -@typing.final +@typing_extensions.final class ActiveActorsCount(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1722,11 +1686,11 @@ class ActiveActorsCount(google.protobuf.message.Message): type: builtins.str = ..., count: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["count", b"count", "type", b"type"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["count", b"count", "type", b"type"]) -> None: ... 
global___ActiveActorsCount = ActiveActorsCount -@typing.final +@typing_extensions.final class RegisteredComponents(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1747,11 +1711,11 @@ class RegisteredComponents(google.protobuf.message.Message): version: builtins.str = ..., capabilities: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["capabilities", b"capabilities", "name", b"name", "type", b"type", "version", b"version"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["capabilities", b"capabilities", "name", b"name", "type", b"type", "version", b"version"]) -> None: ... global___RegisteredComponents = RegisteredComponents -@typing.final +@typing_extensions.final class MetadataHTTPEndpoint(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1762,11 +1726,11 @@ class MetadataHTTPEndpoint(google.protobuf.message.Message): *, name: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name"]) -> None: ... global___MetadataHTTPEndpoint = MetadataHTTPEndpoint -@typing.final +@typing_extensions.final class AppConnectionProperties(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1790,12 +1754,12 @@ class AppConnectionProperties(google.protobuf.message.Message): max_concurrency: builtins.int = ..., health: global___AppConnectionHealthProperties | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["health", b"health"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["channel_address", b"channel_address", "health", b"health", "max_concurrency", b"max_concurrency", "port", b"port", "protocol", b"protocol"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["health", b"health"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["channel_address", b"channel_address", "health", b"health", "max_concurrency", b"max_concurrency", "port", b"port", "protocol", b"protocol"]) -> None: ... global___AppConnectionProperties = AppConnectionProperties -@typing.final +@typing_extensions.final class AppConnectionHealthProperties(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1815,15 +1779,15 @@ class AppConnectionHealthProperties(google.protobuf.message.Message): health_probe_timeout: builtins.str = ..., health_threshold: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["health_check_path", b"health_check_path", "health_probe_interval", b"health_probe_interval", "health_probe_timeout", b"health_probe_timeout", "health_threshold", b"health_threshold"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["health_check_path", b"health_check_path", "health_probe_interval", b"health_probe_interval", "health_probe_timeout", b"health_probe_timeout", "health_threshold", b"health_threshold"]) -> None: ... 
global___AppConnectionHealthProperties = AppConnectionHealthProperties -@typing.final +@typing_extensions.final class PubsubSubscription(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1837,7 +1801,7 @@ class PubsubSubscription(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... PUBSUB_NAME_FIELD_NUMBER: builtins.int TOPIC_FIELD_NUMBER: builtins.int @@ -1847,12 +1811,12 @@ class PubsubSubscription(google.protobuf.message.Message): TYPE_FIELD_NUMBER: builtins.int pubsub_name: builtins.str topic: builtins.str - dead_letter_topic: builtins.str - type: global___PubsubSubscriptionType.ValueType @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... @property def rules(self) -> global___PubsubSubscriptionRules: ... + dead_letter_topic: builtins.str + type: global___PubsubSubscriptionType.ValueType def __init__( self, *, @@ -1863,12 +1827,12 @@ class PubsubSubscription(google.protobuf.message.Message): dead_letter_topic: builtins.str = ..., type: global___PubsubSubscriptionType.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["rules", b"rules"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["dead_letter_topic", b"dead_letter_topic", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "rules", b"rules", "topic", b"topic", "type", b"type"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["rules", b"rules"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["dead_letter_topic", b"dead_letter_topic", "metadata", b"metadata", "pubsub_name", b"pubsub_name", "rules", b"rules", "topic", b"topic", "type", b"type"]) -> None: ... global___PubsubSubscription = PubsubSubscription -@typing.final +@typing_extensions.final class PubsubSubscriptionRules(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1880,11 +1844,11 @@ class PubsubSubscriptionRules(google.protobuf.message.Message): *, rules: collections.abc.Iterable[global___PubsubSubscriptionRule] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["rules", b"rules"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["rules", b"rules"]) -> None: ... global___PubsubSubscriptionRules = PubsubSubscriptionRules -@typing.final +@typing_extensions.final class PubsubSubscriptionRule(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1898,11 +1862,11 @@ class PubsubSubscriptionRule(google.protobuf.message.Message): match: builtins.str = ..., path: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["match", b"match", "path", b"path"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["match", b"match", "path", b"path"]) -> None: ... 
global___PubsubSubscriptionRule = PubsubSubscriptionRule -@typing.final +@typing_extensions.final class SetMetadataRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1916,17 +1880,17 @@ class SetMetadataRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... global___SetMetadataRequest = SetMetadataRequest -@typing.final +@typing_extensions.final class GetConfigurationRequest(google.protobuf.message.Message): """GetConfigurationRequest is the message to get a list of key-value configuration from specified configuration store.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1940,7 +1904,7 @@ class GetConfigurationRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... STORE_NAME_FIELD_NUMBER: builtins.int KEYS_FIELD_NUMBER: builtins.int @@ -1953,11 +1917,9 @@ class GetConfigurationRequest(google.protobuf.message.Message): If set, only query for the specified configuration items. Empty list means fetch all. """ - @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """Optional. The metadata which will be sent to configuration store components.""" - def __init__( self, *, @@ -1965,11 +1927,11 @@ class GetConfigurationRequest(google.protobuf.message.Message): keys: collections.abc.Iterable[builtins.str] | None = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["keys", b"keys", "metadata", b"metadata", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys", "metadata", b"metadata", "store_name", b"store_name"]) -> None: ... global___GetConfigurationRequest = GetConfigurationRequest -@typing.final +@typing_extensions.final class GetConfigurationResponse(google.protobuf.message.Message): """GetConfigurationResponse is the response conveying the list of configuration values. It should be the FULL configuration of specified application which contains all of its configuration items. @@ -1977,7 +1939,7 @@ class GetConfigurationResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class ItemsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1992,8 +1954,8 @@ class GetConfigurationResponse(google.protobuf.message.Message): key: builtins.str = ..., value: dapr.proto.common.v1.common_pb2.ConfigurationItem | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ITEMS_FIELD_NUMBER: builtins.int @property @@ -2003,17 +1965,17 @@ class GetConfigurationResponse(google.protobuf.message.Message): *, items: collections.abc.Mapping[builtins.str, dapr.proto.common.v1.common_pb2.ConfigurationItem] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["items", b"items"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["items", b"items"]) -> None: ... global___GetConfigurationResponse = GetConfigurationResponse -@typing.final +@typing_extensions.final class SubscribeConfigurationRequest(google.protobuf.message.Message): """SubscribeConfigurationRequest is the message to get a list of key-value configuration from specified configuration store.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2027,7 +1989,7 @@ class SubscribeConfigurationRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... STORE_NAME_FIELD_NUMBER: builtins.int KEYS_FIELD_NUMBER: builtins.int @@ -2040,11 +2002,9 @@ class SubscribeConfigurationRequest(google.protobuf.message.Message): If set, only query for the specified configuration items. Empty list means fetch all. """ - @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """The metadata which will be sent to configuration store components.""" - def __init__( self, *, @@ -2052,11 +2012,11 @@ class SubscribeConfigurationRequest(google.protobuf.message.Message): keys: collections.abc.Iterable[builtins.str] | None = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["keys", b"keys", "metadata", b"metadata", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys", "metadata", b"metadata", "store_name", b"store_name"]) -> None: ... global___SubscribeConfigurationRequest = SubscribeConfigurationRequest -@typing.final +@typing_extensions.final class UnsubscribeConfigurationRequest(google.protobuf.message.Message): """UnSubscribeConfigurationRequest is the message to stop watching the key-value configuration.""" @@ -2074,15 +2034,15 @@ class UnsubscribeConfigurationRequest(google.protobuf.message.Message): store_name: builtins.str = ..., id: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "store_name", b"store_name"]) -> None: ... 
global___UnsubscribeConfigurationRequest = UnsubscribeConfigurationRequest -@typing.final +@typing_extensions.final class SubscribeConfigurationResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class ItemsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2097,8 +2057,8 @@ class SubscribeConfigurationResponse(google.protobuf.message.Message): key: builtins.str = ..., value: dapr.proto.common.v1.common_pb2.ConfigurationItem | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ID_FIELD_NUMBER: builtins.int ITEMS_FIELD_NUMBER: builtins.int @@ -2107,18 +2067,17 @@ class SubscribeConfigurationResponse(google.protobuf.message.Message): @property def items(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, dapr.proto.common.v1.common_pb2.ConfigurationItem]: """The list of items containing configuration values""" - def __init__( self, *, id: builtins.str = ..., items: collections.abc.Mapping[builtins.str, dapr.proto.common.v1.common_pb2.ConfigurationItem] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "items", b"items"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "items", b"items"]) -> None: ... global___SubscribeConfigurationResponse = SubscribeConfigurationResponse -@typing.final +@typing_extensions.final class UnsubscribeConfigurationResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2132,11 +2091,11 @@ class UnsubscribeConfigurationResponse(google.protobuf.message.Message): ok: builtins.bool = ..., message: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["message", b"message", "ok", b"ok"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["message", b"message", "ok", b"ok"]) -> None: ... global___UnsubscribeConfigurationResponse = UnsubscribeConfigurationResponse -@typing.final +@typing_extensions.final class TryLockRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2161,11 +2120,11 @@ class TryLockRequest(google.protobuf.message.Message): The reason why we don't make it automatically generated is: 1. If it is automatically generated,there must be a 'my_lock_owner_id' field in the response. - This name is so weird that we think it is inappropriate to put it into the api spec + This name is so weird that we think it is inappropriate to put it into the api spec 2. If we change the field 'my_lock_owner_id' in the response to 'lock_owner',which means the current lock owner of this lock, - we find that in some lock services users can't get the current lock owner.Actually users don't need it at all. + we find that in some lock services users can't get the current lock owner.Actually users don't need it at all. 3. When reentrant lock is needed,the existing lock_owner is required to identify client and check "whether this client can reenter this lock". - So this field in the request shouldn't be removed. 
+ So this field in the request shouldn't be removed. """ expiry_in_seconds: builtins.int """Required. The time before expiry.The time unit is second.""" @@ -2177,11 +2136,11 @@ class TryLockRequest(google.protobuf.message.Message): lock_owner: builtins.str = ..., expiry_in_seconds: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["expiry_in_seconds", b"expiry_in_seconds", "lock_owner", b"lock_owner", "resource_id", b"resource_id", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["expiry_in_seconds", b"expiry_in_seconds", "lock_owner", b"lock_owner", "resource_id", b"resource_id", "store_name", b"store_name"]) -> None: ... global___TryLockRequest = TryLockRequest -@typing.final +@typing_extensions.final class TryLockResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2192,11 +2151,11 @@ class TryLockResponse(google.protobuf.message.Message): *, success: builtins.bool = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["success", b"success"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["success", b"success"]) -> None: ... global___TryLockResponse = TryLockResponse -@typing.final +@typing_extensions.final class UnlockRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2214,11 +2173,11 @@ class UnlockRequest(google.protobuf.message.Message): resource_id: builtins.str = ..., lock_owner: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["lock_owner", b"lock_owner", "resource_id", b"resource_id", "store_name", b"store_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["lock_owner", b"lock_owner", "resource_id", b"resource_id", "store_name", b"store_name"]) -> None: ... global___UnlockRequest = UnlockRequest -@typing.final +@typing_extensions.final class UnlockResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2246,11 +2205,11 @@ class UnlockResponse(google.protobuf.message.Message): *, status: global___UnlockResponse.Status.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... global___UnlockResponse = UnlockResponse -@typing.final +@typing_extensions.final class SubtleGetKeyRequest(google.protobuf.message.Message): """SubtleGetKeyRequest is the request object for SubtleGetKeyAlpha1.""" @@ -2289,11 +2248,11 @@ class SubtleGetKeyRequest(google.protobuf.message.Message): name: builtins.str = ..., format: global___SubtleGetKeyRequest.KeyFormat.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["component_name", b"component_name", "format", b"format", "name", b"name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["component_name", b"component_name", "format", b"format", "name", b"name"]) -> None: ... global___SubtleGetKeyRequest = SubtleGetKeyRequest -@typing.final +@typing_extensions.final class SubtleGetKeyResponse(google.protobuf.message.Message): """SubtleGetKeyResponse is the response for SubtleGetKeyAlpha1.""" @@ -2313,11 +2272,11 @@ class SubtleGetKeyResponse(google.protobuf.message.Message): name: builtins.str = ..., public_key: builtins.str = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["name", b"name", "public_key", b"public_key"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "public_key", b"public_key"]) -> None: ... global___SubtleGetKeyResponse = SubtleGetKeyResponse -@typing.final +@typing_extensions.final class SubtleEncryptRequest(google.protobuf.message.Message): """SubtleEncryptRequest is the request for SubtleEncryptAlpha1.""" @@ -2353,11 +2312,11 @@ class SubtleEncryptRequest(google.protobuf.message.Message): nonce: builtins.bytes = ..., associated_data: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["algorithm", b"algorithm", "associated_data", b"associated_data", "component_name", b"component_name", "key_name", b"key_name", "nonce", b"nonce", "plaintext", b"plaintext"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "associated_data", b"associated_data", "component_name", b"component_name", "key_name", b"key_name", "nonce", b"nonce", "plaintext", b"plaintext"]) -> None: ... global___SubtleEncryptRequest = SubtleEncryptRequest -@typing.final +@typing_extensions.final class SubtleEncryptResponse(google.protobuf.message.Message): """SubtleEncryptResponse is the response for SubtleEncryptAlpha1.""" @@ -2377,11 +2336,11 @@ class SubtleEncryptResponse(google.protobuf.message.Message): ciphertext: builtins.bytes = ..., tag: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["ciphertext", b"ciphertext", "tag", b"tag"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["ciphertext", b"ciphertext", "tag", b"tag"]) -> None: ... global___SubtleEncryptResponse = SubtleEncryptResponse -@typing.final +@typing_extensions.final class SubtleDecryptRequest(google.protobuf.message.Message): """SubtleDecryptRequest is the request for SubtleDecryptAlpha1.""" @@ -2423,11 +2382,11 @@ class SubtleDecryptRequest(google.protobuf.message.Message): tag: builtins.bytes = ..., associated_data: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["algorithm", b"algorithm", "associated_data", b"associated_data", "ciphertext", b"ciphertext", "component_name", b"component_name", "key_name", b"key_name", "nonce", b"nonce", "tag", b"tag"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "associated_data", b"associated_data", "ciphertext", b"ciphertext", "component_name", b"component_name", "key_name", b"key_name", "nonce", b"nonce", "tag", b"tag"]) -> None: ... global___SubtleDecryptRequest = SubtleDecryptRequest -@typing.final +@typing_extensions.final class SubtleDecryptResponse(google.protobuf.message.Message): """SubtleDecryptResponse is the response for SubtleDecryptAlpha1.""" @@ -2441,11 +2400,11 @@ class SubtleDecryptResponse(google.protobuf.message.Message): *, plaintext: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["plaintext", b"plaintext"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["plaintext", b"plaintext"]) -> None: ... 
global___SubtleDecryptResponse = SubtleDecryptResponse -@typing.final +@typing_extensions.final class SubtleWrapKeyRequest(google.protobuf.message.Message): """SubtleWrapKeyRequest is the request for SubtleWrapKeyAlpha1.""" @@ -2481,11 +2440,11 @@ class SubtleWrapKeyRequest(google.protobuf.message.Message): nonce: builtins.bytes = ..., associated_data: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["algorithm", b"algorithm", "associated_data", b"associated_data", "component_name", b"component_name", "key_name", b"key_name", "nonce", b"nonce", "plaintext_key", b"plaintext_key"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "associated_data", b"associated_data", "component_name", b"component_name", "key_name", b"key_name", "nonce", b"nonce", "plaintext_key", b"plaintext_key"]) -> None: ... global___SubtleWrapKeyRequest = SubtleWrapKeyRequest -@typing.final +@typing_extensions.final class SubtleWrapKeyResponse(google.protobuf.message.Message): """SubtleWrapKeyResponse is the response for SubtleWrapKeyAlpha1.""" @@ -2505,11 +2464,11 @@ class SubtleWrapKeyResponse(google.protobuf.message.Message): wrapped_key: builtins.bytes = ..., tag: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["tag", b"tag", "wrapped_key", b"wrapped_key"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["tag", b"tag", "wrapped_key", b"wrapped_key"]) -> None: ... global___SubtleWrapKeyResponse = SubtleWrapKeyResponse -@typing.final +@typing_extensions.final class SubtleUnwrapKeyRequest(google.protobuf.message.Message): """SubtleUnwrapKeyRequest is the request for SubtleUnwrapKeyAlpha1.""" @@ -2551,11 +2510,11 @@ class SubtleUnwrapKeyRequest(google.protobuf.message.Message): tag: builtins.bytes = ..., associated_data: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["algorithm", b"algorithm", "associated_data", b"associated_data", "component_name", b"component_name", "key_name", b"key_name", "nonce", b"nonce", "tag", b"tag", "wrapped_key", b"wrapped_key"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "associated_data", b"associated_data", "component_name", b"component_name", "key_name", b"key_name", "nonce", b"nonce", "tag", b"tag", "wrapped_key", b"wrapped_key"]) -> None: ... global___SubtleUnwrapKeyRequest = SubtleUnwrapKeyRequest -@typing.final +@typing_extensions.final class SubtleUnwrapKeyResponse(google.protobuf.message.Message): """SubtleUnwrapKeyResponse is the response for SubtleUnwrapKeyAlpha1.""" @@ -2569,11 +2528,11 @@ class SubtleUnwrapKeyResponse(google.protobuf.message.Message): *, plaintext_key: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["plaintext_key", b"plaintext_key"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["plaintext_key", b"plaintext_key"]) -> None: ... global___SubtleUnwrapKeyResponse = SubtleUnwrapKeyResponse -@typing.final +@typing_extensions.final class SubtleSignRequest(google.protobuf.message.Message): """SubtleSignRequest is the request for SubtleSignAlpha1.""" @@ -2599,11 +2558,11 @@ class SubtleSignRequest(google.protobuf.message.Message): algorithm: builtins.str = ..., key_name: builtins.str = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["algorithm", b"algorithm", "component_name", b"component_name", "digest", b"digest", "key_name", b"key_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "component_name", b"component_name", "digest", b"digest", "key_name", b"key_name"]) -> None: ... global___SubtleSignRequest = SubtleSignRequest -@typing.final +@typing_extensions.final class SubtleSignResponse(google.protobuf.message.Message): """SubtleSignResponse is the response for SubtleSignAlpha1.""" @@ -2617,11 +2576,11 @@ class SubtleSignResponse(google.protobuf.message.Message): *, signature: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["signature", b"signature"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["signature", b"signature"]) -> None: ... global___SubtleSignResponse = SubtleSignResponse -@typing.final +@typing_extensions.final class SubtleVerifyRequest(google.protobuf.message.Message): """SubtleVerifyRequest is the request for SubtleVerifyAlpha1.""" @@ -2651,11 +2610,11 @@ class SubtleVerifyRequest(google.protobuf.message.Message): key_name: builtins.str = ..., signature: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["algorithm", b"algorithm", "component_name", b"component_name", "digest", b"digest", "key_name", b"key_name", "signature", b"signature"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "component_name", b"component_name", "digest", b"digest", "key_name", b"key_name", "signature", b"signature"]) -> None: ... global___SubtleVerifyRequest = SubtleVerifyRequest -@typing.final +@typing_extensions.final class SubtleVerifyResponse(google.protobuf.message.Message): """SubtleVerifyResponse is the response for SubtleVerifyAlpha1.""" @@ -2669,11 +2628,11 @@ class SubtleVerifyResponse(google.protobuf.message.Message): *, valid: builtins.bool = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["valid", b"valid"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["valid", b"valid"]) -> None: ... global___SubtleVerifyResponse = SubtleVerifyResponse -@typing.final +@typing_extensions.final class EncryptRequest(google.protobuf.message.Message): """EncryptRequest is the request for EncryptAlpha1.""" @@ -2684,23 +2643,21 @@ class EncryptRequest(google.protobuf.message.Message): @property def options(self) -> global___EncryptRequestOptions: """Request details. Must be present in the first message only.""" - @property def payload(self) -> dapr.proto.common.v1.common_pb2.StreamPayload: """Chunk of data of arbitrary size.""" - def __init__( self, *, options: global___EncryptRequestOptions | None = ..., payload: dapr.proto.common.v1.common_pb2.StreamPayload | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["options", b"options", "payload", b"payload"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["options", b"options", "payload", b"payload"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["options", b"options", "payload", b"payload"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["options", b"options", "payload", b"payload"]) -> None: ... 
global___EncryptRequest = EncryptRequest -@typing.final +@typing_extensions.final class EncryptRequestOptions(google.protobuf.message.Message): """EncryptRequestOptions contains options for the first message in the EncryptAlpha1 request.""" @@ -2743,11 +2700,11 @@ class EncryptRequestOptions(google.protobuf.message.Message): omit_decryption_key_name: builtins.bool = ..., decryption_key_name: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["component_name", b"component_name", "data_encryption_cipher", b"data_encryption_cipher", "decryption_key_name", b"decryption_key_name", "key_name", b"key_name", "key_wrap_algorithm", b"key_wrap_algorithm", "omit_decryption_key_name", b"omit_decryption_key_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["component_name", b"component_name", "data_encryption_cipher", b"data_encryption_cipher", "decryption_key_name", b"decryption_key_name", "key_name", b"key_name", "key_wrap_algorithm", b"key_wrap_algorithm", "omit_decryption_key_name", b"omit_decryption_key_name"]) -> None: ... global___EncryptRequestOptions = EncryptRequestOptions -@typing.final +@typing_extensions.final class EncryptResponse(google.protobuf.message.Message): """EncryptResponse is the response for EncryptAlpha1.""" @@ -2757,18 +2714,17 @@ class EncryptResponse(google.protobuf.message.Message): @property def payload(self) -> dapr.proto.common.v1.common_pb2.StreamPayload: """Chunk of data.""" - def __init__( self, *, payload: dapr.proto.common.v1.common_pb2.StreamPayload | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["payload", b"payload"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["payload", b"payload"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["payload", b"payload"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["payload", b"payload"]) -> None: ... global___EncryptResponse = EncryptResponse -@typing.final +@typing_extensions.final class DecryptRequest(google.protobuf.message.Message): """DecryptRequest is the request for DecryptAlpha1.""" @@ -2779,23 +2735,21 @@ class DecryptRequest(google.protobuf.message.Message): @property def options(self) -> global___DecryptRequestOptions: """Request details. Must be present in the first message only.""" - @property def payload(self) -> dapr.proto.common.v1.common_pb2.StreamPayload: """Chunk of data of arbitrary size.""" - def __init__( self, *, options: global___DecryptRequestOptions | None = ..., payload: dapr.proto.common.v1.common_pb2.StreamPayload | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["options", b"options", "payload", b"payload"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["options", b"options", "payload", b"payload"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["options", b"options", "payload", b"payload"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["options", b"options", "payload", b"payload"]) -> None: ... global___DecryptRequest = DecryptRequest -@typing.final +@typing_extensions.final class DecryptRequestOptions(google.protobuf.message.Message): """DecryptRequestOptions contains options for the first message in the DecryptAlpha1 request.""" @@ -2816,11 +2770,11 @@ class DecryptRequestOptions(google.protobuf.message.Message): component_name: builtins.str = ..., key_name: builtins.str = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["component_name", b"component_name", "key_name", b"key_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["component_name", b"component_name", "key_name", b"key_name"]) -> None: ... global___DecryptRequestOptions = DecryptRequestOptions -@typing.final +@typing_extensions.final class DecryptResponse(google.protobuf.message.Message): """DecryptResponse is the response for DecryptAlpha1.""" @@ -2830,18 +2784,17 @@ class DecryptResponse(google.protobuf.message.Message): @property def payload(self) -> dapr.proto.common.v1.common_pb2.StreamPayload: """Chunk of data.""" - def __init__( self, *, payload: dapr.proto.common.v1.common_pb2.StreamPayload | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["payload", b"payload"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["payload", b"payload"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["payload", b"payload"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["payload", b"payload"]) -> None: ... global___DecryptResponse = DecryptResponse -@typing.final +@typing_extensions.final class GetWorkflowRequest(google.protobuf.message.Message): """GetWorkflowRequest is the request for GetWorkflowBeta1.""" @@ -2859,17 +2812,17 @@ class GetWorkflowRequest(google.protobuf.message.Message): instance_id: builtins.str = ..., workflow_component: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... global___GetWorkflowRequest = GetWorkflowRequest -@typing.final +@typing_extensions.final class GetWorkflowResponse(google.protobuf.message.Message): """GetWorkflowResponse is the response for GetWorkflowBeta1.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class PropertiesEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2883,7 +2836,7 @@ class GetWorkflowResponse(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
INSTANCE_ID_FIELD_NUMBER: builtins.int WORKFLOW_NAME_FIELD_NUMBER: builtins.int @@ -2895,20 +2848,17 @@ class GetWorkflowResponse(google.protobuf.message.Message): """ID of the workflow instance.""" workflow_name: builtins.str """Name of the workflow.""" - runtime_status: builtins.str - """The current status of the workflow instance, for example, "PENDING", "RUNNING", "SUSPENDED", "COMPLETED", "FAILED", and "TERMINATED".""" @property def created_at(self) -> google.protobuf.timestamp_pb2.Timestamp: """The time at which the workflow instance was created.""" - @property def last_updated_at(self) -> google.protobuf.timestamp_pb2.Timestamp: """The last time at which the workflow instance had its state changed.""" - + runtime_status: builtins.str + """The current status of the workflow instance, for example, "PENDING", "RUNNING", "SUSPENDED", "COMPLETED", "FAILED", and "TERMINATED".""" @property def properties(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """Additional component-specific properties of the workflow instance.""" - def __init__( self, *, @@ -2919,18 +2869,18 @@ class GetWorkflowResponse(google.protobuf.message.Message): runtime_status: builtins.str = ..., properties: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["created_at", b"created_at", "last_updated_at", b"last_updated_at"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["created_at", b"created_at", "instance_id", b"instance_id", "last_updated_at", b"last_updated_at", "properties", b"properties", "runtime_status", b"runtime_status", "workflow_name", b"workflow_name"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_at", b"created_at", "last_updated_at", b"last_updated_at"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_at", b"created_at", "instance_id", b"instance_id", "last_updated_at", b"last_updated_at", "properties", b"properties", "runtime_status", b"runtime_status", "workflow_name", b"workflow_name"]) -> None: ... global___GetWorkflowResponse = GetWorkflowResponse -@typing.final +@typing_extensions.final class StartWorkflowRequest(google.protobuf.message.Message): """StartWorkflowRequest is the request for StartWorkflowBeta1.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class OptionsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2944,7 +2894,7 @@ class StartWorkflowRequest(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
INSTANCE_ID_FIELD_NUMBER: builtins.int WORKFLOW_COMPONENT_FIELD_NUMBER: builtins.int @@ -2957,12 +2907,11 @@ class StartWorkflowRequest(google.protobuf.message.Message): """Name of the workflow component.""" workflow_name: builtins.str """Name of the workflow.""" - input: builtins.bytes - """Input data for the workflow instance.""" @property def options(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """Additional component-specific options for starting the workflow instance.""" - + input: builtins.bytes + """Input data for the workflow instance.""" def __init__( self, *, @@ -2972,11 +2921,11 @@ class StartWorkflowRequest(google.protobuf.message.Message): options: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., input: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["input", b"input", "instance_id", b"instance_id", "options", b"options", "workflow_component", b"workflow_component", "workflow_name", b"workflow_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["input", b"input", "instance_id", b"instance_id", "options", b"options", "workflow_component", b"workflow_component", "workflow_name", b"workflow_name"]) -> None: ... global___StartWorkflowRequest = StartWorkflowRequest -@typing.final +@typing_extensions.final class StartWorkflowResponse(google.protobuf.message.Message): """StartWorkflowResponse is the response for StartWorkflowBeta1.""" @@ -2990,11 +2939,11 @@ class StartWorkflowResponse(google.protobuf.message.Message): *, instance_id: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["instance_id", b"instance_id"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["instance_id", b"instance_id"]) -> None: ... global___StartWorkflowResponse = StartWorkflowResponse -@typing.final +@typing_extensions.final class TerminateWorkflowRequest(google.protobuf.message.Message): """TerminateWorkflowRequest is the request for TerminateWorkflowBeta1.""" @@ -3012,11 +2961,11 @@ class TerminateWorkflowRequest(google.protobuf.message.Message): instance_id: builtins.str = ..., workflow_component: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... global___TerminateWorkflowRequest = TerminateWorkflowRequest -@typing.final +@typing_extensions.final class PauseWorkflowRequest(google.protobuf.message.Message): """PauseWorkflowRequest is the request for PauseWorkflowBeta1.""" @@ -3034,11 +2983,11 @@ class PauseWorkflowRequest(google.protobuf.message.Message): instance_id: builtins.str = ..., workflow_component: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... 
global___PauseWorkflowRequest = PauseWorkflowRequest -@typing.final +@typing_extensions.final class ResumeWorkflowRequest(google.protobuf.message.Message): """ResumeWorkflowRequest is the request for ResumeWorkflowBeta1.""" @@ -3056,11 +3005,11 @@ class ResumeWorkflowRequest(google.protobuf.message.Message): instance_id: builtins.str = ..., workflow_component: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... global___ResumeWorkflowRequest = ResumeWorkflowRequest -@typing.final +@typing_extensions.final class RaiseEventWorkflowRequest(google.protobuf.message.Message): """RaiseEventWorkflowRequest is the request for RaiseEventWorkflowBeta1.""" @@ -3086,11 +3035,11 @@ class RaiseEventWorkflowRequest(google.protobuf.message.Message): event_name: builtins.str = ..., event_data: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["event_data", b"event_data", "event_name", b"event_name", "instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["event_data", b"event_data", "event_name", b"event_name", "instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... global___RaiseEventWorkflowRequest = RaiseEventWorkflowRequest -@typing.final +@typing_extensions.final class PurgeWorkflowRequest(google.protobuf.message.Message): """PurgeWorkflowRequest is the request for PurgeWorkflowBeta1.""" @@ -3108,11 +3057,11 @@ class PurgeWorkflowRequest(google.protobuf.message.Message): instance_id: builtins.str = ..., workflow_component: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["instance_id", b"instance_id", "workflow_component", b"workflow_component"]) -> None: ... global___PurgeWorkflowRequest = PurgeWorkflowRequest -@typing.final +@typing_extensions.final class ShutdownRequest(google.protobuf.message.Message): """ShutdownRequest is the request for Shutdown. Empty @@ -3126,7 +3075,7 @@ class ShutdownRequest(google.protobuf.message.Message): global___ShutdownRequest = ShutdownRequest -@typing.final +@typing_extensions.final class Job(google.protobuf.message.Message): """Job is the definition of a job. At least one of schedule or due_time must be provided but can also be provided together. @@ -3149,20 +3098,20 @@ class Job(google.protobuf.message.Message): Systemd timer style cron accepts 6 fields: seconds | minutes | hours | day of month | month | day of week - 0-59 | 0-59 | 0-23 | 1-31 | 1-12/jan-dec | 0-7/sun-sat + 0-59 | 0-59 | 0-23 | 1-31 | 1-12/jan-dec | 0-6/sun-sat "0 30 * * * *" - every hour on the half hour "0 15 3 * * *" - every day at 03:15 Period string expressions: - Entry | Description | Equivalent To - ----- | ----------- | ------------- - @every | Run every (e.g. '@every 1h30m') | N/A - @yearly (or @annually) | Run once a year, midnight, Jan. 
1st | 0 0 0 1 1 * - @monthly | Run once a month, midnight, first of month | 0 0 0 1 * * - @weekly | Run once a week, midnight on Sunday | 0 0 0 * * 0 - @daily (or @midnight) | Run once a day, midnight | 0 0 0 * * * - @hourly | Run once an hour, beginning of hour | 0 0 * * * * + Entry | Description | Equivalent To + ----- | ----------- | ------------- + @every `` | Run every `` (e.g. '@every 1h30m') | N/A + @yearly (or @annually) | Run once a year, midnight, Jan. 1st | 0 0 0 1 1 * + @monthly | Run once a month, midnight, first of month | 0 0 0 1 * * + @weekly | Run once a week, midnight on Sunday | 0 0 0 * * 0 + @daily (or @midnight) | Run once a day, midnight | 0 0 0 * * * + @hourly | Run once an hour, beginning of hour | 0 0 * * * * """ repeats: builtins.int """repeats is the optional number of times in which the job should be @@ -3184,7 +3133,6 @@ class Job(google.protobuf.message.Message): """payload is the serialized job payload that will be sent to the recipient when the job is triggered. """ - def __init__( self, *, @@ -3195,20 +3143,20 @@ class Job(google.protobuf.message.Message): ttl: builtins.str | None = ..., data: google.protobuf.any_pb2.Any | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_due_time", b"_due_time", "_repeats", b"_repeats", "_schedule", b"_schedule", "_ttl", b"_ttl", "data", b"data", "due_time", b"due_time", "repeats", b"repeats", "schedule", b"schedule", "ttl", b"ttl"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_due_time", b"_due_time", "_repeats", b"_repeats", "_schedule", b"_schedule", "_ttl", b"_ttl", "data", b"data", "due_time", b"due_time", "name", b"name", "repeats", b"repeats", "schedule", b"schedule", "ttl", b"ttl"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_due_time", b"_due_time", "_repeats", b"_repeats", "_schedule", b"_schedule", "_ttl", b"_ttl", "data", b"data", "due_time", b"due_time", "repeats", b"repeats", "schedule", b"schedule", "ttl", b"ttl"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_due_time", b"_due_time", "_repeats", b"_repeats", "_schedule", b"_schedule", "_ttl", b"_ttl", "data", b"data", "due_time", b"due_time", "name", b"name", "repeats", b"repeats", "schedule", b"schedule", "ttl", b"ttl"]) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_due_time", b"_due_time"]) -> typing.Literal["due_time"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_due_time", b"_due_time"]) -> typing_extensions.Literal["due_time"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_repeats", b"_repeats"]) -> typing.Literal["repeats"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_repeats", b"_repeats"]) -> typing_extensions.Literal["repeats"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_schedule", b"_schedule"]) -> typing.Literal["schedule"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_schedule", b"_schedule"]) -> typing_extensions.Literal["schedule"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_ttl", b"_ttl"]) -> typing.Literal["ttl"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_ttl", b"_ttl"]) -> typing_extensions.Literal["ttl"] | None: ... 
global___Job = Job -@typing.final +@typing_extensions.final class ScheduleJobRequest(google.protobuf.message.Message): """ScheduleJobRequest is the message to create/schedule the job.""" @@ -3218,18 +3166,17 @@ class ScheduleJobRequest(google.protobuf.message.Message): @property def job(self) -> global___Job: """The job details.""" - def __init__( self, *, job: global___Job | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["job", b"job"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["job", b"job"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["job", b"job"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["job", b"job"]) -> None: ... global___ScheduleJobRequest = ScheduleJobRequest -@typing.final +@typing_extensions.final class ScheduleJobResponse(google.protobuf.message.Message): """ScheduleJobResponse is the message response to create/schedule the job. Empty @@ -3243,7 +3190,7 @@ class ScheduleJobResponse(google.protobuf.message.Message): global___ScheduleJobResponse = ScheduleJobResponse -@typing.final +@typing_extensions.final class GetJobRequest(google.protobuf.message.Message): """GetJobRequest is the message to retrieve a job.""" @@ -3257,11 +3204,11 @@ class GetJobRequest(google.protobuf.message.Message): *, name: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name"]) -> None: ... global___GetJobRequest = GetJobRequest -@typing.final +@typing_extensions.final class GetJobResponse(google.protobuf.message.Message): """GetJobResponse is the message's response for a job retrieved.""" @@ -3271,18 +3218,17 @@ class GetJobResponse(google.protobuf.message.Message): @property def job(self) -> global___Job: """The job details.""" - def __init__( self, *, job: global___Job | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["job", b"job"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["job", b"job"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["job", b"job"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["job", b"job"]) -> None: ... global___GetJobResponse = GetJobResponse -@typing.final +@typing_extensions.final class DeleteJobRequest(google.protobuf.message.Message): """DeleteJobRequest is the message to delete the job by name.""" @@ -3296,11 +3242,11 @@ class DeleteJobRequest(google.protobuf.message.Message): *, name: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name"]) -> None: ... global___DeleteJobRequest = DeleteJobRequest -@typing.final +@typing_extensions.final class DeleteJobResponse(google.protobuf.message.Message): """DeleteJobResponse is the message response to delete the job by name. Empty @@ -3313,3 +3259,184 @@ class DeleteJobResponse(google.protobuf.message.Message): ) -> None: ... 
global___DeleteJobResponse = DeleteJobResponse + +@typing_extensions.final +class ConversationAlpha1Request(google.protobuf.message.Message): + """ConversationAlpha1Request is the request object for Conversation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class ParametersEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> google.protobuf.any_pb2.Any: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: google.protobuf.any_pb2.Any | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + @typing_extensions.final + class MetadataEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + CONTEXTID_FIELD_NUMBER: builtins.int + INPUTS_FIELD_NUMBER: builtins.int + PARAMETERS_FIELD_NUMBER: builtins.int + METADATA_FIELD_NUMBER: builtins.int + SCRUBPII_FIELD_NUMBER: builtins.int + TEMPERATURE_FIELD_NUMBER: builtins.int + name: builtins.str + """The name of Coverstaion component""" + contextID: builtins.str + """The ID of an existing chat (like in ChatGPT)""" + @property + def inputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ConversationInput]: + """Inputs for the conversation, support multiple input in one time.""" + @property + def parameters(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, google.protobuf.any_pb2.Any]: + """Parameters for all custom fields.""" + @property + def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """The metadata passing to conversation components.""" + scrubPII: builtins.bool + """Scrub PII data that comes back from the LLM""" + temperature: builtins.float + """Temperature for the LLM to optimize for creativity or predictability""" + def __init__( + self, + *, + name: builtins.str = ..., + contextID: builtins.str | None = ..., + inputs: collections.abc.Iterable[global___ConversationInput] | None = ..., + parameters: collections.abc.Mapping[builtins.str, google.protobuf.any_pb2.Any] | None = ..., + metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + scrubPII: builtins.bool | None = ..., + temperature: builtins.float | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_contextID", b"_contextID", "_scrubPII", b"_scrubPII", "_temperature", b"_temperature", "contextID", b"contextID", "scrubPII", b"scrubPII", "temperature", b"temperature"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_contextID", b"_contextID", "_scrubPII", b"_scrubPII", "_temperature", b"_temperature", "contextID", b"contextID", "inputs", b"inputs", "metadata", b"metadata", "name", b"name", "parameters", b"parameters", "scrubPII", b"scrubPII", "temperature", b"temperature"]) -> None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["_contextID", b"_contextID"]) -> typing_extensions.Literal["contextID"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["_scrubPII", b"_scrubPII"]) -> typing_extensions.Literal["scrubPII"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["_temperature", b"_temperature"]) -> typing_extensions.Literal["temperature"] | None: ... + +global___ConversationAlpha1Request = ConversationAlpha1Request + +@typing_extensions.final +class ConversationInput(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MESSAGE_FIELD_NUMBER: builtins.int + ROLE_FIELD_NUMBER: builtins.int + SCRUBPII_FIELD_NUMBER: builtins.int + message: builtins.str + """The message to send to the llm""" + role: builtins.str + """The role to set for the message""" + scrubPII: builtins.bool + """Scrub PII data that goes into the LLM""" + def __init__( + self, + *, + message: builtins.str = ..., + role: builtins.str | None = ..., + scrubPII: builtins.bool | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_role", b"_role", "_scrubPII", b"_scrubPII", "role", b"role", "scrubPII", b"scrubPII"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_role", b"_role", "_scrubPII", b"_scrubPII", "message", b"message", "role", b"role", "scrubPII", b"scrubPII"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["_role", b"_role"]) -> typing_extensions.Literal["role"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["_scrubPII", b"_scrubPII"]) -> typing_extensions.Literal["scrubPII"] | None: ... + +global___ConversationInput = ConversationInput + +@typing_extensions.final +class ConversationAlpha1Result(google.protobuf.message.Message): + """ConversationAlpha1Result is the result for one input.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class ParametersEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> google.protobuf.any_pb2.Any: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: google.protobuf.any_pb2.Any | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + RESULT_FIELD_NUMBER: builtins.int + PARAMETERS_FIELD_NUMBER: builtins.int + result: builtins.str + """Result for the one conversation input.""" + @property + def parameters(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, google.protobuf.any_pb2.Any]: + """Parameters for all custom fields.""" + def __init__( + self, + *, + result: builtins.str = ..., + parameters: collections.abc.Mapping[builtins.str, google.protobuf.any_pb2.Any] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["parameters", b"parameters", "result", b"result"]) -> None: ... 
+ +global___ConversationAlpha1Result = ConversationAlpha1Result + +@typing_extensions.final +class ConversationAlpha1Response(google.protobuf.message.Message): + """ConversationAlpha1Response is the response for Conversation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CONTEXTID_FIELD_NUMBER: builtins.int + OUTPUTS_FIELD_NUMBER: builtins.int + contextID: builtins.str + """The ID of an existing chat (like in ChatGPT)""" + @property + def outputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ConversationAlpha1Result]: + """An array of results.""" + def __init__( + self, + *, + contextID: builtins.str | None = ..., + outputs: collections.abc.Iterable[global___ConversationAlpha1Result] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_contextID", b"_contextID", "contextID", b"contextID"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_contextID", b"_contextID", "contextID", b"contextID", "outputs", b"outputs"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_contextID", b"_contextID"]) -> typing_extensions.Literal["contextID"] | None: ... + +global___ConversationAlpha1Response = ConversationAlpha1Response diff --git a/dapr/proto/runtime/v1/dapr_pb2_grpc.py b/dapr/proto/runtime/v1/dapr_pb2_grpc.py index b97d7f02..60b4c241 100644 --- a/dapr/proto/runtime/v1/dapr_pb2_grpc.py +++ b/dapr/proto/runtime/v1/dapr_pb2_grpc.py @@ -1,31 +1,11 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" import grpc -import warnings from dapr.proto.common.v1 import common_pb2 as dapr_dot_proto_dot_common_dot_v1_dot_common__pb2 from dapr.proto.runtime.v1 import dapr_pb2 as dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -GRPC_GENERATED_VERSION = '1.66.1' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in dapr/proto/runtime/v1/dapr_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) - class DaprStub(object): """Dapr service provides APIs to user application to access Dapr building blocks. 
@@ -41,292 +21,297 @@ def __init__(self, channel): '/dapr.proto.runtime.v1.Dapr/InvokeService', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.InvokeServiceRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_common_dot_v1_dot_common__pb2.InvokeResponse.FromString, - _registered_method=True) + ) self.GetState = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetState', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetStateRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetStateResponse.FromString, - _registered_method=True) + ) self.GetBulkState = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetBulkState', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetBulkStateRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetBulkStateResponse.FromString, - _registered_method=True) + ) self.SaveState = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/SaveState', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SaveStateRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.QueryStateAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/QueryStateAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.QueryStateRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.QueryStateResponse.FromString, - _registered_method=True) + ) self.DeleteState = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/DeleteState', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DeleteStateRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.DeleteBulkState = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/DeleteBulkState', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DeleteBulkStateRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.ExecuteStateTransaction = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/ExecuteStateTransaction', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ExecuteStateTransactionRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.PublishEvent = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/PublishEvent', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.PublishEventRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.BulkPublishEventAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/BulkPublishEventAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.BulkPublishRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.BulkPublishResponse.FromString, - _registered_method=True) + ) self.SubscribeTopicEventsAlpha1 = channel.stream_stream( '/dapr.proto.runtime.v1.Dapr/SubscribeTopicEventsAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeTopicEventsRequestAlpha1.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeTopicEventsResponseAlpha1.FromString, - _registered_method=True) + ) self.InvokeBinding 
= channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/InvokeBinding', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.InvokeBindingRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.InvokeBindingResponse.FromString, - _registered_method=True) + ) self.GetSecret = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetSecret', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetSecretRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetSecretResponse.FromString, - _registered_method=True) + ) self.GetBulkSecret = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetBulkSecret', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetBulkSecretRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetBulkSecretResponse.FromString, - _registered_method=True) + ) self.RegisterActorTimer = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/RegisterActorTimer', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.RegisterActorTimerRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.UnregisterActorTimer = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/UnregisterActorTimer', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnregisterActorTimerRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.RegisterActorReminder = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/RegisterActorReminder', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.RegisterActorReminderRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.UnregisterActorReminder = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/UnregisterActorReminder', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnregisterActorReminderRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.GetActorState = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetActorState', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetActorStateRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetActorStateResponse.FromString, - _registered_method=True) + ) self.ExecuteActorStateTransaction = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/ExecuteActorStateTransaction', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ExecuteActorStateTransactionRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.InvokeActor = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/InvokeActor', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.InvokeActorRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.InvokeActorResponse.FromString, - _registered_method=True) + ) self.GetConfigurationAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetConfigurationAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetConfigurationRequest.SerializeToString, 
response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetConfigurationResponse.FromString, - _registered_method=True) + ) self.GetConfiguration = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetConfiguration', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetConfigurationRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetConfigurationResponse.FromString, - _registered_method=True) + ) self.SubscribeConfigurationAlpha1 = channel.unary_stream( '/dapr.proto.runtime.v1.Dapr/SubscribeConfigurationAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeConfigurationRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeConfigurationResponse.FromString, - _registered_method=True) + ) self.SubscribeConfiguration = channel.unary_stream( '/dapr.proto.runtime.v1.Dapr/SubscribeConfiguration', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeConfigurationRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeConfigurationResponse.FromString, - _registered_method=True) + ) self.UnsubscribeConfigurationAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/UnsubscribeConfigurationAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnsubscribeConfigurationRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnsubscribeConfigurationResponse.FromString, - _registered_method=True) + ) self.UnsubscribeConfiguration = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/UnsubscribeConfiguration', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnsubscribeConfigurationRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnsubscribeConfigurationResponse.FromString, - _registered_method=True) + ) self.TryLockAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/TryLockAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.TryLockRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.TryLockResponse.FromString, - _registered_method=True) + ) self.UnlockAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/UnlockAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnlockRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnlockResponse.FromString, - _registered_method=True) + ) self.EncryptAlpha1 = channel.stream_stream( '/dapr.proto.runtime.v1.Dapr/EncryptAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.EncryptRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.EncryptResponse.FromString, - _registered_method=True) + ) self.DecryptAlpha1 = channel.stream_stream( '/dapr.proto.runtime.v1.Dapr/DecryptAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DecryptRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DecryptResponse.FromString, - _registered_method=True) + ) self.GetMetadata = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetMetadata', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetMetadataRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetMetadataResponse.FromString, - 
_registered_method=True) + ) self.SetMetadata = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/SetMetadata', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SetMetadataRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.SubtleGetKeyAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/SubtleGetKeyAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleGetKeyRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleGetKeyResponse.FromString, - _registered_method=True) + ) self.SubtleEncryptAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/SubtleEncryptAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleEncryptRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleEncryptResponse.FromString, - _registered_method=True) + ) self.SubtleDecryptAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/SubtleDecryptAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleDecryptRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleDecryptResponse.FromString, - _registered_method=True) + ) self.SubtleWrapKeyAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/SubtleWrapKeyAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleWrapKeyRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleWrapKeyResponse.FromString, - _registered_method=True) + ) self.SubtleUnwrapKeyAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/SubtleUnwrapKeyAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleUnwrapKeyRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleUnwrapKeyResponse.FromString, - _registered_method=True) + ) self.SubtleSignAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/SubtleSignAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleSignRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleSignResponse.FromString, - _registered_method=True) + ) self.SubtleVerifyAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/SubtleVerifyAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleVerifyRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleVerifyResponse.FromString, - _registered_method=True) + ) self.StartWorkflowAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/StartWorkflowAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.StartWorkflowRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.StartWorkflowResponse.FromString, - _registered_method=True) + ) self.GetWorkflowAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetWorkflowAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetWorkflowRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetWorkflowResponse.FromString, - _registered_method=True) + ) self.PurgeWorkflowAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/PurgeWorkflowAlpha1', 
request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.PurgeWorkflowRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.TerminateWorkflowAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/TerminateWorkflowAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.TerminateWorkflowRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.PauseWorkflowAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/PauseWorkflowAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.PauseWorkflowRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.ResumeWorkflowAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/ResumeWorkflowAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ResumeWorkflowRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.RaiseEventWorkflowAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/RaiseEventWorkflowAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.RaiseEventWorkflowRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.StartWorkflowBeta1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/StartWorkflowBeta1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.StartWorkflowRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.StartWorkflowResponse.FromString, - _registered_method=True) + ) self.GetWorkflowBeta1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetWorkflowBeta1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetWorkflowRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetWorkflowResponse.FromString, - _registered_method=True) + ) self.PurgeWorkflowBeta1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/PurgeWorkflowBeta1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.PurgeWorkflowRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.TerminateWorkflowBeta1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/TerminateWorkflowBeta1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.TerminateWorkflowRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.PauseWorkflowBeta1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/PauseWorkflowBeta1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.PauseWorkflowRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.ResumeWorkflowBeta1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/ResumeWorkflowBeta1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ResumeWorkflowRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.RaiseEventWorkflowBeta1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/RaiseEventWorkflowBeta1', 
request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.RaiseEventWorkflowRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.Shutdown = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/Shutdown', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ShutdownRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - _registered_method=True) + ) self.ScheduleJobAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/ScheduleJobAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ScheduleJobRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ScheduleJobResponse.FromString, - _registered_method=True) + ) self.GetJobAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/GetJobAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetJobRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetJobResponse.FromString, - _registered_method=True) + ) self.DeleteJobAlpha1 = channel.unary_unary( '/dapr.proto.runtime.v1.Dapr/DeleteJobAlpha1', request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DeleteJobRequest.SerializeToString, response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DeleteJobResponse.FromString, - _registered_method=True) + ) + self.ConverseAlpha1 = channel.unary_unary( + '/dapr.proto.runtime.v1.Dapr/ConverseAlpha1', + request_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ConversationAlpha1Request.SerializeToString, + response_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ConversationAlpha1Response.FromString, + ) class DaprServicer(object): @@ -741,6 +726,13 @@ def DeleteJobAlpha1(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def ConverseAlpha1(self, request, context): + """Converse with a LLM service + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def add_DaprServicer_to_server(servicer, server): rpc_method_handlers = { @@ -1034,11 +1026,15 @@ def add_DaprServicer_to_server(servicer, server): request_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DeleteJobRequest.FromString, response_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DeleteJobResponse.SerializeToString, ), + 'ConverseAlpha1': grpc.unary_unary_rpc_method_handler( + servicer.ConverseAlpha1, + request_deserializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ConversationAlpha1Request.FromString, + response_serializer=dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ConversationAlpha1Response.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( 'dapr.proto.runtime.v1.Dapr', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers('dapr.proto.runtime.v1.Dapr', rpc_method_handlers) # This class is part of an EXPERIMENTAL API. 
@@ -1057,21 +1053,11 @@ def InvokeService(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/InvokeService', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/InvokeService', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.InvokeServiceRequest.SerializeToString, dapr_dot_proto_dot_common_dot_v1_dot_common__pb2.InvokeResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetState(request, @@ -1084,21 +1070,11 @@ def GetState(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/GetState', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetState', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetStateRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetStateResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetBulkState(request, @@ -1111,21 +1087,11 @@ def GetBulkState(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/GetBulkState', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetBulkState', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetBulkStateRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetBulkStateResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SaveState(request, @@ -1138,21 +1104,11 @@ def SaveState(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SaveState', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/SaveState', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SaveStateRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def QueryStateAlpha1(request, @@ -1165,21 +1121,11 @@ def QueryStateAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/QueryStateAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/QueryStateAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.QueryStateRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.QueryStateResponse.FromString, - options, - channel_credentials, 
- insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def DeleteState(request, @@ -1192,21 +1138,11 @@ def DeleteState(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/DeleteState', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/DeleteState', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DeleteStateRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def DeleteBulkState(request, @@ -1219,21 +1155,11 @@ def DeleteBulkState(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/DeleteBulkState', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/DeleteBulkState', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DeleteBulkStateRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ExecuteStateTransaction(request, @@ -1246,21 +1172,11 @@ def ExecuteStateTransaction(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/ExecuteStateTransaction', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/ExecuteStateTransaction', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ExecuteStateTransactionRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def PublishEvent(request, @@ -1273,21 +1189,11 @@ def PublishEvent(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/PublishEvent', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/PublishEvent', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.PublishEventRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def BulkPublishEventAlpha1(request, @@ -1300,21 +1206,11 @@ def BulkPublishEventAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - 
'/dapr.proto.runtime.v1.Dapr/BulkPublishEventAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/BulkPublishEventAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.BulkPublishRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.BulkPublishResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SubscribeTopicEventsAlpha1(request_iterator, @@ -1327,21 +1223,11 @@ def SubscribeTopicEventsAlpha1(request_iterator, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.stream_stream( - request_iterator, - target, - '/dapr.proto.runtime.v1.Dapr/SubscribeTopicEventsAlpha1', + return grpc.experimental.stream_stream(request_iterator, target, '/dapr.proto.runtime.v1.Dapr/SubscribeTopicEventsAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeTopicEventsRequestAlpha1.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeTopicEventsResponseAlpha1.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def InvokeBinding(request, @@ -1354,21 +1240,11 @@ def InvokeBinding(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/InvokeBinding', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/InvokeBinding', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.InvokeBindingRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.InvokeBindingResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetSecret(request, @@ -1381,21 +1257,11 @@ def GetSecret(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/GetSecret', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetSecret', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetSecretRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetSecretResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetBulkSecret(request, @@ -1408,21 +1274,11 @@ def GetBulkSecret(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/GetBulkSecret', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetBulkSecret', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetBulkSecretRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetBulkSecretResponse.FromString, - 
options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def RegisterActorTimer(request, @@ -1435,21 +1291,11 @@ def RegisterActorTimer(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/RegisterActorTimer', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/RegisterActorTimer', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.RegisterActorTimerRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def UnregisterActorTimer(request, @@ -1462,21 +1308,11 @@ def UnregisterActorTimer(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/UnregisterActorTimer', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/UnregisterActorTimer', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnregisterActorTimerRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def RegisterActorReminder(request, @@ -1489,21 +1325,11 @@ def RegisterActorReminder(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/RegisterActorReminder', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/RegisterActorReminder', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.RegisterActorReminderRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def UnregisterActorReminder(request, @@ -1516,21 +1342,11 @@ def UnregisterActorReminder(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/UnregisterActorReminder', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/UnregisterActorReminder', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnregisterActorReminderRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetActorState(request, @@ -1543,21 +1359,11 @@ def GetActorState(request, wait_for_ready=None, timeout=None, 
metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/GetActorState', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetActorState', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetActorStateRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetActorStateResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ExecuteActorStateTransaction(request, @@ -1570,21 +1376,11 @@ def ExecuteActorStateTransaction(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/ExecuteActorStateTransaction', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/ExecuteActorStateTransaction', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ExecuteActorStateTransactionRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def InvokeActor(request, @@ -1597,21 +1393,11 @@ def InvokeActor(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/InvokeActor', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/InvokeActor', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.InvokeActorRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.InvokeActorResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetConfigurationAlpha1(request, @@ -1624,21 +1410,11 @@ def GetConfigurationAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/GetConfigurationAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetConfigurationAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetConfigurationRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetConfigurationResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetConfiguration(request, @@ -1651,21 +1427,11 @@ def GetConfiguration(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/GetConfiguration', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetConfiguration', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetConfigurationRequest.SerializeToString, 
dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetConfigurationResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SubscribeConfigurationAlpha1(request, @@ -1678,21 +1444,11 @@ def SubscribeConfigurationAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_stream( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SubscribeConfigurationAlpha1', + return grpc.experimental.unary_stream(request, target, '/dapr.proto.runtime.v1.Dapr/SubscribeConfigurationAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeConfigurationRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeConfigurationResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SubscribeConfiguration(request, @@ -1705,21 +1461,11 @@ def SubscribeConfiguration(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_stream( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SubscribeConfiguration', + return grpc.experimental.unary_stream(request, target, '/dapr.proto.runtime.v1.Dapr/SubscribeConfiguration', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeConfigurationRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubscribeConfigurationResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def UnsubscribeConfigurationAlpha1(request, @@ -1732,21 +1478,11 @@ def UnsubscribeConfigurationAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/UnsubscribeConfigurationAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/UnsubscribeConfigurationAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnsubscribeConfigurationRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnsubscribeConfigurationResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def UnsubscribeConfiguration(request, @@ -1759,21 +1495,11 @@ def UnsubscribeConfiguration(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/UnsubscribeConfiguration', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/UnsubscribeConfiguration', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnsubscribeConfigurationRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnsubscribeConfigurationResponse.FromString, - options, - channel_credentials, - insecure, - 
call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def TryLockAlpha1(request, @@ -1786,21 +1512,11 @@ def TryLockAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/TryLockAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/TryLockAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.TryLockRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.TryLockResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def UnlockAlpha1(request, @@ -1813,21 +1529,11 @@ def UnlockAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/UnlockAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/UnlockAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnlockRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.UnlockResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def EncryptAlpha1(request_iterator, @@ -1840,21 +1546,11 @@ def EncryptAlpha1(request_iterator, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.stream_stream( - request_iterator, - target, - '/dapr.proto.runtime.v1.Dapr/EncryptAlpha1', + return grpc.experimental.stream_stream(request_iterator, target, '/dapr.proto.runtime.v1.Dapr/EncryptAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.EncryptRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.EncryptResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def DecryptAlpha1(request_iterator, @@ -1867,21 +1563,11 @@ def DecryptAlpha1(request_iterator, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.stream_stream( - request_iterator, - target, - '/dapr.proto.runtime.v1.Dapr/DecryptAlpha1', + return grpc.experimental.stream_stream(request_iterator, target, '/dapr.proto.runtime.v1.Dapr/DecryptAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DecryptRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DecryptResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetMetadata(request, @@ -1894,21 +1580,11 @@ def GetMetadata(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - 
target, - '/dapr.proto.runtime.v1.Dapr/GetMetadata', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetMetadata', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetMetadataRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetMetadataResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SetMetadata(request, @@ -1921,21 +1597,11 @@ def SetMetadata(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SetMetadata', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/SetMetadata', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SetMetadataRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SubtleGetKeyAlpha1(request, @@ -1948,21 +1614,11 @@ def SubtleGetKeyAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SubtleGetKeyAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/SubtleGetKeyAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleGetKeyRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleGetKeyResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SubtleEncryptAlpha1(request, @@ -1975,21 +1631,11 @@ def SubtleEncryptAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SubtleEncryptAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/SubtleEncryptAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleEncryptRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleEncryptResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SubtleDecryptAlpha1(request, @@ -2002,21 +1648,11 @@ def SubtleDecryptAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SubtleDecryptAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/SubtleDecryptAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleDecryptRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleDecryptResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - 
wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SubtleWrapKeyAlpha1(request, @@ -2029,21 +1665,11 @@ def SubtleWrapKeyAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SubtleWrapKeyAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/SubtleWrapKeyAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleWrapKeyRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleWrapKeyResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SubtleUnwrapKeyAlpha1(request, @@ -2056,21 +1682,11 @@ def SubtleUnwrapKeyAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SubtleUnwrapKeyAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/SubtleUnwrapKeyAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleUnwrapKeyRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleUnwrapKeyResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SubtleSignAlpha1(request, @@ -2083,21 +1699,11 @@ def SubtleSignAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SubtleSignAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/SubtleSignAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleSignRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleSignResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SubtleVerifyAlpha1(request, @@ -2110,21 +1716,11 @@ def SubtleVerifyAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/SubtleVerifyAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/SubtleVerifyAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleVerifyRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.SubtleVerifyResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def StartWorkflowAlpha1(request, @@ -2137,21 +1733,11 @@ def StartWorkflowAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return 
grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/StartWorkflowAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/StartWorkflowAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.StartWorkflowRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.StartWorkflowResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetWorkflowAlpha1(request, @@ -2164,21 +1750,11 @@ def GetWorkflowAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/GetWorkflowAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetWorkflowAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetWorkflowRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetWorkflowResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def PurgeWorkflowAlpha1(request, @@ -2191,21 +1767,11 @@ def PurgeWorkflowAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/PurgeWorkflowAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/PurgeWorkflowAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.PurgeWorkflowRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def TerminateWorkflowAlpha1(request, @@ -2218,21 +1784,11 @@ def TerminateWorkflowAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/TerminateWorkflowAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/TerminateWorkflowAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.TerminateWorkflowRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def PauseWorkflowAlpha1(request, @@ -2245,21 +1801,11 @@ def PauseWorkflowAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/PauseWorkflowAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/PauseWorkflowAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.PauseWorkflowRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - 
channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ResumeWorkflowAlpha1(request, @@ -2272,21 +1818,11 @@ def ResumeWorkflowAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/ResumeWorkflowAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/ResumeWorkflowAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ResumeWorkflowRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def RaiseEventWorkflowAlpha1(request, @@ -2299,21 +1835,11 @@ def RaiseEventWorkflowAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/RaiseEventWorkflowAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/RaiseEventWorkflowAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.RaiseEventWorkflowRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def StartWorkflowBeta1(request, @@ -2326,21 +1852,11 @@ def StartWorkflowBeta1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/StartWorkflowBeta1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/StartWorkflowBeta1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.StartWorkflowRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.StartWorkflowResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetWorkflowBeta1(request, @@ -2353,21 +1869,11 @@ def GetWorkflowBeta1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/GetWorkflowBeta1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetWorkflowBeta1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetWorkflowRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetWorkflowResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def PurgeWorkflowBeta1(request, @@ -2380,21 +1886,11 @@ def PurgeWorkflowBeta1(request, wait_for_ready=None, timeout=None, 
metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/PurgeWorkflowBeta1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/PurgeWorkflowBeta1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.PurgeWorkflowRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def TerminateWorkflowBeta1(request, @@ -2407,21 +1903,11 @@ def TerminateWorkflowBeta1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/TerminateWorkflowBeta1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/TerminateWorkflowBeta1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.TerminateWorkflowRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def PauseWorkflowBeta1(request, @@ -2434,21 +1920,11 @@ def PauseWorkflowBeta1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/PauseWorkflowBeta1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/PauseWorkflowBeta1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.PauseWorkflowRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ResumeWorkflowBeta1(request, @@ -2461,21 +1937,11 @@ def ResumeWorkflowBeta1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/ResumeWorkflowBeta1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/ResumeWorkflowBeta1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ResumeWorkflowRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def RaiseEventWorkflowBeta1(request, @@ -2488,21 +1954,11 @@ def RaiseEventWorkflowBeta1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/RaiseEventWorkflowBeta1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/RaiseEventWorkflowBeta1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.RaiseEventWorkflowRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - 
insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def Shutdown(request, @@ -2515,21 +1971,11 @@ def Shutdown(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/Shutdown', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/Shutdown', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ShutdownRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ScheduleJobAlpha1(request, @@ -2542,21 +1988,11 @@ def ScheduleJobAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/ScheduleJobAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/ScheduleJobAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ScheduleJobRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ScheduleJobResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def GetJobAlpha1(request, @@ -2569,21 +2005,11 @@ def GetJobAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/GetJobAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/GetJobAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetJobRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.GetJobResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def DeleteJobAlpha1(request, @@ -2596,18 +2022,25 @@ def DeleteJobAlpha1(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/dapr.proto.runtime.v1.Dapr/DeleteJobAlpha1', + return grpc.experimental.unary_unary(request, target, '/dapr.proto.runtime.v1.Dapr/DeleteJobAlpha1', dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DeleteJobRequest.SerializeToString, dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.DeleteJobResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ConverseAlpha1(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, 
target, '/dapr.proto.runtime.v1.Dapr/ConverseAlpha1',
+            dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ConversationAlpha1Request.SerializeToString,
+            dapr_dot_proto_dot_runtime_dot_v1_dot_dapr__pb2.ConversationAlpha1Response.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/tools/requirements.txt b/tools/requirements.txt
index d0503c99..d3d503c9 100644
--- a/tools/requirements.txt
+++ b/tools/requirements.txt
@@ -1 +1 @@
-grpcio-tools>=1.49
+grpcio-tools>=1.57.0

From c237e7bf04bc7ae57638b64b1e14a62f0aaf1efa Mon Sep 17 00:00:00 2001
From: Elena Kolevska
Date: Mon, 21 Oct 2024 15:50:22 +0100
Subject: [PATCH 32/33] Adds stream cancelled error

Signed-off-by: Elena Kolevska
---
 dapr/aio/clients/grpc/subscription.py         |  7 +++++--
 dapr/clients/grpc/subscription.py             |  7 +++++--
 dapr/common/pubsub/subscription.py            |  3 +++
 examples/pubsub-streaming-async/subscriber.py | 12 ++++++++----
 examples/pubsub-streaming/subscriber.py       | 12 +++++++++---
 5 files changed, 30 insertions(+), 11 deletions(-)

diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py
index 84542bb4..020e208b 100644
--- a/dapr/aio/clients/grpc/subscription.py
+++ b/dapr/aio/clients/grpc/subscription.py
@@ -4,7 +4,8 @@
 
 from dapr.clients.grpc._response import TopicEventResponse
 from dapr.clients.health import DaprHealth
-from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage
+from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage, \
+    StreamCancelledError
 from dapr.proto import api_v1, appcallback_v1
 
 
@@ -69,7 +70,9 @@ async def next_message(self):
                     f'Attempting to reconnect...'
                 )
                 await self.reconnect_stream()
-            elif e.code() != StatusCode.CANCELLED:
+            elif e.code() == StatusCode.CANCELLED:
+                raise StreamCancelledError('Stream has been cancelled')
+            else:
                 raise Exception(f'gRPC error while reading from subscription stream: {e} ')
         except Exception as e:
             raise Exception(f'Error while fetching message: {e}')
diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py
index 80db4fbf..bfeaec9c 100644
--- a/dapr/clients/grpc/subscription.py
+++ b/dapr/clients/grpc/subscription.py
@@ -2,7 +2,8 @@
 
 from dapr.clients.grpc._response import TopicEventResponse
 from dapr.clients.health import DaprHealth
-from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage
+from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage, \
+    StreamCancelledError
 from dapr.proto import api_v1, appcallback_v1
 import queue
 import threading
@@ -85,7 +86,9 @@ def next_message(self):
                     f'gRPC error while reading from stream: {e.details()}, Status Code: {e.code()}'
                 )
                 self.reconnect_stream()
-            elif e.code() != StatusCode.CANCELLED:
+            elif e.code() == StatusCode.CANCELLED:
+                raise StreamCancelledError('Stream has been cancelled')
+            else:
                 raise Exception(
                     f'gRPC error while reading from subscription stream: {e.details()} '
                     f'Status Code: {e.code()}'
diff --git a/dapr/common/pubsub/subscription.py b/dapr/common/pubsub/subscription.py
index ad6f6f56..e0ce1321 100644
--- a/dapr/common/pubsub/subscription.py
+++ b/dapr/common/pubsub/subscription.py
@@ -86,3 +86,6 @@ def _parse_data_content(self):
 
 class StreamInactiveError(Exception):
     pass
+
+class StreamCancelledError(Exception):
+    pass
diff --git a/examples/pubsub-streaming-async/subscriber.py b/examples/pubsub-streaming-async/subscriber.py
index 9a0d34a5..aca9e466 100644
--- a/examples/pubsub-streaming-async/subscriber.py
+++ b/examples/pubsub-streaming-async/subscriber.py
@@ -3,6 +3,7 @@
 
 from dapr.aio.clients import DaprClient
 from dapr.clients.grpc.subscription import StreamInactiveError
+from dapr.common.pubsub.subscription import StreamCancelledError
 
 parser = argparse.ArgumentParser(description='Publish events to a Dapr pub/sub topic.')
 parser.add_argument('--topic', type=str, required=True, help='The topic name to publish to.')
@@ -33,15 +34,18 @@ async def main():
             while counter < 5:
                 try:
                     message = await subscription.next_message()
+                    if message is None:
+                        print('No message received within timeout period. '
+                              'The stream might have been cancelled.')
+                        continue
 
                 except StreamInactiveError:
                     print('Stream is inactive. Retrying...')
                     await asyncio.sleep(1)
                     continue
 
-                if message is None:
-                    print('No message received within timeout period.')
-                    continue
-
+                except StreamCancelledError as e:
+                    print('Stream was cancelled')
+                    break
                 # Process the message
                 response_status = process_message(message)
diff --git a/examples/pubsub-streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py
index 2c79235a..f6371d75 100644
--- a/examples/pubsub-streaming/subscriber.py
+++ b/examples/pubsub-streaming/subscriber.py
@@ -3,6 +3,7 @@
 
 from dapr.clients import DaprClient
 from dapr.clients.grpc.subscription import StreamInactiveError
+from dapr.common.pubsub.subscription import StreamCancelledError
 
 counter = 0
 
@@ -38,17 +39,22 @@ def main():
             while counter < 5:
                 try:
                     message = subscription.next_message()
+                    if message is None:
+                        print('No message received within timeout period. '
+                              'The stream might have been cancelled.')
+                        continue
 
                 except StreamInactiveError as e:
                     print('Stream is inactive. Retrying...')
                     time.sleep(1)
                     continue
+                except StreamCancelledError as e:
+                    print('Stream was cancelled')
+                    break
                 except Exception as e:
                     print(f'Error occurred: {e}')
                     pass
 
-                if message is None:
-                    print('No message received within timeout period.')
-                    continue
+
                 # Process the message
                 response_status = process_message(message)

From 86cc67c9381a2cb1cc208b54d53e01eeb738924c Mon Sep 17 00:00:00 2001
From: Elena Kolevska
Date: Mon, 21 Oct 2024 16:43:59 +0100
Subject: [PATCH 33/33] linter

Signed-off-by: Elena Kolevska
---
 dapr/aio/clients/grpc/subscription.py         | 7 +++++--
 dapr/clients/grpc/subscription.py             | 7 +++++--
 dapr/common/pubsub/subscription.py            | 1 +
 examples/pubsub-streaming-async/subscriber.py | 6 ++++--
 examples/pubsub-streaming/subscriber.py       | 7 ++++---
 5 files changed, 19 insertions(+), 9 deletions(-)

diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py
index 020e208b..a526ee86 100644
--- a/dapr/aio/clients/grpc/subscription.py
+++ b/dapr/aio/clients/grpc/subscription.py
@@ -4,8 +4,11 @@
 
 from dapr.clients.grpc._response import TopicEventResponse
 from dapr.clients.health import DaprHealth
-from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage, \
-    StreamCancelledError
+from dapr.common.pubsub.subscription import (
+    StreamInactiveError,
+    SubscriptionMessage,
+    StreamCancelledError,
+)
 from dapr.proto import api_v1, appcallback_v1
 
 
diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py
index bfeaec9c..d67bed9d 100644
--- a/dapr/clients/grpc/subscription.py
+++ b/dapr/clients/grpc/subscription.py
@@ -2,8 +2,11 @@
 
 from dapr.clients.grpc._response import TopicEventResponse
 from dapr.clients.health import DaprHealth
-from dapr.common.pubsub.subscription import StreamInactiveError, SubscriptionMessage, \
-    StreamCancelledError
+from dapr.common.pubsub.subscription import (
+    StreamInactiveError,
+    SubscriptionMessage,
+    StreamCancelledError,
+)
 from dapr.proto import api_v1, appcallback_v1
 import queue
 import threading
diff --git a/dapr/common/pubsub/subscription.py b/dapr/common/pubsub/subscription.py
index e0ce1321..6f68e180 100644
--- a/dapr/common/pubsub/subscription.py
+++ b/dapr/common/pubsub/subscription.py
@@ -87,5 +87,6 @@ def _parse_data_content(self):
 class StreamInactiveError(Exception):
     pass
 
+
 class StreamCancelledError(Exception):
     pass
diff --git a/examples/pubsub-streaming-async/subscriber.py b/examples/pubsub-streaming-async/subscriber.py
index aca9e466..7907bb5f 100644
--- a/examples/pubsub-streaming-async/subscriber.py
+++ b/examples/pubsub-streaming-async/subscriber.py
@@ -35,8 +35,10 @@ async def main():
                 try:
                     message = await subscription.next_message()
                     if message is None:
-                        print('No message received within timeout period. '
-                              'The stream might have been cancelled.')
+                        print(
+                            'No message received within timeout period. '
+                            'The stream might have been cancelled.'
+                        )
                         continue
 
                 except StreamInactiveError:
diff --git a/examples/pubsub-streaming/subscriber.py b/examples/pubsub-streaming/subscriber.py
index f6371d75..88744c88 100644
--- a/examples/pubsub-streaming/subscriber.py
+++ b/examples/pubsub-streaming/subscriber.py
@@ -40,8 +40,10 @@ def main():
                 try:
                     message = subscription.next_message()
                     if message is None:
-                        print('No message received within timeout period. '
-                              'The stream might have been cancelled.')
+                        print(
+                            'No message received within timeout period. '
+                            'The stream might have been cancelled.'
+                        )
                         continue
 
                 except StreamInactiveError as e:
@@ -55,7 +57,6 @@ def main():
                     print(f'Error occurred: {e}')
                     pass
 
-
                 # Process the message
                 response_status = process_message(message)
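
For context, a minimal sketch of how an application could consume the streaming subscription API that this series builds up. It is illustrative only and not part of the patches: it assumes a running Dapr sidecar, a pub/sub component named 'pubsub', and the DaprClient.subscribe(), Subscription.next_message(), Subscription.close(), StreamInactiveError and StreamCancelledError surface shown in the diffs above. The acknowledgement step is left as a comment because the final respond/ack method name is not visible in this excerpt.

# Illustrative sketch only (not part of the patch series). Assumes the SDK
# surface shown in the diffs above: DaprClient.subscribe(), next_message(),
# close(), StreamInactiveError and StreamCancelledError.
import time

from dapr.clients import DaprClient
from dapr.clients.grpc.subscription import StreamInactiveError
from dapr.common.pubsub.subscription import StreamCancelledError


def consume(pubsub_name='pubsub', topic='orders'):
    with DaprClient() as client:
        # subscribe() opens the bidirectional SubscribeTopicEventsAlpha1 stream
        # and returns a Subscription object that buffers incoming events.
        subscription = client.subscribe(pubsub_name=pubsub_name, topic=topic)
        try:
            while True:
                try:
                    message = subscription.next_message()
                except StreamInactiveError:
                    # Stream dropped; the SDK attempts to reconnect, so back off briefly.
                    time.sleep(1)
                    continue
                except StreamCancelledError:
                    # The sidecar cancelled the stream; stop consuming.
                    break

                if message is None:
                    # No event arrived within the internal timeout; poll again.
                    continue

                print(f'Received message: {message}')
                # Acknowledge the event here via the Subscription's respond/ack
                # API (its exact name is not shown in this excerpt).
        finally:
            subscription.close()


if __name__ == '__main__':
    consume()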