From a935ac28c458198096c44f50b3dce448c2cf3346 Mon Sep 17 00:00:00 2001 From: Yuce Tekol Date: Fri, 12 Dec 2025 11:27:20 +0300 Subject: [PATCH 1/3] Added asyncio client docs --- docs/asyncio_client.rst | 4 + docs/index.rst | 1 + .../map/map_portable_versioning_example.py | 1 + hazelcast/client.py | 17 + hazelcast/core.py | 1 + hazelcast/internal/asyncio_client.py | 131 ++++ hazelcast/internal/asyncio_proxy/map.py | 644 ++++++++++++++++++ .../asyncio_proxy/vector_collection.py | 182 ++++- hazelcast/proxy/vector_collection.py | 6 +- hazelcast/serialization/__init__.py | 1 + hazelcast/serialization/api.py | 1 + .../serialization/serialization_const.py | 1 + .../serialization/serializers_test.py | 6 +- tests/integration/dbapi/dbapi20.py | 16 +- tests/unit/serialization/serializers_test.py | 2 +- 15 files changed, 997 insertions(+), 17 deletions(-) create mode 100644 docs/asyncio_client.rst diff --git a/docs/asyncio_client.rst b/docs/asyncio_client.rst new file mode 100644 index 0000000000..ce31ae291b --- /dev/null +++ b/docs/asyncio_client.rst @@ -0,0 +1,4 @@ +Asyncio HazelcastClient API (BETA) Documentation +================================================ + +.. automodule:: hazelcast.asyncio diff --git a/docs/index.rst b/docs/index.rst index 1baa1e1d20..f06234a7a5 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -117,6 +117,7 @@ Features :hidden: client + asyncio_client config api/modules getting_started diff --git a/examples/map/map_portable_versioning_example.py b/examples/map/map_portable_versioning_example.py index 80d7c0a488..1291a376b8 100644 --- a/examples/map/map_portable_versioning_example.py +++ b/examples/map/map_portable_versioning_example.py @@ -91,6 +91,7 @@ def __eq__(self, other): # However, having a version that changes across incompatible field types such as int and String will cause # a type error as members with older versions of the class tries to access it. We will demonstrate this below. 
+ # Version3: Changed age field type from int to String. (Incompatible type change) class Employee3(Portable): FACTORY_ID = 666 diff --git a/hazelcast/client.py b/hazelcast/client.py index 2962b31732..4f432e83ea 100644 --- a/hazelcast/client.py +++ b/hazelcast/client.py @@ -391,6 +391,15 @@ def create_vector_collection_config( merge_policy: str = "PutIfAbsentMergePolicy", merge_batch_size: int = 100, ) -> None: + """Creates a vector collection with the given configuration. + + Args: + name: Name of the distributed map. + indexes: One or more index configurations. The index names must be unique. + backup_count: Number of backups to keep for the vector collection. + split_brain_protection_name: Name of the split brain protection configuration. See https://docs.hazelcast.com/hazelcast/5.6/data-structures/vector-collections#split-brain-protection + merge_policy: The merge policy to use while recovering in a split brain situation. See https://docs.hazelcast.com/hazelcast/5.6/data-structures/vector-collections#merge-policy + """ # check that indexes have different names if indexes: index_names = set(index.name for index in indexes) @@ -411,6 +420,14 @@ def create_vector_collection_config( invocation.future.result() def get_vector_collection(self, name: str) -> VectorCollection: + """Returns the vector collection instance with the specified name. + + Args: + name: Name of the vector collection. + + Returns: + Vector collection instance with the specified name. 
+ """ return self._proxy_manager.get_or_create(VECTOR_SERVICE, name) def new_transaction( diff --git a/hazelcast/core.py b/hazelcast/core.py index 3443759e44..9bc2bc62c5 100644 --- a/hazelcast/core.py +++ b/hazelcast/core.py @@ -1,4 +1,5 @@ """Hazelcast Core objects and constants.""" + import json import typing import uuid diff --git a/hazelcast/internal/asyncio_client.py b/hazelcast/internal/asyncio_client.py index 1829e83f0f..8fc50d484e 100644 --- a/hazelcast/internal/asyncio_client.py +++ b/hazelcast/internal/asyncio_client.py @@ -41,16 +41,91 @@ class HazelcastClient: + """Hazelcast client instance to access and manipulate distributed data + structures on the Hazelcast clusters. + + The client can be configured either by: + + - providing a configuration object as the first parameter of the + constructor + + .. code:: python + + from hazelcast.asyncio import HazelcastClient + from hazelcast.config import Config + + config = Config() + config.cluster_name = "a-cluster" + client = await HazelcastClient.create_and_start(config) + + - passing configuration options as keyword arguments + + .. code:: python + + from hazelcast.asyncio import HazelcastClient + + client = await HazelcastClient.crate_and_start( + cluster_name="a-cluster", + ) + + Warning: + Asyncio client is not thread-safe, do not access it from other threads. + + Warning: + Asyncio client is BETA. + Its public API may change until General Availability release. + + See the :class:`hazelcast.config.Config` documentation for the possible + configuration options. + """ _CLIENT_ID = AtomicInteger() @classmethod async def create_and_start(cls, config: Config | None = None, **kwargs) -> "HazelcastClient": + """Creates a HazelcastClient instance, and starts it. + + .. code:: python + + from hazelcast.asyncio import HazelcastClient + + client = await HazelcastClient.create_and_start() + + See the :class:`hazelcast.config.Config` documentation for the possible + configuration options. 
+ + Args: + config: Optional configuration object. + **kwargs: Optional keyword arguments of the client configuration. + + """ + client = HazelcastClient(config, **kwargs) await client._start() return client def __init__(self, config: Config | None = None, **kwargs): + """Creates a HazelcastClient instance. + + This call just creates the instance, without starting it. + + The preferred way of creating and starting the client instance is using the ``create_and_start`` method: + + .. code:: python + + from hazelcast.asyncio import HazelcastClient + + client = await HazelcastClient.create_and_start() + + See the :class:`hazelcast.config.Config` documentation for the possible + configuration options. + + Args: + config: Optional configuration object. + **kwargs: Optional keyword arguments of the client configuration. + + """ + if config: if kwargs: raise InvalidConfigurationError( @@ -174,6 +249,14 @@ async def _start(self): _logger.info("Client started") async def get_map(self, name: str) -> Map[KeyType, ValueType]: + """Returns the distributed map instance with the specified name. + + Args: + name: Name of the distributed map. + + Returns: + Distributed map instance with the specified name. + """ return await self._proxy_manager.get_or_create(MAP_SERVICE, name) async def create_vector_collection_config( @@ -186,6 +269,15 @@ async def create_vector_collection_config( merge_policy: str = "PutIfAbsentMergePolicy", merge_batch_size: int = 100, ) -> None: + """Creates a vector collection with the given configuration. + + Args: + name: Name of the distributed map. + indexes: One or more index configurations. The index names must be unique. + backup_count: Number of backups to keep for the vector collection. + split_brain_protection_name: Name of the split brain protection configuration. See https://docs.hazelcast.com/hazelcast/5.6/data-structures/vector-collections#split-brain-protection + merge_policy: The merge policy to use while recovering in a split brain situation. 
See https://docs.hazelcast.com/hazelcast/5.6/data-structures/vector-collections#merge-policy + """ # check that indexes have different names if indexes: index_names = set(index.name for index in indexes) @@ -205,11 +297,29 @@ async def create_vector_collection_config( await self._invocation_service.ainvoke(invocation) async def get_vector_collection(self, name: str) -> VectorCollection: + """Returns the vector collection instance with the specified name. + + Args: + name: Name of the vector collection. + + Returns: + Vector collection instance with the specified name. + """ return await self._proxy_manager.get_or_create(VECTOR_SERVICE, name) async def add_distributed_object_listener( self, listener_func: typing.Callable[[DistributedObjectEvent], None] ) -> str: + """Adds a listener which will be notified when a new distributed object + is created or destroyed. + + Args: + listener_func: Function to be called when a distributed object is + created or destroyed. + + Returns: + A registration id which is used as a key to remove the listener. + """ is_smart = self._config.smart_routing codec = client_add_distributed_object_listener_codec request = codec.encode_request(is_smart) @@ -229,9 +339,20 @@ def event_handler(client_message): ) async def remove_distributed_object_listener(self, registration_id: str) -> bool: + """Removes the specified distributed object listener. + + Returns silently if there is no such listener added before. + + Args: + registration_id: The id of registered listener. + + Returns: + ``True`` if registration is removed, ``False`` otherwise. 
+ """ return await self._listener_service.deregister_listener(registration_id) async def shutdown(self) -> None: + """Shuts down this HazelcastClient.""" async with self._shutdown_lock: if self._internal_lifecycle_service.running: self._internal_lifecycle_service.fire_lifecycle_event(LifecycleState.SHUTTING_DOWN) @@ -244,18 +365,28 @@ async def shutdown(self) -> None: @property def name(self) -> str: + """Name of the client.""" return self._name @property def lifecycle_service(self) -> LifecycleService: + """Lifecycle service allows you to check if the client is running and + add and remove lifecycle listeners. + """ return self._lifecycle_service @property def partition_service(self) -> PartitionService: + """Partition service allows you to get partition count, introspect + the partition owners, and partition ids of keys. + """ return self._partition_service @property def cluster_service(self) -> ClusterService: + """ClusterService: Cluster service allows you to get the list of + the cluster members and add and remove membership listeners. + """ return self._cluster_service def _create_address_provider(self): diff --git a/hazelcast/internal/asyncio_proxy/map.py b/hazelcast/internal/asyncio_proxy/map.py index 84c8ecaa14..a911ef8a38 100644 --- a/hazelcast/internal/asyncio_proxy/map.py +++ b/hazelcast/internal/asyncio_proxy/map.py @@ -90,6 +90,28 @@ class Map(Proxy, typing.Generic[KeyType, ValueType]): + """Hazelcast Map client proxy to access the map on the cluster. + + Concurrent, distributed, observable and queryable map. + + Example: + >>> my_map = await client.get_map("my_map") + >>> print("map.put", await my_map.put("key", "value")) + >>> print("map.contains_key", await my_map.contains_key("key")) + >>> print("map.get", await my_map.get("key")) + >>> print("map.size", await my_map.size()) + + This class does not allow ``None`` to be used as a key or value. + + Warning: + Asyncio client map proxy is not thread-safe, do not access it from other threads. 
+ + Warning: + Asyncio client is BETA. + Its public API may change until General Availability release. + + """ + def __init__(self, service_name, name, context): super(Map, self).__init__(service_name, name, context) self._reference_id_generator = context.lock_reference_id_generator @@ -109,6 +131,38 @@ async def add_entry_listener( expired_func: EntryEventCallable = None, loaded_func: EntryEventCallable = None, ) -> str: + """Adds a continuous entry listener for this map. + + Listener will get notified for map events filtered with given + parameters. + + The listener functions must not block. + + Args: + include_value: Whether received event should include the value or + not. + key: Key for filtering the events. + predicate: Predicate for filtering the events. + added_func: Function to be called when an entry is added to map. + removed_func: Function to be called when an entry is removed from + map. + updated_func: Function to be called when an entry is updated. + evicted_func: Function to be called when an entry is evicted from + map. + evict_all_func: Function to be called when entries are evicted + from map. + clear_all_func: Function to be called when entries are cleared + from map. + merged_func: Function to be called when WAN replicated entry is + merged. + expired_func: Function to be called when an entry's live time is + expired. + loaded_func: Function to be called when an entry is loaded from a + map loader. + + Returns: + A registration id which is used as a key to remove the listener. + """ flags = get_entry_listener_flags( ADDED=added_func, REMOVED=removed_func, @@ -257,6 +311,56 @@ async def add_index( name: str = None, bitmap_index_options: typing.Dict[str, typing.Any] = None, ) -> None: + """Adds an index to this map for the specified entries so that queries + can run faster. + + Example: + Let's say your map values are Employee objects. 
+ + >>> class Employee(IdentifiedDataSerializable): + >>> active = False + >>> age = None + >>> name = None + >>> #other fields + >>> + >>> #methods + + If you query your values mostly based on age and active fields, + you should consider indexing these. + + >>> employees = await client.get_map("employees") + >>> await employees.add_index(attributes=["age"]) # Sorted index for range queries + >>> await employees.add_index(attributes=["active"], index_type=IndexType.HASH) # Hash index for equality predicates + + Index attribute should either have a getter method or be public. + You should also make sure to add the indexes before adding + entries to this map. + + Indexing time is executed in parallel on each partition by operation + threads. The Map is not blocked during this operation. The time taken + is proportional to the size of the Map and the number of Members. + + Until the index finishes being created, any searches for the attribute + will use a full Map scan, thus avoiding using a partially built index + and returning incorrect results. + + Args: + attributes: List of indexed attributes. + index_type: Type of the index. By default, set to ``SORTED``. + name: Name of the index. + bitmap_index_options: Bitmap index options. + + - **unique_key:** (str): The unique key attribute is used as a + source of values which uniquely identify each entry being + inserted into an index. Defaults to ``KEY_ATTRIBUTE_NAME``. + See the :class:`hazelcast.config.QueryConstants` for + possible values. + - **unique_key_transformation** (int|str): The transformation + is applied to every value extracted from the unique key + attribute. Defaults to ``OBJECT``. See the + :class:`hazelcast.config.UniqueKeyTransformation` for + possible values. + """ d = { "name": name, "type": index_type, @@ -269,6 +373,18 @@ async def add_index( return await self._invoke(request) async def add_interceptor(self, interceptor: typing.Any) -> str: + """Adds an interceptor for this map. 
+ + Added interceptor will intercept operations and execute user defined + methods. + + Args: + interceptor: Interceptor for the map which includes user defined + methods. + + Returns: + Id of registered interceptor. + """ try: interceptor_data = self._to_data(interceptor) except SchemaNotReplicatedError as e: @@ -280,6 +396,16 @@ async def add_interceptor(self, interceptor: typing.Any) -> str: async def aggregate( self, aggregator: Aggregator[AggregatorResultType], predicate: Predicate = None ) -> AggregatorResultType: + """Applies the aggregation logic on map entries and filter the result + with the predicate, if given. + + Args: + aggregator: Aggregator to aggregate the entries with. + predicate: Predicate to filter the entries with. + + Returns: + The result of the aggregation. + """ check_not_none(aggregator, "aggregator can't be none") if predicate: if isinstance(predicate, _PagingPredicate): @@ -311,10 +437,28 @@ def handler(message): return await self._invoke(request, handler) async def clear(self) -> None: + """Clears the map. + + The ``MAP_CLEARED`` event is fired for any registered listeners. + """ request = map_clear_codec.encode_request(self.name) return await self._invoke(request) async def contains_key(self, key: KeyType) -> bool: + """Determines whether this map contains an entry with the key. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: The specified key. + + Returns: + ``True`` if this map contains an entry for the specified key, + ``False`` otherwise. 
+ """ check_not_none(key, "key can't be None") try: key_data = self._to_data(key) @@ -324,6 +468,16 @@ async def contains_key(self, key: KeyType) -> bool: return await self._contains_key_internal(key_data) async def contains_value(self, value: ValueType) -> bool: + """Determines whether this map contains one or more keys for the + specified value. + + Args: + value: The specified value. + + Returns: + ``True`` if this map contains an entry for the specified value, + ``False`` otherwise. + """ check_not_none(value, "value can't be None") try: value_data = self._to_data(value) @@ -333,6 +487,27 @@ async def contains_value(self, value: ValueType) -> bool: return await self._invoke(request, map_contains_value_codec.decode_response) async def delete(self, key: KeyType) -> None: + """Removes the mapping for a key from this map if it is present + (optional operation). + + Unlike remove(object), this operation does not return the removed + value, which avoids the serialization cost of the returned value. + If the removed value will not be used, a delete operation is preferred + over a remove operation for better performance. + + The map will not contain a mapping for the specified key once the call + returns. + + Warning: + This method breaks the contract of EntryListener. + When an entry is removed by delete(), it fires an ``EntryEvent`` + with a ``None`` ``old_value``. Also, a listener with predicates + will have ``None`` values, so only the keys can be queried + via predicates. + + Args: + key: Key of the mapping to be deleted. + """ check_not_none(key, "key can't be None") try: key_data = self._to_data(key) @@ -343,6 +518,18 @@ async def delete(self, key: KeyType) -> None: async def entry_set( self, predicate: Predicate = None ) -> typing.List[typing.Tuple[KeyType, ValueType]]: + """Returns a list clone of the mappings contained in this map. + + Warning: + The list is NOT backed by the map, so changes to the map are NOT + reflected in the list, and vice-versa. 
+ + Args: + predicate: Predicate for the map to filter entries. + + Returns: + The list of key-value tuples in the map. + """ if predicate: if isinstance(predicate, _PagingPredicate): predicate.iteration_type = IterationType.ENTRY @@ -382,6 +569,19 @@ def handler(message): return await self._invoke(request, handler) async def evict(self, key: KeyType) -> bool: + """Evicts the specified key from this map. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: Key to evict. + + Returns: + ``True`` if the key is evicted, ``False`` otherwise. + """ check_not_none(key, "key can't be None") try: key_data = self._to_data(key) @@ -391,12 +591,32 @@ async def evict(self, key: KeyType) -> bool: return await self._evict_internal(key_data) async def evict_all(self) -> None: + """Evicts all keys from this map except the locked ones. + + The ``EVICT_ALL`` event is fired for any registered listeners. + """ request = map_evict_all_codec.encode_request(self.name) return await self._invoke(request) async def execute_on_entries( self, entry_processor: typing.Any, predicate: Predicate | None = None ) -> typing.List[typing.Any]: + """Applies the user defined EntryProcessor to all the entries in the + map or entries in the map which satisfies the predicate if provided. + Returns the results mapped by each key in the map. + + Args: + entry_processor: A stateful serializable object which represents + the EntryProcessor defined on server side. This object must + have a serializable EntryProcessor counter part registered + on server side with the actual + ``com.hazelcast.map.EntryProcessor`` implementation. + predicate: Predicate for filtering the entries. + + Returns: + List of map entries which includes the keys and the results of the + entry process. 
+ """ if predicate: try: entry_processor_data = self._to_data(entry_processor) @@ -430,6 +650,21 @@ def handler(message): return await self._invoke(request, handler) async def execute_on_key(self, key: KeyType, entry_processor: typing.Any) -> typing.Any: + """Applies the user defined EntryProcessor to the entry mapped by the + key. Returns the object which is the result of EntryProcessor's + process method. + + Args: + key: Specified key for the entry to be processed. + entry_processor: A stateful serializable object which represents + the EntryProcessor defined on server side. This object must + have a serializable EntryProcessor counter part registered on + server side with the actual + ``com.hazelcast.map.EntryProcessor`` implementation. + + Returns: + Result of entry process. + """ check_not_none(key, "key can't be None") try: key_data = self._to_data(key) @@ -442,6 +677,22 @@ async def execute_on_key(self, key: KeyType, entry_processor: typing.Any) -> typ async def execute_on_keys( self, keys: typing.Sequence[KeyType], entry_processor: typing.Any ) -> typing.List[typing.Any]: + """Applies the user defined EntryProcessor to the entries mapped by the + collection of keys. Returns the results mapped by each key in the + collection. + + Args: + keys: Collection of the keys for the entries to be processed. + entry_processor: A stateful serializable object which represents + the EntryProcessor defined on server side. This object must + have a serializable EntryProcessor counter part registered on + server side with the actual + ``com.hazelcast.map.EntryProcessor`` implementation. + + Returns: + List of map entries which includes the keys and the results of the + entry process. 
+ """ if len(keys) == 0: return [] try: @@ -464,10 +715,34 @@ def handler(message): return await self._invoke(request, handler) async def flush(self) -> None: + """Flushes all the local dirty entries.""" request = map_flush_codec.encode_request(self.name) return await self._invoke(request) async def get(self, key: KeyType) -> typing.Optional[ValueType]: + """Returns the value for the specified key, or ``None`` if this map + does not contain this key. + + Warning: + This method returns a clone of original value, modifying the + returned value does not change the actual value in the map. One + should put modified value back to make changes visible to all nodes. + + >>> value = await my_map.get(key) + >>> value.update_some_property() + >>> await my_map.put(key,value) + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: The specified key. + + Returns: + The value for the specified key. + """ check_not_none(key, "key can't be None") try: key_data = self._to_data(key) @@ -476,6 +751,24 @@ async def get(self, key: KeyType) -> typing.Optional[ValueType]: return await self._get_internal(key_data) async def get_all(self, keys: typing.Sequence[KeyType]) -> typing.Dict[KeyType, ValueType]: + """Returns the entries for the given keys. + + Warning: + The returned map is NOT backed by the original map, so changes to + the original map are NOT reflected in the returned map, and + vice-versa. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + keys: Keys to get. + + Returns: + Dictionary of map entries. 
+ """ check_not_none(keys, "keys can't be None") if not keys: return {} @@ -496,6 +789,25 @@ async def get_all(self, keys: typing.Sequence[KeyType]) -> typing.Dict[KeyType, return await self._get_all_internal(partition_to_keys) async def get_entry_view(self, key: KeyType) -> SimpleEntryView[KeyType, ValueType]: + """Returns the EntryView for the specified key. + + Warning: + This method returns a clone of original mapping, modifying the + returned value does not change the actual value in the map. One + should put modified value back to make changes visible to all + nodes. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: The key of the entry. + + Returns: + EntryView of the specified key. + """ check_not_none(key, "key can't be None") try: key_data = self._to_data(key) @@ -515,10 +827,29 @@ def handler(message): return await self._invoke_on_key(request, key_data, handler) async def is_empty(self) -> bool: + """Returns whether this map contains no key-value mappings or not. + + Returns: + ``True`` if this map contains no key-value mappings, ``False`` + otherwise. + """ request = map_is_empty_codec.encode_request(self.name) return await self._invoke(request, map_is_empty_codec.decode_response) async def key_set(self, predicate: Predicate | None = None) -> typing.List[ValueType]: + """Returns a List clone of the keys contained in this map or the keys + of the entries filtered with the predicate if provided. + + Warning: + The list is NOT backed by the map, so changes to the map are NOT + reflected in the list, and vice-versa. + + Args: + predicate: Predicate to filter the entries. + + Returns: + A list of the clone of the keys. 
+ """ if predicate: if isinstance(predicate, _PagingPredicate): predicate.iteration_type = IterationType.KEY @@ -561,6 +892,15 @@ def handler(message): async def load_all( self, keys: typing.Sequence[KeyType] = None, replace_existing_values: bool = True ) -> None: + """Loads all keys from the store at server side or loads the given + keys if provided. + + Args: + keys: Keys of the entry values to load. + replace_existing_values: Whether the existing values will be + replaced or not with those loaded from the server side + MapLoader. + """ if keys: try: key_data_list = [self._to_data(key) for key in keys] @@ -577,6 +917,16 @@ async def load_all( async def project( self, projection: Projection[ProjectionType], predicate: Predicate = None ) -> ProjectionType: + """Applies the projection logic on map entries and filter the result + with the predicate, if given. + + Args: + projection: Projection to project the entries with. + predicate: Predicate to filter the entries with. + + Returns: + The result of the projection. + """ check_not_none(projection, "Projection can't be none") if predicate: if isinstance(predicate, _PagingPredicate): @@ -611,6 +961,37 @@ def handler(message): async def put( self, key: KeyType, value: ValueType, ttl: float = None, max_idle: float = None ) -> typing.Optional[ValueType]: + """Associates the specified value with the specified key in this map. + + If the map previously contained a mapping for the key, the old value is + replaced by the specified value. If ttl is provided, entry will expire + and get evicted after the ttl. + + Warning: + This method returns a clone of the previous value, not the original + (identically equal) value previously put into the map. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: The specified key. + value: The value to associate with the key. 
+ ttl: Maximum time in seconds for this entry to stay in the map. If + not provided, the value configured on the server side + configuration will be used. Setting this to ``0`` means + infinite time-to-live. + max_idle: Maximum time in seconds for this entry to stay idle in + the map. If not provided, the value configured on the server + side configuration will be used. Setting this to ``0`` means + infinite max idle time. + + Returns: + Previous value associated with key or ``None`` if there was no + mapping for key. + """ check_not_none(key, "key can't be None") check_not_none(value, "value can't be None") try: @@ -622,6 +1003,14 @@ async def put( return await self._put_internal(key_data, value_data, ttl, max_idle) async def put_all(self, map: typing.Dict[KeyType, ValueType]) -> None: + """Copies all the mappings from the specified map to this map. + + No atomicity guarantees are given. In the case of a failure, some + key-value tuples may get written, while others are not. + + Args: + map: Dictionary which includes mappings to be stored in this map. + """ check_not_none(map, "map can't be None") if not map: return None @@ -651,6 +1040,43 @@ async def put_all(self, map: typing.Dict[KeyType, ValueType]) -> None: async def put_if_absent( self, key: KeyType, value: ValueType, ttl: float = None, max_idle: float = None ) -> typing.Optional[ValueType]: + """Associates the specified key with the given value if it is not + already associated. + + If ttl is provided, entry will expire and get evicted after the ttl. + + This is equivalent to below, except that the action is performed + atomically: + + >>> if not (await my_map.contains_key(key)): + >>> return await my_map.put(key,value) + >>> else: + >>> return await my_map.get(key) + + Warning: + This method returns a clone of the previous value, not the original + (identically equal) value previously put into the map. 
+ + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: Key of the entry. + value: Value of the entry. + ttl: Maximum time in seconds for this entry to stay in the map. If + not provided, the value configured on the server side + configuration will be used. Setting this to ``0`` means + infinite time-to-live. + max_idle: Maximum time in seconds for this entry to stay idle in + the map. If not provided, the value configured on the server + side configuration will be used. Setting this to ``0`` means + infinite max idle time. + + Returns: + Old value of the entry. + """ check_not_none(key, "key can't be None") check_not_none(value, "value can't be None") try: @@ -666,6 +1092,26 @@ async def put_if_absent( async def put_transient( self, key: KeyType, value: ValueType, ttl: float = None, max_idle: float = None ) -> None: + """Same as ``put``, but MapStore defined at the server side will not + be called. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: Key of the entry. + value: Value of the entry. + ttl: Maximum time in seconds for this entry to stay in the map. If + not provided, the value configured on the server side + configuration will be used. Setting this to ``0`` means + infinite time-to-live. + max_idle: Maximum time in seconds for this entry to stay idle in + the map. If not provided, the value configured on the server + side configuration will be used. Setting this to ``0`` means + infinite max idle time. 
+ """ check_not_none(key, "key can't be None") check_not_none(value, "value can't be None") try: @@ -679,6 +1125,23 @@ async def put_transient( return await self._put_transient_internal(key_data, value_data, ttl, max_idle) async def remove(self, key: KeyType) -> typing.Optional[ValueType]: + """Removes the mapping for a key from this map if it is present. + + The map will not contain a mapping for the specified key once the call + returns. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: Key of the mapping to be deleted. + + Returns: + The previous value associated with key, or ``None`` if there was + no mapping for key. + """ check_not_none(key, "key can't be None") try: key_data = self._to_data(key) @@ -688,6 +1151,11 @@ async def remove(self, key: KeyType) -> typing.Optional[ValueType]: return await self._remove_internal(key_data) async def remove_all(self, predicate: Predicate) -> None: + """Removes all entries which match with the supplied predicate. + + Args: + predicate: Used to select entries to be removed from map. + """ check_not_none(predicate, "predicate can't be None") try: predicate_data = self._to_data(predicate) @@ -697,6 +1165,30 @@ async def remove_all(self, predicate: Predicate) -> None: return await self._remove_all_internal(predicate_data) async def remove_if_same(self, key: KeyType, value: ValueType) -> bool: + """Removes the entry for a key only if it is currently mapped to a + given value. 
+ + This is equivalent to below, except that the action is performed + atomically: + + >>> if (await my_map.contains_key(key)) and (await my_map.get(key) == value): + >>> await my_map.remove(key) + >>> return True + >>> else: + >>> return False + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: The specified key. + value: Remove the key if it has this value. + + Returns: + ``True`` if the value was removed, ``False`` otherwise. + """ check_not_none(key, "key can't be None") check_not_none(value, "value can't be None") try: @@ -707,14 +1199,61 @@ async def remove_if_same(self, key: KeyType, value: ValueType) -> bool: return await self._remove_if_same_internal_(key_data, value_data) async def remove_entry_listener(self, registration_id: str) -> bool: + """Removes the specified entry listener. + + Returns silently if there is no such listener added before. + + Args: + registration_id: Id of registered listener. + + Returns: + ``True`` if registration is removed, ``False`` otherwise. + """ return await self._deregister_listener(registration_id) async def remove_interceptor(self, registration_id: str) -> bool: + """Removes the given interceptor for this map, so it will not intercept + operations anymore. + + Args: + registration_id: Registration ID of the map interceptor. + + Returns: + ``True`` if the interceptor is removed, ``False`` otherwise. + """ check_not_none(registration_id, "Interceptor registration id should not be None") request = map_remove_interceptor_codec.encode_request(self.name, registration_id) return await self._invoke(request, map_remove_interceptor_codec.decode_response) async def replace(self, key: KeyType, value: ValueType) -> typing.Optional[ValueType]: + """Replaces the entry for a key only if it is currently mapped to some + value. 
+ + This is equivalent to below, except that the action is performed + atomically: + + >>> if await my_map.contains_key(key): + >>> return await my_map.put(key,value) + >>> else: + >>> return None + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Warning: + This method returns a clone of the previous value, not the original + (identically equal) value previously put into the map. + + Args: + key: The specified key. + value: The value to replace the previous value. + + Returns: + Previous value associated with key, or ``None`` if there was no + mapping for key. + """ check_not_none(key, "key can't be None") check_not_none(value, "value can't be None") try: @@ -727,6 +1266,31 @@ async def replace(self, key: KeyType, value: ValueType) -> typing.Optional[Value async def replace_if_same( self, key: ValueType, old_value: ValueType, new_value: ValueType ) -> bool: + """Replaces the entry for a key only if it is currently mapped to a + given value. + + This is equivalent to below, except that the action is performed + atomically: + + >>> if (await my_map.contains_key(key)) and (await my_map.get(key) == old_value): + >>> await my_map.put(key, new_value) + >>> return True + >>> else: + >>> return False + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: The specified key. + old_value: Replace the key value if it is the old value. + new_value: The new value to replace the old value. + + Returns: + ``True`` if the value was replaced, ``False`` otherwise. 
+ """ check_not_none(key, "key can't be None") check_not_none(old_value, "old_value can't be None") check_not_none(new_value, "new_value can't be None") @@ -744,6 +1308,29 @@ async def replace_if_same( async def set( self, key: KeyType, value: ValueType, ttl: float = None, max_idle: float = None ) -> None: + """Puts an entry into this map. + + Similar to the put operation except that set doesn't return the old + value, which is more efficient. If ttl is provided, entry will expire + and get evicted after the ttl. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: Key of the entry. + value: Value of the entry. + ttl: Maximum time in seconds for this entry to stay in the map. If + not provided, the value configured on the server side + configuration will be used. Setting this to ``0`` means + infinite time-to-live. + max_idle: Maximum time in seconds for this entry to stay idle in + the map. If not provided, the value configured on the server + side configuration will be used. Setting this to ``0`` means + infinite max idle time. + """ check_not_none(key, "key can't be None") check_not_none(value, "value can't be None") try: @@ -754,6 +1341,18 @@ async def set( return await self._set_internal(key_data, value_data, ttl, max_idle) async def set_ttl(self, key: KeyType, ttl: float) -> None: + """Updates the TTL (time to live) value of the entry specified by the + given key with a new TTL value. + + New TTL value is valid starting from the time this operation is + invoked, not since the time the entry was created. If the entry does + not exist or is already expired, this call has no effect. + + Args: + key: The key of the map entry. + ttl: Maximum time in seconds for this entry to stay in the map. + Setting this to ``0`` means infinite time-to-live. 
+ """ check_not_none(key, "key can't be None") check_not_none(ttl, "ttl can't be None") try: @@ -763,10 +1362,29 @@ async def set_ttl(self, key: KeyType, ttl: float) -> None: return await self._set_ttl_internal(key_data, ttl) async def size(self) -> int: + """Returns the number of entries in this map. + + Returns: + Number of entries in this map. + """ request = map_size_codec.encode_request(self.name) return await self._invoke(request, map_size_codec.decode_response) async def try_put(self, key: KeyType, value: ValueType, timeout: float = 0) -> bool: + """Tries to put the given key and value into this map and returns + immediately if timeout is not provided. + + If timeout is provided, operation waits until it is completed or + timeout is reached. + + Args: + key: Key of the entry. + value: Value of the entry. + timeout: Maximum time in seconds to wait. + + Returns: + ``True`` if the put is successful, ``False`` otherwise. + """ check_not_none(key, "key can't be None") check_not_none(value, "value can't be None") try: @@ -777,6 +1395,19 @@ async def try_put(self, key: KeyType, value: ValueType, timeout: float = 0) -> b return await self._try_put_internal(key_data, value_data, timeout) async def try_remove(self, key: KeyType, timeout: float = 0) -> bool: + """Tries to remove the given key from this map and returns immediately + if timeout is not provided. + + If timeout is provided, operation waits until it is completed or + timeout is reached. + + Args: + key: Key of the entry to be deleted. + timeout: Maximum time in seconds to wait. + + Returns: + ``True`` if the remove is successful, ``False`` otherwise. 
+ """ check_not_none(key, "key can't be None") try: key_data = self._to_data(key) @@ -785,6 +1416,19 @@ async def try_remove(self, key: KeyType, timeout: float = 0) -> bool: return await self._try_remove_internal(key_data, timeout) async def values(self, predicate: Predicate = None) -> typing.List[ValueType]: + """Returns a list clone of the values contained in this map or values + of the entries which are filtered with the predicate if provided. + + Warning: + The list is NOT backed by the map, so changes to the map are NOT + reflected in the list, and vice-versa. + + Args: + predicate: Predicate to filter the entries. + + Returns: + A list of clone of the values contained in this map. + """ if predicate: if isinstance(predicate, _PagingPredicate): predicate.iteration_type = IterationType.VALUE diff --git a/hazelcast/internal/asyncio_proxy/vector_collection.py b/hazelcast/internal/asyncio_proxy/vector_collection.py index 4abe7fb3b2..61bc04055f 100644 --- a/hazelcast/internal/asyncio_proxy/vector_collection.py +++ b/hazelcast/internal/asyncio_proxy/vector_collection.py @@ -32,26 +32,118 @@ class VectorCollection(Proxy, typing.Generic[KeyType, ValueType]): + """VectorCollection contains documents with vectors. + + Concurrent, distributed, observable and searchable vector collection. + + The configuration of the vector collection must exist before it can be used. + + Example: + + >>> await client.create_vector_collection_config("my_vc", [ + >>> IndexConfig(name="default-vector", metric=Metric.COSINE, dimension=2) + >>> ] + >>> my_vc = await client.get_vector_collection("my_vc") + >>> await my_vc.set("key1", Vector("default-vector", Type.DENSE, [0.1, 0.2]) + + Warning: + Asyncio client vector collection proxy is not thread-safe, do not access it from other threads. + + Warning: + Asyncio client is BETA. + Its public API may change until General Availability release. 
+ + """ + def __init__(self, service_name, name, context): super(VectorCollection, self).__init__(service_name, name, context) async def get(self, key: Any) -> Document | None: + """Returns the Document for the specified key, or ``None`` if this VectorCollection + does not contain this key. + + Warning: + This method returns a clone of original Document, modifying the + returned Document does not change the actual Document in the VectorCollection. One + should put modified Document back to make changes visible to all nodes. + + >>> doc = await my_vc.get(key) + >>> doc.value.update_some_property() + >>> await my_vc.set(key, doc) + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: The specified key. + + Returns: + The Document for the specified key or ``None`` if there was no + mapping for key. + """ check_not_none(key, "key can't be None") return await self._get_internal(key) async def set(self, key: Any, document: Document) -> None: + """Sets a document for the given key in the VectorCollection. + + Similar to the put operation except that set doesn't return the old + document, which is more efficient. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: Key of the entry. + document: Document of the entry. + """ check_not_none(key, "key can't be None") check_not_none(document, "document can't be None") check_not_none(document.value, "document value can't be None") return await self._set_internal(key, document) async def put(self, key: Any, document: Document) -> Document | None: + """Associates the specified Document with the specified key in this VectorCollection. 
+ + If the VectorCollection previously contained a mapping for the key, the old Document is + replaced by the specified Document. In case the previous value is not needed, using + the ``set`` method is more efficient. + + Warning: + This method returns a clone of the previous Document, not the original + (identically equal) Document previously put into the VectorCollection. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: Key of the entry. + document: Document of the entry. + + Returns: + Previous Document associated with key or ``None`` if there was no + mapping for key. + """ check_not_none(key, "key can't be None") check_not_none(document, "document can't be None") check_not_none(document.value, "document value can't be None") return await self._put_internal(key, document) async def put_all(self, map: Dict[Any, Document]) -> None: + """Copies all the mappings from the specified dictionary to this VectorCollection. + + No atomicity guarantees are given. In the case of a failure, some + key-document tuples may get written, while others are not. + + Args: + map: Dictionary which includes mappings to be stored in this VectorCollection. + """ check_not_none(map, "map can't be None") if not map: return None @@ -78,6 +170,25 @@ async def put_all(self, map: Dict[Any, Document]) -> None: return None async def put_if_absent(self, key: Any, document: Document) -> Document | None: + """Associates the specified key with the given Document if it is not + already associated. + + Warning: + This method returns a clone of the previous Document, not the original + (identically equal) Document previously put into the VectorCollection. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. 
+ + Args: + key: Key of the entry. + document: Document of the entry. + + Returns: + Old Document for the given key or ``None`` if there is not one. + """ check_not_none(key, "key can't be None") check_not_none(document, "document can't be None") check_not_none(document.value, "document value can't be None") @@ -92,6 +203,22 @@ async def search_near_vector( limit: int = 10, hints: Dict[str, str] = None ) -> List[SearchResult]: + """Returns the Documents closest to the given vector. + + The search is performed using distance metric set when + creating the vector index. + + Args: + vector: The vector to be used as the reference. + It must have the same dimension as specified when creating the vector index. + include_value: Return value attached to the Document. + include_vectors: Return vectors attached to the Document. + limit: Limit the maximum number of Documents returned. + If not set, ``10`` is used as the default limit. + + Returns: + List of search results. + """ check_not_none(vector, "vector can't be None") if limit <= 0: raise AssertionError("limit must be positive") @@ -104,24 +231,73 @@ async def search_near_vector( ) async def remove(self, key: Any) -> Document | None: + """Removes the mapping for a key from this VectorCollection if it is present + (optional operation). + + The VectorCollection will not contain a mapping for the specified key once the call + returns. + + Warning: + This method uses ``__hash__`` and ``__eq__`` methods of binary form + of the key, not the actual implementations of ``__hash__`` and + ``__eq__`` defined in key's class. + + Args: + key: Key of the mapping to be deleted. + + Returns: + The Document associated with key, or ``None`` if there was + no mapping for key. + """ check_not_none(key, "key can't be None") return await self._remove_internal(key) async def delete(self, key: Any) -> None: + """Removes the mapping for a key from this VectorCollection if it is present + (optional operation). 
+ + Unlike remove(object), this operation does not return the removed + Document, which avoids the serialization cost of the returned Document. + If the removed Document will not be used, a delete operation is preferred + over a remove operation for better performance. + + The VectorCollection will not contain a mapping for the specified key once the call + returns. + + Args: + key: Key of the mapping to be deleted. + """ check_not_none(key, "key can't be None") return await self._delete_internal(key) async def optimize(self, index_name: str = None) -> None: + """Optimize index by fully removing nodes marked for deletion, trimming neighbor sets + to the advertised degree, and updating the entry node as necessary. + + Warning: + This operation can take long time to execute and consume a lot of server resources. + + Args: + index_name: Name of the index to optimize. If not specified, the only index defined + for the collection will be used. Must be specified if the collection has more than + one index. + """ request = vector_collection_optimize_codec.encode_request( self.name, index_name, uuid.uuid4() ) return await self._invoke(request) async def clear(self) -> None: + """Clears the VectorCollection.""" request = vector_collection_clear_codec.encode_request(self.name) return await self._invoke(request) async def size(self) -> int: + """Returns the number of Documents in this VectorCollection. + + Returns: + Number of Documents in this VectorCollection. 
+ """ request = vector_collection_size_codec.encode_request(self.name) return await self._invoke(request, vector_collection_size_codec.decode_response) @@ -165,9 +341,9 @@ def _search_near_vector_internal( hints: Dict[str, str] = None ) -> asyncio.Future[List[SearchResult]]: def handler(message): - results: List[ - SearchResult - ] = vector_collection_search_near_vector_codec.decode_response(message) + results: List[SearchResult] = ( + vector_collection_search_near_vector_codec.decode_response(message) + ) for result in results: if result.key is not None: result.key = self._to_object(result.key) diff --git a/hazelcast/proxy/vector_collection.py b/hazelcast/proxy/vector_collection.py index 424227a9e6..fef9a56ae2 100644 --- a/hazelcast/proxy/vector_collection.py +++ b/hazelcast/proxy/vector_collection.py @@ -342,9 +342,9 @@ def _search_near_vector_internal( hints: Dict[str, str] = None ) -> Future[List[SearchResult]]: def handler(message): - results: List[ - SearchResult - ] = vector_collection_search_near_vector_codec.decode_response(message) + results: List[SearchResult] = ( + vector_collection_search_near_vector_codec.decode_response(message) + ) for result in results: if result.key is not None: result.key = self._to_object(result.key) diff --git a/hazelcast/serialization/__init__.py b/hazelcast/serialization/__init__.py index c9ea28baa9..35a1e18983 100644 --- a/hazelcast/serialization/__init__.py +++ b/hazelcast/serialization/__init__.py @@ -1,5 +1,6 @@ """ Serialization Module """ + from hazelcast.serialization.bits import * from hazelcast.serialization.service import SerializationServiceV1 diff --git a/hazelcast/serialization/api.py b/hazelcast/serialization/api.py index 6b323b7930..454c235d62 100644 --- a/hazelcast/serialization/api.py +++ b/hazelcast/serialization/api.py @@ -1,6 +1,7 @@ """ User API for Serialization. 
""" + import abc import datetime import decimal diff --git a/hazelcast/serialization/serialization_const.py b/hazelcast/serialization/serialization_const.py index 3af96c37a0..182ffa5cfb 100644 --- a/hazelcast/serialization/serialization_const.py +++ b/hazelcast/serialization/serialization_const.py @@ -1,6 +1,7 @@ """ Serialization type ids """ + # Serialization Constants CONSTANT_TYPE_NULL = 0 CONSTANT_TYPE_PORTABLE = -1 diff --git a/tests/integration/backward_compatible/serialization/serializers_test.py b/tests/integration/backward_compatible/serialization/serializers_test.py index 246c9d16d8..cc2688a3dc 100644 --- a/tests/integration/backward_compatible/serialization/serializers_test.py +++ b/tests/integration/backward_compatible/serialization/serializers_test.py @@ -132,7 +132,7 @@ def test_emoji(self): self.assertEqual(value, response) def test_utf_chars(self): - value = "\u0040\u0041\u01DF\u06A0\u12E0\u1D306" + value = "\u0040\u0041\u01df\u06a0\u12e0\u1d306" self.map.set("key", value) self.assertEqual(value, self.map.get("key")) response = self.get_from_server() @@ -359,7 +359,9 @@ def test_double_array_from_server(self): self.assertEqual([3123.0, -123.0], self.map.get("key")) def test_string_array_from_server(self): - self.assertTrue(self.set_on_server('Java.to(["hey", "1βšδΈ­πŸ’¦2πŸ˜­β€πŸ™†πŸ˜”5"], "java.lang.String[]")')) + self.assertTrue( + self.set_on_server('Java.to(["hey", "1βšδΈ­πŸ’¦2πŸ˜­β€πŸ™†πŸ˜”5"], "java.lang.String[]")') + ) self.assertEqual(["hey", "1βšδΈ­πŸ’¦2πŸ˜­β€πŸ™†πŸ˜”5"], self.map.get("key")) def test_date_from_server(self): diff --git a/tests/integration/dbapi/dbapi20.py b/tests/integration/dbapi/dbapi20.py index d23d0bc70d..723028421c 100644 --- a/tests/integration/dbapi/dbapi20.py +++ b/tests/integration/dbapi/dbapi20.py @@ -1,14 +1,14 @@ #!/usr/bin/env python -""" Python DB API 2.0 driver compliance unit test suite. - - This software is Public Domain and may be used without restrictions. 
+"""Python DB API 2.0 driver compliance unit test suite. - "Now we have booze and barflies entering the discussion, plus rumours of - DBAs on drugs... and I won't tell you what flashes through my mind each - time I read the subject line with 'Anal Compliance' in it. All around - this is turning out to be a thoroughly unwholesome unit test." + This software is Public Domain and may be used without restrictions. - -- Ian Bicking +"Now we have booze and barflies entering the discussion, plus rumours of + DBAs on drugs... and I won't tell you what flashes through my mind each + time I read the subject line with 'Anal Compliance' in it. All around + this is turning out to be a thoroughly unwholesome unit test." + + -- Ian Bicking """ __version__ = "1.15.0" diff --git a/tests/unit/serialization/serializers_test.py b/tests/unit/serialization/serializers_test.py index 6b5813753b..889966e7b6 100644 --- a/tests/unit/serialization/serializers_test.py +++ b/tests/unit/serialization/serializers_test.py @@ -51,7 +51,7 @@ def test_string(self): self.validate("client") self.validate("1βšδΈ­πŸ’¦2πŸ˜­β€πŸ™†πŸ˜”5") self.validate("IΓ±tΓ«rnΓ’tiΓ΄nΓ lizΓ¦tiΓΈn") - self.validate("\u0040\u0041\u01DF\u06A0\u12E0\u1D30") + self.validate("\u0040\u0041\u01df\u06a0\u12e0\u1d30") def test_bytearray(self): self.validate(bytearray("abc".encode())) From effdc7b8ba74e6723cf8e693bf8a3a84b5bef3eb Mon Sep 17 00:00:00 2001 From: Yuce Tekol Date: Fri, 12 Dec 2025 11:31:14 +0300 Subject: [PATCH 2/3] Black --- hazelcast/internal/asyncio_proxy/vector_collection.py | 6 +++--- hazelcast/proxy/vector_collection.py | 6 +++--- .../backward_compatible/serialization/serializers_test.py | 4 +--- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/hazelcast/internal/asyncio_proxy/vector_collection.py b/hazelcast/internal/asyncio_proxy/vector_collection.py index 61bc04055f..ec7b1304ff 100644 --- a/hazelcast/internal/asyncio_proxy/vector_collection.py +++ 
b/hazelcast/internal/asyncio_proxy/vector_collection.py @@ -341,9 +341,9 @@ def _search_near_vector_internal( hints: Dict[str, str] = None ) -> asyncio.Future[List[SearchResult]]: def handler(message): - results: List[SearchResult] = ( - vector_collection_search_near_vector_codec.decode_response(message) - ) + results: List[ + SearchResult + ] = vector_collection_search_near_vector_codec.decode_response(message) for result in results: if result.key is not None: result.key = self._to_object(result.key) diff --git a/hazelcast/proxy/vector_collection.py b/hazelcast/proxy/vector_collection.py index fef9a56ae2..424227a9e6 100644 --- a/hazelcast/proxy/vector_collection.py +++ b/hazelcast/proxy/vector_collection.py @@ -342,9 +342,9 @@ def _search_near_vector_internal( hints: Dict[str, str] = None ) -> Future[List[SearchResult]]: def handler(message): - results: List[SearchResult] = ( - vector_collection_search_near_vector_codec.decode_response(message) - ) + results: List[ + SearchResult + ] = vector_collection_search_near_vector_codec.decode_response(message) for result in results: if result.key is not None: result.key = self._to_object(result.key) diff --git a/tests/integration/backward_compatible/serialization/serializers_test.py b/tests/integration/backward_compatible/serialization/serializers_test.py index cc2688a3dc..297ec2f676 100644 --- a/tests/integration/backward_compatible/serialization/serializers_test.py +++ b/tests/integration/backward_compatible/serialization/serializers_test.py @@ -359,9 +359,7 @@ def test_double_array_from_server(self): self.assertEqual([3123.0, -123.0], self.map.get("key")) def test_string_array_from_server(self): - self.assertTrue( - self.set_on_server('Java.to(["hey", "1βšδΈ­πŸ’¦2πŸ˜­β€πŸ™†πŸ˜”5"], "java.lang.String[]")') - ) + self.assertTrue(self.set_on_server('Java.to(["hey", "1βšδΈ­πŸ’¦2πŸ˜­β€πŸ™†πŸ˜”5"], "java.lang.String[]")')) self.assertEqual(["hey", "1βšδΈ­πŸ’¦2πŸ˜­β€πŸ™†πŸ˜”5"], self.map.get("key")) def 
test_date_from_server(self): From b5d80f17db17ed3a5079bc861a7f9ea8ae783e1e Mon Sep 17 00:00:00 2001 From: Yuce Tekol Date: Fri, 12 Dec 2025 11:46:57 +0300 Subject: [PATCH 3/3] Remove unnecessary formatting --- examples/map/map_portable_versioning_example.py | 1 - hazelcast/core.py | 1 - hazelcast/serialization/__init__.py | 1 - hazelcast/serialization/api.py | 1 - hazelcast/serialization/serialization_const.py | 1 - .../serialization/serializers_test.py | 2 +- tests/integration/dbapi/dbapi20.py | 16 ++++++++-------- tests/unit/serialization/serializers_test.py | 2 +- 8 files changed, 10 insertions(+), 15 deletions(-) diff --git a/examples/map/map_portable_versioning_example.py b/examples/map/map_portable_versioning_example.py index 1291a376b8..80d7c0a488 100644 --- a/examples/map/map_portable_versioning_example.py +++ b/examples/map/map_portable_versioning_example.py @@ -91,7 +91,6 @@ def __eq__(self, other): # However, having a version that changes across incompatible field types such as int and String will cause # a type error as members with older versions of the class tries to access it. We will demonstrate this below. - # Version3: Changed age field type from int to String. 
(Incompatible type change) class Employee3(Portable): FACTORY_ID = 666 diff --git a/hazelcast/core.py b/hazelcast/core.py index 9bc2bc62c5..3443759e44 100644 --- a/hazelcast/core.py +++ b/hazelcast/core.py @@ -1,5 +1,4 @@ """Hazelcast Core objects and constants.""" - import json import typing import uuid diff --git a/hazelcast/serialization/__init__.py b/hazelcast/serialization/__init__.py index 35a1e18983..c9ea28baa9 100644 --- a/hazelcast/serialization/__init__.py +++ b/hazelcast/serialization/__init__.py @@ -1,6 +1,5 @@ """ Serialization Module """ - from hazelcast.serialization.bits import * from hazelcast.serialization.service import SerializationServiceV1 diff --git a/hazelcast/serialization/api.py b/hazelcast/serialization/api.py index 454c235d62..6b323b7930 100644 --- a/hazelcast/serialization/api.py +++ b/hazelcast/serialization/api.py @@ -1,7 +1,6 @@ """ User API for Serialization. """ - import abc import datetime import decimal diff --git a/hazelcast/serialization/serialization_const.py b/hazelcast/serialization/serialization_const.py index 182ffa5cfb..3af96c37a0 100644 --- a/hazelcast/serialization/serialization_const.py +++ b/hazelcast/serialization/serialization_const.py @@ -1,7 +1,6 @@ """ Serialization type ids """ - # Serialization Constants CONSTANT_TYPE_NULL = 0 CONSTANT_TYPE_PORTABLE = -1 diff --git a/tests/integration/backward_compatible/serialization/serializers_test.py b/tests/integration/backward_compatible/serialization/serializers_test.py index 297ec2f676..246c9d16d8 100644 --- a/tests/integration/backward_compatible/serialization/serializers_test.py +++ b/tests/integration/backward_compatible/serialization/serializers_test.py @@ -132,7 +132,7 @@ def test_emoji(self): self.assertEqual(value, response) def test_utf_chars(self): - value = "\u0040\u0041\u01df\u06a0\u12e0\u1d306" + value = "\u0040\u0041\u01DF\u06A0\u12E0\u1D306" self.map.set("key", value) self.assertEqual(value, self.map.get("key")) response = self.get_from_server() diff --git 
a/tests/integration/dbapi/dbapi20.py b/tests/integration/dbapi/dbapi20.py index 723028421c..d23d0bc70d 100644 --- a/tests/integration/dbapi/dbapi20.py +++ b/tests/integration/dbapi/dbapi20.py @@ -1,14 +1,14 @@ #!/usr/bin/env python -"""Python DB API 2.0 driver compliance unit test suite. +""" Python DB API 2.0 driver compliance unit test suite. + + This software is Public Domain and may be used without restrictions. - This software is Public Domain and may be used without restrictions. + "Now we have booze and barflies entering the discussion, plus rumours of + DBAs on drugs... and I won't tell you what flashes through my mind each + time I read the subject line with 'Anal Compliance' in it. All around + this is turning out to be a thoroughly unwholesome unit test." -"Now we have booze and barflies entering the discussion, plus rumours of - DBAs on drugs... and I won't tell you what flashes through my mind each - time I read the subject line with 'Anal Compliance' in it. All around - this is turning out to be a thoroughly unwholesome unit test." - - -- Ian Bicking + -- Ian Bicking """ __version__ = "1.15.0" diff --git a/tests/unit/serialization/serializers_test.py b/tests/unit/serialization/serializers_test.py index 889966e7b6..6b5813753b 100644 --- a/tests/unit/serialization/serializers_test.py +++ b/tests/unit/serialization/serializers_test.py @@ -51,7 +51,7 @@ def test_string(self): self.validate("client") self.validate("1βšδΈ­πŸ’¦2πŸ˜­β€πŸ™†πŸ˜”5") self.validate("IΓ±tΓ«rnΓ’tiΓ΄nΓ lizΓ¦tiΓΈn") - self.validate("\u0040\u0041\u01df\u06a0\u12e0\u1d30") + self.validate("\u0040\u0041\u01DF\u06A0\u12E0\u1D30") def test_bytearray(self): self.validate(bytearray("abc".encode()))