Enable Array writes (#210)
* Enable Array writes by adding specific handling for it.
* Update documentation
* Add debug info regarding false read_after_write
* Compare serialized values in attr_read/write/read - enable Array read in scan_device for zigpy>=0.58.1
* Ensure data read reported in event is jsonable

---------

Co-authored-by: Tomas Bedrich <tomas@bbcontrol.cz>
Co-authored-by: mdeweerd <mdeweerd@users.noreply.github.com>
3 people committed Oct 27, 2023
1 parent 33d6086 commit b2ab02a
Showing 4 changed files with 173 additions and 20 deletions.
59 changes: 59 additions & 0 deletions README.md
@@ -793,6 +793,65 @@ data:
  write_if_equal: false
```

In case a ZCL Array type needs to be written, `attr_val` needs to be provided
as a raw sequence of bytes, i.e. the user is responsible for generating a
sequence that complies with the ZCL specification.\
The following example illustrates the configuration of a Ubisys C4 (see
[the device manual](https://www.ubisys.de/wp-content/uploads/ubisys-c4-technical-reference.pdf)
\- section 7.8.5.2. InputActions Attribute - example):

```yaml
service: zha_toolkit.attr_write
data:
  ieee: 00:1f:ee:00:00:aa:aa:aa
  endpoint: 232
  cluster: 64512
  attribute: 1
  attr_type: 0x48
  # For the array type (type 0x48):
  # - The first byte is the type of the items, here 65 or 0x41: octet string.
  # - The second and third byte compose the number of elements (little endian).
  #   So here `4, 0` is 0x0004: there are four octet strings in the array.
  # - All the octet strings in this example have a length of 6.
  attr_val: [65, 4, 0, 6, 0, 13, 1, 6, 0, 2, 6, 1, 13, 2, 6, 0, 2, 6, 2, 13, 3, 6,
    0, 2, 6, 3, 13, 4, 6, 0, 2]
  read_before_write: false
  read_after_write: false
  use_cache: false
```
Decoded with tshark/wireshark, the above write results in the following packet:
```plaintext
ZigBee Cluster Library Frame, Command: Write Attributes, Seq: 40
Frame Control Field: Profile-wide (0x00)
.... ..00 = Frame Type: Profile-wide (0x0)
.... .0.. = Manufacturer Specific: False
.... 0... = Direction: Client to Server
...0 .... = Disable Default Response: False
Sequence Number: 40
Command: Write Attributes (0x02)
Attribute Field
Attribute: Unknown (0xfde8)
Data Type: Array (0x48)
Elements Type: Octet String (0x41)
Elements Number: 4
Element #1, Octets: 00:0d:01:06:00:02
Octet String: 00:0d:01:06:00:02
Element #2, Octets: 01:0d:02:06:00:02
Octet String: 01:0d:02:06:00:02
Element #3, Octets: 02:0d:03:06:00:02
Octet String: 02:0d:03:06:00:02
Element #4, Octets: 03:0d:04:06:00:02
Octet String: 03:0d:04:06:00:02

Decrypted ZigBee Payload (45 bytes) - only Array related data is shown:
0000 48 41 04 @........(...HA.
0010 00 06 00 0d 01 06 00 02 06 01 0d 02 06 00 02 06 ................
0020 02 0d 03 06 00 02 06 03 0d 04 06 00 02 .............
```
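
The `attr_val` payload above does not have to be composed by hand. The sketch
below shows one way to assemble it from the individual octet strings; the
helper name `zcl_array_of_octet_strings` is hypothetical and not part of
zha_toolkit, it merely mirrors the byte layout described in the comments above.

```python
# Hypothetical helper: build the raw attr_val list for a ZCL Array (type 0x48)
# whose elements are octet strings (element type 0x41).
def zcl_array_of_octet_strings(elements: list[bytes]) -> list[int]:
    out = [0x41]  # element type: octet string
    out += list(len(elements).to_bytes(2, "little"))  # element count, little endian
    for element in elements:
        out += [len(element), *element]  # each octet string: length byte + payload
    return out


# The four 6-byte InputActions records from the Ubisys C4 example above:
records = [
    bytes([0x00, 0x0D, 0x01, 0x06, 0x00, 0x02]),
    bytes([0x01, 0x0D, 0x02, 0x06, 0x00, 0x02]),
    bytes([0x02, 0x0D, 0x03, 0x06, 0x00, 0x02]),
    bytes([0x03, 0x0D, 0x04, 0x06, 0x00, 0x02]),
]

print(zcl_array_of_octet_strings(records))
# [65, 4, 0, 6, 0, 13, 1, 6, 0, 2, 6, 1, 13, 2, 6, 0, 2, ...]
```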

Using the symbolic name of the attribute, and automatic endpoint selection.

7 changes: 4 additions & 3 deletions custom_components/zha_toolkit/scan_device.py
@@ -219,9 +219,10 @@ async def discover_attributes_extended(cluster, manufacturer=None, tries=3):
            attr_type = foundation.DATA_TYPES.get(attr_rec.datatype)
            access_acl = t.uint8_t(attr_rec.acl)

            if attr_rec.datatype not in [0x48] and (
                access_acl & foundation.AttributeAccessControl.READ != 0
            ):
            # Note: reading back Array type was fixed in zigpy 0.58.1 .
            if (
                not u.is_zigpy_ge("0.58.1") or attr_rec.datatype not in [0x48]
            ) and (access_acl & foundation.AttributeAccessControl.READ != 0):
                to_read.append(attr_id)

            attr_type_hex = f"0x{attr_rec.datatype:02x}"
67 changes: 65 additions & 2 deletions custom_components/zha_toolkit/utils.py
@@ -20,6 +20,8 @@

LOGGER = logging.getLogger(__name__)

# pylint: disable=too-many-lines

HA_VERSION = get_distribution("homeassistant").version
ZIGPY_VERSION = get_distribution("zigpy").version

@@ -444,6 +446,30 @@ def get_cluster_from_params(
    return cluster


def dict_to_jsonable(src_dict):
    result = {}
    if isJsonable(src_dict):
        return src_dict
    for key, value in src_dict.items():
        if not isJsonable(value):
            LOGGER.debug(
                "Can't convert %r to JSON, serializing if possible.", value
            )
            if callable(getattr(value, "serialize", None)):
                # Serialization results in "bytes"
                value = value.serialize()
            if isinstance(value, bytes):
                # "bytes" is not compatible with json, get a "string"
                value = str(value, encoding="ascii")
            else:
                # Anything else: get a textual representation
                value = repr(value)

        result[key] = value

    return result


def write_json_to_file(
    data, subdir, fname, desc, listener=None, normalize_name=False
):
@@ -603,16 +629,53 @@ def attr_encode(attr_val_in, attr_type): # noqa C901
            )

        attr_obj = f.TypeValue(attr_type, t.LVBytes(attr_val_in))
    elif attr_type == 0x48:  # Array, (+Bag?, Set?)
        # TODO: apply to Bag and Set ?
        #
        # Array List of bytes currently is:
        #   First byte: type of array items
        #   Next bytes: bytes for array items
        #
        # Maybe in future accept:
        #   Specifying array item type in 'attr_items_type:'
        #   (/detect items type from read).

        if isinstance(attr_val_in, str):
            attr_val_in = str.encode(attr_val_in[1:])

        # Determine value to compare read values
        # with the value (to be) written [see attr_write].
        compare_val = t.List[t.uint8_t](attr_val_in)

        # Get type of array items
        array_item_type = attr_val_in[0]

        # Get body / array items.
        array_body = t.SerializableBytes(bytes(attr_val_in[1:]))

        # Construct value to write as specific zigpy object
        attr_obj = f.TypeValue(attr_type, f.Array(array_item_type, array_body))
    elif attr_type == 0xFF or attr_type is None:
        compare_val = str2int(attr_val_in)
        # This should not happen ideally
        attr_obj = f.TypeValue(attr_type, t.LVBytes(compare_val))
    else:
        # Try to apply conversion using foundation DATA_TYPES table
        # Note: this is not perfect and specific conversions may be needed.
        data_type = f.DATA_TYPES[attr_type][1]
        LOGGER.debug(f"Data type '{data_type}' for attr type {attr_type}")
        compare_val = data_type(str2int(attr_val_in))
        attr_obj = f.TypeValue(attr_type, data_type(compare_val))
        if isinstance(attr_val_in, list):
            # Without length byte after serialisation:
            compare_val = t.List[t.uint8_t](attr_val_in)
            # With length byte after serialisation:
            # compare_val = t.LVBytes(attr_val_in)

            attr_obj = data_type(compare_val)
            # Not using : attr_obj = data_type(attr_type, compare_val)
            # which may add extra bytes
        else:
            compare_val = data_type(str2int(attr_val_in))
            attr_obj = data_type(attr_type, compare_val)
    LOGGER.debug(
        "Converted %s to %s - will compare to %s - Type: 0x%02X",
        attr_val_in,
60 changes: 45 additions & 15 deletions custom_components/zha_toolkit/zcl_attr.py
@@ -1,7 +1,6 @@
from __future__ import annotations

import asyncio
import importlib
import logging

from homeassistant.helpers.template import Template
@@ -346,10 +345,7 @@ async def attr_write( # noqa: C901
    attr_type = params[p.ATTR_TYPE]

    result_read = None
    if (
        params[p.READ_BEFORE_WRITE]
        or (attr_read_list and cmd == S.ATTR_READ)
    ):
    if params[p.READ_BEFORE_WRITE] or (attr_read_list and cmd == S.ATTR_READ):
        if use_cache > 0:
            # Try to get value from cache
            if attr_id in cluster._attr_cache:
@@ -427,16 +423,30 @@ async def attr_write( # noqa: C901
    attr = f.Attribute(attr_id, value=attr_val)
    attr_write_list.append(attr)  # Write list

    # Use serialize to compare if the compare_val allows it
    use_serialize = callable(getattr(compare_val, "serialize", None))

    if attr_type is not None:
        event_data["attr_type"] = f"0x{attr_type:02X}"

    # True if value that should be written is the equal to the read one
    write_is_equal = (
        (params[p.READ_BEFORE_WRITE])
        and (len(attr_write_list) != 0)
        and compare_val is not None
        and (
            (attr_id in result_read[0])  # type:ignore[index]
            and (result_read[0][attr_id] == compare_val)  # type:ignore[index]
            and (
                result_read[0][  # type:ignore[index]
                    attr_id
                ].serialize()  # type:ignore[union-attr]
                == compare_val.serialize()
                if use_serialize
                else result_read[0][  # type:ignore[index]
                    attr_id
                ]  # type:ignore[union-attr]
                == compare_val
            )
        )
    )

@@ -491,11 +501,29 @@ async def attr_write( # noqa: C901
f"Reading attr result (attrs, status): {result_read!r}"
)
# read_is_equal = (result_read[0][attr_id] == compare_val)
success = (
success
and (len(result_read[1]) == 0 and len(result_read[0]) == 1)
and (result_read[0][attr_id] == compare_val)
success = success and (
len(result_read[1]) == 0 and len(result_read[0]) == 1
)
if success and compare_val is not None:
if (
result_read[0][attr_id].serialize()
!= compare_val.serialize()
if use_serialize
else result_read[0][attr_id] != compare_val
):
success = False
msg = "Read does not match expected: {!r} <> {!r}".format(
result_read[0][attr_id].serialize()
if use_serialize
else result_read[0][attr_id],
compare_val.serialize()
if use_serialize
else compare_val,
)
LOGGER.warning(msg)
if "warnings" not in event_data:
event_data["warnings"] = []
event_data["warnings"].append(msg)

if result_read is not None:
event_data["result_read"] = result_read
@@ -608,11 +636,13 @@ async def attr_write( # noqa: C901
        listener=listener,
    )

    importlib.reload(u)
    if "result_read" in event_data and not u.isJsonable(
        event_data["result_read"]
    ):
        event_data["result_read"] = repr(event_data["result_read"])
    for key in ["read_before", "result_read"]:
        if key not in event_data:
            continue
        event_data[key] = (
            u.dict_to_jsonable(event_data[key][0]),
            event_data[key][1],
        )

    # For internal use
    return result_read