From 8fe1ab56afdceb707d037db6e7a4056cec24d79e Mon Sep 17 00:00:00 2001 From: Nick DeBoom Date: Tue, 17 Feb 2026 13:56:24 -0600 Subject: [PATCH] Matter Camera: Support additional setStream attributes The camera sub-driver lacks handling for the resolution, type, label, and viewport arguments of the videoStreamSettings.setStream command. This change checks if any of these attributes have changed and deallocates the stream before reallocating a new stream with the updated parameters. Additionally, the viewport aspect of videoStreamSettings, which was not previously being set correctly, is now set based on the DPTZStreams attribute of the CAVSULM cluster. --- .../camera_handlers/attribute_handlers.lua | 94 ++- .../camera_handlers/capability_handlers.lua | 148 +++- .../camera/camera_utils/fields.lua | 16 +- .../sub_drivers/camera/camera_utils/utils.lua | 3 +- .../src/sub_drivers/camera/init.lua | 5 +- .../src/test/test_matter_camera.lua | 713 +++++++++++++++++- 6 files changed, 937 insertions(+), 42 deletions(-) diff --git a/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_handlers/attribute_handlers.lua b/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_handlers/attribute_handlers.lua index df99d50f94..140ba6d313 100644 --- a/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_handlers/attribute_handlers.lua +++ b/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_handlers/attribute_handlers.lua @@ -182,14 +182,35 @@ end function CameraAttributeHandlers.allocated_video_streams_handler(driver, device, ib, response) if not ib.data.elements then return end + + local dptz_viewports = device:get_field(camera_fields.DPTZ_VIEWPORTS) or {} local streams = {} + + local previous_streams = device:get_latest_state( + camera_fields.profile_components.main, + capabilities.videoStreamSettings.ID, + capabilities.videoStreamSettings.videoStreams.NAME + ) or {} + + local previous_stream_labels = {} + + for _, stream in 
pairs(previous_streams) do + previous_stream_labels[stream.streamId] = stream.data.label + end + for i, v in ipairs(ib.data.elements) do local stream = v.elements + local stream_id = stream.video_stream_id.value + + -- Use label from existing capability state, if available + local capability_label = previous_stream_labels[stream_id] + local video_stream = { - streamId = stream.video_stream_id.value, + streamId = stream_id, data = { - label = "Stream " .. i, - type = stream.stream_usage.value == clusters.Global.types.StreamUsageEnum.LIVE_VIEW and "liveStream" or "clipRecording", + label = capability_label or "Stream " .. i, + type = stream.stream_usage.value == + clusters.Global.types.StreamUsageEnum.LIVE_VIEW and "liveStream" or "clipRecording", resolution = { width = stream.min_resolution.elements.width.value, height = stream.min_resolution.elements.height.value, @@ -197,21 +218,31 @@ function CameraAttributeHandlers.allocated_video_streams_handler(driver, device, } } } - local viewport = device:get_field(camera_fields.VIEWPORT) - if viewport then - video_stream.data.viewport = viewport + + if dptz_viewports[stream_id] ~= nil then + video_stream.data.viewport = dptz_viewports[stream_id] + else + video_stream.data.viewport = { + upperLeftVertex = { x = 0, y = 0 }, + lowerRightVertex = { + x = stream.min_resolution.elements.width.value, + y = stream.min_resolution.elements.height.value + } + } end - if camera_utils.feature_supported(device, clusters.CameraAvStreamManagement.ID, clusters.CameraAvStreamManagement.types.Feature.WATERMARK) then + + if camera_utils.feature_supported(device, clusters.CameraAvStreamManagement.ID, + clusters.CameraAvStreamManagement.types.Feature.WATERMARK) then video_stream.data.watermark = stream.watermark_enabled.value and "enabled" or "disabled" end - if camera_utils.feature_supported(device, clusters.CameraAvStreamManagement.ID, clusters.CameraAvStreamManagement.types.Feature.ON_SCREEN_DISPLAY) then + if 
camera_utils.feature_supported(device, clusters.CameraAvStreamManagement.ID, + clusters.CameraAvStreamManagement.types.Feature.ON_SCREEN_DISPLAY) then video_stream.data.onScreenDisplay = stream.osd_enabled.value and "enabled" or "disabled" end table.insert(streams, video_stream) end - if #streams > 0 then - device:emit_event_for_endpoint(ib, capabilities.videoStreamSettings.videoStreams(streams)) - end + + device:emit_event_for_endpoint(ib, capabilities.videoStreamSettings.videoStreams(streams)) end function CameraAttributeHandlers.viewport_handler(driver, device, ib, response) @@ -221,6 +252,45 @@ function CameraAttributeHandlers.viewport_handler(driver, device, ib, response) })) end +function CameraAttributeHandlers.dptz_streams_handler(driver, device, ib, response) + if not ib.data.elements then return end + + local dptz_viewports = {} + for _, v in ipairs(ib.data.elements) do + local dptz_struct = v.elements + local stream_id = dptz_struct.video_stream_id.value + local viewport = dptz_struct.viewport.elements + + dptz_viewports[stream_id] = { + upperLeftVertex = { x = viewport.x1.value, y = viewport.y1.value }, + lowerRightVertex = { x = viewport.x2.value, y = viewport.y2.value } + } + end + + device:set_field(camera_fields.DPTZ_VIEWPORTS, dptz_viewports) + + local current_streams = device:get_latest_state( + camera_fields.profile_components.main, + capabilities.videoStreamSettings.ID, + capabilities.videoStreamSettings.videoStreams.NAME + ) or {} + local updated_streams = {} + for _, stream in pairs(current_streams) do + local updated_stream = { + streamId = stream.streamId, + data = stream.data + } + if dptz_viewports[stream.streamId] ~= nil then + updated_stream.data.viewport = dptz_viewports[stream.streamId] + end + table.insert(updated_streams, updated_stream) + end + + if #updated_streams > 0 then + device:emit_event_for_endpoint(ib, capabilities.videoStreamSettings.videoStreams(updated_streams)) + end +end + function 
CameraAttributeHandlers.ptz_position_handler(driver, device, ib, response) local ptz_map = camera_utils.get_ptz_map(device) local emit_event = function(idx, value) @@ -399,4 +469,4 @@ function CameraAttributeHandlers.camera_av_stream_management_attribute_list_hand camera_cfg.match_profile(device, status_light_enabled_present, status_light_brightness_present) end -return CameraAttributeHandlers \ No newline at end of file +return CameraAttributeHandlers diff --git a/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_handlers/capability_handlers.lua b/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_handlers/capability_handlers.lua index 09134a9757..443c83956b 100644 --- a/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_handlers/capability_handlers.lua +++ b/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_handlers/capability_handlers.lua @@ -336,17 +336,153 @@ function CameraCapabilityHandlers.handle_set_selected_sound(driver, device, cmd) device:send(clusters.Chime.attributes.SelectedChime:write(device, endpoint_id, cmd.args.id)) end +local function update_viewport_or_label(device, current_streams, streamId, label, viewport) + current_streams = current_streams or {} + for _, stream in ipairs(current_streams) do + if stream.streamId == streamId then + if label ~= nil then stream.data.label = label end + if viewport ~= nil then stream.data.viewport = viewport end + break + end + end + local endpoint_id = device:component_to_endpoint() + device:emit_event_for_endpoint(endpoint_id, capabilities.videoStreamSettings.videoStreams(current_streams)) +end + function CameraCapabilityHandlers.handle_set_stream(driver, device, cmd) local endpoint_id = device:component_to_endpoint(cmd.component) + local watermark_enabled, on_screen_display_enabled - if camera_utils.feature_supported(device, clusters.CameraAvStreamManagement.ID, clusters.CameraAvStreamManagement.types.Feature.WATERMARK) then - watermark_enabled = 
cmd.args.watermark == "enabled" + if camera_utils.feature_supported(device, clusters.CameraAvStreamManagement.ID, + clusters.CameraAvStreamManagement.types.Feature.WATERMARK) then + if cmd.args.watermark ~= nil then + watermark_enabled = cmd.args.watermark == "enabled" + end end - if camera_utils.feature_supported(device, clusters.CameraAvStreamManagement.ID, clusters.CameraAvStreamManagement.types.Feature.ON_SCREEN_DISPLAY) then - on_screen_display_enabled = cmd.args.onScreenDisplay == "enabled" + if camera_utils.feature_supported(device, clusters.CameraAvStreamManagement.ID, + clusters.CameraAvStreamManagement.types.Feature.ON_SCREEN_DISPLAY) then + if cmd.args.onScreenDisplay ~= nil then + on_screen_display_enabled = cmd.args.onScreenDisplay == "enabled" + end end - device:send(clusters.CameraAvStreamManagement.server.commands.VideoStreamModify(device, endpoint_id, - cmd.args.streamId, watermark_enabled, on_screen_display_enabled + + local current_streams = device:get_latest_state( + camera_fields.profile_components.main, + capabilities.videoStreamSettings.ID, + capabilities.videoStreamSettings.videoStreams.NAME + ) or {} + local current_stream + for _, stream in pairs(current_streams) do + if stream.streamId == cmd.args.streamId then + current_stream = stream.data + break + end + end + + local needs_reallocation = false + if current_stream ~= nil and current_stream.type ~= cmd.args.type then + needs_reallocation = true + elseif current_stream ~= nil and cmd.args.resolution ~= nil then + if current_stream.resolution.width ~= cmd.args.resolution.width or + current_stream.resolution.height ~= cmd.args.resolution.height or + current_stream.resolution.fps ~= cmd.args.resolution.fps then + needs_reallocation = true + end + elseif current_stream == nil and (cmd.args.type ~= nil or cmd.args.resolution ~= nil) then + needs_reallocation = true + end + + local viewport_changed = false + if cmd.args.viewport ~= nil and + camera_utils.feature_supported(device, 
clusters.CameraAvSettingsUserLevelManagement.ID, + clusters.CameraAvSettingsUserLevelManagement.types.Feature.DIGITALPTZ) then + if current_stream ~= nil and current_stream.viewport ~= nil then + if current_stream.viewport.upperLeftVertex.x ~= cmd.args.viewport.upperLeftVertex.x or + current_stream.viewport.upperLeftVertex.y ~= cmd.args.viewport.upperLeftVertex.y or + current_stream.viewport.lowerRightVertex.x ~= cmd.args.viewport.lowerRightVertex.x or + current_stream.viewport.lowerRightVertex.y ~= cmd.args.viewport.lowerRightVertex.y then + viewport_changed = true + end + elseif current_stream == nil or current_stream.viewport == nil then + viewport_changed = true + end + + if viewport_changed then + device:send(clusters.CameraAvSettingsUserLevelManagement.server.commands.DPTZSetViewport(device, endpoint_id, + cmd.args.streamId, + clusters.Global.types.ViewportStruct({ + x1 = cmd.args.viewport.upperLeftVertex.x, + x2 = cmd.args.viewport.lowerRightVertex.x, + y1 = cmd.args.viewport.upperLeftVertex.y, + y2 = cmd.args.viewport.lowerRightVertex.y + }) + )) + end + end + + local label_changed = cmd.args.label ~= nil and cmd.args.label ~= current_stream.label + + if viewport_changed or label_changed then + update_viewport_or_label(device, current_streams, cmd.args.streamId, cmd.args.label, cmd.args.viewport) + end + + if not needs_reallocation then + local watermark_changed = watermark_enabled ~= nil and current_stream.watermark ~= nil and + ((watermark_enabled and current_stream.watermark == "disabled") or + (not watermark_enabled and current_stream.watermark == "enabled")) + local on_screen_display_changed = on_screen_display_enabled ~= nil and current_stream.onScreenDisplay ~= nil and + ((on_screen_display_enabled and current_stream.onScreenDisplay == "disabled") or + (not on_screen_display_enabled and current_stream.onScreenDisplay == "enabled")) + if watermark_changed or on_screen_display_changed then + 
device:send(clusters.CameraAvStreamManagement.server.commands.VideoStreamModify( + device, endpoint_id, cmd.args.streamId, watermark_enabled, on_screen_display_enabled + )) + end + return + end + + device:send(clusters.CameraAvStreamManagement.server.commands.VideoStreamDeallocate( + device, endpoint_id, cmd.args.streamId + )) + + local stream_usage = cmd.args.type == "liveStream" and + clusters.Global.types.StreamUsageEnum.LIVE_VIEW or clusters.Global.types.StreamUsageEnum.RECORDING + + local min_resolution, max_resolution + if cmd.args.resolution ~= nil then + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({ + width = cmd.args.resolution.width, + height = cmd.args.resolution.height + }) + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({ + width = cmd.args.resolution.width, + height = cmd.args.resolution.height + }) + end + + if watermark_enabled == nil then + watermark_enabled = camera_fields.video_stream_defaults.watermark_enabled + end + + if on_screen_display_enabled == nil then + on_screen_display_enabled = camera_fields.video_stream_defaults.on_screen_display_enabled + end + + -- Use the same resolution (if available) for MinResolution and MaxResolution to force the server to allocate the + -- stream with the desired resolution. 
+ device:send(clusters.CameraAvStreamManagement.server.commands.VideoStreamAllocate(device, endpoint_id, + stream_usage, + camera_fields.video_stream_defaults.codec, + camera_fields.video_stream_defaults.min_frame_rate, + math.min(device:get_field(camera_fields.MAX_FRAMES_PER_SECOND) or camera_fields.video_stream_defaults.max_frame_rate, + camera_fields.video_stream_defaults.max_frame_rate), + min_resolution or device:get_field(camera_fields.MIN_RESOLUTION) or camera_fields.video_stream_defaults.min_resolution, + max_resolution or device:get_field(camera_fields.MAX_RESOLUTION) or camera_fields.video_stream_defaults.max_resolution, + camera_fields.video_stream_defaults.min_bitrate, + camera_fields.video_stream_defaults.max_bitrate, + camera_fields.video_stream_defaults.key_frame_interval, + watermark_enabled, + on_screen_display_enabled )) end diff --git a/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_utils/fields.lua b/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_utils/fields.lua index 7598b89893..000008fa51 100644 --- a/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_utils/fields.lua +++ b/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_utils/fields.lua @@ -13,7 +13,7 @@ CameraFields.SUPPORTED_RESOLUTIONS = "__supported_resolutions" CameraFields.MAX_RESOLUTION = "__max_resolution" CameraFields.MIN_RESOLUTION = "__min_resolution" CameraFields.TRIGGERED_ZONES = "__triggered_zones" -CameraFields.VIEWPORT = "__viewport" +CameraFields.DPTZ_VIEWPORTS = "__dptz_viewports" CameraFields.PAN_IDX = "PAN" CameraFields.TILT_IDX = "TILT" @@ -47,4 +47,18 @@ CameraFields.ABS_ZOOM_MIN = 1 CameraFields.ABS_VOL_MAX = 254.0 CameraFields.ABS_VOL_MIN = 0.0 +-- Define defaults for allocating new streams. Note that these are the same values used by the hub. 
+CameraFields.video_stream_defaults = { + codec = clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + min_frame_rate = 30, + max_frame_rate = 60, + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 320, height = 240}), + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1920, height = 1080}), + min_bitrate = 10000, + max_bitrate = 2000000, + key_frame_interval = 4000, + watermark_enabled = false, + on_screen_display_enabled = false +} + return CameraFields diff --git a/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_utils/utils.lua b/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_utils/utils.lua index 1caa9737bb..12341f493e 100644 --- a/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_utils/utils.lua +++ b/drivers/SmartThings/matter-switch/src/sub_drivers/camera/camera_utils/utils.lua @@ -253,7 +253,8 @@ function CameraUtils.subscribe(device) clusters.CameraAvStreamManagement.attributes.RateDistortionTradeOffPoints, clusters.CameraAvStreamManagement.attributes.MaxEncodedPixelRate, clusters.CameraAvStreamManagement.attributes.VideoSensorParams, - clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams + clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams, + clusters.CameraAvSettingsUserLevelManagement.attributes.DPTZStreams }, [capabilities.zoneManagement.ID] = { clusters.ZoneManagement.attributes.MaxZones, diff --git a/drivers/SmartThings/matter-switch/src/sub_drivers/camera/init.lua b/drivers/SmartThings/matter-switch/src/sub_drivers/camera/init.lua index f13589ff41..179ed54742 100644 --- a/drivers/SmartThings/matter-switch/src/sub_drivers/camera/init.lua +++ b/drivers/SmartThings/matter-switch/src/sub_drivers/camera/init.lua @@ -108,14 +108,15 @@ local camera_handler = { [clusters.CameraAvSettingsUserLevelManagement.attributes.PanMax.ID] = 
attribute_handlers.pt_range_handler_factory(capabilities.mechanicalPanTiltZoom.panRange, camera_fields.pt_range_fields[camera_fields.PAN_IDX].max), [clusters.CameraAvSettingsUserLevelManagement.attributes.PanMin.ID] = attribute_handlers.pt_range_handler_factory(capabilities.mechanicalPanTiltZoom.panRange, camera_fields.pt_range_fields[camera_fields.PAN_IDX].min), [clusters.CameraAvSettingsUserLevelManagement.attributes.TiltMax.ID] = attribute_handlers.pt_range_handler_factory(capabilities.mechanicalPanTiltZoom.tiltRange, camera_fields.pt_range_fields[camera_fields.TILT_IDX].max), - [clusters.CameraAvSettingsUserLevelManagement.attributes.TiltMin.ID] = attribute_handlers.pt_range_handler_factory(capabilities.mechanicalPanTiltZoom.tiltRange, camera_fields.pt_range_fields[camera_fields.TILT_IDX].min) + [clusters.CameraAvSettingsUserLevelManagement.attributes.TiltMin.ID] = attribute_handlers.pt_range_handler_factory(capabilities.mechanicalPanTiltZoom.tiltRange, camera_fields.pt_range_fields[camera_fields.TILT_IDX].min), + [clusters.CameraAvSettingsUserLevelManagement.attributes.DPTZStreams.ID] = attribute_handlers.dptz_streams_handler }, [clusters.ZoneManagement.ID] = { [clusters.ZoneManagement.attributes.MaxZones.ID] = attribute_handlers.max_zones_handler, [clusters.ZoneManagement.attributes.Zones.ID] = attribute_handlers.zones_handler, [clusters.ZoneManagement.attributes.Triggers.ID] = attribute_handlers.triggers_handler, [clusters.ZoneManagement.attributes.SensitivityMax.ID] = attribute_handlers.sensitivity_max_handler, - [clusters.ZoneManagement.attributes.Sensitivity.ID] = attribute_handlers.sensitivity_handler, + [clusters.ZoneManagement.attributes.Sensitivity.ID] = attribute_handlers.sensitivity_handler }, [clusters.Chime.ID] = { [clusters.Chime.attributes.InstalledChimeSounds.ID] = attribute_handlers.installed_chime_sounds_handler, diff --git a/drivers/SmartThings/matter-switch/src/test/test_matter_camera.lua 
b/drivers/SmartThings/matter-switch/src/test/test_matter_camera.lua index ed856bd5da..2eee0b778e 100644 --- a/drivers/SmartThings/matter-switch/src/test/test_matter_camera.lua +++ b/drivers/SmartThings/matter-switch/src/test/test_matter_camera.lua @@ -46,7 +46,8 @@ local mock_device = test.mock_device.build_test_matter_device({ }, { cluster_id = clusters.CameraAvSettingsUserLevelManagement.ID, - feature_map = clusters.CameraAvSettingsUserLevelManagement.types.Feature.MECHANICAL_PAN | + feature_map = clusters.CameraAvSettingsUserLevelManagement.types.Feature.DIGITALPTZ | + clusters.CameraAvSettingsUserLevelManagement.types.Feature.MECHANICAL_PAN | clusters.CameraAvSettingsUserLevelManagement.types.Feature.MECHANICAL_TILT | clusters.CameraAvSettingsUserLevelManagement.types.Feature.MECHANICAL_ZOOM | clusters.CameraAvSettingsUserLevelManagement.types.Feature.MECHANICAL_PRESETS, @@ -201,6 +202,7 @@ local additional_subscribed_attributes = { clusters.CameraAvSettingsUserLevelManagement.attributes.PanMin, clusters.CameraAvSettingsUserLevelManagement.attributes.TiltMax, clusters.CameraAvSettingsUserLevelManagement.attributes.TiltMin, + clusters.CameraAvSettingsUserLevelManagement.attributes.DPTZStreams, clusters.Chime.attributes.InstalledChimeSounds, clusters.Chime.attributes.SelectedChime, clusters.ZoneManagement.attributes.MaxZones, @@ -1967,38 +1969,117 @@ test.register_coroutine_test( ) test.register_coroutine_test( - "Stream management commands should send the appropriate commands", + "setStream with label and viewport changes should emit capability event", function() update_device_profile() test.wait_for_events() + -- Set up an existing stream + test.socket.matter:__queue_receive({ + mock_device.id, + clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams:build_test_report_data( + mock_device, CAMERA_EP, { + clusters.CameraAvStreamManagement.types.VideoStreamStruct({ + video_stream_id = 3, + stream_usage = clusters.Global.types.StreamUsageEnum.LIVE_VIEW, 
+ video_codec = clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + min_frame_rate = 30, + max_frame_rate = 60, + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1920, height = 1080}), + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1920, height = 1080}), + min_bit_rate = 10000, + max_bit_rate = 10000, + key_frame_interval = 4000, + watermark_enabled = false, + osd_enabled = false, + reference_count = 0 + }) + } + ) + }) + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 3, + data = { + label = "Stream 1", + type = "liveStream", + resolution = { + width = 1920, + height = 1080, + fps = 30 + }, + viewport = { + upperLeftVertex = { x = 0, y = 0 }, + lowerRightVertex = { x = 1920, y = 1080 } + }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + } + })) + ) + test.wait_for_events() + -- Change label and viewport only test.socket.capability:__queue_receive({ mock_device.id, - { capability = "videoStreamSettings", component = "main", command = "setStream", args = { + { + capability = "videoStreamSettings", component = "main", command = "setStream", args = { 3, - "liveStream", - "Stream 3", - { width = 1920, height = 1080, fps = 30 }, - { upperLeftVertex = {x = 0, y = 0}, lowerRightVertex = {x = 1920, y = 1080} }, - "enabled", - "disabled" + "liveStream", -- type + "My Stream", -- label + { width = 1920, height = 1080, fps = 30 }, -- resolution + { upperLeftVertex = {x = 100, y = 100}, lowerRightVertex = {x = 1820, y = 980} }, -- viewport + "disabled", -- watermark + "disabled" -- onScreenDisplay }} }) + -- Should send DPTZSetViewport command test.socket.matter:__expect_send({ - mock_device.id, clusters.CameraAvStreamManagement.server.commands.VideoStreamModify(mock_device, CAMERA_EP, - 3, true, false + mock_device.id, 
clusters.CameraAvSettingsUserLevelManagement.server.commands.DPTZSetViewport(mock_device, CAMERA_EP, + 3, + clusters.Global.types.ViewportStruct({ + x1 = 100, + x2 = 1820, + y1 = 100, + y2 = 980 + }) ) }) + -- Should emit updated capability directly, no stream reallocation + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 3, + data = { + label = "My Stream", + type = "liveStream", + resolution = { + width = 1920, + height = 1080, + fps = 30 + }, + viewport = { + upperLeftVertex = { x = 100, y = 100 }, + lowerRightVertex = { x = 1820, y = 980 } + }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + } + })) + ) end, { - min_api_version = 19 + min_api_version = 19 } ) test.register_coroutine_test( - "Stream management setStream command should modify an existing stream", + "setStream with only watermark/OSD changes should use VideoStreamModify", function() update_device_profile() test.wait_for_events() + -- Set up an existing stream test.socket.matter:__queue_receive({ mock_device.id, clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams:build_test_report_data( @@ -2033,22 +2114,29 @@ test.register_coroutine_test( height = 360, fps = 30 }, + viewport = { + upperLeftVertex = { x = 0, y = 0 }, + lowerRightVertex = { x = 640, y = 360 } + }, watermark = "enabled", onScreenDisplay = "disabled" } } })) ) + test.wait_for_events() + -- Change watermark and OSD only test.socket.capability:__queue_receive({ mock_device.id, - { capability = "videoStreamSettings", component = "main", command = "setStream", args = { + { + capability = "videoStreamSettings", component = "main", command = "setStream", args = { 1, - "liveStream", - "Stream 1", - { width = 640, height = 360, fps = 30 }, - { upperLeftVertex = {x = 0, y = 0}, lowerRightVertex = {x = 640, y = 360} }, - "disabled", - "enabled" + "liveStream", -- type + "Stream 1", -- label + { width = 640, height = 
360, fps = 30 }, -- resolution + { upperLeftVertex = {x = 0, y = 0}, lowerRightVertex = {x = 640, y = 360} }, -- viewport + "disabled", -- watermark + "enabled" -- onScreenDisplay }} }) test.socket.matter:__expect_send({ @@ -2062,6 +2150,591 @@ test.register_coroutine_test( } ) +test.register_coroutine_test( + "setStream with only label change should emit capability event", + function() + update_device_profile() + test.wait_for_events() + -- Set up existing stream + test.socket.matter:__queue_receive({ + mock_device.id, + clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams:build_test_report_data( + mock_device, CAMERA_EP, { + clusters.CameraAvStreamManagement.types.VideoStreamStruct({ + video_stream_id = 2, + stream_usage = clusters.Global.types.StreamUsageEnum.RECORDING, + video_codec = clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + min_frame_rate = 15, + max_frame_rate = 30, + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1280, height = 720}), + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1280, height = 720}), + min_bit_rate = 10000, + max_bit_rate = 10000, + key_frame_interval = 4000, + watermark_enabled = false, + osd_enabled = false, + reference_count = 0 + }) + } + ) + }) + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 2, + data = { + label = "Stream 1", + type = "clipRecording", + resolution = { + width = 1280, + height = 720, + fps = 15 + }, + viewport = { + upperLeftVertex = { x = 0, y = 0 }, + lowerRightVertex = { x = 1280, y = 720 } + }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + } + })) + ) + test.wait_for_events() + -- Change label only + test.socket.capability:__queue_receive({ + mock_device.id, + { + capability = "videoStreamSettings", component = "main", command = "setStream", args = { + 2, + "clipRecording", -- 
type + "Recording Stream", -- label + { width = 1280, height = 720, fps = 15 }, -- resolution + { upperLeftVertex = {x = 0, y = 0}, lowerRightVertex = {x = 1280, y = 720} }, -- viewport + "disabled", -- watermark + "disabled" -- onScreenDisplay + }} + }) + -- Should emit updated capability directly, no stream reallocation + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 2, + data = { + label = "Recording Stream", + type = "clipRecording", + resolution = { + width = 1280, + height = 720, + fps = 15 + }, + viewport = { + upperLeftVertex = { x = 0, y = 0 }, + lowerRightVertex = { x = 1280, y = 720 } + }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + } + })) + ) + end +) + +test.register_coroutine_test( + "setStream with only viewport change should send DPTZSetViewport command", + function() + update_device_profile() + test.wait_for_events() + -- Set up existing stream + test.socket.matter:__queue_receive({ + mock_device.id, + clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams:build_test_report_data( + mock_device, CAMERA_EP, { + clusters.CameraAvStreamManagement.types.VideoStreamStruct({ + video_stream_id = 5, + stream_usage = clusters.Global.types.StreamUsageEnum.LIVE_VIEW, + video_codec = clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + min_frame_rate = 30, + max_frame_rate = 60, + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 3840, height = 2160}), + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 3840, height = 2160}), + min_bit_rate = 10000, + max_bit_rate = 10000, + key_frame_interval = 4000, + watermark_enabled = false, + osd_enabled = true, + reference_count = 0 + }) + } + ) + }) + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 5, + data 
= { + label = "Stream 1", + type = "liveStream", + resolution = { + width = 3840, + height = 2160, + fps = 30 + }, + viewport = { + upperLeftVertex = { x = 0, y = 0 }, + lowerRightVertex = { x = 3840, y = 2160 } + }, + watermark = "disabled", + onScreenDisplay = "enabled" + } + } + })) + ) + test.wait_for_events() + -- Change only viewport + test.socket.capability:__queue_receive({ + mock_device.id, + { + capability = "videoStreamSettings", component = "main", command = "setStream", args = { + 5, + "liveStream", -- type + "Stream 1", -- label + { width = 3840, height = 2160, fps = 30 }, -- resolution + { upperLeftVertex = {x = 500, y = 500}, lowerRightVertex = {x = 3340, y = 1660} }, -- viewport + "disabled", -- watermark + "enabled" -- onScreenDisplay + }} + }) + test.socket.matter:__expect_send({ + mock_device.id, clusters.CameraAvSettingsUserLevelManagement.server.commands.DPTZSetViewport(mock_device, CAMERA_EP, + 5, + clusters.Global.types.ViewportStruct({ + x1 = 500, + x2 = 3340, + y1 = 500, + y2 = 1660 + }) + ) + }) + -- Should emit updated capability directly, no stream reallocation + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 5, + data = { + label = "Stream 1", + type = "liveStream", + resolution = { + width = 3840, + height = 2160, + fps = 30 + }, + viewport = { + upperLeftVertex = { x = 500, y = 500 }, + lowerRightVertex = { x = 3340, y = 1660 } + }, + watermark = "disabled", + onScreenDisplay = "enabled" + } + } + })) + ) + end +) + +test.register_coroutine_test( + "setStream with resolution change should trigger reallocation", + function() + update_device_profile() + test.wait_for_events() + -- Set up existing stream + test.socket.matter:__queue_receive({ + mock_device.id, + clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams:build_test_report_data( + mock_device, CAMERA_EP, { + 
clusters.CameraAvStreamManagement.types.VideoStreamStruct({ + video_stream_id = 1, + stream_usage = clusters.Global.types.StreamUsageEnum.LIVE_VIEW, + video_codec = clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + min_frame_rate = 30, + max_frame_rate = 60, + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1280, height = 720}), + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1280, height = 720}), + min_bit_rate = 10000, + max_bit_rate = 10000, + key_frame_interval = 4000, + watermark_enabled = true, + osd_enabled = false, + reference_count = 0 + }) + } + ) + }) + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 1, + data = { + label = "Stream 1", + type = "liveStream", + resolution = { + width = 1280, + height = 720, + fps = 30 + }, + viewport = { + upperLeftVertex = { x = 0, y = 0 }, + lowerRightVertex = { x = 1280, y = 720 } + }, + watermark = "enabled", + onScreenDisplay = "disabled" + } + } + })) + ) + test.wait_for_events() + -- Change resolution and reallocate stream + test.socket.capability:__queue_receive({ + mock_device.id, + { + capability = "videoStreamSettings", component = "main", command = "setStream", args = { + 1, + "liveStream", -- type + "HD Stream", -- label + { width = 1920, height = 1080, fps = 30 }, -- resolution + { upperLeftVertex = {x = 0, y = 0}, lowerRightVertex = {x = 1280, y = 720} }, -- viewport + "enabled", -- watermark + "disabled" -- onScreenDisplay + }} + }) + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 1, + data = { + label = "HD Stream", + type = "liveStream", + resolution = { + width = 1280, + height = 720, + fps = 30 + }, + viewport = { + upperLeftVertex = { x = 0, y = 0 }, + lowerRightVertex = { x = 1280, y = 720 } + }, + watermark = 
"enabled", + onScreenDisplay = "disabled" + } + } + })) + ) + test.socket.matter:__expect_send({ + mock_device.id, clusters.CameraAvStreamManagement.server.commands.VideoStreamDeallocate(mock_device, CAMERA_EP, 1) + }) + test.socket.matter:__expect_send({ + mock_device.id, clusters.CameraAvStreamManagement.server.commands.VideoStreamAllocate(mock_device, CAMERA_EP, + clusters.Global.types.StreamUsageEnum.LIVE_VIEW, + clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + 30, + 60, + clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1920, height = 1080}), + clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1920, height = 1080}), + 10000, + 2000000, + 4000, + true, + false + ) + }) + test.wait_for_events() + test.socket.matter:__queue_receive({ + mock_device.id, + clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams:build_test_report_data( + mock_device, CAMERA_EP, { + clusters.CameraAvStreamManagement.types.VideoStreamStruct({ + video_stream_id = 1, + stream_usage = clusters.Global.types.StreamUsageEnum.LIVE_VIEW, + video_codec = clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + min_frame_rate = 30, + max_frame_rate = 60, + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1920, height = 1080}), + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1920, height = 1080}), + min_bit_rate = 10000, + max_bit_rate = 10000, + key_frame_interval = 4000, + watermark_enabled = false, + osd_enabled = false, + reference_count = 0 + }) + } + ) + }) + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 1, + data = { + label = "HD Stream", + type = "liveStream", + resolution = { + width = 1920, + height = 1080, + fps = 30 + }, + viewport = { + upperLeftVertex = { x = 0, y = 0 }, + lowerRightVertex = { x = 1920, y = 1080 } + }, + watermark 
= "disabled", + onScreenDisplay = "disabled" + } + } + })) + ) + end +) + +test.register_coroutine_test( + "Stream label should persist across attribute reports", + function() + update_device_profile() + test.wait_for_events() + -- Set up existing stream + test.socket.matter:__queue_receive({ + mock_device.id, + clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams:build_test_report_data( + mock_device, CAMERA_EP, { + clusters.CameraAvStreamManagement.types.VideoStreamStruct({ + video_stream_id = 3, + stream_usage = clusters.Global.types.StreamUsageEnum.LIVE_VIEW, + video_codec = clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + min_frame_rate = 30, + max_frame_rate = 60, + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 640, height = 480}), + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 640, height = 480}), + min_bit_rate = 10000, + max_bit_rate = 10000, + key_frame_interval = 4000, + watermark_enabled = false, + osd_enabled = false, + reference_count = 0 + }) + } + ) + }) + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 3, + data = { + label = "Stream 1", + type = "liveStream", + resolution = { width = 640, height = 480, fps = 30 }, + viewport = { upperLeftVertex = { x = 0, y = 0 }, lowerRightVertex = { x = 640, y = 480 } }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + } + })) + ) + test.wait_for_events() + -- Change label + test.socket.capability:__queue_receive({ + mock_device.id, + { + capability = "videoStreamSettings", component = "main", command = "setStream", args = { + 3, + "liveStream", -- type + "My Camera", -- label + { width = 640, height = 480, fps = 30 }, -- resolution + { upperLeftVertex = {x = 0, y = 0}, lowerRightVertex = {x = 640, y = 480} }, -- viewport + "disabled", -- watermark + "disabled" -- onScreenDisplay + }} + }) 
+ test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 3, + data = { + label = "My Camera", + type = "liveStream", + resolution = { width = 640, height = 480, fps = 30 }, + viewport = { upperLeftVertex = { x = 0, y = 0 }, lowerRightVertex = { x = 640, y = 480 } }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + } + })) + ) + test.wait_for_events() + -- Simulate another AllocatedVideoStreams report + test.socket.matter:__queue_receive({ + mock_device.id, + clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams:build_test_report_data( + mock_device, CAMERA_EP, { + clusters.CameraAvStreamManagement.types.VideoStreamStruct({ + video_stream_id = 3, + stream_usage = clusters.Global.types.StreamUsageEnum.LIVE_VIEW, + video_codec = clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + min_frame_rate = 30, + max_frame_rate = 60, + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 640, height = 480}), + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 640, height = 480}), + min_bit_rate = 10000, + max_bit_rate = 10000, + key_frame_interval = 4000, + watermark_enabled = false, + osd_enabled = false, + reference_count = 0 + }) + } + ) + }) + -- Should preserve the custom label from capability state + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 3, + data = { + label = "My Camera", + type = "liveStream", + resolution = { width = 640, height = 480, fps = 30 }, + viewport = { upperLeftVertex = { x = 0, y = 0 }, lowerRightVertex = { x = 640, y = 480 } }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + } + })) + ) + end +) + +test.register_coroutine_test( + "DPTZStreams attribute should update viewports in capability", + function() + update_device_profile() + 
test.wait_for_events() + -- Set up multiple existing streams + test.socket.matter:__queue_receive({ + mock_device.id, + clusters.CameraAvStreamManagement.attributes.AllocatedVideoStreams:build_test_report_data( + mock_device, CAMERA_EP, { + clusters.CameraAvStreamManagement.types.VideoStreamStruct({ + video_stream_id = 1, + stream_usage = clusters.Global.types.StreamUsageEnum.LIVE_VIEW, + video_codec = clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + min_frame_rate = 30, + max_frame_rate = 60, + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1920, height = 1080}), + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1920, height = 1080}), + min_bit_rate = 10000, + max_bit_rate = 10000, + key_frame_interval = 4000, + watermark_enabled = false, + osd_enabled = false, + reference_count = 0 + }), + clusters.CameraAvStreamManagement.types.VideoStreamStruct({ + video_stream_id = 2, + stream_usage = clusters.Global.types.StreamUsageEnum.RECORDING, + video_codec = clusters.CameraAvStreamManagement.types.VideoCodecEnum.H264, + min_frame_rate = 15, + max_frame_rate = 30, + min_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1280, height = 720}), + max_resolution = clusters.CameraAvStreamManagement.types.VideoResolutionStruct({width = 1280, height = 720}), + min_bit_rate = 10000, + max_bit_rate = 10000, + key_frame_interval = 4000, + watermark_enabled = false, + osd_enabled = false, + reference_count = 0 + }) + } + ) + }) + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 1, + data = { + label = "Stream 1", + type = "liveStream", + resolution = { width = 1920, height = 1080, fps = 30 }, + viewport = { upperLeftVertex = { x = 0, y = 0 }, lowerRightVertex = { x = 1920, y = 1080 } }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + }, + { + 
streamId = 2, + data = { + label = "Stream 2", + type = "clipRecording", + resolution = { width = 1280, height = 720, fps = 15 }, + viewport = { upperLeftVertex = { x = 0, y = 0 }, lowerRightVertex = { x = 1280, y = 720 } }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + } + })) + ) + test.socket.matter:__queue_receive({ + mock_device.id, + clusters.CameraAvSettingsUserLevelManagement.attributes.DPTZStreams:build_test_report_data( + mock_device, CAMERA_EP, { + clusters.CameraAvSettingsUserLevelManagement.types.DPTZStruct({ + video_stream_id = 1, + viewport = clusters.Global.types.ViewportStruct({ + x1 = 200, + x2 = 1720, + y1 = 100, + y2 = 980 + }) + }), + clusters.CameraAvSettingsUserLevelManagement.types.DPTZStruct({ + video_stream_id = 2, + viewport = clusters.Global.types.ViewportStruct({ + x1 = 50, + x2 = 1230, + y1 = 50, + y2 = 670 + }) + }) + } + ) + }) + test.socket.capability:__expect_send( + mock_device:generate_test_message("main", capabilities.videoStreamSettings.videoStreams({ + { + streamId = 1, + data = { + label = "Stream 1", + type = "liveStream", + resolution = { width = 1920, height = 1080, fps = 30 }, + viewport = { upperLeftVertex = { x = 200, y = 100 }, lowerRightVertex = { x = 1720, y = 980 } }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + }, + { + streamId = 2, + data = { + label = "Stream 2", + type = "clipRecording", + resolution = { width = 1280, height = 720, fps = 15 }, + viewport = { upperLeftVertex = { x = 50, y = 50 }, lowerRightVertex = { x = 1230, y = 670 } }, + watermark = "disabled", + onScreenDisplay = "disabled" + } + } + })) + ) + end +) + test.register_coroutine_test( "Camera profile should not update for an unchanged Status Light AttributeList report", function()