How to show a camera stream in a circular window in GTK?

I'm using the DeepStream SDK to play a camera stream and want to display the video in a circular window. For now, my solution uses two GTK windows: one is the camera window, where the stream is displayed as in any regular GTK window; the other is a mask window, which has a transparent circle in the middle and is black everywhere else, so the video only shows through the circle. I'm wondering if there is any way to achieve this with just one window.
I tried drawing a circle in a single window using a DrawingArea, but I can only show a colored circle in the middle of the window; I can't get the video to display inside the circle. Any help would be appreciated! Below is my code:

window.py

import math
import cairo
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GdkX11', '3.0')
gi.require_version('GstVideo', '1.0')
# GdkX11 and GstVideo must be imported so that get_xid() and
# set_window_handle() are available from Python
from gi.repository import Gtk, GdkX11, GstVideo


class MainWindow(Gtk.Window):
    def __init__(self, sink):
        super().__init__()
        self.sink = sink
        self.set_decorated(False)  # Hide the window decorations
        self.connect("realize", self.on_realize)

    def on_realize(self, widget, data=None):
        # Hand the native X window id to the video sink so it renders into this window
        window = widget.get_window()
        xid = window.get_xid()
        self.sink.set_window_handle(xid)


class MaskWindow(Gtk.Window):
    def __init__(self):
        super().__init__()
        self.set_app_paintable(True)
        self.connect("draw", self.on_draw)
        self.set_decorated(False)  # Hide the window decorations
        self.set_visual(self.get_screen().get_rgba_visual())  # Enable alpha channel for this window

    def on_draw(self, widget, ctx):
        width = widget.get_allocated_width()
        height = widget.get_allocated_height()
        self.draw_circle(ctx, width, height)

    def draw_circle(self, ctx, width, height):
        # Paint the whole window black, then clear a circular area so the
        # camera window underneath shows through the hole
        ctx.set_source_rgb(0, 0, 0)
        ctx.paint()
        radius = min(width, height) / 2.2
        ctx.translate(width / 2, height / 2)
        ctx.arc(0, 0, radius, 0, 2 * math.pi)
        ctx.clip()
        ctx.set_operator(cairo.OPERATOR_CLEAR)
        ctx.paint()
        ctx.reset_clip()
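
For reference, one single-window direction I considered (an untested sketch; the class and its names are my own, not working code) is to shape the video window itself with a circular region, so only a circular part of the xvimagesink output is ever visible and no separate mask window is needed:

import math
import cairo
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('GdkX11', '3.0')
gi.require_version('GstVideo', '1.0')
from gi.repository import Gtk, GdkX11, GstVideo


class CircularVideoWindow(Gtk.Window):
    # Untested sketch: clip the toplevel window to a circle with a shaped
    # region instead of covering it with a second mask window
    def __init__(self, sink):
        super().__init__()
        self.sink = sink
        self.set_decorated(False)
        self.connect("realize", self.on_realize)
        self.connect("size-allocate", self.on_size_allocate)

    def on_realize(self, widget, data=None):
        # Same handle passing as in MainWindow above
        self.sink.set_window_handle(widget.get_window().get_xid())

    def on_size_allocate(self, widget, allocation):
        # Approximate a circle with 1-pixel-high rectangle strips and apply it
        # as the window shape, so everything outside the circle is cut away
        width, height = allocation.width, allocation.height
        radius = min(width, height) / 2.2
        cx, cy = width / 2.0, height / 2.0
        region = cairo.Region()
        for y in range(int(cy - radius), int(cy + radius)):
            dx = math.sqrt(max(radius * radius - (y - cy) ** 2, 0.0))
            region.union(cairo.RectangleInt(int(cx - dx), y, int(2 * dx), 1))
        widget.shape_combine_region(region)

I have not verified how well xvimagesink behaves inside a shaped window, so this is only the direction I was thinking of, not something I have working.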

pipeline.py

import threading

import gi
gi.require_version('Gst', '1.0')
gi.require_version('Gtk', '3.0')
from gi.repository import Gst, Gtk, GLib

# MainWindow and MaskWindow come from window.py above; create_element,
# add_sequential, link_sequential and bus_call are small helpers defined
# elsewhere in my project
from window import MainWindow, MaskWindow


class Pipeline:
    def __init__(self, codec='H264'):
        self.nvvidconv1 = None
        self.record_bin = None
        self.source = None
        self.camerabin = None
        self.v4l2src_caps = None
        self.file_sink = None
        self.gl_window = None
        self.mask_window = None
        self.device = '/dev/video0'
        self.pipeline = None
        self.codec = codec
        self.sink = None
        self.videobalance = None
        self.scale_caps = None
        self.videoscale = None
        self.videocrop = None
        self.videobox = None

    def init_pipeline(self) -> bool:
        Gst.init(None)
        Gtk.init()
        self.pipeline = Gst.Pipeline()
        if not self.pipeline:
            print("Unable to create Pipeline")
            return False
        if not self.assemble_pipeline():
            print("Unable to assemble the pipeline")
            return False
        return True

    def start(self):
        if not self.pipeline:
            print("Pipeline not initialized")
            return
        threading.Thread(target=self.run_streamer).start()

    def stop(self):
        self.mask_window.hide()
        self.gl_window.hide()
        self.pipeline.set_state(Gst.State.NULL)
        Gtk.main_quit()

    def run_streamer(self):
        print("Running streamer")
        self.gl_window = MainWindow(self.sink)
        self.gl_window.set_default_size(2560, 1440)
        self.gl_window.set_position(Gtk.WindowPosition.CENTER)
        self.gl_window.show_all()

        self.mask_window = MaskWindow()
        self.mask_window.set_default_size(2560, 1440)
        self.mask_window.set_position(Gtk.WindowPosition.CENTER)
        self.mask_window.show_all()

        if self.pipeline.set_state(Gst.State.PLAYING) == Gst.StateChangeReturn.FAILURE:
            self.pipeline.set_state(Gst.State.NULL)
        else:
            GLib.MainLoop().run()

    def assemble_pipeline(self) -> bool:
        self.source = create_element("v4l2src", "usb-cam-source")
        self.v4l2src_caps = create_element("capsfilter", "v4l2src_caps")
        nvvidconv = create_element("nvvideoconvert", "convertor")
        self.nvvidconv1 = create_element("nvvideoconvert", "nvvidconv")
        decoder = create_element("jpegdec", "jpegdec-decoder")
        vidconvsrc = create_element("videoconvert", "convertor_src1")
        caps_vidconvsrc = create_element("capsfilter", "nvmm_caps")
        self.videobalance = create_element("videobalance", "videobalance")
        self.sink = create_element("xvimagesink", None)
        self.videocrop = create_element("videocrop", "videocrop")
        self.videoscale = create_element("videoscale", "videoscale")
        self.scale_caps = create_element("capsfilter", "scale_caps")
        # set properties
        self.source.set_property('device', self.device)
        self.v4l2src_caps.set_property('caps',
                                       Gst.Caps.from_string(
                                           "image/jpeg,width=1280, height=720, framerate=30/1"))

        caps_vidconvsrc.set_property('caps',
                                     Gst.Caps.from_string("video/x-raw(memory:NVMM)"))
        self.sink.set_property('sync', 0)

        element_list = [self.source, self.v4l2src_caps, decoder, vidconvsrc,
                        nvvidconv, caps_vidconvsrc, self.nvvidconv1, self.videobalance,
                        self.videocrop, self.videoscale, self.scale_caps,
                        self.sink]

        add_sequential(self.pipeline, element_list)

        if not link_sequential(element_list):
            print("Unable to link the elements")
            return False
        loop = GLib.MainLoop()
        bus = self.pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect("message", bus_call, loop)
        # Add a pad probe for dynamic adjustments
        pad = vidconvsrc.get_static_pad("src")
        pad.add_probe(Gst.PadProbeType.BUFFER, self.pad_probe_callback, None)
        print("Assembled pipeline Success")
        return True
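
For completeness, the class is driven from an entry point along these lines (a simplified sketch of my main script, not verbatim):

if __name__ == "__main__":
    pipeline = Pipeline(codec='H264')
    if pipeline.init_pipeline():
        # start() spawns run_streamer() on a background thread, which creates
        # the two GTK windows and sets the GStreamer pipeline to PLAYING
        pipeline.start()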
