HDMI data type as 32 bits

Hello,

I am following the HDMI documentation to get a frame from the HDMI to apply a filter and output the result. The issue is that I am getting an array of u8 per channel and I would like to have an array of 4 channels per pixel, so a u32 per pixel. I configured it but I still get u8. This is what I have running:

from pynq import Overlay
from pynq.lib.video import *
from pynq import allocate
import numpy as np
import cv2

overlay = Overlay('/home/xilinx/base.bit')

dma = overlay.ceti_dma
hdmi_in = overlay.video.hdmi_in
hdmi_out = overlay.video.hdmi_out

hdmi_in.configure(PIXEL_RGB)
hdmi_out.configure(hdmi_in.mode, PIXEL_RGB)
hdmi_in.cacheable_frames = False
hdmi_out.cacheable_frames = False
pixel_in = overlay.video.hdmi_in.pixel_pack
pixel_out = overlay.video.hdmi_out.pixel_unpack
pixel_in.bits_per_pixel = 32
pixel_out.bits_per_pixel = 32
pixel_out.resample = False

hdmi_in.start()
hdmi_out.start()
mymode = hdmi_in.mode
print("My mode: "+str(mymode))
height = hdmi_in.mode.height
width = hdmi_in.mode.width
bpp = hdmi_in.mode.bits_per_pixel

outframe = hdmi_out.newframe()
inframe = hdmi_in.readframe()
outframe = inframe
hdmi_out.writeframe(outframe)

So, when I run inframe.size I get 6220800 which is 1920x1080x3. And when I print inframe I get:

PynqBuffer([[[22, 16, 12],
             [ 0, 22, 16],
             [12,  0, 22],
             ...,
             [ 0, 21, 14],
             [18,  0, 25],
             [17, 20,  0]],

            [[25, 18, 15],
             [ 0, 25, 18],
             [15,  0, 25],
             ...,
             [ 0, 26, 19],
             [22,  0, 29],
             [21, 24,  0]],

            [[66, 60, 57],
             [ 0, 66, 60],
             [57,  0, 64],
             ...,
             [ 0, 31, 24],
             [28,  0, 43],
             [35, 38,  0]],

            ...,

            [[29, 20, 19],
             [ 0, 29, 20],
             [19,  0, 31],
             ...,
             [ 0, 23, 17],
             [16,  0, 23],
             [17, 16,  0]],

            [[25, 16, 16],
             [ 0, 25, 16],
             [16,  0, 27],
             ...,
             [ 0, 23, 17],
             [16,  0, 22],
             [16, 15,  0]],

            [[26, 17, 17],
             [ 0, 26, 17],
             [17,  0, 27],
             ...,
             [ 0, 26, 19],
             [18,  0, 23],
             [17, 16,  0]]], dtype=uint8)

So, how can I get the dtype to be u32? Shouldn’t be pixel_in.bits_per_pixel = 32 the way to configure the HDMI IP?

Thanks for the help.