On Tue, 27 Jun 2017 22:50:44 +0100
Mark Thompson <[email protected]> wrote:

> ---
>  configure                      |    5 +-
>  doc/APIchanges                 |    4 +
>  libavutil/Makefile             |    2 +
>  libavutil/hwcontext.c          |    4 +
>  libavutil/hwcontext.h          |    1 +
>  libavutil/hwcontext_internal.h |    1 +
>  libavutil/hwcontext_opencl.c   | 1303 ++++++++++++++++++++++++++++++++++++++++
>  libavutil/hwcontext_opencl.h   |   96 +++
>  libavutil/version.h            |    4 +-
>  9 files changed, 1417 insertions(+), 3 deletions(-)
>  create mode 100644 libavutil/hwcontext_opencl.c
>  create mode 100644 libavutil/hwcontext_opencl.h


> +static int opencl_get_plane_format(enum AVPixelFormat pixfmt,
> +                                   int plane, int width, int height,
> +                                   cl_image_format *image_format,
> +                                   cl_image_desc *image_desc)
> +{
> +    const AVPixFmtDescriptor *desc;
> +    const AVComponentDescriptor *comp;
> +    int channels = 0, order = 0, depth = 0, step = 0;
> +    int wsub, hsub, alpha;
> +    int c;
> +
> +    if (plane >= AV_NUM_DATA_POINTERS)
> +        return AVERROR(ENOENT);
> +
> +    desc = av_pix_fmt_desc_get(pixfmt);
> +
> +    // Only normal images are allowed.
> +    if (desc->flags & (AV_PIX_FMT_FLAG_BITSTREAM |
> +                       AV_PIX_FMT_FLAG_HWACCEL   |
> +                       AV_PIX_FMT_FLAG_PAL))
> +        return AVERROR(EINVAL);
> +
> +    wsub = 1 << desc->log2_chroma_w;
> +    hsub = 1 << desc->log2_chroma_h;
> +    // Subsampled components must be exact.
> +    if (width & wsub - 1 || height & hsub - 1)
> +        return AVERROR(EINVAL);
> +
> +    for (c = 0; c < desc->nb_components; c++) {
> +        comp = &desc->comp[c];
> +        if (comp->plane != plane)
> +            continue;
> +        // The step size must be a power of two.
> +        if (comp->step != 1 && comp->step != 2 &&
> +            comp->step != 4 && comp->step != 8)
> +            return AVERROR(EINVAL);
> +        // The bits in each component must be packed in the
> +        // most-significant-bits of the relevant bytes.
> +        if (comp->shift + comp->depth != 8 &&
> +            comp->shift + comp->depth != 16)
> +            return AVERROR(EINVAL);
> +        // The depth must not vary between components.
> +        if (depth && comp->depth != depth)
> +            return AVERROR(EINVAL);
> +        // If a single data element crosses multiple bytes then
> +        // it must match the native endianness.
> +        if (comp->depth > 8 &&
> +            HAVE_BIGENDIAN == !(desc->flags & AV_PIX_FMT_FLAG_BE))
> +            return AVERROR(EINVAL);
> +        // A single data element must not contain multiple samples
> +        // from the same component.
> +        if (step && comp->step != step)
> +            return AVERROR(EINVAL);
> +        order = order * 10 + c + 1;
> +        depth = comp->depth;
> +        step  = comp->step;
> +        alpha = (desc->flags & AV_PIX_FMT_FLAG_ALPHA &&
> +                 c == desc->nb_components - 1);
> +        ++channels;
> +    }
> +    if (channels == 0)
> +        return AVERROR(ENOENT);
> +
> +    memset(image_format, 0, sizeof(*image_format));
> +    memset(image_desc,   0, sizeof(*image_desc));
> +    image_desc->image_type = CL_MEM_OBJECT_IMAGE2D;
> +
> +    if (plane == 0 || alpha) {
> +        image_desc->image_width     = width;
> +        image_desc->image_height    = height;
> +        image_desc->image_row_pitch = step * width;
> +    } else {
> +        image_desc->image_width     = width  / wsub;
> +        image_desc->image_height    = height / hsub;
> +        image_desc->image_row_pitch = step * width / wsub;
> +    }
> +
> +    if (depth <= 8) {
> +        image_format->image_channel_data_type = CL_UNORM_INT8;
> +    } else {
> +        if (depth <= 16)
> +            image_format->image_channel_data_type = CL_UNORM_INT16;
> +        else
> +            return AVERROR(EINVAL);
> +    }
> +
> +#define CHANNEL_ORDER(order, type) \
> +    case order: image_format->image_channel_order = type; break;
> +    switch (order) {
> +        CHANNEL_ORDER(1,    CL_R);
> +        CHANNEL_ORDER(2,    CL_R);
> +        CHANNEL_ORDER(3,    CL_R);
> +        CHANNEL_ORDER(4,    CL_R);
> +        CHANNEL_ORDER(12,   CL_RG);
> +        CHANNEL_ORDER(23,   CL_RG);
> +        CHANNEL_ORDER(1234, CL_RGBA);
> +        CHANNEL_ORDER(3214, CL_BGRA);
> +        CHANNEL_ORDER(4123, CL_ARGB);
> +#ifdef CL_ABGR
> +        CHANNEL_ORDER(4321, CL_ABGR);
> +#endif
> +    default:
> +        return AVERROR(EINVAL);
> +    }
> +#undef CHANNEL_ORDER
> +
> +    return 0;
> +}

I suggest we make a generic helper for this.  I "often" need to know
about component order and whether formats are byte-aligned too. The
pixdesc struct is so generic, yet insufficient, that deriving these
properties from it can be quite tricky and complex.


> +/**
> + * OpenCL frame descriptor for pool allocation.
> + *
> + * In user-allocated pools, AVHWFramesContext.pool must return AVBufferRefs
> + * with the data pointer pointing at an object of this type describing the
> + * planes of the frame.
> + */
> +typedef struct AVOpenCLFrameDescriptor {
> +    /**
> +     * Number of planes in the frame.
> +     */
> +    int nb_planes;
> +    /**
> +     * OpenCL image2d objects for each plane of the frame.
> +     */
> +    cl_mem planes[AV_NUM_DATA_POINTERS];
> +} AVOpenCLFrameDescriptor;

Not sure if this should have more metadata about the formats?

> +
> +/**
> + * OpenCL device details.
> + *
> + * Allocated as AVHWDeviceContext.hwctx
> + */
> +typedef struct AVOpenCLDeviceContext {
> +    /**
> +     * The primary device ID of the device.  If multiple OpenCL devices
> +     * are associated with the context then this is the one which will
> +     * be used for all operations internal to Libav.
> +     */
> +    cl_device_id device_id;
> +    /**
> +     * The OpenCL context which will contain all operations and frames on
> +     * this device.
> +     */
> +    cl_context context;
> +    /**
> +     * The default command queue for this device, which will be used by all
> +     * frames contexts which do not have their own command queue.  If not
> +     * initialised by the user, a default queue will be created on the
> +     * primary device.
> +     */
> +    cl_command_queue command_queue;
> +} AVOpenCLDeviceContext;

Is the default queue also set on the public struct if created by Libav?

> +
> +/**
> + * OpenCL-specific data associated with a frame pool.
> + *
> + * Allocated as AVHWFramesContext.hwctx.
> + */
> +typedef struct AVOpenCLFramesContext {
> +    /**
> +     * The command queue used for internal asynchronous operations on this
> +     * device (av_hwframe_transfer_data(), av_hwframe_map()).
> +     *
> +     * If this is not set, the command queue from the associated device is
> +     * used instead.
> +     */
> +    cl_command_queue command_queue;
> +} AVOpenCLFramesContext;

Same question.


Otherwise, can't say much.
_______________________________________________
libav-devel mailing list
[email protected]
https://lists.libav.org/mailman/listinfo/libav-devel

Reply via email to