/**************************************************************************
 *
 * Copyright 2009 Younes Manton.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/
#include <assert.h>

#include "pipe/p_compiler.h"
#include "pipe/p_context.h"

#include "util/u_math.h"
#include "util/u_memory.h"
#include "util/u_draw.h"
#include "util/u_surface.h"

#include "tgsi/tgsi_ureg.h"

#include "vl_csc.h"
#include "vl_compositor.h"
/* Sentinels for the dirty-area rectangle: a "full" area spans
 * [MIN_DIRTY, MAX_DIRTY], an "empty" one has x0/y0 = MAX_DIRTY and
 * x1/y1 = MIN_DIRTY.  MIN_DIRTY is referenced by gen_vertex_data(),
 * vl_compositor_reset_dirty_area() and vl_compositor_render() but its
 * definition was missing — restored here. */
#define MIN_DIRTY (0)
#define MAX_DIRTY (1 << 15)

/* Flat 4x4 color-space-conversion matrix, uploaded to the fragment
 * shader's constant buffer. */
typedef float csc_matrix[16];
create_vert_shader(struct vl_compositor *c)
51
struct ureg_program *shader;
52
struct ureg_src vpos, vtex;
53
struct ureg_dst o_vpos, o_vtex;
55
shader = ureg_create(TGSI_PROCESSOR_VERTEX);
59
vpos = ureg_DECL_vs_input(shader, 0);
60
vtex = ureg_DECL_vs_input(shader, 1);
61
o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, 0);
62
o_vtex = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, 1);
68
ureg_MOV(shader, o_vpos, vpos);
69
ureg_MOV(shader, o_vtex, vtex);
73
return ureg_create_shader_and_destroy(shader, c->pipe);
77
create_frag_shader_video_buffer(struct vl_compositor *c)
79
struct ureg_program *shader;
81
struct ureg_src csc[3];
82
struct ureg_src sampler[3];
83
struct ureg_dst texel;
84
struct ureg_dst fragment;
87
shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
91
tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
92
for (i = 0; i < 3; ++i) {
93
csc[i] = ureg_DECL_constant(shader, i);
94
sampler[i] = ureg_DECL_sampler(shader, i);
96
texel = ureg_DECL_temporary(shader);
97
fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);
100
* texel.xyz = tex(tc, sampler[i])
101
* fragment = csc * texel
103
for (i = 0; i < 3; ++i)
104
ureg_TEX(shader, ureg_writemask(texel, TGSI_WRITEMASK_X << i), TGSI_TEXTURE_2D, tc, sampler[i]);
106
ureg_MOV(shader, ureg_writemask(texel, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));
108
for (i = 0; i < 3; ++i)
109
ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));
111
ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));
113
ureg_release_temporary(shader, texel);
116
return ureg_create_shader_and_destroy(shader, c->pipe);
120
/**
 * Build the paletted-subpicture fragment shader.
 * The first texture holds palette indices, the second is a 1D palette.
 * When include_cc is true the palette entries are YUV and are run through
 * the CSC matrix; otherwise the palette already contains RGB.
 *
 *   texel        = tex(tc, sampler)
 *   fragment.xyz = tex(texel, palette) [* csc]
 *   fragment.a   = texel.a
 *
 * Returns an opaque FS handle, or NULL/false on failure.
 */
static void *
create_frag_shader_palette(struct vl_compositor *c, bool include_cc)
{
   struct ureg_program *shader;
   struct ureg_src csc[3];
   struct ureg_src tc;
   struct ureg_src sampler;
   struct ureg_src palette;
   struct ureg_dst texel;
   struct ureg_dst fragment;
   unsigned i;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   /* CSC constants are only needed for the YUV palette variant */
   for (i = 0; include_cc && i < 3; ++i)
      csc[i] = ureg_DECL_constant(shader, i);

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   palette = ureg_DECL_sampler(shader, 1);

   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * texel = tex(tc, sampler)
    * fragment.xyz = tex(texel, palette) * csc
    * fragment.a = texel.a
    */
   ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);
   ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_src(texel));

   if (include_cc) {
      ureg_TEX(shader, texel, TGSI_TEXTURE_1D, ureg_src(texel), palette);
      for (i = 0; i < 3; ++i)
         ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));
   } else {
      ureg_TEX(shader, ureg_writemask(fragment, TGSI_WRITEMASK_XYZ),
               TGSI_TEXTURE_1D, ureg_src(texel), palette);
   }

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}
create_frag_shader_rgba(struct vl_compositor *c)
171
struct ureg_program *shader;
173
struct ureg_src sampler;
174
struct ureg_dst fragment;
176
shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
180
tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
181
sampler = ureg_DECL_sampler(shader, 0);
182
fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);
185
* fragment = tex(tc, sampler)
187
ureg_TEX(shader, fragment, TGSI_TEXTURE_2D, tc, sampler);
190
return ureg_create_shader_and_destroy(shader, c->pipe);
194
init_shaders(struct vl_compositor *c)
198
c->vs = create_vert_shader(c);
200
debug_printf("Unable to create vertex shader.\n");
204
c->fs_video_buffer = create_frag_shader_video_buffer(c);
205
if (!c->fs_video_buffer) {
206
debug_printf("Unable to create YCbCr-to-RGB fragment shader.\n");
210
c->fs_palette.yuv = create_frag_shader_palette(c, true);
211
if (!c->fs_palette.yuv) {
212
debug_printf("Unable to create YUV-Palette-to-RGB fragment shader.\n");
216
c->fs_palette.rgb = create_frag_shader_palette(c, false);
217
if (!c->fs_palette.rgb) {
218
debug_printf("Unable to create RGB-Palette-to-RGB fragment shader.\n");
222
c->fs_rgba = create_frag_shader_rgba(c);
224
debug_printf("Unable to create RGB-to-RGB fragment shader.\n");
231
static void cleanup_shaders(struct vl_compositor *c)
235
c->pipe->delete_vs_state(c->pipe, c->vs);
236
c->pipe->delete_fs_state(c->pipe, c->fs_video_buffer);
237
c->pipe->delete_fs_state(c->pipe, c->fs_palette.yuv);
238
c->pipe->delete_fs_state(c->pipe, c->fs_palette.rgb);
239
c->pipe->delete_fs_state(c->pipe, c->fs_rgba);
243
init_pipe_state(struct vl_compositor *c)
245
struct pipe_rasterizer_state rast;
246
struct pipe_sampler_state sampler;
247
struct pipe_blend_state blend;
248
struct pipe_depth_stencil_alpha_state dsa;
253
c->fb_state.nr_cbufs = 1;
254
c->fb_state.zsbuf = NULL;
256
c->viewport.scale[2] = 1;
257
c->viewport.scale[3] = 1;
258
c->viewport.translate[2] = 0;
259
c->viewport.translate[3] = 0;
261
memset(&sampler, 0, sizeof(sampler));
262
sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
263
sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
264
sampler.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
265
sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
266
sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
267
sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
268
sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
269
sampler.compare_func = PIPE_FUNC_ALWAYS;
270
sampler.normalized_coords = 1;
272
c->sampler_linear = c->pipe->create_sampler_state(c->pipe, &sampler);
274
sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
275
sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
276
c->sampler_nearest = c->pipe->create_sampler_state(c->pipe, &sampler);
278
memset(&blend, 0, sizeof blend);
279
blend.independent_blend_enable = 0;
280
blend.rt[0].blend_enable = 0;
281
blend.logicop_enable = 0;
282
blend.logicop_func = PIPE_LOGICOP_CLEAR;
283
blend.rt[0].colormask = PIPE_MASK_RGBA;
285
c->blend_clear = c->pipe->create_blend_state(c->pipe, &blend);
287
blend.rt[0].blend_enable = 1;
288
blend.rt[0].rgb_func = PIPE_BLEND_ADD;
289
blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
290
blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
291
blend.rt[0].alpha_func = PIPE_BLEND_ADD;
292
blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
293
blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
294
c->blend_add = c->pipe->create_blend_state(c->pipe, &blend);
296
memset(&rast, 0, sizeof rast);
299
rast.cull_face = PIPE_FACE_NONE;
300
rast.fill_back = PIPE_POLYGON_MODE_FILL;
301
rast.fill_front = PIPE_POLYGON_MODE_FILL;
304
rast.point_size_per_vertex = 1;
305
rast.offset_units = 1;
306
rast.offset_scale = 1;
307
rast.gl_rasterization_rules = 1;
310
c->rast = c->pipe->create_rasterizer_state(c->pipe, &rast);
312
memset(&dsa, 0, sizeof dsa);
313
dsa.depth.enabled = 0;
314
dsa.depth.writemask = 0;
315
dsa.depth.func = PIPE_FUNC_ALWAYS;
316
for (i = 0; i < 2; ++i) {
317
dsa.stencil[i].enabled = 0;
318
dsa.stencil[i].func = PIPE_FUNC_ALWAYS;
319
dsa.stencil[i].fail_op = PIPE_STENCIL_OP_KEEP;
320
dsa.stencil[i].zpass_op = PIPE_STENCIL_OP_KEEP;
321
dsa.stencil[i].zfail_op = PIPE_STENCIL_OP_KEEP;
322
dsa.stencil[i].valuemask = 0;
323
dsa.stencil[i].writemask = 0;
325
dsa.alpha.enabled = 0;
326
dsa.alpha.func = PIPE_FUNC_ALWAYS;
327
dsa.alpha.ref_value = 0;
328
c->dsa = c->pipe->create_depth_stencil_alpha_state(c->pipe, &dsa);
329
c->pipe->bind_depth_stencil_alpha_state(c->pipe, c->dsa);
334
static void cleanup_pipe_state(struct vl_compositor *c)
338
/* Asserted in softpipe_delete_fs_state() for some reason */
339
c->pipe->bind_vs_state(c->pipe, NULL);
340
c->pipe->bind_fs_state(c->pipe, NULL);
342
c->pipe->delete_depth_stencil_alpha_state(c->pipe, c->dsa);
343
c->pipe->delete_sampler_state(c->pipe, c->sampler_linear);
344
c->pipe->delete_sampler_state(c->pipe, c->sampler_nearest);
345
c->pipe->delete_blend_state(c->pipe, c->blend_clear);
346
c->pipe->delete_blend_state(c->pipe, c->blend_add);
347
c->pipe->delete_rasterizer_state(c->pipe, c->rast);
351
create_vertex_buffer(struct vl_compositor *c)
355
pipe_resource_reference(&c->vertex_buf.buffer, NULL);
356
c->vertex_buf.buffer = pipe_buffer_create
359
PIPE_BIND_VERTEX_BUFFER,
361
sizeof(struct vertex4f) * VL_COMPOSITOR_MAX_LAYERS * 4
364
return c->vertex_buf.buffer != NULL;
368
init_buffers(struct vl_compositor *c)
370
struct pipe_vertex_element vertex_elems[2];
375
* Create our vertex buffer and vertex buffer elements
377
c->vertex_buf.stride = sizeof(struct vertex4f);
378
c->vertex_buf.buffer_offset = 0;
379
create_vertex_buffer(c);
381
vertex_elems[0].src_offset = 0;
382
vertex_elems[0].instance_divisor = 0;
383
vertex_elems[0].vertex_buffer_index = 0;
384
vertex_elems[0].src_format = PIPE_FORMAT_R32G32_FLOAT;
385
vertex_elems[1].src_offset = sizeof(struct vertex2f);
386
vertex_elems[1].instance_divisor = 0;
387
vertex_elems[1].vertex_buffer_index = 0;
388
vertex_elems[1].src_format = PIPE_FORMAT_R32G32_FLOAT;
389
c->vertex_elems_state = c->pipe->create_vertex_elements_state(c->pipe, 2, vertex_elems);
392
* Create our fragment shader's constant buffer
393
* Const buffer contains the color conversion matrix and bias vectors
395
/* XXX: Create with IMMUTABLE/STATIC... although it does change every once in a long while... */
396
c->csc_matrix = pipe_buffer_create
399
PIPE_BIND_CONSTANT_BUFFER,
408
cleanup_buffers(struct vl_compositor *c)
412
c->pipe->delete_vertex_elements_state(c->pipe, c->vertex_elems_state);
413
pipe_resource_reference(&c->vertex_buf.buffer, NULL);
414
pipe_resource_reference(&c->csc_matrix, NULL);
417
static INLINE struct pipe_video_rect
418
default_rect(struct vl_compositor_layer *layer)
420
struct pipe_resource *res = layer->sampler_views[0]->texture;
421
struct pipe_video_rect rect = { 0, 0, res->width0, res->height0 };
425
static INLINE struct vertex2f
426
calc_topleft(struct vertex2f size, struct pipe_video_rect rect)
428
struct vertex2f res = { rect.x / size.x, rect.y / size.y };
432
static INLINE struct vertex2f
433
calc_bottomright(struct vertex2f size, struct pipe_video_rect rect)
435
struct vertex2f res = { (rect.x + rect.w) / size.x, (rect.y + rect.h) / size.y };
440
calc_src_and_dst(struct vl_compositor_layer *layer, unsigned width, unsigned height,
441
struct pipe_video_rect src, struct pipe_video_rect dst)
443
struct vertex2f size = { width, height };
445
layer->src.tl = calc_topleft(size, src);
446
layer->src.br = calc_bottomright(size, src);
447
layer->dst.tl = calc_topleft(size, dst);
448
layer->dst.br = calc_bottomright(size, dst);
452
gen_rect_verts(struct vertex4f *vb, struct vl_compositor_layer *layer)
456
vb[0].x = layer->dst.tl.x;
457
vb[0].y = layer->dst.tl.y;
458
vb[0].z = layer->src.tl.x;
459
vb[0].w = layer->src.tl.y;
461
vb[1].x = layer->dst.br.x;
462
vb[1].y = layer->dst.tl.y;
463
vb[1].z = layer->src.br.x;
464
vb[1].w = layer->src.tl.y;
466
vb[2].x = layer->dst.br.x;
467
vb[2].y = layer->dst.br.y;
468
vb[2].z = layer->src.br.x;
469
vb[2].w = layer->src.br.y;
471
vb[3].x = layer->dst.tl.x;
472
vb[3].y = layer->dst.br.y;
473
vb[3].z = layer->src.tl.x;
474
vb[3].w = layer->src.br.y;
477
static INLINE struct u_rect
478
calc_drawn_area(struct vl_compositor *c, struct vl_compositor_layer *layer)
480
struct u_rect result;
483
result.x0 = layer->dst.tl.x * c->viewport.scale[0] + c->viewport.translate[0];
484
result.y0 = layer->dst.tl.y * c->viewport.scale[1] + c->viewport.translate[1];
485
result.x1 = layer->dst.br.x * c->viewport.scale[0] + c->viewport.translate[0];
486
result.y1 = layer->dst.br.y * c->viewport.scale[1] + c->viewport.translate[1];
489
result.x0 = MAX2(result.x0, c->scissor.minx);
490
result.y0 = MAX2(result.y0, c->scissor.miny);
491
result.x1 = MIN2(result.x1, c->scissor.maxx);
492
result.y1 = MIN2(result.y1, c->scissor.maxy);
497
gen_vertex_data(struct vl_compositor *c, struct u_rect *dirty)
500
struct pipe_transfer *buf_transfer;
505
vb = pipe_buffer_map(c->pipe, c->vertex_buf.buffer,
506
PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD_RANGE | PIPE_TRANSFER_DONTBLOCK,
510
// If buffer is still locked from last draw create a new one
511
create_vertex_buffer(c);
512
vb = pipe_buffer_map(c->pipe, c->vertex_buf.buffer,
513
PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD_RANGE,
517
for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; i++) {
518
if (c->used_layers & (1 << i)) {
519
struct vl_compositor_layer *layer = &c->layers[i];
520
gen_rect_verts(vb, layer);
523
if (dirty && layer->clearing) {
524
struct u_rect drawn = calc_drawn_area(c, layer);
526
dirty->x0 >= drawn.x0 &&
527
dirty->y0 >= drawn.y0 &&
528
dirty->x1 <= drawn.x1 &&
529
dirty->y1 <= drawn.y1) {
531
// We clear the dirty area anyway, no need for clear_render_target
532
dirty->x0 = dirty->y0 = MAX_DIRTY;
533
dirty->x1 = dirty->y1 = MIN_DIRTY;
539
pipe_buffer_unmap(c->pipe, buf_transfer);
543
draw_layers(struct vl_compositor *c, struct u_rect *dirty)
545
unsigned vb_index, i;
549
for (i = 0, vb_index = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
550
if (c->used_layers & (1 << i)) {
551
struct vl_compositor_layer *layer = &c->layers[i];
552
struct pipe_sampler_view **samplers = &layer->sampler_views[0];
553
unsigned num_sampler_views = !samplers[1] ? 1 : !samplers[2] ? 2 : 3;
555
c->pipe->bind_blend_state(c->pipe, layer->blend);
556
c->pipe->bind_fs_state(c->pipe, layer->fs);
557
c->pipe->bind_fragment_sampler_states(c->pipe, num_sampler_views, layer->samplers);
558
c->pipe->set_fragment_sampler_views(c->pipe, num_sampler_views, samplers);
559
util_draw_arrays(c->pipe, PIPE_PRIM_QUADS, vb_index * 4, 4);
563
// Remember the currently drawn area as dirty for the next draw command
564
struct u_rect drawn = calc_drawn_area(c, layer);
565
dirty->x0 = MIN2(drawn.x0, dirty->x0);
566
dirty->y0 = MIN2(drawn.y0, dirty->y0);
567
dirty->x1 = MAX2(drawn.x1, dirty->x1);
568
dirty->y1 = MAX2(drawn.y1, dirty->y1);
575
vl_compositor_reset_dirty_area(struct u_rect *dirty)
579
dirty->x0 = dirty->y0 = MIN_DIRTY;
580
dirty->x1 = dirty->y1 = MAX_DIRTY;
584
vl_compositor_set_clear_color(struct vl_compositor *c, union pipe_color_union *color)
588
c->clear_color = *color;
592
vl_compositor_get_clear_color(struct vl_compositor *c, union pipe_color_union *color)
597
*color = c->clear_color;
601
vl_compositor_clear_layers(struct vl_compositor *c)
608
for ( i = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
609
c->layers[i].clearing = i ? false : true;
610
c->layers[i].blend = i ? c->blend_add : c->blend_clear;
611
c->layers[i].fs = NULL;
612
for ( j = 0; j < 3; j++)
613
pipe_sampler_view_reference(&c->layers[i].sampler_views[j], NULL);
618
/**
 * Tear down the compositor: drop layer references, then destroy buffers,
 * shaders and pipe state (the buffer/shader teardown calls were missing,
 * leaking their state objects).
 */
void
vl_compositor_cleanup(struct vl_compositor *c)
{
   assert(c);

   vl_compositor_clear_layers(c);

   cleanup_buffers(c);
   cleanup_shaders(c);
   cleanup_pipe_state(c);
}
vl_compositor_set_csc_matrix(struct vl_compositor *c, const float matrix[16])
632
struct pipe_transfer *buf_transfer;
638
pipe_buffer_map(c->pipe, c->csc_matrix,
639
PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD_RANGE,
645
pipe_buffer_unmap(c->pipe, buf_transfer);
649
vl_compositor_set_layer_blend(struct vl_compositor *c,
650
unsigned layer, void *blend,
655
assert(layer < VL_COMPOSITOR_MAX_LAYERS);
657
c->layers[layer].clearing = is_clearing;
658
c->layers[layer].blend = blend;
662
vl_compositor_set_buffer_layer(struct vl_compositor *c,
664
struct pipe_video_buffer *buffer,
665
struct pipe_video_rect *src_rect,
666
struct pipe_video_rect *dst_rect)
668
struct pipe_sampler_view **sampler_views;
673
assert(layer < VL_COMPOSITOR_MAX_LAYERS);
675
c->used_layers |= 1 << layer;
676
c->layers[layer].fs = c->fs_video_buffer;
678
sampler_views = buffer->get_sampler_view_components(buffer);
679
for (i = 0; i < 3; ++i) {
680
c->layers[layer].samplers[i] = c->sampler_linear;
681
pipe_sampler_view_reference(&c->layers[layer].sampler_views[i], sampler_views[i]);
684
calc_src_and_dst(&c->layers[layer], buffer->width, buffer->height,
685
src_rect ? *src_rect : default_rect(&c->layers[layer]),
686
dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
690
vl_compositor_set_palette_layer(struct vl_compositor *c,
692
struct pipe_sampler_view *indexes,
693
struct pipe_sampler_view *palette,
694
struct pipe_video_rect *src_rect,
695
struct pipe_video_rect *dst_rect,
696
bool include_color_conversion)
698
assert(c && indexes && palette);
700
assert(layer < VL_COMPOSITOR_MAX_LAYERS);
702
c->used_layers |= 1 << layer;
704
c->layers[layer].fs = include_color_conversion ?
705
c->fs_palette.yuv : c->fs_palette.rgb;
707
c->layers[layer].samplers[0] = c->sampler_linear;
708
c->layers[layer].samplers[1] = c->sampler_nearest;
709
c->layers[layer].samplers[2] = NULL;
710
pipe_sampler_view_reference(&c->layers[layer].sampler_views[0], indexes);
711
pipe_sampler_view_reference(&c->layers[layer].sampler_views[1], palette);
712
pipe_sampler_view_reference(&c->layers[layer].sampler_views[2], NULL);
713
calc_src_and_dst(&c->layers[layer], indexes->texture->width0, indexes->texture->height0,
714
src_rect ? *src_rect : default_rect(&c->layers[layer]),
715
dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
719
vl_compositor_set_rgba_layer(struct vl_compositor *c,
721
struct pipe_sampler_view *rgba,
722
struct pipe_video_rect *src_rect,
723
struct pipe_video_rect *dst_rect)
727
assert(layer < VL_COMPOSITOR_MAX_LAYERS);
729
c->used_layers |= 1 << layer;
730
c->layers[layer].fs = c->fs_rgba;
731
c->layers[layer].samplers[0] = c->sampler_linear;
732
c->layers[layer].samplers[1] = NULL;
733
c->layers[layer].samplers[2] = NULL;
734
pipe_sampler_view_reference(&c->layers[layer].sampler_views[0], rgba);
735
pipe_sampler_view_reference(&c->layers[layer].sampler_views[1], NULL);
736
pipe_sampler_view_reference(&c->layers[layer].sampler_views[2], NULL);
737
calc_src_and_dst(&c->layers[layer], rgba->texture->width0, rgba->texture->height0,
738
src_rect ? *src_rect : default_rect(&c->layers[layer]),
739
dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
743
vl_compositor_render(struct vl_compositor *c,
744
struct pipe_surface *dst_surface,
745
struct pipe_video_rect *dst_area,
746
struct pipe_video_rect *dst_clip,
747
struct u_rect *dirty_area)
752
c->fb_state.width = dst_surface->width;
753
c->fb_state.height = dst_surface->height;
754
c->fb_state.cbufs[0] = dst_surface;
757
c->viewport.scale[0] = dst_area->w;
758
c->viewport.scale[1] = dst_area->h;
759
c->viewport.translate[0] = dst_area->x;
760
c->viewport.translate[1] = dst_area->y;
762
c->viewport.scale[0] = dst_surface->width;
763
c->viewport.scale[1] = dst_surface->height;
764
c->viewport.translate[0] = 0;
765
c->viewport.translate[1] = 0;
769
c->scissor.minx = dst_clip->x;
770
c->scissor.miny = dst_clip->y;
771
c->scissor.maxx = dst_clip->x + dst_clip->w;
772
c->scissor.maxy = dst_clip->y + dst_clip->h;
776
c->scissor.maxx = dst_surface->width;
777
c->scissor.maxy = dst_surface->height;
780
gen_vertex_data(c, dirty_area);
782
if (dirty_area && (dirty_area->x0 < dirty_area->x1 ||
783
dirty_area->y0 < dirty_area->y1)) {
785
c->pipe->clear_render_target(c->pipe, dst_surface, &c->clear_color,
786
0, 0, dst_surface->width, dst_surface->height);
787
dirty_area->x0 = dirty_area->y0 = MAX_DIRTY;
788
dirty_area->x0 = dirty_area->y1 = MIN_DIRTY;
791
c->pipe->set_scissor_state(c->pipe, &c->scissor);
792
c->pipe->set_framebuffer_state(c->pipe, &c->fb_state);
793
c->pipe->set_viewport_state(c->pipe, &c->viewport);
794
c->pipe->bind_vs_state(c->pipe, c->vs);
795
c->pipe->set_vertex_buffers(c->pipe, 1, &c->vertex_buf);
796
c->pipe->bind_vertex_elements_state(c->pipe, c->vertex_elems_state);
797
c->pipe->set_constant_buffer(c->pipe, PIPE_SHADER_FRAGMENT, 0, c->csc_matrix);
798
c->pipe->bind_rasterizer_state(c->pipe, c->rast);
800
draw_layers(c, dirty_area);
804
vl_compositor_init(struct vl_compositor *c, struct pipe_context *pipe)
806
csc_matrix csc_matrix;
810
if (!init_pipe_state(c))
813
if (!init_shaders(c)) {
814
cleanup_pipe_state(c);
818
if (!init_buffers(c)) {
820
cleanup_pipe_state(c);
824
vl_compositor_clear_layers(c);
826
vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY, NULL, true, csc_matrix);
827
vl_compositor_set_csc_matrix(c, csc_matrix);
829
c->clear_color.f[0] = c->clear_color.f[1] = 0.0f;
830
c->clear_color.f[2] = c->clear_color.f[3] = 0.0f;