1
/* Copyright (C) 2001-2006 Artifex Software, Inc.
   All Rights Reserved.

   This software is provided AS-IS with no warranty, either express or
   implied.

   This software is distributed under license and may not be copied, modified
   or distributed except as expressly authorized under the terms of that
   license. Refer to licensing information at http://www.artifex.com/
   or contact Artifex Software, Inc., 7 Mt. Lassen Drive - Suite A-134,
   San Rafael, CA 94903, U.S.A., +1(415)492-9861, for further information.
*/

/* $Id: zcie.c 9043 2008-08-28 22:48:19Z giles $ */
/* CIE color operators */
21
#include "gxcspace.h" /* gscolor2.h requires gscspace.h */
32
#include "store.h" /* for make_null */
35
/* Empty procedures */
36
static const ref empty_procs[4] =
38
empty_ref_data(t_array, a_readonly | a_executable),
39
empty_ref_data(t_array, a_readonly | a_executable),
40
empty_ref_data(t_array, a_readonly | a_executable),
41
empty_ref_data(t_array, a_readonly | a_executable)
44
/* ------ Parameter extraction utilities ------ */
46
/* Get a range array parameter from a dictionary. */
47
/* We know that count <= 4. */
49
dict_ranges_param(const gs_memory_t *mem,
50
const ref * pdref, const char *kstr, int count,
53
int code = dict_floats_param(mem, pdref, kstr, count * 2,
54
(float *)prange, NULL);
59
memcpy(prange, Range4_default.ranges, count * sizeof(gs_range));
63
/* Get an array of procedures from a dictionary. */
64
/* We know count <= countof(empty_procs). */
66
dict_proc_array_param(const gs_memory_t *mem,
67
const ref *pdict, const char *kstr,
68
uint count, ref *pparray)
72
if (dict_find_string(pdict, kstr, &pvalue) > 0) {
75
check_array_only(*pvalue);
76
if (r_size(pvalue) != count)
77
return_error(e_rangecheck);
78
for (i = 0; i < count; i++) {
81
array_get(mem, pvalue, (long)i, &proc);
82
check_proc_only(proc);
87
make_const_array(pparray, a_readonly | avm_foreign,
88
count, &empty_procs[0]);
93
/* Get 3 ranges from a dictionary. */
95
dict_range3_param(const gs_memory_t *mem,
96
const ref *pdref, const char *kstr,
99
return dict_ranges_param(mem, pdref, kstr, 3, prange3->ranges);
102
/* Get a 3x3 matrix from a dictionary. */
104
dict_matrix3_param(const gs_memory_t *mem,
105
const ref *pdref, const char *kstr, gs_matrix3 *pmat3)
108
* We can't simply call dict_float_array_param with the matrix
109
* cast to a 9-element float array, because compilers may insert
110
* padding elements after each of the vectors. However, we can be
111
* confident that there is no padding within a single vector.
113
float values[9], defaults[9];
116
memcpy(&defaults[0], &Matrix3_default.cu, 3 * sizeof(float));
117
memcpy(&defaults[3], &Matrix3_default.cv, 3 * sizeof(float));
118
memcpy(&defaults[6], &Matrix3_default.cw, 3 * sizeof(float));
119
code = dict_floats_param(mem, pdref, kstr, 9, values, defaults);
122
memcpy(&pmat3->cu, &values[0], 3 * sizeof(float));
123
memcpy(&pmat3->cv, &values[3], 3 * sizeof(float));
124
memcpy(&pmat3->cw, &values[6], 3 * sizeof(float));
128
/* Get 3 procedures from a dictionary. */
130
dict_proc3_param(const gs_memory_t *mem, const ref *pdref, const char *kstr, ref proc3[3])
132
return dict_proc_array_param(mem, pdref, kstr, 3, proc3);
135
/* Get WhitePoint and BlackPoint values. */
137
cie_points_param(const gs_memory_t *mem,
138
const ref * pdref, gs_cie_wb * pwb)
142
if ((code = dict_floats_param(mem, pdref, "WhitePoint", 3, (float *)&pwb->WhitePoint, NULL)) < 0 ||
143
(code = dict_floats_param(mem, pdref, "BlackPoint", 3, (float *)&pwb->BlackPoint, (const float *)&BlackPoint_default)) < 0
146
if (pwb->WhitePoint.u <= 0 ||
147
pwb->WhitePoint.v != 1 ||
148
pwb->WhitePoint.w <= 0 ||
149
pwb->BlackPoint.u < 0 ||
150
pwb->BlackPoint.v < 0 ||
151
pwb->BlackPoint.w < 0
153
return_error(e_rangecheck);
157
/* Process a 3- or 4-dimensional lookup table from a dictionary. */
158
/* The caller has set pclt->n and pclt->m. */
159
/* ptref is known to be a readable array of size at least n+1. */
160
static int cie_3d_table_param(const ref * ptable, uint count, uint nbytes,
161
gs_const_string * strings);
163
cie_table_param(const ref * ptref, gx_color_lookup_table * pclt,
166
int n = pclt->n, m = pclt->m;
167
const ref *pta = ptref->value.const_refs;
171
gs_const_string *table;
173
for (i = 0; i < n; ++i) {
174
check_type_only(pta[i], t_integer);
175
if (pta[i].value.intval <= 1 || pta[i].value.intval > max_ushort)
176
return_error(e_rangecheck);
177
pclt->dims[i] = (int)pta[i].value.intval;
179
nbytes = m * pclt->dims[n - 2] * pclt->dims[n - 1];
182
gs_alloc_struct_array(mem, pclt->dims[0], gs_const_string,
183
&st_const_string_element, "cie_table_param");
185
return_error(e_VMerror);
186
code = cie_3d_table_param(pta + 3, pclt->dims[0], nbytes, table);
187
} else { /* n == 4 */
188
int d0 = pclt->dims[0], d1 = pclt->dims[1];
189
uint ntables = d0 * d1;
192
check_read_type(pta[4], t_array);
193
if (r_size(pta + 4) != d0)
194
return_error(e_rangecheck);
196
gs_alloc_struct_array(mem, ntables, gs_const_string,
197
&st_const_string_element, "cie_table_param");
199
return_error(e_VMerror);
200
psuba = pta[4].value.const_refs;
202
* We know that d0 > 0, so code will always be set in the loop:
203
* we initialize code to 0 here solely to pacify stupid compilers.
205
for (code = 0, i = 0; i < d0; ++i) {
206
code = cie_3d_table_param(psuba + i, d1, nbytes, table + d1 * i);
212
gs_free_object(mem, table, "cie_table_param");
219
cie_3d_table_param(const ref * ptable, uint count, uint nbytes,
220
gs_const_string * strings)
225
check_read_type(*ptable, t_array);
226
if (r_size(ptable) != count)
227
return_error(e_rangecheck);
228
rstrings = ptable->value.const_refs;
229
for (i = 0; i < count; ++i) {
230
const ref *const prt2 = rstrings + i;
232
check_read_type(*prt2, t_string);
233
if (r_size(prt2) != nbytes)
234
return_error(e_rangecheck);
235
strings[i].data = prt2->value.const_bytes;
236
strings[i].size = nbytes;
241
/* ------ CIE setcolorspace ------ */
243
/* Common code for the CIEBased* cases of setcolorspace. */
245
cie_lmnp_param(const gs_memory_t *mem, const ref * pdref, gs_cie_common * pcie, ref_cie_procs * pcprocs)
249
if ((code = dict_range3_param(mem, pdref, "RangeLMN", &pcie->RangeLMN)) < 0 ||
250
(code = dict_proc3_param(mem, pdref, "DecodeLMN", &pcprocs->DecodeLMN)) < 0 ||
251
(code = dict_matrix3_param(mem, pdref, "MatrixLMN", &pcie->MatrixLMN)) < 0 ||
252
(code = cie_points_param(mem, pdref, &pcie->points)) < 0
255
pcie->DecodeLMN = DecodeLMN_default;
259
/* Common code for the CIEBasedABC/DEF[G] cases of setcolorspace. */
261
cie_abc_param(const gs_memory_t *mem, const ref * pdref, gs_cie_abc * pcie, ref_cie_procs * pcprocs)
265
if ((code = dict_range3_param(mem, pdref, "RangeABC", &pcie->RangeABC)) < 0 ||
266
(code = dict_proc3_param(mem, pdref, "DecodeABC", &pcprocs->Decode.ABC)) < 0 ||
267
(code = dict_matrix3_param(mem, pdref, "MatrixABC", &pcie->MatrixABC)) < 0 ||
268
(code = cie_lmnp_param(mem, pdref, &pcie->common, pcprocs)) < 0
271
pcie->DecodeABC = DecodeABC_default;
275
/* Finish setting a CIE space (successful or not). */
277
cie_set_finish(i_ctx_t *i_ctx_p, gs_color_space * pcs,
278
const ref_cie_procs * pcprocs, int edepth, int code)
281
code = gs_setcolorspace(igs, pcs);
282
/* Delete the extra reference to the parameter tables. */
283
rc_decrement_only(pcs, "cie_set_finish");
285
ref_stack_pop_to(&e_stack, edepth);
288
istate->colorspace.procs.cie = *pcprocs;
290
return (ref_stack_count(&e_stack) == edepth ? 0 : o_push_estack);
293
/* Forward references */
294
static int cache_common(i_ctx_t *, gs_cie_common *, const ref_cie_procs *,
295
void *, gs_ref_memory_t *);
296
static int cache_abc_common(i_ctx_t *, gs_cie_abc *, const ref_cie_procs *,
297
void *, gs_ref_memory_t *);
299
static int cie_defg_finish(i_ctx_t *);
301
/* Install a CIEBasedDEFG color space.
 * NOTE(review): locals and the Table.n/m setup are reconstructed from a
 * truncated source — confirm against upstream. */
int
ciedefgspace(i_ctx_t *i_ctx_p, ref *CIEDict)
{
    int edepth = ref_stack_count(&e_stack);
    gs_memory_t *mem = gs_state_memory(igs);
    gs_ref_memory_t *imem = (gs_ref_memory_t *)mem;
    gs_color_space *pcs;
    ref_cie_procs procs;
    gs_cie_defg *pcie;
    int code;
    ref *ptref;

    if ((code = dict_find_string(CIEDict, "Table", &ptref)) <= 0)
        return (code < 0 ? code : gs_note_error(e_rangecheck));
    check_read_type(*ptref, t_array);
    if (r_size(ptref) != 5)     /* 4 dimensions + sub-table array */
        return_error(e_rangecheck);
    procs = istate->colorspace.procs.cie;
    code = gs_cspace_build_CIEDEFG(&pcs, NULL, mem);
    if (code < 0)
        return code;
    pcie = pcs->params.defg;
    /* cie_table_param requires the caller to set n and m:
       DEFG tables map 4 inputs to 3 outputs. */
    pcie->Table.n = 4;
    pcie->Table.m = 3;
    if ((code = dict_ranges_param(mem, CIEDict, "RangeDEFG", 4, pcie->RangeDEFG.ranges)) < 0 ||
        (code = dict_proc_array_param(mem, CIEDict, "DecodeDEFG", 4, &procs.PreDecode.DEFG)) < 0 ||
        (code = dict_ranges_param(mem, CIEDict, "RangeHIJK", 4, pcie->RangeHIJK.ranges)) < 0 ||
        (code = cie_table_param(ptref, &pcie->Table, mem)) < 0 ||
        (code = cie_abc_param(imemory, CIEDict, (gs_cie_abc *) pcie, &procs)) < 0 ||
        (code = cie_cache_joint(i_ctx_p, &istate->colorrendering.procs, (gs_cie_common *)pcie, igs)) < 0 ||	/* do this last */
        (code = cie_cache_push_finish(i_ctx_p, cie_defg_finish, imem, pcie)) < 0 ||
        (code = cie_prepare_cache4(i_ctx_p, &pcie->RangeDEFG,
                                   procs.PreDecode.DEFG.value.const_refs,
                                   &pcie->caches_defg.DecodeDEFG[0],
                                   pcie, imem, "Decode.DEFG")) < 0 ||
        (code = cache_abc_common(i_ctx_p, (gs_cie_abc *)pcie, &procs, pcie, imem)) < 0
        ) {
        /* fall through: cie_set_finish cleans up on error */
    }
    return cie_set_finish(i_ctx_p, pcs, &procs, edepth, code);
}
343
/* E-stack continuation: finish installing a CIEBasedDEFG space once all
 * caches have been loaded. */
static int
cie_defg_finish(i_ctx_t *i_ctx_p)
{
    os_ptr op = osp;
    gs_cie_defg *pcie = r_ptr(op, gs_cie_defg);

    /* The sampled caches now stand in for the PostScript procedures. */
    pcie->DecodeDEFG = DecodeDEFG_from_cache;
    pcie->DecodeABC = DecodeABC_from_cache;
    pcie->common.DecodeLMN = DecodeLMN_from_cache;
    gs_cie_defg_complete(pcie);
    pop(1);                     /* NOTE(review): removes the struct pushed by
                                   cie_cache_push_finish — confirm */
    return 0;
}
356
static int cie_def_finish(i_ctx_t *);
358
/* Install a CIEBasedDEF color space.
 * NOTE(review): locals and the Table.n/m setup are reconstructed from a
 * truncated source — confirm against upstream. */
int
ciedefspace(i_ctx_t *i_ctx_p, ref *CIEDict)
{
    int edepth = ref_stack_count(&e_stack);
    gs_memory_t *mem = gs_state_memory(igs);
    gs_ref_memory_t *imem = (gs_ref_memory_t *)mem;
    gs_color_space *pcs;
    ref_cie_procs procs;
    gs_cie_def *pcie;
    int code;
    ref *ptref;

    if ((code = dict_find_string(CIEDict, "Table", &ptref)) <= 0)
        return (code < 0 ? code : gs_note_error(e_rangecheck));
    check_read_type(*ptref, t_array);
    if (r_size(ptref) != 4)     /* 3 dimensions + sub-table array */
        return_error(e_rangecheck);
    procs = istate->colorspace.procs.cie;
    code = gs_cspace_build_CIEDEF(&pcs, NULL, mem);
    if (code < 0)
        return code;
    pcie = pcs->params.def;
    /* cie_table_param requires the caller to set n and m:
       DEF tables map 3 inputs to 3 outputs. */
    pcie->Table.n = 3;
    pcie->Table.m = 3;
    if ((code = dict_range3_param(mem, CIEDict, "RangeDEF", &pcie->RangeDEF)) < 0 ||
        (code = dict_proc3_param(mem, CIEDict, "DecodeDEF", &procs.PreDecode.DEF)) < 0 ||
        (code = dict_range3_param(mem, CIEDict, "RangeHIJ", &pcie->RangeHIJ)) < 0 ||
        (code = cie_table_param(ptref, &pcie->Table, mem)) < 0 ||
        (code = cie_abc_param(imemory, CIEDict, (gs_cie_abc *) pcie, &procs)) < 0 ||
        (code = cie_cache_joint(i_ctx_p, &istate->colorrendering.procs, (gs_cie_common *)pcie, igs)) < 0 ||	/* do this last */
        (code = cie_cache_push_finish(i_ctx_p, cie_def_finish, imem, pcie)) < 0 ||
        (code = cie_prepare_cache3(i_ctx_p, &pcie->RangeDEF,
                                   procs.PreDecode.DEF.value.const_refs,
                                   &pcie->caches_def.DecodeDEF[0],
                                   pcie, imem, "Decode.DEF")) < 0 ||
        (code = cache_abc_common(i_ctx_p, (gs_cie_abc *)pcie, &procs, pcie, imem)) < 0
        ) {
        /* fall through: cie_set_finish cleans up on error */
    }
    return cie_set_finish(i_ctx_p, pcs, &procs, edepth, code);
}
400
/* E-stack continuation: finish installing a CIEBasedDEF space once all
 * caches have been loaded. */
static int
cie_def_finish(i_ctx_t *i_ctx_p)
{
    os_ptr op = osp;
    gs_cie_def *pcie = r_ptr(op, gs_cie_def);

    /* The sampled caches now stand in for the PostScript procedures. */
    pcie->DecodeDEF = DecodeDEF_from_cache;
    pcie->DecodeABC = DecodeABC_from_cache;
    pcie->common.DecodeLMN = DecodeLMN_from_cache;
    gs_cie_def_complete(pcie);
    pop(1);                     /* NOTE(review): removes the struct pushed by
                                   cie_cache_push_finish — confirm */
    return 0;
}
413
static int cie_abc_finish(i_ctx_t *);
416
/* Install a CIEBasedABC color space.
 * NOTE(review): locals reconstructed from a truncated source — confirm. */
int
cieabcspace(i_ctx_t *i_ctx_p, ref *CIEDict)
{
    int edepth = ref_stack_count(&e_stack);
    gs_memory_t *mem = gs_state_memory(igs);
    gs_ref_memory_t *imem = (gs_ref_memory_t *)mem;
    gs_color_space *pcs;
    ref_cie_procs procs;
    gs_cie_abc *pcie;
    int code;

    push(1);                    /* Sacrificial */
    procs = istate->colorspace.procs.cie;
    code = gs_cspace_build_CIEABC(&pcs, NULL, mem);
    if (code < 0)
        return code;
    pcie = pcs->params.abc;
    code = cie_abc_param(imemory, CIEDict, pcie, &procs);
    if (code < 0 ||
        (code = cie_cache_joint(i_ctx_p, &istate->colorrendering.procs, (gs_cie_common *)pcie, igs)) < 0 ||	/* do this last */
        (code = cie_cache_push_finish(i_ctx_p, cie_abc_finish, imem, pcie)) < 0 ||
        (code = cache_abc_common(i_ctx_p, pcie, &procs, pcie, imem)) < 0
        ) {
        /* fall through: cie_set_finish cleans up on error */
    }
    return cie_set_finish(i_ctx_p, pcs, &procs, edepth, code);
}
443
/* E-stack continuation: finish installing a CIEBasedABC space once all
 * caches have been loaded. */
static int
cie_abc_finish(i_ctx_t *i_ctx_p)
{
    os_ptr op = osp;
    gs_cie_abc *pcie = r_ptr(op, gs_cie_abc);

    /* The sampled caches now stand in for the PostScript procedures. */
    pcie->DecodeABC = DecodeABC_from_cache;
    pcie->common.DecodeLMN = DecodeLMN_from_cache;
    gs_cie_abc_complete(pcie);
    pop(1);                     /* NOTE(review): removes the struct pushed by
                                   cie_cache_push_finish — confirm */
    return 0;
}
455
static int cie_a_finish(i_ctx_t *);
458
/* Install a CIEBasedA color space.
 * NOTE(review): locals reconstructed from a truncated source — confirm. */
int
cieaspace(i_ctx_t *i_ctx_p, ref *CIEdict)
{
    int edepth = ref_stack_count(&e_stack);
    gs_memory_t *mem = gs_state_memory(igs);
    gs_ref_memory_t *imem = (gs_ref_memory_t *)mem;
    gs_color_space *pcs;
    ref_cie_procs procs;
    gs_cie_a *pcie;
    int code;

    push(1);                    /* Sacrificial.  cie_a_finish does a pop... */
    procs = istate->colorspace.procs.cie;
    if ((code = dict_proc_param(CIEdict, "DecodeA", &procs.Decode.A, true)) < 0)
        return code;
    code = gs_cspace_build_CIEA(&pcs, NULL, mem);
    if (code < 0)
        return code;
    pcie = pcs->params.a;
    if ((code = dict_floats_param(imemory, CIEdict, "RangeA", 2, (float *)&pcie->RangeA, (const float *)&RangeA_default)) < 0 ||
        (code = dict_floats_param(imemory, CIEdict, "MatrixA", 3, (float *)&pcie->MatrixA, (const float *)&MatrixA_default)) < 0 ||
        (code = cie_lmnp_param(imemory, CIEdict, &pcie->common, &procs)) < 0 ||
        (code = cie_cache_joint(i_ctx_p, &istate->colorrendering.procs, (gs_cie_common *)pcie, igs)) < 0 ||	/* do this last */
        (code = cie_cache_push_finish(i_ctx_p, cie_a_finish, imem, pcie)) < 0 ||
        (code = cie_prepare_cache(i_ctx_p, &pcie->RangeA, &procs.Decode.A, &pcie->caches.DecodeA.floats, pcie, imem, "Decode.A")) < 0 ||
        (code = cache_common(i_ctx_p, &pcie->common, &procs, pcie, imem)) < 0
        ) {
        /* fall through: cie_set_finish cleans up on error */
    }
    /* The real DecodeA is installed from the cache by cie_a_finish. */
    pcie->DecodeA = DecodeA_default;
    return cie_set_finish(i_ctx_p, pcs, &procs, edepth, code);
}
490
/* E-stack continuation: finish installing a CIEBasedA space once all
 * caches have been loaded. */
static int
cie_a_finish(i_ctx_t *i_ctx_p)
{
    os_ptr op = osp;
    gs_cie_a *pcie = r_ptr(op, gs_cie_a);

    /* The sampled caches now stand in for the PostScript procedures. */
    pcie->DecodeA = DecodeA_from_cache;
    pcie->common.DecodeLMN = DecodeLMN_from_cache;
    gs_cie_a_complete(pcie);
    pop(1);                     /* NOTE(review): removes the struct pushed by
                                   cie_cache_push_finish — confirm */
    return 0;
}
502
/* Common cache code */
505
cache_abc_common(i_ctx_t *i_ctx_p, gs_cie_abc * pcie,
506
const ref_cie_procs * pcprocs,
507
void *container, gs_ref_memory_t * imem)
510
cie_prepare_cache3(i_ctx_p, &pcie->RangeABC,
511
pcprocs->Decode.ABC.value.const_refs,
512
pcie->caches.DecodeABC.caches, pcie, imem,
515
return (code < 0 ? code :
516
cache_common(i_ctx_p, &pcie->common, pcprocs, pcie, imem));
520
cache_common(i_ctx_t *i_ctx_p, gs_cie_common * pcie,
521
const ref_cie_procs * pcprocs,
522
void *container, gs_ref_memory_t * imem)
524
return cie_prepare_cache3(i_ctx_p, &pcie->RangeLMN,
525
pcprocs->DecodeLMN.value.const_refs,
526
&pcie->caches.DecodeLMN[0], container, imem,
530
/* ------ Internal routines ------ */
532
/* Prepare to cache the values for one or more procedures. */
533
static int cie_cache_finish1(i_ctx_t *);
534
static int cie_cache_finish(i_ctx_t *);
536
cie_prepare_cache(i_ctx_t *i_ctx_p, const gs_range * domain, const ref * proc,
537
cie_cache_floats * pcache, void *container,
538
gs_ref_memory_t * imem, client_name_t cname)
540
int space = imemory_space(imem);
541
gs_sample_loop_params_t lp;
544
gs_cie_cache_init(&pcache->params, &lp, domain, cname);
545
pcache->params.is_identity = r_size(proc) == 0;
548
make_real(ep + 9, lp.A);
549
make_int(ep + 8, lp.N);
550
make_real(ep + 7, lp.B);
552
r_clear_attrs(ep + 6, a_executable);
553
make_op_estack(ep + 5, zcvx);
554
make_op_estack(ep + 4, zfor_samples);
555
make_op_estack(ep + 3, cie_cache_finish);
558
* The caches are embedded in the middle of other
559
* structures, so we represent the pointer to the cache
560
* as a pointer to the container plus an offset.
562
make_int(ep + 2, (char *)pcache - (char *)container);
563
make_struct(ep + 1, space, container);
564
return o_push_estack;
566
/* Note that pc3 may be 0, indicating that there are only 3 caches to load. */
568
cie_prepare_caches_4(i_ctx_t *i_ctx_p, const gs_range * domains,
570
cie_cache_floats * pc0, cie_cache_floats * pc1,
571
cie_cache_floats * pc2, cie_cache_floats * pc3,
573
gs_ref_memory_t * imem, client_name_t cname)
575
cie_cache_floats *pcn[4];
578
pcn[0] = pc0, pcn[1] = pc1, pcn[2] = pc2;
583
for (i = 0; i < n && code >= 0; ++i)
584
code = cie_prepare_cache(i_ctx_p, domains + i, procs + i, pcn[i],
585
container, imem, cname);
589
/* Store the result of caching one procedure. */
591
cie_cache_finish_store(i_ctx_t *i_ctx_p, bool replicate)
594
cie_cache_floats *pcache;
598
/* See above for the container + offset representation of */
599
/* the pointer to the cache. */
600
pcache = (cie_cache_floats *) (r_ptr(esp - 1, char) + esp->value.intval);
602
pcache->params.is_identity = false; /* cache_set_linear computes this */
603
if_debug3('c', "[c]cache 0x%lx base=%g, factor=%g:\n",
604
(ulong) pcache, pcache->params.base, pcache->params.factor);
606
(code = float_params(op, gx_cie_cache_size, &pcache->values[0])) < 0
608
/* We might have underflowed the current stack block. */
609
/* Handle the parameters one-by-one. */
612
for (i = 0; i < gx_cie_cache_size; i++) {
613
code = float_param(ref_stack_index(&o_stack,
614
(replicate ? 0 : gx_cie_cache_size - 1 - i)),
621
if (gs_debug_c('c')) {
624
for (i = 0; i < gx_cie_cache_size; i += 4)
625
dlprintf5("[c] cache[%3d]=%g, %g, %g, %g\n", i,
626
pcache->values[i], pcache->values[i + 1],
627
pcache->values[i + 2], pcache->values[i + 3]);
630
ref_stack_pop(&o_stack, (replicate ? 1 : gx_cie_cache_size));
631
esp -= 2; /* pop pointer to cache */
635
/* Continuation: store the sampled values (one value per sample). */
static int
cie_cache_finish(i_ctx_t *i_ctx_p)
{
    return cie_cache_finish_store(i_ctx_p, false);
}
640
/* Continuation: replicate a single value across the whole cache. */
static int
cie_cache_finish1(i_ctx_t *i_ctx_p)
{
    return cie_cache_finish_store(i_ctx_p, true);
}
645
/* Push a finishing procedure on the e-stack. */
646
/* ptr will be the top element of the o-stack. */
648
cie_cache_push_finish(i_ctx_t *i_ctx_p, op_proc_t finish_proc,
649
gs_ref_memory_t * imem, void *data)
652
push_op_estack(finish_proc);
654
make_struct(esp, imemory_space(imem), data);
655
return o_push_estack;