/*
 * Copyright (C) 2005-2006 trog@uncon.org
 *
 * This code is based on the work of Alexander L. Roshal
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 */
/* Debug tracing is compile-time switched: when it is off, rar_dbgmsg()
 * collapses to an empty function instead of printf. */
#ifdef RAR_HIGH_DEBUG
#define rar_dbgmsg printf
#else
static void rar_dbgmsg(const char* fmt,...){}
#endif
#define MAX(a,b) (((a) > (b)) ? (a) : (b))

const unsigned int UNIT_SIZE=MAX(sizeof(struct ppm_context), sizeof(struct rar_mem_blk_tag));
const unsigned int FIXED_UNIT_SIZE=12;
const int INT_BITS=7, PERIOD_BITS=7, TOT_BITS=14;
const int INTERVAL=1 << 7, BIN_SCALE=1 << 14, MAX_FREQ=124;
const unsigned int TOP=1 << 24, BOT=1 << 15;
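/* UNIT_SIZE is the allocation granularity of the sub-allocator: one unit must
 * be able to hold either a PPM context or a free-block header. INTERVAL,
 * BIN_SCALE and TOT_BITS size the probability space of the binary contexts,
 * MAX_FREQ caps symbol frequencies before rescale(), and TOP/BOT are the
 * renormalisation thresholds of the range decoder further below. */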
/************* Start of Allocator code block ********************/
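/* The sub-allocator carves all PPM model memory out of a single cli_malloc()
 * block. Fixed-size context nodes come from the top of the heap (hi_unit,
 * moving down), variable-length state arrays from the bottom (lo_unit, moving
 * up), and freed runs of units are kept on free_list[], indexed through the
 * indx2units[]/units2indx[] size-class tables. */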
static void sub_allocator_init(sub_allocator_t *sub_alloc)
{
    sub_alloc->sub_allocator_size = 0;
}
static void sub_allocator_insert_node(sub_allocator_t *sub_alloc, void *p, int indx)
{
    ((struct rar_node *) p)->next = sub_alloc->free_list[indx].next;
    sub_alloc->free_list[indx].next = (struct rar_node *) p;
}
static void *sub_allocator_remove_node(sub_allocator_t *sub_alloc, int indx)
{
    struct rar_node *ret_val;

    ret_val = sub_alloc->free_list[indx].next;
    sub_alloc->free_list[indx].next = ret_val->next;
    return ret_val;
}
static int sub_allocator_u2b(int nu)
{
    /* convert a count of units to a size in bytes */
    return UNIT_SIZE * nu;
}

static rar_mem_blk_t* sub_allocator_mbptr(rar_mem_blk_t* base_ptr, int items)
{
    return ((rar_mem_blk_t*) (((unsigned char *)(base_ptr)) + sub_allocator_u2b(items) ));
}
static void sub_allocator_split_block(sub_allocator_t *sub_alloc, void *pv,
            int old_indx, int new_indx)
{
    int i, udiff;
    uint8_t *p;

    udiff = sub_alloc->indx2units[old_indx] - sub_alloc->indx2units[new_indx];
    p = ((uint8_t *) pv) + sub_allocator_u2b(sub_alloc->indx2units[new_indx]);
    if (sub_alloc->indx2units[i=sub_alloc->units2indx[udiff-1]] != udiff) {
        sub_allocator_insert_node(sub_alloc, p, --i);
        p += sub_allocator_u2b(i=sub_alloc->indx2units[i]);
        udiff -= i;
    }
    sub_allocator_insert_node(sub_alloc, p, sub_alloc->units2indx[udiff-1]);
}
static long sub_allocator_get_allocated_memory(sub_allocator_t *sub_alloc)
{
    return sub_alloc->sub_allocator_size;
}
static void sub_allocator_stop_sub_allocator(sub_allocator_t *sub_alloc)
{
    if (sub_alloc->sub_allocator_size) {
        sub_alloc->sub_allocator_size = 0;
        free(sub_alloc->heap_start);
    }
}
static int sub_allocator_start_sub_allocator(sub_allocator_t *sub_alloc, int sa_size)
{
    unsigned int t, alloc_size;

    t = sa_size << 20;
    if (sub_alloc->sub_allocator_size == t) {
        return TRUE;
    }
    sub_allocator_stop_sub_allocator(sub_alloc);
    alloc_size = t/FIXED_UNIT_SIZE*UNIT_SIZE+UNIT_SIZE;
#if defined(__sparc) || defined(sparc) || defined(__sparcv9)
    /* Allow for aligned access requirements */
    alloc_size += UNIT_SIZE;
#endif
    if ((sub_alloc->heap_start = (uint8_t *) cli_malloc(alloc_size)) == NULL) {
        cli_dbgmsg("sub_alloc start failed\n");
        return FALSE;
    }
    sub_alloc->heap_end = sub_alloc->heap_start + alloc_size - UNIT_SIZE;
    sub_alloc->sub_allocator_size = t;
    return TRUE;
}
static void sub_allocator_init_sub_allocator(sub_allocator_t *sub_alloc)
{
    int i, k;
    unsigned int size1, real_size1, size2, real_size2;

    memset(sub_alloc->free_list, 0, sizeof(sub_alloc->free_list));
    sub_alloc->ptext = sub_alloc->heap_start;

    size2 = FIXED_UNIT_SIZE*(sub_alloc->sub_allocator_size/8/FIXED_UNIT_SIZE*7);
    real_size2 = size2/FIXED_UNIT_SIZE*UNIT_SIZE;
    size1 = sub_alloc->sub_allocator_size - size2;
    real_size1 = size1/FIXED_UNIT_SIZE*UNIT_SIZE+size1%FIXED_UNIT_SIZE;
#if defined(__sparc) || defined(sparc) || defined(__sparcv9)
    /* Allow for aligned access requirements */
    if (size1%FIXED_UNIT_SIZE != 0) {
        real_size1 += UNIT_SIZE - size1%FIXED_UNIT_SIZE;
    }
#endif
    sub_alloc->hi_unit = sub_alloc->heap_start + sub_alloc->sub_allocator_size;
    sub_alloc->lo_unit = sub_alloc->units_start = sub_alloc->heap_start + real_size1;
    sub_alloc->fake_units_start = sub_alloc->heap_start + size1;
    sub_alloc->hi_unit = sub_alloc->lo_unit + real_size2;

    for (i=0,k=1; i < N1 ; i++, k+=1) {
        sub_alloc->indx2units[i] = k;
    }
    for (k++; i < N1+N2 ; i++, k+=2) {
        sub_alloc->indx2units[i] = k;
    }
    for (k++; i < N1+N2+N3 ; i++, k+=3) {
        sub_alloc->indx2units[i] = k;
    }
    for (k++; i < N1+N2+N3+N4 ; i++, k+=4) {
        sub_alloc->indx2units[i] = k;
    }

    for (sub_alloc->glue_count=k=i=0; k < 128; k++) {
        i += (sub_alloc->indx2units[i] < k+1);
        sub_alloc->units2indx[k] = i;
    }
}
static void rar_mem_blk_insertAt(rar_mem_blk_t *a, rar_mem_blk_t *p)
{
    a->next = (a->prev=p)->next;
    p->next = a->next->prev = a;
}
static void rar_mem_blk_remove(rar_mem_blk_t *a)
{
    a->prev->next = a->next;
    a->next->prev = a->prev;
}
static void sub_allocator_glue_free_blocks(sub_allocator_t *sub_alloc)
{
    rar_mem_blk_t s0, *p, *p1;
    int i, k, sz;

    if (sub_alloc->lo_unit != sub_alloc->hi_unit) {
        *sub_alloc->lo_unit = 0;
    }
    for (i=0, s0.next=s0.prev=&s0; i < N_INDEXES; i++) {
        while (sub_alloc->free_list[i].next) {
            p = (rar_mem_blk_t *) sub_allocator_remove_node(sub_alloc, i);
            rar_mem_blk_insertAt(p, &s0);
            p->stamp = 0xFFFF;
            p->nu = sub_alloc->indx2units[i];
        }
    }

    for (p=s0.next ; p != &s0 ; p=p->next) {
        while ((p1 = sub_allocator_mbptr(p,p->nu))->stamp == 0xFFFF &&
                ((int)p->nu)+p1->nu < 0x10000) {
            rar_mem_blk_remove(p1);
            p->nu += p1->nu;
        }
    }

    while ((p=s0.next) != &s0) {
        for (rar_mem_blk_remove(p), sz=p->nu; sz > 128; sz-=128, p=sub_allocator_mbptr(p, 128)) {
            sub_allocator_insert_node(sub_alloc, p, N_INDEXES-1);
        }
        if (sub_alloc->indx2units[i=sub_alloc->units2indx[sz-1]] != sz) {
            k = sz-sub_alloc->indx2units[--i];
            sub_allocator_insert_node(sub_alloc, sub_allocator_mbptr(p,sz-k), k-1);
        }
        sub_allocator_insert_node(sub_alloc, p, i);
    }
}
static void *sub_allocator_alloc_units_rare(sub_allocator_t *sub_alloc, int indx)
{
    int i, j;
    void *ret_val;

    if (!sub_alloc->glue_count) {
        sub_alloc->glue_count = 255;
        sub_allocator_glue_free_blocks(sub_alloc);
        if (sub_alloc->free_list[indx].next) {
            return sub_allocator_remove_node(sub_alloc, indx);
        }
    }
    i = indx;
    do {
        if (++i == N_INDEXES) {
            sub_alloc->glue_count--;
            i = sub_allocator_u2b(sub_alloc->indx2units[indx]);
            j = 12 * sub_alloc->indx2units[indx];
            if (sub_alloc->fake_units_start - sub_alloc->ptext > j) {
                sub_alloc->fake_units_start -= j;
                sub_alloc->units_start -= i;
                return sub_alloc->units_start;
            }
            return NULL;
        }
    } while ( !sub_alloc->free_list[i].next);
    ret_val = sub_allocator_remove_node(sub_alloc, i);
    sub_allocator_split_block(sub_alloc, ret_val, i, indx);
    return ret_val;
}
static void *sub_allocator_alloc_units(sub_allocator_t *sub_alloc, int nu)
{
    int indx;
    void *ret_val;

    indx = sub_alloc->units2indx[nu-1];
    if (sub_alloc->free_list[indx].next) {
        return sub_allocator_remove_node(sub_alloc, indx);
    }
    ret_val = sub_alloc->lo_unit;
    sub_alloc->lo_unit += sub_allocator_u2b(sub_alloc->indx2units[indx]);
    if (sub_alloc->lo_unit <= sub_alloc->hi_unit) {
        return ret_val;
    }
    sub_alloc->lo_unit -= sub_allocator_u2b(sub_alloc->indx2units[indx]);
    return sub_allocator_alloc_units_rare(sub_alloc, indx);
}
static void *sub_allocator_alloc_context(sub_allocator_t *sub_alloc)
{
    if (sub_alloc->hi_unit != sub_alloc->lo_unit) {
        return (sub_alloc->hi_unit -= UNIT_SIZE);
    }
    if (sub_alloc->free_list->next) {
        return sub_allocator_remove_node(sub_alloc, 0);
    }
    return sub_allocator_alloc_units_rare(sub_alloc, 0);
}
static void *sub_allocator_expand_units(sub_allocator_t *sub_alloc, void *old_ptr, int old_nu)
{
    int i0, i1;
    void *ptr;

    i0 = sub_alloc->units2indx[old_nu-1];
    i1 = sub_alloc->units2indx[old_nu];
    if (i0 == i1) {
        return old_ptr;
    }
    ptr = sub_allocator_alloc_units(sub_alloc, old_nu+1);
    if (ptr) {
        memcpy(ptr, old_ptr, sub_allocator_u2b(old_nu));
        sub_allocator_insert_node(sub_alloc, old_ptr, i0);
    }
    return ptr;
}
static void *sub_allocator_shrink_units(sub_allocator_t *sub_alloc, void *old_ptr,
            int old_nu, int new_nu)
{
    int i0, i1;
    void *ptr;

    i0 = sub_alloc->units2indx[old_nu-1];
    i1 = sub_alloc->units2indx[new_nu-1];
    if (i0 == i1) {
        return old_ptr;
    }
    if (sub_alloc->free_list[i1].next) {
        ptr = sub_allocator_remove_node(sub_alloc, i1);
        memcpy(ptr, old_ptr, sub_allocator_u2b(new_nu));
        sub_allocator_insert_node(sub_alloc, old_ptr, i0);
        return ptr;
    } else {
        sub_allocator_split_block(sub_alloc, old_ptr, i0, i1);
        return old_ptr;
    }
}
static void sub_allocator_free_units(sub_allocator_t *sub_alloc, void *ptr, int old_nu)
{
    sub_allocator_insert_node(sub_alloc, ptr, sub_alloc->units2indx[old_nu-1]);
}

/************** End of Allocator code block *********************/
/************** Start of Range Coder code block *********************/
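/* Carry-less range decoder. coder->range tracks the width of the current
 * interval and coder->code the bits read so far; ARI_DEC_NORMALISE() reloads
 * input bytes whenever the interval drops below BOT or the top bytes of low
 * and low+range agree (the TOP test). */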
static void range_coder_init_decoder(range_coder_t *coder, int fd,
                unpack_data_t *unpack_data)
{
    int i;

    coder->low = coder->code = 0;
    coder->range = (unsigned int) -1;

    for (i=0; i < 4 ; i++) {
        coder->code = (coder->code << 8) | rar_get_char(fd, unpack_data);
    }
}
static int coder_get_current_count(range_coder_t *coder)
{
    return (coder->code - coder->low) / (coder->range /= coder->scale);
}

static unsigned int coder_get_current_shift_count(range_coder_t *coder, unsigned int shift)
{
    return (coder->code - coder->low) / (coder->range >>= shift);
}
#define ARI_DEC_NORMALISE(fd, unpack_data, code, low, range) \
{ \
    while ((low^(low+range)) < TOP || (range < BOT && ((range=-low&(BOT-1)),1))) { \
        code = (code << 8) | rar_get_char(fd, unpack_data); \
        range <<= 8; \
        low <<= 8; \
    } \
}
static void coder_decode(range_coder_t *coder)
{
    coder->low += coder->range * coder->low_count;
    coder->range *= coder->high_count - coder->low_count;
}

/*************** End of Range Coder code block *********************/
static void see2_init(struct see2_context_tag *see2_cont, int init_val)
{
    see2_cont->summ = init_val << (see2_cont->shift=PERIOD_BITS-4);
    see2_cont->count = 4;
}
static unsigned int get_mean(struct see2_context_tag *see2_cont)
{
    unsigned int ret_val;

    ret_val = see2_cont->summ >> see2_cont->shift;
    see2_cont->summ -= ret_val;
    return ret_val + (ret_val == 0);
}
static void update(struct see2_context_tag *see2_cont)
{
    if (see2_cont->shift < PERIOD_BITS && --see2_cont->count == 0) {
        see2_cont->summ += see2_cont->summ;
        see2_cont->count = 3 << see2_cont->shift++;
    }
}
static int restart_model_rare(ppm_data_t *ppm_data)
{
    int i, k, m;
    static const uint16_t init_bin_esc[] = {
        0x3cdd, 0x1f3f, 0x59bf, 0x48f3, 0x64a1, 0x5abc, 0x6632, 0x6051
    };

    rar_dbgmsg("in restart_model_rare\n");
    memset(ppm_data->char_mask, 0, sizeof(ppm_data->char_mask));

    sub_allocator_init_sub_allocator(&ppm_data->sub_alloc);

    ppm_data->init_rl=-(ppm_data->max_order < 12 ? ppm_data->max_order:12)-1;
    ppm_data->min_context = ppm_data->max_context =
        (struct ppm_context *) sub_allocator_alloc_context(&ppm_data->sub_alloc);
    if(!ppm_data->min_context) {
        cli_errmsg("unrar: restart_model_rare: sub_allocator_alloc_context failed\n");
        return FALSE;
    }
    ppm_data->min_context->suffix = NULL;
    ppm_data->order_fall = ppm_data->max_order;
    ppm_data->min_context->con_ut.u.summ_freq = (ppm_data->min_context->num_stats=256)+1;
    ppm_data->found_state = ppm_data->min_context->con_ut.u.stats=
        (struct state_tag *)sub_allocator_alloc_units(&ppm_data->sub_alloc, 256/2);
    if(!ppm_data->found_state) {
        cli_errmsg("unrar: restart_model_rare: sub_allocator_alloc_units failed\n");
        return FALSE;
    }
    for (ppm_data->run_length = ppm_data->init_rl, ppm_data->prev_success=i=0; i < 256 ; i++) {
        ppm_data->min_context->con_ut.u.stats[i].symbol = i;
        ppm_data->min_context->con_ut.u.stats[i].freq = 1;
        ppm_data->min_context->con_ut.u.stats[i].successor = NULL;
    }

    for (i=0 ; i < 128 ; i++) {
        for (k=0 ; k < 8 ; k++) {
            for (m=0 ; m < 64 ; m+=8) {
                ppm_data->bin_summ[i][k+m]=BIN_SCALE-init_bin_esc[k]/(i+2);
            }
        }
    }
    for (i=0; i < 25; i++) {
        for (k=0 ; k < 16 ; k++) {
            see2_init(&ppm_data->see2cont[i][k], 5*i+10);
        }
    }
    return TRUE;
}
static int start_model_rare(ppm_data_t *ppm_data, int max_order)
{
    int i, k, m, step, ret;

    ppm_data->esc_count = 1;
    ppm_data->max_order = max_order;

    if (!restart_model_rare(ppm_data)) {
        cli_dbgmsg("unrar: start_model_rare: restart_model_rare failed\n");
        return FALSE;
    }

    ppm_data->ns2bsindx[0] = 2*0;
    ppm_data->ns2bsindx[1] = 2*1;
    memset(ppm_data->ns2bsindx+2, 2*2, 9);
    memset(ppm_data->ns2bsindx+11, 2*3, 256-11);

    for (i=0 ; i < 3; i++) {
        ppm_data->ns2indx[i] = i;
    }
    for (m=i, k=step=1; i < 256; i++) {
        ppm_data->ns2indx[i]=m;
        if (!--k) {
            k = ++step;
            m++;
        }
    }

    memset(ppm_data->hb2flag, 0, 0x40);
    memset(ppm_data->hb2flag+0x40, 0x08, 0x100-0x40);
    ppm_data->dummy_sse2cont.shift = PERIOD_BITS;
    return TRUE;
}
/* ****************** PPM Code ***************/
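/* The functions below implement the PPM model itself: contexts hold arrays of
 * (symbol, freq, successor) states, ppm_decode_symbol1()/ppm_decode_bin_symbol()
 * try the current context, and on an escape ppm_decode_symbol2() retries in a
 * shorter context with already-seen symbols masked out. update_model() then
 * extends the model with the decoded symbol. The scheme follows the PPMd
 * variant used by RAR (see the credit to Alexander L. Roshal above). */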
static void ppmd_swap(struct state_tag *p0, struct state_tag *p1)
{
    struct state_tag tmp;

    tmp = *p0;
    *p0 = *p1;
    *p1 = tmp;
}
static void rescale(ppm_data_t *ppm_data, struct ppm_context *context)
{
    int old_ns, i, adder, esc_freq, n0, n1;
    struct state_tag *p1, *p;

    rar_dbgmsg("in rescale\n");
    old_ns = context->num_stats;
    i = context->num_stats-1;

    for (p=ppm_data->found_state ; p != context->con_ut.u.stats ; p--) {
        ppmd_swap(&p[0], &p[-1]);
    }
    context->con_ut.u.stats->freq += 4;
    context->con_ut.u.summ_freq += 4;
    esc_freq = context->con_ut.u.summ_freq - p->freq;
    adder = (ppm_data->order_fall != 0);
    context->con_ut.u.summ_freq = (p->freq = (p->freq+adder) >> 1);
    do {
        esc_freq -= (++p)->freq;
        context->con_ut.u.summ_freq += (p->freq = (p->freq + adder) >> 1);
        if (p[0].freq > p[-1].freq) {
            struct state_tag tmp = *(p1=p);
            do {
                p1[0] = p1[-1];
            } while (--p1 != context->con_ut.u.stats && tmp.freq > p1[-1].freq);
            *p1 = tmp;
        }
    } while (--i);

    if (p->freq == 0) {
        do {
            i++;
        } while ((--p)->freq == 0);
        esc_freq += i;
        if ((context->num_stats -= i) == 1) {
            struct state_tag tmp = *context->con_ut.u.stats;
            do {
                tmp.freq -= (tmp.freq >> 1);
                esc_freq >>= 1;
            } while (esc_freq > 1);
            sub_allocator_free_units(&ppm_data->sub_alloc,
                    context->con_ut.u.stats, (old_ns+1)>>1);
            *(ppm_data->found_state=&context->con_ut.one_state)=tmp;
            return;
        }
    }
    context->con_ut.u.summ_freq += (esc_freq -= (esc_freq >> 1));
    n0 = (old_ns+1) >> 1;
    n1 = (context->num_stats+1) >> 1;
    if (n0 != n1) {
        context->con_ut.u.stats = (struct state_tag *) sub_allocator_shrink_units(&ppm_data->sub_alloc,
                context->con_ut.u.stats, n0, n1);
    }
    ppm_data->found_state = context->con_ut.u.stats;
}
static struct ppm_context *create_child(ppm_data_t *ppm_data, struct ppm_context *context,
            struct state_tag *pstats, struct state_tag *first_state)
{
    struct ppm_context *pc;

    rar_dbgmsg("in create_child\n");
    pc = (struct ppm_context *) sub_allocator_alloc_context(&ppm_data->sub_alloc);
    if (pc) {
        pc->num_stats = 1;
        pc->con_ut.one_state = *first_state;
        pc->suffix = context;
        pstats->successor = pc;
    }
    return pc;
}
static struct ppm_context *create_successors(ppm_data_t *ppm_data,
        int skip, struct state_tag *p1)
{
    struct state_tag up_state;
    struct ppm_context *pc, *up_branch;
    struct state_tag *p, *ps[MAX_O], **pps;
    unsigned int cf, s0;

    rar_dbgmsg("in create_successors\n");
    pc = ppm_data->min_context;
    up_branch = ppm_data->found_state->successor;
    pps = ps;

    if (!skip) {
        *pps++ = ppm_data->found_state;
    }
    do {
        pc = pc->suffix;
        if (p1) {
            p = p1;
            p1 = NULL;
        } else if (pc->num_stats != 1) {
            if ((p=pc->con_ut.u.stats)->symbol != ppm_data->found_state->symbol) {
                do {
                    p++;
                } while (p->symbol != ppm_data->found_state->symbol);
            }
        } else {
            p = &(pc->con_ut.one_state);
        }
        if (p->successor != up_branch) {
            pc = p->successor;
            break;
        }
        *pps++ = p;
    } while (pc->suffix);

    if (pps == ps) {
        return pc;
    }
    up_state.symbol= *(uint8_t *) up_branch;
    up_state.successor = (struct ppm_context *) (((uint8_t *) up_branch)+1);
    if (pc->num_stats != 1) {
        if ((uint8_t *) pc <= ppm_data->sub_alloc.ptext) {
            return NULL;
        }
        if ((p=pc->con_ut.u.stats)->symbol != up_state.symbol) {
            do {
                p++;
            } while (p->symbol != up_state.symbol);
        }
        cf = p->freq - 1;
        s0 = pc->con_ut.u.summ_freq - pc->num_stats - cf;
        up_state.freq = 1 + ((2*cf <= s0)?(5*cf > s0):((2*cf+3*s0-1)/(2*s0)));
    } else {
        up_state.freq = pc->con_ut.one_state.freq;
    }

    do {
        pc = create_child(ppm_data, pc, *--pps, &up_state);
        if (!pc) {
            cli_dbgmsg("create_child failed\n");
            return NULL;
        }
    } while (pps != ps);
    return pc;
}
static int update_model(ppm_data_t *ppm_data)
{
    struct state_tag fs, *p;
    struct ppm_context *pc, *successor;
    unsigned int ns1, ns, cf, sf, s0;

    rar_dbgmsg("in update_model\n");
    fs = *ppm_data->found_state;
    p = NULL;

    if (fs.freq < MAX_FREQ/4 && (pc=ppm_data->min_context->suffix) != NULL) {
        if (pc->num_stats != 1) {
            if ((p=pc->con_ut.u.stats)->symbol != fs.symbol) {
                do {
                    p++;
                } while (p->symbol != fs.symbol);
                if (p[0].freq >= p[-1].freq) {
                    ppmd_swap(&p[0], &p[-1]);
                    p--;
                }
            }
            if (p->freq < MAX_FREQ-9) {
                p->freq += 2;
                pc->con_ut.u.summ_freq += 2;
            }
        } else {
            p = &(pc->con_ut.one_state);
            p->freq += (p->freq < 32);
        }
    }
    if (!ppm_data->order_fall) {
        ppm_data->min_context = ppm_data->max_context =
            ppm_data->found_state->successor = create_successors(ppm_data, TRUE, p);
        if (!ppm_data->min_context) {
            goto restart_model;
        }
        return TRUE;
    }
    *ppm_data->sub_alloc.ptext++ = fs.symbol;
    successor = (struct ppm_context *) ppm_data->sub_alloc.ptext;
    if (ppm_data->sub_alloc.ptext >= ppm_data->sub_alloc.fake_units_start) {
        goto restart_model;
    }
    if (fs.successor) {
        if ((uint8_t *)fs.successor <= ppm_data->sub_alloc.ptext &&
                (fs.successor = create_successors(ppm_data, FALSE, p)) == NULL) {
            goto restart_model;
        }
        if (!--ppm_data->order_fall) {
            successor = fs.successor;
            ppm_data->sub_alloc.ptext -= (ppm_data->max_context != ppm_data->min_context);
        }
    } else {
        ppm_data->found_state->successor = successor;
        fs.successor = ppm_data->min_context;
    }

    s0 = ppm_data->min_context->con_ut.u.summ_freq-(ns=ppm_data->min_context->num_stats)-(fs.freq-1);
    for (pc=ppm_data->max_context; pc != ppm_data->min_context ; pc=pc->suffix) {
        if ((ns1=pc->num_stats) != 1) {
            if ((ns1 & 1) == 0) {
                pc->con_ut.u.stats = (struct state_tag *)
                    sub_allocator_expand_units(&ppm_data->sub_alloc,
                            pc->con_ut.u.stats, ns1>>1);
                if (!pc->con_ut.u.stats) {
                    goto restart_model;
                }
            }
            pc->con_ut.u.summ_freq += (2*ns1 < ns)+2*((4*ns1 <= ns) & (pc->con_ut.u.summ_freq <= 8*ns1));
        } else {
            p = (struct state_tag *) sub_allocator_alloc_units(&ppm_data->sub_alloc, 1);
            if (!p) {
                goto restart_model;
            }
            *p = pc->con_ut.one_state;
            pc->con_ut.u.stats = p;
            if (p->freq < MAX_FREQ/4-1) {
                p->freq += p->freq;
            } else {
                p->freq = MAX_FREQ - 4;
            }
            pc->con_ut.u.summ_freq = p->freq + ppm_data->init_esc + (ns > 3);
        }
        cf = 2*fs.freq*(pc->con_ut.u.summ_freq+6);
        sf = s0 + pc->con_ut.u.summ_freq;
        if (cf < 6*sf) {
            cf = 1 + (cf > sf) + (cf >= 4*sf);
            pc->con_ut.u.summ_freq += 3;
        } else {
            cf = 4 + (cf >= 9*sf) + (cf >= 12*sf) + (cf >= 15*sf);
            pc->con_ut.u.summ_freq += cf;
        }
        p = pc->con_ut.u.stats + ns1;
        p->successor = successor;
        p->symbol = fs.symbol;
        p->freq = cf;
        pc->num_stats = ++ns1;
    }
    ppm_data->max_context = ppm_data->min_context = fs.successor;
    return TRUE;

restart_model:
    if (!restart_model_rare(ppm_data)) {
        cli_dbgmsg("unrar: update_model: restart_model_rare: failed\n");
        return FALSE;
    }
    ppm_data->esc_count = 0;
    return TRUE;
}
static void update1(ppm_data_t *ppm_data, struct state_tag *p, struct ppm_context *context)
{
    rar_dbgmsg("in update1\n");
    (ppm_data->found_state=p)->freq += 4;
    context->con_ut.u.summ_freq += 4;
    if (p[0].freq > p[-1].freq) {
        ppmd_swap(&p[0], &p[-1]);
        ppm_data->found_state = --p;
        if (p->freq > MAX_FREQ) {
            rescale(ppm_data, context);
        }
    }
}
static int ppm_decode_symbol1(ppm_data_t *ppm_data, struct ppm_context *context)
{
    struct state_tag *p;
    int i, hi_cnt, count;

    rar_dbgmsg("in ppm_decode_symbol1\n");
    ppm_data->coder.scale = context->con_ut.u.summ_freq;
    p = context->con_ut.u.stats;
    count = coder_get_current_count(&ppm_data->coder);
    if (count >= ppm_data->coder.scale) {
        return FALSE;
    }
    if (count < (hi_cnt = p->freq)) {
        ppm_data->prev_success = (2 * (ppm_data->coder.high_count=hi_cnt) >
                        ppm_data->coder.scale);
        ppm_data->run_length += ppm_data->prev_success;
        (ppm_data->found_state=p)->freq=(hi_cnt += 4);
        context->con_ut.u.summ_freq += 4;
        if (hi_cnt > MAX_FREQ) {
            rescale(ppm_data, context);
        }
        ppm_data->coder.low_count = 0;
        return TRUE;
    } else if (ppm_data->found_state == NULL) {
        return FALSE;
    }
    ppm_data->prev_success = 0;
    i = context->num_stats-1;
    while ((hi_cnt += (++p)->freq) <= count) {
        if (--i == 0) {
            ppm_data->hi_bits_flag = ppm_data->hb2flag[ppm_data->found_state->symbol];
            ppm_data->coder.low_count = hi_cnt;
            ppm_data->char_mask[p->symbol] = ppm_data->esc_count;
            i = (ppm_data->num_masked=context->num_stats) - 1;
            ppm_data->found_state = NULL;
            do {
                ppm_data->char_mask[(--p)->symbol] = ppm_data->esc_count;
            } while (--i);
            ppm_data->coder.high_count = ppm_data->coder.scale;
            return TRUE;
        }
    }
    ppm_data->coder.low_count = (ppm_data->coder.high_count = hi_cnt) - p->freq;
    update1(ppm_data, p, context);
    return TRUE;
}
static const uint8_t ExpEscape[16]={ 25,14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2 };
#define GET_MEAN(SUMM,SHIFT,ROUND) ((SUMM+(1 << (SHIFT-ROUND))) >> (SHIFT))
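/* GET_MEAN() is the rounded adaptation step applied to the bin_summ[] counters
 * below: on each binary-context decode the estimate moves by roughly
 * SUMM/2^SHIFT towards the observed outcome, and ExpEscape[] seeds init_esc
 * from the remaining escape probability after a miss. */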
static void ppm_decode_bin_symbol(ppm_data_t *ppm_data, struct ppm_context *context)
{
    struct state_tag *rs;
    uint16_t *bs;

    rar_dbgmsg("in ppm_decode_bin_symbol\n");

    rs = &context->con_ut.one_state;

    ppm_data->hi_bits_flag = ppm_data->hb2flag[ppm_data->found_state->symbol];
    bs = &ppm_data->bin_summ[rs->freq-1][ppm_data->prev_success +
        ppm_data->ns2bsindx[context->suffix->num_stats-1] +
        ppm_data->hi_bits_flag+2*ppm_data->hb2flag[rs->symbol] +
        ((ppm_data->run_length >> 26) & 0x20)];
    if (coder_get_current_shift_count(&ppm_data->coder, TOT_BITS) < *bs) {
        ppm_data->found_state = rs;
        rs->freq += (rs->freq < 128);
        ppm_data->coder.low_count = 0;
        ppm_data->coder.high_count = *bs;
        *bs = (uint16_t) (*bs + INTERVAL - GET_MEAN(*bs, PERIOD_BITS, 2));
        ppm_data->prev_success = 1;
        ppm_data->run_length++;
    } else {
        ppm_data->coder.low_count = *bs;
        *bs = (uint16_t) (*bs - GET_MEAN(*bs, PERIOD_BITS, 2));
        ppm_data->coder.high_count = BIN_SCALE;
        ppm_data->init_esc = ExpEscape[*bs >> 10];
        ppm_data->num_masked = 1;
        ppm_data->char_mask[rs->symbol] = ppm_data->esc_count;
        ppm_data->prev_success = 0;
        ppm_data->found_state = NULL;
    }
}
static void update2(ppm_data_t *ppm_data, struct state_tag *p, struct ppm_context *context)
{
    rar_dbgmsg("in update2\n");
    (ppm_data->found_state = p)->freq += 4;
    context->con_ut.u.summ_freq += 4;
    if (p->freq > MAX_FREQ) {
        rescale(ppm_data, context);
    }
    ppm_data->esc_count++;
    ppm_data->run_length = ppm_data->init_rl;
}
static struct see2_context_tag *make_esc_freq(ppm_data_t *ppm_data,
        struct ppm_context *context, int diff)
{
    struct see2_context_tag *psee2c;

    if (context->num_stats != 256) {
        psee2c = ppm_data->see2cont[ppm_data->ns2indx[diff-1]] +
            (diff < context->suffix->num_stats-context->num_stats) +
            2 * (context->con_ut.u.summ_freq < 11*context->num_stats)+4*
            (ppm_data->num_masked > diff) + ppm_data->hi_bits_flag;
        ppm_data->coder.scale = get_mean(psee2c);
    } else {
        psee2c = &ppm_data->dummy_sse2cont;
        ppm_data->coder.scale = 1;
    }
    return psee2c;
}
static int ppm_decode_symbol2(ppm_data_t *ppm_data, struct ppm_context *context)
{
    int count, hi_cnt, i;
    struct see2_context_tag *psee2c;
    struct state_tag *ps[256], **pps, *p;

    rar_dbgmsg("in ppm_decode_symbol2\n");
    i = context->num_stats - ppm_data->num_masked;
    psee2c = make_esc_freq(ppm_data, context, i);
    pps = ps;
    p = context->con_ut.u.stats - 1;
    hi_cnt = 0;

    do {
        do {
            p++;
        } while (ppm_data->char_mask[p->symbol] == ppm_data->esc_count);
        hi_cnt += p->freq;
        *pps++ = p;
    } while (--i);

    ppm_data->coder.scale += hi_cnt;
    count = coder_get_current_count(&ppm_data->coder);
    if (count >= ppm_data->coder.scale) {
        return FALSE;
    }
    p = *(pps=ps);
    if (count < hi_cnt) {
        hi_cnt = 0;
        while ((hi_cnt += p->freq) <= count) {
            p = *++pps;
        }
        ppm_data->coder.low_count = (ppm_data->coder.high_count=hi_cnt) - p->freq;
        update(psee2c);
        update2(ppm_data, p, context);
    } else {
        ppm_data->coder.low_count = hi_cnt;
        ppm_data->coder.high_count = ppm_data->coder.scale;
        i = context->num_stats - ppm_data->num_masked;
        pps--;
        do {
            ppm_data->char_mask[(*++pps)->symbol] = ppm_data->esc_count;
        } while (--i);
        psee2c->summ += ppm_data->coder.scale;
        ppm_data->num_masked = context->num_stats;
    }
    return TRUE;
}
static void clear_mask(ppm_data_t *ppm_data)
{
    ppm_data->esc_count = 1;
    memset(ppm_data->char_mask, 0, sizeof(ppm_data->char_mask));
}
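/* Public entry points. ppm_constructor()/ppm_destructor() manage the model's
 * lifetime, ppm_decode_init() reads the PPM header bytes of a RAR block and
 * (re)builds the model, and ppm_decode_char() returns the next decoded byte
 * or -1 on error. */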
void ppm_constructor(ppm_data_t *ppm_data)
{
    sub_allocator_init(&ppm_data->sub_alloc);
    ppm_data->min_context = NULL;
    ppm_data->max_context = NULL;
}
void ppm_destructor(ppm_data_t *ppm_data)
{
    sub_allocator_stop_sub_allocator(&ppm_data->sub_alloc);
}
int ppm_decode_init(ppm_data_t *ppm_data, int fd, unpack_data_t *unpack_data, int *EscChar)
{
    int max_order, Reset, MaxMB;

    max_order = rar_get_char(fd, unpack_data);
    rar_dbgmsg("ppm_decode_init max_order=%d\n", max_order);
    Reset = (max_order & 0x20) ? 1 : 0;
    rar_dbgmsg("ppm_decode_init Reset=%d\n", Reset);
    if (Reset) {
        MaxMB = rar_get_char(fd, unpack_data);
        rar_dbgmsg("ppm_decode_init MaxMB=%d\n", MaxMB);
    } else {
        if (sub_allocator_get_allocated_memory(&ppm_data->sub_alloc) == 0) {
            return FALSE;
        }
    }
    if (max_order & 0x40) {
        *EscChar = rar_get_char(fd, unpack_data);
        rar_dbgmsg("ppm_decode_init EscChar=%d\n", *EscChar);
    }
    range_coder_init_decoder(&ppm_data->coder, fd, unpack_data);
    if (Reset) {
        max_order = (max_order & 0x1f) + 1;
        if (max_order > 16) {
            max_order = 16 + (max_order - 16) * 3;
        }
        if (max_order == 1) {
            sub_allocator_stop_sub_allocator(&ppm_data->sub_alloc);
            return FALSE;
        }
        if(!sub_allocator_start_sub_allocator(&ppm_data->sub_alloc, MaxMB+1)) {
            sub_allocator_stop_sub_allocator(&ppm_data->sub_alloc);
            return FALSE;
        }
        if (!start_model_rare(ppm_data, max_order)) {
            sub_allocator_stop_sub_allocator(&ppm_data->sub_alloc);
            return FALSE;
        }
    }
    rar_dbgmsg("ppm_decode_init done: %d\n", ppm_data->min_context != NULL);
    return (ppm_data->min_context != NULL);
}
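/* Decode one byte: try the current (longest) context first; on an escape,
 * walk down the suffix chain with the masked-symbol decoder until a symbol is
 * found, then let update_model() grow the model. Pointers coming out of the
 * model are bounds-checked against the sub-allocator heap before use. */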
int ppm_decode_char(ppm_data_t *ppm_data, int fd, unpack_data_t *unpack_data)
{
    int symbol;

    if ((uint8_t *) ppm_data->min_context <= ppm_data->sub_alloc.ptext ||
            (uint8_t *)ppm_data->min_context > ppm_data->sub_alloc.heap_end) {
        return -1;
    }
    if (ppm_data->min_context->num_stats != 1) {
        if ((uint8_t *) ppm_data->min_context->con_ut.u.stats <= ppm_data->sub_alloc.ptext ||
                (uint8_t *) ppm_data->min_context->con_ut.u.stats > ppm_data->sub_alloc.heap_end) {
            return -1;
        }
        if (!ppm_decode_symbol1(ppm_data, ppm_data->min_context)) {
            return -1;
        }
    } else {
        ppm_decode_bin_symbol(ppm_data, ppm_data->min_context);
    }
    coder_decode(&ppm_data->coder);

    while (!ppm_data->found_state) {
        ARI_DEC_NORMALISE(fd, unpack_data, ppm_data->coder.code,
                ppm_data->coder.low, ppm_data->coder.range);
        do {
            ppm_data->order_fall++;
            ppm_data->min_context = ppm_data->min_context->suffix;
            if ((uint8_t *)ppm_data->min_context <= ppm_data->sub_alloc.ptext ||
                    (uint8_t *)ppm_data->min_context >
                    ppm_data->sub_alloc.heap_end) {
                return -1;
            }
        } while (ppm_data->min_context->num_stats == ppm_data->num_masked);
        if (!ppm_decode_symbol2(ppm_data, ppm_data->min_context)) {
            return -1;
        }
        coder_decode(&ppm_data->coder);
    }

    symbol = ppm_data->found_state->symbol;
    if (!ppm_data->order_fall && (uint8_t *) ppm_data->found_state->successor > ppm_data->sub_alloc.ptext) {
        ppm_data->min_context = ppm_data->max_context = ppm_data->found_state->successor;
    } else {
        if(!update_model(ppm_data)) {
            cli_dbgmsg("unrar: ppm_decode_char: update_model failed\n");
            return -1;
        }
        if (ppm_data->esc_count == 0) {
            clear_mask(ppm_data);
        }
    }
    ARI_DEC_NORMALISE(fd, unpack_data, ppm_data->coder.code, ppm_data->coder.low,
            ppm_data->coder.range);
    return symbol;
}