Diffstat (limited to 'src/3rdparty/libwebp/src/utils/huffman_utils.c')
-rw-r--r--  src/3rdparty/libwebp/src/utils/huffman_utils.c  97
1 file changed, 79 insertions, 18 deletions
diff --git a/src/3rdparty/libwebp/src/utils/huffman_utils.c b/src/3rdparty/libwebp/src/utils/huffman_utils.c
index 90c2fbf..cf73abd 100644
--- a/src/3rdparty/libwebp/src/utils/huffman_utils.c
+++ b/src/3rdparty/libwebp/src/utils/huffman_utils.c
@@ -177,21 +177,24 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
     if (num_open < 0) {
       return 0;
     }
-    if (root_table == NULL) continue;
     for (; count[len] > 0; --count[len]) {
       HuffmanCode code;
       if ((key & mask) != low) {
-        table += table_size;
+        if (root_table != NULL) table += table_size;
         table_bits = NextTableBitSize(count, len, root_bits);
         table_size = 1 << table_bits;
         total_size += table_size;
         low = key & mask;
-        root_table[low].bits = (uint8_t)(table_bits + root_bits);
-        root_table[low].value = (uint16_t)((table - root_table) - low);
+        if (root_table != NULL) {
+          root_table[low].bits = (uint8_t)(table_bits + root_bits);
+          root_table[low].value = (uint16_t)((table - root_table) - low);
+        }
+      }
+      if (root_table != NULL) {
+        code.bits = (uint8_t)(len - root_bits);
+        code.value = (uint16_t)sorted[symbol++];
+        ReplicateValue(&table[key >> root_bits], step, table_size, code);
       }
-      code.bits = (uint8_t)(len - root_bits);
-      code.value = (uint16_t)sorted[symbol++];
-      ReplicateValue(&table[key >> root_bits], step, table_size, code);
       key = GetNextKey(key, len);
     }
   }
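The hunk above is the heart of the fix: with the early 'continue' gone, a call with root_table == NULL still walks every second-level table, so total_size is counted exactly even though no entry is written. A minimal sketch of that size-only query through the public wrapper reworked in the next hunk (the helper name QueryHuffmanTableSize, kRootBits, and the include path are illustrative assumptions, not part of the patch):

#include "src/utils/huffman_utils.h"

// Hypothetical helper: returns the number of HuffmanCode entries needed for
// 'code_lengths', or 0 if the code-length distribution is invalid. Passing
// NULL keeps VP8LBuildHuffmanTable on its dry-run path, so nothing is written.
static int QueryHuffmanTableSize(const int code_lengths[],
                                 int code_lengths_size) {
  const int kRootBits = 8;  // illustrative root-table depth
  return VP8LBuildHuffmanTable(NULL, kRootBits, code_lengths,
                               code_lengths_size);
}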
@@ -211,25 +214,83 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
   ((1 << MAX_CACHE_BITS) + NUM_LITERAL_CODES + NUM_LENGTH_CODES)
 // Cut-off value for switching between heap and stack allocation.
 #define SORTED_SIZE_CUTOFF 512
-int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
+int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits,
                           const int code_lengths[], int code_lengths_size) {
-  int total_size;
+  const int total_size =
+      BuildHuffmanTable(NULL, root_bits, code_lengths, code_lengths_size, NULL);
   assert(code_lengths_size <= MAX_CODE_LENGTHS_SIZE);
-  if (root_table == NULL) {
-    total_size = BuildHuffmanTable(NULL, root_bits,
-                                   code_lengths, code_lengths_size, NULL);
-  } else if (code_lengths_size <= SORTED_SIZE_CUTOFF) {
+  if (total_size == 0 || root_table == NULL) return total_size;
+
+  if (root_table->curr_segment->curr_table + total_size >=
+      root_table->curr_segment->start + root_table->curr_segment->size) {
+    // If 'root_table' does not have enough memory, allocate a new segment.
+    // The available part of root_table->curr_segment is left unused because we
+    // need a contiguous buffer.
+    const int segment_size = root_table->curr_segment->size;
+    struct HuffmanTablesSegment* next =
+        (HuffmanTablesSegment*)WebPSafeMalloc(1, sizeof(*next));
+    if (next == NULL) return 0;
+    // Fill the new segment.
+    // We need at least 'total_size' but if that value is small, it is better to
+    // allocate a big chunk to prevent more allocations later. 'segment_size' is
+    // therefore chosen (any other arbitrary value could be chosen).
+    next->size = total_size > segment_size ? total_size : segment_size;
+    next->start =
+        (HuffmanCode*)WebPSafeMalloc(next->size, sizeof(*next->start));
+    if (next->start == NULL) {
+      WebPSafeFree(next);
+      return 0;
+    }
+    next->curr_table = next->start;
+    next->next = NULL;
+    // Point to the new segment.
+    root_table->curr_segment->next = next;
+    root_table->curr_segment = next;
+  }
+  if (code_lengths_size <= SORTED_SIZE_CUTOFF) {
     // use local stack-allocated array.
     uint16_t sorted[SORTED_SIZE_CUTOFF];
-    total_size = BuildHuffmanTable(root_table, root_bits,
-                                   code_lengths, code_lengths_size, sorted);
-  } else {  // rare case. Use heap allocation.
+    BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits,
+                      code_lengths, code_lengths_size, sorted);
+  } else {  // rare case. Use heap allocation.
     uint16_t* const sorted =
         (uint16_t*)WebPSafeMalloc(code_lengths_size, sizeof(*sorted));
     if (sorted == NULL) return 0;
-    total_size = BuildHuffmanTable(root_table, root_bits,
-                                   code_lengths, code_lengths_size, sorted);
+    BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits,
+                      code_lengths, code_lengths_size, sorted);
     WebPSafeFree(sorted);
   }
   return total_size;
 }
+
+int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables) {
+  // Have 'segment' point to the first segment for now, 'root'.
+  HuffmanTablesSegment* const root = &huffman_tables->root;
+  huffman_tables->curr_segment = root;
+  // Allocate root.
+  root->start = (HuffmanCode*)WebPSafeMalloc(size, sizeof(*root->start));
+  if (root->start == NULL) return 0;
+  root->curr_table = root->start;
+  root->next = NULL;
+  root->size = size;
+  return 1;
+}
+
+void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables) {
+  HuffmanTablesSegment *current, *next;
+  if (huffman_tables == NULL) return;
+  // Free the root node.
+  current = &huffman_tables->root;
+  next = current->next;
+  WebPSafeFree(current->start);
+  current->start = NULL;
+  current->next = NULL;
+  current = next;
+  // Free the following nodes.
+  while (current != NULL) {
+    next = current->next;
+    WebPSafeFree(current->start);
+    WebPSafeFree(current);
+    current = next;
+  }
+}
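For reference, a hypothetical end-to-end caller of the segmented-table API introduced above (not part of the patch: BuildOneTable, kRootBits and the initial segment size are illustrative, and HuffmanTables/HuffmanTablesSegment are assumed to be declared in huffman_utils.h as used throughout this file; the real caller is the VP8L decoder):

#include "src/utils/huffman_utils.h"

// Hypothetical usage: allocate a root segment, build one table into it, then
// release every segment. Returns the table size in HuffmanCode entries, or 0
// on error.
static int BuildOneTable(const int code_lengths[], int code_lengths_size) {
  HuffmanTables tables;
  const int kRootBits = 8;  // illustrative root-table depth
  int total_size;
  // Reserve an initial segment; VP8LBuildHuffmanTable appends a larger
  // segment on its own if this one cannot hold the table contiguously.
  if (!VP8LHuffmanTablesAllocate(1 << kRootBits, &tables)) return 0;
  total_size = VP8LBuildHuffmanTable(&tables, kRootBits, code_lengths,
                                     code_lengths_size);
  // On success the freshly built table starts at
  // tables.curr_segment->curr_table; it stays valid until deallocation.
  VP8LHuffmanTablesDeallocate(&tables);
  return total_size;
}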