Skip to content

Decompression API

This page documents all decompression-related functions in compressionz. Each codec has its own module with tailored APIs.

const cz = @import("compressionz");
// Basic decompression
const decompressed = try cz.zstd.decompress(compressed, allocator, .{});
defer allocator.free(decompressed);
// With size limit (security)
const decompressed = try cz.zstd.decompress(compressed, allocator, .{
.max_output_size = 100 * 1024 * 1024, // 100 MB limit
});
// With dictionary
const decompressed = try cz.zstd.decompressWithDict(compressed, dictionary, allocator, .{});
/// Options accepted by the one-shot `decompress` functions.
pub const DecompressOptions = struct {
    /// Maximum output size in bytes (decompression bomb protection).
    /// `null` means no limit is enforced.
    max_output_size: ?usize = null,
};
const cz = @import("compressionz");
// Basic decompression
const decompressed = try cz.lz4.frame.decompress(compressed, allocator, .{});
defer allocator.free(decompressed);
// With size limit
const decompressed = try cz.lz4.frame.decompress(compressed, allocator, .{
.max_output_size = 100 * 1024 * 1024,
});

LZ4 block format requires the original size for decompression:

const cz = @import("compressionz");
// Compression
const compressed = try cz.lz4.block.compress(data, allocator);
const original_len = data.len; // Save this!
defer allocator.free(compressed);
// Decompression - REQUIRES original size
const decompressed = try cz.lz4.block.decompressWithSize(compressed, original_len, allocator);
defer allocator.free(decompressed);
const cz = @import("compressionz");
// No options - simple API
const decompressed = try cz.snappy.decompress(compressed, allocator);
defer allocator.free(decompressed);
// With size limit
const decompressed = try cz.snappy.decompressWithLimit(compressed, allocator, max_size);
const cz = @import("compressionz");
// Basic decompression
const decompressed = try cz.gzip.decompress(compressed, allocator, .{});
defer allocator.free(decompressed);
// With size limit
const decompressed = try cz.gzip.decompress(compressed, allocator, .{
.max_output_size = 100 * 1024 * 1024,
});
const cz = @import("compressionz");
// Zlib format
const decompressed = try cz.zlib.decompress(compressed, allocator, .{});
defer allocator.free(decompressed);
// Raw Deflate
const decompressed = try cz.zlib.decompressDeflate(compressed, allocator, .{});
// With dictionary
const decompressed = try cz.zlib.decompressWithDict(compressed, dictionary, allocator, .{});
const cz = @import("compressionz");
// Basic decompression
const decompressed = try cz.brotli.decompress(compressed, allocator, .{});
defer allocator.free(decompressed);
// With size limit
const decompressed = try cz.brotli.decompress(compressed, allocator, .{
.max_output_size = 100 * 1024 * 1024,
});

Malicious compressed data can expand to enormous sizes (e.g., a 1 KB “zip bomb” expanding to 1 TB). Use max_output_size to protect against this:

const safe = cz.gzip.decompress(untrusted_data, allocator, .{
.max_output_size = 100 * 1024 * 1024, // 100 MB limit
}) catch |err| switch (err) {
error.OutputTooLarge => {
// Data would exceed limit
return error.SuspiciousInput;
},
else => return err,
};
| Context       | Recommended Limit      |
|---------------|------------------------|
| User uploads  | 10-100 MB              |
| API requests  | 1-10 MB                |
| Config files  | 1 MB                   |
| Internal data | Based on expected size |

Some codecs support decompressing into pre-allocated buffers:

var buffer: [1024 * 1024]u8 = undefined; // 1 MB buffer
const decompressed = try cz.lz4.block.decompressInto(compressed, &buffer);
var buffer: [1024 * 1024]u8 = undefined;
const decompressed = try cz.lz4.frame.decompressInto(compressed, &buffer);
var buffer: [1024 * 1024]u8 = undefined;
const decompressed = try cz.snappy.decompressInto(compressed, &buffer);

For large data, use streaming APIs:

var decomp = try cz.gzip.Decompressor(@TypeOf(reader)).init(allocator, reader);
defer decomp.deinit();
const data = try decomp.reader().readAllAlloc(allocator, max_size);
var decomp = try cz.zstd.Decompressor(@TypeOf(reader)).init(allocator, reader);
defer decomp.deinit();
const data = try decomp.reader().readAllAlloc(allocator, max_size);
var decomp = try cz.lz4.frame.Decompressor(@TypeOf(reader)).init(allocator, reader);
defer decomp.deinit();
const data = try decomp.reader().readAllAlloc(allocator, max_size);
var decomp = try cz.brotli.Decompressor(@TypeOf(reader)).init(allocator, reader);
defer decomp.deinit();
const data = try decomp.reader().readAllAlloc(allocator, max_size);
// Zlib format
var decomp = try cz.zlib.Decompressor(@TypeOf(reader)).init(allocator, reader);
defer decomp.deinit();
// Raw Deflate
var decomp = try cz.zlib.DeflateDecompressor(@TypeOf(reader)).init(allocator, reader);

Automatically detect and decompress:

const std = @import("std"); // required: the function signature uses std.mem.Allocator
const cz = @import("compressionz");

/// Detect the compression format of `data` and decompress it with the
/// matching codec. Caller owns the returned slice and must free it with
/// `allocator`. Returns `error.UnknownFormat` when no codec is recognized.
pub fn decompressAuto(data: []const u8, allocator: std.mem.Allocator) ![]u8 {
    const format = cz.detect(data);
    switch (format) {
        .zstd => return cz.zstd.decompress(data, allocator, .{}),
        .gzip => return cz.gzip.decompress(data, allocator, .{}),
        .lz4 => return cz.lz4.frame.decompress(data, allocator, .{}),
        .zlib => return cz.zlib.decompress(data, allocator, .{}),
        .snappy => return cz.snappy.decompress(data, allocator),
        // NOTE(review): brotli and lz4.block are not handled here — confirm
        // cz.detect cannot return them (lz4.block has no magic bytes to detect).
        .unknown => return error.UnknownFormat,
    }
}

See Codec Detection for details.


Common decompression errors:

const result = cz.zstd.decompress(data, allocator, .{}) catch |err| switch (err) {
error.InvalidData => {
// Corrupted or not actually compressed with this codec
},
error.ChecksumMismatch => {
// Data integrity check failed
},
error.OutputTooLarge => {
// Exceeds max_output_size limit
},
error.UnexpectedEof => {
// Compressed data truncated
},
error.OutOfMemory => {
// Allocation failed
},
else => return err,
};

See Error Handling for complete error reference.


| Codec     | One-shot | Streaming | Zero-copy | Dictionary | Requires Size |
|-----------|----------|-----------|-----------|------------|---------------|
| zstd      | Yes      | Yes       | No        | Yes        | No            |
| lz4.frame | Yes      | Yes       | Yes       | No         | No            |
| lz4.block | Yes      | No        | Yes       | No         | Yes           |
| snappy    | Yes      | No        | Yes       | No         | No            |
| gzip      | Yes      | Yes       | No        | No         | No            |
| zlib      | Yes      | Yes       | No        | Yes        | No            |
| brotli    | Yes      | Yes       | No        | No         | No            |