
Compression API

This page documents all compression-related functions in compressionz. Each codec has its own module with tailored APIs.

const cz = @import("compressionz");
// Basic compression
const compressed = try cz.zstd.compress(data, allocator, .{});
defer allocator.free(compressed);
// With options
const compressed = try cz.zstd.compress(data, allocator, .{
.level = .best,
});
// With dictionary
const compressed = try cz.zstd.compressWithDict(data, dict, allocator, .{});
pub const CompressOptions = struct {
/// Compression level
level: Level = .default,
};
const cz = @import("compressionz");
// Basic compression
const compressed = try cz.lz4.frame.compress(data, allocator, .{});
defer allocator.free(compressed);
// With options
const compressed = try cz.lz4.frame.compress(data, allocator, .{
.level = .fast,
.content_checksum = true,
.block_checksum = false,
.content_size = data.len,
.block_size = .max64KB,
});
pub const CompressOptions = struct {
level: Level = .default,
content_checksum: bool = true,
block_checksum: bool = false,
content_size: ?usize = null,
block_size: BlockSize = .max64KB,
independent_blocks: bool = false,
};
const cz = @import("compressionz");
// No options - raw block format
const compressed = try cz.lz4.block.compress(data, allocator);
defer allocator.free(compressed);

LZ4 block format has no options and requires you to track the original size for decompression.

const cz = @import("compressionz");
// No options - simple API
const compressed = try cz.snappy.compress(data, allocator);
defer allocator.free(compressed);

Snappy has no compression options.

const cz = @import("compressionz");
// Basic compression
const compressed = try cz.gzip.compress(data, allocator, .{});
defer allocator.free(compressed);
// With level
const compressed = try cz.gzip.compress(data, allocator, .{
.level = .best,
});
pub const CompressOptions = struct {
level: Level = .default,
};
const cz = @import("compressionz");
// Zlib format (with header/trailer)
const compressed = try cz.zlib.compress(data, allocator, .{});
defer allocator.free(compressed);
// Raw Deflate (no header/trailer)
const deflate = try cz.zlib.compressDeflate(data, allocator, .{});
defer allocator.free(deflate);
// With dictionary
const compressed = try cz.zlib.compressWithDict(data, dict, allocator, .{});
pub const CompressOptions = struct {
level: Level = .default,
};
const cz = @import("compressionz");
// Basic compression
const compressed = try cz.brotli.compress(data, allocator, .{});
defer allocator.free(compressed);
// With level
const compressed = try cz.brotli.compress(data, allocator, .{
.level = .best,
});
pub const CompressOptions = struct {
level: Level = .best,
};

All codecs that support levels use the same enum:

pub const Level = enum {
fastest, // Maximum speed, lower ratio
fast, // Good speed, good ratio
default, // Balanced (recommended)
better, // Better ratio, slower
best, // Maximum ratio, slowest
};
| Level   | Compress  | Decompress | Ratio |
|---------|-----------|------------|-------|
| fastest | 12+ GB/s  | 11+ GB/s   | 99.8% |
| fast    | 12 GB/s   | 11+ GB/s   | 99.9% |
| default | 12 GB/s   | 11+ GB/s   | 99.9% |
| better  | 5 GB/s    | 11+ GB/s   | 99.9% |
| best    | 1.3 GB/s  | 12 GB/s    | 99.9% |
  • Use default for most cases
  • Use fast for real-time applications
  • Use best only for archival or static content

Some codecs support compressing into pre-allocated buffers:

var buffer: [65536]u8 = undefined;
const compressed = try cz.lz4.block.compressInto(data, &buffer);
var buffer: [65536]u8 = undefined;
const compressed = try cz.lz4.frame.compressInto(data, &buffer, .{});
var buffer: [65536]u8 = undefined;
const compressed = try cz.snappy.compressInto(data, &buffer);
// LZ4
const max_size = cz.lz4.block.maxCompressedSize(data.len);
const max_frame = cz.lz4.frame.maxCompressedSize(data.len);
// Snappy
const max_snappy = cz.snappy.maxCompressedSize(data.len);

For large data, use streaming APIs:

var comp = try cz.gzip.Compressor(@TypeOf(writer)).init(allocator, writer, .{});
defer comp.deinit();
try comp.writer().writeAll(data);
try comp.finish(); // MUST call to finalize
var comp = try cz.zstd.Compressor(@TypeOf(writer)).init(allocator, writer, .{});
defer comp.deinit();
try comp.writer().writeAll(data);
try comp.finish();
var comp = try cz.lz4.frame.Compressor(@TypeOf(writer)).init(allocator, writer, .{});
defer comp.deinit();
try comp.writer().writeAll(data);
try comp.finish();
var comp = try cz.brotli.Compressor(@TypeOf(writer)).init(allocator, writer, .{});
defer comp.deinit();
try comp.writer().writeAll(data);
try comp.finish();
// Zlib format
var comp = try cz.zlib.Compressor(@TypeOf(writer)).init(allocator, writer, .{});
defer comp.deinit();
try comp.writer().writeAll(data);
try comp.finish();
// Raw Deflate
var comp = try cz.zlib.DeflateCompressor(@TypeOf(writer)).init(allocator, writer, .{});

Dictionaries improve compression for small data with known patterns.

const dictionary = @embedFile("my_dictionary.bin");
// Compress
const compressed = try cz.zstd.compressWithDict(data, dictionary, allocator, .{});
defer allocator.free(compressed);
// Decompress (must use same dictionary)
const decompressed = try cz.zstd.decompressWithDict(compressed, dictionary, allocator, .{});
defer allocator.free(decompressed);
const dictionary = "common patterns...";
const compressed = try cz.zlib.compressWithDict(data, dictionary, allocator, .{});
const decompressed = try cz.zlib.decompressWithDict(compressed, dictionary, allocator, .{});

All compression functions return the same error set:

pub const Error = error{
OutOfMemory,
InvalidData,
OutputTooSmall,
UnsupportedFeature,
};

Example:

const compressed = cz.zstd.compress(data, allocator, .{}) catch |err| switch (err) {
error.OutOfMemory => {
std.debug.print("Failed to allocate memory\n", .{});
return err;
},
else => return err,
};

| Codec     | One-shot | Streaming | Zero-copy | Dictionary |
|-----------|----------|-----------|-----------|------------|
| zstd      | Yes      | Yes       | No        | Yes        |
| lz4.frame | Yes      | Yes       | Yes       | No         |
| lz4.block | Yes      | No        | Yes       | No         |
| snappy    | Yes      | No        | Yes       | No         |
| gzip      | Yes      | Yes       | No        | No         |
| zlib      | Yes      | Yes       | No        | Yes        |
| brotli    | Yes      | Yes       | No        | No         |