Archive Formats

compressionz includes support for ZIP and TAR archive formats, allowing you to bundle multiple files with their names and metadata.

Format   Compression          Use Case
ZIP      Deflate (built-in)   Cross-platform, Windows-friendly
TAR      None (external)      Unix, often combined with gzip/zstd

Extract a ZIP archive:

    const std = @import("std");
    const cz = @import("compressionz");

    const files = try cz.archive.extractZip(allocator, zip_data);
    defer {
        for (files) |f| {
            allocator.free(f.name);
            allocator.free(f.data);
        }
        allocator.free(files);
    }

    for (files) |file| {
        std.debug.print("{s}: {d} bytes\n", .{ file.name, file.data.len });
    }

Create a ZIP archive:

    const std = @import("std");
    const cz = @import("compressionz");

    const files = [_]cz.archive.FileEntry{
        .{ .name = "hello.txt", .data = "Hello, World!" },
        .{ .name = "data.json", .data = "{\"key\": \"value\"}" },
    };

    const zip_data = try cz.archive.createZip(allocator, &files);
    defer allocator.free(zip_data);

    // Write to file
    try std.fs.cwd().writeFile("output.zip", zip_data);

Extract a TAR archive:

    const files = try cz.archive.extractTar(allocator, tar_data);
    defer {
        for (files) |f| {
            allocator.free(f.name);
            allocator.free(f.data);
        }
        allocator.free(files);
    }

Create a TAR archive:

    const files = [_]cz.archive.FileEntry{
        .{ .name = "file1.txt", .data = "Content 1" },
        .{ .name = "file2.txt", .data = "Content 2" },
    };

    const tar_data = try cz.archive.createTar(allocator, &files);
    defer allocator.free(tar_data);

ExtractedFile represents a file extracted from an archive:

    pub const ExtractedFile = struct {
        name: []const u8,    // File path/name
        data: []const u8,    // File contents
        is_directory: bool,  // True if directory entry
    };

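A minimal sketch of writing extracted entries back to disk. The field names come from the struct above; the writeEntries helper, the destination directory handle, and the directory-creation strategy are assumptions, not part of the library:

    const std = @import("std");
    const cz = @import("compressionz");

    /// Write extracted entries under `dest_dir`, recreating the archive layout.
    fn writeEntries(dest_dir: std.fs.Dir, entries: []const cz.archive.ExtractedFile) !void {
        for (entries) |entry| {
            if (entry.is_directory) {
                // Directory entries carry no data; just recreate the path.
                try dest_dir.makePath(entry.name);
            } else {
                // Ensure the parent directory exists before writing the file.
                if (std.fs.path.dirname(entry.name)) |parent| {
                    try dest_dir.makePath(parent);
                }
                try dest_dir.writeFile(entry.name, entry.data);
            }
        }
    }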

FileEntry is the input type for creating archives:

    pub const FileEntry = struct {
        name: []const u8,  // File path/name
        data: []const u8,  // File contents
    };

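FileEntry holds plain slices, so the caller keeps ownership of the underlying memory. A minimal sketch of building an entry at runtime; allocator and record_count are assumed to be in scope, and the file name and format string are purely illustrative:

    const std = @import("std");
    const cz = @import("compressionz");

    // Build the entry data at runtime; FileEntry just references these slices.
    const report = try std.fmt.allocPrint(allocator, "processed {d} records\n", .{record_count});
    defer allocator.free(report);

    const entries = [_]cz.archive.FileEntry{
        .{ .name = "report.txt", .data = report },
    };

    const tar_data = try cz.archive.createTar(allocator, &entries);
    defer allocator.free(tar_data);
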
ZIP convenience functions:

    const cz = @import("compressionz");

    // Extract all files from ZIP data
    pub fn extractZip(allocator: Allocator, data: []const u8) ![]ExtractedFile

    // Create ZIP from file entries
    pub fn createZip(allocator: Allocator, files: []const FileEntry) ![]u8

For more control, use the Reader API:

    const cz = @import("compressionz");
    const std = @import("std");

    pub fn processZip(allocator: std.mem.Allocator, zip_data: []const u8) !void {
        var fbs = std.io.fixedBufferStream(zip_data);
        var reader = try cz.archive.zip.Reader(@TypeOf(&fbs)).init(allocator, &fbs);
        defer reader.deinit();

        while (try reader.next()) |entry| {
            std.debug.print("File: {s}\n", .{entry.name});
            std.debug.print(" Compressed size: {d}\n", .{entry.compressed_size});
            std.debug.print(" Uncompressed size: {d}\n", .{entry.uncompressed_size});
            std.debug.print(" Is directory: {}\n", .{entry.is_directory});

            if (!entry.is_directory) {
                const data = try entry.readAll(allocator);
                defer allocator.free(data);
                // Process file data...
            }
        }
    }

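The Reader also makes selective extraction straightforward. A hedged sketch: readOneFile is a hypothetical helper name, and it only uses the entry fields and readAll call shown in processZip above:

    const cz = @import("compressionz");
    const std = @import("std");

    /// Return the contents of a single named file, or null if it is not in the archive.
    pub fn readOneFile(allocator: std.mem.Allocator, zip_data: []const u8, wanted: []const u8) !?[]u8 {
        var fbs = std.io.fixedBufferStream(zip_data);
        var reader = try cz.archive.zip.Reader(@TypeOf(&fbs)).init(allocator, &fbs);
        defer reader.deinit();

        while (try reader.next()) |entry| {
            if (!entry.is_directory and std.mem.eql(u8, entry.name, wanted)) {
                // Caller owns the returned buffer.
                return try entry.readAll(allocator);
            }
        }
        return null;
    }
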
To build a ZIP archive incrementally, use the Writer API:

    const cz = @import("compressionz");
    const std = @import("std");

    pub fn createZipFile(allocator: std.mem.Allocator, output_path: []const u8) !void {
        const file = try std.fs.cwd().createFile(output_path, .{});
        defer file.close();

        var writer = try cz.archive.zip.Writer(@TypeOf(file)).init(allocator, file);
        defer writer.deinit();

        // Add files
        try writer.addFile("readme.txt", "This is a README file.");
        try writer.addFile("src/main.zig", "const std = @import(\"std\");");
        try writer.addFile("data/config.json", "{\"version\": 1}");

        // Finalize (writes central directory)
        try writer.finish();
    }

TAR convenience functions:

    const cz = @import("compressionz");

    // Extract all files from TAR data
    pub fn extractTar(allocator: Allocator, data: []const u8) ![]ExtractedFile

    // Create TAR from file entries
    pub fn createTar(allocator: Allocator, files: []const FileEntry) ![]u8

For streaming access, use the TAR Reader API:

    const cz = @import("compressionz");
    const std = @import("std");

    pub fn processTar(allocator: std.mem.Allocator, tar_data: []const u8) !void {
        var fbs = std.io.fixedBufferStream(tar_data);
        var reader = cz.archive.tar.Reader(@TypeOf(&fbs)).init(allocator, &fbs);
        defer reader.deinit();

        while (try reader.next()) |entry| {
            std.debug.print("File: {s}\n", .{entry.name});
            std.debug.print(" Size: {d}\n", .{entry.size});
            std.debug.print(" Type: {}\n", .{entry.file_type});

            if (!entry.file_type.isDirectory()) {
                const data = try entry.readAll(allocator);
                defer allocator.free(data);
                // Process file data...
            }
        }
    }

To build a TAR archive incrementally, use the Writer API:

    const cz = @import("compressionz");
    const std = @import("std");

    pub fn createTarFile(allocator: std.mem.Allocator, output_path: []const u8) !void {
        const file = try std.fs.cwd().createFile(output_path, .{});
        defer file.close();

        var writer = cz.archive.tar.Writer(@TypeOf(file)).init(file);

        // Add files
        try writer.addFile("file1.txt", "Content of file 1");
        try writer.addFile("file2.txt", "Content of file 2");

        // Finalize (writes end-of-archive markers)
        try writer.finish();
    }

TAR itself applies no compression, so it is typically combined with gzip or zstd:

    const cz = @import("compressionz");
    const std = @import("std");

    // Create .tar.gz
    pub fn createTarGz(allocator: std.mem.Allocator, files: []const cz.archive.FileEntry) ![]u8 {
        // Create TAR
        const tar_data = try cz.archive.createTar(allocator, files);
        defer allocator.free(tar_data);

        // Compress with Gzip
        return cz.compress(.gzip, tar_data, allocator);
    }

    // Extract .tar.gz
    pub fn extractTarGz(allocator: std.mem.Allocator, data: []const u8) ![]cz.archive.ExtractedFile {
        // Decompress
        const tar_data = try cz.decompress(.gzip, data, allocator);
        defer allocator.free(tar_data);

        // Extract TAR
        return cz.archive.extractTar(allocator, tar_data);
    }

    // Create .tar.zst
    pub fn createTarZst(allocator: std.mem.Allocator, files: []const cz.archive.FileEntry) ![]u8 {
        const tar_data = try cz.archive.createTar(allocator, files);
        defer allocator.free(tar_data);

        return cz.compressWithOptions(.zstd, tar_data, allocator, .{
            .level = .best,
        });
    }

    // Extract .tar.zst
    pub fn extractTarZst(allocator: std.mem.Allocator, data: []const u8) ![]cz.archive.ExtractedFile {
        const tar_data = try cz.decompress(.zstd, data, allocator);
        defer allocator.free(tar_data);

        return cz.archive.extractTar(allocator, tar_data);
    }

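As a usage sketch, the helpers above can write a .tar.gz straight to disk; this reuses the imports and the createTarGz function defined above, and the output path, entry contents, and in-scope allocator are illustrative assumptions:

    const entries = [_]cz.archive.FileEntry{
        .{ .name = "notes.txt", .data = "archived notes" },
    };

    const tgz = try createTarGz(allocator, &entries);
    defer allocator.free(tgz);

    try std.fs.cwd().writeFile("notes.tar.gz", tgz);
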
Example: back up a directory tree to a zstd-compressed TAR archive:

    const cz = @import("compressionz");
    const std = @import("std");

    pub fn createBackup(allocator: std.mem.Allocator, source_dir: []const u8) ![]u8 {
        var files = std.ArrayList(cz.archive.FileEntry).init(allocator);
        defer {
            // Free the duplicated names and file contents collected below.
            for (files.items) |f| {
                allocator.free(f.name);
                allocator.free(f.data);
            }
            files.deinit();
        }

        // Collect files from directory
        var dir = try std.fs.cwd().openDir(source_dir, .{ .iterate = true });
        defer dir.close();

        var walker = try dir.walk(allocator);
        defer walker.deinit();

        while (try walker.next()) |entry| {
            if (entry.kind == .file) {
                const data = try dir.readFileAlloc(allocator, entry.path, 100 * 1024 * 1024);
                try files.append(.{
                    .name = try allocator.dupe(u8, entry.path),
                    .data = data,
                });
            }
        }

        // Create compressed archive
        const tar = try cz.archive.createTar(allocator, files.items);
        defer allocator.free(tar);

        return cz.compressWithOptions(.zstd, tar, allocator, .{ .level = .best });
    }

Example: serving a ZIP download from an HTTP handler (Request, Response, getRequestedFiles, and allocator are placeholders for your web framework and surrounding code):

    pub fn handleDownload(request: *Request, response: *Response) !void {
        const files = try getRequestedFiles(request);

        const zip = try cz.archive.createZip(allocator, files);
        defer allocator.free(zip);

        response.headers.set("Content-Type", "application/zip");
        response.headers.set("Content-Disposition", "attachment; filename=\"files.zip\"");
        try response.send(zip);
    }

Handle archive errors explicitly:

    const files = cz.archive.extractZip(allocator, data) catch |err| switch (err) {
        error.InvalidData => {
            std.debug.print("Not a valid ZIP file\n", .{});
            return error.InvalidArchive;
        },
        error.OutOfMemory => {
            std.debug.print("Archive too large\n", .{});
            return error.ResourceExhausted;
        },
        else => return err,
    };

ZIP limitations:

  • No encryption support
  • No ZIP64 (large file) support
  • Deflate compression only

TAR limitations:

  • No extended attributes
  • No ACLs
  • POSIX ustar format only

For advanced archive features, consider dedicated libraries.