Tokenizer

This commit is contained in:
2024-12-13 09:39:02 +02:00
commit 6863e34fbc
9 changed files with 2209 additions and 0 deletions

47
build.zig Normal file
View File

@@ -0,0 +1,47 @@
const std = @import("std");
/// Build script: compiles the C tokenizer (tokenizer.c) into a static
/// library with strict warning flags, and wires up a `zig build test`
/// step that runs the Zig unit tests (test_all.zig) linked against it.
pub fn build(b: *std.Build) void {
    // Standard CLI options: honors -Dtarget=... and -Doptimize=...
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // The tokenizer itself is plain C11; build it as a static library
    // with an aggressive -Werror warning set.
    const lib = b.addStaticLibrary(.{
        .name = "tokenizer",
        .optimize = optimize,
        .target = target,
    });
    lib.addCSourceFile(.{
        .file = b.path("tokenizer.c"),
        .flags = &[_][]const u8{
            "-std=c11",
            "-Wall",
            "-Wvla",
            "-Wextra",
            "-Werror",
            "-Wshadow",
            "-Wswitch",
            "-Walloca",
            "-Wformat=2",
            "-fno-common",
            "-Wconversion",
            "-Wswitch-enum",
            "-Wuninitialized",
            "-Wdouble-promotion",
            "-fstack-protector-all",
            "-Wimplicit-fallthrough",
            //"-D_FORTIFY_SOURCE=2", // consider when optimization flags are enabled
        },
    });
    // Headers (tokenizer.h) live next to build.zig.
    lib.addIncludePath(b.path("."));
    lib.linkLibC();

    // `zig build test` builds and runs the Zig tests linked against the C lib.
    const test_step = b.step("test", "Run unit tests");
    const test_exe = b.addTest(.{
        .root_source_file = b.path("test_all.zig"),
        // Fix: previously the test binary omitted `.target`, so it ignored
        // -Dtarget and was always built for the host even when the library
        // was cross-compiled. Pass the same target as the library.
        .target = target,
        .optimize = optimize,
    });
    test_exe.linkLibrary(lib);
    test_exe.addIncludePath(b.path("."));
    test_step.dependOn(&b.addRunArtifact(test_exe).step);
}