diff --git a/dev/bin/pginit b/dev/bin/pginit index 7505ce4..cef7ec2 100755 --- a/dev/bin/pginit +++ b/dev/bin/pginit @@ -23,6 +23,9 @@ postgres_conf= POSITIONAL_ARGS=() +rootdir=${PRJ_ROOT:-$(git rev-parse --show-toplevel)} +outdir="$rootdir/out" + while [[ $# -gt 0 ]]; do case $1 in -h) @@ -49,6 +52,10 @@ while [[ $# -gt 0 ]]; do postgres_conf="$2" shift 2 ;; + --proj) + outdir="$2" + shift 2 + ;; --* | -*) echo "Unknown option $1" echo "$USAGE" @@ -70,9 +77,6 @@ user=${3:-"postgres"} # - configurable init scipts # - custom postgresql.conf -rootdir=${PRJ_ROOT:-$(git rev-parse --show-toplevel)} -outdir=${1:-$rootdir/out} - PG_HOME=${PG_HOME:-$outdir/default} PG_BIN=$PG_HOME/bin PATH=$PG_BIN:$PATH @@ -86,11 +90,11 @@ log_file=$log_dir/server.log mkdir -p "$cluster_dir" mkdir -p "$data_dir" mkdir -p "$log_dir" -pg_ctl initdb -D "$data_dir" -o "--encoding=UTF8" +"$PG_BIN"/pg_ctl initdb -D "$data_dir" -o "--encoding=UTF8" # create database user date -pg_ctl -t 60 -D "$data_dir" -l "$log_file" start || { +"$PG_BIN"/pg_ctl -t 60 -D "$data_dir" -l "$log_file" start || { echo "Failed to start PostgreSQL" date cat "$log_file" @@ -107,7 +111,7 @@ if [ -n "$init_path" ]; then psql -U "$user" -d "$database" <"$init_path"/*.sql fi -pg_ctl -D "$data_dir" stop +"$PG_BIN"/pg_ctl -D "$data_dir" stop # update postgresql.conf if [ -n "$postgres_conf" ]; then diff --git a/flake.lock b/flake.lock index 407f7e6..c527160 100644 --- a/flake.lock +++ b/flake.lock @@ -128,30 +128,18 @@ "type": "github" } }, - "langref": { - "flake": false, - "locked": { - "narHash": "sha256-O6p2tiKD8ZMhSX+DeA/o5hhAvcPkU2J9lFys/r11peY=", - "type": "file", - "url": "https://raw.githubusercontent.com/ziglang/zig/0fb2015fd3422fc1df364995f9782dfe7255eccd/doc/langref.html.in" - }, - "original": { - "type": "file", - "url": "https://raw.githubusercontent.com/ziglang/zig/0fb2015fd3422fc1df364995f9782dfe7255eccd/doc/langref.html.in" - } - }, "nixpkgs": { "locked": { - "lastModified": 1707514827, - "narHash": "sha256-Y+wqFkvikpE1epCx57PsGw+M1hX5aY5q/xgk+ebDwxI=", - "rev": "20f65b86b6485decb43c5498780c223571dd56ef", - "revCount": 555528, + "lastModified": 1728328465, + "narHash": "sha256-a0a0M1TmXMK34y3M0cugsmpJ4FJPT/xsblhpiiX1CXo=", + "rev": "1bfbbbe5bbf888d675397c66bfdb275d0b99361c", + "revCount": 635732, "type": "tarball", - "url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.2311.555528%2Brev-20f65b86b6485decb43c5498780c223571dd56ef/018d94cf-3c8b-7d01-bbd4-13c568d46a29/source.tar.gz" + "url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.2405.635732%2Brev-1bfbbbe5bbf888d675397c66bfdb275d0b99361c/01926e1f-f93c-780f-9732-bc9807fb6715/source.tar.gz" }, "original": { "type": "tarball", - "url": "https://flakehub.com/f/NixOS/nixpkgs/0.2311.554738.tar.gz" + "url": "https://flakehub.com/f/NixOS/nixpkgs/0.2405.635732.tar.gz" } }, "nixpkgs-lib": { @@ -293,11 +281,11 @@ ] }, "locked": { - "lastModified": 1723205416, - "narHash": "sha256-VF5o0Ogk2PyQSs22aQijjqIC0U3Z4mzjTcrsnhSfQ9U=", + "lastModified": 1728347330, + "narHash": "sha256-THAQHrYobZWRVbTmByrW7CNZgdVgcJk7FSILh4XZVoQ=", "owner": "mitchellh", "repo": "zig-overlay", - "rev": "f4d2e3b5855a66a763e49d8030edbb6b852c4b1a", + "rev": "609ae905e215085f83195d9ed1c3de387bf21abf", "type": "github" }, "original": { @@ -310,7 +298,6 @@ "inputs": { "flake-utils": "flake-utils_3", "gitignore": "gitignore_2", - "langref": "langref", "nixpkgs": [ "nixpkgs" ], @@ -319,11 +306,11 @@ ] }, "locked": { - "lastModified": 1722987529, - "narHash": "sha256-r7tnq70psZQXfLlDj+XzidBj352vfQFrICC7hfKHw7M=", + 
"lastModified": 1728149800, + "narHash": "sha256-xptfKLfhC3eaERuR48/IeU/sTypFVsJiaKsJr8Jt5qc=", "owner": "zigtools", "repo": "zls", - "rev": "d8084a342f40b444addf772fdef36a589299ebe6", + "rev": "063d7fffd722165ae863a57b465593ed9831d6fe", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 60997a5..3c3992e 100644 --- a/flake.nix +++ b/flake.nix @@ -2,11 +2,7 @@ description = "Description for the project"; inputs = { - # For now let's use stable nixpkgs. `libxml2` is introducing breaking - # changes that break postgres compilation - # - # nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.1.0.tar.gz"; - nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.2311.554738.tar.gz"; + nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.2405.635732.tar.gz"; parts.url = "github:hercules-ci/flake-parts"; diff --git a/src/pgzx/collections/htab.zig b/src/pgzx/collections/htab.zig index ba0bbb0..d31e28d 100644 --- a/src/pgzx/collections/htab.zig +++ b/src/pgzx/collections/htab.zig @@ -230,7 +230,7 @@ pub fn HTab(comptime Context: type) type { inline fn conextHasValue() bool { switch (@typeInfo(Context)) { - .Struct => {}, + .@"struct" => {}, else => return false, } if (!@hasDecl(Context, "Value")) { diff --git a/src/pgzx/datum.zig b/src/pgzx/datum.zig index 6b8c2b2..af7fb62 100644 --- a/src/pgzx/datum.zig +++ b/src/pgzx/datum.zig @@ -150,9 +150,9 @@ pub fn findConv(comptime T: type) type { // Conv return switch (@typeInfo(T)) { - .Void => Void, - .Bool => Bool, - .Int => |i| switch (i.signedness) { + .void => Void, + .bool => Bool, + .int => |i| switch (i.signedness) { .signed => switch (i.bits) { 8 => Int8, 16 => Int16, @@ -168,14 +168,14 @@ pub fn findConv(comptime T: type) type { else => @compileError("unsupported unsigned int type"), }, }, - .Float => |f| switch (f.bits) { + .float => |f| switch (f.bits) { 32 => Float32, 64 => Float64, else => @compileError("unsupported float type"), }, - .Optional => |opt| OptConv(findConv(opt.child)), - .Array => @compileLog("fixed size arrays not supported"), - .Pointer => blk: { + .optional => |opt| OptConv(findConv(opt.child)), + .array => @compileLog("fixed size arrays not supported"), + .pointer => blk: { if (!meta.isStringLike(T)) { @compileLog("type:", T); @compileError("unsupported ptr type"); @@ -195,7 +195,7 @@ inline fn isConv(comptime T: type) bool { // fromDatum: fn(d: pg.Datum) !Type // toDatum: fn(v: Type) !pg.Datum - if (@typeInfo(T) != .Struct) { + if (@typeInfo(T) != .@"struct") { return false; } diff --git a/src/pgzx/err.zig b/src/pgzx/err.zig index 563aa48..002e1b3 100644 --- a/src/pgzx/err.zig +++ b/src/pgzx/err.zig @@ -183,8 +183,8 @@ pub inline fn wrap(comptime f: anytype, args: anytype) ElogIndicator!wrap_ret(@T inline fn wrap_ret(comptime f: type) type { const ti = @typeInfo(f); - if (ti != .Fn) { + if (ti != .@"fn") { @compileError("wrap only works with functions"); } - return ti.Fn.return_type.?; + return ti.@"fn".return_type.?; } diff --git a/src/pgzx/fmgr.zig b/src/pgzx/fmgr.zig index 5de8352..f62c3b6 100644 --- a/src/pgzx/fmgr.zig +++ b/src/pgzx/fmgr.zig @@ -42,7 +42,7 @@ pub const PG_FINFO_V1_RECORD = Pg_finfo_record{ /// } /// pub inline fn PG_MODULE_MAGIC() void { - @export(Pg_magic_func, .{ .name = "Pg_magic_func" }); + @export(&Pg_magic_func, .{ .name = "Pg_magic_func" }); } fn Pg_magic_func() callconv(.C) [*c]const Pg_magic_struct { @@ -55,26 +55,26 @@ pub fn FunctionV1() callconv(.C) [*c]const Pg_finfo_record { pub inline fn PG_FUNCTION_INFO_V1(comptime fun: []const u8) void { const finfo_name = 
"pg_finfo_" ++ fun; - @export(FunctionV1, .{ .name = finfo_name }); + @export(&FunctionV1, .{ .name = finfo_name }); } pub inline fn PG_FUNCTION_V1(comptime name: []const u8, comptime callback: anytype) void { PG_FUNCTION_INFO_V1(name); const reg = genFnCall(callback); - @export(reg.call, .{ .name = name }); + @export(®.call, .{ .name = name }); } pub inline fn PG_EXPORT(comptime mod: type) void { const decls = switch (@typeInfo(mod)) { - .Struct => |s| s.decls, + .@"struct" => |s| s.decls, else => @compileError("PG_EXPORT requires a struct"), }; inline for (decls) |decl| { const value = @field(mod, decl.name); const ft = @typeInfo(@TypeOf(value)); - if (ft != .Fn or ft.Fn.is_generic or ft.Fn.is_var_args) { + if (ft != .@"fn" or ft.@"fn".is_generic or ft.@"fn".is_var_args) { continue; } PG_FUNCTION_V1(decl.name, value); @@ -112,7 +112,7 @@ pub inline fn pgCall( } const value = switch (@typeInfo(meta.fnReturnType(fnType))) { - .ErrorUnion, .ErrorSet => @call(.no_async, impl, callArgs) catch |e| elog.throwAsPostgresError(src, e), + .error_union, .error_set => @call(.no_async, impl, callArgs) catch |e| elog.throwAsPostgresError(src, e), else => @call(.no_async, impl, callArgs), }; diff --git a/src/pgzx/mem.zig b/src/pgzx/mem.zig index 237bb77..1f84b10 100644 --- a/src/pgzx/mem.zig +++ b/src/pgzx/mem.zig @@ -242,10 +242,10 @@ pub const MemoryContextAllocator = struct { pub fn registerAllocResetCallback(self: *Self, data: anytype, f: fn (@TypeOf(data)) void) !void { const data_type = @typeInfo(@TypeOf(data)); - if (data_type != .Pointer) { + if (data_type != .pointer) { @compileError("data must be a pointer"); } - switch (data_type.Pointer.size) { + switch (data_type.pointer.size) { .One => {}, .C => {}, // allow C pointer types to raw Postgres data types. 
else => @compileError("data must be a pointer, found slice"), diff --git a/src/pgzx/meta.zig b/src/pgzx/meta.zig index 0d2a16c..52c2a16 100644 --- a/src/pgzx/meta.zig +++ b/src/pgzx/meta.zig @@ -1,13 +1,13 @@ pub inline fn isSlice(comptime T: type) bool { return switch (@typeInfo(T)) { - .Pointer => |p| p.size == .Slice, + .pointer => |p| p.size == .Slice, else => false, }; } pub inline fn sliceElemType(comptime T: type) type { return switch (@typeInfo(T)) { - .Pointer => |p| { + .pointer => |p| { if (p.size != .Slice) { @compileError("Expected a slice type"); } @@ -19,42 +19,42 @@ pub inline fn sliceElemType(comptime T: type) type { pub inline fn pointerElemType(comptime T: type) type { return switch (@typeInfo(T)) { - .Pointer => |p| p.child, + .pointer => |p| p.child, else => @compileError("Expected a pointer type"), }; } pub inline fn hasSentinal(comptime T: type) bool { return switch (@typeInfo(T)) { - .Pointer => |p| p.size == .Slice and p.sentinel != null, + .pointer => |p| p.size == .Slice and p.sentinel != null, else => false, }; } pub inline fn isStringLike(comptime T: type) bool { return switch (@typeInfo(T)) { - .Pointer => |p| p.size == .Slice and p.child == u8, + .pointer => |p| p.size == .Slice and p.child == u8, else => false, }; } pub inline fn isStringLikeZ(comptime T: type) bool { return switch (@typeInfo(T)) { - .Pointer => |p| p.size == .Slice and p.child == u8 and p.sentinel != null, + .pointer => |p| p.size == .Slice and p.child == u8 and p.sentinel != null, else => false, }; } pub inline fn isPrimitive(comptime T: type) bool { return switch (@typeInfo(T)) { - .Bool, .Int, .Float => true, + .bool, .int, .float => true, else => false, }; } pub inline fn getFnType(comptime T: type, name: []const u8) ?type { switch (@typeInfo(T)) { - .Struct, .Union, .Enum, .Opaque => {}, + .@"struct", .@"union", .@"enum", .@"opaque" => {}, else => return null, } if (!@hasDecl(T, name)) { @@ -62,7 +62,7 @@ pub inline fn getFnType(comptime T: type, name: []const u8) ?type { } const maybeFn = @TypeOf(@field(T, name)); - return if (@typeInfo(maybeFn) == .Fn) + return if (@typeInfo(maybeFn) == .@"fn") maybeFn else null; @@ -70,7 +70,7 @@ pub inline fn getFnType(comptime T: type, name: []const u8) ?type { pub inline fn getMethodType(comptime T: type, name: []const u8) ?type { return switch (@typeInfo(T)) { - .Pointer => |p| switch (p.size) { + .pointer => |p| switch (p.size) { .One => getFnType(p.child, name), else => null, }, @@ -80,7 +80,7 @@ pub inline fn getMethodType(comptime T: type, name: []const u8) ?type { pub inline fn fnReturnType(comptime T: type) type { return switch (@typeInfo(T)) { - .Fn => |f| f.return_type.?, + .@"fn" => |f| f.return_type.?, else => @compileError("Expected a function type"), }; } diff --git a/src/pgzx/node.zig b/src/pgzx/node.zig index f9bcb65..c5a7f58 100644 --- a/src/pgzx/node.zig +++ b/src/pgzx/node.zig @@ -78,7 +78,7 @@ pub inline fn castNode(comptime T: type, node: anytype) *T { } pub inline fn safeCastNode(comptime T: type, node: anytype) ?*T { - if (@typeInfo(@TypeOf(node)) == .Optional) { + if (@typeInfo(@TypeOf(node)) == .optional) { if (node == null) { return null; } @@ -106,7 +106,7 @@ pub inline fn asNodePtr(node: anytype) *pg.Node { inline fn checkIsPotentialNodePtr(node: anytype) void { const nodeType = @typeInfo(@TypeOf(node)); - if (nodeType != .Pointer or (nodeType.Pointer.size != .One and nodeType.Pointer.size != .C)) { + if (nodeType != .pointer or (nodeType.pointer.size != .One and nodeType.pointer.size != .C)) { @compileError("Expected 
single node pointer"); } } diff --git a/src/pgzx/pq.zig b/src/pgzx/pq.zig index 6718af1..38f0f0b 100644 --- a/src/pgzx/pq.zig +++ b/src/pgzx/pq.zig @@ -649,7 +649,7 @@ pub fn buildParams( ) !PGQueryParams { const argsType = @TypeOf(args); const argsInfo = @typeInfo(argsType); - if (argsInfo != .Struct or !argsInfo.Struct.is_tuple) { + if (argsInfo != .@"struct" or !argsInfo.@"struct".is_tuple) { return std.debug.panic("params must be a tuple"); } @@ -660,12 +660,12 @@ pub fn buildParams( // The buffer might grow and pointers might get invalidated. // Let's collect the positions of the values in the buffer so we can // collect the pointers after the encoding buffer has been fully written. - var value_indices = try local_allocator.alloc(i32, argsInfo.Struct.fields.len); + var value_indices = try local_allocator.alloc(i32, argsInfo.@"struct".fields.len); const writer: std.ArrayList(u8).Writer = buffer.writer(); - var types = try allocator.alloc(pg.Oid, argsInfo.Struct.fields.len); + var types = try allocator.alloc(pg.Oid, argsInfo.@"struct".fields.len); - inline for (argsInfo.Struct.fields, 0..) |field, idx| { + inline for (argsInfo.@"struct".fields, 0..) |field, idx| { const codec = conv.find(field.type); types[idx] = codec.OID; diff --git a/src/pgzx/pq/conv.zig b/src/pgzx/pq/conv.zig index c03aaff..c2cc300 100644 --- a/src/pgzx/pq/conv.zig +++ b/src/pgzx/pq/conv.zig @@ -13,8 +13,8 @@ pub fn find(comptime T: type) type { } return switch (@typeInfo(T)) { - .Bool => boolconv, - .Int => |i| switch (i.signedness) { + .bool => boolconv, + .int => |i| switch (i.signedness) { .signed => switch (i.bits) { 8 => i8conv, 16 => i16conv, @@ -35,7 +35,7 @@ pub fn find(comptime T: type) type { }, }, }, - .Float => |f| switch (f.bits) { + .float => |f| switch (f.bits) { 32 => f32conv, 64 => f64conv, else => { @@ -43,9 +43,9 @@ pub fn find(comptime T: type) type { @compileError("unsupported float type"); }, }, - .Optional => |opt| optconv(find(opt.child)), - .Array => @compileLog("fixed size arrays not supported"), - .Pointer => blk: { + .optional => |opt| optconv(find(opt.child)), + .array => @compileLog("fixed size arrays not supported"), + .pointer => blk: { if (!meta.isStringLike(T)) { @compileLog("type:", T); @compileError("unsupported ptr type"); @@ -60,7 +60,7 @@ pub fn find(comptime T: type) type { } fn isConv(comptime T: type) bool { - if (@typeInfo(T) != .Struct) { + if (@typeInfo(T) != .@"struct") { return false; } diff --git a/src/pgzx/pq/params.zig b/src/pgzx/pq/params.zig index 246d86f..ad62b55 100644 --- a/src/pgzx/pq/params.zig +++ b/src/pgzx/pq/params.zig @@ -24,19 +24,19 @@ // pub fn build(self: *const Self, params: anytype) !Params { // const paramType = @TypeOf(params); // const paramInfo = @typeInfo(paramType); -// if (paramInfo != .Struct or !paramInfo.Struct.is_tuple) { +// if (paramInfo != .@"struct" or !paramInfo.@"struct".is_tuple) { // return std.debug.panic("params must be a tuple"); // } // // var buffer = std.ArrayList(u8).init(self.allocator); // const writer: std.ArrayList(u8).Writer = buffer.writer(); // -// var value_indices = try self.allocator.alloc(i32, paramInfo.Struct.fields.len); +// var value_indices = try self.allocator.alloc(i32, paramInfo.@"struct".fields.len); // defer self.allocator.free(value_indices); // -// var types = try self.allocator.alloc(c.Oid, paramInfo.Struct.fields.len); +// var types = try self.allocator.alloc(c.Oid, paramInfo.@"struct".fields.len); // -// inline for (paramInfo.Struct.fields, 0..) 
|field, idx| { +// inline for (paramInfo.@"struct".fields, 0..) |field, idx| { // const codec = conv.find(field.type); // types[idx] = codec.OID; // const initPos = buffer.items.len; diff --git a/src/pgzx/spi.zig b/src/pgzx/spi.zig index 932045d..86e7719 100644 --- a/src/pgzx/spi.zig +++ b/src/pgzx/spi.zig @@ -129,15 +129,15 @@ fn scanField( column: c_int, ) !c_int { const field_info = @typeInfo(fieldType); - if (field_info != .Pointer) { + if (field_info != .pointer) { @compileError("scanField requires a pointer"); } - if (field_info.Pointer.size == .Slice) { + if (field_info.pointer.size == .Slice) { @compileError("scanField requires a single pointer, not a slice"); } - const child_type = field_info.Pointer.child; - if (@typeInfo(child_type) == .Struct) { + const child_type = field_info.pointer.child; + if (@typeInfo(child_type) == .@"struct") { var struct_column = column; inline for (std.meta.fields(child_type)) |field| { const child_ptr = &@field(to.*, field.name); diff --git a/src/pgzx/testing.zig b/src/pgzx/testing.zig index f153912..dd7a1d3 100644 --- a/src/pgzx/testing.zig +++ b/src/pgzx/testing.zig @@ -39,7 +39,7 @@ pub const pgzx_err = @import("err.zig"); /// Note that you can only call this function once in the extension. pub inline fn registerTests(comptime testfn: bool, comptime testsuites: anytype) void { const T = @TypeOf(testsuites); - if (@typeInfo(T) != .Struct) { + if (@typeInfo(T) != .@"struct") { @compileError("registerTests: testsuites must be an array of test suites. Found '" ++ @typeName(T) ++ "'"); } @@ -75,7 +75,7 @@ fn runTestSuite(T: anytype) !u32 { elog.Info(@src(), "Running test suite: {s}\n", .{@typeName(T)}); - inline for (@typeInfo(T).Struct.decls) |f| { + inline for (@typeInfo(T).@"struct".decls) |f| { if (comptime std.mem.startsWith(u8, f.name, "test")) { elog.Info(@src(), "Running test: {s}\n", .{f.name}); diff --git a/tools/gennodetags/main.zig b/tools/gennodetags/main.zig index e285a93..99a23af 100644 --- a/tools/gennodetags/main.zig +++ b/tools/gennodetags/main.zig @@ -47,7 +47,7 @@ pub fn main() !void { @setEvalBranchQuota(50000); var node_tags = std.ArrayList([]const u8).init(arena); defer node_tags.deinit(); - const pg_mod = @typeInfo(pg).Struct; + const pg_mod = @typeInfo(pg).@"struct"; inline for (pg_mod.decls) |decl| { const name = decl.name; if (std.mem.startsWith(u8, name, "T_")) {
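Note (not part of the patch): the Zig-side churn above follows the std.builtin.Type field renames in the Zig 0.14 dev builds this patch pins via zig-overlay (.Struct / .Pointer / .Fn become .@"struct" / .pointer / .@"fn", .ErrorUnion becomes .error_union, and so on), and @export now takes a pointer to the exported value rather than the value itself. A minimal standalone sketch of the new spellings, assuming that same Zig version; the names below are illustrative only and do not come from the pgzx sources:

    const std = @import("std");

    // Minimal sketch (not from the pgzx sources): the lower-case type-info
    // tags such as .pointer replace the pre-0.14 .Pointer spelling.
    fn isStringLike(comptime T: type) bool {
        return switch (@typeInfo(T)) {
            .pointer => |p| p.size == .Slice and p.child == u8,
            else => false,
        };
    }

    // @export now expects a pointer to the exported declaration.
    fn answer() callconv(.C) c_int {
        return 42;
    }

    comptime {
        @export(&answer, .{ .name = "answer" });
    }

    test "new type-info tags" {
        try std.testing.expect(isStringLike([]const u8));
        try std.testing.expect(!isStringLike(u32));
    }

The Pointer.Size tags (.One, .Slice, .C) are not renamed at this Zig version, which is why the hunks above keep them unchanged.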