Commit 57b5bec

zig toolchain: update to 0.14dev-20e03be (#99)

urso authored Oct 9, 2024
1 parent 27844ba commit 57b5bec
Showing 16 changed files with 77 additions and 90 deletions.
16 changes: 10 additions & 6 deletions dev/bin/pginit
@@ -23,6 +23,9 @@ postgres_conf=

POSITIONAL_ARGS=()

rootdir=${PRJ_ROOT:-$(git rev-parse --show-toplevel)}
outdir="$rootdir/out"

while [[ $# -gt 0 ]]; do
case $1 in
-h)
@@ -49,6 +52,10 @@ while [[ $# -gt 0 ]]; do
postgres_conf="$2"
shift 2
;;
--proj)
outdir="$2"
shift 2
;;
--* | -*)
echo "Unknown option $1"
echo "$USAGE"
@@ -70,9 +77,6 @@ user=${3:-"postgres"}
# - configurable init scripts
# - custom postgresql.conf

rootdir=${PRJ_ROOT:-$(git rev-parse --show-toplevel)}
outdir=${1:-$rootdir/out}

PG_HOME=${PG_HOME:-$outdir/default}
PG_BIN=$PG_HOME/bin
PATH=$PG_BIN:$PATH
@@ -86,11 +90,11 @@ log_file=$log_dir/server.log
mkdir -p "$cluster_dir"
mkdir -p "$data_dir"
mkdir -p "$log_dir"
pg_ctl initdb -D "$data_dir" -o "--encoding=UTF8"
"$PG_BIN"/pg_ctl initdb -D "$data_dir" -o "--encoding=UTF8"

# create database user
date
pg_ctl -t 60 -D "$data_dir" -l "$log_file" start || {
"$PG_BIN"/pg_ctl -t 60 -D "$data_dir" -l "$log_file" start || {
echo "Failed to start PostgreSQL"
date
cat "$log_file"
@@ -107,7 +111,7 @@ if [ -n "$init_path" ]; then
psql -U "$user" -d "$database" <"$init_path"/*.sql
fi

pg_ctl -D "$data_dir" stop
"$PG_BIN"/pg_ctl -D "$data_dir" stop

# update postgresql.conf
if [ -n "$postgres_conf" ]; then
37 changes: 12 additions & 25 deletions flake.lock

Some generated files are not rendered by default.

6 changes: 1 addition & 5 deletions flake.nix
@@ -2,11 +2,7 @@
description = "Description for the project";

inputs = {
# For now let's use stable nixpkgs. `libxml2` is introducing breaking
# changes that break postgres compilation
#
# nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.1.0.tar.gz";
nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.2311.554738.tar.gz";
nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.2405.635732.tar.gz";

parts.url = "github:hercules-ci/flake-parts";

2 changes: 1 addition & 1 deletion src/pgzx/collections/htab.zig
@@ -230,7 +230,7 @@ pub fn HTab(comptime Context: type) type {

inline fn conextHasValue() bool {
switch (@typeInfo(Context)) {
.Struct => {},
.@"struct" => {},
else => return false,
}
if (!@hasDecl(Context, "Value")) {
16 changes: 8 additions & 8 deletions src/pgzx/datum.zig
@@ -150,9 +150,9 @@ pub fn findConv(comptime T: type) type {
// Conv

return switch (@typeInfo(T)) {
.Void => Void,
.Bool => Bool,
.Int => |i| switch (i.signedness) {
.void => Void,
.bool => Bool,
.int => |i| switch (i.signedness) {
.signed => switch (i.bits) {
8 => Int8,
16 => Int16,
@@ -168,14 +168,14 @@
else => @compileError("unsupported unsigned int type"),
},
},
.Float => |f| switch (f.bits) {
.float => |f| switch (f.bits) {
32 => Float32,
64 => Float64,
else => @compileError("unsupported float type"),
},
.Optional => |opt| OptConv(findConv(opt.child)),
.Array => @compileLog("fixed size arrays not supported"),
.Pointer => blk: {
.optional => |opt| OptConv(findConv(opt.child)),
.array => @compileLog("fixed size arrays not supported"),
.pointer => blk: {
if (!meta.isStringLike(T)) {
@compileLog("type:", T);
@compileError("unsupported ptr type");
@@ -195,7 +195,7 @@ inline fn isConv(comptime T: type) bool {
// fromDatum: fn(d: pg.Datum) !Type
// toDatum: fn(v: Type) !pg.Datum

if (@typeInfo(T) != .Struct) {
if (@typeInfo(T) != .@"struct") {
return false;
}

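The datum.zig hunks follow the Zig 0.14-dev rename of the std.builtin.Type fields: the type-info tags are now lower case, and tags whose names collide with keywords (struct, fn, enum, union, opaque) are spelled with @"..." quoting. A minimal sketch of the new spelling, assuming a matching 0.14-dev compiler; the describe helper below is illustrative and not part of pgzx:

const std = @import("std");

// Type-info tags after the std.builtin.Type rename: lower case, with @"..."
// quoting where the bare name would be a keyword. (Illustrative helper.)
fn describe(comptime T: type) []const u8 {
    return switch (@typeInfo(T)) {
        .@"struct" => "a struct",
        .pointer => "a pointer",
        .int, .float => "a number",
        .optional => "an optional",
        .@"fn" => "a function",
        else => "something else",
    };
}

test "renamed type-info tags" {
    try std.testing.expectEqualStrings("a struct", describe(struct {}));
    try std.testing.expectEqualStrings("a number", describe(u32));
    try std.testing.expectEqualStrings("an optional", describe(?u8));
}
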
4 changes: 2 additions & 2 deletions src/pgzx/err.zig
@@ -183,8 +183,8 @@ pub inline fn wrap(comptime f: anytype, args: anytype) ElogIndicator!wrap_ret(@T

inline fn wrap_ret(comptime f: type) type {
const ti = @typeInfo(f);
if (ti != .Fn) {
if (ti != .@"fn") {
@compileError("wrap only works with functions");
}
return ti.Fn.return_type.?;
return ti.@"fn".return_type.?;
}
12 changes: 6 additions & 6 deletions src/pgzx/fmgr.zig
@@ -42,7 +42,7 @@ pub const PG_FINFO_V1_RECORD = Pg_finfo_record{
/// }
///
pub inline fn PG_MODULE_MAGIC() void {
@export(Pg_magic_func, .{ .name = "Pg_magic_func" });
@export(&Pg_magic_func, .{ .name = "Pg_magic_func" });
}

fn Pg_magic_func() callconv(.C) [*c]const Pg_magic_struct {
@@ -55,26 +55,26 @@ pub fn FunctionV1() callconv(.C) [*c]const Pg_finfo_record {

pub inline fn PG_FUNCTION_INFO_V1(comptime fun: []const u8) void {
const finfo_name = "pg_finfo_" ++ fun;
@export(FunctionV1, .{ .name = finfo_name });
@export(&FunctionV1, .{ .name = finfo_name });
}

pub inline fn PG_FUNCTION_V1(comptime name: []const u8, comptime callback: anytype) void {
PG_FUNCTION_INFO_V1(name);

const reg = genFnCall(callback);
@export(reg.call, .{ .name = name });
@export(&reg.call, .{ .name = name });
}

pub inline fn PG_EXPORT(comptime mod: type) void {
const decls = switch (@typeInfo(mod)) {
.Struct => |s| s.decls,
.@"struct" => |s| s.decls,
else => @compileError("PG_EXPORT requires a struct"),
};

inline for (decls) |decl| {
const value = @field(mod, decl.name);
const ft = @typeInfo(@TypeOf(value));
if (ft != .Fn or ft.Fn.is_generic or ft.Fn.is_var_args) {
if (ft != .@"fn" or ft.@"fn".is_generic or ft.@"fn".is_var_args) {
continue;
}
PG_FUNCTION_V1(decl.name, value);
@@ -112,7 +112,7 @@ pub inline fn pgCall(
}

const value = switch (@typeInfo(meta.fnReturnType(fnType))) {
.ErrorUnion, .ErrorSet => @call(.no_async, impl, callArgs) catch |e| elog.throwAsPostgresError(src, e),
.error_union, .error_set => @call(.no_async, impl, callArgs) catch |e| elog.throwAsPostgresError(src, e),
else => @call(.no_async, impl, callArgs),
};

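Besides the tag renames, fmgr.zig tracks the 0.14-dev change to @export: its first argument is now a pointer to the exported declaration rather than the declaration itself. A minimal sketch, assuming a matching 0.14-dev compiler; the function and export name are illustrative, not part of pgzx:

// @export on 0.14-dev takes a pointer to the exported declaration.
// (add and "pgzx_example_add" are illustrative names.)
fn add(a: i32, b: i32) callconv(.C) i32 {
    return a + b;
}

comptime {
    // Previously: @export(add, .{ .name = "pgzx_example_add" });
    @export(&add, .{ .name = "pgzx_example_add" });
}
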
4 changes: 2 additions & 2 deletions src/pgzx/mem.zig
@@ -242,10 +242,10 @@ pub const MemoryContextAllocator = struct {

pub fn registerAllocResetCallback(self: *Self, data: anytype, f: fn (@TypeOf(data)) void) !void {
const data_type = @typeInfo(@TypeOf(data));
if (data_type != .Pointer) {
if (data_type != .pointer) {
@compileError("data must be a pointer");
}
switch (data_type.Pointer.size) {
switch (data_type.pointer.size) {
.One => {},
.C => {}, // allow C pointer types to raw Postgres data types.
else => @compileError("data must be a pointer, found slice"),
22 changes: 11 additions & 11 deletions src/pgzx/meta.zig
@@ -1,13 +1,13 @@
pub inline fn isSlice(comptime T: type) bool {
return switch (@typeInfo(T)) {
.Pointer => |p| p.size == .Slice,
.pointer => |p| p.size == .Slice,
else => false,
};
}

pub inline fn sliceElemType(comptime T: type) type {
return switch (@typeInfo(T)) {
.Pointer => |p| {
.pointer => |p| {
if (p.size != .Slice) {
@compileError("Expected a slice type");
}
@@ -19,58 +19,58 @@ pub inline fn sliceElemType(comptime T: type) type {

pub inline fn pointerElemType(comptime T: type) type {
return switch (@typeInfo(T)) {
.Pointer => |p| p.child,
.pointer => |p| p.child,
else => @compileError("Expected a pointer type"),
};
}

pub inline fn hasSentinal(comptime T: type) bool {
return switch (@typeInfo(T)) {
.Pointer => |p| p.size == .Slice and p.sentinel != null,
.pointer => |p| p.size == .Slice and p.sentinel != null,
else => false,
};
}

pub inline fn isStringLike(comptime T: type) bool {
return switch (@typeInfo(T)) {
.Pointer => |p| p.size == .Slice and p.child == u8,
.pointer => |p| p.size == .Slice and p.child == u8,
else => false,
};
}

pub inline fn isStringLikeZ(comptime T: type) bool {
return switch (@typeInfo(T)) {
.Pointer => |p| p.size == .Slice and p.child == u8 and p.sentinel != null,
.pointer => |p| p.size == .Slice and p.child == u8 and p.sentinel != null,
else => false,
};
}

pub inline fn isPrimitive(comptime T: type) bool {
return switch (@typeInfo(T)) {
.Bool, .Int, .Float => true,
.bool, .int, .float => true,
else => false,
};
}

pub inline fn getFnType(comptime T: type, name: []const u8) ?type {
switch (@typeInfo(T)) {
.Struct, .Union, .Enum, .Opaque => {},
.@"struct", .@"union", .@"enum", .@"opaque" => {},
else => return null,
}
if (!@hasDecl(T, name)) {
return null;
}

const maybeFn = @TypeOf(@field(T, name));
return if (@typeInfo(maybeFn) == .Fn)
return if (@typeInfo(maybeFn) == .@"fn")
maybeFn
else
null;
}

pub inline fn getMethodType(comptime T: type, name: []const u8) ?type {
return switch (@typeInfo(T)) {
.Pointer => |p| switch (p.size) {
.pointer => |p| switch (p.size) {
.One => getFnType(p.child, name),
else => null,
},
@@ -80,7 +80,7 @@ pub inline fn getMethodType(comptime T: type, name: []const u8) ?type {

pub inline fn fnReturnType(comptime T: type) type {
return switch (@typeInfo(T)) {
.Fn => |f| f.return_type.?,
.@"fn" => |f| f.return_type.?,
else => @compileError("Expected a function type"),
};
}
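
Note the asymmetry visible in meta.zig: the outer tag becomes .pointer, while the Pointer.Size values (.One, .Slice, .C) keep their old capitalization in this toolchain snapshot. A minimal sketch, assuming the same 0.14-dev compiler; isByteSlice is an illustrative helper, not part of pgzx:

const std = @import("std");

// Outer type-info tag is lower case; Pointer.Size values are unchanged here.
inline fn isByteSlice(comptime T: type) bool {
    return switch (@typeInfo(T)) {
        .pointer => |p| p.size == .Slice and p.child == u8,
        else => false,
    };
}

test "byte slice detection" {
    try std.testing.expect(isByteSlice([]const u8));
    try std.testing.expect(!isByteSlice(u8));
}
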
4 changes: 2 additions & 2 deletions src/pgzx/node.zig
@@ -78,7 +78,7 @@ pub inline fn castNode(comptime T: type, node: anytype) *T {
}

pub inline fn safeCastNode(comptime T: type, node: anytype) ?*T {
if (@typeInfo(@TypeOf(node)) == .Optional) {
if (@typeInfo(@TypeOf(node)) == .optional) {
if (node == null) {
return null;
}
@@ -106,7 +106,7 @@ pub inline fn asNodePtr(node: anytype) *pg.Node {

inline fn checkIsPotentialNodePtr(node: anytype) void {
const nodeType = @typeInfo(@TypeOf(node));
if (nodeType != .Pointer or (nodeType.Pointer.size != .One and nodeType.Pointer.size != .C)) {
if (nodeType != .pointer or (nodeType.pointer.size != .One and nodeType.pointer.size != .C)) {
@compileError("Expected single node pointer");
}
}
8 changes: 4 additions & 4 deletions src/pgzx/pq.zig
@@ -649,7 +649,7 @@ pub fn buildParams(
) !PGQueryParams {
const argsType = @TypeOf(args);
const argsInfo = @typeInfo(argsType);
if (argsInfo != .Struct or !argsInfo.Struct.is_tuple) {
if (argsInfo != .@"struct" or !argsInfo.@"struct".is_tuple) {
return std.debug.panic("params must be a tuple");
}

@@ -660,12 +660,12 @@
// The buffer might grow and pointers might get invalidated.
// Let's collect the positions of the values in the buffer so we can
// collect the pointers after the encoding buffer has been fully written.
var value_indices = try local_allocator.alloc(i32, argsInfo.Struct.fields.len);
var value_indices = try local_allocator.alloc(i32, argsInfo.@"struct".fields.len);

const writer: std.ArrayList(u8).Writer = buffer.writer();
var types = try allocator.alloc(pg.Oid, argsInfo.Struct.fields.len);
var types = try allocator.alloc(pg.Oid, argsInfo.@"struct".fields.len);

inline for (argsInfo.Struct.fields, 0..) |field, idx| {
inline for (argsInfo.@"struct".fields, 0..) |field, idx| {
const codec = conv.find(field.type);
types[idx] = codec.OID;

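The pq.zig hunks show the matching payload access after the rename: argsInfo.Struct.fields becomes argsInfo.@"struct".fields, and tuple detection reads is_tuple from the same field. A minimal sketch, assuming a matching 0.14-dev compiler; assertTuple is an illustrative helper, not part of pgzx:

// Payload access after the rename: the union field is @"struct", and
// is_tuple/fields hang off it as before. (Illustrative helper.)
fn assertTuple(comptime Args: type) void {
    const info = @typeInfo(Args);
    if (info != .@"struct" or !info.@"struct".is_tuple) {
        @compileError("expected a tuple");
    }
}

test "tuple arguments pass the check" {
    assertTuple(@TypeOf(.{ 1, "two" }));
}
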
(Diffs for the remaining changed files were not loaded.)

0 comments on commit 57b5bec
