Compare commits

..

No commits in common. "cc887fdc72aeeab406fc394679a260fbaa63428e" and "2a40712346a231d6d9f5f520ac7abadbc19b7124" have entirely different histories.

16 changed files with 633 additions and 88 deletions

View file

@ -6,20 +6,32 @@ Declaratively keep your stuff up to date in your nixos config.
## Usage
```nix
services.fooud = {
enable = true;
repos = [
{
path = "/full/path/to/your/repo.git";
hooks = [
pkgs.writeScriptBin "post-recieve" ''
git clone . /var/www/your/deployed/location
'';
];
}
];
description = "My flake config";
inputs.fooud.url = "git+https://git.squi.bid/squibid/fooud";
outputs = { self, nixpkgs, fooud }: {
nixosConfigurations.my-system = nixpkgs.lib.nixosSystem {
modules = [{
services.nginx.virtualHosts."squi.bid" = {
root = fooud.lib.gitUpdater config {
git = "https://git.squi.bid/squibid/squi.bid"; # the source of the data
dest = "/var/www/squi.bid"; # where should the files live on disk
keys = [ "BECE5684D3C4005D" ]; # requires the commit to be signed by me
check = "5m"; # we may be no more than 5 minutes out of date from the source
};
locations."/" = {
tryFiles = "$uri $uri.html $uri/";
index = "index.html index.htm";
};
};
}];
};
};
}
```
# TODO
- [ ] add support for non-git files
- extract archives
- support copying files from other parts of the filesystem (although that's rather impure isn't it)
- add some checks

42
build.zig Normal file
View file

@ -0,0 +1,42 @@
const std = @import("std");
/// Build graph for fooud: a public module, the executable (libgit2 + gpgme +
/// a small C helper), and a `zig build run` convenience step.
pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Public module so dependents can `@import("fooud")`.
    const fooud_mod = b.addModule("fooud", .{
        .root_source_file = b.path("src/main.zig"),
        .target = target,
    });

    const exe = b.addExecutable(.{
        .name = "fooud",
        .root_module = b.createModule(.{
            .root_source_file = b.path("src/main.zig"),
            .target = target,
            .optimize = optimize,
            .imports = &.{
                .{ .name = "fooud", .module = fooud_mod },
            },
        }),
    });

    // Third-party HTTP client plus the system libraries reached via @cImport.
    const curl_dep = b.dependency("curl", .{});
    exe.root_module.addImport("curl", curl_dep.module("curl"));
    exe.root_module.linkSystemLibrary("git2", .{});
    exe.root_module.linkSystemLibrary("gpgme", .{});
    exe.root_module.addIncludePath(b.path("src/util"));
    exe.root_module.addCSourceFile(.{ .file = b.path("src/util/gpg_helper.c") });
    exe.linkLibC();
    b.installArtifact(exe);

    // `zig build run -- <args>` forwards extra CLI arguments to the app.
    const run_artifact = b.addRunArtifact(exe);
    run_artifact.step.dependOn(b.getInstallStep());
    if (b.args) |extra_args| run_artifact.addArgs(extra_args);
    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_artifact.step);
}

20
build.zig.zon Normal file
View file

@ -0,0 +1,20 @@
.{
.name = .fooud,
.version = "1.0.0",
.fingerprint = 0xe809364a41c00a2b, // Changing this has security and trust implications.
.minimum_zig_version = "0.15.2",
.dependencies = .{
.curl = .{
.url = "https://github.com/jiacai2050/zig-curl/archive/refs/tags/v0.3.2.zip",
.hash = "curl-0.3.2-P4tT4SXPAACuV6f5eyh4jG_1SspjWwMm_vRJfoKrQep5",
},
},
.paths = .{
"build.zig",
"build.zig.zon",
"src",
// For example...
//"LICENSE",
//"README.md",
},
}

34
build.zig.zon.nix Normal file
View file

@ -0,0 +1,34 @@
# generated by zon2nix (https://github.com/nix-community/zon2nix)
{ linkFarm, fetchzip, fetchgit }:
linkFarm "zig-packages" [
{
name = "N-V-__8AAFrtpQI1j9eOv7aN3lt3eH1TJfH4npAdRIrg2gGH";
path = fetchzip {
url = "https://github.com/Mbed-TLS/mbedtls/archive/refs/tags/v3.6.0.tar.gz";
hash = "sha256-yzGBkrqh+T/5GS66xL5zJstCmvcfG09TfxqA3F8UPJg=";
};
}
{
name = "N-V-__8AAHipPQF9UuLPiaV1CtJzZIxvTN61tMGdFx8LGjIV";
path = fetchzip {
url = "https://github.com/curl/curl/releases/download/curl-8_8_0/curl-8.8.0.tar.gz";
hash = "sha256-Gqfe8iiC8aCBiuUVO6VYqW5DmqgSv5oS4XvMfQgbwFw=";
};
}
{
name = "N-V-__8AAJj_QgDBhU17TCtcvdjOZZPDfkvxrEAyZkc14VN8";
path = fetchzip {
url = "https://github.com/madler/zlib/releases/download/v1.3.1/zlib-1.3.1.tar.gz";
hash = "sha256-acY8yFzIRYbrZ2CGODoxLnZuppsP6KZy19I9Yy77pfc=";
};
}
{
name = "curl-0.3.2-P4tT4SXPAACuV6f5eyh4jG_1SspjWwMm_vRJfoKrQep5";
path = fetchzip {
url = "https://github.com/jiacai2050/zig-curl/archive/refs/tags/v0.3.2.zip";
hash = "sha256-1FjYirex2Q/zs5GLNtCGgClyV5/SW0GhxFHA1Hm+e4o=";
};
}
]

13
flake.lock generated
View file

@ -2,17 +2,18 @@
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1767892417,
"narHash": "sha256-dhhvQY67aboBk8b0/u0XB6vwHdgbROZT3fJAjyNh5Ww=",
"lastModified": 1764733908,
"narHash": "sha256-QJiih52NU+nm7XQWCj+K8SwUdIEayDQ1FQgjkYISt4I=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "3497aa5c9457a9d88d71fa93a4a8368816fbeeba",
"rev": "cadcc8de247676e4751c9d4a935acb2c0b059113",
"type": "github"
},
"original": {
"id": "nixpkgs",
"ref": "nixos-unstable",
"type": "indirect"
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {

View file

@ -1,11 +1,66 @@
{
description = "Declaratively update your data.";
inputs.nixpkgs.url = "nixpkgs/nixos-unstable";
outputs = { nixpkgs, ... }: {
lib = builtins.import ./lib.nix { pkgs = nixpkgs; };
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
outputs = { self, nixpkgs, ... }: let
system = "x86_64-linux";
pkgs = import nixpkgs { inherit system; };
package = pkgs.stdenv.mkDerivation rec {
pname = "fooud";
version = "1.0";
src = ./.;
deps = pkgs.callPackage ./build.zig.zon.nix {};
nativeBuildInputs = [
pkgs.zig.hook
pkgs.glibc
pkgs.libgit2
pkgs.gpgme
pkgs.libgpg-error
pkgs.pkg-config
];
zigBuildFlags = [
"--system" "${deps}"
# "-Doptimize=Debug"
# "-Dtarget=${system}"
];
};
in {
lib = builtins.import ./lib.nix { pkgs = pkgs; };
nixosModules = rec {
fooud = builtins.import ./module.nix;
fooud = { pkgs, lib, config, inputs, ... }: {
options.programs.fooud.enable = lib.mkEnableOption ("fooud")
// { default = true; };
config = lib.mkIf config.programs.fooud.enable {
environment.systemPackages = [
pkgs.glibc
pkgs.libgit2
pkgs.gpgme
pkgs.libgpg-error
pkgs.nix
package
];
};
};
default = fooud;
};
packages.${system} = rec { default = package; fooud = default; };
checks.${system}.build = let
package = self.packages.${system}.default;
in pkgs.runCommand "fooud-build" { buildInputs = [
pkgs.git
pkgs.nix
]; } ''
mkdir repo
git -C repo init > /dev/null 2>&1
echo "hi" > repo/README.md
git -C repo config user.email "you@example.com" > /dev/null 2>&1
git -C repo config user.name "Your Name" > /dev/null 2>&1
git -C repo add . > /dev/null 2>&1
git -C repo commit -m "initial commit" > /dev/null 2>&1
# this check won't succeed until I find a way to run a chroot store
${package}/bin/fooud --git repo --dest test
'';
};
}

68
lib.nix
View file

@ -1,12 +1,72 @@
{ pkgs, ... }:
let
post-recv = dest: pkgs.writeScriptBin "post-recv" ''
git clone . ${dest}
defaultUpdater = { git ? null, url ? null, path ? null, keys ? null, dest, check, config }:
let
config.systemd.services."fooud-${dest}" = {
serviceConfig = {
Type = "oneshot";
User = "root";
Group = "root";
ExecStart = let
dest = pkgs.lib.assertMsg dest "dest must be set";
remote =
if git then "--git " + git
else if url then "--url " + url
else if path then "--path " + path
else builtins.throw "one of git, url or path must be set";
keys_str = if git then
pkgs.lib.strings.concatStrings builtins.map (x: "--key ${x} ") keys
else throw "cannot use keys with git";
in pkgs.writeShellScript "fooud-${dest}-wrapper" ''
${pkgs.fooud}/bin/fooud ${keys_str} ${remote} ${dest}
'';
};
};
config.systemd.timers."fooud-${dest}" = {
wantedBy = [ "timers.target" ];
timerConfig = {
OnActiveSec = "0s";
OnUnitActiveSec = check;
Unit = "fooud-${dest}.service";
Persistent = true;
};
};
in
{
config = config;
dest = dest;
};
in
let
lib.git = {
inherit post-recv;
gitUpdater = config: { git, keys, dest, check }:
(defaultUpdater {
git = git;
keys = keys;
dest = dest;
check = check;
config = config;
}).dest;
fsUpdater = config: { path, dest, check }:
(defaultUpdater {
path = path;
dest = dest;
check = check;
config = config;
}).dest;
urlUpdater = config: { url, dest, check }:
(defaultUpdater {
url = url;
dest = dest;
check = check;
config = config;
}).dest;
lib = {
inherit
gitUpdater
fsUpdater
urlUpdater;
};
in
lib

View file

@ -1,59 +0,0 @@
{ config, pkgs, lib, ... }:
{
options.services.fooud = {
enable = lib.mkEnableOption config.description;
repos = lib.mkOption {
type = lib.listOf {
path = lib.mkOption {
description = "fullpath to the repositiory on your server";
type = lib.types.string;
};
hooks = lib.mkOption {
type = lib.listOf lib.types.path;
example = [
pkgs.writeScriptBin "post-recieve" ''
git clone . /var/www/your/deployed/location
''
];
};
};
};
user = lib.mkOption {
type = lib.types.str;
default = "fooud-deploy";
};
};
config = let
cfg = config.services.fooud;
in lib.mkIf cfg.enable {
environment.systemPackages = with pkgs; [ git ];
users.users."${cfg.user}" = lib.mkIf cfg.user != "root" {
group = "${cfg.user}";
isSystemUser = true;
createHome = true;
home = "/var/lib/${cfg.user}";
shell = "${pkgs.git}/bin/git-shell";
};
users.groups."${cfg.user}" = {};
systemd = {
services."fooud" = {
wantedBy = [ "multi-user.target" ];
serviceConfig = {
Type = "oneshot";
User = cfg.user;
ExecStart = pkgs.writeScriptBin "fooud-deploy"
(lib.concatMapStrings
(repo: ''
if [ -d ${repo.path} ]; then
rm -f ${repo.path}/hooks/*
cp ${repo.hooks}/bin/* ${repo.name}/hooks/
fi
'')
cfg.repos);
};
};
};
};
}

53
src/NixStore.zig Normal file
View file

@ -0,0 +1,53 @@
const NixStore = @This();
const std = @import("std");
const gpa = std.heap.page_allocator;
/// Add a path to the store, this will copy the contents of path recursively
/// into the store and return a (hopefully) valid store path. To try and keep
/// this store path valid you should follow this with a call to realize() and
/// then root the store path.
///
/// The returned slice is a view into memory obtained from `gpa`; callers in
/// this project never free it (process-lifetime data).
pub fn add(path: []const u8) error{Failure}![]const u8 {
    const res = std.process.Child.run(.{
        .allocator = gpa,
        .argv = &[_][]const u8{ "nix", "store", "add", path },
    }) catch return error.Failure;
    // Child.run hands us ownership of both streams; stderr is never used and
    // stdout must not leak when we bail out early.
    defer gpa.free(res.stderr);
    errdefer gpa.free(res.stdout);
    if (res.term != .Exited or res.term.Exited != 0) return error.Failure;
    if (res.stdout.len == 0) return error.Failure;
    return res.stdout[0 .. res.stdout.len - 1]; // to chop off the \n
}
/// This tries to tell the store that a store_path should stick around a while
/// longer, there's a chance that it doesn't listen and removes that store_path
/// but in my testing it seemed to stick around.
///
/// Best effort by design: the exit status of nix-store is intentionally not
/// checked; only a failure to spawn the process is reported.
pub fn realize(store_path: []const u8) error{Failure}!void {
    const res = std.process.Child.run(.{
        .allocator = gpa,
        .argv = &[_][]const u8{ "nix-store", "--realize", store_path },
    }) catch return error.Failure;
    // Child.run allocates stdout/stderr for us; release them instead of
    // leaking on every invocation.
    gpa.free(res.stdout);
    gpa.free(res.stderr);
}
/// This tells the nix store to not gc our new store_path because it has a
/// dependency at root_path and that only if root_path doesn't exist anymore
/// it can delete store_path.
///
/// Best effort: only a spawn failure is surfaced, the command's exit status
/// is deliberately ignored.
pub fn root(store_path: []const u8, root_path: []const u8) error{Failure}!void {
    const res = std.process.Child.run(.{
        .allocator = gpa,
        .argv = &[_][]const u8{ "nix-store", "--add-root", root_path, "--indirect", store_path },
    }) catch return error.Failure;
    // Free the captured output; it was previously leaked.
    gpa.free(res.stdout);
    gpa.free(res.stderr);
}
/// Delete a path from the store.
///
/// Best effort: a non-zero exit (e.g. the path is still rooted) is ignored;
/// only a failure to spawn `nix` is reported.
pub fn delete(store_path: []const u8) error{Failure}!void {
    const res = std.process.Child.run(.{
        .allocator = gpa,
        .argv = &[_][]const u8{ "nix", "store", "delete", store_path },
    }) catch return error.Failure;
    // Free the captured output; it was previously leaked.
    gpa.free(res.stdout);
    gpa.free(res.stderr);
}

3
src/extractor.zig Normal file
View file

@ -0,0 +1,3 @@
const Extractor = @This();
// TODO: impl

153
src/main.zig Normal file
View file

@ -0,0 +1,153 @@
const std = @import("std");
const NixStore = @import("NixStore.zig");
const Git = @import("remotes/Git.zig");
const Curl = @import("remotes/Curl.zig");
const Fs = @import("remotes/Fs.zig");
const gpa = std.heap.page_allocator;
/// Command-line flag names understood by check_args(); each tag corresponds
/// to a "--<name>" argument.
const Options = enum {
    git, // --git <url>: fetch by cloning a git repository
    url, // --url <url>: fetch a single file over HTTP(S)
    path, // --path <path>: fetch from the local filesystem
    dest, // --dest <path>: where the result gets linked (required)
    key, // --key <fingerprint>: repeatable; allowed signing keys (git only)
    extract, // --extract: boolean flag, consumes no value
};
/// Parsed command-line configuration, populated by check_args().
const Config = struct {
    /// Source of the data; check_args() enforces that exactly one of
    /// git/url/path ends up set.
    const Remote = union(enum) {
        git: []const u8, // git remote URL
        url: []const u8, // plain HTTP(S) URL
        path: []const u8, // local filesystem path
        none, // initial state: no remote chosen yet
    };
    remote: Remote,
    // Destination path the new store path gets symlinked to; null until
    // --dest is seen (required, validated in check_args()).
    dest: ?[]const u8,
    // GPG key fingerprints accepted for the git HEAD commit signature.
    keys: std.ArrayList([]const u8),
    // When true, extract the downloaded archive (not implemented yet).
    extract: bool,
};
/// Entry point: parse the CLI, fetch the remote into a temp directory, add it
/// to the nix store, and atomically repoint `--dest` at the new store path.
pub fn main() !void {
    var config: Config = .{
        .remote = .none,
        .dest = null,
        .keys = .empty,
        .extract = false,
    };
    try check_args(&config);

    const tmp_dest = try std.fmt.allocPrint(gpa, "/tmp/{s}", .{config.dest.?});
    defer gpa.free(tmp_dest);
    // Drop any stale temp files from a previous run. deleteTree succeeds when
    // the path does not exist, so no separate access() probe is needed.
    try std.fs.cwd().deleteTree(tmp_dest);

    // Fetch from whichever remote kind was configured; check_args guarantees
    // exactly one variant is set.
    const fetched = try switch (config.remote) {
        .git => |remote| Git.get(remote, tmp_dest, try config.keys.toOwnedSlice(gpa)),
        .url => |remote| Curl.get(remote, tmp_dest),
        .path => |remote| Fs.get(remote, tmp_dest),
        .none => unreachable,
    };
    defer std.fs.cwd().deleteTree(tmp_dest) catch {};
    if (!fetched) {
        std.log.err("failed to obtain the remote file(s)", .{});
        std.process.exit(1);
    }
    if (config.extract) {
        // TODO: impl
    }

    // Now that we've gotten all our files it's time to add it to the nix store
    const store_path = try NixStore.add(tmp_dest);
    try NixStore.realize(store_path);
    std.log.info("new store path: {s}", .{store_path});

    // Link dest -> store path. The readLink buffer lives at function scope
    // because the slice into it is still used after the loop; the original
    // declared a `const` buffer inside the loop and wrote through @constCast,
    // leaving a dangling pointer once that scope ended.
    var link_buf: [std.fs.max_path_bytes]u8 = undefined;
    var dest_old_store_path: ?[]u8 = null;
    while (true) {
        std.fs.cwd().symLink(store_path, config.dest.?, .{}) catch |err| switch (err) {
            error.PathAlreadyExists => {
                // dest already points somewhere: read the old store path.
                dest_old_store_path = try std.fs.cwd().readLink(config.dest.?, link_buf[0..]);
                std.log.info("old store path: {s}", .{dest_old_store_path.?});
                // if the old store path and the current store path are the
                // same then don't do anything, nothing will change
                if (std.mem.eql(u8, dest_old_store_path.?, store_path)) return;
                try std.fs.cwd().deleteFile(config.dest.?);
                continue; // retry the symlink now that dest is gone
            },
            else => {
                // Best effort: restore the previous link before bailing out.
                if (dest_old_store_path) |path| {
                    try std.fs.cwd().symLink(path, config.dest.?, .{});
                }
                return err;
            },
        };
        // Register dest as a gc root so the new store path sticks around.
        try NixStore.root(store_path, config.dest.?);
        break;
    }

    // delete the old nix store path, nothing references it anymore
    if (dest_old_store_path) |path| {
        std.log.info("deleting old store path: {s}", .{path});
        try NixStore.delete(path);
    }
}
/// Populate `config` from the process arguments; logs every problem it finds
/// and exits with status 1 when the flag combination is invalid.
fn check_args(config: *Config) !void {
    var iter = std.process.args();
    while (iter.next()) |arg| {
        // startsWith instead of slicing arg[0..2]: a one-character argument
        // previously tripped an out-of-bounds panic.
        if (!std.mem.startsWith(u8, arg, "--")) continue;
        const opt = std.meta.stringToEnum(Options, arg[2..]) orelse {
            std.log.err("{s} is not a valid option", .{arg});
            std.process.exit(1);
        };
        if (opt == .extract) { // this option doesn't require arguments
            config.extract = true;
            continue;
        }
        // Every remaining option consumes the next argument as its value.
        const val = iter.next() orelse return error.Invalid;
        switch (opt) {
            .git => config.remote = .{ .git = val },
            .url => config.remote = .{ .url = val },
            .path => config.remote = .{ .path = val },
            .key => try config.keys.append(gpa, val),
            .dest => config.dest = val,
            .extract => unreachable, // handled above
        }
    }

    // Cross-flag validation: report all problems before exiting.
    var err = false;
    if (config.remote == .none) {
        std.log.err("you must set one of --git, --url, or --path", .{});
        err = true;
    }
    if (config.dest == null) {
        std.log.err("you must set a --dest", .{});
        err = true;
    }
    if (config.keys.items.len > 0 and config.remote != .git) {
        std.log.err("you can only check keys on a git repository", .{});
        err = true;
    }
    if (config.extract and config.remote == .git) {
        std.log.err("you cannot extract a git repository", .{});
        err = true;
    }
    if (err) std.process.exit(1);
}

33
src/remotes/Curl.zig Normal file
View file

@ -0,0 +1,33 @@
const Curl = @This();
const std = @import("std");
const curl = @import("curl");
const gpa = std.heap.page_allocator;
/// Download `url` into the file `dest`. Returns true when the server answered
/// with HTTP 200, false for any other status; transport errors propagate.
pub fn get(url: []const u8, dest: []const u8) !bool {
    const ca_bundle = try curl.allocCABundle(gpa);
    defer ca_bundle.deinit();
    const easy = try curl.Easy.init(.{ .ca_bundle = ca_bundle });
    defer easy.deinit();

    // let's get that dest file opened up
    const fp = try std.fs.cwd().createFile(dest, .{});
    defer fp.close();
    // Fix: the buffer and writer were `const` and written through @constCast
    // (mutating const memory is undefined behavior); make them mutable.
    var buffer: [1024]u8 = undefined;
    var writer = fp.writer(&buffer);

    // libcurl needs a NUL-terminated URL; the incoming slice has no sentinel.
    const c_url = try gpa.dupeZ(u8, url);
    defer gpa.free(c_url);

    // download it
    try easy.setUrl(c_url);
    try easy.setWriter(&writer.interface);
    const response = try easy.perform();
    if (response.status_code != 200) return false;
    try writer.interface.flush();
    return true;
}

11
src/remotes/Fs.zig Normal file
View file

@ -0,0 +1,11 @@
const Fs = @This();
const std = @import("std");
/// Copy `src` from the local filesystem into `dest`.
/// Not implemented yet, so it always reports failure to the caller.
pub fn get(src: []const u8, dest: []const u8) !bool {
    // TODO: impl
    _ = dest;
    _ = src;
    return false;
}

117
src/remotes/Git.zig Normal file
View file

@ -0,0 +1,117 @@
const Git = @This();
const std = @import("std");
pub const c = @cImport({
@cInclude("git2.h");
@cInclude("gpgme.h");
@cInclude("gpg_helper.h");
});
/// Error set for the libgit2 interop in this file. Only InitFailed is
/// currently produced here; the remaining tags appear reserved for finer
/// grained error mapping.
pub const GitError = error{
    InitFailed, // git_libgit2_init() returned < 0
    OpenFailed,
    NotFound,
    InvalidObject,
    InvalidReference,
    OutOfMemory,
    Unknown,
};
/// Fetch `url` into `dest` by cloning it, then — when `keys` is non-empty —
/// verify that the HEAD commit is signed by one of those keys. Returns true
/// on success.
///
/// NOTE(review): when `dest` already exists no clone happens and `repo` stays
/// null, so this returns false (or passes null into check_signing_key). The
/// caller in main.zig always removes `dest` first, which makes that branch
/// look unreachable in practice — confirm the intent before relying on it.
pub fn get(url: []const u8, dest: []const u8, keys: []const []const u8) !bool {
    // libgit2 must be initialized before any other call, and shut down after.
    if (c.git_libgit2_init() < 0) return GitError.InitFailed;
    defer _ = c.git_libgit2_shutdown();
    var exists = true;
    std.fs.cwd().access(dest, .{}) catch {
        exists = false;
    };
    var repo: ?*c.git_repository = null;
    if (!exists) repo = clone(url, dest) catch return false;
    defer if (repo) |r| c.git_repository_free(r);
    // Without keys to check, success simply means "we have a repository".
    if (keys.len <= 0) return repo != null;
    return try check_signing_key(repo, keys);
}
/// Clone `url` into `dest` with default options. Returns the opened
/// repository, or null after logging the libgit2 error.
fn clone(url: []const u8, dest: []const u8) !?*c.git_repository {
    // libgit2 takes NUL-terminated C strings; the incoming slices carry no
    // sentinel (dest in particular comes from allocPrint), so passing .ptr
    // directly read past the end of the buffers. Duplicate with a sentinel.
    const gpa = std.heap.page_allocator;
    const c_url = try gpa.dupeZ(u8, url);
    defer gpa.free(c_url);
    const c_dest = try gpa.dupeZ(u8, dest);
    defer gpa.free(c_dest);

    var clone_opts: c.git_clone_options = undefined;
    _ = c.git_clone_options_init(&clone_opts, c.GIT_CLONE_OPTIONS_VERSION);
    var repo: ?*c.git_repository = null;
    const err = c.git_clone(&repo, c_url.ptr, c_dest.ptr, &clone_opts);
    if (err != 0) {
        const git_err = c.git_error_last();
        if (git_err != null and git_err.*.message != null) {
            std.log.err("Clone error {d}: {s}", .{ err, git_err.*.message });
        } else {
            std.log.err("Clone error {d}: <unknown>", .{err});
        }
        return null;
    }
    return repo;
}
/// Verify that the repository's HEAD commit carries a GPG signature made by
/// one of the fingerprints in `keys`. Returns true only on a match; every
/// failure path logs a reason and returns false.
fn check_signing_key(repo: ?*c.git_repository, keys: []const []const u8) !bool {
    var head_ref: ?*c.git_reference = null;
    if (c.git_repository_head(&head_ref, repo) != 0) {
        std.log.err("Failed to get HEAD", .{});
        return false;
    }
    defer c.git_reference_free(head_ref);
    // Resolve HEAD to the commit id it points at.
    const oid_ptr = c.git_reference_target(head_ref);
    if (oid_ptr == null) {
        std.log.err("HEAD is not pointing to a commit", .{});
        return false;
    }
    // Extract the detached signature plus the exact payload that was signed;
    // this fails when the commit carries no signature at all.
    var signature = c.git_buf{};
    var signed_data = c.git_buf{};
    if (c.git_commit_extract_signature(&signature, &signed_data, repo, @constCast(oid_ptr), null) != 0) {
        std.log.err("HEAD commit is not signed", .{});
        return false;
    }
    defer c.git_buf_dispose(&signature);
    defer c.git_buf_dispose(&signed_data);
    // gpg me — gpgme requires a version check before any other call.
    _ = c.gpgme_check_version(null);
    var ctx: c.gpgme_ctx_t = null;
    _ = c.gpgme_new(&ctx);
    defer _ = c.gpgme_release(ctx);
    // Wrap the signature and the signed text in gpgme data objects.
    var sig_data: c.gpgme_data_t = null;
    var signed_text_data: c.gpgme_data_t = null;
    _ = c.gpgme_data_new_from_mem(&sig_data, signature.ptr, signature.size, 0);
    defer _ = c.gpgme_data_release(sig_data);
    _ = c.gpgme_data_new_from_mem(&signed_text_data, signed_data.ptr, signed_data.size, 0);
    defer _ = c.gpgme_data_release(signed_text_data);
    if (c.gpgme_op_verify(ctx, sig_data, signed_text_data, null) != 0) {
        std.log.err("Failed to verify signature", .{});
        return false;
    }
    const result = c.gpgme_op_verify_result(ctx);
    if (result == null) {
        std.log.err("No signature found in verification result", .{});
        return false;
    }
    // C helper (src/util/gpg_helper.c): fingerprint of the first signature.
    const fpr = c.first_signature_fpr(ctx);
    if (fpr == null) {
        std.log.err("No signature found", .{});
        return false;
    }
    for (keys) |key| {
        // NOTE(review): `fpr + 24` skips the first 24 characters of the
        // fingerprint, so `key` is compared against its tail (the long key
        // id of a 40-char v4 fingerprint). Confirm this matches the format
        // users are expected to pass via --key.
        if (std.mem.eql(u8, std.mem.span(fpr + 24), key)) {
            return true;
        }
    }
    return false;
}

7
src/util/gpg_helper.c Normal file
View file

@ -0,0 +1,7 @@
#include "gpg_helper.h"
/* Return the fingerprint of the first signature recorded by the most recent
 * verify operation on ctx, or NULL when no signature is present. Exists
 * because walking gpgme's result structs from Zig's @cImport is awkward. */
const char* first_signature_fpr(gpgme_ctx_t ctx) {
    gpgme_verify_result_t verify_result = gpgme_op_verify_result(ctx);
    if (verify_result == NULL || verify_result->signatures == NULL) {
        return NULL;
    }
    return verify_result->signatures->fpr;
}

3
src/util/gpg_helper.h Normal file
View file

@ -0,0 +1,3 @@
/* Small C shim over gpgme for Zig's @cImport; see gpg_helper.c.
 * Fix: added an include guard so double inclusion cannot redeclare. */
#ifndef GPG_HELPER_H
#define GPG_HELPER_H

#include <gpgme.h>

/* Fingerprint of the first signature from ctx's last verify, or NULL. */
const char* first_signature_fpr(gpgme_ctx_t ctx);

#endif /* GPG_HELPER_H */