Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
acdd201
refactor: extract artifact.rs into embedded_artifact crate
branchseer Apr 17, 2026
8576463
refactor(embedded_artifact): add artifact_of! macro (hashes inline)
branchseer Apr 17, 2026
669499e
feat: add embedded_artifact_build crate for build-script helpers
branchseer Apr 17, 2026
691db8f
refactor(fspy/build.rs): verify downloads with sha256 of tarball
branchseer Apr 17, 2026
d49aa77
docs(fspy/build.rs): simplify sha256 comment
branchseer Apr 17, 2026
530b300
refactor(embedded_artifact): compute hashes in build.rs via rustc-env
branchseer Apr 18, 2026
8654ba0
refactor(bundled_artifact): rename from embedded_artifact; method wri…
branchseer Apr 18, 2026
77bdc8d
update
branchseer Apr 18, 2026
966570d
fix: address PR #344 review feedback
claude Apr 18, 2026
0e6a788
chore: remove unused deps flagged by cargo-shear
claude Apr 18, 2026
0d9eb15
fix(fspy/build): match main's cache workflow — hash extracted binary
claude Apr 18, 2026
a2d7f86
refactor: rename bundled_artifact -> materialized_artifact
claude Apr 18, 2026
9eb4b53
refactor(materialized_artifact): rename ensure_in -> materialize_in
claude Apr 18, 2026
7d2f9a8
docs(materialized_artifact): explain the atomic-write invariants
claude Apr 18, 2026
51245da
refactor(materialized_artifact): fluent builder for materialize
claude Apr 18, 2026
2fb53c0
fix(materialized_artifact): build on Windows & allow reparameterized …
claude Apr 18, 2026
5145e20
docs(materialized_artifact_build): drop ENV_PREFIX doc comment
claude Apr 18, 2026
837bee1
docs(materialized_artifact): drop artifact! macro doc; restore ENV_PR…
claude Apr 18, 2026
3ba6996
docs(materialized_artifact): keep artifact! macro summary, drop ENV_P…
claude Apr 18, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 17 additions & 19 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 4 additions & 4 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,6 @@ cc = "1.2.39"
clap = "4.5.53"
color-eyre = "0.6.5"
compact_str = "0.9.0"
const_format = "0.2.34"
constcat = "0.6.1"
copy_dir = "0.1.3"
cow-utils = "0.1.3"
Expand All @@ -70,11 +69,13 @@ derive_more = "2.0.1"
diff-struct = "0.5.3"
directories = "6.0.0"
elf = { version = "0.8.0", default-features = false }
materialized_artifact = { path = "crates/materialized_artifact" }
materialized_artifact_build = { path = "crates/materialized_artifact_build" }
flate2 = "1.0.35"
fspy = { path = "crates/fspy" }
fspy_detours_sys = { path = "crates/fspy_detours_sys" }
fspy_preload_unix = { path = "crates/fspy_preload_unix", artifact = "cdylib" }
fspy_preload_windows = { path = "crates/fspy_preload_windows", artifact = "cdylib" }
fspy_preload_unix = { path = "crates/fspy_preload_unix", artifact = "cdylib", target = "target" }
fspy_preload_windows = { path = "crates/fspy_preload_windows", artifact = "cdylib", target = "target" }
fspy_seccomp_unotify = { path = "crates/fspy_seccomp_unotify" }
fspy_shared = { path = "crates/fspy_shared" }
fspy_shared_unix = { path = "crates/fspy_shared_unix" }
Expand Down Expand Up @@ -103,7 +104,6 @@ pretty_assertions = "1.4.1"
pty_terminal = { path = "crates/pty_terminal" }
pty_terminal_test = { path = "crates/pty_terminal_test" }
pty_terminal_test_client = { path = "crates/pty_terminal_test_client" }
rand = "0.9.1"
ratatui = "0.30.0"
rayon = "1.10.0"
ref-cast = "1.0.24"
Expand Down
20 changes: 11 additions & 9 deletions crates/fspy/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,36 +9,30 @@ allocator-api2 = { workspace = true, features = ["alloc"] }
wincode = { workspace = true }
bstr = { workspace = true, default-features = false }
bumpalo = { workspace = true }
const_format = { workspace = true, features = ["fmt"] }
derive_more = { workspace = true, features = ["debug"] }
materialized_artifact = { workspace = true }
fspy_shared = { workspace = true }
futures-util = { workspace = true }
libc = { workspace = true }
ouroboros = { workspace = true }
rand = { workspace = true }
rustc-hash = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["net", "process", "io-util", "sync", "rt"] }
tokio-util = { workspace = true }
which = { workspace = true, features = ["tracing"] }
xxhash-rust = { workspace = true }

[target.'cfg(target_os = "linux")'.dependencies]
fspy_seccomp_unotify = { workspace = true, features = ["supervisor"] }
nix = { workspace = true, features = ["uio"] }
tokio = { workspace = true, features = ["bytes"] }

[target.'cfg(all(unix, not(target_env = "musl")))'.dependencies]
fspy_preload_unix = { workspace = true }

[target.'cfg(unix)'.dependencies]
fspy_shared_unix = { workspace = true }
nix = { workspace = true, features = ["fs", "process", "socket", "feature"] }

[target.'cfg(target_os = "windows")'.dependencies]
fspy_detours_sys = { workspace = true }
fspy_preload_windows = { workspace = true }
winapi = { workspace = true, features = ["winbase", "securitybaseapi", "handleapi"] }
winsafe = { workspace = true }

Expand All @@ -59,11 +53,19 @@ fspy_test_bin = { path = "../fspy_test_bin", artifact = "bin", target = "aarch64
[target.'cfg(all(target_os = "linux", target_arch = "x86_64"))'.dev-dependencies]
fspy_test_bin = { path = "../fspy_test_bin", artifact = "bin", target = "x86_64-unknown-linux-musl" }

# Artifact build-deps must be unconditional: cargo's resolver panics when
# `artifact = "cdylib"` deps live under a `[target.cfg.build-dependencies]`
# block on cross-compile. Each preload crate's source is cfg-gated to compile
# as an empty cdylib on non-applicable targets, so the unused cross-target
# builds are cheap.
[build-dependencies]
anyhow = { workspace = true }
materialized_artifact_build = { workspace = true }
flate2 = { workspace = true }
fspy_preload_unix = { workspace = true }
fspy_preload_windows = { workspace = true }
sha2 = { workspace = true }
tar = { workspace = true }
xxhash-rust = { workspace = true, features = ["xxh3"] }

[lints]
workspace = true
Expand All @@ -72,4 +74,4 @@ workspace = true
doctest = false

[package.metadata.cargo-shear]
ignored = ["ctor", "fspy_test_bin"]
ignored = ["ctor", "fspy_test_bin", "fspy_preload_unix", "fspy_preload_windows"]
147 changes: 91 additions & 56 deletions crates/fspy/build.rs
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
use std::{
env::{self, current_dir},
env,
fmt::Write as _,
fs,
io::{Cursor, Read},
path::Path,
path::{Path, PathBuf},
process::{Command, Stdio},
};

use anyhow::{Context, bail};
use xxhash_rust::xxh3::xxh3_128;
use sha2::{Digest, Sha256};

fn download(url: &str) -> anyhow::Result<impl Read + use<>> {
fn download(url: &str) -> anyhow::Result<Vec<u8>> {
let curl = Command::new("curl")
.args([
"-f", // fail on HTTP errors
Expand All @@ -22,15 +23,14 @@ fn download(url: &str) -> anyhow::Result<impl Read + use<>> {
if !output.status.success() {
bail!("curl exited with status {} trying to download {}", output.status, url);
}
Ok(Cursor::new(output.stdout))
Ok(output.stdout)
}

fn unpack_tar_gz(content: impl Read, path: &str) -> anyhow::Result<Vec<u8>> {
fn unpack_tar_gz(tarball: impl Read, path: &str) -> anyhow::Result<Vec<u8>> {
use flate2::read::GzDecoder;
use tar::Archive;

// let path = path.as_ref();
let tar = GzDecoder::new(content);
let tar = GzDecoder::new(tarball);
let mut archive = Archive::new(tar);
for entry in archive.entries()? {
let mut entry = entry?;
Expand All @@ -43,89 +43,124 @@ fn unpack_tar_gz(content: impl Read, path: &str) -> anyhow::Result<Vec<u8>> {
bail!("Path {path} not found in tar gz")
}

fn download_and_unpack_tar_gz(url: &str, path: &str) -> anyhow::Result<Vec<u8>> {
let resp = download(url).context(format!("Failed to get ok response from {url}"))?;
let data = unpack_tar_gz(resp, path)
.context(format!("Failed to download or unpack {path} out of {url}"))?;
Ok(data)
fn sha256_hex(bytes: &[u8]) -> String {
let digest = Sha256::digest(bytes);
let mut s = String::with_capacity(64);
for b in digest {
write!(&mut s, "{b:02x}").unwrap();
}
s
}

/// (url, `path_in_targz`, `expected_hash`)
type BinaryDownload = (&'static str, &'static str, u128);
/// A prebuilt binary fetched from a GitHub release tarball and cached in
/// `OUT_DIR`, verified against a pinned SHA-256 digest.
struct BinaryDownload {
    /// Identifier used both as the on-disk filename in `OUT_DIR` and as the
    /// env-var prefix consumed by `artifact!($name)` at runtime.
    name: &'static str,
    /// GitHub release asset URL.
    url: &'static str,
    /// Path of the binary within the tarball.
    path_in_targz: &'static str,
    /// SHA-256 of the extracted binary. Doubles as the cache key: an
    /// already-extracted binary in `OUT_DIR` whose content hashes to this
    /// value is reused without hitting the network.
    expected_sha256: &'static str,
}

const MACOS_BINARY_DOWNLOADS: &[(&str, &[BinaryDownload])] = &[
(
"aarch64",
&[
(
"https://github.com/branchseer/oils-for-unix-build/releases/download/oils-for-unix-0.37.0/oils-for-unix-0.37.0-darwin-arm64.tar.gz",
"oils-for-unix",
282_073_174_065_923_237_490_435_663_309_538_399_576,
),
(
"https://github.com/uutils/coreutils/releases/download/0.4.0/coreutils-0.4.0-aarch64-apple-darwin.tar.gz",
"coreutils-0.4.0-aarch64-apple-darwin/coreutils",
35_998_406_686_137_668_997_937_014_088_186_935_383,
),
// https://github.com/branchseer/oils-for-unix-build/releases/tag/oils-for-unix-0.37.0
BinaryDownload {
name: "oils_for_unix",
url: "https://github.com/branchseer/oils-for-unix-build/releases/download/oils-for-unix-0.37.0/oils-for-unix-0.37.0-darwin-arm64.tar.gz",
path_in_targz: "oils-for-unix",
expected_sha256: "ce4bb80b15f0a0371af08b19b65bfa5ea17d30429ebb911f487de3d2bcc7a07d",
},
// https://github.com/uutils/coreutils/releases/tag/0.4.0
BinaryDownload {
name: "coreutils",
url: "https://github.com/uutils/coreutils/releases/download/0.4.0/coreutils-0.4.0-aarch64-apple-darwin.tar.gz",
path_in_targz: "coreutils-0.4.0-aarch64-apple-darwin/coreutils",
expected_sha256: "8e8f38d9323135a19a73d617336fce85380f3c46fcb83d3ae3e031d1c0372f21",
},
],
),
(
"x86_64",
&[
(
"https://github.com/branchseer/oils-for-unix-build/releases/download/oils-for-unix-0.37.0/oils-for-unix-0.37.0-darwin-x86_64.tar.gz",
"oils-for-unix",
142_673_558_272_427_867_831_039_361_796_426_010_330,
),
(
"https://github.com/uutils/coreutils/releases/download/0.4.0/coreutils-0.4.0-x86_64-apple-darwin.tar.gz",
"coreutils-0.4.0-x86_64-apple-darwin/coreutils",
120_898_281_113_671_104_995_723_556_995_187_526_689,
),
// https://github.com/branchseer/oils-for-unix-build/releases/tag/oils-for-unix-0.37.0
BinaryDownload {
name: "oils_for_unix",
url: "https://github.com/branchseer/oils-for-unix-build/releases/download/oils-for-unix-0.37.0/oils-for-unix-0.37.0-darwin-x86_64.tar.gz",
path_in_targz: "oils-for-unix",
expected_sha256: "cf1a95993127770e2a5fff277cd256a2bb28cf97d7f83ae42fdccc172cdb540d",
},
// https://github.com/uutils/coreutils/releases/tag/0.4.0
BinaryDownload {
name: "coreutils",
url: "https://github.com/uutils/coreutils/releases/download/0.4.0/coreutils-0.4.0-x86_64-apple-darwin.tar.gz",
path_in_targz: "coreutils-0.4.0-x86_64-apple-darwin/coreutils",
expected_sha256: "6be8bee6e8b91fc44a465203b9cc30538af00084b6657dc136d9e55837753eb1",
},
],
),
];

fn fetch_macos_binaries() -> anyhow::Result<()> {
fn fetch_macos_binaries(out_dir: &Path) -> anyhow::Result<()> {
if env::var("CARGO_CFG_TARGET_OS").unwrap() != "macos" {
return Ok(());
}

let out_dir = current_dir().unwrap().join(Path::new(&std::env::var_os("OUT_DIR").unwrap()));

let target_arch = env::var("CARGO_CFG_TARGET_ARCH").unwrap();
let downloads = MACOS_BINARY_DOWNLOADS
.iter()
.find(|(arch, _)| *arch == target_arch)
.context(format!("Unsupported macOS arch: {target_arch}"))?
.1;
// let downloads = [(zsh_url.as_str(), "bin/zsh", zsh_hash)];
for (url, path_in_targz, expected_hash) in downloads.iter().copied() {
let filename = path_in_targz.split('/').next_back().unwrap();
let download_path = out_dir.join(filename);
let hash_path = out_dir.join(format!("{filename}.hash"));

let file_exists = matches!(fs::read(&download_path), Ok(existing_file_data) if xxh3_128(&existing_file_data) == expected_hash);
if !file_exists {
let data = download_and_unpack_tar_gz(url, path_in_targz)?;
fs::write(&download_path, &data).context(format!(
"Saving {path_in_targz} in {url} to {}",
download_path.display()
))?;
let actual_hash = xxh3_128(&data);
for BinaryDownload { name, url, path_in_targz, expected_sha256 } in downloads {
let dest = out_dir.join(name);
// Cache hit: an already-extracted binary whose contents hash to
// `expected_sha256` is known-good and reused without redownloading.
let cached = matches!(
fs::read(&dest),
Ok(existing) if sha256_hex(&existing) == *expected_sha256,
);
if !cached {
let tarball = download(url).context(format!("Failed to download {url}"))?;
let data = unpack_tar_gz(Cursor::new(tarball), path_in_targz)
.context(format!("Failed to extract {path_in_targz} from {url}"))?;
let actual_sha256 = sha256_hex(&data);
assert_eq!(
actual_hash, expected_hash,
"expected_hash of {path_in_targz} in {url} needs to be updated"
&actual_sha256, expected_sha256,
"sha256 of {path_in_targz} in {url} does not match — update expected value in MACOS_BINARY_DOWNLOADS",
);
fs::write(&dest, &data).with_context(|| format!("writing {}", dest.display()))?;
}
fs::write(&hash_path, format!("{expected_hash:x}"))?;
materialized_artifact_build::register(name, &dest);
}
Ok(())
// let zsh_path = ensure_downloaded(&zsh_url);
}

/// Publish the preload cdylib built by cargo's artifact-dependency mechanism
/// so it can be materialized at runtime under the `fspy_preload` name.
///
/// No-op on musl targets, which carry no preload cdylib dependency.
fn register_preload_cdylib() -> anyhow::Result<()> {
    let target_os = env::var("CARGO_CFG_TARGET_OS").unwrap();
    let env_name = if target_os == "windows" {
        "CARGO_CDYLIB_FILE_FSPY_PRELOAD_WINDOWS"
    } else if env::var("CARGO_CFG_TARGET_ENV").unwrap() == "musl" {
        return Ok(());
    } else {
        "CARGO_CDYLIB_FILE_FSPY_PRELOAD_UNIX"
    };
    // The cdylib path is content-addressed by cargo; when its content changes
    // the path changes. Track it so we re-publish the hash on update.
    println!("cargo:rerun-if-env-changed={env_name}");
    let dylib_path = env::var_os(env_name).with_context(|| format!("{env_name} not set"))?;
    materialized_artifact_build::register("fspy_preload", Path::new(&dylib_path));
    Ok(())
}

fn main() -> anyhow::Result<()> {
    // Only this script itself is a rebuild trigger; downloaded binaries are
    // cached in OUT_DIR keyed by their pinned SHA-256, so no other inputs
    // need tracking here.
    println!("cargo:rerun-if-changed=build.rs");
    let out_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
    fetch_macos_binaries(&out_dir).context("Failed to fetch macOS binaries")?;
    register_preload_cdylib().context("Failed to register preload cdylib")?;
    Ok(())
}
Loading
Loading