Skip to content

Commit 57ea531

Browse files
committed
Input::getAccessorUnchecked(): Wrap fetches in a path lock
This prevents multiple processes (like nix-eval-jobs instances) from fetching the same input at the same time. Concurrent fetching of the same input doesn't matter for correctness (the fetchers are required to handle it safely), but it can cause a lot of redundant downloads.
1 parent 7f10286 commit 57ea531

2 files changed

Lines changed: 29 additions & 0 deletions

File tree

src/libfetchers/fetchers.cc

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,9 @@
88
#include "nix/util/url.hh"
99
#include "nix/util/forwarding-source-accessor.hh"
1010
#include "nix/util/archive.hh"
11+
#include "nix/util/users.hh"
12+
#include "nix/store/pathlocks.hh"
13+
#include "nix/util/environment-variables.hh"
1114

1215
#include <nlohmann/json.hpp>
1316

@@ -367,6 +370,18 @@ std::pair<ref<SourceAccessor>, Input> Input::getAccessorUnchecked(const Settings
367370
return {accessor, result};
368371
};
369372

373+
/* Acquire a path lock on this input. Note that fetching the same input in parallel is supposed to be safe (it's up
374+
* to the fetchers to guarantee this), so this is merely intended to avoid work duplication. */
375+
auto lockFilePath =
376+
getCacheDir() / "fetcher-locks"
377+
/ hashString(HashAlgorithm::SHA256, attrsToJSON(toAttrs()).dump()).to_string(HashFormat::Base16, false);
378+
std::filesystem::create_directories(lockFilePath.parent_path());
379+
PathLocks lock(
380+
{lockFilePath.string()}, fmt("waiting for another Nix process to finish fetching input '%s'...", to_string()));
381+
382+
if (getEnv("_NIX_TEST_CONCURRENT_FETCHES"))
383+
std::this_thread::sleep_for(std::chrono::seconds(1));
384+
370385
/* See if the input is in the cache of the fetcher. */
371386
try {
372387
if (auto res = scheme->getAccessor(settings, store, *this, true))

tests/functional/tarball.sh

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,3 +115,17 @@ path="$(nix flake prefetch --refresh --json "tarball+file://$TEST_ROOT/tar.tar"
115115
[[ $(cat "$path/a/b/xyzzy") = xyzzy ]]
116116
[[ $(cat "$path/a/b/foo") = foo ]]
117117
[[ $(cat "$path/bla") = abc ]]
118+
119+
# Test that concurrent invocations of Nix will fetch the tarball only once.
120+
clearStore
121+
rm -rf "$TEST_HOME/.cache"
122+
nix-store --init # needed because concurrent creation of the store can give SQLite errors
123+
_NIX_TEST_CONCURRENT_FETCHES=1 _NIX_FORCE_HTTP=1 nix flake prefetch -v "tarball+file://$TEST_ROOT/tar.tar" 2> "$TEST_ROOT/log1" &
124+
pid1="$!"
125+
_NIX_TEST_CONCURRENT_FETCHES=1 _NIX_FORCE_HTTP=1 nix flake prefetch -v "tarball+file://$TEST_ROOT/tar.tar" 2> "$TEST_ROOT/log2" &
126+
pid2="$!"
127+
wait "$pid1"
128+
wait "$pid2"
129+
[[ $(cat "$TEST_ROOT/log1" "$TEST_ROOT/log2" | grep -c "Download.*to") -eq 2 ]]
130+
[[ $(cat "$TEST_ROOT/log1" "$TEST_ROOT/log2" | grep -c "downloading.*tar.tar") -eq 1 ]]
131+
[[ $(cat "$TEST_ROOT/log1" "$TEST_ROOT/log2" | grep -c "waiting for another Nix process to finish fetching input") -eq 1 ]]

0 commit comments

Comments
 (0)