@@ -90,7 +90,7 @@ protected ModuleExtensionContext(
this.rootModuleHasNonDevDependency = rootModuleHasNonDevDependency;
// Record inputs to the extension that are known prior to evaluation.
RepoRecordedInput.EnvVar.wrap(staticEnvVars)
.forEach((input, value) -> recordInput(input, value.orElse(null)));
.forEach((input, value) -> recordInputWithValue(input, value.orElse(null)));
repoMappingRecorder.record(staticRepoMappingEntries);
}

@@ -42,10 +42,10 @@ java_library(
java_library(
name = "repository_fetch_function",
srcs = [
"DigestWriter.java",
"RepositoryFetchFunction.java",
],
deps = [
":digest_writer",
":exception",
":repo_definition",
":repo_definition_value",
@@ -83,6 +83,32 @@ java_library(
],
)

java_library(
name = "digest_writer",
srcs = [
"DigestWriter.java",
],
deps = [
":exception",
":repo_definition",
":utils",
"//src/main/java/com/google/devtools/build/lib/analysis:blaze_directories",
"//src/main/java/com/google/devtools/build/lib/bazel/bzlmod:serialization",
"//src/main/java/com/google/devtools/build/lib/cmdline",
"//src/main/java/com/google/devtools/build/lib/packages/semantics",
"//src/main/java/com/google/devtools/build/lib/repository:repository_events",
"//src/main/java/com/google/devtools/build/lib/rules:repository/repo_recorded_input",
"//src/main/java/com/google/devtools/build/lib/skyframe:repo_environment_function",
"//src/main/java/com/google/devtools/build/lib/util",
"//src/main/java/com/google/devtools/build/lib/vfs",
"//src/main/java/com/google/devtools/build/lib/vfs:pathfragment",
"//src/main/java/com/google/devtools/build/skyframe:skyframe-objects",
"//src/main/java/net/starlark/java/eval",
"//third_party:guava",
"//third_party:jsr305",
],
)

java_library(
name = "repo_definition",
srcs = [
@@ -144,6 +170,7 @@ java_library(
"//src/main/java/com/google/devtools/build/lib/vfs",
"//src/main/java/com/google/devtools/build/skyframe:skyframe-objects",
"//src/main/java/net/starlark/java/eval",
"//third_party:guava",
"//third_party:jsr305",
],
)

@@ -18,6 +18,7 @@

import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.bazel.bzlmod.GsonTypeAdapterUtil;
@@ -38,7 +39,7 @@
import net.starlark.java.eval.StarlarkSemantics;

/** Handles writing and reading of repo marker files. */
class DigestWriter {
public class DigestWriter {

// The marker file version is injected into the rule key digest so the rule key is always different
// when we decide to update the format.
@@ -95,49 +96,80 @@ sealed interface RepoDirectoryState {
record UpToDate() implements RepoDirectoryState {}

record OutOfDate(String reason) implements RepoDirectoryState {}

final class Indeterminate implements RepoDirectoryState {
private final ImmutableList<ImmutableList<RepoRecordedInput.WithValue>> batches;

private Indeterminate(ImmutableList<ImmutableList<RepoRecordedInput.WithValue>> batches) {
this.batches = batches;
}
}
}

RepoDirectoryState areRepositoryAndMarkerFileConsistent(Environment env)
RepoDirectoryState areRepositoryAndMarkerFileConsistent(
Environment env, @Nullable RepoDirectoryState.Indeterminate indeterminateState)
throws InterruptedException, RepositoryFunctionException {
return areRepositoryAndMarkerFileConsistent(env, markerPath);
return areRepositoryAndMarkerFileConsistent(env, markerPath, indeterminateState);
}

/**
* Checks if the state of the repository in the file system is consistent with the rule in the
* WORKSPACE file.
*
* <p>Returns null if a Skyframe status is needed.
* <p>Returns {@link RepoDirectoryState.Indeterminate} if a Skyframe status is needed.
*
* <p>We check the repository root for existence here, but we can't depend on the FileValue,
* because it's possible that we eventually create that directory in which case the FileValue and
* the state of the file system would be inconsistent.
*/
@Nullable
RepoDirectoryState areRepositoryAndMarkerFileConsistent(Environment env, Path markerPath)
RepoDirectoryState areRepositoryAndMarkerFileConsistent(
Environment env,
Path markerPath,
@Nullable RepoDirectoryState.Indeterminate indeterminateState)
throws RepositoryFunctionException, InterruptedException {
if (!markerPath.exists()) {
return new RepoDirectoryState.OutOfDate("repo hasn't been fetched yet");
}

try {
String content = FileSystemUtils.readContent(markerPath, ISO_8859_1);
var recordedInputValues =
readMarkerFile(content, Preconditions.checkNotNull(predeclaredInputHash));
Optional<String> outdatedReason =
RepoRecordedInput.isAnyValueOutdated(env, directories, recordedInputValues);
if (env.valuesMissing()) {
return null;
// Avoid reading the marker file repeatedly.
if (indeterminateState == null) {
String content = FileSystemUtils.readContent(markerPath, ISO_8859_1);
var recordedInputValues =
readMarkerFile(content, Preconditions.checkNotNull(predeclaredInputHash));
if (recordedInputValues.isEmpty()) {
return new RepoDirectoryState.OutOfDate(
"Bazel version, flags, repo rule definition or attributes changed");
}
// Check inputs in batches to prevent Skyframe cycles caused by outdated dependencies.
indeterminateState =
new RepoDirectoryState.Indeterminate(
RepoRecordedInput.WithValue.splitIntoBatches(recordedInputValues.get()));
}
if (outdatedReason.isPresent()) {
return new RepoDirectoryState.OutOfDate(outdatedReason.get());
for (var batch : indeterminateState.batches) {
RepoRecordedInput.prefetch(
env, directories, Collections2.transform(batch, RepoRecordedInput.WithValue::input));
if (env.valuesMissing()) {
return indeterminateState;
}
Optional<String> outdatedReason =
RepoRecordedInput.isAnyValueOutdated(env, directories, batch);
if (outdatedReason.isPresent()) {
return new RepoDirectoryState.OutOfDate(outdatedReason.get());
}
}
return new RepoDirectoryState.UpToDate();
} catch (IOException e) {
throw new RepositoryFunctionException(e, Transience.TRANSIENT);
}
}

private static ImmutableList<RepoRecordedInput.WithValue> readMarkerFile(
/**
* Returns a list of recorded inputs with their values parsed from the given marker file if the
* predeclared input hash matches, or {@code Optional.empty()} if the hash doesn't match or any
* error occurs during parsing.
*/
public static Optional<ImmutableList<RepoRecordedInput.WithValue>> readMarkerFile(
String content, String predeclaredInputHash) {
Iterable<String> lines = Splitter.on('\n').split(content);

@@ -151,26 +183,22 @@ private static ImmutableList<RepoRecordedInput.WithValue> readMarkerFile(
if (!line.equals(predeclaredInputHash)) {
// Break early, need to reload anyway. This also detects marker file version changes
// so that unknown formats are not parsed.
return ImmutableList.of(
new RepoRecordedInput.WithValue(
new NeverUpToDateRepoRecordedInput(
"Bazel version, flags, repo rule definition or attributes changed"),
""));
return Optional.empty();
}
firstLineVerified = true;
} else {
var inputAndValue = RepoRecordedInput.WithValue.parse(line);
if (inputAndValue.isEmpty()) {
// On parse failure, just forget everything else and mark the whole input out of date.
return PARSE_FAILURE;
return Optional.empty();
}
recordedInputValues.add(inputAndValue.get());
}
}
if (!firstLineVerified) {
return PARSE_FAILURE;
return Optional.empty();
}
return recordedInputValues.build();
return Optional.of(recordedInputValues.build());
}

@Nullable
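
Taken together, the DigestWriter changes make the up-to-date check resumable: readMarkerFile now returns Optional.empty() when the predeclared input hash doesn't match or a line fails to parse (instead of a sentinel NeverUpToDateRepoRecordedInput), and areRepositoryAndMarkerFileConsistent checks the recorded inputs batch by batch, handing the opaque Indeterminate carrier back to the caller whenever Skyframe values are missing so the marker file isn't re-read on the next restart. Below is a minimal, self-contained sketch of that control flow; MarkerCheckSketch, InputWithValue, Env, and the simplified signatures are illustrative stand-ins, not the real Bazel types.

// Sketch only: simplified stand-ins for the Bazel types above, not the real signatures.
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;

final class MarkerCheckSketch {
  /** Stand-in for RepoRecordedInput.WithValue. */
  record InputWithValue(String input, String value) {}

  /** Stand-in for DigestWriter.RepoDirectoryState and its three cases. */
  sealed interface RepoState {}

  record UpToDate() implements RepoState {}

  record OutOfDate(String reason) implements RepoState {}

  record Indeterminate(List<List<InputWithValue>> batches) implements RepoState {}

  /** Stand-in for the Skyframe Environment interactions the real check performs. */
  interface Env {
    boolean valuesMissing();

    Optional<String> anyValueOutdated(List<InputWithValue> batch);
  }

  static RepoState check(
      Env env, Supplier<Optional<List<InputWithValue>>> markerParser, Indeterminate resumed) {
    Indeterminate state = resumed;
    if (state == null) {
      // First pass only: read and parse the marker file. Optional.empty() means the predeclared
      // input hash changed or parsing failed, so the repo is simply out of date.
      Optional<List<InputWithValue>> parsed = markerParser.get();
      if (parsed.isEmpty()) {
        return new OutOfDate("Bazel version, flags, repo rule definition or attributes changed");
      }
      // The real code splits the recorded inputs into several batches here.
      state = new Indeterminate(List.of(parsed.get()));
    }
    for (List<InputWithValue> batch : state.batches()) {
      // The real code prefetches the batch's Skyframe dependencies before this check.
      if (env.valuesMissing()) {
        return state; // Hand the carrier back; the caller stashes it and retries after a restart.
      }
      Optional<String> outdated = env.anyValueOutdated(batch);
      if (outdated.isPresent()) {
        return new OutOfDate(outdated.get());
      }
    }
    return new UpToDate();
  }
}

Returning the carrier instead of null is what lets the caller keep it in its Skyframe compute state (see the State changes in RepositoryFetchFunction below) and pass it back in after the restart.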

@@ -19,13 +19,15 @@

import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.FileStateValue;
import com.google.devtools.build.lib.actions.FileValue;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.bazel.bzlmod.NonRegistryOverride;
import com.google.devtools.build.lib.bazel.bzlmod.VendorFileValue;
import com.google.devtools.build.lib.bazel.repository.DigestWriter.RepoDirectoryState;
import com.google.devtools.build.lib.bazel.repository.RepositoryFunctionException.AlreadyReportedRepositoryAccessException;
import com.google.devtools.build.lib.bazel.repository.cache.LocalRepoContentsCache;
import com.google.devtools.build.lib.bazel.repository.cache.LocalRepoContentsCache.CandidateRepo;
@@ -70,6 +72,7 @@
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.WorkerSkyKeyComputeState;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Optional;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
@@ -147,6 +150,13 @@ private record FetchResult(

private static class State extends WorkerSkyKeyComputeState<FetchResult> {
@Nullable FetchResult result;
// While checking whether a particular repo candidate (either the current contents under the
// external directory or a candidate repo in the local repo contents cache) is up-to-date, this
// holds opaque intermediate state. Reset to null after each candidate.
@Nullable RepoDirectoryState.Indeterminate indeterminateState;
// The candidate repos in the local repo contents cache that still have to be checked.
@Nullable ArrayDeque<CandidateRepo> candidateRepos;
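// Like indeterminateState, but for the contents-cache candidate currently being checked.
// (Descriptive comment added editorially; the field below is otherwise uncommented.)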
@Nullable RepoDirectoryState.Indeterminate candidateIndeterminateState;
}

@Nullable
@@ -218,35 +228,58 @@ public SkyValue compute(SkyKey skyKey, Environment env)
|| vendorFile.pinnedRepos().contains(repositoryName);
}

var state = env.getState(State::new);
if (shouldUseCachedRepoContents(env, repoDefinition)) {
// Make sure the marker file is up-to-date and correctly describes the current repository state.
var repoState = digestWriter.areRepositoryAndMarkerFileConsistent(env);
if (repoState == null) {
return null;
}
if (repoState instanceof DigestWriter.RepoDirectoryState.UpToDate) {
return new RepositoryDirectoryValue.Success(
Root.fromPath(repoRoot), excludeRepoFromVendoring);
var repoState =
digestWriter.areRepositoryAndMarkerFileConsistent(env, state.indeterminateState);
switch (repoState) {
case RepoDirectoryState.Indeterminate intermediateState -> {
state.indeterminateState = intermediateState;
return null;
}
case RepoDirectoryState.UpToDate ignored -> {
return new RepositoryDirectoryValue.Success(
Root.fromPath(repoRoot), excludeRepoFromVendoring);
}
case RepoDirectoryState.OutOfDate ignored -> {
// Out of date: continue below to check the repo contents cache and refetch if needed.
}
}

// Then check if the global repo contents cache has this.
if (repoContentsCache.isEnabled()) {
for (CandidateRepo candidate :
repoContentsCache.getCandidateRepos(digestWriter.predeclaredInputHash)) {
if (state.candidateRepos == null) {
state.candidateRepos =
new ArrayDeque<>(
repoContentsCache.getCandidateRepos(digestWriter.predeclaredInputHash));
}
for (var it = state.candidateRepos.iterator(); it.hasNext(); ) {
CandidateRepo candidate = it.next();
repoState =
digestWriter.areRepositoryAndMarkerFileConsistent(
env, candidate.recordedInputsFile());
if (repoState == null) {
return null;
}
if (repoState instanceof DigestWriter.RepoDirectoryState.UpToDate) {
if (setupOverride(candidate.contentsDir().asFragment(), env, repoRoot, repositoryName)
== null) {
env, candidate.recordedInputsFile(), state.candidateIndeterminateState);
switch (repoState) {
case RepoDirectoryState.Indeterminate intermediateState -> {
state.candidateIndeterminateState = intermediateState;
return null;
}
candidate.touch();
return new RepositoryDirectoryValue.Success(
Root.fromPath(repoRoot), excludeRepoFromVendoring);
case RepoDirectoryState.UpToDate ignored -> {
if (setupOverride(
candidate.contentsDir().asFragment(), env, repoRoot, repositoryName)
== null) {
return null;
}
candidate.touch();
return new RepositoryDirectoryValue.Success(
Root.fromPath(repoRoot), excludeRepoFromVendoring);
}
case RepoDirectoryState.OutOfDate ignored -> {
// Reset for the next candidate.
state.candidateIndeterminateState = null;
// Remove from the state so that we don't check it again on a restart.
it.remove();
}
}
}
}
@@ -290,6 +323,9 @@ public SkyValue compute(SkyKey skyKey, Environment env)
cachedRepoDir =
repoContentsCache.moveToCache(
repoRoot, digestWriter.markerPath, digestWriter.predeclaredInputHash);
// Refetch candidate repos upon the next restart to pick up the one we just created.
// TODO: Directly set this to the correct CandidateRepo.
state.candidateRepos = null;
} catch (IOException e) {
throw new RepositoryFunctionException(
new IOException(
@@ -394,17 +430,20 @@ private RepositoryDirectoryValue tryGettingValueUsingVendoredRepo(
return setupOverride(vendorRepoPath.asFragment(), env, repoRoot, repositoryName);
}

DigestWriter.RepoDirectoryState vendoredRepoState =
digestWriter.areRepositoryAndMarkerFileConsistent(env, vendorMarker);
if (vendoredRepoState == null) {
var state = env.getState(State::new);
RepoDirectoryState vendoredRepoState =
digestWriter.areRepositoryAndMarkerFileConsistent(
env, vendorMarker, state.indeterminateState);
if (vendoredRepoState instanceof RepoDirectoryState.Indeterminate intermediateState) {
state.indeterminateState = intermediateState;
return null;
}
// If our repo is up-to-date, or this is an offline build (--nofetch), then the vendored repo
// is used.
if (vendoredRepoState instanceof DigestWriter.RepoDirectoryState.UpToDate
if (vendoredRepoState instanceof RepoDirectoryState.UpToDate
|| (!RepositoryDirectoryValue.IS_VENDOR_COMMAND.get(env)
&& RepositoryDirectoryValue.FETCH_DISABLED.get(env))) {
if (vendoredRepoState instanceof DigestWriter.RepoDirectoryState.OutOfDate(String reason)) {
if (vendoredRepoState instanceof RepoDirectoryState.OutOfDate(String reason)) {
env.getListener()
.handle(
Event.warn(
@@ -427,8 +466,9 @@
+ " be fetched into the external cache and used. To update the repo"
+ " in the vendor directory, run the bazel vendor command",
repositoryName.getName(),
((DigestWriter.RepoDirectoryState.OutOfDate) vendoredRepoState).reason())));
((RepoDirectoryState.OutOfDate) vendoredRepoState).reason())));
}
state.indeterminateState = null;
} else if (vendorFile.pinnedRepos().contains(repositoryName)) {
throw new RepositoryFunctionException(
new IOException(
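
The compute-state changes and the candidate loop above make the whole fetch function restart-friendly: the Indeterminate carrier for whichever marker file is currently being checked is stashed in State, the contents-cache candidates still to be examined live in an ArrayDeque, out-of-date candidates are removed so they are never re-checked, and the carrier is reset when moving on to the next candidate. A rough sketch of that resumption pattern follows; CandidateScanSketch, State, Checker, and the Check enum are hypothetical stand-ins rather than Bazel's actual classes.

// Sketch only: hypothetical stand-ins, not Bazel's actual classes.
import java.util.ArrayDeque;
import java.util.List;
import java.util.Optional;

final class CandidateScanSketch {
  /** Stand-in for the per-key state kept in WorkerSkyKeyComputeState. */
  static final class State {
    Object indeterminateCarrier; // opaque progress for the candidate currently being checked
    ArrayDeque<String> candidateRepos; // cache candidates that still have to be checked
  }

  enum Check { UP_TO_DATE, OUT_OF_DATE, NEED_RESTART }

  /** Stand-in for DigestWriter#areRepositoryAndMarkerFileConsistent. */
  interface Checker {
    Check check(String candidateMarkerFile, Object resumedCarrier);

    Object carrier(); // the carrier to resume from when check() returned NEED_RESTART
  }

  /**
   * Returns the first up-to-date candidate, Optional.empty() if none matches, or null if a
   * Skyframe restart is needed (mirroring the null-means-restart convention of SkyFunctions).
   */
  static Optional<String> scan(State state, List<String> allCandidates, Checker checker) {
    if (state.candidateRepos == null) {
      // Computed once and kept in the compute state, so restarts don't rebuild the list.
      state.candidateRepos = new ArrayDeque<>(allCandidates);
    }
    for (var it = state.candidateRepos.iterator(); it.hasNext(); ) {
      String candidate = it.next();
      switch (checker.check(candidate, state.indeterminateCarrier)) {
        case NEED_RESTART -> {
          state.indeterminateCarrier = checker.carrier(); // resume from here after the restart
          return null;
        }
        case UP_TO_DATE -> {
          return Optional.of(candidate);
        }
        case OUT_OF_DATE -> {
          state.indeterminateCarrier = null; // reset the carrier for the next candidate
          it.remove(); // never re-check this candidate after a restart
        }
      }
    }
    return Optional.empty();
  }
}

On a real Skyframe restart the function runs again from the top, but because the deque and the carrier survive in the compute state, the scan continues from the candidate it was interrupted on instead of starting over.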