Skip to content
Draft
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions crates/cargo-util-schemas/src/core/source_kind.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ pub enum SourceKind {
LocalRegistry,
/// A directory-based registry.
Directory,
/// Package sources distributed with the rust toolchain
Builtin,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This will impact the unique identifier for the packages from this source in cargo's json output when compiling, cargo metadata, cargo <cmd> -p, etc

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'll modify there too, and add a note to check the stdout in various use cases. The RFCs often make notes on what the output of various commands will be. Note that builtin doesn't actually appear in Units - they're all Path dependencies by that point.

An interesting point on cargo metadata is that we decided that we have an unresolved question regarding if deps of builtins should be shown on output, which will be a little hard here as they're not attached until unit generation.

}

// The hash here is important for what folder packages get downloaded into.
Expand All @@ -40,6 +42,7 @@ impl SourceKind {
SourceKind::SparseRegistry => None,
SourceKind::LocalRegistry => Some("local-registry"),
SourceKind::Directory => Some("directory"),
SourceKind::Builtin => Some("builtin"),
}
}
}
Expand Down Expand Up @@ -71,6 +74,10 @@ impl Ord for SourceKind {
(_, SourceKind::Directory) => Ordering::Greater,

(SourceKind::Git(a), SourceKind::Git(b)) => a.cmp(b),
(SourceKind::Git(_), _) => Ordering::Less,
(_, SourceKind::Git(_)) => Ordering::Greater,

(SourceKind::Builtin, SourceKind::Builtin) => Ordering::Equal,
}
}
}
Expand Down
1 change: 1 addition & 0 deletions src/cargo/core/compiler/standard_lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ pub fn resolve_std<'gctx>(
HasDevUnits::No,
crate::core::resolver::features::ForceAllTargets::No,
dry_run,
false,
)?;
debug_assert_eq!(resolve.specs_and_features.len(), 1);
Ok((
Expand Down
32 changes: 32 additions & 0 deletions src/cargo/core/dependency.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,10 @@ struct Inner {
// This dependency should be used only for this platform.
// `None` means *all platforms*.
platform: Option<Platform>,

// Opaque dependencies should be resolved with a separate resolver run, and handled
// by unit generation.
opaque: bool,
}

#[derive(Serialize)]
Expand Down Expand Up @@ -162,6 +166,30 @@ impl Dependency {
platform: None,
explicit_name_in_toml: None,
artifact: None,
opaque: false,
}),
}
}

pub fn new_injected_builtin(name: InternedString) -> Dependency {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

what do you see as the role of this compared to the other `new` constructors?

assert!(!name.is_empty());
Dependency {
inner: Arc::new(Inner {
name,
source_id: SourceId::new_builtin(&name).expect("package name is valid url"),
registry_id: None,
req: OptVersionReq::Any,
kind: DepKind::Normal,
only_match_name: true,
optional: false,
public: false,
features: Vec::new(),
default_features: true,
specified_req: false,
platform: None,
explicit_name_in_toml: None,
artifact: None,
opaque: true,
}),
}
}
Expand Down Expand Up @@ -455,6 +483,10 @@ impl Dependency {
pub(crate) fn maybe_lib(&self) -> bool {
self.artifact().map(|a| a.is_lib).unwrap_or(true)
}

/// Returns true when this dependency is "opaque": it should be resolved
/// with a separate resolver run and handled by unit generation rather
/// than resolved normally (see `Inner::opaque`).
pub fn is_opaque(&self) -> bool {
self.inner.opaque
}
}

/// The presence of an artifact turns an ordinary dependency into an Artifact dependency.
Expand Down
33 changes: 32 additions & 1 deletion src/cargo/core/registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
//! The former is just one kind of source,
//! while the latter involves operations on the registry Web API.

use std::collections::{HashMap, HashSet};
use std::collections::{BTreeMap, HashMap, HashSet};
use std::task::{Poll, ready};

use crate::core::{Dependency, PackageId, PackageSet, Patch, SourceId, Summary};
Expand All @@ -24,6 +24,7 @@ use crate::util::{CanonicalUrl, GlobalContext};
use annotate_snippets::Level;
use anyhow::Context as _;
use itertools::Itertools;
use semver::Version;
use tracing::{debug, trace};
use url::Url;

Expand Down Expand Up @@ -724,6 +725,36 @@ impl<'gctx> Registry for PackageRegistry<'gctx> {
)
})?;

if dep.is_opaque() {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd like to find a way to ask the source for the opaque variant of the summary. The tricky thing will be that we need to work with both variants.

// Currently, all opaque dependencies are builtins.
// Create a dummy Summary that can be replaced with a real package during
// unit generation
trace!(
"Injecting package to satisfy builtin dependency on {}",
dep.package_name()
);
let ver = if dep.package_name() == "compiler_builtins" {
//TODO: hack
Version::new(0, 1, 160)
} else {
Version::new(0, 0, 0)
};
let pkg_id = PackageId::new(
dep.package_name(),
ver,
SourceId::new_builtin(&dep.package_name()).expect("SourceId ok"),
);

let summary = Summary::new(
pkg_id,
vec![],
&BTreeMap::new(), // TODO: bodge
Option::<String>::None,
None,
)?;
f(IndexSummary::Candidate(summary));
return Poll::Ready(Ok(()));
}
let source = self.sources.get_mut(dep.source_id());
match (override_summary, source) {
(Some(_), None) => {
Expand Down
4 changes: 4 additions & 0 deletions src/cargo/core/resolver/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ pub struct ResolverContext {
/// a way to look up for a package in activations what packages required it
/// and all of the exact deps that it fulfilled.
pub parents: Graph<PackageId, im_rc::HashSet<Dependency, rustc_hash::FxBuildHasher>>,
// Opaque dependencies require a separate resolver run as they allow for multiple
// different semver-compatible versions of crates in the final resolve. This is the
// (unactivated) set of Summaries that need handling in a future invocation
//pub promises: HashSet<Dependency>,
}

/// When backtracking it can be useful to know how far back to go.
Expand Down
38 changes: 35 additions & 3 deletions src/cargo/core/resolver/dep_cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,21 +48,42 @@ pub struct RegistryQueryer<'a> {
>,
/// all the cases we ended up using a supplied replacement
used_replacements: HashMap<PackageId, Summary>,
/// Cached builtin dependencies that should be injected. Empty implies that builtins shouldn't
/// be injected
builtins: Vec<Dependency>,
}

impl<'a> RegistryQueryer<'a> {
/// Creates a new query layer over `registry` with empty caches.
///
/// When `inject_builtins` is true, the fixed set of toolchain-provided
/// crates is pre-built as injected dependencies; otherwise `builtins`
/// stays empty, which disables injection entirely.
pub fn new(
    registry: &'a mut dyn Registry,
    replacements: &'a [(PackageIdSpec, Dependency)],
    version_prefs: &'a VersionPreferences,
    inject_builtins: bool,
) -> Self {
    // Package sources distributed with the rust toolchain.
    const BUILTIN_CRATES: [&str; 6] = [
        "std",
        "alloc",
        "core",
        "panic_unwind",
        "proc_macro",
        "compiler_builtins",
    ];
    let builtins: Vec<Dependency> = if inject_builtins {
        BUILTIN_CRATES
            .iter()
            .map(|&krate| Dependency::new_injected_builtin(krate.into()))
            .collect()
    } else {
        Vec::new()
    };
    RegistryQueryer {
        registry,
        replacements,
        version_prefs,
        registry_cache: HashMap::new(),
        summary_cache: HashMap::new(),
        used_replacements: HashMap::new(),
        builtins,
    }
}

Expand Down Expand Up @@ -238,10 +259,11 @@ impl<'a> RegistryQueryer<'a> {
{
return Ok(out.0.clone());
}

// First, figure out our set of dependencies based on the requested set
// of features. This also calculates what features we're going to enable
// for our own dependencies.
let (used_features, deps) = resolve_features(parent, candidate, opts)?;
let (used_features, deps) = resolve_features(parent, candidate, opts, &self.builtins)?;

// Next, transform all dependencies into a list of possible candidates
// which can satisfy that dependency.
Expand Down Expand Up @@ -291,18 +313,28 @@ pub fn resolve_features<'b>(
parent: Option<PackageId>,
s: &'b Summary,
opts: &'b ResolveOpts,
builtins: &[Dependency],
) -> ActivateResult<(HashSet<InternedString>, Vec<(Dependency, FeaturesSet)>)> {
// First, filter by dev-dependencies.
let deps = s.dependencies();
let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps);

let deps = deps
.into_iter()
.filter(|d| d.is_transitive() || opts.dev_deps);
let builtin_deps = if s.source_id().is_builtin() {
// Don't add builtin deps to dummy builtin packages
None
} else {
Some(builtins.iter())
};
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I assume this is for implicit builtins. Is there a reason you chose to do this here?


let reqs = build_requirements(parent, s, opts)?;
let mut ret = Vec::new();
let default_dep = BTreeSet::new();
let mut valid_dep_names = HashSet::new();

// Next, collect all actually enabled dependencies and their features.
for dep in deps {
for dep in deps.chain(builtin_deps.into_iter().flatten()) {
// Skip optional dependencies, but not those enabled through a
// feature
if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) {
Expand Down
3 changes: 2 additions & 1 deletion src/cargo/core/resolver/encode.rs
Original file line number Diff line number Diff line change
Expand Up @@ -661,7 +661,8 @@ pub fn encodable_package_id(
}

fn encodable_source_id(id: SourceId, version: ResolveVersion) -> Option<TomlLockfileSourceId> {
if id.is_path() {
// TODO: Not enough to stop builtins from appearing in the lockfile
if id.is_path() || id.is_builtin() {
None
} else {
Some(
Expand Down
3 changes: 2 additions & 1 deletion src/cargo/core/resolver/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -126,14 +126,15 @@ pub fn resolve(
version_prefs: &VersionPreferences,
resolve_version: ResolveVersion,
gctx: Option<&GlobalContext>,
inject_builtins: bool,
) -> CargoResult<Resolve> {
let first_version = match gctx {
Some(config) if config.cli_unstable().direct_minimal_versions => {
Some(VersionOrdering::MinimumVersionsFirst)
}
_ => None,
};
let mut registry = RegistryQueryer::new(registry, replacements, version_prefs);
let mut registry = RegistryQueryer::new(registry, replacements, version_prefs, inject_builtins);

// Global cache of the reasons for each time we backtrack.
let mut past_conflicting_activations = conflict_cache::ConflictCache::new();
Expand Down
2 changes: 1 addition & 1 deletion src/cargo/core/resolver/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ impl ResolveBehavior {
}
}

/// Options for how the resolve should work.
/// Options for how a Summary should be activated during the resolve.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct ResolveOpts {
/// Whether or not dev-dependencies should be included.
Expand Down
32 changes: 32 additions & 0 deletions src/cargo/core/source_id.rs
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,21 @@ impl SourceId {
}
}

pub fn new_builtin(name: &str) -> CargoResult<SourceId> {
// Injecting builtins earlier (somewhere with access to RustcTargetData) is needed instead of this
let home = std::env::var("HOME").expect("HOME is set");
let path = format!(
"file://{home}/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/"
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The URL in source ids is public facing. I think we'll need something more generic and then a new Source

);
if name == "compiler_builtins" {
Self::from_url(&format!(
"builtin+{path}compiler-builtins/compiler-builtins"
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why the special case?

))
} else {
Self::from_url(&format!("builtin+{path}{name}"))
}
}

/// Parses a source URL and returns the corresponding ID.
///
/// ## Example
Expand Down Expand Up @@ -176,6 +191,10 @@ impl SourceId {
let url = url.into_url()?;
SourceId::new(SourceKind::Path, url, None)
}
"builtin" => {
let url = url.into_url()?;
SourceId::new(SourceKind::Builtin, url, None)
}
kind => Err(anyhow::format_err!("unsupported source protocol: {}", kind)),
}
}
Expand Down Expand Up @@ -387,6 +406,10 @@ impl SourceId {
matches!(self.inner.kind, SourceKind::Git(_))
}

/// Returns true when this source id refers to a `builtin` source —
/// package sources distributed with the rust toolchain
/// (`SourceKind::Builtin`).
pub fn is_builtin(self) -> bool {
matches!(self.inner.kind, SourceKind::Builtin)
}

/// Creates an implementation of `Source` corresponding to this ID.
///
/// * `yanked_whitelist` --- Packages allowed to be used, even if they are yanked.
Expand Down Expand Up @@ -433,6 +456,14 @@ impl SourceId {
.expect("path sources cannot be remote");
Ok(Box::new(DirectorySource::new(&path, self, gctx)))
}
SourceKind::Builtin => {
let path = self
.inner
.url
.to_file_path()
.expect("builtin sources should not be remote");
Ok(Box::new(PathSource::new(&path, self, gctx)))
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Will using a PathSource directly like this work?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This particularly could have interesting design questions

}
}
}

Expand Down Expand Up @@ -679,6 +710,7 @@ impl fmt::Display for SourceId {
}
SourceKind::LocalRegistry => write!(f, "registry `{}`", url_display(&self.inner.url)),
SourceKind::Directory => write!(f, "dir {}", url_display(&self.inner.url)),
SourceKind::Builtin => write!(f, "builtin {}", url_display(&self.inner.url)),
}
}
}
Expand Down
5 changes: 5 additions & 0 deletions src/cargo/core/summary.rs
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ impl Summary {
)
}
}

let feature_map = build_feature_map(features, &dependencies)?;
Ok(Summary {
inner: Arc::new(Inner {
Expand All @@ -96,6 +97,10 @@ impl Summary {
})
}

// Virtual summary representing a package Cargo knows how to retrieve later
// pub fn new_builtin(pkg_id: PackageId,
// features: &BTreeMap<InternedString, Vec<InternedString>>) ->

pub fn package_id(&self) -> PackageId {
self.inner.package_id
}
Expand Down
1 change: 1 addition & 0 deletions src/cargo/ops/cargo_compile/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -322,6 +322,7 @@ pub fn create_bcx<'a, 'gctx>(
has_dev_units,
ForceAllTargets::No,
dry_run,
true,
)?;
let WorkspaceResolve {
mut pkg_set,
Expand Down
1 change: 1 addition & 0 deletions src/cargo/ops/cargo_output_metadata.rs
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,7 @@ fn build_resolve_graph(
HasDevUnits::Yes,
force_all,
dry_run,
true,
)?;

let package_map: BTreeMap<PackageId, Package> = ws_resolve
Expand Down
1 change: 1 addition & 0 deletions src/cargo/ops/cargo_package/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -778,6 +778,7 @@ fn build_lock(
None,
&[],
true,
true,
)?;

let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;
Expand Down
Loading