Always check package status when version is not pinned
When the version isn't a git sha or a tag, we always check that we have the latest version of a particular dependency before building. This is to avoid those awkward moments where someone tries to use something from the stdlib that is brand new, and despite using 'main' they get a strange build failure about it not being available. An important note is that we don't actually re-download the package when this case occurs; we merely check an HTTP ETag from a (cheap) 'HEAD' request on the package registry. If the ETag hasn't changed, then the local version is up to date. The behavior is completely bypassed if the version is specified using a git sha or a tag, as in that case we can assume that fetching it once is enough (and that it cannot change). If a package maintainer force-pushed a tag, however, there may be a discrepancy, and the only way around that is to `rm -r ./build`.
This commit is contained in:
parent
3c3a7f2423
commit
87087a1811
|
@ -110,7 +110,7 @@ impl LocalPackages {
|
||||||
&p.name != root
|
&p.name != root
|
||||||
&& !matches!(
|
&& !matches!(
|
||||||
self.packages.iter().find(|p2| p2.name == p.name),
|
self.packages.iter().find(|p2| p2.name == p.name),
|
||||||
Some(Dependency { version, .. }) if &p.version == version,
|
Some(Dependency { version, .. }) if paths::is_git_sha_or_tag(version) && &p.version == version,
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
.collect()
|
.collect()
|
||||||
|
@ -203,29 +203,47 @@ async fn fetch_missing_packages<T>(
|
||||||
where
|
where
|
||||||
T: EventListener,
|
T: EventListener,
|
||||||
{
|
{
|
||||||
let mut count = 0;
|
|
||||||
|
|
||||||
let mut missing = local
|
let mut missing = local
|
||||||
.missing_local_packages(manifest, &project_name)
|
.missing_local_packages(manifest, &project_name)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|package| {
|
|
||||||
count += 1;
|
|
||||||
package
|
|
||||||
})
|
|
||||||
.peekable();
|
.peekable();
|
||||||
|
|
||||||
if missing.peek().is_some() {
|
if missing.peek().is_some() {
|
||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
|
|
||||||
event_listener.handle_event(Event::DownloadingPackage {
|
event_listener.handle_event(Event::ResolvingPackages {
|
||||||
name: "packages".to_string(),
|
name: format!("{project_name}"),
|
||||||
});
|
});
|
||||||
|
|
||||||
let downloader = Downloader::new(root_path);
|
let downloader = Downloader::new(root_path);
|
||||||
|
|
||||||
downloader.download_packages(missing, &project_name).await?;
|
let statuses = downloader
|
||||||
|
.download_packages(event_listener, missing, &project_name)
|
||||||
|
.await?;
|
||||||
|
|
||||||
event_listener.handle_event(Event::PackagesDownloaded { start, count });
|
let downloaded_from_network = statuses
|
||||||
|
.iter()
|
||||||
|
.filter(|(_, downloaded)| *downloaded)
|
||||||
|
.count();
|
||||||
|
if downloaded_from_network > 0 {
|
||||||
|
event_listener.handle_event(Event::PackagesDownloaded {
|
||||||
|
start,
|
||||||
|
count: downloaded_from_network,
|
||||||
|
source: DownloadSource::Network,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let downloaded_from_cache = statuses
|
||||||
|
.iter()
|
||||||
|
.filter(|(_, downloaded)| !downloaded)
|
||||||
|
.count();
|
||||||
|
if downloaded_from_cache > 0 {
|
||||||
|
event_listener.handle_event(Event::PackagesDownloaded {
|
||||||
|
start,
|
||||||
|
count: downloaded_from_cache,
|
||||||
|
source: DownloadSource::Cache,
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
|
@ -28,31 +28,40 @@ impl<'a> Downloader<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn download_packages<T>(
|
pub async fn download_packages<I, T>(
|
||||||
&self,
|
&self,
|
||||||
packages: T,
|
event_listener: &T,
|
||||||
|
packages: I,
|
||||||
project_name: &PackageName,
|
project_name: &PackageName,
|
||||||
) -> Result<(), Error>
|
) -> Result<Vec<(PackageName, bool)>, Error>
|
||||||
where
|
where
|
||||||
T: Iterator<Item = &'a Package>,
|
T: EventListener,
|
||||||
|
I: Iterator<Item = &'a Package>,
|
||||||
{
|
{
|
||||||
let tasks = packages
|
future::try_join_all(
|
||||||
.filter(|package| project_name != &package.name)
|
packages
|
||||||
.map(|package| self.ensure_package_in_build_directory(package));
|
.filter(|package| project_name != &package.name)
|
||||||
|
.map(|package| self.ensure_package_in_build_directory(event_listener, package)),
|
||||||
let _results = future::try_join_all(tasks).await?;
|
)
|
||||||
|
.await
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn ensure_package_in_build_directory(
|
pub async fn ensure_package_in_build_directory<T>(
|
||||||
&self,
|
&self,
|
||||||
|
event_listener: &T,
|
||||||
package: &Package,
|
package: &Package,
|
||||||
) -> Result<bool, Error> {
|
) -> Result<(PackageName, bool), Error>
|
||||||
let cache_key = paths::CacheKey::new(&self.http, package).await?;
|
where
|
||||||
self.ensure_package_downloaded(package, &cache_key).await?;
|
T: EventListener,
|
||||||
self.extract_package_from_cache(&package.name, &cache_key)
|
{
|
||||||
|
let cache_key = paths::CacheKey::new(&self.http, event_listener, package).await?;
|
||||||
|
let downloaded = self
|
||||||
|
.ensure_package_downloaded(package, &cache_key)
|
||||||
.await
|
.await
|
||||||
|
.map(|downloaded| (package.name.clone(), downloaded))?;
|
||||||
|
self.extract_package_from_cache(&package.name, &cache_key)
|
||||||
|
.await?;
|
||||||
|
Ok(downloaded)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn ensure_package_downloaded(
|
pub async fn ensure_package_downloaded(
|
||||||
|
@ -101,14 +110,9 @@ impl<'a> Downloader<'a> {
|
||||||
&self,
|
&self,
|
||||||
name: &PackageName,
|
name: &PackageName,
|
||||||
cache_key: &CacheKey,
|
cache_key: &CacheKey,
|
||||||
) -> Result<bool, Error> {
|
) -> Result<(), Error> {
|
||||||
let destination = self.root_path.join(paths::build_deps_package(name));
|
let destination = self.root_path.join(paths::build_deps_package(name));
|
||||||
|
|
||||||
// If the directory already exists then there's nothing for us to do
|
|
||||||
if destination.is_dir() {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
tokio::fs::create_dir_all(&destination).await?;
|
tokio::fs::create_dir_all(&destination).await?;
|
||||||
|
|
||||||
let zipball_path = self.root_path.join(paths::package_cache_zipball(cache_key));
|
let zipball_path = self.root_path.join(paths::package_cache_zipball(cache_key));
|
||||||
|
@ -133,7 +137,7 @@ impl<'a> Downloader<'a> {
|
||||||
|
|
||||||
result?;
|
result?;
|
||||||
|
|
||||||
Ok(true)
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
use crate::script::EvalInfo;
|
use crate::script::EvalInfo;
|
||||||
use std::path::PathBuf;
|
use std::{fmt::Display, path::PathBuf};
|
||||||
|
|
||||||
pub trait EventListener {
|
pub trait EventListener {
|
||||||
fn handle_event(&self, _event: Event) {}
|
fn handle_event(&self, _event: Event) {}
|
||||||
|
@ -37,12 +37,30 @@ pub enum Event {
|
||||||
tests: Vec<EvalInfo>,
|
tests: Vec<EvalInfo>,
|
||||||
},
|
},
|
||||||
WaitingForBuildDirLock,
|
WaitingForBuildDirLock,
|
||||||
DownloadingPackage {
|
ResolvingPackages {
|
||||||
|
name: String,
|
||||||
|
},
|
||||||
|
PackageResolveFallback {
|
||||||
name: String,
|
name: String,
|
||||||
},
|
},
|
||||||
PackagesDownloaded {
|
PackagesDownloaded {
|
||||||
start: tokio::time::Instant,
|
start: tokio::time::Instant,
|
||||||
count: usize,
|
count: usize,
|
||||||
|
source: DownloadSource,
|
||||||
},
|
},
|
||||||
ResolvingVersions,
|
ResolvingVersions,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub enum DownloadSource {
|
||||||
|
Network,
|
||||||
|
Cache,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for DownloadSource {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
DownloadSource::Network => write!(f, "network"),
|
||||||
|
DownloadSource::Cache => write!(f, "cache"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
Loading…
Reference in New Issue