Always check package status when version is not pinned

When the version isn't a git sha or a tag, we always check that we have
the latest version of a particular dependency before building. This is
to avoid those awkward moments where someone tries to use something from
the stdlib that is brand new and, despite depending on 'main', gets a
strange build failure about it not being available.

An important note is that we don't actually re-download the package in
that case; we merely check an HTTP ETag obtained from a (cheap) 'HEAD'
request on the package registry. If the ETag hasn't changed, the local
version is still the latest one.

This behavior is completely bypassed if the version is specified using
a git sha or a tag, since in that case we can assume that fetching it
once is enough (and that it won't change). If a package maintainer
force-pushes a tag, however, there may be a discrepancy, and the only
way around it is to `rm -r ./build`.
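
For illustration, the freshness check described above boils down to comparing ETags. A minimal sketch, assuming a `reqwest` client and a registry that answers 'HEAD' requests with an ETag header; the helper name and plumbing below are not taken from this commit:

    use reqwest::Client;

    // Hypothetical helper: returns true when the locally cached archive is
    // still current, by comparing a previously stored ETag with the one
    // returned by a cheap HEAD request to the registry.
    async fn is_cache_fresh(
        http: &Client,
        archive_url: &str,
        cached_etag: Option<&str>,
    ) -> Result<bool, reqwest::Error> {
        let response = http.head(archive_url).send().await?;

        let remote_etag = response
            .headers()
            .get(reqwest::header::ETAG)
            .and_then(|value| value.to_str().ok());

        // Same ETag on both sides: the local copy is already the latest
        // version, so no re-download is needed.
        Ok(matches!((cached_etag, remote_etag), (Some(local), Some(remote)) if local == remote))
    }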
KtorZ 2023-09-08 14:48:50 +02:00 committed by Lucas
parent 3c3a7f2423
commit 87087a1811
3 changed files with 76 additions and 36 deletions

View File

@@ -110,7 +110,7 @@ impl LocalPackages {
                 &p.name != root
                     && !matches!(
                         self.packages.iter().find(|p2| p2.name == p.name),
-                        Some(Dependency { version, .. }) if &p.version == version,
+                        Some(Dependency { version, .. }) if paths::is_git_sha_or_tag(version) && &p.version == version,
                     )
             })
             .collect()
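
For context, `paths::is_git_sha_or_tag` is what decides whether a version counts as pinned. Its body is not part of this diff; a plausible sketch (an assumption, not the project's actual code) might look like:

    // Hypothetical sketch: treat a version as pinned when it is a full
    // 40-character git SHA or a release-style tag such as "v1.2.3" / "1.2.3".
    // The real predicate in `paths` may use different rules.
    fn is_git_sha_or_tag(version: &str) -> bool {
        let is_sha = version.len() == 40 && version.chars().all(|c| c.is_ascii_hexdigit());

        let is_version_tag = version
            .trim_start_matches('v')
            .split('.')
            .all(|part| !part.is_empty() && part.chars().all(|c| c.is_ascii_digit()));

        is_sha || is_version_tag
    }

With such a guard in the `matches!` above, a dependency tracking a moving target like 'main' is never considered satisfied by the local copy alone, so it goes through the freshness check on every build.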
@@ -203,29 +203,47 @@ async fn fetch_missing_packages<T>(
 where
     T: EventListener,
 {
-    let mut count = 0;

     let mut missing = local
         .missing_local_packages(manifest, &project_name)
         .into_iter()
-        .map(|package| {
-            count += 1;
-            package
-        })
         .peekable();

     if missing.peek().is_some() {
         let start = Instant::now();

-        event_listener.handle_event(Event::DownloadingPackage {
-            name: "packages".to_string(),
+        event_listener.handle_event(Event::ResolvingPackages {
+            name: format!("{project_name}"),
         });

         let downloader = Downloader::new(root_path);

-        downloader.download_packages(missing, &project_name).await?;
+        let statuses = downloader
+            .download_packages(event_listener, missing, &project_name)
+            .await?;

-        event_listener.handle_event(Event::PackagesDownloaded { start, count });
+        let downloaded_from_network = statuses
+            .iter()
+            .filter(|(_, downloaded)| *downloaded)
+            .count();
+        if downloaded_from_network > 0 {
+            event_listener.handle_event(Event::PackagesDownloaded {
+                start,
+                count: downloaded_from_network,
+                source: DownloadSource::Network,
+            });
+        }
+        let downloaded_from_cache = statuses
+            .iter()
+            .filter(|(_, downloaded)| !downloaded)
+            .count();
+        if downloaded_from_cache > 0 {
+            event_listener.handle_event(Event::PackagesDownloaded {
+                start,
+                count: downloaded_from_cache,
+                source: DownloadSource::Cache,
+            });
+        }
     }

     Ok(())
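
Because statuses are now reported per source, a terminal `EventListener` can announce network downloads and cache hits separately. A rough, hypothetical listener (not part of this commit) could look like:

    // Assumes the crate's Event, EventListener and DownloadSource types are in scope.
    struct TerminalListener;

    impl EventListener for TerminalListener {
        fn handle_event(&self, event: Event) {
            match event {
                Event::ResolvingPackages { name } => {
                    println!("Resolving dependencies of {name}");
                }
                Event::PackagesDownloaded { start, count, source } => {
                    // `source` displays as "network" or "cache" (see the Display impl further down).
                    println!(
                        "Fetched {count} package(s) from {source} in {}ms",
                        start.elapsed().as_millis()
                    );
                }
                _ => {}
            }
        }
    }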

View File

@@ -28,31 +28,40 @@ impl<'a> Downloader<'a> {
         }
     }

-    pub async fn download_packages<T>(
+    pub async fn download_packages<I, T>(
         &self,
-        packages: T,
+        event_listener: &T,
+        packages: I,
         project_name: &PackageName,
-    ) -> Result<(), Error>
+    ) -> Result<Vec<(PackageName, bool)>, Error>
     where
-        T: Iterator<Item = &'a Package>,
+        T: EventListener,
+        I: Iterator<Item = &'a Package>,
     {
-        let tasks = packages
-            .filter(|package| project_name != &package.name)
-            .map(|package| self.ensure_package_in_build_directory(package));
-
-        let _results = future::try_join_all(tasks).await?;
-
-        Ok(())
+        future::try_join_all(
+            packages
+                .filter(|package| project_name != &package.name)
+                .map(|package| self.ensure_package_in_build_directory(event_listener, package)),
+        )
+        .await
     }

-    pub async fn ensure_package_in_build_directory(
+    pub async fn ensure_package_in_build_directory<T>(
         &self,
+        event_listener: &T,
         package: &Package,
-    ) -> Result<bool, Error> {
-        let cache_key = paths::CacheKey::new(&self.http, package).await?;
-
-        self.ensure_package_downloaded(package, &cache_key).await?;
-        self.extract_package_from_cache(&package.name, &cache_key)
+    ) -> Result<(PackageName, bool), Error>
+    where
+        T: EventListener,
+    {
+        let cache_key = paths::CacheKey::new(&self.http, event_listener, package).await?;
+
+        let downloaded = self
+            .ensure_package_downloaded(package, &cache_key)
             .await
+            .map(|downloaded| (package.name.clone(), downloaded))?;
+        self.extract_package_from_cache(&package.name, &cache_key)
+            .await?;
+        Ok(downloaded)
     }

     pub async fn ensure_package_downloaded(
@@ -101,14 +110,9 @@ impl<'a> Downloader<'a> {
         &self,
         name: &PackageName,
         cache_key: &CacheKey,
-    ) -> Result<bool, Error> {
+    ) -> Result<(), Error> {
         let destination = self.root_path.join(paths::build_deps_package(name));

-        // If the directory already exists then there's nothing for us to do
-        if destination.is_dir() {
-            return Ok(false);
-        }
-
         tokio::fs::create_dir_all(&destination).await?;

         let zipball_path = self.root_path.join(paths::package_cache_zipball(cache_key));
@@ -133,7 +137,7 @@ impl<'a> Downloader<'a> {
         result?;

-        Ok(true)
+        Ok(())
     }
 }

View File

@@ -1,5 +1,5 @@
 use crate::script::EvalInfo;
-use std::path::PathBuf;
+use std::{fmt::Display, path::PathBuf};

 pub trait EventListener {
     fn handle_event(&self, _event: Event) {}
@@ -37,12 +37,30 @@ pub enum Event {
         tests: Vec<EvalInfo>,
     },
     WaitingForBuildDirLock,
-    DownloadingPackage {
+    ResolvingPackages {
         name: String,
     },
+    PackageResolveFallback {
+        name: String,
+    },
     PackagesDownloaded {
         start: tokio::time::Instant,
         count: usize,
+        source: DownloadSource,
     },
     ResolvingVersions,
 }
+
+pub enum DownloadSource {
+    Network,
+    Cache,
+}
+
+impl Display for DownloadSource {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            DownloadSource::Network => write!(f, "network"),
+            DownloadSource::Cache => write!(f, "cache"),
+        }
+    }
+}
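
A tiny check of the new `Display` implementation, written as a hypothetical unit test (not part of the commit):

    #[test]
    fn download_source_display() {
        assert_eq!(DownloadSource::Network.to_string(), "network");
        assert_eq!(DownloadSource::Cache.to_string(), "cache");
    }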