Merge pull request #1 from prefix-dev/feat/extras
Feat/extras
baszalmstra authored Sep 25, 2023
2 parents 425d571 + 896df5b commit 28e4fcf
Showing 6 changed files with 351 additions and 249 deletions.
2 changes: 1 addition & 1 deletion crates/rattler_installs_packages/src/lib.rs
@@ -25,4 +25,4 @@ pub use package_name::{NormalizedPackageName, PackageName, ParsePackageNameError
pub use pep440::Version;
pub use project_info::{ArtifactHashes, ArtifactInfo, DistInfoMetadata, Meta, Yanked};
pub use requirement::PackageRequirement;
pub use specifier::{Specifier, Specifiers};
pub use specifier::{CompareOp, Specifier, Specifiers};
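
With this change, `CompareOp` joins `Specifier` and `Specifiers` in the crate's public API, so downstream code can name the comparison-operator type directly. A minimal sketch of the import, assuming nothing beyond the re-export shown above:

use rattler_installs_packages::{CompareOp, Specifier, Specifiers};
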
14 changes: 7 additions & 7 deletions crates/rattler_installs_packages/src/package_database.rs
@@ -8,11 +8,11 @@ use crate::{
http::{CacheMode, Http},
package_name::PackageName,
project_info::{ArtifactInfo, ProjectInfo},
FileStore,
FileStore, NormalizedPackageName,
};
use elsa::FrozenMap;
use futures::{pin_mut, stream, StreamExt};
use http::header::{CONTENT_TYPE};
use http::header::CONTENT_TYPE;
use http::{HeaderMap, HeaderValue, Method};
use indexmap::IndexMap;
use miette::{self, Diagnostic, IntoDiagnostic};
@@ -34,7 +34,7 @@ pub struct PackageDb {
metadata_cache: FileStore,

/// A cache of package name to version to artifacts.
artifacts: FrozenMap<PackageName, Box<IndexMap<Version, Vec<ArtifactInfo>>>>,
artifacts: FrozenMap<NormalizedPackageName, Box<IndexMap<Version, Vec<ArtifactInfo>>>>,
}

impl PackageDb {
@@ -53,11 +53,12 @@ impl PackageDb {
}

/// Downloads and caches information about available artifacts of a package from the index.
pub async fn available_artifacts(
pub async fn available_artifacts<P: Into<NormalizedPackageName>>(
&self,
p: &PackageName,
p: P,
) -> miette::Result<&IndexMap<Version, Vec<ArtifactInfo>>> {
if let Some(cached) = self.artifacts.get(p) {
let p = p.into();
if let Some(cached) = self.artifacts.get(&p) {
Ok(cached)
} else {
// Start downloading the information for each url.
@@ -337,7 +338,6 @@ mod test {
.get_metadata::<Wheel, _>(&artifact_info)
.await
.unwrap();

}
}

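The net effect of this file's changes: the artifact cache is now keyed by `NormalizedPackageName`, and `available_artifacts` accepts any `P: Into<NormalizedPackageName>`, so callers no longer have to pre-normalize. A minimal sketch of the two call shapes, assuming an already-constructed `package_db: PackageDb` inside an async function returning `miette::Result`:

use std::str::FromStr;
use rattler_installs_packages::{NormalizedPackageName, PackageName};

// An un-normalized name: the conversion happens inside `available_artifacts`.
let name = PackageName::from_str("Flask")?;
let artifacts = package_db.available_artifacts(name.clone()).await?;

// An already-normalized name goes through the same generic parameter.
let normalized: NormalizedPackageName = name.into();
let artifacts = package_db.available_artifacts(normalized).await?;
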
26 changes: 16 additions & 10 deletions crates/rip/src/bin/index.rs
@@ -14,9 +14,7 @@ use tracing_subscriber::fmt::format::FmtSpan;
use tracing_subscriber::util::SubscriberInitExt;
use url::Url;

use rattler_installs_packages::{
Extra, PackageName, PackageRequirement, Wheel,
};
use rattler_installs_packages::{Extra, NormalizedPackageName, PackageName, PackageRequirement, Wheel};
use rip::writer::{global_multi_progress, IndicatifWriter};

#[derive(Parser)]
@@ -77,7 +75,7 @@ pub async fn index(index_url: Url) -> Result<(), miette::Error> {
bar.inc(1);
let package_name = PackageName::from_str(&n)?;
let mut artifacts_per_version =
package_db.available_artifacts(&package_name).await?.clone();
package_db.available_artifacts(package_name.clone()).await?.clone();
artifacts_per_version.sort_keys();

let (chosen_version, available_artifacts) =
@@ -176,15 +174,23 @@ pub fn query_extras() -> Result<(), miette::Error> {
Ok(extras)
})
.into_diagnostic()?;
for requirement in iter
{
let requires_dist =
serde_json::from_str::<Vec<PackageRequirement>>(requirement.into_diagnostic()?.as_str())
.into_diagnostic()?;
for requirement in iter {
let requires_dist = serde_json::from_str::<Vec<PackageRequirement>>(
requirement.into_diagnostic()?.as_str(),
)
.into_diagnostic()?;
total += requires_dist.len();
for req in requires_dist {
if req.extras.len() > 0 {
println!("{}: {}", req.name.as_str(), req.extras.iter().map(|e| e.as_str()).collect::<Vec<_>>().join(", "));
println!(
"{}: {}",
req.name.as_str(),
req.extras
.iter()
.map(|e| e.as_str())
.collect::<Vec<_>>()
.join(", ")
);
count += 1;
}
}
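For context, `query_extras` walks stored `requires_dist` rows, deserializes each as JSON into `Vec<PackageRequirement>`, and reports the requirements that declare extras. The same serde path works on a standalone value; the payload below is a hypothetical example of what such a row might contain:

use miette::IntoDiagnostic;
use rattler_installs_packages::PackageRequirement;

// Hypothetical row contents: a JSON array of PEP 508 requirement strings.
let raw = r#"["click>=8.0", "celery[redis]>=5.0"]"#;
let requires_dist: Vec<PackageRequirement> =
    serde_json::from_str(raw).into_diagnostic()?;
for req in requires_dist {
    if !req.extras.is_empty() {
        // Prints `celery: redis` for the second entry, mirroring the loop above.
        println!(
            "{}: {}",
            req.name.as_str(),
            req.extras
                .iter()
                .map(|e| e.as_str())
                .collect::<Vec<_>>()
                .join(", ")
        );
    }
}
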
1 change: 1 addition & 0 deletions crates/rip/src/lib.rs
@@ -1 +1,2 @@
pub mod pypi_provider;
pub mod writer;
258 changes: 27 additions & 231 deletions crates/rip/src/main.rs
@@ -1,24 +1,19 @@
use std::fmt::{Debug, Display};
use std::io::Write;

use clap::Parser;
use miette::IntoDiagnostic;
use rattler_installs_packages::requirement::Requirement;
use rattler_installs_packages::{
NormalizedPackageName, PackageDb, PackageRequirement, Specifiers, Version, Wheel,
};
use rattler_libsolv_rs::{
Candidates, DefaultSolvableDisplay, Dependencies, DependencyProvider, NameId, Pool, SolvableId,
Solver, SolverCache, VersionSet,
};
use rip::writer::{global_multi_progress, IndicatifWriter};
use std::collections::HashMap;
use std::fmt::{Debug, Display, Formatter};
use std::io::Write;
use tokio::runtime::Handle;
use tokio::task;
use rattler_libsolv_rs::{DefaultSolvableDisplay, DependencyProvider, Solver, VersionSet};
use tracing::Level;
use tracing_subscriber::fmt::format::FmtSpan;
use tracing_subscriber::util::SubscriberInitExt;
use url::Url;

use rattler_installs_packages::requirement::Requirement;
use rattler_installs_packages::{NormalizedPackageName, PackageRequirement};
use rip::pypi_provider::{PypiDependencyProvider, PypiPackageName};
use rip::writer::{global_multi_progress, IndicatifWriter};

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Args {
@@ -31,217 +26,6 @@ struct Args {
index_url: Url,
}

#[repr(transparent)]
#[derive(Clone, Debug, Hash, Eq, PartialEq)]
struct PypiVersionSet(Specifiers);

impl From<Specifiers> for PypiVersionSet {
fn from(value: Specifiers) -> Self {
Self(value)
}
}

impl Display for PypiVersionSet {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}

#[repr(transparent)]
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
struct PypiVersion(Version);

impl VersionSet for PypiVersionSet {
type V = PypiVersion;

fn contains(&self, v: &Self::V) -> bool {
match self.0.satisfied_by(&v.0) {
Err(e) => {
tracing::error!("failed to determine if '{}' contains '{}': {e}", &self.0, v);
false
}
Ok(result) => result,
}
}
}

impl Display for PypiVersion {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", &self.0)
}
}

struct PypiDependencyProvider {
pool: Pool<PypiVersionSet, NormalizedPackageName>,
package_db: PackageDb,
}

impl DependencyProvider<PypiVersionSet, NormalizedPackageName> for PypiDependencyProvider {
fn pool(&self) -> &Pool<PypiVersionSet, NormalizedPackageName> {
&self.pool
}

fn sort_candidates(
&self,
solver: &SolverCache<PypiVersionSet, NormalizedPackageName, Self>,
solvables: &mut [SolvableId],
) {
solvables.sort_by(|&a, &b| {
let solvable_a = solver.pool().resolve_solvable(a);
let solvable_b = solver.pool().resolve_solvable(b);

let a = &solvable_a.inner().0;
let b = &solvable_b.inner().0;

// Sort in reverse order from highest to lowest.
b.cmp(a)
})
}

fn get_candidates(&self, name: NameId) -> Option<Candidates> {
let package_name = self.pool.resolve_package_name(name);
tracing::info!("Fetching metadata for {}", package_name.as_str());

// Get all the metadata for this package
let result = task::block_in_place(move || {
Handle::current().block_on(
self.package_db
.available_artifacts(&package_name.clone().into()),
)
});
let artifacts = match result {
Ok(artifacts) => artifacts,
Err(err) => {
tracing::error!(
"failed to fetch artifacts of '{package_name}': {err:?}, skipping.."
);
return None;
}
};
let mut candidates = Candidates::default();
for (version, artifacts) in artifacts.iter() {
// Filter only artifacts we can work with
let available_artifacts = artifacts
.iter()
// We are only interested in wheels
.filter(|a| a.is::<Wheel>())
// TODO: How to filter prereleases correctly?
.filter(|a| {
a.filename.version().pre.is_none() && a.filename.version().dev.is_none()
})
.collect::<Vec<_>>();

// Check if there are wheel artifacts for this version
if available_artifacts.is_empty() {
// If there are no wheel artifacts, we're just gonna skip it
tracing::warn!("No available wheel artifact {package_name} {version} (skipping)");
continue;
}

// Filter yanked artifacts
let non_yanked_artifacts = artifacts
.iter()
.filter(|a| !a.yanked.yanked)
.collect::<Vec<_>>();

if non_yanked_artifacts.is_empty() {
tracing::info!("{package_name} {version} was yanked (skipping)");
continue;
}
let solvable_id = self
.pool
.intern_solvable(name, PypiVersion(version.clone()));
candidates.candidates.push(solvable_id);
}
Some(candidates)
}

fn get_dependencies(&self, solvable: SolvableId) -> Dependencies {
// TODO: https://peps.python.org/pep-0508/#environment-markers
let env = HashMap::from_iter([
// TODO: We should add some proper values here.
// See: https://peps.python.org/pep-0508/#environment-markers
("os_name", ""),
("sys_platform", ""),
("platform_machine", ""),
("platform_python_implementation", ""),
("platform_release", ""),
("platform_system", ""),
("platform_version", ""),
("python_version", "3.9"),
("python_full_version", ""),
("implementation_name", ""),
("implementation_version", ""),
// TODO: Add support for extras
("extra", ""),
]);

let solvable = self.pool.resolve_solvable(solvable);
let package_name = self.pool.resolve_package_name(solvable.name_id());

let mut dependencies = Dependencies::default();
let result = task::block_in_place(move || {
Handle::current().block_on(
self.package_db
.available_artifacts(&package_name.clone().into()),
)
});

let artifacts_per_version = match result {
Ok(artifacts) => artifacts,
Err(e) => {
tracing::error!("failed to fetch artifacts of '{package_name}': {e:?}, skipping..");
return dependencies;
}
};

let artifacts = artifacts_per_version
.get(&solvable.inner().0.clone())
.expect("strange, no artificats are available");

// Filter yanked artifacts
let non_yanked_artifacts = artifacts
.iter()
.filter(|a| !a.yanked.yanked)
.collect::<Vec<_>>();

if non_yanked_artifacts.is_empty() {
panic!("no artifacts are available after removing yanked artifacts");
}

let (_, metadata) = task::block_in_place(|| {
Handle::current()
.block_on(
self.package_db
.get_metadata::<Wheel, _>(&non_yanked_artifacts),
)
.unwrap()
});

for requirement in metadata.requires_dist {
// Evaluate environment markers
if let Some(env_marker) = &requirement.env_marker_expr {
if !env_marker.eval(&env).unwrap() {
// tracing::info!("skipping dependency {requirement}");
continue;
}
}

// Add the dependency to the pool
let Requirement {
name, specifiers, ..
} = requirement.into_inner();

let dependency_name_id = self.pool.intern_package_name(name);
let version_set_id = self
.pool
.intern_version_set(dependency_name_id, specifiers.into());
dependencies.requirements.push(version_set_id)
}
dependencies
}
}

async fn actual_main() -> miette::Result<()> {
let args = Args::parse();

@@ -267,22 +51,34 @@ async fn actual_main() -> miette::Result<()> {
)
.into_diagnostic()?;

let provider = PypiDependencyProvider {
pool: Pool::new(),
package_db,
};
let provider = PypiDependencyProvider::new(package_db);

// Create a task to solve the specs passed on the command line.
let mut root_requirements = Vec::with_capacity(args.specs.len());
for Requirement {
name, specifiers, ..
name,
specifiers,
extras,
..
} in args.specs.iter().map(PackageRequirement::as_inner)
{
let dependency_package_name = provider.pool().intern_package_name(name.clone());
let dependency_package_name = provider
.pool()
.intern_package_name(PypiPackageName::Base(name.clone().into()));
let version_set_id = provider
.pool()
.intern_version_set(dependency_package_name, specifiers.clone().into());
root_requirements.push(version_set_id);

for extra in extras {
let dependency_package_name = provider
.pool()
.intern_package_name(PypiPackageName::Extra(name.clone().into(), extra.clone()));
let version_set_id = provider
.pool()
.intern_version_set(dependency_package_name, specifiers.clone().into());
root_requirements.push(version_set_id);
}
}

// Solve the jobs
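The `Base`/`Extra` constructor calls above are the heart of the extras feature: every requested extra is interned as its own synthetic package next to the base package, both constrained by the requirement's specifiers, so the solver must settle on a single version that satisfies both. The real definition of `PypiPackageName` lives in the new `pypi_provider` module, whose diff is not rendered on this page; the shape below is an assumption inferred from those constructor calls:

use rattler_installs_packages::{Extra, NormalizedPackageName};

// Assumed shape, inferred from `PypiPackageName::Base(..)` and
// `PypiPackageName::Extra(..)` as used in actual_main above; the derives
// are a guess at what the solver's Pool requires of a name type.
#[derive(Clone, Debug, Hash, Eq, PartialEq)]
enum PypiPackageName {
    Base(NormalizedPackageName),
    Extra(NormalizedPackageName, Extra),
}

Under this encoding, a spec like `flask[dotenv]>=2.0` expands into two root requirements: one on `Base("flask")` and one on `Extra("flask", "dotenv")`, each carrying the same `>=2.0` version set.
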
