Refactor getPackageConfigs #2908

Merged (7 commits) on May 21, 2024
39 changes: 39 additions & 0 deletions source/dub/internal/utils.d
@@ -21,6 +21,7 @@ import std.conv : to;
import std.exception : enforce;
import std.file;
import std.format;
import std.range;
import std.string : format;
import std.process;
import std.traits : isIntegral;
@@ -795,3 +796,41 @@ void deepCompareImpl (T) (
file, line);
}
}

/** Filters a forward range with the given predicate and returns a prefix range.

This function filters elements in-place, as opposed to returning a new
range. This can be particularly useful when working with arrays, as this
does not require any memory allocations.

This function guarantees that `pred` is called exactly once per element.
Elements for which `pred` returns `false` are removed from the result and may
be overwritten in place, so only the returned prefix should be used afterwards.
*/
auto filterInPlace(alias pred, R)(R elems)
if (isForwardRange!R)
{
import std.algorithm.mutation : move;

R telems = elems.save;
bool any_removed = false;
size_t nret = 0;
foreach (ref el; elems.save) {
if (pred(el)) {
if (any_removed) move(el, telems.front);
telems.popFront();
nret++;
} else any_removed = true;
}
return elems.takeExactly(nret);
}

///
unittest {
int[] arr = [1, 2, 3, 4, 5, 6, 7, 8];

arr = arr.filterInPlace!(e => e % 2 == 0);
assert(arr == [2, 4, 6, 8]);

arr = arr.filterInPlace!(e => e < 5);
assert(arr == [2, 4]);
}
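
A quick illustrative sketch (not part of this change): because the surviving elements are compacted into the front of the same backing array, the returned prefix aliases the original storage. The `.ptr` check below relies on `takeExactly` returning a plain slice when handed an array, and assumes the test sits next to `filterInPlace` in `utils.d`.

unittest {
    int[] arr = [10, 11, 12, 13, 14];
    auto original_ptr = arr.ptr;

    // Drop the odd values; survivors are moved towards the front of `arr`.
    arr = arr.filterInPlace!(e => e % 2 == 0);
    assert(arr == [10, 12, 14]);
    // No allocation took place: the result still points at the original storage.
    assert(arr.ptr is original_ptr);
}
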
191 changes: 102 additions & 89 deletions source/dub/project.d
@@ -648,43 +648,26 @@ class Project {
import std.typecons : Rebindable, rebindable;
import std.range : only;

struct Vertex { size_t pack = size_t.max; string config; }
struct Edge { size_t from, to; }

// prepare by collecting information about all packages in the project
// qualified names and dependencies are cached, to avoid recomputing
// them multiple times during the algorithm
auto packages = collectPackageInformation();

// graph of the project's package configuration dependencies
// (package, config) -> (sub-package, sub-config)
static struct Vertex { size_t pack = size_t.max; string config; }
static struct Edge { size_t from, to; }
Vertex[] configs;
void[0][Vertex] configs_set;
Edge[] edges;

// cached package information to avoid continuous re-computation
// during the resolution process
size_t[string] package_map;
size_t[][] parents;
const(Package)[] package_list;
string[] package_names;
PackageDependency[][] package_dependencies;
package_list.reserve(m_dependencies.length);
package_names.reserve(m_dependencies.length);
package_dependencies.reserve(m_dependencies.length);
foreach (p; getTopologicalPackageList()) {
auto pname = p.name;
package_map[pname] = package_list.length;
package_list ~= p;
package_names ~= pname;
package_dependencies ~= p.getAllDependencies();
}
parents.length = package_list.length;
foreach (pack_idx, pack_deps; package_dependencies) {
foreach (d; pack_deps)
if (auto pi = d.name.toString() in package_map)
parents[*pi] ~= pack_idx;
}

size_t createConfig(size_t pack_idx, string config) {
foreach (i, v; configs)
if (v.pack == pack_idx && v.config == config)
return i;

auto pname = package_names[pack_idx];
auto pname = packages[pack_idx].name;
assert(pname !in m_overriddenConfigs || config == m_overriddenConfigs[pname]);
logDebug("Add config %s %s", pname, config);
auto cfg = Vertex(pack_idx, config);
@@ -697,61 +680,48 @@ class Project {
return (Vertex(pack_idx, config) in configs_set) !is null;
}

void removeConfig(size_t i) {
logDebug("Eliminating config %s for %s", configs[i].config, configs[i].pack);
void removeConfig(size_t config_index) {
logDebug("Eliminating config %s for %s", configs[config_index].config, configs[config_index].pack);
auto had_dep_to_pack = new bool[configs.length];
auto still_has_dep_to_pack = new bool[configs.length];

// eliminate all edges that connect to config 'i'
size_t eti = 0;
foreach (ei, e; edges) {
if (e.to == i) {
// eliminate all edges that connect to config 'config_index' and
// track all connected configs
edges = edges.filterInPlace!((e) {
if (e.to == config_index) {
had_dep_to_pack[e.from] = true;
continue;
} else if (configs[e.to].pack == configs[i].pack) {
return false;
} else if (configs[e.to].pack == configs[config_index].pack) {
still_has_dep_to_pack[e.from] = true;
}
if (e.from == i) continue;

// keep edge
if (eti != ei) edges[eti] = edges[ei];
eti++;
}
edges = edges[0 .. eti];
return e.from != config_index;
});

// mark config as removed
configs_set.remove(configs[i]);
configs[i] = Vertex.init;
configs_set.remove(configs[config_index]);
configs[config_index] = Vertex.init;

// also remove any configs that cannot be satisfied anymore
foreach (j; 0 .. configs.length)
if (j != i && had_dep_to_pack[j] && !still_has_dep_to_pack[j])
if (j != config_index && had_dep_to_pack[j] && !still_has_dep_to_pack[j])
removeConfig(j);
}

bool isReachable(size_t pack, string conf) {
if (pack == configs[0].pack && configs[0].config == conf) return true;
foreach (e; edges)
if (configs[e.to].pack == pack && configs[e.to].config == conf)
return true;
return false;
//return (pack == configs[0].pack && conf == configs[0].config) || edges.canFind!(e => configs[e.to].pack == pack && configs[e.to].config == config);
}

bool[] reachable = new bool[package_list.length]; // reused to avoid continuous re-allocation
bool[] reachable = new bool[packages.length]; // reused to avoid continuous re-allocation
bool isReachableByAllParentPacks(size_t cidx) {
foreach (p; parents[configs[cidx].pack]) reachable[p] = false;
foreach (p; packages[configs[cidx].pack].parents) reachable[p] = false;
foreach (e; edges) {
if (e.to != cidx) continue;
reachable[configs[e.from].pack] = true;
}
foreach (p; parents[configs[cidx].pack])
foreach (p; packages[configs[cidx].pack].parents)
if (!reachable[p])
return false;
return true;
}

string[][] depconfigs = new string[][](package_list.length);
string[][] depconfigs = new string[][](packages.length);
void determineDependencyConfigs(size_t pack_idx, string c)
{
void[0][Edge] edges_set;
@@ -763,18 +733,16 @@ class Project {
edges_set[Edge(from, to)] = (void[0]).init;
}

auto p = package_list[pack_idx];
auto pname = package_names[pack_idx];
auto pdeps = package_dependencies[pack_idx];
auto pack = &packages[pack_idx];

// below we call createConfig for the main package if
// config.length is not zero. Carry on for that case,
// otherwise we've handled the pair (p, c) already
if(haveConfig(pack_idx, c) && !(config.length && pname == m_rootPackage.name && config == c))
if(haveConfig(pack_idx, c) && !(config.length && pack.name == m_rootPackage.name && config == c))
return;

foreach (d; pdeps) {
auto dp = package_map.get(d.name.toString(), size_t.max);
foreach (d; pack.dependencies) {
auto dp = packages.getPackageIndex(d.name.toString());
if (dp == size_t.max) continue;

depconfigs[dp].length = 0;
@@ -785,80 +753,74 @@ class Project {
.filter!(c => haveConfig(dp, c))
.each!((c) { depconfigs[dp] ~= c; });
}
if (auto pc = package_names[dp] in m_overriddenConfigs) {
if (auto pc = packages[dp].name in m_overriddenConfigs) {
setConfigs(only(*pc));
} else {
auto subconf = p.getSubConfiguration(c, package_list[dp], platform);
auto subconf = pack.package_.getSubConfiguration(c, packages[dp].package_, platform);
if (!subconf.empty) setConfigs(only(subconf));
else setConfigs(package_list[dp].getPlatformConfigurations(platform));
else setConfigs(packages[dp].package_.getPlatformConfigurations(platform));
}

// if no valid configuration was found for a dependency, don't include the
// current configuration
if (!depconfigs[dp].length) {
logDebug("Skip %s %s (missing configuration for %s)", pname, c, package_names[dp]);
logDebug("Skip %s %s (missing configuration for %s)", pack.name, c, packages[dp].name);
return;
}
}

// add this configuration to the graph
size_t cidx = createConfig(pack_idx, c);
foreach (d; pdeps) {
if (auto pdp = d.name.toString() in package_map)
foreach (d; pack.dependencies) {
if (auto pdp = d.name.toString() in packages)
foreach (sc; depconfigs[*pdp])
createEdge(cidx, createConfig(*pdp, sc));
}
}

// create a graph of all possible package configurations (package, config) -> (sub-package, sub-config)
string[] allconfigs_path;
void determineAllConfigs(size_t pack_idx)
{
auto p = package_list[pack_idx];
auto pname = package_names[pack_idx];
auto pdeps = package_dependencies[pack_idx];
auto pack = &packages[pack_idx];

auto idx = allconfigs_path.countUntil(pname);
enforce(idx < 0, format("Detected dependency cycle: %s", (allconfigs_path[idx .. $] ~ pname).join("->")));
allconfigs_path ~= pname;
auto idx = allconfigs_path.countUntil(pack.name);
enforce(idx < 0, format("Detected dependency cycle: %s", (allconfigs_path[idx .. $] ~ pack.name).join("->")));
allconfigs_path ~= pack.name;
scope (exit) {
allconfigs_path.length--;
allconfigs_path.assumeSafeAppend;
}

// first, add all dependency configurations
foreach (d; pdeps)
if (auto pi = d.name.toString() in package_map)
foreach (d; pack.dependencies)
if (auto pi = d.name.toString() in packages)
determineAllConfigs(*pi);

// for each configuration, determine the configurations usable for the dependencies
if (auto pc = pname in m_overriddenConfigs)
if (auto pc = pack.name in m_overriddenConfigs)
determineDependencyConfigs(pack_idx, *pc);
else
foreach (c; p.getPlatformConfigurations(platform, p is m_rootPackage && allow_non_library))
foreach (c; pack.package_.getPlatformConfigurations(platform, pack.package_ is m_rootPackage && allow_non_library))
determineDependencyConfigs(pack_idx, c);
}
assert(package_list[0] is m_rootPackage);


// first, create a graph of all possible package configurations
assert(packages[0].package_ is m_rootPackage);
if (config.length) createConfig(0, config);
determineAllConfigs(0);

// successively remove configurations until only one configuration per package is left
// then, successively remove configurations until only one configuration
// per package is left
bool changed;
do {
// remove all configs that are not reachable by all parent packages
changed = false;
foreach (i, ref c; configs) {
if (c == Vertex.init) continue; // ignore deleted configurations
if (!isReachableByAllParentPacks(i)) {
logDebug("%s %s NOT REACHABLE by all of (%s):", c.pack, c.config, parents[c.pack]);
logDebug("%s %s NOT REACHABLE by all of (%s):", c.pack, c.config, packages[c.pack].parents);
removeConfig(i);
changed = true;
}
}

// when all edges are cleaned up, pick one package and remove all but one config
if (!changed) {
foreach (pidx, p; package_list) {
foreach (pidx; 0 .. packages.length) {
size_t cnt = 0;
foreach (i, ref c; configs)
if (c.pack == pidx && ++cnt > 1) {
@@ -880,30 +849,74 @@
string[string] ret;
foreach (c; configs) {
if (c == Vertex.init) continue; // ignore deleted configurations
auto pname = package_names[c.pack];
auto pname = packages[c.pack].name;
assert(ret.get(pname, c.config) == c.config, format("Conflicting configurations for %s found: %s vs. %s", pname, c.config, ret[pname]));
logDebug("Using configuration '%s' for %s", c.config, pname);
ret[pname] = c.config;
}

// check for conflicts (packages missing in the final configuration graph)
auto visited = new bool[](package_list.length);
auto visited = new bool[](packages.length);
void checkPacksRec(size_t pack_idx) {
if (visited[pack_idx]) return;
visited[pack_idx] = true;
auto pname = package_names[pack_idx];
auto pname = packages[pack_idx].name;
auto pc = pname in ret;
enforce(pc !is null, "Could not resolve configuration for package "~pname);
foreach (p, dep; package_list[pack_idx].getDependencies(*pc)) {
foreach (p, dep; packages[pack_idx].package_.getDependencies(*pc)) {
auto deppack = getDependency(p, dep.optional);
if (deppack) checkPacksRec(package_list.countUntil(deppack));
if (deppack) checkPacksRec(packages[].countUntil!(p => p.package_ is deppack));
}
}
checkPacksRec(0);

return ret;
}
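
For orientation, a hypothetical caller of the refactored function. The exact signature of `getPackageConfigs` (parameter order, any defaults) is assumed rather than shown in this diff, the configuration name is made up, and the snippet presumes it lives somewhere inside dub where `Project`, `BuildPlatform` and `logDebug` are already imported.

// Hypothetical helper: resolve and log one configuration per package.
void logResolvedConfigs(Project project, in BuildPlatform platform)
{
    // An empty configuration name lets the resolver enumerate the root
    // package's platform configurations; here "unittest" is requested
    // explicitly (an example name only).
    string[string] configs = project.getPackageConfigs(platform, "unittest");
    foreach (pack_name, cfg; configs)
        logDebug("Using configuration '%s' for %s", cfg, pack_name);
}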

/** Returns an ordered list of all packages in the project, with the
additional ability to look packages up by name.
*/
private auto collectPackageInformation()
const {
static struct PackageInfo {
const(Package) package_;
size_t[] parents;
string name;
PackageDependency[] dependencies;
}

static struct PackageInfoAccessor {
private {
PackageInfo[] m_packages;
size_t[string] m_packageMap;
}

private void initialize(P)(P all_packages, size_t reserve_count)
{
m_packages.reserve(reserve_count);
foreach (p; all_packages) {
auto pname = p.name;
m_packageMap[pname] = m_packages.length;
m_packages ~= PackageInfo(p, null, pname, p.getAllDependencies());
}
foreach (pack_idx, ref pack_info; m_packages)
foreach (d; pack_info.dependencies)
if (auto pi = d.name.toString() in m_packageMap)
m_packages[*pi].parents ~= pack_idx;
}

size_t length() const { return m_packages.length; }
const(PackageInfo)[] opIndex() const { return m_packages; }
ref const(PackageInfo) opIndex(size_t package_index) const { return m_packages[package_index]; }
size_t getPackageIndex(string package_name) const { return m_packageMap.get(package_name, size_t.max); }
const(size_t)* opBinaryRight(string op = "in")(string package_name) const { return package_name in m_packageMap; }
}

PackageInfoAccessor ret;
ret.initialize(getTopologicalPackageList(), m_dependencies.length);
return ret;
}
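
A small sketch of how the accessor is consumed by the resolution code above; the package name is made up and the snippet assumes it runs inside `Project`, where `collectPackageInformation` and `logDebug` are visible.

auto packages = collectPackageInformation();

// Index-based lookup; size_t.max signals an unknown package.
auto idx = packages.getPackageIndex("some-dependency");
if (idx != size_t.max)
    logDebug("%s has %s dependencies", packages[idx].name,
        packages[idx].dependencies.length);

// Pointer-style lookup via `in`, as used when following dependency edges.
if (auto pi = "some-dependency" in packages)
    foreach (parent_idx; packages[*pi].parents)
        logDebug("%s is a parent of %s", packages[parent_idx].name, packages[*pi].name);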

/**
* Fills `dst` with values from this project.
*